text stringlengths 4 1.02M | meta dict |
|---|---|
from msrest.serialization import Model
class PacketCaptureQueryStatusResult(Model):
    """Status of packet capture session.

    :param name: The name of the packet capture resource.
    :type name: str
    :param id: The ID of the packet capture resource.
    :type id: str
    :param capture_start_time: The start time of the packet capture session.
    :type capture_start_time: datetime
    :param packet_capture_status: The status of the packet capture session.
     Possible values include: 'NotStarted', 'Running', 'Stopped', 'Error',
     'Unknown'
    :type packet_capture_status: str or
     ~azure.mgmt.network.v2017_09_01.models.PcStatus
    :param stop_reason: The reason the current packet capture session was
     stopped.
    :type stop_reason: str
    :param packet_capture_error: List of errors of packet capture session.
    :type packet_capture_error: list[str or
     ~azure.mgmt.network.v2017_09_01.models.PcError]
    """

    # Maps each Python attribute to its wire (JSON) key and msrest type.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'capture_start_time': {'key': 'captureStartTime', 'type': 'iso-8601'},
        'packet_capture_status': {'key': 'packetCaptureStatus', 'type': 'str'},
        'stop_reason': {'key': 'stopReason', 'type': 'str'},
        'packet_capture_error': {'key': 'packetCaptureError', 'type': '[str]'},
    }

    def __init__(self, **kwargs):
        super(PacketCaptureQueryStatusResult, self).__init__(**kwargs)
        # Copy each known field out of kwargs, defaulting to None when the
        # field was not supplied by the caller.
        for field in ('name', 'id', 'capture_start_time',
                      'packet_capture_status', 'stop_reason',
                      'packet_capture_error'):
            setattr(self, field, kwargs.get(field, None))
| {
"content_hash": "157fa7cf244a5ac3b0734a0c839c84c5",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 79,
"avg_line_length": 44.04761904761905,
"alnum_prop": 0.6470270270270271,
"repo_name": "lmazuel/azure-sdk-for-python",
"id": "6ac877062e185d1d134d76b0b5a4ed92ce3b2bbd",
"size": "2324",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "azure-mgmt-network/azure/mgmt/network/v2017_09_01/models/packet_capture_query_status_result.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "42572767"
}
],
"symlink_target": ""
} |
"""
This tutorial shows how to use cleverhans.picklable_model
to create models that can be saved for evaluation later.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import logging
import numpy as np
import tensorflow as tf
from tensorflow.python.platform import flags
from cleverhans.attacks import FastGradientMethod
from cleverhans.dataset import MNIST
from cleverhans.loss import CrossEntropy
from cleverhans.serial import save
from cleverhans.utils_tf import model_eval, silence
from cleverhans.train import train
from cleverhans.utils import AccuracyReport, set_log_level
from cleverhans_tutorials.tutorial_models import make_basic_picklable_cnn
# silence() comes from cleverhans.utils_tf; called first so later imports'
# output is muted — presumably suppresses noisy TF warnings (confirm there).
silence()
FLAGS = flags.FLAGS
# Default hyperparameters; each can be overridden from the command line via
# the flags registered in the __main__ block at the bottom of this file.
NB_EPOCHS = 6
BATCH_SIZE = 128
LEARNING_RATE = .001
NB_FILTERS = 64
CLEAN_TRAIN = True
BACKPROP_THROUGH_ATTACK = False
def mnist_tutorial(train_start=0, train_end=60000, test_start=0,
                   test_end=10000, nb_epochs=NB_EPOCHS, batch_size=BATCH_SIZE,
                   learning_rate=LEARNING_RATE,
                   clean_train=CLEAN_TRAIN,
                   testing=False,
                   backprop_through_attack=BACKPROP_THROUGH_ATTACK,
                   nb_filters=NB_FILTERS, num_threads=None,
                   label_smoothing=0.1):
  """
  MNIST cleverhans tutorial
  :param train_start: index of first training set example
  :param train_end: index of last training set example
  :param test_start: index of first test set example
  :param test_end: index of last test set example
  :param nb_epochs: number of epochs to train model
  :param batch_size: size of training batches
  :param learning_rate: learning rate for training
  :param clean_train: perform normal training on clean examples only
                      before performing adversarial training.
  :param testing: if true, complete an AccuracyReport for unit tests
                  to verify that performance is adequate
  :param backprop_through_attack: If True, backprop through adversarial
                                  example construction process during
                                  adversarial training.
  :param nb_filters: model size multiplier (accepted for interface
                     compatibility; not forwarded to the model factory
                     here — make_basic_picklable_cnn uses its defaults)
  :param num_threads: number of intra-op parallelism threads for the TF
                      session, or None for TensorFlow's default
  :param label_smoothing: float, amount of label smoothing for cross entropy
  :return: an AccuracyReport object
  """
  # Object used to keep track of (and return) key accuracies
  report = AccuracyReport()
  # Set TF random seed to improve reproducibility
  tf.set_random_seed(1234)
  # Set logging level to see debug information
  set_log_level(logging.DEBUG)
  # Create TF session
  if num_threads:
    # Bug fix: honor the caller-supplied thread count. This previously
    # hard-coded intra_op_parallelism_threads=1, silently ignoring the
    # num_threads argument.
    config_args = dict(intra_op_parallelism_threads=num_threads)
  else:
    config_args = {}
  sess = tf.Session(config=tf.ConfigProto(**config_args))
  # Get MNIST test data
  mnist = MNIST(train_start=train_start, train_end=train_end,
                test_start=test_start, test_end=test_end)
  x_train, y_train = mnist.get_set('train')
  x_test, y_test = mnist.get_set('test')
  # Use Image Parameters
  img_rows, img_cols, nchannels = x_train.shape[1:4]
  nb_classes = y_train.shape[1]
  # Define input TF placeholder
  x = tf.placeholder(tf.float32, shape=(None, img_rows, img_cols,
                                        nchannels))
  y = tf.placeholder(tf.float32, shape=(None, nb_classes))
  # Train an MNIST model
  train_params = {
      'nb_epochs': nb_epochs,
      'batch_size': batch_size,
      'learning_rate': learning_rate
  }
  eval_params = {'batch_size': batch_size}
  fgsm_params = {
      'eps': 0.3,
      'clip_min': 0.,
      'clip_max': 1.
  }
  rng = np.random.RandomState([2017, 8, 30])

  def do_eval(preds, x_set, y_set, report_key, is_adv=None):
    """Evaluate accuracy of `preds` on a data set, store it on `report`
    under `report_key`, and print a summary unless is_adv is None."""
    acc = model_eval(sess, x, y, preds, x_set, y_set, args=eval_params)
    setattr(report, report_key, acc)
    if is_adv is None:
      report_text = None
    elif is_adv:
      report_text = 'adversarial'
    else:
      report_text = 'legitimate'
    if report_text:
      print('Test accuracy on %s examples: %0.4f' % (report_text, acc))

  if clean_train:
    model = make_basic_picklable_cnn()
    # Tag the model so that when it is saved to disk, future scripts will
    # be able to tell what data it was trained on
    model.dataset_factory = mnist.get_factory()
    preds = model.get_logits(x)
    assert len(model.get_params()) > 0
    loss = CrossEntropy(model, smoothing=label_smoothing)

    def evaluate():
      # Training callback: clean-data accuracy of the clean model.
      do_eval(preds, x_test, y_test, 'clean_train_clean_eval', False)

    train(sess, loss, x_train, y_train, evaluate=evaluate,
          args=train_params, rng=rng, var_list=model.get_params())
    with sess.as_default():
      save("clean_model.joblib", model)
      print("Now that the model has been saved, you can evaluate it in a"
            " separate process using `evaluate_pickled_model.py`. "
            "You should get exactly the same result for both clean and "
            "adversarial accuracy as you get within this program.")
    # Calculate training error
    if testing:
      do_eval(preds, x_train, y_train, 'train_clean_train_clean_eval')
    # Initialize the Fast Gradient Sign Method (FGSM) attack object and
    # graph
    fgsm = FastGradientMethod(model, sess=sess)
    adv_x = fgsm.generate(x, **fgsm_params)
    preds_adv = model.get_logits(adv_x)
    # Evaluate the accuracy of the MNIST model on adversarial examples
    do_eval(preds_adv, x_test, y_test, 'clean_train_adv_eval', True)
    # Calculate training error
    if testing:
      do_eval(preds_adv, x_train, y_train, 'train_clean_train_adv_eval')
    print('Repeating the process, using adversarial training')
  # Create a new model and train it to be robust to FastGradientMethod
  model2 = make_basic_picklable_cnn()
  # Tag the model so that when it is saved to disk, future scripts will
  # be able to tell what data it was trained on
  model2.dataset_factory = mnist.get_factory()
  fgsm2 = FastGradientMethod(model2, sess=sess)

  def attack(x):
    # Adversarial-example generator used inside the training loss.
    return fgsm2.generate(x, **fgsm_params)

  loss2 = CrossEntropy(model2, smoothing=label_smoothing, attack=attack)
  preds2 = model2.get_logits(x)
  adv_x2 = attack(x)
  if not backprop_through_attack:
    # For the fgsm attack used in this tutorial, the attack has zero
    # gradient so enabling this flag does not change the gradient.
    # For some other attacks, enabling this flag increases the cost of
    # training, but gives the defender the ability to anticipate how
    # the attacker will change their strategy in response to updates to
    # the defender's parameters.
    adv_x2 = tf.stop_gradient(adv_x2)
  preds2_adv = model2.get_logits(adv_x2)

  def evaluate2():
    # Training callback for the adversarially trained model.
    # Accuracy of adversarially trained model on legitimate test inputs
    do_eval(preds2, x_test, y_test, 'adv_train_clean_eval', False)
    # Accuracy of the adversarially trained model on adversarial examples
    do_eval(preds2_adv, x_test, y_test, 'adv_train_adv_eval', True)

  # Perform and evaluate adversarial training
  train(sess, loss2, x_train, y_train, evaluate=evaluate2,
        args=train_params, rng=rng, var_list=model2.get_params())
  with sess.as_default():
    save("adv_model.joblib", model2)
    print("Now that the model has been saved, you can evaluate it in a "
          "separate process using "
          "`python evaluate_pickled_model.py adv_model.joblib`. "
          "You should get exactly the same result for both clean and "
          "adversarial accuracy as you get within this program."
          " You can also move beyond the tutorials directory and run the "
          " real `compute_accuracy.py` script (make sure cleverhans/scripts "
          "is in your PATH) to see that this FGSM-trained "
          "model is actually not very robust---it's just a model that trains "
          " quickly so the tutorial does not take a long time")
  # Calculate training errors
  if testing:
    do_eval(preds2, x_train, y_train, 'train_adv_train_clean_eval')
    do_eval(preds2_adv, x_train, y_train, 'train_adv_train_adv_eval')
  return report
def main(argv=None):
  """Entry point: run the MNIST tutorial with command-line flag values."""
  from cleverhans_tutorials import check_installation
  check_installation(__file__)
  # Collect flag values once, then forward them to the tutorial.
  tutorial_kwargs = dict(
      nb_epochs=FLAGS.nb_epochs,
      batch_size=FLAGS.batch_size,
      learning_rate=FLAGS.learning_rate,
      clean_train=FLAGS.clean_train,
      backprop_through_attack=FLAGS.backprop_through_attack,
      nb_filters=FLAGS.nb_filters)
  mnist_tutorial(**tutorial_kwargs)
if __name__ == '__main__':
  # Flags are registered only when this file runs as a script; their
  # defaults are the module-level constants defined near the top.
  flags.DEFINE_integer('nb_filters', NB_FILTERS, 'Model size multiplier')
  flags.DEFINE_integer('nb_epochs', NB_EPOCHS,
                       'Number of epochs to train model')
  flags.DEFINE_integer('batch_size', BATCH_SIZE, 'Size of training batches')
  flags.DEFINE_float('learning_rate', LEARNING_RATE,
                     'Learning rate for training')
  flags.DEFINE_bool('clean_train', CLEAN_TRAIN, 'Train on clean examples')
  flags.DEFINE_bool('backprop_through_attack', BACKPROP_THROUGH_ATTACK,
                    ('If True, backprop through adversarial example '
                     'construction process during adversarial training'))
  # tf.app.run() parses the flags and then invokes main().
  tf.app.run()
| {
"content_hash": "5bdaa784a49bfb51f5ee37d679f771e8",
"timestamp": "",
"source": "github",
"line_count": 242,
"max_line_length": 78,
"avg_line_length": 37.735537190082646,
"alnum_prop": 0.6766316250547525,
"repo_name": "carlini/cleverhans",
"id": "2ae9685f666a93cb405f3b1f125548cb7334948e",
"size": "9132",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cleverhans_tutorials/mnist_tutorial_picklable.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "242"
},
{
"name": "Python",
"bytes": "734893"
},
{
"name": "Shell",
"bytes": "2831"
}
],
"symlink_target": ""
} |
"""
Load and test a pre-trained model against the entire data subset.
python LUNA16_inferenceTesting_ResNet.py -b gpu -i 0 -z 128
"""
from neon import logger as neon_logger
from neon.models import Model
from aeon import DataLoader
from neon.util.argparser import NeonArgparser, extract_valid_args
from neon.transforms import Misclassification, PrecisionRecall
from neon.backends import gen_backend
from neon.data.dataloader_transformers import TypeCast, OneHot
import numpy as np
import pandas as pd
# parse the command line arguments
parser = NeonArgparser(__doc__)
args = parser.parse_args()
#testFileName = 'manifest_subset2_augmented.csv'
testFileName = 'manifest_subset9_augmented.csv'
#testFileName = 'manifest_subset9_ALL.csv'
# hyperparameters
num_epochs = args.epochs
# Next line gets rid of the deterministic warning
args.deterministic = None
# Fall back to a fixed seed for reproducibility when none was given.
if (args.rng_seed is None):
    args.rng_seed = 16
print('Batch size = {}'.format(args.batch_size))
# setup backend
be = gen_backend(**extract_valid_args(args, gen_backend))
# Patch edge length in pixels — presumably must match the size the model
# was trained with; confirm against the training script.
window_size = 64
# Set up the testset to load via aeon
image_config = dict(height=window_size, width=window_size, channels=3)
label_config = dict(binary=False)
config = dict(type="image,label",
              image=image_config,
              label=label_config,
              manifest_filename=testFileName,
              minibatch_size=args.batch_size,
              subset_fraction=1, cache_directory='')
test_set = DataLoader(config, be)
test_set = TypeCast(test_set, index=0, dtype=np.float32) # cast image to float
# Load the serialized, pre-trained ResNet model from disk.
lunaModel = Model('LUNA16_resnet.prm')
def round(arr, threshold=0.5):
    '''
    Round to an arbitrary threshold.
    Values strictly above `threshold` go to 1; values at or below go to 0.

    NOTE: this deliberately shadows the builtin `round`; the name is kept
    for backward compatibility with existing callers in this script.

    Args:
        arr: array-like of scores/probabilities.
        threshold: decision threshold (default 0.5).

    Returns:
        Float ndarray of 0s and 1s with the same shape as `arr`.
    '''
    arr = np.asarray(arr)
    out = np.zeros(np.shape(arr))
    # Boolean-mask assignment is correct for any array rank. The previous
    # `out[np.where(arr > threshold)[0]] = 1` form only worked for 1-D
    # input (for 2-D it would have selected entire rows).
    out[arr > threshold] = 1
    return out
# Run the whole test set through the model; return_targets also yields the
# ground-truth labels so predictions can be scored below.
prob, target = lunaModel.get_outputs(test_set, return_targets=True)
# Keep only the first output column as a 1-D vector — presumably the
# positive-class probability; confirm against the model's output layout.
prob = prob.T[0]
target = target.T[0]
np.set_printoptions(precision=3, suppress=True)
#print(' ')
#print(prob)
#print(' ')
# Binarize at a high threshold (0.9) — looks chosen to favor precision on
# the positive class; TODO confirm intent with the experiment notes.
pred = round(prob, threshold=0.9).astype(int)
# print(pred),
# print('predictions')
# print(target),
# print('targets')
# print('Predict 1 count = {}'.format(len(np.where(pred == 1)[0])))
# print('True 1 count= {}'.format(len(np.where(target == 1)[0])))
# print('Predict 0 count = {}'.format(len(np.where(pred == 0)[0])))
# print('True 0 count= {}'.format(len(np.where(target == 0)[0])))
# target_zero_idx = np.where(target == 0)[0]
# target_one_idx = np.where(target == 1)[0]
# false_positive = len(np.where(pred[target_zero_idx] == 1)[0])
# false_negative = len(np.where(pred[target_one_idx] == 0)[0])
# print('False positive count = {}'.format(false_positive))
# print('False negative count = {}'.format(false_negative))
# Always-true guard kept from earlier experimentation; the block prints a
# per-class precision/recall/F1 report.
if (True):
    # print('All equal = {}'.format(np.array_equal(pred, target)))
    # print('Incorrect prediction probabilities = {}'.format(prob[np.where(pred != target)[0]]))
    # print('Indices = {}'.format(np.where(pred != target)[0]))
    from sklearn.metrics import classification_report
    print(classification_report(target, pred, target_names=['Class 0', 'Class 1']))
# neon_logger.display('Calculating metrics on the test set. This could take a while...')
# misclassification = lunaModel.eval(test_set, metric=Misclassification())
# neon_logger.display('Misclassification error (test) = {}'.format(misclassification))
# precision, recall = lunaModel.eval(test_set, metric=PrecisionRecall(num_classes=2))
# neon_logger.display('Precision = {}, Recall = {}'.format(precision, recall))
| {
"content_hash": "ec3d654bd59880ea29b4e2342214f257",
"timestamp": "",
"source": "github",
"line_count": 116,
"max_line_length": 94,
"avg_line_length": 30.96551724137931,
"alnum_prop": 0.6932071269487751,
"repo_name": "mas-dse-greina/neon",
"id": "3c86a99193e70e1480d3f628cbfdf180f45464bb",
"size": "4360",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "luna16/old_code/LUNA16_inferenceTesting_ResNet.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "4879489"
},
{
"name": "Python",
"bytes": "177823"
},
{
"name": "Shell",
"bytes": "8434"
}
],
"symlink_target": ""
} |
from base import APITestCase
from vilya.models.project import CodeDoubanProject
from vilya.models.project_issue import ProjectIssue
from vilya.models.issue_comment import IssueComment
from tests.utils import delete_project
class ProjectIssueCommentsTest(APITestCase):
    """API tests for listing, creating, reading, updating, and deleting
    comments on a project issue, including author-only permission rules."""

    def setUp(self):
        super(ProjectIssueCommentsTest, self).setUp()
        project_name = "code"
        # Make sure a stale project from a previous run does not interfere.
        delete_project(project_name)
        self.project = CodeDoubanProject.add(
            project_name,
            owner_id="lisong_intern",
            summary="test",
            product="fire"
        )
        self.issue = ProjectIssue.add(
            "test title",
            "test desc",
            "test",
            project=self.project.id
        )
        # Two comments by two distinct users so the author-only rules can
        # be exercised with the matching / non-matching tokens below.
        self.comment1 = IssueComment.add(
            self.issue.issue_id, 'content1', 'test1')
        self.comment2 = IssueComment.add(
            self.issue.issue_id, 'content2', 'test2')
        self.api_token = self.create_api_token('test1')
        self.api_token2 = self.create_api_token('test2')

    def _comments_url(self):
        """URL of the comment collection for the test issue."""
        return "/api/%s/issues/%s/comments/" % (
            self.project.name, self.issue.number)

    def _comment_url(self, comment):
        """URL of a single comment on the test issue."""
        return "/api/%s/issues/%s/comments/%s/" % (
            self.project.name, self.issue.number, comment.number)

    def test_get_issue_comments(self):
        # Listing returns both comments created in setUp.
        ret = self.app.get(self._comments_url(), status=200).json
        self.assertTrue(isinstance(ret, list))
        self.assertEquals(len(ret), 2)

    def test_create_issue_comment(self):
        api_token = self.create_api_token('test_user')
        comment_content = "sent from iCode"
        ret = self.app.post_json(
            self._comments_url(),
            dict(content=comment_content),
            headers=dict(Authorization="Bearer %s" % api_token.token)
        ).json
        self.assertEquals(ret['content'], comment_content)

    def test_get_a_single_issue_comment(self):
        ret = self.app.get(self._comment_url(self.comment1)).json
        self.assertEquals(ret['content'], self.comment1.content)

    def test_delete_a_single_issue_comment(self):
        url = self._comment_url(self.comment1)
        # Anonymous requests are rejected.
        ret = self.app.delete(url, status=401).json
        self.assertProblemType(ret['type'], "unauthorized")
        # A user other than the author may not delete.
        ret = self.app.delete(
            url,
            status=403,
            headers=dict(Authorization="Bearer %s" % self.api_token2.token)
        ).json
        self.assertProblemType(ret['type'], "not_the_author")
        # The author can delete.
        ret = self.app.delete(
            url,
            status=204,
            headers=dict(Authorization="Bearer %s" % self.api_token.token)
        )
        # The comment is gone afterwards.
        ret = self.app.get(url, status=404)

    def test_update_a_single_issue_comment(self):
        url = self._comment_url(self.comment1)
        new_comment_content = "sent from iCode"
        # Anonymous requests are rejected.
        ret = self.app.patch_json(
            url,
            dict(content=new_comment_content),
            status=401
        ).json
        self.assertProblemType(ret['type'], "unauthorized")
        # A user other than the author may not edit.
        ret = self.app.patch_json(
            url,
            dict(content=new_comment_content),
            status=403,
            headers=dict(Authorization="Bearer %s" % self.api_token2.token)
        ).json
        self.assertProblemType(ret['type'], "not_the_author")
        # A malformed JSON body is rejected with 400.
        ret = self.app.request(
            url,
            method="PATCH",
            body="not a valid json",
            status=400,
            headers=dict(Authorization="Bearer %s" % self.api_token.token)
        ).json
        self.assertProblemType(ret['type'], "not_json")
        # The author can edit; the updated content is echoed back.
        ret = self.app.patch_json(
            url,
            dict(content=new_comment_content),
            status=200,
            headers=dict(Authorization="Bearer %s" % self.api_token.token)
        ).json
        self.assertEquals(ret['content'], new_comment_content)
| {
"content_hash": "db8f293b233b56a2c5c80a46cd0b135e",
"timestamp": "",
"source": "github",
"line_count": 170,
"max_line_length": 75,
"avg_line_length": 33.129411764705885,
"alnum_prop": 0.5262784090909091,
"repo_name": "douban/code",
"id": "8128513ad632823fe6946701dde32d5a92a16f97",
"size": "5650",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tests/webtests/api/test_project_issue_comments.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "7956218"
},
{
"name": "HTML",
"bytes": "548630"
},
{
"name": "JavaScript",
"bytes": "7771620"
},
{
"name": "Makefile",
"bytes": "568"
},
{
"name": "Mako",
"bytes": "11668"
},
{
"name": "Python",
"bytes": "1486693"
},
{
"name": "Shell",
"bytes": "61416"
}
],
"symlink_target": ""
} |
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: adds the temporary attribute storage table
    ('server_tmpattribute') for the server app."""

    def forwards(self, orm):
        # Adding model 'TmpAttribute'
        db.create_table(u'server_tmpattribute', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('db_key', self.gf('django.db.models.fields.CharField')(max_length=255, db_index=True)),
            ('db_value', self.gf('src.utils.picklefield.PickledObjectField')(null=True)),
            ('db_lock_storage', self.gf('django.db.models.fields.TextField')(blank=True)),
            ('db_obj_id', self.gf('django.db.models.fields.IntegerField')(null=True)),
            ('db_obj_type', self.gf('django.db.models.fields.CharField')(max_length=10, null=True)),
            # NOTE(review): auto_now_add=False here, but the frozen model
            # state below says 'auto_now_add': 'True' — confirm which is
            # intended before touching this historical migration.
            ('db_date_created', self.gf('django.db.models.fields.DateTimeField')(editable=True, auto_now_add=False)),
        ))
        db.send_create_signal('server', ['TmpAttribute'])

    def backwards(self, orm):
        # Deleting model 'TmpAttribute'
        db.delete_table(u'server_tmpattribute')

    # Frozen ORM state used by South to reconstruct the models as they
    # existed at the time of this migration.
    models = {
        u'server.serverconfig': {
            'Meta': {'object_name': 'ServerConfig'},
            'db_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
            'db_value': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        },
        'server.tmpattribute': {
            'Meta': {'object_name': 'TmpAttribute'},
            'db_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
            'db_lock_storage': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'db_obj_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
            'db_obj_type': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True'}),
            'db_value': ('src.utils.picklefield.PickledObjectField', [], {'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'db_date_created':('django.db.models.fields.DateTimeField',[],{'editable':'True', 'auto_now_add':'True'}),
        }
    }

    complete_apps = ['server']
| {
"content_hash": "9d0a201d6b14dd6da12a4d9d7d7ab4ec",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 118,
"avg_line_length": 49.87234042553192,
"alnum_prop": 0.5840443686006825,
"repo_name": "google-code-export/evennia",
"id": "19b8019fd93f4a137bd33e0e0b1564a0831a493b",
"size": "2368",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/server/migrations/0003_add_tmpattr.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ApacheConf",
"bytes": "4450"
},
{
"name": "CSS",
"bytes": "19010"
},
{
"name": "Emacs Lisp",
"bytes": "2734"
},
{
"name": "Gettext Catalog",
"bytes": "12141"
},
{
"name": "HTML",
"bytes": "31276"
},
{
"name": "JavaScript",
"bytes": "10492"
},
{
"name": "Python",
"bytes": "2789785"
}
],
"symlink_target": ""
} |
"""Utility functions for system identification."""
import matplotlib
import matplotlib.mlab as mlab
import numpy as np
import pylab
def EstimateTransferFunction(x, y, *args, **kwargs):
  """Estimate the transfer function from x to y using Welch's method.

  Returns:
    Tuple (H, f), where H = Pxy / Pxx is the complex transfer function
    estimate and f is the vector of frequencies it was evaluated at.
  """
  cross_spectrum, frequencies = mlab.csd(x, y, *args, **kwargs)
  input_spectrum, _ = mlab.psd(x, *args, **kwargs)
  return cross_spectrum / input_spectrum, frequencies
def AddTransferFunctionToPlot(axes, frequencies, transfer_function_data,
                              coherence, style, label, do_unwrap=False,
                              min_coherence=0.0):
  """Add transfer function data to the given set of axes.

  Args:
    axes: Array of up to three axes, into which to plot the transfer
        function magnitude, phase, and coherence, respectively.
    frequencies: Array of frequencies at which the transfer function
        was measured.
    transfer_function_data: Array of complex-valued transfer function
        measurements.
    coherence: Array of coherence measurements.
    style: Style-string to be passed to Matplotlib.
    label: Descriptive string to include in plot legend.
    do_unwrap: Whether phase should be unwrapped.
    min_coherence: Minimum coherence required to show point in plot.
  """
  # Keep only the points whose coherence exceeds the threshold.
  keep = coherence > min_coherence
  kept_freqs = frequencies[keep]
  kept_data = transfer_function_data[keep]

  # Magnitude in decibels.
  axes[0].semilogx(kept_freqs, 20.0 * np.log10(np.abs(kept_data)), style)
  if len(axes) > 1:
    # Phase in degrees; unwrapping is applied to the kept points only,
    # matching the masked data being plotted.
    phase_rad = np.angle(kept_data)
    if do_unwrap:
      phase_rad = np.unwrap(phase_rad)
    axes[1].semilogx(kept_freqs, np.rad2deg(phase_rad), style)
  if len(axes) > 2:
    # Coherence is plotted for all frequencies, not just the kept ones.
    axes[2].semilogx(frequencies, coherence, style, label=label)
def MakeAxes():
  """Create the magnitude/phase/coherence axes for a Bode plot.

  Returns:
    List of three axes stacked vertically on a 5-row grid, with the phase
    and coherence axes sharing the magnitude axis's x-axis.
  """
  magnitude_axis = pylab.subplot2grid((5, 1), (0, 0), rowspan=2)
  phase_axis = pylab.subplot2grid((5, 1), (2, 0), rowspan=2,
                                  sharex=magnitude_axis)
  coherence_axis = pylab.subplot2grid((5, 1), (4, 0), sharex=magnitude_axis)
  return [magnitude_axis, phase_axis, coherence_axis]
def SetAxes(axes):
  """Set the default axes properties for a Bode plot."""

  def _AddGrid(ax):
    # Solid gray grid lines on both major and minor ticks.
    ax.grid(b=True, which='major', linestyle='solid', color='gray')
    ax.grid(b=True, which='minor', linestyle='solid', color='gray')

  magnitude_ax = axes[0]
  _AddGrid(magnitude_ax)
  magnitude_ax.set_ylim([-20.0, 3.0])
  magnitude_ax.set_yticks(np.arange(-18.0, 3.0, 3.0))
  magnitude_ax.set_ylabel('Magnitude [dB]')
  magnitude_ax.set_title(
      'Transfer function: commanded value to measured value.')

  phase_ax = axes[1]
  _AddGrid(phase_ax)
  phase_ax.set_ylim([-135.0, 45.0])
  phase_ax.set_yticks(np.arange(-135.0, 45.0, 15.0))
  phase_ax.set_ylabel('Phase [deg]')

  coherence_ax = axes[2]
  coherence_ax.set_ylabel('Coherence [#]')
  coherence_ax.set_xlabel('Frequency [Hz]')
  _AddGrid(coherence_ax)

  # Common x-axis configuration for all three axes.
  for ax in axes:
    ax.set_xlim([0.09, 50.0])
    ax.set_xticks([0.1, 0.2, 0.3, 0.5, 0.7, 1.0, 2.0, 3.0, 5.0, 7.0, 10.0])
    ax.get_xaxis().set_major_formatter(matplotlib.ticker.ScalarFormatter())
def PlotTransferFunction(cmd_array, val_array, labels, sample_rate, axes=None,
                         settling_time=0.0, shutdown_time=0.0, nfft=128,
                         detrend='mean', do_unwrap=False, min_coherence=0.0):
  """Compute and plot transfer functions between two arrays of time series.

  Args:
    cmd_array: Array of command (input) values. May be multi-dimensional.
    val_array: Array of measured (output) values. Same dimensions as cmd_array.
    labels: Array of strings to use as legend labels.
    sample_rate: Sample rate in Hz.
    axes: Array of three axes for plotting, or None to create axes.
    settling_time: Duration of start of timeseries to ignore.
    shutdown_time: Duration of end of timeseries to ignore.
    nfft: Number of points at which to compute the Fourier transform.
    detrend: Detrend method: 'none', 'mean', or 'linear'.
    do_unwrap: Boolean specifying whether phase should be unwrapped.
    min_coherence: Minimum coherence required to include data points in plots.

  Returns:
    Array of axes containing resulting plots.
    Vector of frequencies at which transfer function was computed.
    Transfer function data.
    Coherence data.
  """
  assert cmd_array.shape == val_array.shape
  assert cmd_array.shape[1] == len(labels)
  assert sample_rate > 0.0
  if axes is None:
    axes = MakeAxes()
  # Common keyword arguments to Welch's method functions.
  psd_kwargs = {'Fs': sample_rate,
                'NFFT': nfft,
                'detrend': {'none': mlab.detrend_none,
                            'mean': mlab.detrend_mean,
                            'linear': mlab.detrend_linear}[detrend],
                'window': mlab.window_hanning}
  # Fix: np.round returns a float, and float slice indices raise TypeError;
  # cast the sample counts to int. Also hoisted out of the loop below since
  # they do not depend on the channel index.
  settling_samples = int(np.round(sample_rate * settling_time))
  shutdown_samples = int(np.round(sample_rate * shutdown_time))
  assert settling_samples + shutdown_samples < val_array.shape[0]
  # Slice off the settling and shutdown transients (None keeps the full
  # extent on that side when the corresponding count is zero).
  selected = slice(settling_samples if settling_samples > 0 else None,
                   -shutdown_samples if shutdown_samples > 0 else None)
  for i in range(cmd_array.shape[1]):
    val = np.array(val_array[selected, i])
    cmd = np.array(cmd_array[selected, i])
    txy, f = EstimateTransferFunction(cmd, val, **psd_kwargs)
    coh, _ = mlab.cohere(cmd, val, **psd_kwargs)
    AddTransferFunctionToPlot(axes, f, txy, coh, '-', labels[i],
                              do_unwrap=do_unwrap,
                              min_coherence=min_coherence)
  return axes, f, txy, coh
| {
"content_hash": "6b7446df9ed6ed710247ef766cef8f0b",
"timestamp": "",
"source": "github",
"line_count": 144,
"max_line_length": 80,
"avg_line_length": 38.236111111111114,
"alnum_prop": 0.6512895023610606,
"repo_name": "google/makani",
"id": "136f2c3732e482bca69b4a563331ab3843966236",
"size": "6095",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "analysis/sys_id/sys_id_util.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "119408"
},
{
"name": "C",
"bytes": "20174258"
},
{
"name": "C++",
"bytes": "30512322"
},
{
"name": "CSS",
"bytes": "8921"
},
{
"name": "Dockerfile",
"bytes": "1381"
},
{
"name": "Emacs Lisp",
"bytes": "1134"
},
{
"name": "HTML",
"bytes": "65745"
},
{
"name": "Java",
"bytes": "1558475"
},
{
"name": "JavaScript",
"bytes": "130727"
},
{
"name": "Jupyter Notebook",
"bytes": "1154728"
},
{
"name": "MATLAB",
"bytes": "1026162"
},
{
"name": "Makefile",
"bytes": "2798"
},
{
"name": "Objective-C",
"bytes": "62972"
},
{
"name": "Perl",
"bytes": "870724"
},
{
"name": "Python",
"bytes": "5552781"
},
{
"name": "RPC",
"bytes": "195736"
},
{
"name": "Roff",
"bytes": "2567875"
},
{
"name": "SWIG",
"bytes": "8663"
},
{
"name": "Shell",
"bytes": "297941"
},
{
"name": "Starlark",
"bytes": "462998"
},
{
"name": "Vim Script",
"bytes": "2281"
},
{
"name": "XC",
"bytes": "50398"
},
{
"name": "XS",
"bytes": "49289"
}
],
"symlink_target": ""
} |
from django.db.models.signals import post_save, post_delete
from django.dispatch import receiver
from django.utils.timezone import now
from geotrek.common.models import Attachment, AccessibilityAttachment
@receiver(post_save, sender=Attachment)
@receiver(post_save, sender=AccessibilityAttachment)
@receiver(post_delete, sender=Attachment)
@receiver(post_delete, sender=AccessibilityAttachment)
def update_content_object_date_update(sender, instance, *args, **kwargs):
    """Bump the parent object's ``date_update`` whenever one of its
    attachments is created, edited or deleted, to avoid serving a stale
    cached copy of the object."""
    target = instance.content_object
    if not target or not hasattr(target, 'date_update'):
        # Orphan attachment, or the parent model has no date_update field.
        return
    target.date_update = now()
    target.save(update_fields=['date_update'])
| {
"content_hash": "50376e204370d74b829647c1e0867aa4",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 98,
"avg_line_length": 46.11764705882353,
"alnum_prop": 0.7729591836734694,
"repo_name": "GeotrekCE/Geotrek-admin",
"id": "8f1cf9bf3778ea83bececa6166020b294ae3c5ab",
"size": "784",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "geotrek/common/signals.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "46138"
},
{
"name": "Dockerfile",
"bytes": "1816"
},
{
"name": "HTML",
"bytes": "274524"
},
{
"name": "JavaScript",
"bytes": "231326"
},
{
"name": "Makefile",
"bytes": "1909"
},
{
"name": "PLpgSQL",
"bytes": "78024"
},
{
"name": "Python",
"bytes": "3456569"
},
{
"name": "SCSS",
"bytes": "7179"
},
{
"name": "Shell",
"bytes": "14369"
}
],
"symlink_target": ""
} |
"""
OpenStreetMaps geocoder, contributed by Alessandro Pasotti of ItOpen.
"""
from geopy.geocoders.base import (
Geocoder,
DEFAULT_FORMAT_STRING,
DEFAULT_TIMEOUT,
DEFAULT_SCHEME
)
from geopy.compat import urlencode
from geopy.location import Location
from geopy.util import logger
from geopy.exc import GeocoderQueryError
__all__ = ("Nominatim", )
class Nominatim(Geocoder):
    """
    Nominatim geocoder for OpenStreetMap servers. Documentation at:
        https://wiki.openstreetmap.org/wiki/Nominatim

    Note that Nominatim does not support SSL.
    """

    # Keys accepted when the caller passes a structured (dict) query;
    # any other keys are silently dropped.
    structured_query_params = {
        'street',
        'city',
        'county',
        'state',
        'country',
        'postalcode',
    }

    def __init__(
            self,
            format_string=DEFAULT_FORMAT_STRING,
            view_box=(-180, -90, 180, 90),
            country_bias=None,
            timeout=DEFAULT_TIMEOUT,
            proxies=None,
            domain='nominatim.openstreetmap.org',
            scheme=DEFAULT_SCHEME
    ):  # pylint: disable=R0913
        """
        :param string format_string: String containing '%s' where the
            string to geocode should be interpolated before querying the
            geocoder. For example: '%s, Mountain View, CA'. The default
            is just '%s'.

        :param tuple view_box: Coordinates to restrict search within.

        :param string country_bias: Bias results to this country.

        :param int timeout: Time, in seconds, to wait for the geocoding
            service to respond before raising a
            :class:`geopy.exc.GeocoderTimedOut` exception.

        :param dict proxies: If specified, routes this geocoder's requests
            through the specified proxy. E.g., {"https": "192.0.2.0"}. For
            more information, see documentation on
            :class:`urllib2.ProxyHandler`.

            .. versionadded:: 0.96

        :param string domain: Should be the localized Openstreetmap domain to
            connect to. The default is 'nominatim.openstreetmap.org', but you
            can change it to a domain of your own.

            .. versionadded:: 1.8.2

        :param string scheme: Use 'https' or 'http' as the API URL's scheme.
            Default is https. Note that SSL connections' certificates are not
            verified.

            .. versionadded:: 1.8.2
        """
        super(Nominatim, self).__init__(
            format_string, scheme, timeout, proxies
        )
        # NOTE: ``country_bias`` and ``format_string`` were previously
        # assigned twice each in this constructor; assign each exactly once.
        self.country_bias = country_bias
        self.format_string = format_string
        self.view_box = view_box
        self.domain = domain.strip('/')
        self.api = "%s://%s/search" % (self.scheme, self.domain)
        self.reverse_api = "%s://%s/reverse" % (self.scheme, self.domain)

    def geocode(
            self,
            query,
            exactly_one=True,
            timeout=None,
            addressdetails=False,
            language=False,
            geometry=None
    ):  # pylint: disable=R0913,W0221
        """
        Geocode a location query.

        :param query: The address, query or structured query you wish to
            geocode.

            For a structured query, provide a dictionary whose keys
            are one of: `street`, `city`, `county`, `state`, `country`, or
            `postalcode`. For more information, see Nominatim's
            documentation for "structured requests":

                https://wiki.openstreetmap.org/wiki/Nominatim

        :type query: dict or string

            .. versionchanged:: 1.0.0

        :param bool exactly_one: Return one result or a list of results, if
            available.

        :param int timeout: Time, in seconds, to wait for the geocoding service
            to respond before raising a :class:`geopy.exc.GeocoderTimedOut`
            exception. Set this only if you wish to override, on this call
            only, the value set during the geocoder's initialization.

            .. versionadded:: 0.97

        :param addressdetails: If you want in *Location.raw* to include
            addressdetails such as city_district, etc set it to True
        :type addressdetails: bool

        :param language: Preferred language in which to return results.
            Either uses standard
            `RFC2616 <http://www.ietf.org/rfc/rfc2616.txt>`_
            accept-language string or a simple comma-separated
            list of language codes.
        :type language: string

            .. versionadded:: 1.0.0

        :param string geometry: If present, specifies whether the geocoding
            service should return the result's geometry in `wkt`, `svg`,
            `kml`, or `geojson` formats. This is available via the
            `raw` attribute on the returned :class:`geopy.location.Location`
            object.

            .. versionadded:: 1.3.0
        """
        if isinstance(query, dict):
            # Structured query: keep only keys Nominatim understands.
            params = {
                key: val
                for key, val
                in query.items()
                if key in self.structured_query_params
            }
        else:
            params = {'q': self.format_string % query}
        params.update({
            # `viewbox` apparently replaces `view_box`
            'viewbox': self.view_box,
            'format': 'json'
        })
        if self.country_bias:
            params['countrycodes'] = self.country_bias
        if addressdetails:
            params['addressdetails'] = 1
        if language:
            params['accept-language'] = language
        if geometry is not None:
            geometry = geometry.lower()
            if geometry == 'wkt':
                params['polygon_text'] = 1
            elif geometry == 'svg':
                params['polygon_svg'] = 1
            elif geometry == 'kml':
                params['polygon_kml'] = 1
            elif geometry == 'geojson':
                params['polygon_geojson'] = 1
            else:
                raise GeocoderQueryError(
                    "Invalid geometry format. Must be one of: "
                    "wkt, svg, kml, geojson."
                )
        url = "?".join((self.api, urlencode(params)))
        logger.debug("%s.geocode: %s", self.__class__.__name__, url)
        return self._parse_json(
            self._call_geocoder(url, timeout=timeout), exactly_one
        )

    def reverse(
            self,
            query,
            exactly_one=True,
            timeout=None,
            language=False,
    ):  # pylint: disable=W0221
        """
        Returns a reverse geocoded location.

        :param query: The coordinates for which you wish to obtain the
            closest human-readable addresses.
        :type query: :class:`geopy.point.Point`, list or tuple of (latitude,
            longitude), or string as "%(latitude)s, %(longitude)s"

        :param bool exactly_one: Return one result or a list of results, if
            available.

        :param int timeout: Time, in seconds, to wait for the geocoding service
            to respond before raising a :class:`geopy.exc.GeocoderTimedOut`
            exception. Set this only if you wish to override, on this call
            only, the value set during the geocoder's initialization.

            .. versionadded:: 0.97

        :param language: Preferred language in which to return results.
            Either uses standard
            `RFC2616 <http://www.ietf.org/rfc/rfc2616.txt>`_
            accept-language string or a simple comma-separated
            list of language codes.
        :type language: string

            .. versionadded:: 1.0.0
        """
        try:
            lat, lon = [
                x.strip() for x in
                self._coerce_point_to_string(query).split(',')
            ]
        except ValueError:
            raise ValueError("Must be a coordinate pair or Point")
        params = {
            'lat': lat,
            'lon': lon,
            'format': 'json',
        }
        if language:
            params['accept-language'] = language
        url = "?".join((self.reverse_api, urlencode(params)))
        logger.debug("%s.reverse: %s", self.__class__.__name__, url)
        return self._parse_json(
            self._call_geocoder(url, timeout=timeout), exactly_one
        )

    @staticmethod
    def parse_code(place):
        """
        Build a :class:`geopy.location.Location` from one result dict.
        """
        latitude = place.get('lat', None)
        longitude = place.get('lon', None)
        placename = place.get('display_name', None)
        # Convert only when both coordinates are present; otherwise the
        # raw (possibly None) values are passed through unchanged.
        if latitude and longitude:
            latitude = float(latitude)
            longitude = float(longitude)
        return Location(placename, (latitude, longitude), place)

    def _parse_json(self, places, exactly_one):
        """
        Convert the service's JSON response into Location objects.

        Returns None for an empty response, a single Location when
        ``exactly_one`` is true, otherwise a list of Locations.
        """
        if places is None:
            return None
        if not isinstance(places, list):
            places = [places]
        if not len(places):
            return None
        if exactly_one is True:
            return self.parse_code(places[0])
        return [self.parse_code(place) for place in places]
| {
"content_hash": "cda3bc311286bc6203130458edfe3a94",
"timestamp": "",
"source": "github",
"line_count": 273,
"max_line_length": 79,
"avg_line_length": 32.989010989010985,
"alnum_prop": 0.5629580279813458,
"repo_name": "cchristelis/feti",
"id": "796262dcb5337f0b527d08741ce640ef5229d726",
"size": "9006",
"binary": false,
"copies": "10",
"ref": "refs/heads/develop",
"path": "deployment/import-scripts/geopy/geocoders/osm.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "66178"
},
{
"name": "HTML",
"bytes": "3411827"
},
{
"name": "JavaScript",
"bytes": "525391"
},
{
"name": "Makefile",
"bytes": "16513"
},
{
"name": "PLpgSQL",
"bytes": "9805987"
},
{
"name": "Python",
"bytes": "372712"
},
{
"name": "Shell",
"bytes": "2539"
}
],
"symlink_target": ""
} |
import logging
import uuid
import json
import boto
import boto.ec2
def _read_conf(config_file):
cfile = open(config_file, 'r').read()
config = {}
for line in cfile.split('\n'):
if line.startswith('#') or not line :
continue
temp = line.split('=')
config[temp[0]] = temp[1].strip('\r')
return config
def load_confs(conf_file):
    """Load configuration options and merge in AWS credentials.

    Reads the key/value config file, then loads the JSON credentials file
    named by ``AWS_CREDENTIALS_FILE`` and copies the access key id and
    secret into the returned dict under ``AWSAccessKeyId``/``AWSSecretKey``.

    :param conf_file: path to the key=value configuration file
    :returns: dict of configuration options including AWS credentials
    """
    configs = _read_conf(conf_file)
    # .get() avoids a KeyError when the option is missing entirely
    # (the old truthiness test assumed the key was always present).
    if not configs.get('AWS_CREDENTIALS_FILE'):
        print("No creds file found")
        exit(-1)
    with open(configs['AWS_CREDENTIALS_FILE']) as cred_file:
        creds = json.load(cred_file)
    # Expected JSON shape: {"AccessKey": {"AccessKeyId": ..., "SecretAccessKey": ...}}
    configs['AWSAccessKeyId'] = creds['AccessKey']['AccessKeyId']
    configs['AWSSecretKey'] = creds['AccessKey']['SecretAccessKey']
    return configs
def init():
    """Connect to AWS EC2 using options read from the ``configs`` file.

    :returns: tuple of (configs dict, boto EC2 connection)
    """
    configs = load_confs("configs")
    regions = boto.ec2.regions()
    # Lazy %-style args: the previous call passed the region value without a
    # placeholder in the format string, which makes the logging module raise
    # an internal "not all arguments converted" formatting error.
    logging.debug("AWS region : %s", configs['AWS_REGION'])
    if configs['AWS_REGION'] not in [x.name for x in regions]:
        print(configs['AWS_REGION'])
    conn = boto.ec2.connect_to_region(configs['AWS_REGION'],
                                      aws_access_key_id=configs['AWSAccessKeyId'],
                                      aws_secret_access_key=configs['AWSSecretKey'])
    # Identity comparison is the idiomatic None check.
    if conn is None:
        print("[INFO] : Region name could be incorrect")
        print("[ERROR]: Failed to connect to region, exiting")
        exit(-1)
    return configs, conn
| {
"content_hash": "782c6a3561aebacc23f4300dfd78434c",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 90,
"avg_line_length": 27.660377358490567,
"alnum_prop": 0.5818553888130968,
"repo_name": "yadudoc/cloud_kotta",
"id": "f3df0d285fdd9e1094bdeaf20027830bace69506",
"size": "1489",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "configurator.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "3558"
},
{
"name": "HTML",
"bytes": "1355"
},
{
"name": "JavaScript",
"bytes": "8960"
},
{
"name": "Python",
"bytes": "230490"
},
{
"name": "Shell",
"bytes": "3495"
},
{
"name": "Smarty",
"bytes": "44608"
}
],
"symlink_target": ""
} |
"""
File: RPS.py
Author: CS 1510
Description: This is an automatic rock paper scissors game played
with two comptuer players.
Deadline: ONTIME
"""
import random
# Seed the module-level PRNG once at import time (with no argument this
# uses OS entropy / current time).
random.seed()
#Function getWeapon
#Inputs: none
#Output: one of the strings "r", "p", or "s"
def getWeapon():
    """Pick rock ("r"), paper ("p"), or scissors ("s") uniformly at random."""
    # Same randint(1, 3) draw as before, mapped through a dispatch table.
    roll = random.randint(1, 3)
    return {1: "r", 2: "p"}.get(roll, "s")
#Function isTie
#Inputs: playerOne weapon (character), playerTwo weapon (character)
#Output: True or False
def isTie(playerOne, playerTwo):
    """Return True when both players chose the same weapon, False otherwise."""
    # The equality comparison already yields the needed boolean.
    return playerOne == playerTwo
#Function playerOneWins
#Inputs: playerOne weapon (character), playerTwo weapon (character)
#Output: True or False
def playerOneWins(playerOne, playerTwo):
    """Return True when player one's weapon beats player two's."""
    # Each (winner, loser) pairing in rock-paper-scissors.
    winning_pairs = {("r", "s"), ("s", "p"), ("p", "r")}
    return (playerOne, playerTwo) in winning_pairs
#Function rps
#Inputs: number of games as an int
#Outputs: Returns None; prints total ties and per-player win counts for
#         acceptable input, or an error message otherwise
def rps(numberOfGames):
    """Automatically plays rock,paper,scissors with two computer players"""
    # Guard clause: reject anything that is not a positive int.
    if type(numberOfGames) != int or numberOfGames <= 0:
        print("Rock Paper Scissors requires a positive integer")
        print("representing the number of games to be played")
        return None
    ties = 0
    winsOne = 0
    winsTwo = 0
    # Play every round, tallying the outcome of each.
    for _ in range(numberOfGames):
        weaponOne = getWeapon()
        weaponTwo = getWeapon()
        if isTie(weaponOne, weaponTwo):
            ties += 1
        elif playerOneWins(weaponOne, weaponTwo):
            winsOne += 1
        else:
            winsTwo += 1
    # Report in the same three-line format as before.
    print("Total Ties=" + str(ties) + "\n" +
          "Player One=" + str(winsOne) + "\n" +
          "Player Two=" + str(winsTwo))
    return None
| {
"content_hash": "c5b93ebbe2faf98f67e3c647bbafc171",
"timestamp": "",
"source": "github",
"line_count": 92,
"max_line_length": 83,
"avg_line_length": 24.641304347826086,
"alnum_prop": 0.643140714600794,
"repo_name": "sgolitsynskiy/sergey.cs.uni.edu",
"id": "d46f8c784088a8520a964fae2a6061cd5caa76b1",
"size": "2267",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "www/courses/cs1510/fall2017/rps.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "19595"
},
{
"name": "HTML",
"bytes": "53797"
},
{
"name": "JavaScript",
"bytes": "433"
},
{
"name": "PHP",
"bytes": "224122"
},
{
"name": "Python",
"bytes": "39152"
}
],
"symlink_target": ""
} |
'''OpenGL extension APPLE.flush_buffer_range
Automatically generated by the get_gl_extensions script, do not edit!
'''
from OpenGL import platform, constants, constant, arrays
from OpenGL import extensions
from OpenGL.GL import glget
import ctypes

# Name under which the driver advertises this extension.
EXTENSION_NAME = 'GL_APPLE_flush_buffer_range'
_DEPRECATED = False

# GL enum tokens defined by GL_APPLE_flush_buffer_range.
GL_BUFFER_SERIALIZED_MODIFY_APPLE = constant.Constant( 'GL_BUFFER_SERIALIZED_MODIFY_APPLE', 0x8A12 )
GL_BUFFER_FLUSHING_UNMAP_APPLE = constant.Constant( 'GL_BUFFER_FLUSHING_UNMAP_APPLE', 0x8A13 )

# Entry-point wrappers resolved lazily from the GL driver library.
glBufferParameteriAPPLE = platform.createExtensionFunction(
'glBufferParameteriAPPLE',dll=platform.GL,
extension=EXTENSION_NAME,
resultType=None,
argTypes=(constants.GLenum,constants.GLenum,constants.GLint,),
doc='glBufferParameteriAPPLE(GLenum(target), GLenum(pname), GLint(param)) -> None',
argNames=('target','pname','param',),
deprecated=_DEPRECATED,
)
glFlushMappedBufferRangeAPPLE = platform.createExtensionFunction(
'glFlushMappedBufferRangeAPPLE',dll=platform.GL,
extension=EXTENSION_NAME,
resultType=None,
argTypes=(constants.GLenum,constants.GLintptr,constants.GLsizeiptr,),
doc='glFlushMappedBufferRangeAPPLE(GLenum(target), GLintptr(offset), GLsizeiptr(size)) -> None',
argNames=('target','offset','size',),
deprecated=_DEPRECATED,
)
def glInitFlushBufferRangeAPPLE():
    """Report whether GL_APPLE_flush_buffer_range is available.

    :returns: boolean, True if the current context advertises the extension.
    """
    is_available = extensions.hasGLExtension(EXTENSION_NAME)
    return is_available
| {
"content_hash": "843e092659c22a110c9ec970b1c24a59",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 100,
"avg_line_length": 39.638888888888886,
"alnum_prop": 0.795374912403644,
"repo_name": "Universal-Model-Converter/UMC3.0a",
"id": "ff7fed6eb499f462fd94829f5603fc577d517d8a",
"size": "1427",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "data/Python/x86/Lib/site-packages/OpenGL/raw/GL/APPLE/flush_buffer_range.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "226"
},
{
"name": "C",
"bytes": "1082640"
},
{
"name": "C#",
"bytes": "8440"
},
{
"name": "C++",
"bytes": "3621086"
},
{
"name": "CSS",
"bytes": "6226"
},
{
"name": "F#",
"bytes": "2310"
},
{
"name": "FORTRAN",
"bytes": "7795"
},
{
"name": "Forth",
"bytes": "506"
},
{
"name": "GLSL",
"bytes": "1040"
},
{
"name": "Groff",
"bytes": "5943"
},
{
"name": "HTML",
"bytes": "1196266"
},
{
"name": "Java",
"bytes": "5793"
},
{
"name": "Makefile",
"bytes": "1109"
},
{
"name": "Mask",
"bytes": "969"
},
{
"name": "Matlab",
"bytes": "4346"
},
{
"name": "Python",
"bytes": "33351557"
},
{
"name": "R",
"bytes": "1370"
},
{
"name": "Shell",
"bytes": "6931"
},
{
"name": "Tcl",
"bytes": "2084458"
},
{
"name": "Visual Basic",
"bytes": "481"
}
],
"symlink_target": ""
} |
import copy
import time
import ddt
import mock
from oslo_config import cfg
from manila import exception
from manila.share import configuration
from manila.share import driver
from manila.share.drivers.netapp.dataontap.client import api as netapp_api
from manila.share.drivers.netapp.dataontap.client import client_cmode
from manila.share.drivers.netapp.dataontap.cluster_mode import data_motion
from manila.share.drivers.netapp import options as na_opts
from manila import test
from manila.tests.share.drivers.netapp.dataontap import fakes as fake
from manila.tests.share.drivers.netapp import fakes as na_fakes
CONF = cfg.CONF
@ddt.ddt
class NetAppCDOTDataMotionTestCase(test.TestCase):
    """Tests for data_motion helpers that build per-backend clients/configs."""

    def setUp(self):
        super(NetAppCDOTDataMotionTestCase, self).setUp()
        self.backend = 'backend1'
        # Patch the cDOT client class so no real API connections are made.
        self.mock_cmode_client = self.mock_object(client_cmode,
                                                  "NetAppCmodeClient",
                                                  mock.Mock())
        self.config = configuration.Configuration(driver.share_opts,
                                                  config_group=self.backend)
        self.config.append_config_values(na_opts.netapp_cluster_opts)
        self.config.append_config_values(na_opts.netapp_connection_opts)
        self.config.append_config_values(na_opts.netapp_basicauth_opts)
        self.config.append_config_values(na_opts.netapp_transport_opts)
        self.config.append_config_values(na_opts.netapp_support_opts)
        self.config.append_config_values(na_opts.netapp_provisioning_opts)
        self.config.append_config_values(na_opts.netapp_replication_opts)
        # Register fake connection options under this backend's config group.
        CONF.set_override("share_backend_name", self.backend,
                          group=self.backend, enforce_type=True)
        CONF.set_override("netapp_transport_type", "https",
                          group=self.backend, enforce_type=True)
        CONF.set_override("netapp_login", "fake_user",
                          group=self.backend, enforce_type=True)
        CONF.set_override("netapp_password", "fake_password",
                          group=self.backend, enforce_type=True)
        CONF.set_override("netapp_server_hostname", "fake_hostname",
                          group=self.backend, enforce_type=True)
        CONF.set_override("netapp_server_port", 8866,
                          group=self.backend, enforce_type=True)

    def test_get_client_for_backend(self):
        # Without netapp_vserver set, the client is built with vserver=None.
        self.mock_object(data_motion, "get_backend_configuration",
                         mock.Mock(return_value=self.config))
        data_motion.get_client_for_backend(self.backend)
        self.mock_cmode_client.assert_called_once_with(
            hostname='fake_hostname', password='fake_password',
            username='fake_user', transport_type='https', port=8866,
            trace=mock.ANY, vserver=None)

    def test_get_client_for_backend_with_vserver(self):
        self.mock_object(data_motion, "get_backend_configuration",
                         mock.Mock(return_value=self.config))
        CONF.set_override("netapp_vserver", 'fake_vserver',
                          group=self.backend, enforce_type=True)
        data_motion.get_client_for_backend(self.backend)
        self.mock_cmode_client.assert_called_once_with(
            hostname='fake_hostname', password='fake_password',
            username='fake_user', transport_type='https', port=8866,
            trace=mock.ANY, vserver='fake_vserver')

    def test_get_config_for_backend(self):
        self.mock_object(data_motion, "CONF")
        CONF.set_override("netapp_vserver", 'fake_vserver',
                          group=self.backend, enforce_type=True)
        data_motion.CONF.list_all_sections.return_value = [self.backend]
        config = data_motion.get_backend_configuration(self.backend)
        self.assertEqual('fake_vserver', config.netapp_vserver)

    def test_get_config_for_backend_different_backend_name(self):
        self.mock_object(data_motion, "CONF")
        CONF.set_override("netapp_vserver", 'fake_vserver',
                          group=self.backend, enforce_type=True)
        CONF.set_override("share_backend_name", "fake_backend_name",
                          group=self.backend, enforce_type=True)
        data_motion.CONF.list_all_sections.return_value = [self.backend]
        config = data_motion.get_backend_configuration(self.backend)
        self.assertEqual('fake_vserver', config.netapp_vserver)
        self.assertEqual('fake_backend_name', config.share_backend_name)

    @ddt.data([], ['fake_backend1', 'fake_backend2'])
    def test_get_config_for_backend_not_configured(self, conf_sections):
        # A backend absent from the config sections must raise.
        self.mock_object(data_motion, "CONF")
        data_motion.CONF.list_all_sections.return_value = conf_sections
        self.assertRaises(exception.BadConfigurationException,
                          data_motion.get_backend_configuration,
                          self.backend)
class NetAppCDOTDataMotionSessionTestCase(test.TestCase):
    """Tests for DataMotionSession SnapMirror lifecycle operations.

    NOTE: many tests patch ``get_client_for_backend`` with a ``side_effect``
    list — the order of the mocks in that list must match the order in which
    the session code requests destination and source clients.
    """

    def setUp(self):
        super(NetAppCDOTDataMotionSessionTestCase, self).setUp()
        self.source_backend = 'backend1'
        self.dest_backend = 'backend2'
        config = configuration.Configuration(driver.share_opts,
                                             config_group=self.source_backend)
        config.append_config_values(na_opts.netapp_cluster_opts)
        config.append_config_values(na_opts.netapp_connection_opts)
        config.append_config_values(na_opts.netapp_basicauth_opts)
        config.append_config_values(na_opts.netapp_transport_opts)
        config.append_config_values(na_opts.netapp_support_opts)
        config.append_config_values(na_opts.netapp_provisioning_opts)
        config.append_config_values(na_opts.netapp_replication_opts)
        self.mock_object(data_motion, "get_backend_configuration",
                         mock.Mock(return_value=config))
        self.mock_cmode_client = self.mock_object(client_cmode,
                                                  "NetAppCmodeClient",
                                                  mock.Mock())
        self.dm_session = data_motion.DataMotionSession()
        # Fake source share/server pair; the volume name is derived from the
        # share id with dashes replaced by underscores.
        self.fake_src_share = copy.deepcopy(fake.SHARE)
        self.fake_src_share_server = copy.deepcopy(fake.SHARE_SERVER)
        self.source_vserver = 'source_vserver'
        self.fake_src_share_server['backend_details']['vserver_name'] = (
            self.source_vserver
        )
        self.fake_src_share['share_server'] = self.fake_src_share_server
        self.fake_src_share['id'] = 'c02d497a-236c-4852-812a-0d39373e312a'
        self.fake_src_vol_name = 'share_c02d497a_236c_4852_812a_0d39373e312a'
        # Fake destination share/server pair, built the same way.
        self.fake_dest_share = copy.deepcopy(fake.SHARE)
        self.fake_dest_share_server = copy.deepcopy(fake.SHARE_SERVER)
        self.dest_vserver = 'dest_vserver'
        self.fake_dest_share_server['backend_details']['vserver_name'] = (
            self.dest_vserver
        )
        self.fake_dest_share['share_server'] = self.fake_dest_share_server
        self.fake_dest_share['id'] = '34fbaf57-745d-460f-8270-3378c2945e30'
        self.fake_dest_vol_name = 'share_34fbaf57_745d_460f_8270_3378c2945e30'
        self.mock_src_client = mock.Mock()
        self.mock_dest_client = mock.Mock()
        # Default client factory: destination client first, then source.
        self.mock_object(data_motion, 'get_client_for_backend',
                         mock.Mock(side_effect=[self.mock_dest_client,
                                                self.mock_src_client]))

    def test_create_snapmirror(self):
        mock_dest_client = mock.Mock()
        self.mock_object(data_motion, 'get_client_for_backend',
                         mock.Mock(return_value=mock_dest_client))
        self.dm_session.create_snapmirror(self.fake_src_share,
                                          self.fake_dest_share)
        mock_dest_client.create_snapmirror.assert_called_once_with(
            mock.ANY, self.fake_src_vol_name, mock.ANY,
            self.fake_dest_vol_name, schedule='hourly'
        )
        mock_dest_client.initialize_snapmirror.assert_called_once_with(
            mock.ANY, self.fake_src_vol_name, mock.ANY,
            self.fake_dest_vol_name
        )

    def test_delete_snapmirror(self):
        mock_src_client = mock.Mock()
        mock_dest_client = mock.Mock()
        self.mock_object(data_motion, 'get_client_for_backend',
                         mock.Mock(side_effect=[mock_dest_client,
                                                mock_src_client]))
        self.dm_session.delete_snapmirror(self.fake_src_share,
                                          self.fake_dest_share)
        mock_dest_client.abort_snapmirror.assert_called_once_with(
            mock.ANY, self.fake_src_vol_name, mock.ANY,
            self.fake_dest_vol_name, clear_checkpoint=False
        )
        mock_dest_client.delete_snapmirror.assert_called_once_with(
            mock.ANY, self.fake_src_vol_name, mock.ANY,
            self.fake_dest_vol_name
        )
        mock_src_client.release_snapmirror.assert_called_once_with(
            mock.ANY, self.fake_src_vol_name, mock.ANY,
            self.fake_dest_vol_name
        )

    def test_delete_snapmirror_does_not_exist(self):
        """Ensure delete succeeds when the snapmirror does not exist."""
        mock_src_client = mock.Mock()
        mock_dest_client = mock.Mock()
        # abort fails with an API error; delete must proceed regardless.
        mock_dest_client.abort_snapmirror.side_effect = netapp_api.NaApiError(
            code=netapp_api.EAPIERROR
        )
        self.mock_object(data_motion, 'get_client_for_backend',
                         mock.Mock(side_effect=[mock_dest_client,
                                                mock_src_client]))
        self.dm_session.delete_snapmirror(self.fake_src_share,
                                          self.fake_dest_share)
        mock_dest_client.abort_snapmirror.assert_called_once_with(
            mock.ANY, self.fake_src_vol_name, mock.ANY,
            self.fake_dest_vol_name, clear_checkpoint=False
        )
        mock_dest_client.delete_snapmirror.assert_called_once_with(
            mock.ANY, self.fake_src_vol_name, mock.ANY,
            self.fake_dest_vol_name
        )
        mock_src_client.release_snapmirror.assert_called_once_with(
            mock.ANY, self.fake_src_vol_name, mock.ANY,
            self.fake_dest_vol_name
        )

    def test_delete_snapmirror_error_deleting(self):
        """Ensure delete succeeds when the snapmirror does not exist."""
        mock_src_client = mock.Mock()
        mock_dest_client = mock.Mock()
        # delete itself errors; release must still be attempted.
        mock_dest_client.delete_snapmirror.side_effect = netapp_api.NaApiError(
            code=netapp_api.ESOURCE_IS_DIFFERENT
        )
        self.mock_object(data_motion, 'get_client_for_backend',
                         mock.Mock(side_effect=[mock_dest_client,
                                                mock_src_client]))
        self.dm_session.delete_snapmirror(self.fake_src_share,
                                          self.fake_dest_share)
        mock_dest_client.abort_snapmirror.assert_called_once_with(
            mock.ANY, self.fake_src_vol_name, mock.ANY,
            self.fake_dest_vol_name, clear_checkpoint=False
        )
        mock_dest_client.delete_snapmirror.assert_called_once_with(
            mock.ANY, self.fake_src_vol_name, mock.ANY,
            self.fake_dest_vol_name
        )
        mock_src_client.release_snapmirror.assert_called_once_with(
            mock.ANY, self.fake_src_vol_name, mock.ANY,
            self.fake_dest_vol_name
        )

    def test_delete_snapmirror_error_releasing(self):
        """Ensure delete succeeds when the snapmirror does not exist."""
        mock_src_client = mock.Mock()
        mock_dest_client = mock.Mock()
        # release errors with "object not found"; delete is still a success.
        mock_src_client.release_snapmirror.side_effect = (
            netapp_api.NaApiError(code=netapp_api.EOBJECTNOTFOUND))
        self.mock_object(data_motion, 'get_client_for_backend',
                         mock.Mock(side_effect=[mock_dest_client,
                                                mock_src_client]))
        self.dm_session.delete_snapmirror(self.fake_src_share,
                                          self.fake_dest_share)
        mock_dest_client.abort_snapmirror.assert_called_once_with(
            mock.ANY, self.fake_src_vol_name, mock.ANY,
            self.fake_dest_vol_name, clear_checkpoint=False
        )
        mock_dest_client.delete_snapmirror.assert_called_once_with(
            mock.ANY, self.fake_src_vol_name, mock.ANY,
            self.fake_dest_vol_name
        )
        mock_src_client.release_snapmirror.assert_called_once_with(
            mock.ANY, self.fake_src_vol_name, mock.ANY,
            self.fake_dest_vol_name
        )

    def test_delete_snapmirror_without_release(self):
        mock_src_client = mock.Mock()
        mock_dest_client = mock.Mock()
        self.mock_object(data_motion, 'get_client_for_backend',
                         mock.Mock(side_effect=[mock_dest_client,
                                                mock_src_client]))
        self.dm_session.delete_snapmirror(self.fake_src_share,
                                          self.fake_dest_share,
                                          release=False)
        mock_dest_client.abort_snapmirror.assert_called_once_with(
            mock.ANY, self.fake_src_vol_name, mock.ANY,
            self.fake_dest_vol_name, clear_checkpoint=False
        )
        mock_dest_client.delete_snapmirror.assert_called_once_with(
            mock.ANY, self.fake_src_vol_name, mock.ANY,
            self.fake_dest_vol_name
        )
        self.assertFalse(mock_src_client.release_snapmirror.called)

    def test_delete_snapmirror_source_unreachable(self):
        mock_src_client = mock.Mock()
        mock_dest_client = mock.Mock()
        # Source backend lookup raises entirely; release must be skipped.
        self.mock_object(data_motion, 'get_client_for_backend',
                         mock.Mock(side_effect=[mock_dest_client,
                                                Exception]))
        self.dm_session.delete_snapmirror(self.fake_src_share,
                                          self.fake_dest_share)
        mock_dest_client.abort_snapmirror.assert_called_once_with(
            mock.ANY, self.fake_src_vol_name, mock.ANY,
            self.fake_dest_vol_name, clear_checkpoint=False
        )
        mock_dest_client.delete_snapmirror.assert_called_once_with(
            mock.ANY, self.fake_src_vol_name, mock.ANY,
            self.fake_dest_vol_name
        )
        self.assertFalse(mock_src_client.release_snapmirror.called)

    def test_break_snapmirror(self):
        self.mock_object(self.dm_session, 'quiesce_then_abort')
        self.dm_session.break_snapmirror(self.fake_src_share,
                                         self.fake_dest_share)
        self.mock_dest_client.break_snapmirror.assert_called_once_with(
            self.source_vserver, self.fake_src_vol_name,
            self.dest_vserver, self.fake_dest_vol_name)
        self.dm_session.quiesce_then_abort.assert_called_once_with(
            self.fake_src_share, self.fake_dest_share)
        self.mock_dest_client.mount_volume.assert_called_once_with(
            self.fake_dest_vol_name)

    def test_break_snapmirror_wait_for_quiesced(self):
        self.mock_object(self.dm_session, 'quiesce_then_abort')
        self.dm_session.break_snapmirror(self.fake_src_share,
                                         self.fake_dest_share)
        self.dm_session.quiesce_then_abort.assert_called_once_with(
            self.fake_src_share, self.fake_dest_share)
        self.mock_dest_client.break_snapmirror.assert_called_once_with(
            self.source_vserver, self.fake_src_vol_name,
            self.dest_vserver, self.fake_dest_vol_name)
        self.mock_dest_client.mount_volume.assert_called_once_with(
            self.fake_dest_vol_name)

    def test_quiesce_then_abort_timeout(self):
        self.mock_object(time, 'sleep')
        # Relationship never leaves 'transferring', so the quiesce wait
        # times out and the transfer is aborted.
        mock_get_snapmirrors = mock.Mock(
            return_value=[{'relationship-status': "transferring"}])
        self.mock_object(self.mock_dest_client, 'get_snapmirrors',
                         mock_get_snapmirrors)
        mock_backend_config = na_fakes.create_configuration()
        mock_backend_config.netapp_snapmirror_quiesce_timeout = 10
        self.mock_object(data_motion, 'get_backend_configuration',
                         mock.Mock(return_value=mock_backend_config))
        self.dm_session.quiesce_then_abort(self.fake_src_share,
                                           self.fake_dest_share)
        self.mock_dest_client.get_snapmirrors.assert_called_with(
            self.source_vserver, self.fake_src_vol_name,
            self.dest_vserver, self.fake_dest_vol_name,
            desired_attributes=['relationship-status', 'mirror-state']
        )
        self.assertEqual(2, self.mock_dest_client.get_snapmirrors.call_count)
        self.mock_dest_client.quiesce_snapmirror.assert_called_with(
            self.source_vserver, self.fake_src_vol_name,
            self.dest_vserver, self.fake_dest_vol_name)
        self.mock_dest_client.abort_snapmirror.assert_called_once_with(
            self.source_vserver, self.fake_src_vol_name,
            self.dest_vserver, self.fake_dest_vol_name,
            clear_checkpoint=False
        )

    def test_quiesce_then_abort_wait_for_quiesced(self):
        self.mock_object(time, 'sleep')
        # Second poll reports 'quiesced', so no abort is needed.
        self.mock_object(self.mock_dest_client, 'get_snapmirrors',
                         mock.Mock(side_effect=[
                             [{'relationship-status': "transferring"}],
                             [{'relationship-status': "quiesced"}]]))
        self.dm_session.quiesce_then_abort(self.fake_src_share,
                                           self.fake_dest_share)
        self.mock_dest_client.get_snapmirrors.assert_called_with(
            self.source_vserver, self.fake_src_vol_name,
            self.dest_vserver, self.fake_dest_vol_name,
            desired_attributes=['relationship-status', 'mirror-state']
        )
        self.assertEqual(2, self.mock_dest_client.get_snapmirrors.call_count)
        self.mock_dest_client.quiesce_snapmirror.assert_called_once_with(
            self.source_vserver, self.fake_src_vol_name,
            self.dest_vserver, self.fake_dest_vol_name)

    def test_resync_snapmirror(self):
        self.dm_session.resync_snapmirror(self.fake_src_share,
                                          self.fake_dest_share)
        self.mock_dest_client.resync_snapmirror.assert_called_once_with(
            self.source_vserver, self.fake_src_vol_name,
            self.dest_vserver, self.fake_dest_vol_name)

    def test_change_snapmirror_source(self):
        fake_new_src_share = copy.deepcopy(fake.SHARE)
        fake_new_src_share['id'] = 'd02d497a-236c-4852-812a-0d39373e312a'
        fake_new_src_share_name = 'share_d02d497a_236c_4852_812a_0d39373e312a'
        mock_new_src_client = mock.Mock()
        self.mock_object(self.dm_session, 'delete_snapmirror')
        self.mock_object(data_motion, 'get_client_for_backend',
                         mock.Mock(side_effect=[self.mock_dest_client,
                                                self.mock_src_client,
                                                self.mock_dest_client,
                                                mock_new_src_client]))
        self.dm_session.change_snapmirror_source(
            self.fake_dest_share, self.fake_src_share, fake_new_src_share,
            [self.fake_dest_share, self.fake_src_share, fake_new_src_share])
        self.assertFalse(self.mock_src_client.release_snapmirror.called)
        # One delete per (src, dest) pairing among the three shares.
        self.assertEqual(4, self.dm_session.delete_snapmirror.call_count)
        self.dm_session.delete_snapmirror.assert_called_with(
            mock.ANY, mock.ANY, release=False
        )
        self.mock_dest_client.create_snapmirror.assert_called_once_with(
            mock.ANY, fake_new_src_share_name, mock.ANY,
            self.fake_dest_vol_name, schedule='hourly'
        )
        self.mock_dest_client.resync_snapmirror.assert_called_once_with(
            mock.ANY, fake_new_src_share_name, mock.ANY,
            self.fake_dest_vol_name
        )

    def test_get_snapmirrors(self):
        self.mock_object(self.mock_dest_client, 'get_snapmirrors')
        self.dm_session.get_snapmirrors(self.fake_src_share,
                                        self.fake_dest_share)
        self.mock_dest_client.get_snapmirrors.assert_called_with(
            self.source_vserver, self.fake_src_vol_name,
            self.dest_vserver, self.fake_dest_vol_name,
            desired_attributes=['relationship-status',
                                'mirror-state',
                                'source-vserver',
                                'source-volume',
                                'last-transfer-end-timestamp']
        )
        self.assertEqual(1, self.mock_dest_client.get_snapmirrors.call_count)

    def test_update_snapmirror(self):
        self.mock_object(self.mock_dest_client, 'get_snapmirrors')
        self.dm_session.update_snapmirror(self.fake_src_share,
                                          self.fake_dest_share)
        self.mock_dest_client.update_snapmirror.assert_called_once_with(
            self.source_vserver, self.fake_src_vol_name,
            self.dest_vserver, self.fake_dest_vol_name)

    def test_resume_snapmirror(self):
        self.mock_object(self.mock_dest_client, 'get_snapmirrors')
        self.dm_session.resume_snapmirror(self.fake_src_share,
                                          self.fake_dest_share)
        self.mock_dest_client.resume_snapmirror.assert_called_once_with(
            self.source_vserver, self.fake_src_vol_name,
            self.dest_vserver, self.fake_dest_vol_name)
| {
"content_hash": "c1b7bbce0a3b1f7345ef072395a165ea",
"timestamp": "",
"source": "github",
"line_count": 483,
"max_line_length": 79,
"avg_line_length": 45.34782608695652,
"alnum_prop": 0.6020636442496462,
"repo_name": "NetApp/manila",
"id": "5f679cbe6a0547c25a6d75fee6364943bb3ca0a7",
"size": "22533",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "manila/tests/share/drivers/netapp/dataontap/cluster_mode/test_data_motion.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "953"
},
{
"name": "Python",
"bytes": "8111068"
},
{
"name": "Shell",
"bytes": "91643"
}
],
"symlink_target": ""
} |
import itertools
import torch
class Optimizer(object):
    """Wrapper around a ``torch.optim`` optimizer that adds optional
    learning-rate scheduling and gradient-norm clipping.

    Args:
        optim (torch.optim.Optimizer): optimizer object, the parameters to be
            optimized should be given when instantiating the object,
            e.g. torch.optim.SGD(params)
        max_grad_norm (float, optional): value used for gradient norm
            clipping, set 0 to disable (default 0)
    """

    _ARG_MAX_GRAD_NORM = 'max_grad_norm'

    def __init__(self, optim, max_grad_norm=0):
        self.optimizer = optim
        self.scheduler = None
        self.max_grad_norm = max_grad_norm

    def set_scheduler(self, scheduler):
        """Attach a learning rate scheduler.

        Args:
            scheduler (torch.optim.lr_scheduler.*): learning rate scheduler
                instance, e.g. torch.optim.lr_scheduler.StepLR
        """
        self.scheduler = scheduler

    def step(self):
        """Run one optimization step, clipping gradient norms first when
        ``max_grad_norm`` is positive."""
        if self.max_grad_norm > 0:
            # Flatten every parameter group into a single iterable for clipping.
            grouped = (group['params'] for group in self.optimizer.param_groups)
            all_params = itertools.chain.from_iterable(grouped)
            torch.nn.utils.clip_grad_norm(all_params, self.max_grad_norm)
        self.optimizer.step()

    def update(self, loss, epoch):
        """Advance the attached scheduler, if any.

        Args:
            loss (float): current loss; consulted only by ReduceLROnPlateau
                schedulers (by default the supervised trainer passes the
                developing loss).
            epoch (int): current epoch number.
        """
        if self.scheduler is None:
            return
        if isinstance(self.scheduler, torch.optim.lr_scheduler.ReduceLROnPlateau):
            self.scheduler.step(loss)
        else:
            self.scheduler.step()
| {
"content_hash": "79d35d9dce3df908920bb0cc349f8fad",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 110,
"avg_line_length": 37.60377358490566,
"alnum_prop": 0.6332162568991471,
"repo_name": "taras-sereda/pytorch-seq2seq",
"id": "cc3bd47a4f06637e339b71788b2b56c4d57d1c94",
"size": "1993",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "seq2seq/optim/optim.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "97975"
},
{
"name": "Shell",
"bytes": "602"
}
],
"symlink_target": ""
} |
""" test_template.py """
import json
import logging
import os
import time
import urllib
import signal
import subprocess
from collections import namedtuple
from ..common import status
# Test input. Please set each variable as its own line, ended with \n, otherwise the value of lines
# passed into the topology will be incorrect, and the test will fail.
TEST_INPUT = ["1\n", "2\n", "3\n", "4\n", "5\n", "6\n", "7\n", "8\n",
              "9\n", "10\n", "11\n", "12\n"]

# Retry variables in case the output is different from the input
RETRY_COUNT = 5
RETRY_INTERVAL = 10

# Topology shard definitions (shard 0 hosts the tmaster; shard 1 does not)
NON_TMASTER_SHARD = 1

# Topology process name definitions used to locate pid files / ps entries
STMGR = 'stmgr'
HERON_BIN = "bin"
HERON_CORE = "heron-core"
HERON_METRICSMGR = 'metricsmgr'
HERON_SANDBOX_HOME = "."
HERON_STMGR = "heron-stmgr"
# Relative path of the stream manager binary inside the sandbox
HERON_STMGR_CMD = os.path.join(HERON_SANDBOX_HOME, HERON_CORE, HERON_BIN, HERON_STMGR)

# (pid, cmd) pair describing one line of `ps -o pid,args` output
ProcessTuple = namedtuple('ProcessTuple', 'pid cmd')
class TestTemplate(object):
  """ Class that encapsulates the template used for integration tests. Intended to be abstract and
  subclassed for specific tests. """

  def __init__(self, testname, params):
    # testname: human-readable test id used in logs and failure messages.
    # params: dict of test configuration (cliPath, cluster, readFile, ...).
    self.testname = testname
    self.params = params

  # pylint: disable=too-many-return-statements, too-many-branches,
  # pylint: disable=too-many-statements
  def run_test(self):
    """ Runs the test template. Must either return TestSuccess or raise TestFailure"""
    topology_submitted = False
    try:
      # prepare test data, start the topology and block until it's running
      self._prepare_test_data()
      self.submit_topology()
      topology_submitted = True
      _block_until_stmgr_running(self.get_expected_container_count())
      self._block_until_topology_running(self.get_expected_min_instance_count())

      # Execute the specific test logic and block until topology is running again
      self.execute_test_case()

      _block_until_stmgr_running(self.get_expected_container_count())
      physical_plan_json =\
        self._block_until_topology_running(self.get_expected_min_instance_count())

      # trigger the test data to flow and invoke the pre_check_results hook
      self._inject_test_data()
      self.pre_check_results(physical_plan_json)

      # finally verify the expected results
      result = self._check_results()
      return result
    except status.TestFailure as e:
      raise e
    except Exception as e:
      raise status.TestFailure("Exception thrown during test", e)
    finally:
      # Always attempt cleanup once the topology was submitted, even on failure.
      if topology_submitted:
        self.cleanup_test()

  def submit_topology(self):
    """ Submit the test topology with the configured parameters. """
    _submit_topology(
        self.params['cliPath'],
        self.params['cluster'],
        self.params['testJarPath'],
        self.params['topologyClassPath'],
        self.params['topologyName'],
        self.params['readFile'],
        self.params['outputFile']
    )

  # pylint: disable=no-self-use
  def get_expected_container_count(self):
    """ Number of containers a subclass expects; override as needed. """
    return 1

  # pylint: disable=no-self-use
  def get_expected_min_instance_count(self):
    """ Minimum instance count a subclass expects; override as needed. """
    return 1

  def execute_test_case(self):
    """ Hook for subclass-specific test actions (e.g. killing a process). """
    pass

  # pylint: disable=no-self-use,unused-argument
  def pre_check_results(self, physical_plan_json):
    """ Hook invoked with the fetched physical plan before result checking. """
    return True

  def cleanup_test(self):
    """ Kill the topology and remove the test data files. """
    try:
      _kill_topology(self.params['cliPath'], self.params['cluster'], self.params['topologyName'])
    except Exception as e:
      logging.error("Failed to kill %s topology: %s", self.params['topologyName'], str(e))
    finally:
      self._delete_test_data_files()

  def _delete_test_data_files(self):
    _safe_delete_file(self.params['readFile'])
    _safe_delete_file(self.params['outputFile'])

  def _prepare_test_data(self):
    self._delete_test_data_files()

    # insert lines into temp file and then move to read file
    try:
      with open('temp.txt', 'w') as f:
        for line in TEST_INPUT:
          f.write(line)
    except Exception as e:
      logging.error("Failed to write to temp.txt file: %s", str(e))
      # NOTE(review): failure here is only logged and False returned; callers
      # currently ignore the return value.
      return False

  def _inject_test_data(self):
    # move to read file. This guarantees contents will be put into the file the
    # spout is reading from atomically
    # which increases the determinism
    os.rename('temp.txt', self.params['readFile'])

  def _check_results(self):
    """ get actual and expected result.
    retry if results are not equal a predesignated amount of times
    """
    expected_result = ""
    actual_result = ""
    retries_left = RETRY_COUNT
    _sleep("before trying to check results for test %s" % self.testname, RETRY_INTERVAL)
    while retries_left > 0:
      retries_left -= 1
      try:
        with open(self.params['readFile'], 'r') as f:
          expected_result = f.read()
        with open(self.params['outputFile'], 'r') as g:
          actual_result = g.read()
      except Exception as e:
        # BUG FIX: the original formatted a two-placeholder string
        # ("... test %s: %s") with a single argument, which raised TypeError
        # and hid the real read failure.
        message = "Failed to read expected or actual results from file for test %s" % self.testname
        if retries_left == 0:
          raise status.TestFailure(message, e)
        logging.error("%s: %s", message, e)

      # if we get expected result, no need to retry
      expected_sorted = sorted(expected_result.split('\n'))
      actual_sorted = sorted(actual_result.split('\n'))
      if expected_sorted == actual_sorted:
        break

      if retries_left > 0:
        expected_result = ""
        actual_result = ""
        expected_sorted = []
        actual_sorted = []
        logging.info("Failed to get expected results for test %s (attempt %s/%s), "\
                     + "retrying after %s seconds",
                     self.testname, RETRY_COUNT - retries_left, RETRY_COUNT, RETRY_INTERVAL)
        time.sleep(RETRY_INTERVAL)

    # Compare the actual and expected result
    if actual_sorted == expected_sorted:
      success = status.TestSuccess(
          "Actual result matched expected result for test %s" % self.testname)
      logging.info("Actual result ---------- \n%s", actual_sorted)
      logging.info("Expected result ---------- \n%s", expected_sorted)
      return success
    else:
      failure = status.TestFailure(
          "Actual result did not match expected result for test %s" % self.testname)
      logging.info("Actual result ---------- \n%s", actual_sorted)
      logging.info("Expected result ---------- \n%s", expected_sorted)
      raise failure

  # pylint: disable=no-self-use
  def get_pid(self, process_name, heron_working_directory):
    """
    opens .pid file of process and reads the first and only line, which should be the process pid
    if fail, return -1
    """
    process_pid_file = os.path.join(heron_working_directory, process_name + '.pid')
    try:
      with open(process_pid_file, 'r') as f:
        pid = f.readline()
        return pid
    except Exception:
      logging.error("Unable to open file %s", process_pid_file)
      return -1

  # pylint: disable=no-self-use
  def kill_process(self, process_number):
    """ kills process by running unix command kill """
    # process_number may be a pid string from get_pid() or the int -1 sentinel.
    if process_number < 1:
      raise RuntimeError(
          "Not attempting to kill process id < 1 passed to kill_process: %d" % process_number)

    logging.info("Killing process number %s", process_number)

    try:
      os.kill(int(process_number), signal.SIGTERM)
    except OSError as ex:
      if "No such process" in str(ex):  # killing a non-existing process considered as success
        logging.info(str(ex))
      else:
        raise RuntimeError("Unable to kill process %s" % process_number)
    except Exception:
      raise RuntimeError("Unable to kill process %s" % process_number)

    logging.info("Killed process number %s", process_number)

  def kill_strmgr(self):
    """ Kill the stream manager of the non-tmaster shard. """
    logging.info("Executing kill stream manager")
    stmgr_pid = self.get_pid('%s-%d' % (STMGR, NON_TMASTER_SHARD), self.params['workingDirectory'])
    self.kill_process(stmgr_pid)

  def kill_metricsmgr(self):
    """ Kill the metrics manager of the non-tmaster shard. """
    logging.info("Executing kill metrics manager")
    metricsmgr_pid = self.get_pid(
        '%s-%d' % (HERON_METRICSMGR, NON_TMASTER_SHARD), self.params['workingDirectory'])
    self.kill_process(metricsmgr_pid)

  def _get_tracker_pplan(self):
    """ Fetch the topology's physical plan from the local tracker. """
    url = 'http://localhost:%s/topologies/physicalplan?' % self.params['trackerPort']\
          + 'cluster=local&environ=default&topology=IntegrationTest_LocalReadWriteTopology'
    logging.debug("Fetching physical plan from %s", url)
    response = urllib.urlopen(url)
    physical_plan_json = json.loads(response.read())

    if 'result' not in physical_plan_json:
      raise status.TestFailure(
          "Could not find result json in physical plan request to tracker: %s" % url)

    return physical_plan_json['result']

  def _block_until_topology_running(self, min_instances):
    """ Poll the tracker until the pplan reports at least min_instances. """
    retries_left = RETRY_COUNT
    _sleep("before trying to fetch pplan for test %s" % self.testname, RETRY_INTERVAL)
    while retries_left > 0:
      retries_left -= 1
      packing_plan = self._get_tracker_pplan()
      if packing_plan:
        instances_found = len(packing_plan['instances'])
        if instances_found >= min_instances:
          logging.info("Successfully fetched pplan from tracker for test %s after %s attempts.",
                       self.testname, RETRY_COUNT - retries_left)
          return packing_plan
        elif retries_left == 0:
          raise status.TestFailure(
              "Got pplan from tracker for test %s but the number of " % self.testname +
              "instances found (%d) was less than min expected (%s)." %
              (instances_found, min_instances))

      if retries_left > 0:
        _sleep("before trying again to fetch pplan for test %s (attempt %s/%s)" %
               (self.testname, RETRY_COUNT - retries_left, RETRY_COUNT), RETRY_INTERVAL)
      else:
        raise status.TestFailure("Failed to get pplan from tracker for test %s after %s attempts."
                                 % (self.testname, RETRY_COUNT))
def _block_until_stmgr_running(expected_stmgrs):
  """ Busy-wait until at least expected_stmgrs ./heron-stmgr processes exist. """
  snapshot = _get_processes()
  while not _processes_exists(snapshot, HERON_STMGR_CMD, expected_stmgrs):
    # Re-scan the process table once per second until enough stmgrs appear.
    snapshot = _get_processes()
    time.sleep(1)
def _submit_topology(heron_cli_path, test_cluster, test_jar_path, topology_class_path,
                     topology_name, input_file, output_file):
  """ Submit topology using heron-cli """
  # unicode string messes up subprocess.call quotations, must change into string type
  submit_cmd = [
      heron_cli_path, 'submit', '--verbose', '--', test_cluster, test_jar_path,
      topology_class_path, topology_name, input_file, output_file, str(len(TEST_INPUT))
  ]
  logging.info("Submitting topology: %s", submit_cmd)
  proc = subprocess.Popen(submit_cmd)
  proc.wait()
  if proc.returncode != 0:
    raise status.TestFailure("Failed to submit topology %s" % topology_name)
  logging.info("Submitted topology %s", topology_name)
def _kill_topology(heron_cli_path, test_cluster, topology_name):
  """ Kill a topology using heron-cli """
  kill_cmd = [heron_cli_path, 'kill', test_cluster, topology_name]
  logging.info("Killing topology: %s", ' '.join(kill_cmd))
  # this call can be blocking, no need for Popen
  exit_code = subprocess.call(kill_cmd)
  if exit_code != 0:
    raise RuntimeError("Unable to kill the topology: %s" % topology_name)
def _get_processes():
  """
  returns a list of process tuples (pid, cmd)
  This only applies only for local scheduler as it uses the ps command
  and assumes the topology will be running on different processes on same machine
  """
  # pylint: disable=fixme
  # TODO: if the submit fails before we get here (e.g., Topology already exists), this hangs
  ps_output = subprocess.check_output(['ps', '-o', 'pid,args'])
  lines = ps_output.split('\n')[1:]  # drop the header row
  process_list = []
  for line in lines:
    # remove empty lines
    if line == '':
      continue
    # BUG FIX: `ps` right-aligns the pid column, so short pids are preceded by
    # spaces and split(' ', 1) produced an empty pid field. Splitting on any
    # whitespace run (separator None) ignores the leading padding.
    parts = line.split(None, 1)
    if len(parts) < 2:
      # defensive: skip malformed/whitespace-only lines
      continue
    process_list.append(ProcessTuple(parts[0], parts[1]))
  return process_list
def _sleep(message, seconds):
  """Sleep for ``seconds``, logging ``message`` as the reason for the wait."""
  logging.info("Sleeping for %d seconds %s", seconds, message)
  time.sleep(seconds)
def _processes_exists(process_list, process_cmd, min_processes):
  """ check if a process is running """
  # Count the processes whose command line contains the target command string.
  matches = sum(1 for proc in process_list if process_cmd in proc.cmd)
  return matches >= min_processes
def _safe_delete_file(file_name):
  """Delete file_name if it is an existing regular file; log failures."""
  if not (os.path.isfile(file_name) and os.path.exists(file_name)):
    return
  try:
    os.remove(file_name)
  except Exception as e:
    logging.error("Failed to delete file: %s: %s", file_name, e)
    return False
| {
"content_hash": "a7e4fe5402dec9b6fa4210ec9fea41a4",
"timestamp": "",
"source": "github",
"line_count": 339,
"max_line_length": 100,
"avg_line_length": 37.24778761061947,
"alnum_prop": 0.6600934505424884,
"repo_name": "lucperkins/heron",
"id": "19ff4d1807d77eea1a0ce0191b9b4b51545843b2",
"size": "13268",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "integration_test/src/python/local_test_runner/test_template.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "11709"
},
{
"name": "C++",
"bytes": "1623239"
},
{
"name": "CSS",
"bytes": "109554"
},
{
"name": "HCL",
"bytes": "2115"
},
{
"name": "HTML",
"bytes": "156820"
},
{
"name": "Java",
"bytes": "4466689"
},
{
"name": "JavaScript",
"bytes": "1110981"
},
{
"name": "M4",
"bytes": "17941"
},
{
"name": "Makefile",
"bytes": "1046"
},
{
"name": "Objective-C",
"bytes": "1929"
},
{
"name": "Python",
"bytes": "1537910"
},
{
"name": "Ruby",
"bytes": "1930"
},
{
"name": "Scala",
"bytes": "72781"
},
{
"name": "Shell",
"bytes": "166876"
},
{
"name": "Smarty",
"bytes": "528"
},
{
"name": "Thrift",
"bytes": "915"
}
],
"symlink_target": ""
} |
import logging
from django.contrib.auth.models import User, Group, Permission
try:
from django.contrib.auth.models import Message
except ImportError:
Message = None
from django.db import models
from django.http import Http404
from django.shortcuts import get_object_or_404, _get_queryset
from piston.handler import BaseHandler, AnonymousBaseHandler
from piston.utils import rc
from django_roa_server.models import RemotePage, RemotePageWithManyFields, \
RemotePageWithBooleanFields, RemotePageWithCustomSlug, \
RemotePageWithOverriddenUrls, RemotePageWithRelations, \
RemotePageWithNamedRelations, RemotePageWithRelationsThrough, \
RemotePageWithCustomPrimaryKey
logger = logging.getLogger("django_roa_server")
class ROAHandler(BaseHandler):
    """Piston handler providing generic CRUD operations for ``self.model``."""

    def flatten_dict(self, dct):
        # Stringify keys and drop unset id/pk markers (value u'None') coming
        # from the serialized request data.
        return dict([ (str(k), dct.get(k)) for k in dct.keys() \
                      if (k, dct.get(k)) != (u'id', u'None') and (k, dct.get(k)) != (u'pk', u'None')])

    @staticmethod
    def _get_object(model, *args, **kwargs):
        # Default lookup is by primary key; slug-based subclasses override this.
        return get_object_or_404(model, pk=kwargs['pk'])

    def read(self, request, *args, **kwargs):
        """
        Retrieves an object or a list of objects.
        """
        if not self.has_model():
            return rc.NOT_IMPLEMENTED
        logger.debug('Args: %s' % str(args))
        logger.debug('Kwargs: %s' % str(kwargs))
        if kwargs.values() != [None]:
            # Returns a single object
            return [self._get_object(self.model, *args, **kwargs)]

        # Initialization
        queryset = _get_queryset(self.model)
        logger.debug('Before filters: %s' % str(queryset))

        # Filtering: filter_*/exclude_* GET parameters become (lookup, value)
        # pairs, which Django's Q machinery accepts as positional children.
        filters, excludes = {}, {}
        for k, v in request.GET.iteritems():
            if k.startswith('filter_'):
                filters[k[7:]] = v
            if k.startswith('exclude_'):
                excludes[k[8:]] = v
        queryset = queryset.filter(*filters.items()).exclude(*excludes.items())
        logger.debug('Filters: %s' % str(filters))
        logger.debug('Excludes: %s' % str(excludes))
        logger.debug('After filters: %s' % str(queryset))

        # Ordering (test custom parameters' name)
        if 'order' in request.GET:
            order_bys = request.GET['order'].split(',')
            queryset = queryset.order_by(*order_bys)

        # Slicing
        limit_start = int(request.GET.get('limit_start', 0))
        limit_stop = request.GET.get('limit_stop', False) and int(request.GET['limit_stop']) or None
        queryset = queryset[limit_start:limit_stop]

        obj_list = list(queryset)
        if not obj_list:
            raise Http404('No %s matches the given query.' % queryset.model._meta.object_name)
        logger.debug(u'Objects: %s retrieved' % [unicode(obj) for obj in obj_list])
        return queryset

    def create(self, request, *args, **kwargs):
        """
        Creates an object given request args, returned as a list.
        """
        if not self.has_model():
            return rc.NOT_IMPLEMENTED

        data = request.POST.copy()

        values = {}
        for field in self.model._meta.local_fields:
            field_value = data.get(field.name, None)
            if field_value not in (u'', u'None'):

                # Handle FK fields: store the raw pk coerced through the
                # related model's target field.
                if field.rel and isinstance(field.rel, models.ManyToOneRel):
                    field_value = data.get(field.attname, None)
                    if field_value not in (u'', u'None'):
                        values[field.attname] = field.rel.to._meta.get_field(field.rel.field_name).to_python(field_value)
                    else:
                        values[field.attname] = None

                # Handle all other fields
                else:
                    if isinstance(field, models.fields.BooleanField):
                        field_value = field.to_python(field_value)
                    elif isinstance(field, models.fields.FloatField):
                        if field_value is not None:
                            field_value = float(field_value)
                    values[field.name] = field_value

        obj = self.model.objects.create(**values)
        # Re-fetch so the response reflects DB-applied defaults.
        response = [self.model.objects.get(pk=obj.pk)]
        logger.debug(u'Object "%s" created' % unicode(obj))
        return response

    def update(self, request, *args, **kwargs):
        """
        Modifies an object given request args, returned as a list.
        """
        if not self.has_model():
            return rc.NOT_IMPLEMENTED

        data = request.PUT.copy()
        logger.debug(u'Received: %s as PUT data' % data)
        obj = self._get_object(self.model, *args, **kwargs)

        for field in self.model._meta.local_fields:
            field_name = field.name

            # Handle FK fields
            if field.rel and isinstance(field.rel, models.ManyToOneRel):
                field_value = data.get(field.attname, None)
                if field_value not in (u'', u'None', None):
                    field_value = field.rel.to._meta.get_field(field.rel.field_name).to_python(field_value)
                else:
                    field_value = None
                setattr(obj, field.attname, field_value)

            # Handle all other fields; missing keys leave the attribute as-is.
            elif field_name in data:
                field_value = data[field_name]
                if field_value in (u'', u'None'):
                    field_value = None
                if isinstance(field, models.fields.BooleanField) \
                    or isinstance(field, models.fields.NullBooleanField) \
                    or isinstance(field, models.fields.IntegerField):
                    field_value = field.to_python(field_value)
                elif isinstance(field, models.fields.FloatField):
                    if field_value is not None:
                        field_value = float(field_value)
                elif isinstance(field, models.fields.CharField):
                    if field_value is None:
                        field_value = u''
                setattr(obj, field_name, field_value)

        obj.save()
        response = [self.model.objects.get(pk=obj.pk)]
        logger.debug(u'Object "%s" modified with %s' % (
            unicode(obj), unicode(data.items())))
        return response

    def delete(self, request, *args, **kwargs):
        """
        Deletes an object.
        """
        if not self.has_model():
            raise NotImplementedError
        try:
            obj = self._get_object(self.model, *args, **kwargs)
            obj.delete()
            logger.debug(u'Object "%s" deleted, remains %s' % (
                unicode(obj),
                [unicode(obj) for obj in self.model.objects.all()]))
            return rc.DELETED
        except self.model.MultipleObjectsReturned:
            return rc.DUPLICATE_ENTRY
        except self.model.DoesNotExist:
            return rc.NOT_HERE
class ROACountHandler(BaseHandler):
    """Read-only handler returning the number of matching objects."""
    allowed_methods = ('GET', )

    def read(self, request, *args, **kwargs):
        """
        Retrieves the number of objects.
        """
        if not self.has_model():
            return rc.NOT_IMPLEMENTED

        queryset = _get_queryset(self.model)

        # Filtering: filter_*/exclude_* GET parameters become (lookup, value)
        # pairs forwarded positionally to the ORM.
        filter_pairs, exclude_pairs = [], []
        for key, value in request.GET.iteritems():
            if key.startswith('filter_'):
                filter_pairs.append((key[len('filter_'):], value))
            if key.startswith('exclude_'):
                exclude_pairs.append((key[len('exclude_'):], value))
        queryset = queryset.filter(*filter_pairs).exclude(*exclude_pairs)

        # Ordering
        if 'order_by' in request.GET:
            queryset = queryset.order_by(*request.GET['order_by'].split(','))

        # Counting
        counter = queryset.count()
        logger.debug(u'Count: %s objects' % counter)
        return counter
class ROAWithSlugHandler(ROAHandler):

    @staticmethod
    def _get_object(model, *args, **kwargs):
        """Returns an object from a slug.

        Useful when the slug is a combination of many fields.
        """
        # Slug format is "<pk>-<slug text>"; only the pk part drives the lookup.
        slug_value = kwargs['object_slug']
        pk, _remainder = slug_value.split('-', 1)
        return get_object_or_404(model, pk=pk)
# ---------------------------------------------------------------------------
# Concrete handlers: each binds the generic CRUD / count behaviour above to a
# single model.
# ---------------------------------------------------------------------------
class RemotePageHandler(ROAHandler):
    model = RemotePage

class RemotePageCountHandler(ROACountHandler):
    model = RemotePage

class RemotePageWithManyFieldsHandler(ROAHandler):
    model = RemotePageWithManyFields

class RemotePageWithManyFieldsCountHandler(ROACountHandler):
    model = RemotePageWithManyFields

class RemotePageWithBooleanFieldsHandler(ROAHandler):
    model = RemotePageWithBooleanFields

class RemotePageWithBooleanFieldsCountHandler(ROACountHandler):
    model = RemotePageWithBooleanFields

# Slug-based handlers use ROAWithSlugHandler's pk-from-slug lookup.
class RemotePageWithCustomSlugHandler(ROAWithSlugHandler):
    model = RemotePageWithCustomSlug

class RemotePageWithCustomSlugCountHandler(ROACountHandler):
    model = RemotePageWithCustomSlug

class RemotePageWithCustomPrimaryKeyHandler(ROAHandler):
    model = RemotePageWithCustomPrimaryKey

class RemotePageWithCustomPrimaryKeyCountHandler(ROACountHandler):
    model = RemotePageWithCustomPrimaryKey

class RemotePageWithCustomPrimaryKeyCount2Handler(ROACountHandler):
    model = RemotePageWithCustomPrimaryKey

    def read(self, request, *args, **kwargs):
        # Deliberately malformed counter, used to exercise client-side error
        # handling of bad count responses.
        return 'invalid counter'

class RemotePageWithOverriddenUrlsHandler(ROAWithSlugHandler):
    model = RemotePageWithOverriddenUrls

class RemotePageWithOverriddenUrlsCountHandler(ROACountHandler):
    model = RemotePageWithOverriddenUrls

class RemotePageWithRelationsHandler(ROAHandler):
    model = RemotePageWithRelations

class RemotePageWithRelationsCountHandler(ROACountHandler):
    model = RemotePageWithRelations

class RemotePageWithRelationsThroughHandler(ROAHandler):
    model = RemotePageWithRelationsThrough

class RemotePageWithRelationsThroughCountHandler(ROACountHandler):
    model = RemotePageWithRelationsThrough

class RemotePageWithNamedRelationsHandler(ROAHandler):
    model = RemotePageWithNamedRelations

class RemotePageWithNamedRelationsCountHandler(ROACountHandler):
    model = RemotePageWithNamedRelations

class UserHandler(ROAHandler):
    model = User

# Message was dropped from django.contrib.auth in newer Django versions; only
# expose the handler when the model imported successfully (see top of module).
MessageHandler = None
if Message:
    class MessageHandler(ROAHandler):
        model = Message

class PermissionHandler(ROAHandler):
    model = Permission

class GroupHandler(ROAHandler):
    model = Group
| {
"content_hash": "74d0b93f4da302193739e5c0d02cc588",
"timestamp": "",
"source": "github",
"line_count": 313,
"max_line_length": 121,
"avg_line_length": 33.26517571884984,
"alnum_prop": 0.6208221283134845,
"repo_name": "charles-vdulac/django-roa",
"id": "e8d60edff5d403393434a56abe4591a832d6e734",
"size": "10412",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "examples/django_roa_server/handlers.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "181765"
},
{
"name": "Shell",
"bytes": "90"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from django.core.urlresolvers import reverse
from sentry.testutils import APITestCase
class TeamMembersTest(APITestCase):
    def test_simple(self):
        """Only members whose membership includes the team are listed."""
        organization = self.create_organization()
        team = self.create_team(organization=organization)
        user_in_team = self.create_user('foo@example.com')
        user_outside = self.create_user('bar@example.com')
        membership = self.create_member(organization=organization, user=user_in_team, teams=[team])
        self.create_member(organization=organization, user=user_outside, teams=[])

        self.login_as(user=self.user)

        url = reverse(
            'sentry-api-0-team-members',
            kwargs={'organization_slug': organization.slug, 'team_slug': team.slug},
        )
        response = self.client.get(url)
        assert response.status_code == 200
        assert len(response.data) == 1
        assert response.data[0]['id'] == str(membership.id)
| {
"content_hash": "687481e125e99f37e2e4a6cf16dd9d98",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 77,
"avg_line_length": 37.166666666666664,
"alnum_prop": 0.6401345291479821,
"repo_name": "nicholasserra/sentry",
"id": "b5bcf0c74dd07312fdfd6eaa139976fc95cd63a0",
"size": "892",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tests/sentry/api/endpoints/test_team_members.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "174940"
},
{
"name": "HTML",
"bytes": "199996"
},
{
"name": "JavaScript",
"bytes": "609445"
},
{
"name": "Lua",
"bytes": "21966"
},
{
"name": "Makefile",
"bytes": "4816"
},
{
"name": "Python",
"bytes": "8613631"
}
],
"symlink_target": ""
} |
import json
import os
import sys
from get_initial_pks import get_initial_pks
# Location of the exporter's JSON input, resolved relative to this script.
path_files_folder = 'flashcard-exporter/data/'
path_folder_full = os.path.join(os.path.dirname(os.path.abspath(__file__)),os.pardir, path_files_folder)

# if filename not specified use arthistoryflash.json
if len(sys.argv) < 2:
    json_file = 'arthistoryflash.json'
elif len(sys.argv) == 2:
    json_file = sys.argv[1]
    if not json_file.endswith('.json'):
        print "Invalid filename."
        sys.exit()
else:
    print "Invalid arguments."
    sys.exit()

# Load the whole exporter document into memory once.
json_data = open(os.path.join(path_folder_full, json_file))
data = json.load(json_data)
json_data.close()

# initialize file order (counter embedded in each fixture file name)
# NOTE: `global` at module level is a no-op; kept for clarity of intent.
global file_order
file_order = 0

# where to save the json files
path_fixtures = 'fixtures/'
def output_create(model_name, json_dump):
    """Write json_dump as a numbered Django fixture file for model_name.

    Files are named "<n>_initial_data_<model_name>.json" so Django loads
    them in creation order; the module-level file_order counter supplies n.
    """
    global file_order
    file_order = file_order + 1
    outputfile = str(file_order) + '_initial_data_' + model_name + '.json'
    # BUG FIX: the original called json.dumps(json_dump, output, indent=4),
    # which passed the file object as the ``skipkeys`` argument; dumps takes
    # only the object plus keyword options.
    with open(os.path.join(os.path.dirname(__file__), path_fixtures, outputfile), 'w') as output:
        output.write(json.dumps(json_dump, indent=4))
    print('Successfully created ' + str(outputfile))
# initial pks: offsets so the new fixture rows append after any existing rows.
initial_collection_pk, initial_deck_pk, initial_card_pk, initial_field_pk, initial_decks_cards_pk = get_initial_pks()

# collection: a single flash.Collection row for the whole input document.
collection_pk = initial_collection_pk + 1
collection_name= data['collections'][0]['collection_name']
collection_description = "A course on "+ str(collection_name)
collection_json = [{
    "model":"flash.Collection",
    "pk":collection_pk,
    "fields":
    {
        "title":collection_name,
        "description":collection_description
    }
}]
output_create('collection', collection_json)

# decks
initial_deck_pk = 1 + initial_deck_pk
# number of decks
decks = len(data['collections'][0]['decks'])
decks_json = []
for i in range(0, decks):
    deck_title = data['collections'][0]['decks'][i]['deck_name']
    decks_json.append({
        "model":"flash.Deck",
        "pk":i+initial_deck_pk,
        "fields":
        {
            "title":deck_title,
            "collection" : collection_pk
        }
    })
output_create('decks', decks_json)

# map the card ids to natural numbers (exporter _card_id -> fixture pk)
card_mapping = dict()

# cards
initial_card_pk = 1 + initial_card_pk
cards = len(data['cards'])
cards_json = []
for k in range(0, cards):
    card_pk = k + initial_card_pk
    sort_order = k+1
    card_id = data['cards'][k]['_card_id']
    card_mapping[card_id] = card_pk
    cards_json.append({
        "model":"flash.Card",
        "pk":card_pk,
        "fields":
        {
            "sort_order": sort_order,
            "collection" : collection_pk
        }
    })
output_create('cards', cards_json)

# fields and cards fields: one Field row plus one Cards_Fields join row per
# field entry; both share the same pk counter.
fields_json = []
cards_fields_json = []
initial_pk = 1 + initial_field_pk
pk = 0
for k in range(0, cards):
    num_fields = len(data['cards'][k]['fields'])
    for j in range(0, num_fields):
        field = data['cards'][k]['fields'][j]
        label = field['label']
        field_type = field['type']
        show_label = field['show_label']
        display = field['display']
        value = field['value']
        card_id = data['cards'][k]['_card_id']
        card = card_mapping[card_id]
        sort_order = j+1
        field_pk = pk +initial_pk
        pk = pk+1
        fields_json.append({
            "model":"flash.Field",
            "pk":field_pk,
            "fields":
            {
                "sort_order": sort_order,
                "collection" : collection_pk,
                "label": label,
                "display": display,
                "show_label":show_label,
                "field_type":field_type
            }
        })
        cards_fields_json.append({
            "model":"flash.Cards_Fields",
            "pk":field_pk,
            "fields":
            {
                "sort_order": sort_order,
                "value": value,
                "field":field_pk,
                "card":card
            }
        })
output_create('fields', fields_json)
output_create('cards_fields', cards_fields_json)

# decks_cards: join rows linking each deck to its cards, in deck order.
decks = len(data['collections'][0]['decks'])
decks_cards_json = []
pk = 0
initial_pk = 1 + initial_decks_cards_pk
for i in range(0, decks):
    deck_title = data['collections'][0]['decks'][i]['deck_name']
    deck_cards = data['collections'][0]['decks'][i]['deck_cards']
    num_deck_cards = len(deck_cards)
    for l in range(0, num_deck_cards):
        card_id = deck_cards[l]
        card = card_mapping[card_id]
        dc_pk = pk+initial_pk
        pk = pk+1
        deck_pk = i+initial_deck_pk
        card_sort = l+1
        decks_cards_json.append({
            "model":"flash.Decks_cards",
            "pk":dc_pk,
            "fields":
            {
                "deck":deck_pk,
                "card" :card,
                "sort_order":card_sort
            }
        })
output_create('decks_cards', decks_cards_json)
"content_hash": "ddc989258576175191d89c5db9eaac4d",
"timestamp": "",
"source": "github",
"line_count": 181,
"max_line_length": 117,
"avg_line_length": 32.530386740331494,
"alnum_prop": 0.48046875,
"repo_name": "Harvard-ATG/HarvardCards",
"id": "c36239aa58e8b04a0f2eec57a2da35b25f5d62db",
"size": "6227",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "modules/flashcard-loader/initial_data.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "145267"
},
{
"name": "HTML",
"bytes": "243388"
},
{
"name": "JavaScript",
"bytes": "2347716"
},
{
"name": "Makefile",
"bytes": "285"
},
{
"name": "Puppet",
"bytes": "6337"
},
{
"name": "Python",
"bytes": "215766"
},
{
"name": "Shell",
"bytes": "1727"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, division, print_function, unicode_literals
import pytest ; pytest
from bokeh.util.api import INTERNAL, PUBLIC ; INTERNAL, PUBLIC
from bokeh.util.testing import verify_api ; verify_api
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
# External imports
import pandas as pd
# Bokeh imports
from bokeh.util.testing import verify_all
# Module under test
#import bokeh.sampledata.daylight as bsd
#-----------------------------------------------------------------------------
# API Definition
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Setup
#-----------------------------------------------------------------------------
# Names bokeh.sampledata.daylight is expected to export.
ALL = (
    'daylight_warsaw_2013',
)

#-----------------------------------------------------------------------------
# Public API
#-----------------------------------------------------------------------------

# Parametrized check that the module exports exactly the names in ALL.
Test___all__ = pytest.mark.sampledata(verify_all("bokeh.sampledata.daylight", ALL))
@pytest.mark.sampledata
def test_daylight_warsaw_2013():
    """The sample data loads as a DataFrame with one row per day of 2013."""
    from bokeh.sampledata import daylight
    frame = daylight.daylight_warsaw_2013
    assert isinstance(frame, pd.DataFrame)
    # check detail for package data
    assert len(frame) == 365
#-----------------------------------------------------------------------------
# Internal API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
| {
"content_hash": "0a1bd006b0d8b2f6a4461fb229a41927",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 83,
"avg_line_length": 33.07272727272727,
"alnum_prop": 0.35733919736118747,
"repo_name": "rs2/bokeh",
"id": "10ceb8e8c18bf49b1df3b7397c34d5d3944ca5ba",
"size": "2339",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "bokeh/sampledata/tests/test_daylight.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "1442"
},
{
"name": "CSS",
"bytes": "101858"
},
{
"name": "CoffeeScript",
"bytes": "1220192"
},
{
"name": "HTML",
"bytes": "48230"
},
{
"name": "JavaScript",
"bytes": "57773"
},
{
"name": "Makefile",
"bytes": "1150"
},
{
"name": "Python",
"bytes": "2648330"
},
{
"name": "Shell",
"bytes": "8519"
},
{
"name": "TypeScript",
"bytes": "236495"
}
],
"symlink_target": ""
} |
import os
import unittest
from pordego.config_loader import get_analysis_configs, load_config
PATH_TO_DATA_DIR = os.path.join(os.path.abspath(os.path.dirname(__file__)), "data")
class TestConfigLoader(unittest.TestCase):

    def test_include_config(self):
        """Include files and recursively included files should resolve correctly"""
        config_path = os.path.join(PATH_TO_DATA_DIR, "test_config_include.yml")
        analysis_section = load_config(config_path)["analysis"]
        resolved = get_analysis_configs(analysis_section, PATH_TO_DATA_DIR)
        expected = [{'config_val': 1, 'plugin_name': 'sub_test'},
                    {'sub_sub_config1': 2, 'plugin_name': 'sub_sub_test'}]
        self.assertEqual(expected, resolved)
| {
"content_hash": "8ede8c109aaad366a029aa733369d83f",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 108,
"avg_line_length": 47,
"alnum_prop": 0.6638297872340425,
"repo_name": "ttreptow/pordego",
"id": "473c2d1d59b2a9d9ff0ec89c85de2dc4b8a26bd7",
"size": "705",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_config_loader.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "9462"
}
],
"symlink_target": ""
} |
import csv
import datetime
import json
import logging
import urlparse
from io import BytesIO
from django.conf import settings
from django.core.exceptions import ValidationError as DjangoValidationError
from django.core.urlresolvers import reverse
from django.core.validators import validate_ipv4_address, validate_ipv46_address
from django.http import HttpResponse
from django.shortcuts import render_to_response
from django.template import RequestContext
from mongoengine.base import ValidationError
from crits.campaigns.forms import CampaignForm
from crits.campaigns.campaign import Campaign
from crits.config.config import CRITsConfig
from crits.core import form_consts
from crits.core.class_mapper import class_from_id
from crits.core.crits_mongoengine import EmbeddedSource, EmbeddedCampaign
from crits.core.crits_mongoengine import json_handler
from crits.core.forms import SourceForm, DownloadFileForm
from crits.core.handlers import build_jtable, csv_export
from crits.core.handlers import jtable_ajax_list, jtable_ajax_delete
from crits.core.user_tools import is_admin, user_sources
from crits.core.user_tools import is_user_subscribed, is_user_favorite
from crits.domains.domain import Domain
from crits.domains.handlers import upsert_domain, get_valid_root_domain
from crits.events.event import Event
from crits.indicators.forms import IndicatorActionsForm
from crits.indicators.forms import IndicatorActivityForm
from crits.indicators.indicator import IndicatorAction
from crits.indicators.indicator import Indicator
from crits.indicators.indicator import EmbeddedConfidence, EmbeddedImpact
from crits.ips.handlers import ip_add_update, validate_and_normalize_ip
from crits.ips.ip import IP
from crits.notifications.handlers import remove_user_from_notification
from crits.services.handlers import run_triage, get_supported_services
from crits.vocabulary.indicators import (
IndicatorTypes,
IndicatorThreatTypes,
IndicatorAttackTypes
)
from crits.vocabulary.ips import IPTypes
from crits.vocabulary.relationships import RelationshipTypes
logger = logging.getLogger(__name__)
def generate_indicator_csv(request):
    """
    Generate a CSV file of the Indicator information

    :param request: The request for this CSV.
    :type request: :class:`django.http.HttpRequest`
    :returns: :class:`django.http.HttpResponse`
    """
    # Delegate directly to the generic CSV exporter for the Indicator class.
    return csv_export(request, Indicator)
def generate_indicator_jtable(request, option):
    """
    Generate the jtable data for rendering in the list template.

    Serves three modes: 'jtlist' (JSON page of indicators), 'jtdelete'
    (JSON delete result), and anything else (render the jtable widget,
    either inline or as a full listing page).

    :param request: The request for this jtable.
    :type request: :class:`django.http.HttpRequest`
    :param option: Action to take.
    :type option: str of either 'jtlist', 'jtdelete', or 'inline'.
    :returns: :class:`django.http.HttpResponse`
    """
    obj_type = Indicator
    type_ = "indicator"
    # jtable options (columns, urls, sort defaults) live on the document class.
    mapper = obj_type._meta['jtable_opts']
    if option == "jtlist":
        # Sets display url
        details_url = mapper['details_url']
        details_url_key = mapper['details_url_key']
        fields = mapper['fields']
        # AJAX listing: return the requested page of indicators as JSON.
        response = jtable_ajax_list(obj_type,
                                    details_url,
                                    details_url_key,
                                    request,
                                    includes=fields)
        return HttpResponse(json.dumps(response,
                                       default=json_handler),
                            content_type="application/json")
    if option == "jtdelete":
        # AJAX delete: report OK/ERROR in the format jtable expects.
        response = {"Result": "ERROR"}
        if jtable_ajax_delete(obj_type, request):
            response = {"Result": "OK"}
        return HttpResponse(json.dumps(response,
                                       default=json_handler),
                            content_type="application/json")
    # Neither list nor delete: build the jtable definition for rendering.
    jtopts = {
        'title': "Indicators",
        'default_sort': mapper['default_sort'],
        'listurl': reverse('crits.%ss.views.%ss_listing' % (type_,
                                                            type_),
                           args=('jtlist',)),
        'deleteurl': reverse('crits.%ss.views.%ss_listing' % (type_,
                                                              type_),
                             args=('jtdelete',)),
        'searchurl': reverse(mapper['searchurl']),
        'fields': list(mapper['jtopts_fields']),
        'hidden_fields': mapper['hidden_fields'],
        'linked_fields': mapper['linked_fields'],
        'details_link': mapper['details_link'],
        'no_sort': mapper['no_sort']
    }
    config = CRITsConfig.objects().first()
    if not config.splunk_search_url:
        # Drop the splunk column when no splunk search URL is configured.
        # NOTE(review): assumes the splunk column is at index 1 of
        # jtopts_fields -- confirm against Indicator's jtable_opts.
        del jtopts['fields'][1]
    jtable = build_jtable(jtopts, request)
    # Toolbar: one button per status filter, plus an "Add Indicator" shortcut.
    jtable['toolbar'] = [
        {
            'tooltip': "'All Indicators'",
            'text': "'All'",
            'click': "function () {$('#indicator_listing').jtable('load', {'refresh': 'yes'});}",
            'cssClass': "'jtable-toolbar-center'",
        },
        {
            'tooltip': "'New Indicators'",
            'text': "'New'",
            'click': "function () {$('#indicator_listing').jtable('load', {'refresh': 'yes', 'status': 'New'});}",
            'cssClass': "'jtable-toolbar-center'",
        },
        {
            'tooltip': "'In Progress Indicators'",
            'text': "'In Progress'",
            'click': "function () {$('#indicator_listing').jtable('load', {'refresh': 'yes', 'status': 'In Progress'});}",
            'cssClass': "'jtable-toolbar-center'",
        },
        {
            'tooltip': "'Analyzed Indicators'",
            'text': "'Analyzed'",
            'click': "function () {$('#indicator_listing').jtable('load', {'refresh': 'yes', 'status': 'Analyzed'});}",
            'cssClass': "'jtable-toolbar-center'",
        },
        {
            'tooltip': "'Deprecated Indicators'",
            'text': "'Deprecated'",
            'click': "function () {$('#indicator_listing').jtable('load', {'refresh': 'yes', 'status': 'Deprecated'});}",
            'cssClass': "'jtable-toolbar-center'",
        },
        {
            'tooltip': "'Add Indicator'",
            'text': "'Add Indicator'",
            'click': "function () {$('#new-indicator').click()}",
        },
    ]
    if config.splunk_search_url:
        # Render splunk columns as links into the configured splunk search.
        for field in jtable['fields']:
            if field['fieldname'].startswith("'splunk"):
                field['display'] = """ function (data) {
                return '<a href="%s' + data.record.value + '"><img src="/new_images/splunk.png" /></a>';
            }
            """ % config.splunk_search_url
    if option == "inline":
        # Embedded in another page (tab); render only the jtable widget.
        return render_to_response("jtable.html",
                                  {'jtable': jtable,
                                   'jtid': '%s_listing' % type_,
                                   'button': '%ss_tab' % type_},
                                  RequestContext(request))
    else:
        # Standalone listing page.
        return render_to_response("%s_listing.html" % type_,
                                  {'jtable': jtable,
                                   'jtid': '%s_listing' % type_},
                                  RequestContext(request))
def get_indicator_details(indicator_id, analyst):
    """
    Generate the data to render the Indicator details template.

    Returns an error template/args pair when the indicator does not exist
    or the user's sources do not grant access to it.

    :param indicator_id: The ObjectId of the Indicator to get details for.
    :type indicator_id: str
    :param analyst: The user requesting this information.
    :type analyst: str
    :returns: template (str or None), arguments (dict)
    """
    template = None
    users_sources = user_sources(analyst)
    # Restrict the lookup to sources the requesting user is allowed to see.
    indicator = Indicator.objects(id=indicator_id,
                                  source__name__in=users_sources).first()
    if not indicator:
        error = ("Either this indicator does not exist or you do "
                 "not have permission to view it.")
        template = "error.html"
        args = {'error': error}
        return template, args
    # Forms rendered on the details page (action, activity, campaign,
    # source attribution, and download).
    forms = {}
    forms['new_action'] = IndicatorActionsForm(initial={'analyst': analyst,
                                                        'active': "off",
                                                        'date': datetime.datetime.now()})
    forms['new_activity'] = IndicatorActivityForm(initial={'analyst': analyst,
                                                           'date': datetime.datetime.now()})
    forms['new_campaign'] = CampaignForm()#'date': datetime.datetime.now(),
    forms['new_source'] = SourceForm(analyst, initial={'date': datetime.datetime.now()})
    forms['download_form'] = DownloadFileForm(initial={"obj_type": 'Indicator',
                                                       "obj_id": indicator_id})
    # Strip data the user's sources do not entitle them to see.
    indicator.sanitize("%s" % analyst)
    # remove pending notifications for user
    remove_user_from_notification("%s" % analyst, indicator_id, 'Indicator')
    # subscription
    subscription = {
        'type': 'Indicator',
        'id': indicator_id,
        'subscribed': is_user_subscribed("%s" % analyst,
                                         'Indicator',
                                         indicator_id),
    }
    # relationship
    relationship = {
        'type': 'Indicator',
        'value': indicator_id,
    }
    #objects
    objects = indicator.sort_objects()
    #relationships
    relationships = indicator.sort_relationships("%s" % analyst, meta=True)
    #comments
    comments = {'comments': indicator.get_comments(),
                'url_key': indicator_id}
    #screenshots
    screenshots = indicator.get_screenshots(analyst)
    # favorites
    favorite = is_user_favorite("%s" % analyst, 'Indicator', indicator.id)
    # services
    service_list = get_supported_services('Indicator')
    # analysis results
    service_results = indicator.get_analysis_results()
    # Template context consumed by the indicator details page.
    args = {'objects': objects,
            'relationships': relationships,
            'comments': comments,
            'relationship': relationship,
            'subscription': subscription,
            "indicator": indicator,
            "forms": forms,
            "indicator_id": indicator_id,
            'screenshots': screenshots,
            'service_list': service_list,
            'service_results': service_results,
            'favorite': favorite,
            'rt_url': settings.RT_URL}
    return template, args
def get_indicator_type_value_pair(field):
    """
    Extract the type/value pair from a generic field. This is generally used
    on fields that can become indicators such as objects or email fields.

    The type/value pairs are used in indicator relationships since
    indicators are uniquely identified via their type/value pair. This
    function can be used in conjunction with:
    crits.indicators.handlers.does_indicator_relationship_exist

    Args:
        field: The input field containing a type/value pair. This field is
            generally from custom dictionaries such as from Django templates.

    Returns:
        A ``(type, value)`` tuple with the value lowercased and stripped,
        or ``(None, None)`` when the field layout is not recognized.
    """
    # Object-style field: {'name': ..., 'type': ..., 'value': ...}
    # (use "is not None" identity checks; avoid shadowing the builtin `type`)
    if (field.get("name") is not None and field.get("type") is not None
            and field.get("value") is not None):
        name = field.get("name")
        ind_type = field.get("type")
        value = field.get("value").lower().strip()
        # Composite type "Type - Name" unless type and name coincide.
        full_type = ind_type
        if ind_type != name:
            full_type = ind_type + " - " + name
        return (full_type, value)
    # Email-style field: {'field_type': ..., 'field_value': ...}
    if field.get("field_type") is not None and field.get("field_value") is not None:
        return (field.get("field_type"), field.get("field_value").lower().strip())
    # Unsupported field layout.
    return (None, None)
def get_verified_field(data, valid_values, field=None, default=None):
    """
    Validate and correct string value(s) in a dictionary key or list,
    or a string by itself.

    :param data: The data to be verified and corrected.
    :type data: dict, list of strings, or str
    :param valid_values: Key with simplified string, value with actual string
    :type valid_values: dict
    :param field: The dictionary key containing the data.
    :type field: str
    :param default: A value to use if an invalid item cannot be corrected
    :type default: str
    :returns: the validated/corrected value(str), list of values(list) or ''
    """
    # Python 2/3 compatible string type (this module historically targets
    # Python 2, where unicode values must also be accepted).
    try:
        string_types = basestring
    except NameError:  # Python 3
        string_types = str
    if isinstance(data, dict):
        data = data.get(field, '')
    if isinstance(data, list):
        value_list = data
    else:
        value_list = [data]
    for i, item in enumerate(value_list):
        if isinstance(item, string_types):
            # Normalize for lookup: lowercase, strip, collapse " - " to "-".
            normalized = item.lower().strip().replace(' - ', '-')
            if normalized in valid_values:
                value_list[i] = valid_values[normalized]
                continue
        if default is not None:
            # Bug fix: previously the default was assigned to the loop
            # variable only and never written back, so invalid values
            # passed through unchanged instead of being replaced.
            value_list[i] = default
            continue
        # Invalid item and no default: signal failure with ''.
        return ''
    if isinstance(data, list):
        return value_list
    else:
        return value_list[0]
def handle_indicator_csv(csv_data, source, method, reference, ctype, username,
                         add_domain=False):
    """
    Handle adding Indicators in CSV format (file or blob).

    :param csv_data: The CSV data.
    :type csv_data: str or file handle
    :param source: The name of the source for these indicators.
    :type source: str
    :param method: The method of acquisition of this indicator.
    :type method: str
    :param reference: The reference to this data.
    :type reference: str
    :param ctype: The CSV type.
    :type ctype: str ("file" or "blob")
    :param username: The user adding these indicators.
    :type username: str
    :param add_domain: If the indicators being added are also other top-level
                       objects, add those too.
    :type add_domain: boolean
    :returns: dict with keys "success" (boolean) and "message" (str)
    """
    if ctype == "file":
        cdata = csv_data.read()
    else:
        cdata = csv_data.encode('ascii')
    data = csv.DictReader(BytesIO(cdata), skipinitialspace=True)
    result = {'success': True}
    result_message = ""
    # Compute permitted values in CSV
    valid_ratings = {
        'unknown': 'unknown',
        'benign': 'benign',
        'low': 'low',
        'medium': 'medium',
        'high': 'high'}
    valid_campaign_confidence = {
        'low': 'low',
        'medium': 'medium',
        'high': 'high'}
    valid_campaigns = {}
    for c in Campaign.objects(active='on'):
        valid_campaigns[c['name'].lower().replace(' - ', '-')] = c['name']
    valid_actions = {}
    for a in IndicatorAction.objects(active='on'):
        valid_actions[a['name'].lower().replace(' - ', '-')] = a['name']
    valid_ind_types = {}
    for obj in IndicatorTypes.values(sort=True):
        valid_ind_types[obj.lower().replace(' - ', '-')] = obj
    # Start line-by-line import
    added = 0
    # Bug fix: initialize so that an empty CSV reports a friendly error
    # below instead of raising NameError at the "processed < 1" check.
    processed = 0
    for processed, d in enumerate(data, 1):
        ind = {}
        ind['value'] = d.get('Indicator', '').lower().strip()
        ind['type'] = get_verified_field(d, valid_ind_types, 'Type')
        ind['threat_type'] = d.get('Threat Type', IndicatorThreatTypes.UNKNOWN)
        ind['attack_type'] = d.get('Attack Type', IndicatorAttackTypes.UNKNOWN)
        if not ind['value'] or not ind['type']:
            # Mandatory value missing or malformed, cannot process csv row
            i = ""
            result['success'] = False
            if not ind['value']:
                i += "No valid Indicator value "
            if not ind['type']:
                i += "No valid Indicator type "
            result_message += "Cannot process row %s: %s<br />" % (processed, i)
            continue
        campaign = get_verified_field(d, valid_campaigns, 'Campaign')
        if campaign:
            ind['campaign'] = campaign
            ind['campaign_confidence'] = get_verified_field(d, valid_campaign_confidence,
                                                            'Campaign Confidence',
                                                            default='low')
        actions = d.get('Action', '')
        if actions:
            actions = get_verified_field(actions.split(','), valid_actions)
            if not actions:
                result['success'] = False
                result_message += "Cannot process row %s: Invalid Action<br />" % processed
                continue
        ind['confidence'] = get_verified_field(d, valid_ratings, 'Confidence',
                                               default='unknown')
        ind['impact'] = get_verified_field(d, valid_ratings, 'Impact',
                                           default='unknown')
        ind[form_consts.Common.BUCKET_LIST_VARIABLE_NAME] = d.get(form_consts.Common.BUCKET_LIST, '')
        ind[form_consts.Common.TICKET_VARIABLE_NAME] = d.get(form_consts.Common.TICKET, '')
        try:
            response = handle_indicator_insert(ind, source, reference, analyst=username,
                                               method=method, add_domain=add_domain)
        except Exception as e:
            result['success'] = False
            result_message += "Failure processing row %s: %s<br />" % (processed, str(e))
            continue
        if response['success']:
            # Attach every listed action to the newly inserted indicator.
            if actions:
                action = {'active': 'on',
                          'analyst': username,
                          'begin_date': '',
                          'end_date': '',
                          'performed_date': '',
                          'reason': '',
                          'date': datetime.datetime.now()}
                for action_type in actions:
                    action['action_type'] = action_type
                    action_add(response.get('objectid'), action)
        else:
            result['success'] = False
            result_message += "Failure processing row %s: %s<br />" % (processed, response['message'])
            continue
        added += 1
    if processed < 1:
        result['success'] = False
        result_message = "Could not find any valid CSV rows to parse!"
    result['message'] = "Successfully added %s Indicator(s).<br />%s" % (added, result_message)
    return result
def handle_indicator_ind(value, source, ctype, threat_type, attack_type,
                         analyst, method='', reference='',
                         add_domain=False, add_relationship=False, campaign=None,
                         campaign_confidence=None, confidence=None, impact=None,
                         bucket_list=None, ticket=None, cache=None):
    """
    Handle adding an individual indicator.

    :param value: The indicator value.
    :type value: str
    :param source: The name of the source for this indicator.
    :type source: str
    :param ctype: The indicator type.
    :type ctype: str
    :param threat_type: The indicator threat type.
    :type threat_type: str
    :param attack_type: The indicator attack type.
    :type attack_type: str
    :param analyst: The user adding this indicator.
    :type analyst: str
    :param method: The method of acquisition of this indicator.
    :type method: str
    :param reference: The reference to this data.
    :type reference: str
    :param add_domain: If the indicators being added are also other top-level
                       objects, add those too.
    :type add_domain: boolean
    :param add_relationship: If a relationship can be made, create it.
    :type add_relationship: boolean
    :param campaign: Campaign to attribute to this indicator.
    :type campaign: str
    :param campaign_confidence: Confidence of this campaign.
    :type campaign_confidence: str
    :param confidence: Indicator confidence.
    :type confidence: str
    :param impact: Indicator impact.
    :type impact: str
    :param bucket_list: The bucket(s) to assign to this indicator.
    :type bucket_list: str
    :param ticket: Ticket to associate with this indicator.
    :type ticket: str
    :param cache: Cached data, typically for performance enhancements
                  during bulk operations. Defaults to a fresh dict per call.
    :type cache: dict
    :returns: dict with keys "success" (boolean) and "message" (str)
    """
    # Fix: avoid a shared mutable default argument ("cache={}" persisted
    # state across unrelated calls); each call now gets its own dict.
    if cache is None:
        cache = {}
    if not source:
        return {"success" : False, "message" : "Missing source information."}
    if threat_type is None:
        threat_type = IndicatorThreatTypes.UNKNOWN
    if attack_type is None:
        attack_type = IndicatorAttackTypes.UNKNOWN
    # Guard clauses for empty mandatory fields (identity check for None).
    if value is None or value.strip() == "":
        return {'success': False,
                'message': "Can't create indicator with an empty value field"}
    if ctype is None or ctype.strip() == "":
        return {'success': False,
                'message': "Can't create indicator with an empty type field"}
    # Marshal the normalized fields into the dict handle_indicator_insert
    # expects; optional attributes are only set when valid.
    ind = {}
    ind['type'] = ctype.strip()
    ind['threat_type'] = threat_type.strip()
    ind['attack_type'] = attack_type.strip()
    ind['value'] = value.lower().strip()
    if campaign:
        ind['campaign'] = campaign
    if campaign_confidence and campaign_confidence in ('low', 'medium', 'high'):
        ind['campaign_confidence'] = campaign_confidence
    if confidence and confidence in ('unknown', 'benign', 'low', 'medium',
                                     'high'):
        ind['confidence'] = confidence
    if impact and impact in ('unknown', 'benign', 'low', 'medium', 'high'):
        ind['impact'] = impact
    if bucket_list:
        ind[form_consts.Common.BUCKET_LIST_VARIABLE_NAME] = bucket_list
    if ticket:
        ind[form_consts.Common.TICKET_VARIABLE_NAME] = ticket
    try:
        return handle_indicator_insert(ind, source, reference, analyst,
                                       method, add_domain, add_relationship, cache=cache)
    except Exception as e:
        return {'success': False, 'message': repr(e)}
def handle_indicator_insert(ind, source, reference='', analyst='', method='',
                            add_domain=False, add_relationship=False, cache={}):
    """
    Insert an individual indicator into the database.

    NOTE: Setting add_domain to True will always create a relationship as well.
    However, to create a relationship with an object that already exists before
    this function was called, set add_relationship to True. This will assume
    that the domain or IP object to create the relationship with already exists
    and will avoid infinite mutual calls between, for example, add_update_ip
    and this function. add domain/IP objects.

    :param ind: Information about the indicator.
    :type ind: dict
    :param source: The source for this indicator.
    :type source: list, str, :class:`crits.core.crits_mongoengine.EmbeddedSource`
    :param reference: The reference to the data.
    :type reference: str
    :param analyst: The user adding this indicator.
    :type analyst: str
    :param method: Method of acquiring this indicator.
    :type method: str
    :param add_domain: If this indicator is also a top-level object, try to add
                       it.
    :type add_domain: boolean
    :param add_relationship: Attempt to add relationships if applicable.
    :type add_relationship: boolean
    :param cache: Cached data, typically for performance enhancements
                  during bulk operations.
    :type cache: dict
    :returns: dict with keys:
              "success" (boolean),
              "message" (str) if failed,
              "objectid" (str) if successful,
              "is_new_indicator" (boolean) if successful.
    """
    # NOTE(review): 'cache={}' is a shared mutable default argument -- its
    # contents persist across calls; confirm callers never mutate it.
    # Validate type/threat/attack against the controlled vocabularies.
    if ind['type'] not in IndicatorTypes.values():
        return {'success': False,
                'message': "Not a valid Indicator Type"}
    if ind['threat_type'] not in IndicatorThreatTypes.values():
        return {'success': False,
                'message': "Not a valid Indicator Threat Type"}
    if ind['attack_type'] not in IndicatorAttackTypes.values():
        return {'success': False,
                'message': "Not a valid Indicator Attack Type"}
    (ind['value'], error) = validate_indicator_value(ind['value'], ind['type'])
    if error:
        return {"success": False, "message": error}
    is_new_indicator = False
    dmain = None
    ip = None
    # Ordering used to decide whether a new confidence/impact outranks the
    # existing rating -- ratings are only ever raised, never lowered.
    rank = {
        'unknown': 0,
        'benign': 1,
        'low': 2,
        'medium': 3,
        'high': 4,
    }
    # Indicators are uniquely identified by their (type, value) pair.
    indicator = Indicator.objects(ind_type=ind['type'],
                                  value=ind['value']).first()
    if not indicator:
        # No existing indicator with this type/value: create a new one.
        indicator = Indicator()
        indicator.ind_type = ind['type']
        indicator.threat_type = ind['threat_type']
        indicator.attack_type = ind['attack_type']
        indicator.value = ind['value']
        indicator.created = datetime.datetime.now()
        indicator.confidence = EmbeddedConfidence(analyst=analyst)
        indicator.impact = EmbeddedImpact(analyst=analyst)
        is_new_indicator = True
    if 'campaign' in ind:
        # Accept a campaign name (str), an EmbeddedCampaign, or a list of
        # EmbeddedCampaign objects.
        if isinstance(ind['campaign'], basestring) and len(ind['campaign']) > 0:
            confidence = ind.get('campaign_confidence', 'low')
            ind['campaign'] = EmbeddedCampaign(name=ind['campaign'],
                                               confidence=confidence,
                                               description="",
                                               analyst=analyst,
                                               date=datetime.datetime.now())
        if isinstance(ind['campaign'], EmbeddedCampaign):
            indicator.add_campaign(ind['campaign'])
        elif isinstance(ind['campaign'], list):
            for campaign in ind['campaign']:
                if isinstance(campaign, EmbeddedCampaign):
                    indicator.add_campaign(campaign)
    # Raise confidence/impact only if the new rating outranks the current one.
    if 'confidence' in ind and rank.get(ind['confidence'], 0) > rank.get(indicator.confidence.rating, 0):
        indicator.confidence.rating = ind['confidence']
        indicator.confidence.analyst = analyst
    if 'impact' in ind and rank.get(ind['impact'], 0) > rank.get(indicator.impact.rating, 0):
        indicator.impact.rating = ind['impact']
        indicator.impact.analyst = analyst
    bucket_list = None
    if form_consts.Common.BUCKET_LIST_VARIABLE_NAME in ind:
        bucket_list = ind[form_consts.Common.BUCKET_LIST_VARIABLE_NAME]
        if bucket_list:
            indicator.add_bucket_list(bucket_list, analyst)
    ticket = None
    if form_consts.Common.TICKET_VARIABLE_NAME in ind:
        ticket = ind[form_consts.Common.TICKET_VARIABLE_NAME]
        if ticket:
            indicator.add_ticket(ticket, analyst)
    # Attach source(s): accept a list of sources, a single EmbeddedSource,
    # or a bare source name string.
    if isinstance(source, list):
        for s in source:
            indicator.add_source(source_item=s, method=method, reference=reference)
    elif isinstance(source, EmbeddedSource):
        indicator.add_source(source_item=source, method=method, reference=reference)
    elif isinstance(source, basestring):
        s = EmbeddedSource()
        s.name = source
        instance = EmbeddedSource.SourceInstance()
        instance.reference = reference
        instance.method = method
        instance.analyst = analyst
        instance.date = datetime.datetime.now()
        s.instances = [instance]
        indicator.add_source(s)
    if add_domain or add_relationship:
        ind_type = indicator.ind_type
        ind_value = indicator.value
        url_contains_ip = False
        if ind_type in (IndicatorTypes.DOMAIN,
                        IndicatorTypes.URI):
            if ind_type == IndicatorTypes.URI:
                # A URI's hostname may be an IP address rather than a domain.
                domain_or_ip = urlparse.urlparse(ind_value).hostname
                try:
                    validate_ipv46_address(domain_or_ip)
                    url_contains_ip = True
                except DjangoValidationError:
                    pass
            else:
                domain_or_ip = ind_value
            if not url_contains_ip:
                success = None
                if add_domain:
                    success = upsert_domain(domain_or_ip, indicator.source, '%s' % analyst,
                                            None, bucket_list=bucket_list, cache=cache)
                    if not success['success']:
                        return {'success': False, 'message': success['message']}
                if not success or not 'object' in success:
                    # Relationship-only path: look up the existing Domain.
                    dmain = Domain.objects(domain=domain_or_ip).first()
                else:
                    dmain = success['object']
        if ind_type in IPTypes.values() or url_contains_ip:
            if url_contains_ip:
                ind_value = domain_or_ip
                # Decide IPv4 vs IPv6 for the extracted host.
                try:
                    validate_ipv4_address(domain_or_ip)
                    ind_type = IndicatorTypes.IPV4_ADDRESS
                except DjangoValidationError:
                    ind_type = IndicatorTypes.IPV6_ADDRESS
            success = None
            if add_domain:
                success = ip_add_update(ind_value,
                                        ind_type,
                                        source=indicator.source,
                                        campaign=indicator.campaign,
                                        analyst=analyst,
                                        bucket_list=bucket_list,
                                        ticket=ticket,
                                        indicator_reference=reference,
                                        cache=cache)
                if not success['success']:
                    return {'success': False, 'message': success['message']}
            if not success or not 'object' in success:
                # Relationship-only path: look up the existing IP.
                ip = IP.objects(ip=indicator.value).first()
            else:
                ip = success['object']
    indicator.save(username=analyst)
    # Relate the indicator to the Domain/IP found or created above.
    if dmain:
        dmain.add_relationship(indicator,
                               RelationshipTypes.RELATED_TO,
                               analyst="%s" % analyst,
                               get_rels=False)
        dmain.save(username=analyst)
    if ip:
        ip.add_relationship(indicator,
                            RelationshipTypes.RELATED_TO,
                            analyst="%s" % analyst,
                            get_rels=False)
        ip.save(username=analyst)
    # run indicator triage
    if is_new_indicator:
        indicator.reload()
        run_triage(indicator, analyst)
    return {'success': True, 'objectid': str(indicator.id),
            'is_new_indicator': is_new_indicator, 'object': indicator}
def does_indicator_relationship_exist(field, indicator_relationships):
    """
    Check whether the input field's values already have an indicator by
    cross-checking against the list of indicator relationships. Since
    indicators are uniquely identified by their type/value pair, a match
    on both 'ind_type' and 'ind_value' means an indicator already exists.

    Args:
        field: The generic input field containing a type/value pair,
            generally from custom dictionaries such as Django templates.
        indicator_relationships: The list of indicator relationships to
            cross-reference the input field against.

    Returns:
        True if the input field already has an associated indicator,
        False otherwise.
    """
    ind_type, ind_value = get_indicator_type_value_pair(field)
    if indicator_relationships is None:
        return False
    if ind_type is None or ind_value is None:
        # Field layout was not recognized; log and report no match.
        logger.error('Could not extract type/value pair of input field' +
                     'type: ' + str(ind_type) +
                     'value: ' + (ind_value.encode("utf-8") if ind_value else str(ind_value)) +
                     'indicator_relationships: ' + str(indicator_relationships))
        return False
    for rel in indicator_relationships:
        if rel is None:
            logger.error('Indicator relationship is not valid: ' + str(rel))
            continue
        if ind_type == rel.get('ind_type') and ind_value == rel.get('ind_value'):
            return True
    return False
def ci_search(itype, confidence, impact, actions):
    """
    Find indicators based on type, confidence, impact, and/or actions.

    :param itype: The indicator type to search for.
    :type itype: str
    :param confidence: The confidence level(s) to search for.
    :type confidence: str
    :param impact: The impact level(s) to search for.
    :type impact: str
    :param actions: The action(s) to search for.
    :type actions: str
    :returns: :class:`crits.core.crits_mongoengine.CritsQuerySet`
    """
    # Build a raw Mongo query; each comma-separated filter becomes an $in.
    query = {"type": "%s" % itype.strip()}
    if confidence:
        query["confidence.rating"] = {"$in": confidence.replace(' ', '').split(',')}
    if impact:
        query["impact.rating"] = {"$in": impact.replace(' ', '').split(',')}
    if actions:
        query["actions.action_type"] = {"$in": actions.split(',')}
    # Project only the fields the caller renders.
    projection = ('type', 'value', 'confidence', 'impact', 'actions')
    return Indicator.objects(__raw__=query).only(*projection)
def set_indicator_type(indicator_id, itype, username):
    """
    Set the Indicator type.

    :param indicator_id: The ObjectId of the indicator to update.
    :type indicator_id: str
    :param itype: The new indicator type.
    :type itype: str
    :param username: The user updating the indicator.
    :type username: str
    :returns: dict with key "success" (boolean)
    """
    indicator = Indicator.objects(id=indicator_id).first()
    if not indicator:
        # Robustness fix: an unknown id used to raise AttributeError below.
        return {'success': False}
    # check to ensure we're not duping an existing indicator
    ind_check = Indicator.objects(ind_type=itype,
                                  value=indicator.value).first()
    if ind_check:
        # we found a dupe
        return {'success': False}
    try:
        indicator.ind_type = itype
        indicator.save(username=username)
        return {'success': True}
    except ValidationError:
        return {'success': False}
def set_indicator_threat_type(id_, threat_type, user):
    """
    Set the Indicator threat type.

    :param id_: The ObjectId of the indicator to update.
    :type id_: str
    :param threat_type: The new indicator threat type.
    :type threat_type: str
    :param user: The user updating the indicator.
    :type user: str
    :returns: dict with key "success" (boolean)
    """
    indicator = Indicator.objects(id=id_).first()
    if not indicator:
        # Robustness fix: an unknown id used to raise AttributeError below.
        return {'success': False}
    # check to ensure we're not duping an existing indicator
    ind_check = Indicator.objects(threat_type=threat_type,
                                  value=indicator.value).first()
    if ind_check:
        # we found a dupe
        return {'success': False}
    try:
        indicator.threat_type = threat_type
        indicator.save(username=user)
        return {'success': True}
    except ValidationError:
        return {'success': False}
def set_indicator_attack_type(id_, attack_type, user):
    """
    Set the Indicator attack type.

    :param id_: The ObjectId of the indicator to update.
    :type id_: str
    :param attack_type: The new indicator attack type.
    :type attack_type: str
    :param user: The user updating the indicator.
    :type user: str
    :returns: dict with key "success" (boolean)
    """
    indicator = Indicator.objects(id=id_).first()
    if not indicator:
        # Robustness fix: an unknown id used to raise AttributeError below.
        return {'success': False}
    # check to ensure we're not duping an existing indicator
    ind_check = Indicator.objects(attack_type=attack_type,
                                  value=indicator.value).first()
    if ind_check:
        # we found a dupe
        return {'success': False}
    try:
        indicator.attack_type = attack_type
        indicator.save(username=user)
        return {'success': True}
    except ValidationError:
        return {'success': False}
def add_new_indicator_action(action, analyst):
    """
    Add a new indicator action to CRITs.

    :param action: The action to add to CRITs.
    :type action: str
    :param analyst: The user adding this action.
    :returns: True on success, False if it already exists or fails validation.
    """
    name = action.strip()
    try:
        # Do not create duplicates of an existing action name.
        if IndicatorAction.objects(name=name).first():
            return False
        new_action = IndicatorAction()
        new_action.name = name
        new_action.save(username=analyst)
    except ValidationError:
        return False
    return True
def indicator_remove(_id, username):
    """
    Remove an Indicator from CRITs.

    :param _id: The ObjectId of the indicator to remove.
    :type _id: str
    :param username: The user removing the indicator.
    :type username: str
    :returns: dict with keys "success" (boolean) and "message" (list) if failed.
    """
    # Deletion is restricted to administrators.
    if not is_admin(username):
        return {'success': False, 'message': ['Must be an admin to delete']}
    indicator = Indicator.objects(id=_id).first()
    if indicator is None:
        return {'success': False, 'message': ['Cannot find Indicator']}
    indicator.delete(username=username)
    return {'success': True}
def action_add(indicator_id, action):
    """
    Add an action to an indicator.

    :param indicator_id: The ObjectId of the indicator to update.
    :type indicator_id: str
    :param action: The information about the action.
    :type action: dict
    :returns: dict with keys:
              "success" (boolean),
              "message" (str) if failed,
              "object" (dict) if successful.
    """
    analyst = action['analyst']
    # Only indicators visible through the analyst's sources may be updated.
    indicator = Indicator.objects(id=indicator_id,
                                  source__name__in=user_sources(analyst)).first()
    if indicator is None:
        return {'success': False,
                'message': 'Could not find Indicator'}
    try:
        indicator.add_action(action['action_type'], action['active'],
                             analyst, action['begin_date'],
                             action['end_date'], action['performed_date'],
                             action['reason'], action['date'])
        indicator.save(username=analyst)
    except ValidationError as e:
        return {'success': False, 'message': e}
    return {'success': True, 'object': action}
def action_update(indicator_id, action):
    """
    Update an action for an indicator.

    :param indicator_id: The ObjectId of the indicator to update.
    :type indicator_id: str
    :param action: The information about the action.
    :type action: dict
    :returns: dict with keys:
              "success" (boolean),
              "message" (str) if failed,
              "object" (dict) if successful.
    """
    analyst = action['analyst']
    # Only indicators visible through the analyst's sources may be updated.
    indicator = Indicator.objects(id=indicator_id,
                                  source__name__in=user_sources(analyst)).first()
    if indicator is None:
        return {'success': False,
                'message': 'Could not find Indicator'}
    try:
        indicator.edit_action(action['action_type'], action['active'],
                              analyst, action['begin_date'],
                              action['end_date'], action['performed_date'],
                              action['reason'], action['date'])
        indicator.save(username=analyst)
    except ValidationError as e:
        return {'success': False, 'message': e}
    return {'success': True, 'object': action}
def action_remove(indicator_id, date, analyst):
    """
    Remove an action from an indicator.

    :param indicator_id: The ObjectId of the indicator to update.
    :type indicator_id: str
    :param date: The date of the action to remove.
    :type date: datetime.datetime
    :param analyst: The user removing the action.
    :type analyst: str
    :returns: dict with keys "success" (boolean) and "message" (str) if failed.
    """
    indicator = Indicator.objects(id=indicator_id).first()
    if indicator is None:
        return {'success': False,
                'message': 'Could not find Indicator'}
    try:
        # Actions are keyed by their date on the embedded document.
        indicator.delete_action(date)
        indicator.save(username=analyst)
    except ValidationError as e:
        return {'success': False, 'message': e}
    return {'success': True}
def activity_add(indicator_id, activity):
    """
    Add activity to an Indicator.

    :param indicator_id: The ObjectId of the indicator to update.
    :type indicator_id: str
    :param activity: The activity information.
    :type activity: dict
    :returns: dict with keys:
              "success" (boolean),
              "message" (str) if failed,
              "object" (dict) if successful.
    """
    analyst = activity['analyst']
    # Only indicators visible through the analyst's sources may be updated.
    indicator = Indicator.objects(id=indicator_id,
                                  source__name__in=user_sources(analyst)).first()
    if indicator is None:
        return {'success': False,
                'message': 'Could not find Indicator'}
    try:
        indicator.add_activity(analyst,
                               activity['start_date'],
                               activity['end_date'],
                               activity['description'],
                               activity['date'])
        indicator.save(username=analyst)
    except ValidationError as e:
        return {'success': False, 'message': e,
                'id': str(indicator.id)}
    return {'success': True, 'object': activity,
            'id': str(indicator.id)}
def activity_update(indicator_id, activity):
    """
    Update activity for an Indicator.

    :param indicator_id: The ObjectId of the indicator to update.
    :type indicator_id: str
    :param activity: The activity information (keys: "analyst", "start_date",
                     "end_date", "description", "date").
    :type activity: dict
    :returns: dict with keys:
              "success" (boolean),
              "message" (str) if failed,
              "object" (dict) if successful.
    """

    # Restrict the lookup to indicators visible to this analyst's sources.
    sources = user_sources(activity['analyst'])
    indicator = Indicator.objects(id=indicator_id,
                                  source__name__in=sources).first()
    if not indicator:
        return {'success': False,
                'message': 'Could not find Indicator'}
    try:
        indicator.edit_activity(activity['analyst'],
                                activity['start_date'],
                                activity['end_date'],
                                activity['description'],
                                activity['date'])
        indicator.save(username=activity['analyst'])
        return {'success': True, 'object': activity}
    # "as" form is valid on Python 2.6+ and required on Python 3.
    except ValidationError as e:
        return {'success': False, 'message': e}
def activity_remove(indicator_id, date, analyst):
    """
    Remove activity from an Indicator.

    :param indicator_id: The ObjectId of the indicator to update.
    :type indicator_id: str
    :param date: The date of the activity to remove.
    :type date: datetime.datetime
    :param analyst: The user removing this activity.
    :type analyst: str
    :returns: dict with keys "success" (boolean) and "message" (str) if failed.
    """

    indicator = Indicator.objects(id=indicator_id).first()
    if not indicator:
        return {'success': False,
                'message': 'Could not find Indicator'}
    try:
        indicator.delete_activity(date)
        indicator.save(username=analyst)
        return {'success': True}
    # "as" form is valid on Python 2.6+ and required on Python 3.
    except ValidationError as e:
        return {'success': False, 'message': e}
def ci_update(indicator_id, ci_type, value, analyst):
    """
    Update confidence or impact for an indicator.

    :param indicator_id: The ObjectId of the indicator to update.
    :type indicator_id: str
    :param ci_type: What we are updating.
    :type ci_type: str ("confidence" or "impact")
    :param value: The value to set.
    :type value: str ("unknown", "benign", "low", "medium", "high")
    :param analyst: The user updating this indicator.
    :type analyst: str
    :returns: dict with keys "success" (boolean) and "message" (str) if failed.
    """

    indicator = Indicator.objects(id=indicator_id).first()
    if not indicator:
        return {'success': False,
                'message': 'Could not find Indicator'}
    # Membership test instead of a chained "or" comparison.
    if ci_type in ("confidence", "impact"):
        try:
            if ci_type == "confidence":
                indicator.set_confidence(analyst, value)
            else:
                indicator.set_impact(analyst, value)
            indicator.save(username=analyst)
            return {'success': True}
        # "as" form is valid on Python 2.6+ and required on Python 3.
        except ValidationError as e:
            return {'success': False, 'message': e}
    else:
        return {'success': False, 'message': 'Invalid CI type'}
def create_indicator_and_ip(type_, id_, ip, analyst):
    """
    Add indicators for an IP address.

    :param type_: The CRITs top-level object we are getting this IP from.
    :type type_: class which inherits from
                 :class:`crits.core.crits_mongoengine.CritsBaseAttributes`
    :param id_: The ObjectId of the top-level object to search for.
    :type id_: str
    :param ip: The IP address to generate an indicator out of.
    :type ip: str
    :param analyst: The user adding this indicator.
    :type analyst: str
    :returns: dict with keys:
              "success" (boolean),
              "message" (str),
              "value" (str)
    """

    obj_class = class_from_id(type_, id_)
    # Guard clause instead of wrapping the whole body in an if block.
    if not obj_class:
        return {'success': False,
                'message': "Could not find %s to add relationships" % type_}

    ip_class = IP.objects(ip=ip).first()
    # NOTE(review): the indicator type is always IPV4_ADDRESS here, even if
    # the supplied value is IPv6.
    ind_type = IPTypes.IPV4_ADDRESS
    ind_class = Indicator.objects(ind_type=ind_type, value=ip).first()

    # Setup IP: reuse an existing IP TLO or create a new one, and relate it
    # to the parent object either way.
    if ip_class:
        ip_class.add_relationship(obj_class,
                                  RelationshipTypes.RELATED_TO,
                                  analyst=analyst)
    else:
        ip_class = IP()
        ip_class.ip = ip
        ip_class.source = obj_class.source
        ip_class.save(username=analyst)
        ip_class.add_relationship(obj_class,
                                  RelationshipTypes.RELATED_TO,
                                  analyst=analyst)

    # Setup Indicator: same reuse-or-create pattern. Keep the result dict of
    # the parent relationship so success/failure can be reported below.
    message = ""
    if ind_class:
        message = ind_class.add_relationship(obj_class,
                                             RelationshipTypes.RELATED_TO,
                                             analyst=analyst)
        ind_class.add_relationship(ip_class,
                                   RelationshipTypes.RELATED_TO,
                                   analyst=analyst)
    else:
        ind_class = Indicator()
        ind_class.source = obj_class.source
        ind_class.ind_type = ind_type
        ind_class.value = ip
        ind_class.save(username=analyst)
        message = ind_class.add_relationship(obj_class,
                                             RelationshipTypes.RELATED_TO,
                                             analyst=analyst)
        ind_class.add_relationship(ip_class,
                                   RelationshipTypes.RELATED_TO,
                                   analyst=analyst)

    # Save everything and report the parent object's relationships.
    try:
        obj_class.save(username=analyst)
        ip_class.save(username=analyst)
        ind_class.save(username=analyst)
        if message['success']:
            rels = obj_class.sort_relationships("%s" % analyst, meta=True)
            return {'success': True, 'message': rels, 'value': obj_class.id}
        else:
            return {'success': False, 'message': message['message']}
    # "as" form is valid on Python 2.6+ and required on Python 3.
    except Exception as e:
        return {'success': False, 'message': e}
def create_indicator_from_tlo(tlo_type, tlo, analyst, source_name=None,
                              tlo_id=None, ind_type=None, value=None,
                              update_existing=True, add_domain=True):
    """
    Create an indicator from a Top-Level Object (TLO).

    :param tlo_type: The CRITs type of the parent TLO.
    :type tlo_type: str
    :param tlo: A CRITs parent TLO class object.
    :type tlo: class - some CRITs TLO
    :param analyst: The user creating this indicator.
    :type analyst: str
    :param source_name: The source name for the new source instance that
                        records this indicator being added.
    :type source_name: str
    :param tlo_id: The ObjectId of the parent TLO (used to look the TLO up
                   when `tlo` is not provided).
    :type tlo_id: str
    :param ind_type: The indicator type, if TLO is not Domain or IP.
    :type ind_type: str
    :param value: The value of the indicator, if TLO is not Domain or IP.
    :type value: str
    :param update_existing: If Indicator already exists, update it.
    :type update_existing: boolean
    :param add_domain: If new indicator contains a domain/ip, add a
                       matching Domain or IP TLO.
    :type add_domain: boolean
    :returns: dict with keys:
              "success" (boolean),
              "message" (str),
              "value" (str),
              "indicator" :class:`crits.indicators.indicator.Indicator`
    """

    if not tlo:
        tlo = class_from_id(tlo_type, tlo_id)
        if not tlo:
            return {'success': False,
                    'message': "Could not find %s" % tlo_type}

    # Inherit metadata from the parent TLO.
    source = tlo.source
    campaign = tlo.campaign
    bucket_list = tlo.bucket_list
    tickets = tlo.tickets

    # If value and ind_type provided, use them instead of defaults.
    if tlo_type == "Domain":
        value = value or tlo.domain
        ind_type = ind_type or IndicatorTypes.DOMAIN
    elif tlo_type == "IP":
        value = value or tlo.ip
        ind_type = ind_type or tlo.ip_type
    elif tlo_type == "Indicator":
        value = value or tlo.value
        ind_type = ind_type or tlo.ind_type

    if not value or not ind_type:  # if not provided & no default
        # Fixed message: the original implicit concatenation produced
        # "...must be providedfor TLO..." (missing space).
        return {'success': False,
                'message': "Indicator value & type must be provided "
                           "for TLO of type %s" % tlo_type}

    # Check if the indicator already exists.
    if Indicator.objects(ind_type=ind_type,
                         value=value).first() and not update_existing:
        return {'success': False, 'message': "Indicator already exists"}

    result = handle_indicator_ind(value, source,
                                  ctype=ind_type,
                                  threat_type=IndicatorThreatTypes.UNKNOWN,
                                  attack_type=IndicatorAttackTypes.UNKNOWN,
                                  analyst=analyst,
                                  add_domain=add_domain,
                                  add_relationship=True,
                                  campaign=campaign,
                                  bucket_list=bucket_list,
                                  ticket=tickets)
    # Guard clauses keep the success path flat.
    if not result['success']:
        return result
    ind = Indicator.objects(id=result['objectid']).first()
    if not ind:
        return {'success': False, 'message': "Failed to create Indicator"}

    if source_name:
        # Add source to show when indicator was created/updated.
        ind.add_source(source=source_name,
                       method='Indicator created/updated '
                              'from %s with ID %s' % (tlo_type, tlo.id),
                       date=datetime.datetime.now(),
                       analyst=analyst)
    tlo.add_relationship(ind,
                         RelationshipTypes.RELATED_TO,
                         analyst=analyst)
    tlo.save(username=analyst)
    # Also relate the new indicator to any Events related to the parent TLO.
    for rel in tlo.relationships:
        if rel.rel_type == "Event":
            # Get event object to pass in.
            rel_item = Event.objects(id=rel.object_id).first()
            if rel_item:
                ind.add_relationship(rel_item,
                                     RelationshipTypes.RELATED_TO,
                                     analyst=analyst)
    ind.save(username=analyst)
    tlo.reload()
    rels = tlo.sort_relationships("%s" % analyst, meta=True)
    return {'success': True, 'message': rels,
            'value': tlo.id, 'indicator': ind}
def validate_indicator_value(value, ind_type):
    """
    Check that a given value is valid for a particular Indicator type.

    :param value: The value to be validated
    :type value: str
    :param ind_type: The indicator type to validate against
    :type ind_type: str
    :returns: tuple: (Valid value, Error message)
    """

    value = value.strip()
    # Host pulled out of a URI or email address; validated as a domain below.
    host = ""

    # URLs: require an explicit protocol, then vet the host portion.
    if ind_type == IndicatorTypes.URI:
        if "://" not in value.split('.')[0]:
            return ("", "URI must contain protocol "
                        "prefix (e.g. http://, https://, ftp://) ")
        candidate = urlparse.urlparse(value).hostname
        try:
            validate_ipv46_address(candidate)
            return (value, "")
        except DjangoValidationError:
            # Host is not an IP literal; treat it as a domain below.
            host = candidate

    # Email addresses: must contain '@'; the part after it is either a
    # bracketed IP literal or a domain.
    if ind_type in (IndicatorTypes.EMAIL_ADDRESS,
                    IndicatorTypes.EMAIL_FROM,
                    IndicatorTypes.EMAIL_REPLY_TO,
                    IndicatorTypes.EMAIL_SENDER):
        if '@' not in value:
            return ("", "Email address must contain an '@'")
        candidate = value.split('@')[-1]
        if candidate[0] == '[' and candidate[-1] == ']':
            try:
                validate_ipv46_address(candidate[1:-1])
                return (value, "")
            except DjangoValidationError:
                return ("", "Email address does not contain a valid IP")
        host = candidate

    # IP indicator types: normalize and validate directly.
    if ind_type in IPTypes.values():
        (ip_address, error) = validate_and_normalize_ip(value, ind_type)
        return ("", error) if error else (ip_address, "")

    # Domain indicator types, plus any host extracted above.
    if host or ind_type in (IndicatorTypes.DOMAIN,
                            IndicatorTypes.URI):
        (_root, _domain, error) = get_valid_root_domain(host or value)
        if error:
            return ("", error)
        return (value, "")

    return (value, "")
| {
"content_hash": "d8c90eedb2ce5ce01090e961067c27d8",
"timestamp": "",
"source": "github",
"line_count": 1412,
"max_line_length": 122,
"avg_line_length": 39.126770538243626,
"alnum_prop": 0.5680127427733632,
"repo_name": "kaoscoach/crits",
"id": "a91d5dcad42930d0f53eb1a0032854c9b2b0f770",
"size": "55247",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "crits/indicators/handlers.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "8694"
},
{
"name": "CSS",
"bytes": "390510"
},
{
"name": "HTML",
"bytes": "456114"
},
{
"name": "JavaScript",
"bytes": "3476844"
},
{
"name": "Python",
"bytes": "1868172"
},
{
"name": "Shell",
"bytes": "18052"
}
],
"symlink_target": ""
} |
__author__ = 'wei'
from ..protobuf import *
from . import igt_base_template
class LinkTemplate(igt_base_template.BaseTemplate):
    """Push template that shows a notification and opens a URL when tapped."""

    def __init__(self):
        igt_base_template.BaseTemplate.__init__(self)
        self.text = ""
        self.title = ""
        self.logo = ""
        self.logoURL = ""
        self.url = ""
        self.isRing = True
        self.isVibrate = True
        self.isClearable = True
        self.pushType = "NotifyMsg"

    def getActionChains(self):
        """Build the action chain: goto -> notification -> goto -> startweb -> end."""
        # Entry point: jump straight to the notification action.
        start = gt_req_pb2.ActionChain()
        start.actionId = 1
        start.type = gt_req_pb2.ActionChain.Goto
        start.next = 10000

        # Show the notification using the template's configured fields.
        notify = gt_req_pb2.ActionChain()
        notify.actionId = 10000
        notify.type = gt_req_pb2.ActionChain.notification
        notify.title = self.title
        notify.text = self.text
        notify.logo = self.logo
        notify.logoURL = self.logoURL
        notify.ring = self.isRing
        notify.clearable = self.isClearable
        notify.buzz = self.isVibrate
        notify.next = 10010

        # Bridge to the web action.
        goto_web = gt_req_pb2.ActionChain()
        goto_web.actionId = 10010
        goto_web.type = gt_req_pb2.ActionChain.Goto
        goto_web.next = 10030

        # Open the configured URL.
        open_url = gt_req_pb2.ActionChain()
        open_url.actionId = 10030
        open_url.type = gt_req_pb2.ActionChain.startweb
        open_url.url = self.url
        open_url.next = 100

        # Terminate the chain.
        end = gt_req_pb2.ActionChain()
        end.actionId = 100
        end.type = gt_req_pb2.ActionChain.eoa

        return [start, notify, goto_web, open_url, end]
| {
"content_hash": "081ed378a4712a9676e38eaf087ab459",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 93,
"avg_line_length": 32.38333333333333,
"alnum_prop": 0.6103962943901183,
"repo_name": "alphapigger/igetui",
"id": "542bff455a1f8d7db343e5d7e5786e173524772b",
"size": "1943",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "igetui/template/igt_link_template.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "82155"
},
{
"name": "Protocol Buffer",
"bytes": "14537"
},
{
"name": "Python",
"bytes": "593618"
}
],
"symlink_target": ""
} |
__author__ = 'mikael.brandin@devolver.se'
# coding=utf-8
import os
import subprocess
import io
import sys
from urllib.parse import urlparse
from client import exceptions
from client import protocol
class ClientException(exceptions.ArmoryException):
    """Raised for client-side transfer failures (missing file, no shell)."""

    def __init__(self, msg):
        # Python 3 zero-argument super(); also drops the stray semicolon.
        super().__init__(msg)
class BaseClient:
    """Abstract interface for armory transfer clients.

    Subclasses implement push/pull against a concrete transport; the base
    methods are no-op placeholders that return None.
    """

    def push(self, package_name, pack_file, hash):
        """Upload a packed file identified by its hash. No-op in the base class."""
        pass

    def pull(self, package_name, package_version, dst):
        """Download a package version to dst. No-op in the base class."""
        pass

    def pull_branch(self, branch_name, home_directory):
        """Download a branch manifest into home_directory. No-op in the base class."""
        pass
class IOClient(BaseClient):
    """Client that speaks the armory wire protocol over a shell pipe.

    Subclasses provide connect() to open a transport-specific Shell; this
    class implements the push/pull message exchanges on top of it.
    """

    def __init__(self, uri):
        # Parsed URI describing the remote endpoint.
        self.uri = urlparse(uri)

    def connect(self, uri, feature):
        """Open a Shell for the given URI/feature. Base implementation: None."""
        return None

    def cleanup(self):
        """Release transport resources. Base implementation does nothing."""
        pass

    def push(self, package_name, pack_file, hash):
        """Push a packed file to the remote side.

        :raises ClientException: if pack_file does not exist or no shell
            can be created.
        :returns: None for accept, reject, error and unknown responses alike.
        """
        if not os.path.exists(pack_file):
            raise ClientException("No such file: " + pack_file)
        shell = self.connect(self.uri, 'push')
        if shell is None:
            raise ClientException("unable to create shell")
        print("push {package} {hash}".format(package=package_name, hash=hash))
        shell.write_msg("push {package} {hash}".format(package=package_name, hash=hash))
        shell.write_msg("ok")
        msg = shell.read_msg()
        if msg.msg == 'accept':
            print("accept: " + package_name)
            shell.write_file(pack_file, hash)
            shell.wait()
        elif msg.msg == 'reject':
            print("reject: " + package_name)
        elif msg.msg == 'error':
            print("error: " + package_name + " " + str(msg.params))
        else:
            print("unknown response: " + str(msg))
        # NOTE(review): the original contained an unreachable self.cleanup()
        # after this branch (every arm returned first); cleanup is still never
        # invoked on the push path.
        return None

    def pull_branch(self, branch_name, home_directory):
        """Pull a branch manifest into <home_directory><branch_name>.armory.

        :raises ClientException: if no shell can be created.
        :returns: True on accept, False on reject/error/unknown response.
        """
        shell = self.connect(self.uri, 'pull')
        if shell is None:
            raise ClientException("unable to create shell")
        shell.write_msg("pull /branch/" + branch_name)
        shell.write_msg("ok")
        msg = shell.read_msg()
        if msg.msg == 'accept':
            shell.read_file(home_directory + branch_name + '.armory', 0)
            shell.wait()
            return True
        elif msg.msg == 'reject':
            print("reject: " + branch_name)
            return False
        elif msg.msg == 'error':
            print("error: " + str(msg))
            return False
        else:
            print("unknown response: " + str(msg))
            return False

    def pull(self, type, package_name, version, dst):
        """Pull /<type>/<package_name>/<version> into dst.

        NOTE(review): the parameter name `type` shadows the builtin and the
        signature differs from BaseClient.pull; both kept for compatibility
        with existing callers.

        :raises ClientException: if no shell can be created.
        :returns: None for accept, reject, error and unknown responses alike.
        """
        shell = self.connect(self.uri, 'pull')
        if shell is None:
            raise ClientException("unable to create shell")
        shell.write_msg("pull /" + type + "/" + package_name + '/' + version)
        shell.write_msg("ok")
        msg = shell.read_msg()
        if msg.msg == 'accept':
            shell.read_file(dst, 0)
            shell.wait()
        elif msg.msg == 'reject':
            print("reject: " + package_name)
        elif msg.msg == 'error':
            print("error: " + str(msg))
        else:
            print("unknown response: " + str(msg))
        return None
class Shell:
    # Runs a command in a subprocess and exposes the armory wire-protocol
    # helpers over its stdin/stdout pipes.
    def __init__(self, cmd, cwd):
        # shell=True: cmd is a full shell command line; bufsize=1 requests
        # line buffering on the pipes.
        self.process = subprocess.Popen(cmd, shell=True, cwd=cwd, bufsize=1, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    def read_msg(self):
        # One parsed protocol message from the child's stdout.
        return protocol.read_msg(self.process.stdout);
    def read_line(self):
        # One raw line from the child's stdout.
        return self.process.stdout.readline();
    def write_msg(self, msg):
        # Send one protocol message to the child's stdin.
        return protocol.write_msg(self.process.stdin, msg)
    def write_file(self, file, hash):
        # Stream a file (with its hash) to the child's stdin.
        return protocol.write_file(self.process.stdin, file, hash)
    def read_file(self, file, hash):
        # Receive a file from the child's stdout into `file`.
        # closefd=False keeps the underlying pipe usable after the wrapper
        # stream is closed.
        with io.open(self.process.stdout.fileno(), mode='rb', closefd=False) as stream:
            return protocol.read_file(stream, file, hash)
    def wait(self):
        # Block until the child process exits.
        self.process.wait() | {
"content_hash": "1ba9e4d1f2a8e37eeb97da3e13ca937b",
"timestamp": "",
"source": "github",
"line_count": 148,
"max_line_length": 123,
"avg_line_length": 27.554054054054053,
"alnum_prop": 0.5669445806768023,
"repo_name": "mikaelbrandin/armory",
"id": "eb875e425cbab809bedad3bdd30278663965e673",
"size": "4078",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "client/base_client.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "90815"
}
],
"symlink_target": ""
} |
r"""Implementation of paradigms, mirroring and extending functionality in Thrax.
This follows the treatment of morphology in chapter 2 of:
Roark, B. and Sproat, R. 2007. Computational Approaches to Morphology and
Syntax. Oxford University Press.
Following that work, all affixation is treated via composition, since that is
the most general single regular operation that can handle morphological
processes. For example, suffixing an "s" to a word, with a morpheme boundary "+"
is modeled as the following "shape":
\Sigma^* ({\epsilon} \times {+s})
i.e. an insertion of "+s" after anything, where \Sigma^* represents a string
of any symbol except the boundary symbol. If one wanted to also constrain the
stem to be of a particular shape (e.g., English -er comparatives, where a stem
must be at most two syllables long) or even to modify the stem to be a
particular shape (as in Yowlumne) then one can replace \Sigma^* with a more
restrictive acceptor or transducer.
A paradigm consists of:
- A Category.
- A list of tuples of a FeatureVector from the Category and a "shape" as above.
- A specification of what FeatureVector corresponds to the "lemma". For example
in Latin nouns it would be "[case=nom][num=sg]". In Latin verbs it is the
first person singular indicative active.
- A list of stem strings or acceptors.
- An optional name.
- An optional set of rewrite rules to apply to forms in the paradigm.
- An optional boundary symbol (defaults to "+").
- An optional parent paradigm from which this paradigm inherits.
Stems are composed with all of the slot entries, producing an FST that maps
from stems to all of their possible forms, with boundary symbols and features.
Thus, for a Latin first declension noun:
aqu -> aqu+a[case=nom][num=sg]
aqu -> aqu+ae[case=gen][num=sg]
aqu -> aqu+am[case=acc][num=sg]
etc. Call this "stem_to_forms".
The Paradigm then defines a few common operations:
1. Analyzer:
This is the output projection of the stem_to_forms, composed with an FST that
deletes boundary labels and features, and then inverted. Thus this maps from,
e.g.:
aquam -> aqu+am[case=acc][num=sg]
2. Tagger:
Same as the Analyzer, but omitting the decomposition of the word into parts:
aquam -> aquam[case=acc][num=sg]
3. Lemmatizer:
The lemmatizer is constructed by taking stem_to_forms, moving the feature labels
to the input side, inverting (so it maps from fully-formed word to stem +
features), then inflecting the stem as the lemma. Thus:
aquam -> aqua[case=acc][num=sg]
4. Inflector.
This is the inverse of the lemmatizer:
aqua[case=acc][num=sg] -> aquam
"""
import array
from typing import Iterator, List, Optional, Sequence, Tuple
import pynini
from pynini.lib import byte
from pynini.lib import features
from pynini.lib import pynutil
from pynini.lib import rewrite
class Error(Exception):
  """Errors specific to this module."""
# Helper functions.
def build_stem_ids(min_id: int, max_id: int) -> pynini.Fst:
  """Builds the set of stem IDs.

  These are strings of the form __n__, for n in the range [min_id, max_id).

  Args:
    min_id: minimum stem ID (inclusive).
    max_id: maximum stem ID (exclusive).

  Returns:
    FST representing the stem IDs in [min_id, max_id) as strings.
  """
  stem_ids = (f"__{i}__" for i in range(min_id, max_id))
  return pynini.union(*stem_ids).optimize()
def make_byte_star_except_boundary(
    boundary: pynini.FstLike = "+") -> pynini.Fst:
  """Helper function to make sigma-star over bytes, minus the boundary symbol.

  Args:
    boundary: a string, the boundary symbol to use.

  Returns:
    An acceptor for any string of bytes that avoids the boundary symbol.
  """
  non_boundary = pynini.difference(byte.BYTE, boundary)
  return non_boundary.closure().optimize()
def prefix(affix: pynini.FstLike, stem_form: pynini.FstLike) -> pynini.Fst:
  """Helper function that prefixes `affix` to stem_form.

  Args:
    affix: an acceptor representing the affix, typically ending with the
      boundary symbol.
    stem_form: a transducer representing the stem. If there are no further
      restrictions on the form of the base, this could simply be the output of
      make_byte_star_except_boundary(). However the affix may impose a
      restriction on what it may attach to, or even impose a change of shape.

  Returns:
    The concatenation of the insertion of `affix` and `stem_form`.
  """
  inserted_affix = pynutil.insert(affix)
  return inserted_affix + stem_form
def suffix(affix: pynini.FstLike, stem_form: pynini.FstLike) -> pynini.Fst:
  """Helper function that suffixes `affix` to stem_form.

  Args:
    affix: an acceptor representing the affix, typically beginning with the
      boundary symbol.
    stem_form: a transducer representing the stem. If there are no further
      restrictions on the form of the base, this could simply be the output of
      make_byte_star_except_boundary(). However the affix may impose a
      restriction on what it may attach to, or even impose a change of shape.

  Returns:
    The concatenation of `stem_form` and the insertion of `affix`.
  """
  inserted_affix = pynutil.insert(affix)
  return stem_form + inserted_affix
# Type aliases used below.
# An Analysis pairs a string (stem, wordform, or analysis text) with its
# parsed FeatureVector.
Analysis = Tuple[str, features.FeatureVector]
# A ParadigmSlot pairs an affix "shape" FST with the FeatureVector it realizes.
ParadigmSlot = Tuple[pynini.Fst, features.FeatureVector]
class Paradigm:
  """Implementation of morphological paradigms.
  A paradigm consists of a Category, a sequence of slot (transducer,
  FeatureVector) pairs, a feature vector indicating which slot is the lemma,
  a sequence of stem acceptors or strings, an optional name,
  an optional set of rewrite rules to be applied in order,
  an optional boundary symbol and an optional parent paradigm from which to
  inherit.
  """
  def __init__(self,
               category: features.Category,
               slots: Sequence[ParadigmSlot],
               lemma_feature_vector: features.FeatureVector,
               stems: Sequence[pynini.FstLike],
               rules: Optional[Sequence[pynini.Fst]] = None,
               name: Optional[str] = None,
               boundary: str = "+",
               parent_paradigm: Optional["Paradigm"] = None):
    """Paradigm initializer.
    Args:
      category: a Category object.
      slots: a sequence of ParadigmSlots, or (fst, FeatureVector) pairs (the
        latter as a convenient shorthand).
      lemma_feature_vector: FeatureVector associated with the lemma. This must
        be one of the slots provided, otherwise the construction will fail.
      stems: a sequence of strings and/or acceptors representing stems
        belonging to this paradigm.
      rules: an optional sequence of FSTs, rules to be applied to produce the
        surface forms. If rules is None, then the rules are inherited from the
        parent category, if any.
      name: an optional string, the name of this paradigm.
      boundary: a string representing the boundary symbol.
      parent_paradigm: an optional Paradigm object from which to inherit.
    Raises:
      Error: Lemma form not found in slots.
    """
    self._category = category
    self._slots = list(slots)
    # Verify that the feature vectors are from the right category:
    for (_, feature_vector) in self._slots:
      if feature_vector.category != self._category:
        raise Error(f"Paradigm category {self._category} != "
                    f"feature vector category {feature_vector.category}")
    self._name = name
    self._boundary = boundary
    # Rule to delete the boundary symbol.
    self._boundary_deleter = self._unconditioned_rewrite(
        pynutil.delete(self.boundary))
    # Rule to delete the boundary label and feature labels.
    # NOTE(review): reaches into the Category's private _feature_labels;
    # the public feature_labels property is used elsewhere in this class —
    # confirm both refer to the same FST.
    self._deleter = pynini.compose(
        self._unconditioned_rewrite(
            pynutil.delete(self.category._feature_labels)),
        self._boundary_deleter).optimize()
    # Rule to translate all boundary labels into human-readable strings.
    self._feature_label_rewriter = self._unconditioned_rewrite(
        self._category.feature_mapper)
    # And one that maps the other way
    self._feature_label_encoder = self._unconditioned_rewrite(
        pynini.invert(self._category.feature_mapper))
    # Inherit from the parent paradigm.
    self._rules = None if rules is None else list(rules)
    self._parent_paradigm = parent_paradigm
    self._inherit()
    # The union of all the shapes of the affixes in the slots, concatenated with
    # the insertion of the feature vectors.
    self._shape = pynini.union(*(
        (shape + pynutil.insert(feature_vector.acceptor))
        for (shape, feature_vector) in self._slots))
    # Derives the lemma form from the slot's shape, then delete all the features
    # and boundary symbol, so that it maps from the stem to the lemma without
    # the features and boundary symbol.
    self._lemma = None
    for (shape, feature_vector) in self._slots:
      if feature_vector == lemma_feature_vector:
        self._lemma = shape + pynutil.insert(lemma_feature_vector.acceptor)
    if self._lemma is None:
      raise Error("Lemma form not found in slots")
    if self._rules is not None:
      for rule in self._rules:
        self._lemma @= rule
    self._lemma @= self._deleter
    self._lemma.optimize()
    self._stems = list(stems)
    # Stems to form transducer.
    self._stems_to_forms = pynini.union(*self._stems)
    self._stems_to_forms.optimize()
    self._stems_to_forms @= self._shape
    if self._rules:
      for rule in self._rules:
        self._stems_to_forms @= rule
    self._stems_to_forms.optimize()
    # The analyzer, mapping from a fully formed word (e.g. "aquārum") to an
    # analysis (e.g. "aqu+ārum[case=gen][num=pl]")
    self._analyzer = None
    # The tagger, mapping from a fully formed word (e.g. "aquārum") to the
    # same string with morphosyntactic tags (e.g. "aquārum[case=gen][num=pl]").
    self._tagger = None
    # The lemmatizer, mapping from a fully formed word (e.g. "aquārum") to the
    # lemma with morphosyntactic tags (e.g. "aqua[case=gen][num=pl]").
    self._lemmatizer = None
    # Inversion of the lemmatizer.
    self._inflector = None
  def _inherit(self) -> None:
    """Inherit from parent paradigm.
    Checks that the categories and boundaries match, then sets the rules for
    this paradigm from the parent if they are None. Slots in the list provided
    to this paradigm then override any in the parent that have matching feature
    vectors.
    Raises:
      Error: Paradigm category/boundary != parent paradigm category/boundary.
    """
    if self._parent_paradigm is None:
      return
    if self._category != self._parent_paradigm.category:
      raise Error(f"Paradigm category {self._category} != "
                  f"parent paradigm category {self._parent_paradigm.category}")
    if self._boundary != self._parent_paradigm.boundary:
      raise Error(f"Paradigm boundary {self._boundary} != "
                  f"parent paradigm boundary {self._parent_paradigm.boundary}")
    if self._rules is None:
      self._rules = self._parent_paradigm.rules
    # Adds parent slots if their feature vector isn't in the current slots'
    # feature vector.
    self._slots.extend(
        parent_slot for parent_slot in self._parent_paradigm.slots
        if parent_slot[1] not in (slot[1] for slot in self._slots))
  def _flip_lemmatizer_feature_labels(self) -> None:
    """Destructively flips lemmatizer's feature labels from input to output."""
    # Collect the arc labels used by the category's feature acceptor.
    feature_labels = set()
    for s in self.category.feature_labels.states():
      aiter = self.category.feature_labels.arcs(s)
      while not aiter.done():
        arc = aiter.value()
        if arc.ilabel:
          feature_labels.add(arc.ilabel)
        aiter.next()
    self.lemmatizer.set_input_symbols(self.lemmatizer.output_symbols())
    # Swap ilabel/olabel on every arc that outputs a feature label.
    for s in self.lemmatizer.states():
      maiter = self.lemmatizer.mutable_arcs(s)
      while not maiter.done():
        arc = maiter.value()
        if arc.olabel in feature_labels:
          # This assertion should always be true by construction.
          assert arc.ilabel == 0, (
              f"ilabel = "
              f"{self.lemmatizer.input_symbols().find(arc.ilabel)},"
              f" olabel = "
              f"{self.lemmatizer.output_symbols().find(arc.olabel)}")
          arc = pynini.Arc(arc.olabel, arc.ilabel, arc.weight, arc.nextstate)
          maiter.set_value(arc)
        maiter.next()
  def _unconditioned_rewrite(self, tau: pynini.Fst) -> pynini.Fst:
    """Helper function for context-independent cdrewrites.
    Args:
      tau: Change FST, i.e. phi x psi.
    Returns:
      cdrewrite(tau, "", "", self._category.sigma_star)
    """
    return pynini.cdrewrite(tau, "", "", self._category.sigma_star).optimize()
  # Helper for parsing strings that contain stems and features.
  def _parse_lattice(self, lattice: pynini.Fst) -> Iterator[Analysis]:
    """Given a lattice, returns all string and feature vectors.
    Args:
      lattice: a rewrite lattice FST.
    Yields:
      Pairs of (string, feature vector).
    """
    gensym = pynini.generated_symbols()
    piter = lattice.paths()
    while not piter.done():
      # Mutable byte array for the stem, analysis string, or wordform.
      wordbuf = array.array("B") # Mutable byte array.
      features_and_values = []
      for label in piter.olabels():
        if not label:
          continue
        # Labels below 256 are raw bytes; larger ones are feature symbols.
        if label < 256:
          wordbuf.append(label)
        else:
          features_and_values.append(gensym.find(label))
      word = wordbuf.tobytes().decode("utf8")
      vector = features.FeatureVector(self.category, *features_and_values)
      yield (word, vector)
      piter.next()
  # The analyzer, inflector, lemmatizer, and tagger are all created lazily.
  @property
  def analyzer(self) -> Optional[pynini.Fst]:
    if self.stems_to_forms is None:
      return None
    if self._analyzer is not None:
      return self._analyzer
    self._make_analyzer()
    return self._analyzer
  def _make_analyzer(self) -> None:
    """Helper function for constructing analyzer."""
    self._analyzer = pynini.project(self._stems_to_forms, "output")
    self._analyzer @= self._deleter
    # NOTE: relies on invert()/optimize() mutating self._analyzer in place.
    self._analyzer.invert().optimize()
  def analyze(self, word: pynini.FstLike) -> List[Analysis]:
    """Returns list of possible analyses.
    Args:
      word: inflected form, the input to the analyzer.
    Returns:
      A list of possible analyses.
    Raises:
      rewrite.Error: composition failure.
    """
    return list(
        self._parse_lattice(rewrite.rewrite_lattice(word, self.analyzer)))
  @property
  def tagger(self) -> Optional[pynini.Fst]:
    if self.analyzer is None:
      return None
    if self._tagger is not None:
      return self._tagger
    self._make_tagger()
    return self._tagger
  def _make_tagger(self) -> None:
    """Helper function for constructing tagger."""
    self._tagger = self._analyzer @ self._boundary_deleter
    self._tagger.optimize()
  def tag(self, word: pynini.FstLike) -> List[Analysis]:
    """Returns list of possible taggings.
    Args:
      word: inflected form, the input to the tagger.
    Returns:
      A list of possible analyses.
    Raises:
      rewrite.Error: composition failure.
    """
    return list(self._parse_lattice(rewrite.rewrite_lattice(word, self.tagger)))
  @property
  def lemmatizer(self) -> Optional[pynini.Fst]:
    if self.stems_to_forms is None:
      return None
    if self._lemmatizer is not None:
      return self._lemmatizer
    self._make_lemmatizer()
    return self._lemmatizer
  def _make_lemmatizer(self) -> None:
    """Helper function for constructing lemmatizer."""
    # Breaking this down into steps for ease of readability.
    # Flips to map from form+features to stem.
    self._lemmatizer = self._stems_to_forms.copy()
    # Moves the feature labels to the input side, then invert. By construction
    # the feature labels are always concatenated to the end.
    self._flip_lemmatizer_feature_labels()
    # Deletes boundary on the analysis side.
    self._lemmatizer @= self._boundary_deleter
    self._lemmatizer.invert()
    # Maps from the stem side to the lemma. The self._feature_labels is needed
    # to match the features that are now glommed onto the right-hand side.
    self._lemmatizer @= self._lemma + self.category.feature_labels
    self._lemmatizer.optimize()
  def lemmatize(self, word: pynini.FstLike) -> List[Analysis]:
    """Returns list of possible lemmatizations.
    Args:
      word: inflected form, the input to the lemmatizer.
    Returns:
      A list of possible lemmatizations.
    Raises:
      rewrite.Error: composition failure.
    """
    return list(
        self._parse_lattice(rewrite.rewrite_lattice(word, self.lemmatizer)))
  @property
  def inflector(self) -> Optional[pynini.Fst]:
    if self.lemmatizer is None:
      return None
    if self._inflector is not None:
      return self._inflector
    self._inflector = pynini.invert(self._lemmatizer)
    return self._inflector
  def inflect(self, lemma: pynini.FstLike,
              featvec: features.FeatureVector) -> List[str]:
    """Returns list of possible inflections.
    Args:
      lemma: lemma string or acceptor.
      featvec: FeatureVector.
    Returns:
      A list of possible inflections.
    Raises:
      rewrite.Error: composition failure.
    """
    return rewrite.rewrites(lemma + featvec.acceptor, self.inflector)
  @property
  def category(self) -> features.Category:
    return self._category
  @property
  def slots(self) -> List[ParadigmSlot]:
    return self._slots
  @property
  def name(self) -> Optional[str]:
    return self._name
  @property
  def boundary(self) -> str:
    return self._boundary
  @property
  def stems(self) -> List[pynini.FstLike]:
    return self._stems
  @property
  def stems_to_forms(self) -> pynini.Fst:
    return self._stems_to_forms
  @property
  def feature_label_rewriter(self) -> pynini.Fst:
    return self._feature_label_rewriter
  @property
  def feature_label_encoder(self) -> pynini.Fst:
    return self._feature_label_encoder
  @property
  def rules(self) -> Optional[List[pynini.Fst]]:
    return self._rules
| {
"content_hash": "8065f8c37c9522da11b9fcadfea7c5f6",
"timestamp": "",
"source": "github",
"line_count": 525,
"max_line_length": 80,
"avg_line_length": 34.55238095238095,
"alnum_prop": 0.6807607497243661,
"repo_name": "kylebgorman/pynini",
"id": "d66b433caefaed18b57052060e077aac92b630c0",
"size": "18822",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pynini/lib/paradigms.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "173515"
},
{
"name": "Cython",
"bytes": "285320"
},
{
"name": "Python",
"bytes": "287089"
},
{
"name": "Shell",
"bytes": "1897"
},
{
"name": "Starlark",
"bytes": "31558"
}
],
"symlink_target": ""
} |
from swgpy.object import *
def create(kernel):
    """Construct the shared_shalera mobile creature template."""
    creature = Creature()
    creature.template = "object/mobile/shared_shalera.iff"
    creature.attribute_template_id = 9
    creature.stfName("npc_name", "hutt_base_female")

    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####

    return creature
"content_hash": "d1271040ac5e40affa4d75b77f1a7b3c",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 53,
"avg_line_length": 22.153846153846153,
"alnum_prop": 0.6875,
"repo_name": "anhstudios/swganh",
"id": "bcc81afd457a242526124ae7bfb8894eac1e2cc2",
"size": "433",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "data/scripts/templates/object/mobile/shared_shalera.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11887"
},
{
"name": "C",
"bytes": "7699"
},
{
"name": "C++",
"bytes": "2357839"
},
{
"name": "CMake",
"bytes": "41264"
},
{
"name": "PLSQL",
"bytes": "42065"
},
{
"name": "Python",
"bytes": "7503510"
},
{
"name": "SQLPL",
"bytes": "42770"
}
],
"symlink_target": ""
} |
from django.contrib import admin
from nested_inline.admin import NestedStackedInline, NestedModelAdmin
from example.models import *
class LevelThreeInline(NestedStackedInline):
    # Deepest inline level in the nesting demo.
    model = LevelThree
    extra = 1
    fk_name = 'level'
class LevelTwoInline(NestedStackedInline):
    # Middle inline level; nests LevelThreeInline within it.
    model = LevelTwo
    extra = 1
    fk_name = 'level'
    inlines = [LevelThreeInline]
class LevelOneInline(NestedStackedInline):
    # First inline level; nests LevelTwoInline within it.
    model = LevelOne
    extra = 1
    fk_name = 'level'
    inlines = [LevelTwoInline]
class TopLevelAdmin(NestedModelAdmin):
    # Root admin; renders the whole three-level nested inline chain.
    model = TopLevel
    inlines = [LevelOneInline]

admin.site.register(TopLevel, TopLevelAdmin)
| {
"content_hash": "c533896e8289d70882fb4d0449723b7d",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 69,
"avg_line_length": 21.5,
"alnum_prop": 0.7302325581395349,
"repo_name": "Dmitri-Sintsov/django-nested-inline",
"id": "0b3326a83fe93299c8b3a1caf2030ebc4d8345ad",
"size": "645",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "example/admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "6011"
},
{
"name": "HTML",
"bytes": "13017"
},
{
"name": "JavaScript",
"bytes": "22846"
},
{
"name": "Python",
"bytes": "19838"
}
],
"symlink_target": ""
} |
from core import perf_benchmark
from benchmarks import silk_flags
from measurements import power
import page_sets
from telemetry import benchmark
@benchmark.Enabled('android')
class PowerAndroidAcceptance(perf_benchmark.PerfBenchmark):
  """Android power acceptance test."""
  test = power.Power
  page_set = page_sets.AndroidAcceptancePageSet

  def SetExtraBrowserOptions(self, options):
    # Measure with full-performance mode off -- presumably so the device runs
    # in its normal power configuration; TODO confirm intent.
    options.full_performance_mode = False

  @classmethod
  def Name(cls):
    # Canonical benchmark identifier.
    return 'power.android_acceptance'
@benchmark.Enabled('android')
class PowerTypical10Mobile(perf_benchmark.PerfBenchmark):
  """Android typical 10 mobile power test."""
  test = power.Power
  page_set = page_sets.Typical10MobilePageSet

  def SetExtraBrowserOptions(self, options):
    options.full_performance_mode = False

  @classmethod
  def ShouldDisable(cls, possible_browser):  # http://crbug.com/597656
    # Disabled for the reference browser on Nexus 5X devices.
    return (possible_browser.browser_type == 'reference' and
            possible_browser.platform.GetDeviceTypeName() == 'Nexus 5X')

  @classmethod
  def Name(cls):
    return 'power.typical_10_mobile'
@benchmark.Enabled('android')
@benchmark.Disabled('android-webview')  # http://crbug.com/622300
class PowerToughAdCases(perf_benchmark.PerfBenchmark):
  """Android power test with tough ad pages."""
  test = power.Power
  page_set = page_sets.ToughAdCasesPageSet

  def SetExtraBrowserOptions(self, options):
    options.full_performance_mode = False

  @classmethod
  def Name(cls):
    return 'power.tough_ad_cases'

  @classmethod
  def ShouldDisable(cls, possible_browser):
    # http://crbug.com/563968, http://crbug.com/593973
    # Disabled on svelte builds and for reference browser on Nexus 5X.
    return (cls.IsSvelte(possible_browser) or
            (possible_browser.browser_type == 'reference' and
             possible_browser.platform.GetDeviceTypeName() == 'Nexus 5X'))
@benchmark.Enabled('android')
@benchmark.Disabled('all')
class PowerTypical10MobileReload(perf_benchmark.PerfBenchmark):
  """Android typical 10 mobile power reload test."""
  # NOTE: Disabled('all') means this benchmark currently never runs.
  test = power.LoadPower
  page_set = page_sets.Typical10MobileReloadPageSet

  def SetExtraBrowserOptions(self, options):
    options.full_performance_mode = False

  @classmethod
  def Name(cls):
    return 'power.typical_10_mobile_reload'
@benchmark.Enabled('android')
class PowerGpuRasterizationTypical10Mobile(perf_benchmark.PerfBenchmark):
  """Measures power on key mobile sites with GPU rasterization."""
  tag = 'gpu_rasterization'
  test = power.Power
  page_set = page_sets.Typical10MobilePageSet

  def SetExtraBrowserOptions(self, options):
    # Same page set as PowerTypical10Mobile, but with GPU rasterization
    # flags enabled.
    silk_flags.CustomizeBrowserOptionsForGpuRasterization(options)
    options.full_performance_mode = False

  @classmethod
  def Name(cls):
    return 'power.gpu_rasterization.typical_10_mobile'

  @classmethod
  def ShouldDisable(cls, possible_browser):
    # http://crbug.com/563968, http://crbug.com/593973
    return (cls.IsSvelte(possible_browser) or
            (possible_browser.browser_type == 'reference' and
             possible_browser.platform.GetDeviceTypeName() == 'Nexus 5X'))
@benchmark.Enabled('mac')
class PowerTop10(perf_benchmark.PerfBenchmark):
  """Top 10 quiescent power test."""
  test = power.QuiescentPower
  page_set = page_sets.Top10PageSet

  def SetExtraBrowserOptions(self, options):
    options.full_performance_mode = False

  @classmethod
  def Name(cls):
    return 'power.top_10'
@benchmark.Enabled('mac')
class PowerGpuRasterizationTop10(perf_benchmark.PerfBenchmark):
  """Top 10 quiescent power test with GPU rasterization enabled."""
  tag = 'gpu_rasterization'
  test = power.QuiescentPower
  page_set = page_sets.Top10PageSet

  def SetExtraBrowserOptions(self, options):
    # GPU-rasterization variant of PowerTop10.
    silk_flags.CustomizeBrowserOptionsForGpuRasterization(options)
    options.full_performance_mode = False

  @classmethod
  def Name(cls):
    return 'power.gpu_rasterization.top_10'
@benchmark.Enabled('mac')
class PowerTop25(perf_benchmark.PerfBenchmark):
  """Top 25 quiescent power test."""
  test = power.QuiescentPower
  page_set = page_sets.Top25PageSet

  def SetExtraBrowserOptions(self, options):
    options.full_performance_mode = False

  @classmethod
  def Name(cls):
    return 'power.top_25'

  def CreateStorySet(self, _):
    # Exclude techcrunch.com. It is not suitable for this benchmark because it
    # does not consistently become quiescent within 60 seconds.
    stories = self.page_set()
    found = next((x for x in stories if 'techcrunch.com' in x.url), None)
    if found:
      stories.RemoveStory(found)
    return stories
@benchmark.Enabled('mac')
class PowerGpuRasterizationTop25(perf_benchmark.PerfBenchmark):
  """Top 25 quiescent power test with GPU rasterization enabled."""
  tag = 'gpu_rasterization'
  test = power.QuiescentPower
  page_set = page_sets.Top25PageSet

  def SetExtraBrowserOptions(self, options):
    # GPU-rasterization variant of PowerTop25.
    silk_flags.CustomizeBrowserOptionsForGpuRasterization(options)
    options.full_performance_mode = False

  @classmethod
  def Name(cls):
    return 'power.gpu_rasterization.top_25'

  def CreateStorySet(self, _):
    # Exclude techcrunch.com. It is not suitable for this benchmark because it
    # does not consistently become quiescent within 60 seconds.
    stories = self.page_set()
    found = next((x for x in stories if 'techcrunch.com' in x.url), None)
    if found:
      stories.RemoveStory(found)
    return stories
@benchmark.Enabled('mac')
class PowerScrollingTrivialPage(perf_benchmark.PerfBenchmark):
  """Measure power consumption for some very simple pages."""
  test = power.QuiescentPower
  page_set = page_sets.MacGpuTrivialPagesStorySet

  @classmethod
  def Name(cls):
    return 'power.trivial_pages'
| {
"content_hash": "2efbf53f9b782a14ec5a875a99b58895",
"timestamp": "",
"source": "github",
"line_count": 188,
"max_line_length": 78,
"avg_line_length": 29.925531914893618,
"alnum_prop": 0.7415570565232847,
"repo_name": "danakj/chromium",
"id": "e1632e57de12a34225cf8015df80bd82143365c7",
"size": "5789",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/perf/benchmarks/power.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
"""
lantz_core.limits
~~~~~~~~~~~~~~~~~
Module defining the limits utilities used for int and float.
:copyright: 2015 by Lantz Authors, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import (division, unicode_literals, print_function,
absolute_import)
from types import MethodType
from math import modf
from functools import update_wrapper
from .unit import UNIT_SUPPORT, get_unit_registry
if UNIT_SUPPORT:
from pint.quantity import _Quantity
class AbstractLimitsValidator(object):
    """Base class for all limits validators.

    Attributes
    ----------
    minimum :
        Minimal allowed value or None.
    maximum :
        Maximal allowed value or None.
    step :
        Allowed step between values or None.

    Methods
    -------
    validate :
        Validate a given value against the range.

    """
    __slots__ = ('minimum', 'maximum', 'step', 'validate')


class IntLimitsValidator(AbstractLimitsValidator):
    """Validator for the value of an integer.

    Parameters
    ----------
    min : int, optional
        Minimal allowed value.
    max : int, optional
        Maximum allowed value.
    step : int, optional
        Smallest allowed step.

    Methods
    -------
    validate :
        Validate a given value against the declared limits.

    """
    __slots__ = ()

    def __init__(self, min=None, max=None, step=None):
        mess = 'The {} of an IntRange must be an integer not {}.'
        if min is None and max is None:
            raise ValueError('An IntLimitsValidator must have a min or max')
        # Validate the bound types in min/max order so error messages match
        # the historical behaviour.
        for label, bound in (('min', min), ('max', max)):
            if bound is not None and not isinstance(bound, int):
                raise TypeError(mess.format(label, type(bound)))
        if step and not isinstance(step, int):
            raise TypeError(mess.format('step', type(step)))

        self.minimum = min
        self.maximum = max
        self.step = step

        # Bind ``validate`` to the cheapest check covering the declared
        # bounds, so no per-call dispatch is required.
        if min is not None and max is not None:
            checker = (self._validate_range_and_step if step
                       else self._validate_range)
        elif min is not None:
            checker = (self._validate_larger_and_step if step
                       else self._validate_larger)
        else:
            checker = (self._validate_smaller_and_step if step
                       else self._validate_smaller)
        self.validate = checker

    def _validate_smaller(self, value):
        """Check that the value does not exceed the maximum."""
        return self.maximum >= value

    def _validate_smaller_and_step(self, value):
        """Check the maximum bound and that the value sits on the step grid."""
        if value > self.maximum:
            return False
        return (value - self.maximum) % self.step == 0

    def _validate_larger(self, value):
        """Check that the value is not below the minimum."""
        return self.minimum <= value

    def _validate_larger_and_step(self, value):
        """Check the minimum bound and that the value sits on the step grid."""
        if value < self.minimum:
            return False
        return (value - self.minimum) % self.step == 0

    def _validate_range(self, value):
        """Check that the value lies between minimum and maximum."""
        return self.minimum <= value <= self.maximum

    def _validate_range_and_step(self, value):
        """Check the full range and that the value sits on the step grid."""
        if not self.minimum <= value <= self.maximum:
            return False
        return (value - self.minimum) % self.step == 0
class FloatLimitsValidator(AbstractLimitsValidator):
    """Range used to validate the value of a float.

    Parameters
    ----------
    min : float, optional
        Minimal allowed value
    max : float, optional
        Maximum allowed value
    step : float, optional
        Smallest allowed step
    unit : str, optional
        Unit in which the bounds and step are expressed.

    Attributes
    ----------
    unit : Unit or None
        Unit used when validating. None when no unit was declared or unit
        support is unavailable.

    Methods
    -------
    validate :
        Validate a given value. If a unit is declared both floats and Quantity
        can be passed. When passing a float the unit can be given as a keyword.
        This is used when validating Float features.

    """
    # Fix: '(unit)' without a trailing comma is a plain string, not a tuple.
    # A string happens to be accepted by __slots__, but the tuple form is the
    # unambiguous, conventional spelling.
    __slots__ = ('unit',)

    def __init__(self, min=None, max=None, step=None, unit=None):
        mess = 'The {} of an FloatLimitsValidator must be a float not {}.'
        if min is None and max is None:
            raise ValueError('An FloatLimitsValidator must have a min or max')
        if min is not None and not isinstance(min, (int, float)):
            raise TypeError(mess.format('min', type(min)))
        if max is not None and not isinstance(max, (int, float)):
            raise TypeError(mess.format('max', type(max)))
        if step and not isinstance(step, (int, float)):
            raise TypeError(mess.format('step', type(step)))

        self.minimum = float(min) if min is not None else None
        self.maximum = float(max) if max is not None else None
        self.step = float(step) if step is not None else None

        if UNIT_SUPPORT and unit:
            ureg = get_unit_registry()
            self.unit = ureg.parse_expression(unit)
            wrap = self._unit_conversion
        else:
            # Fix: ``unit`` used to be left unset on this branch, so reading
            # the attribute raised AttributeError; default it to None instead.
            self.unit = None
            wrap = lambda x: x

        # Bind ``validate`` once to the check matching the declared bounds,
        # wrapped with unit conversion when a unit is in play.
        if min is not None:
            if max is not None:
                if step:
                    self.validate = wrap(self._validate_range_and_step)
                else:
                    self.validate = wrap(self._validate_range)
            else:
                if step:
                    self.validate = wrap(self._validate_larger_and_step)
                else:
                    self.validate = wrap(self._validate_larger)
        else:
            if step:
                self.validate = wrap(self._validate_smaller_and_step)
            else:
                self.validate = wrap(self._validate_smaller)

    def _unit_conversion(self, cmp_func):
        """Decorator handling unit conversion to the unit.

        Converts the incoming value (a plain float with an optional ``unit``
        keyword, or a pint Quantity) into this validator's unit before
        delegating to the wrapped comparison method.
        """
        if isinstance(cmp_func, MethodType):
            cmp_func = cmp_func.__func__

        def wrapper(self, value, unit=None):
            if unit and unit != self.unit:
                value *= (1*unit).to(self.unit).magnitude
            elif isinstance(value, _Quantity):
                value = value.to(self.unit).magnitude
            return cmp_func(self, value)

        update_wrapper(wrapper, cmp_func)
        wrapper.__doc__ += '\nAutomatic handling of unit conversions'
        return MethodType(wrapper, self)

    def _validate_smaller(self, value, unit=None):
        """Check if the value is smaller than the maximum.
        """
        return value <= self.maximum

    def _validate_smaller_and_step(self, value, unit=None):
        """Check if the value is smaller than the maximum and respect the step.
        """
        # Round to 9 decimals before extracting the fractional part so that
        # float noise does not defeat the step check.
        ratio = round(abs((value-self.maximum)/self.step), 9)
        return value <= self.maximum and modf(ratio)[0] < 1e-9

    def _validate_larger(self, value, unit=None):
        """Check if the value is larger than the minimum.
        """
        return value >= self.minimum

    def _validate_larger_and_step(self, value, unit=None):
        """Check if the value is larger than the minimum and respect the step.
        """
        ratio = round(abs((value-self.minimum)/self.step), 9)
        return value >= self.minimum and modf(ratio)[0] < 1e-9

    def _validate_range(self, value, unit=None):
        """Check if the value is in the range.
        """
        return self.minimum <= value <= self.maximum

    def _validate_range_and_step(self, value, unit=None):
        """Check if the value is in the range and respect the step.
        """
        ratio = round(abs((value-self.minimum)/self.step), 9)
        return self.minimum <= value <= self.maximum\
            and abs(modf(ratio)[0]) < 1e-9
| {
"content_hash": "df69397615629a77c1dd84bdf3022af3",
"timestamp": "",
"source": "github",
"line_count": 262,
"max_line_length": 79,
"avg_line_length": 31.49236641221374,
"alnum_prop": 0.5801721003514726,
"repo_name": "LabPy/lantz_core",
"id": "6e56c05d84fbb114c60fa81adb1e058f671807f6",
"size": "8275",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "lantz_core/limits.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "215696"
}
],
"symlink_target": ""
} |
import sys
import json
import string
import random
POPULAR_NGRAM_COUNT = 10000


def preprocess_frequencies(frequencies, order):
    '''Compile simple mapping from N-grams to frequencies into data structures to help compute
    the probability of state transitions to complete an N-gram

    Arguments:
        frequencies -- mapping from N-gram to frequency recorded in the training text
        order -- The N in each N-gram (i.e. number of items)

    Returns:
        sequencer -- Set of mappings from each N-1 sequence to the freqency of possible
            items completing it
        popular_ngrams -- list of most common N-grams
        all_words -- list of all unique words that occur in the training text
    '''
    # Most frequent N-grams first; keep only the head of that ranking.
    ranked = sorted(frequencies, key=frequencies.get, reverse=True)
    popular_ngrams = ranked[:POPULAR_NGRAM_COUNT]

    sequencer = {}
    vocabulary = set()
    for ngram, freq in frequencies.items():
        # Split each N-gram into its N-1 lead and the final completing item.
        *lead, final = ngram.split()
        # Lists are unhashable, so the lead is keyed as a tuple.
        completions = sequencer.setdefault(tuple(lead), {})
        completions[final] = freq
        vocabulary.update(lead)
        vocabulary.add(final)

    # Callers expect a list (for random.choices etc.), so convert the set.
    return sequencer, popular_ngrams, list(vocabulary)
def generate_letters(sequencer, popular_ngrams, all_words, length, order):
    '''Generate text based on probabilities derived from statistics for initializing
    and continuing sequences of letters

    Arguments:
        sequencer -- mapping from each leading sequence to frequencies of the next letter
        popular_ngrams -- list of the highest frequency N-Grams
        all_words -- list of all unique words in the training text; used as the
            candidate pool (with per-word frequency weights) for each next item
        length -- approximate number of characters to generate before ending the program
        order -- The N in each N-gram (i.e. number of items)

    Returns:
        nothing
    '''
    #The lead is the initial part of the N-Gram to be completed, of length N-1
    #containing the last N-1 items produced
    lead = []
    #Keep track of how many items have been generated
    generated_count = 0
    while generated_count < length:
        #This condition will be true until the initial lead N-gram is constructed
        #It will also be true if we get to a dead end where there are no stats
        #For the next item from the current lead
        if tuple(lead) not in sequencer:
            #Pick an N-gram at random from the most popular
            reset = random.choice(popular_ngrams)
            #Split it up into a list to serve as a lead
            reset = reset.split()
            #Drop the final item so that lead is N-1
            lead = reset[:-1]
            for item in lead:
                #Note we now print a space between items, which are words
                print(item, end=' ', flush=True)
            generated_count += len(lead)
        else:
            freq = sequencer[tuple(lead)]
            #NOTE(review): building the weight list walks the whole vocabulary
            #on every step -- O(V) per generated word; fine for small corpora.
            weights = [ freq.get(w, 0) for w in all_words ]
            chosen = random.choices(all_words, weights=weights)[0]
            print(chosen, end=' ', flush=True)
            #Clip the first item from the lead and tack on the new item
            lead = lead[1:] + [chosen]
            generated_count += 1
    return
if __name__ == '__main__':
    #File with N-gram frequencies is the first argument
    raw_freq_fp = open(sys.argv[1])
    #Second argument: approximate number of words to generate
    length = int(sys.argv[2])
    raw_freqs = json.load(raw_freq_fp)
    #Figure out the N-gram order. Just pull the first N-gram and check how many words in it
    order = len(next(iter(raw_freqs)).split())
    sequencer, popular_ngrams, all_words = preprocess_frequencies(raw_freqs, order)
    generate_letters(sequencer, popular_ngrams, all_words, length, order)
| {
"content_hash": "4bb2af539ab5fb96c5afae8d969b38f9",
"timestamp": "",
"source": "github",
"line_count": 102,
"max_line_length": 94,
"avg_line_length": 39.59803921568628,
"alnum_prop": 0.6432285219113641,
"repo_name": "uogbuji/ibmdw",
"id": "096b1386021632c904e66f3adee82583dea7589f",
"size": "4039",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "patternai/generate_words.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "96"
},
{
"name": "HTML",
"bytes": "8821"
},
{
"name": "Python",
"bytes": "30260"
}
],
"symlink_target": ""
} |
import argparse
from euca2ools.commands.monitoring import CloudWatchRequest
from requestbuilder import Arg
class DeleteAlarms(CloudWatchRequest):
    # CloudWatch DeleteAlarms request: deletes one or more alarms by name.
    DESCRIPTION = 'Delete alarms'
    ARGS = [Arg('AlarmNames.member', metavar='ALARM', nargs='+',
                help='names of the alarms to delete'),
            # Accepted but hidden and never routed to the request.
            Arg('-f', '--force', action='store_true', route_to=None,
                help=argparse.SUPPRESS)]  # for compatibility
| {
"content_hash": "a29664566fac7258b396f4f1df18ae5d",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 68,
"avg_line_length": 39.36363636363637,
"alnum_prop": 0.6720554272517321,
"repo_name": "vasiliykochergin/euca2ools",
"id": "ed0309af101c98f6570364b53ddf00586004fa92",
"size": "1775",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "euca2ools/commands/monitoring/deletealarms.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "1220919"
},
{
"name": "Shell",
"bytes": "872"
}
],
"symlink_target": ""
} |
import base64
import properties
from .. import BaseModel
class AttachmentData(BaseModel):
    """Attachment payload stored as base64 text."""

    data = properties.String(
        'base64 encoded data.',
    )

    @property
    def raw_data(self):
        """Raw (decoded) attachment data (possibly binary)."""
        # b64decode returns bytes; ``self.data and ...`` preserves a falsy
        # (unset) value unchanged.
        return self.data and base64.b64decode(self.data)

    @raw_data.setter
    def raw_data(self, value):
        """Set the base64 encoded data using a raw value or file object."""
        if value:
            try:
                value = value.read()
            except AttributeError:
                pass
            # Fix: the getter yields bytes, but the setter previously called
            # ``value.encode('utf-8')`` unconditionally, so bytes input
            # (including reads from binary files) raised AttributeError.
            # Encode only when we actually hold text.
            if not isinstance(value, bytes):
                value = value.encode('utf-8')
            b64 = base64.b64encode(value)
            self.data = b64.decode('utf-8')

    @raw_data.deleter
    def raw_data(self):
        # Clearing the decoded view clears the stored encoding too.
        self.data = None
| {
"content_hash": "48fd57885a592e74c7e813e8ef3a2cee",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 75,
"avg_line_length": 24.258064516129032,
"alnum_prop": 0.5784574468085106,
"repo_name": "LasLabs/python-helpscout",
"id": "5d4aeabdf4656a673bccb5146ba0c0dba3393827",
"size": "866",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "helpscout/models/attachment_data.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "147545"
}
],
"symlink_target": ""
} |
"""Tests for certbot._internal.plugins.webroot."""
from __future__ import print_function
import argparse
import errno
import json
import shutil
import tempfile
import unittest
import josepy as jose
try:
import mock
except ImportError: # pragma: no cover
from unittest import mock
from acme import challenges
from certbot import achallenges
from certbot import errors
from certbot.compat import filesystem
from certbot.compat import os
from certbot.display import util as display_util
from certbot.tests import acme_util
from certbot.tests import util as test_util
KEY = jose.JWKRSA.load(test_util.load_vector("rsa512_key.pem"))
class AuthenticatorTest(unittest.TestCase):
"""Tests for certbot._internal.plugins.webroot.Authenticator."""
achall = achallenges.KeyAuthorizationAnnotatedChallenge(
challb=acme_util.HTTP01_P, domain="thing.com", account_key=KEY)
def setUp(self):
from certbot._internal.plugins.webroot import Authenticator
# On Linux directories created by tempfile.mkdtemp inherit their permissions from their
# parent directory. So the actual permissions are inconsistent over various tests env.
# To circumvent this, a dedicated sub-workspace is created under the workspace, using
# filesystem.mkdir to get consistent permissions.
self.workspace = tempfile.mkdtemp()
self.path = os.path.join(self.workspace, 'webroot')
filesystem.mkdir(self.path)
self.partial_root_challenge_path = os.path.join(
self.path, ".well-known")
self.root_challenge_path = os.path.join(
self.path, ".well-known", "acme-challenge")
self.validation_path = os.path.join(
self.root_challenge_path,
"ZXZhR3hmQURzNnBTUmIyTEF2OUlaZjE3RHQzanV4R0orUEN0OTJ3citvQQ")
self.config = mock.MagicMock(webroot_path=self.path,
webroot_map={"thing.com": self.path})
self.auth = Authenticator(self.config, "webroot")
def tearDown(self):
shutil.rmtree(self.path)
def test_more_info(self):
more_info = self.auth.more_info()
self.assertIsInstance(more_info, str)
self.assertIn(self.path, more_info)
def test_add_parser_arguments(self):
add = mock.MagicMock()
self.auth.add_parser_arguments(add)
self.assertEqual(2, add.call_count)
def test_prepare(self):
self.auth.prepare() # shouldn't raise any exceptions
@test_util.patch_display_util()
def test_webroot_from_list(self, mock_get_utility):
self.config.webroot_path = []
self.config.webroot_map = {"otherthing.com": self.path}
mock_display = mock_get_utility()
mock_display.menu.return_value = (display_util.OK, 1,)
self.auth.perform([self.achall])
self.assertTrue(mock_display.menu.called)
for call in mock_display.menu.call_args_list:
self.assertIn(self.achall.domain, call[0][0])
self.assertTrue(all(
webroot in call[0][1]
for webroot in self.config.webroot_map.values()))
self.assertEqual(self.config.webroot_map[self.achall.domain],
self.path)
@unittest.skipIf(filesystem.POSIX_MODE, reason='Test specific to Windows')
@test_util.patch_display_util()
def test_webconfig_file_generate_and_cleanup(self, mock_get_utility):
mock_display = mock_get_utility()
mock_display.menu.return_value = (display_util.OK, 1,)
self.auth.perform([self.achall])
self.assertTrue(os.path.exists(os.path.join(self.root_challenge_path, "web.config")))
self.auth.cleanup([self.achall])
self.assertFalse(os.path.exists(os.path.join(self.root_challenge_path, "web.config")))
@unittest.skipIf(filesystem.POSIX_MODE, reason='Test specific to Windows')
@test_util.patch_display_util()
def test_foreign_webconfig_file_handling(self, mock_get_utility):
mock_display = mock_get_utility()
mock_display.menu.return_value = (display_util.OK, 1,)
challenge_path = os.path.join(self.path, ".well-known", "acme-challenge")
filesystem.makedirs(challenge_path)
webconfig_path = os.path.join(challenge_path, "web.config")
with open(webconfig_path, "w") as file:
file.write("something")
self.auth.perform([self.achall])
from certbot import crypto_util
webconfig_hash = crypto_util.sha256sum(webconfig_path)
from certbot._internal.plugins.webroot import _WEB_CONFIG_SHA256SUMS
self.assertTrue(webconfig_hash not in _WEB_CONFIG_SHA256SUMS)
@unittest.skipIf(filesystem.POSIX_MODE, reason='Test specific to Windows')
def test_foreign_webconfig_multiple_domains(self):
# Covers bug https://github.com/certbot/certbot/issues/9091
achall_2 = achallenges.KeyAuthorizationAnnotatedChallenge(
challb=acme_util.chall_to_challb(challenges.HTTP01(token=b"bingo"), "pending"),
domain="second-thing.com", account_key=KEY)
self.config.webroot_map["second-thing.com"] = self.path
challenge_path = os.path.join(self.path, ".well-known", "acme-challenge")
filesystem.makedirs(challenge_path)
webconfig_path = os.path.join(challenge_path, "web.config")
with open(webconfig_path, "w") as file:
file.write("something")
self.auth.perform([self.achall, achall_2])
@test_util.patch_display_util()
def test_webroot_from_list_help_and_cancel(self, mock_get_utility):
self.config.webroot_path = []
self.config.webroot_map = {"otherthing.com": self.path}
mock_display = mock_get_utility()
mock_display.menu.side_effect = ((display_util.CANCEL, -1),)
self.assertRaises(errors.PluginError, self.auth.perform, [self.achall])
self.assertTrue(mock_display.menu.called)
for call in mock_display.menu.call_args_list:
self.assertIn(self.achall.domain, call[0][0])
self.assertTrue(all(
webroot in call[0][1]
for webroot in self.config.webroot_map.values()))
@test_util.patch_display_util()
def test_new_webroot(self, mock_get_utility):
self.config.webroot_path = []
self.config.webroot_map = {"something.com": self.path}
mock_display = mock_get_utility()
mock_display.menu.return_value = (display_util.OK, 0,)
with mock.patch('certbot.display.ops.validated_directory') as m:
m.side_effect = ((display_util.CANCEL, -1),
(display_util.OK, self.path,))
self.auth.perform([self.achall])
self.assertEqual(self.config.webroot_map[self.achall.domain], self.path)
@test_util.patch_display_util()
def test_new_webroot_empty_map_cancel(self, mock_get_utility):
self.config.webroot_path = []
self.config.webroot_map = {}
mock_display = mock_get_utility()
mock_display.menu.return_value = (display_util.OK, 0,)
with mock.patch('certbot.display.ops.validated_directory') as m:
m.return_value = (display_util.CANCEL, -1)
self.assertRaises(errors.PluginError,
self.auth.perform,
[self.achall])
def test_perform_missing_root(self):
self.config.webroot_path = None
self.config.webroot_map = {}
self.assertRaises(errors.PluginError, self.auth.perform, [])
def test_perform_reraises_other_errors(self):
self.auth.full_path = os.path.join(self.path, "null")
permission_canary = os.path.join(self.path, "rnd")
with open(permission_canary, "w") as f:
f.write("thingimy")
filesystem.chmod(self.path, 0o000)
try:
with open(permission_canary, "r"):
pass
print("Warning, running tests as root skips permissions tests...")
except IOError:
# ok, permissions work, test away...
self.assertRaises(errors.PluginError, self.auth.perform, [])
filesystem.chmod(self.path, 0o700)
@mock.patch("certbot._internal.plugins.webroot.filesystem.copy_ownership_and_apply_mode")
def test_failed_chown(self, mock_ownership):
mock_ownership.side_effect = OSError(errno.EACCES, "msg")
self.auth.perform([self.achall]) # exception caught and logged
@test_util.patch_display_util()
def test_perform_new_webroot_not_in_map(self, mock_get_utility):
new_webroot = tempfile.mkdtemp()
self.config.webroot_path = []
self.config.webroot_map = {"whatever.com": self.path}
mock_display = mock_get_utility()
mock_display.menu.side_effect = ((display_util.OK, 0),
(display_util.OK, new_webroot))
achall = achallenges.KeyAuthorizationAnnotatedChallenge(
challb=acme_util.HTTP01_P, domain="something.com", account_key=KEY)
with mock.patch('certbot.display.ops.validated_directory') as m:
m.return_value = (display_util.OK, new_webroot,)
self.auth.perform([achall])
self.assertEqual(self.config.webroot_map[achall.domain], new_webroot)
def test_perform_permissions(self):
self.auth.prepare()
# Remove exec bit from permission check, so that it
# matches the file
self.auth.perform([self.achall])
self.assertTrue(filesystem.check_mode(self.validation_path, 0o644))
# Check permissions of the directories
for dirpath, dirnames, _ in os.walk(self.path):
for directory in dirnames:
full_path = os.path.join(dirpath, directory)
self.assertTrue(filesystem.check_mode(full_path, 0o755))
self.assertTrue(filesystem.has_same_ownership(self.validation_path, self.path))
def test_perform_cleanup(self):
self.auth.prepare()
responses = self.auth.perform([self.achall])
self.assertEqual(1, len(responses))
self.assertTrue(os.path.exists(self.validation_path))
with open(self.validation_path) as validation_f:
validation = validation_f.read()
self.assertTrue(
challenges.KeyAuthorizationChallengeResponse(
key_authorization=validation).verify(
self.achall.chall, KEY.public_key()))
self.auth.cleanup([self.achall])
self.assertFalse(os.path.exists(self.validation_path))
self.assertFalse(os.path.exists(self.root_challenge_path))
self.assertFalse(os.path.exists(self.partial_root_challenge_path))
def test_perform_cleanup_existing_dirs(self):
filesystem.mkdir(self.partial_root_challenge_path)
self.auth.prepare()
self.auth.perform([self.achall])
self.auth.cleanup([self.achall])
# Ensure we don't "clean up" directories that previously existed
self.assertFalse(os.path.exists(self.validation_path))
self.assertFalse(os.path.exists(self.root_challenge_path))
def test_perform_cleanup_multiple_challenges(self):
bingo_achall = achallenges.KeyAuthorizationAnnotatedChallenge(
challb=acme_util.chall_to_challb(
challenges.HTTP01(token=b"bingo"), "pending"),
domain="thing.com", account_key=KEY)
bingo_validation_path = "YmluZ28"
filesystem.mkdir(self.partial_root_challenge_path)
self.auth.prepare()
self.auth.perform([bingo_achall, self.achall])
self.auth.cleanup([self.achall])
self.assertFalse(os.path.exists(bingo_validation_path))
self.assertTrue(os.path.exists(self.root_challenge_path))
self.auth.cleanup([bingo_achall])
self.assertFalse(os.path.exists(self.validation_path))
self.assertFalse(os.path.exists(self.root_challenge_path))
    def test_cleanup_leftovers(self):
        """cleanup() must not delete the challenge directory while it still
        contains files the plugin did not create."""
        self.auth.prepare()
        self.auth.perform([self.achall])
        leftover_path = os.path.join(self.root_challenge_path, 'leftover')
        filesystem.mkdir(leftover_path)
        self.auth.cleanup([self.achall])
        self.assertFalse(os.path.exists(self.validation_path))
        # Directory survives because of the foreign "leftover" entry.
        self.assertTrue(os.path.exists(self.root_challenge_path))
        os.rmdir(leftover_path)
    @mock.patch('certbot.compat.os.rmdir')
    def test_cleanup_failure(self, mock_rmdir):
        """cleanup() tolerates a permission error when removing the
        challenge directory instead of raising."""
        self.auth.prepare()
        self.auth.perform([self.achall])
        os_error = OSError()
        os_error.errno = errno.EACCES
        mock_rmdir.side_effect = os_error
        self.auth.cleanup([self.achall])
        # The validation file is gone, but the directory removal failure
        # is swallowed, leaving the directory behind.
        self.assertFalse(os.path.exists(self.validation_path))
        self.assertTrue(os.path.exists(self.root_challenge_path))
class WebrootActionTest(unittest.TestCase):
    """Tests for webroot argparse actions."""
    achall = achallenges.KeyAuthorizationAnnotatedChallenge(
        challb=acme_util.HTTP01_P, domain="thing.com", account_key=KEY)
    def setUp(self):
        from certbot._internal.plugins.webroot import Authenticator
        self.path = tempfile.mkdtemp()
        self.parser = argparse.ArgumentParser()
        self.parser.add_argument("-d", "--domains",
                                 action="append", default=[])
        Authenticator.inject_parser_options(self.parser, "webroot")
    def test_webroot_map_action(self):
        """--webroot-map accepts a JSON mapping of domain -> webroot."""
        args = self.parser.parse_args(
            ["--webroot-map", json.dumps({'thing.com': self.path})])
        self.assertEqual(args.webroot_map["thing.com"], self.path)
    def test_domain_before_webroot(self):
        """A -d given before -w is mapped to that webroot after perform()."""
        args = self.parser.parse_args(
            "-d {0} -w {1}".format(self.achall.domain, self.path).split())
        config = self._get_config_after_perform(args)
        self.assertEqual(config.webroot_map[self.achall.domain], self.path)
    def test_domain_before_webroot_error(self):
        """Ambiguous -d/-w orderings are rejected with a PluginError."""
        self.assertRaises(errors.PluginError, self.parser.parse_args,
                          "-d foo -w bar -w baz".split())
        self.assertRaises(errors.PluginError, self.parser.parse_args,
                          "-d foo -w bar -d baz -w qux".split())
    def test_multiwebroot(self):
        """Each -d is associated with the most recent preceding -w."""
        args = self.parser.parse_args("-w {0} -d {1} -w {2} -d bar".format(
            self.path, self.achall.domain, tempfile.mkdtemp()).split())
        self.assertEqual(args.webroot_map[self.achall.domain], self.path)
        config = self._get_config_after_perform(args)
        self.assertEqual(
            config.webroot_map[self.achall.domain], self.path)
    def test_webroot_map_partial_without_perform(self):
        # This test acknowledges the fact that webroot_map content will be partial if webroot
        # plugin perform method is not invoked (corner case when all auths are already valid).
        # To not be a problem, the webroot_path must always been conserved during renew.
        # This condition is challenged by:
        # certbot.tests.renewal_tests::RenewalTest::test_webroot_params_conservation
        # See https://github.com/certbot/certbot/pull/7095 for details.
        other_webroot_path = tempfile.mkdtemp()
        args = self.parser.parse_args("-w {0} -d {1} -w {2} -d bar".format(
            self.path, self.achall.domain, other_webroot_path).split())
        # Without perform(), only explicitly-paired domains are mapped, but
        # every -w path is recorded in webroot_path.
        self.assertEqual(args.webroot_map, {self.achall.domain: self.path})
        self.assertEqual(args.webroot_path, [self.path, other_webroot_path])
    def _get_config_after_perform(self, config):
        # Helper: run the authenticator once so it finalizes webroot_map.
        from certbot._internal.plugins.webroot import Authenticator
        auth = Authenticator(config, "webroot")
        auth.perform([self.achall])
        return auth.config
if __name__ == "__main__":
unittest.main() # pragma: no cover
| {
"content_hash": "ef059aff3a2e8ce85d0d97735c5bee25",
"timestamp": "",
"source": "github",
"line_count": 365,
"max_line_length": 95,
"avg_line_length": 43.23013698630137,
"alnum_prop": 0.6504848215983269,
"repo_name": "lmcro/letsencrypt",
"id": "d7e96159658ea5a7e022bce72e2f4dfde8a1f9d7",
"size": "15779",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "certbot/tests/plugins/webroot_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "14147"
},
{
"name": "Augeas",
"bytes": "4731"
},
{
"name": "Batchfile",
"bytes": "35037"
},
{
"name": "DIGITAL Command Language",
"bytes": "133"
},
{
"name": "Groff",
"bytes": "222"
},
{
"name": "Makefile",
"bytes": "37309"
},
{
"name": "Nginx",
"bytes": "4274"
},
{
"name": "Python",
"bytes": "1225979"
},
{
"name": "Shell",
"bytes": "26934"
}
],
"symlink_target": ""
} |
"""
FILE: inno.py
AUTHOR: Cody Precord
@summary: Lexer configuration module for Inno Setup Scripts
"""
__author__ = "Cody Precord <cprecord@editra.org>"
__svnid__ = "$Id: _inno.py 68798 2011-08-20 17:17:05Z CJP $"
__revision__ = "$Revision: 68798 $"
#-----------------------------------------------------------------------------#
# Imports
import wx
import wx.stc as stc
import re
# Local Imports
import synglob
import syndata
#-----------------------------------------------------------------------------#
#---- Keyword Definitions ----#
# Each spec below is a (style-bucket id, space-separated keyword list) pair
# consumed by the lexer via SyntaxData.GetKeywords.
# Bucket 0: section names recognized by the Inno Setup lexer.
SECTION_KW = (
    0,
    "code components custommessages dirs files icons ini "
    "installdelete langoptions languages messages registry run "
    "setup types tasks uninstalldelete uninstallrun _istool")
# Bucket 1: Inno Setup directive keywords.
KEYWORDS = (
    1,
    "allowcancelduringinstall allownoicons allowrootdirectory "
    "allowuncpath alwaysrestart alwaysshowcomponentslist "
    "alwaysshowdironreadypage alwaysshowgrouponreadypage "
    "alwaysusepersonalgroup appcomments appcontact appcopyright "
    "appenddefaultdirname appenddefaultgroupname appid appmodifypath "
    "appmutex appname apppublisher apppublisherurl appreadmefile "
    "appsupporturl appupdatesurl appvername appversion "
    "architecturesallowed architecturesinstallin64bitmode backcolor "
    "backcolor2 backcolordirection backsolid changesassociations "
    "changesenvironment compression copyrightfontname "
    "copyrightfontsize createappdir createuninstallregkey "
    "defaultdirname defaultgroupname defaultuserinfoname "
    "defaultuserinfoorg defaultuserinfoserial dialogfontname "
    "dialogfontsize direxistswarning disabledirpage "
    "disablefinishedpage disableprogramgrouppage disablereadymemo "
    "disablereadypage disablestartupprompt diskclustersize "
    "diskslicesize diskspanning enablesdirdoesntexistwarning "
    "encryption extradiskspacerequired flatcomponentslist "
    "infoafterfile infobeforefile internalcompresslevel "
    "languagedetectionmethod languagecodepage languageid languagename "
    "licensefile mergeduplicatefiles minversion onlybelowversion "
    "outputbasefilename outputdir outputmanifestfile password "
    "privilegesrequired reservebytes restartifneededbyrun "
    "setupiconfile showcomponentsizes showlanguagedialog "
    "showtaskstreelines slicesperdisk solidcompression sourcedir "
    "timestamprounding timestampsinutc titlefontname titlefontsize "
    "touchdate touchtime uninstallable uninstalldisplayicon "
    "uninstalldisplayname uninstallfilesdir uninstalllogmode "
    "uninstallrestartcomputer updateuninstalllogappname "
    "usepreviousappdir usepreviousgroup useprevioussetuptype "
    "useprevioustasks useprevioususerinfo userinfopage usesetupldr "
    "versioninfocompany versioninfocopyright versioninfodescription "
    "versioninfotextversion versioninfoversion welcomefontname "
    "welcomefontsize windowshowcaption windowstartmaximized "
    "windowresizable windowvisible wizardimagebackcolor "
    "wizardimagefile wizardimagestretch wizardsmallimagefile")
# Bucket 2: parameter names used inside section entries.
PARAM_KW = (
    2,
    "afterinstall attribs beforeinstall check comment components "
    "copymode description destdir destname excludes "
    "extradiskspacerequired filename flags fontinstall "
    "groupdescription hotkey infoafterfile infobeforefile "
    "iconfilename iconindex key languages licensefile messagesfile "
    "minversion name onlybelowversion parameters permissions root "
    "runonceid section source statusmsg string subkey tasks type "
    "types valuedata valuename valuetype workingdir")
# Bucket 3: preprocessor directives.
PREPROC_KW = (
    3,
    "append define dim else emit endif endsub error expr file for "
    "if ifdef ifexist ifndef ifnexist include insert pragma sub "
    "undef")
# Bucket 4: Pascal script keywords.
PASCAL_KW = (
    4,
    "begin break case const continue do downto else end except "
    "finally for function if of procedure repeat then to try until "
    "uses var while with")
# Bucket 5: user-defined keywords (empty by default).
USER_DEF = (5, "")
#---- End Keyword Definitions ----#
#---- Syntax Style Specs ----#
# Map each Inno lexer token id to an Editra style tag.
SYNTAX_ITEMS = [
    (stc.STC_INNO_COMMENT, 'comment_style'),
    (stc.STC_INNO_COMMENT_PASCAL, 'comment_style'),
    (stc.STC_INNO_DEFAULT, 'default_style'),
    (stc.STC_INNO_IDENTIFIER, 'default_style'),
    (stc.STC_INNO_KEYWORD, 'keyword_style'),
    (stc.STC_INNO_KEYWORD_PASCAL, 'keyword4_style'),
    (stc.STC_INNO_KEYWORD_USER, 'default_style'),
    (stc.STC_INNO_PARAMETER, 'keyword2_style'),
    (stc.STC_INNO_PREPROC, 'pre_style'),
    (stc.STC_INNO_SECTION, 'scalar_style'),
    (stc.STC_INNO_STRING_DOUBLE, 'string_style'),
    (stc.STC_INNO_STRING_SINGLE, 'char_style'),
]
# wx 2.9+ exposes STC_INNO_INLINE_EXPANSION; older builds only have
# STC_INNO_PREPROC_INLINE for the same token.
if wx.VERSION >= (2, 9, 0, 0, ''):
    SYNTAX_ITEMS.append((stc.STC_INNO_INLINE_EXPANSION, 'default_style')) #TODO
else:
    SYNTAX_ITEMS.append((stc.STC_INNO_PREPROC_INLINE, 'pre_style'))
#---- Extra Properties ----#
# Scintilla lexer properties enabling code folding.
FOLD = ("fold", "1")
FOLD_COMP = ("fold.compact", "1")
#-----------------------------------------------------------------------------#
class SyntaxData(syndata.SyntaxDataBase):
    """Editra syntax configuration for Inno Setup Scripts."""
    def __init__(self, langid):
        super(SyntaxData, self).__init__(langid)
        # Setup: select the Scintilla lexer and hook up auto-indentation.
        self.SetLexer(stc.STC_LEX_INNOSETUP)
        self.RegisterFeature(synglob.FEATURE_AUTOINDENT, AutoIndenter)
    def GetKeywords(self):
        """Return the list of keyword specifications."""
        return [SECTION_KW, KEYWORDS, PARAM_KW, PREPROC_KW, PASCAL_KW]
    def GetSyntaxSpec(self):
        """Return the syntax style specifications."""
        return SYNTAX_ITEMS
    def GetProperties(self):
        """Return the extra lexer properties to set."""
        return [FOLD]
    def GetCommentPattern(self):
        """Return the characters used to comment a block of code."""
        # Note: Inno can also use pascal comments (i.e {})
        return [u';']
#-----------------------------------------------------------------------------#
def AutoIndenter(estc, pos, ichar):
    """Auto indent Inno Setup Scripts.
    Inserts a newline plus the indentation of the current line, adding one
    extra level after a ``begin`` or an ``if ... then`` header.
    @param estc: EditraStyledTextCtrl
    @param pos: current carat position
    @param ichar: Indentation character (tab or space)

    """
    rtxt = u''
    line = estc.GetCurrentLine()
    text = estc.GetTextRange(estc.PositionFromLine(line), pos)
    eolch = estc.GetEOLChar()
    indent = estc.GetLineIndentation(line)
    if ichar == u"\t":
        tabw = estc.GetTabWidth()
    else:
        tabw = estc.GetIndent()
    # Use floor division: plain ``/`` yields a float on Python 3, which
    # would break the ``ichar * i_space`` string repetition below.
    i_space = indent // tabw
    ndent = eolch + ichar * i_space
    # Pad with spaces for any indentation that is not a whole tab stop.
    rtxt = ndent + ((indent - (tabw * i_space)) * u' ')
    # Raw string so \s is passed through to the regex engine verbatim.
    if_pat = re.compile(r'if\s+.*\sthen')
    text = text.strip()
    if text == u'begin' or if_pat.match(text):
        rtxt += ichar
    # Put text in the buffer
    estc.AddText(rtxt)
| {
"content_hash": "dfa4b9d4e06695887877672a94a0809e",
"timestamp": "",
"source": "github",
"line_count": 168,
"max_line_length": 80,
"avg_line_length": 42.785714285714285,
"alnum_prop": 0.6378686700055648,
"repo_name": "DaniilLeksin/gc",
"id": "45d93124db49a00808e6a83878e3ef946fe9a623",
"size": "7749",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "wx/tools/Editra/src/syntax/_inno.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ActionScript",
"bytes": "913"
},
{
"name": "Ada",
"bytes": "289"
},
{
"name": "Assembly",
"bytes": "687"
},
{
"name": "Boo",
"bytes": "540"
},
{
"name": "C",
"bytes": "868"
},
{
"name": "C#",
"bytes": "474"
},
{
"name": "C++",
"bytes": "393"
},
{
"name": "CSS",
"bytes": "923"
},
{
"name": "ColdFusion",
"bytes": "1012"
},
{
"name": "Common Lisp",
"bytes": "1034"
},
{
"name": "D",
"bytes": "1858"
},
{
"name": "Eiffel",
"bytes": "426"
},
{
"name": "Erlang",
"bytes": "774"
},
{
"name": "FORTRAN",
"bytes": "1810"
},
{
"name": "Forth",
"bytes": "182"
},
{
"name": "Groovy",
"bytes": "2366"
},
{
"name": "Haskell",
"bytes": "816"
},
{
"name": "Haxe",
"bytes": "455"
},
{
"name": "Java",
"bytes": "1155"
},
{
"name": "JavaScript",
"bytes": "279"
},
{
"name": "Lua",
"bytes": "795"
},
{
"name": "Matlab",
"bytes": "1278"
},
{
"name": "OCaml",
"bytes": "350"
},
{
"name": "Objective-C++",
"bytes": "885"
},
{
"name": "PHP",
"bytes": "1411"
},
{
"name": "Pascal",
"bytes": "388"
},
{
"name": "Perl",
"bytes": "932"
},
{
"name": "Pike",
"bytes": "589"
},
{
"name": "Python",
"bytes": "6456238"
},
{
"name": "R",
"bytes": "687"
},
{
"name": "Ruby",
"bytes": "480"
},
{
"name": "Scheme",
"bytes": "282"
},
{
"name": "Shell",
"bytes": "1211"
},
{
"name": "Smalltalk",
"bytes": "926"
},
{
"name": "Squirrel",
"bytes": "697"
},
{
"name": "Stata",
"bytes": "302"
},
{
"name": "SystemVerilog",
"bytes": "3145"
},
{
"name": "Tcl",
"bytes": "1039"
},
{
"name": "TeX",
"bytes": "219"
},
{
"name": "VHDL",
"bytes": "985"
},
{
"name": "Vala",
"bytes": "664"
},
{
"name": "Verilog",
"bytes": "439"
},
{
"name": "Visual Basic",
"bytes": "1648"
},
{
"name": "ooc",
"bytes": "890"
},
{
"name": "xBase",
"bytes": "769"
}
],
"symlink_target": ""
} |
"""Converts cubic bezier curves to quadratic splines.
Conversion is performed such that the quadratic splines keep the same end-curve
tangents as the original cubics. The approach is iterative, increasing the
number of segments for a spline until the error gets below a bound.
Respective curves from multiple fonts will be converted at once to ensure that
the resulting splines are interpolation-compatible.
"""
from __future__ import print_function, division, absolute_import
import logging
from fontTools.pens.basePen import AbstractPen
from fontTools.pens.pointPen import PointToSegmentPen
from fontTools.pens.reverseContourPen import ReverseContourPen
from cu2qu import curves_to_quadratic
from cu2qu.errors import (
UnequalZipLengthsError, IncompatibleSegmentNumberError,
IncompatibleSegmentTypesError, IncompatibleGlyphsError,
IncompatibleFontsError)
__all__ = ['fonts_to_quadratic', 'font_to_quadratic']
# Default maximum approximation error, as a fraction of the font's UPEM.
DEFAULT_MAX_ERR = 0.001
# Font-lib key recording whether the font's curves are cubic or quadratic.
CURVE_TYPE_LIB_KEY = "com.github.googlei18n.cu2qu.curve_type"
logger = logging.getLogger(__name__)
_zip = zip
def zip(*args):
    """Strict zip: every argument must have the same length.

    A list (rather than an iterator) is returned, for Python 2/3
    compatibility.  Raises UnequalZipLengthsError on mismatched lengths.
    """
    lengths = {len(arg) for arg in args}
    if len(lengths) != 1:
        raise UnequalZipLengthsError(*args)
    return list(_zip(*args))
class GetSegmentsPen(AbstractPen):
    """Pen that records a glyph's outline as a list of segments.

    Each recorded segment is a ``(tag, points)`` pair.  Curve segments are
    prefixed with their starting on-curve point, so consecutive segments
    share a point.
    """
    def __init__(self):
        self._last_pt = None
        self.segments = []
    def _add_segment(self, tag, *args):
        # Remember the final on-curve point so the next curve segment can
        # be prefixed with it.
        if tag in ('move', 'line', 'qcurve', 'curve'):
            self._last_pt = args[-1]
        self.segments.append((tag, args))
    def moveTo(self, pt):
        self._add_segment('move', pt)
    def lineTo(self, pt):
        self._add_segment('line', pt)
    def qCurveTo(self, *points):
        self._add_segment('qcurve', self._last_pt, *points)
    def curveTo(self, *points):
        self._add_segment('curve', self._last_pt, *points)
    def closePath(self):
        self._add_segment('close')
    def endPath(self):
        self._add_segment('end')
    def addComponent(self, glyphName, transformation):
        # Components carry no outline segments of their own; ignore them.
        pass
def _get_segments(glyph):
    """Return a glyph's segments as extracted by GetSegmentsPen."""
    collector = GetSegmentsPen()
    # Deliberately NOT glyph.draw(collector): the PointToSegmentPen must be
    # created with outputImpliedClosingLine=True.  By default it omits the
    # implied closing line unless a closed contour duplicates its first and
    # last point, and since several glyphs are converted in lockstep we need
    # every glyph to produce the same number of segments either way.
    # https://github.com/googlefonts/fontmake/issues/572
    # https://github.com/fonttools/fonttools/pull/1720
    point_pen = PointToSegmentPen(collector, outputImpliedClosingLine=True)
    glyph.drawPoints(point_pen)
    return collector.segments
def _set_segments(glyph, segments, reverse_direction):
    """Replay segments (as extracted by GetSegmentsPen) back into *glyph*."""
    glyph.clearContours()
    pen = glyph.getPen()
    if reverse_direction:
        pen = ReverseContourPen(pen)
    for tag, args in segments:
        if tag == 'move':
            pen.moveTo(*args)
        elif tag == 'line':
            pen.lineTo(*args)
        elif tag in ('curve', 'qcurve'):
            # args[0] is the segment's starting on-curve point, which the
            # pen already knows about -- drop it.
            drawer = pen.curveTo if tag == 'curve' else pen.qCurveTo
            drawer(*args[1:])
        elif tag == 'close':
            pen.closePath()
        elif tag == 'end':
            pen.endPath()
        else:
            raise AssertionError('Unhandled segment type "%s"' % tag)
def _segments_to_quadratic(segments, max_err, stats):
    """Return quadratic approximations of the given cubic segments.

    All segments are converted together so the resulting splines share the
    same number of points; a count of the chosen spline length is recorded
    in *stats*.
    """
    assert all(s[0] == 'curve' for s in segments), 'Non-cubic given to convert'
    quadratics = curves_to_quadratic([s[1] for s in segments], max_err)
    n = len(quadratics[0])
    assert all(len(q) == n for q in quadratics[1:]), 'Converted incompatibly'
    # Record how many off-curve points the chosen splines use.
    key = str(n - 2)
    stats[key] = stats.get(key, 0) + 1
    return [('qcurve', q) for q in quadratics]
def _glyphs_to_quadratic(glyphs, max_err, reverse_direction, stats):
    """Do the actual conversion of a set of compatible glyphs, after arguments
    have been set up.
    Return True if the glyphs were modified, else return False.
    """
    try:
        # The module-level strict zip() raises if the glyphs do not all
        # have the same number of segments.
        segments_by_location = zip(*[_get_segments(g) for g in glyphs])
    except UnequalZipLengthsError:
        raise IncompatibleSegmentNumberError(glyphs)
    if not any(segments_by_location):
        # All glyphs are empty; nothing to convert.
        return False
    # always modify input glyphs if reverse_direction is True
    glyphs_modified = reverse_direction
    new_segments_by_location = []
    incompatible = {}
    for i, segments in enumerate(segments_by_location):
        tag = segments[0][0]
        if not all(s[0] == tag for s in segments[1:]):
            # Mismatched segment types at this position; remember the
            # offending index but keep scanning to report them all at once.
            incompatible[i] = [s[0] for s in segments]
        elif tag == 'curve':
            segments = _segments_to_quadratic(segments, max_err, stats)
            glyphs_modified = True
        new_segments_by_location.append(segments)
    if glyphs_modified:
        new_segments_by_glyph = zip(*new_segments_by_location)
        for glyph, new_segments in zip(glyphs, new_segments_by_glyph):
            _set_segments(glyph, new_segments, reverse_direction)
    if incompatible:
        # Raised only after writing back, so compatible segments are kept.
        raise IncompatibleSegmentTypesError(glyphs, segments=incompatible)
    return glyphs_modified
def glyphs_to_quadratic(
        glyphs, max_err=None, reverse_direction=False, stats=None):
    """Convert the curves of a set of interpolation-compatible glyphs to
    quadratic.

    All curves are converted at once so the results stay
    interpolation-compatible; if that is not required, converting one glyph
    at a time may yield slightly more optimized splines.
    Return True if any glyph was modified, else return False.
    Raises IncompatibleGlyphsError if glyphs have non-interpolatable outlines.
    """
    if stats is None:
        stats = {}
    # Without an explicit bound, fall back to the default error scaled by
    # an assumed 1000 units-per-em.
    max_err = max_err or DEFAULT_MAX_ERR * 1000
    max_errors = (max_err if isinstance(max_err, (list, tuple))
                  else [max_err] * len(glyphs))
    assert len(max_errors) == len(glyphs)
    return _glyphs_to_quadratic(glyphs, max_errors, reverse_direction, stats)
def fonts_to_quadratic(
        fonts, max_err_em=None, max_err=None, reverse_direction=False,
        stats=None, dump_stats=False, remember_curve_type=True):
    """Convert the curves of a collection of fonts to quadratic.
    All curves will be converted to quadratic at once, ensuring interpolation
    compatibility. If this is not required, calling fonts_to_quadratic with one
    font at a time may yield slightly more optimized results.
    Return True if fonts were modified, else return False.
    By default, cu2qu stores the curve type in the fonts' lib, under a private
    key "com.github.googlei18n.cu2qu.curve_type", and will not try to convert
    them again if the curve type is already set to "quadratic".
    Setting 'remember_curve_type' to False disables this optimization.
    Raises IncompatibleFontsError if same-named glyphs from different fonts
    have non-interpolatable outlines.
    """
    if remember_curve_type:
        # Skip the whole conversion if every font is already quadratic.
        curve_types = {f.lib.get(CURVE_TYPE_LIB_KEY, "cubic") for f in fonts}
        if len(curve_types) == 1:
            curve_type = next(iter(curve_types))
            if curve_type == "quadratic":
                logger.info("Curves already converted to quadratic")
                return False
            elif curve_type == "cubic":
                pass  # keep converting
            else:
                raise NotImplementedError(curve_type)
        elif len(curve_types) > 1:
            # going to crash later if they do differ
            logger.warning("fonts may contain different curve types")
    if stats is None:
        stats = {}
    if max_err_em and max_err:
        raise TypeError('Only one of max_err and max_err_em can be specified.')
    if not (max_err_em or max_err):
        max_err_em = DEFAULT_MAX_ERR
    # Build one error bound per font, either absolute (max_err) or
    # relative to each font's units-per-em (max_err_em).
    if isinstance(max_err, (list, tuple)):
        assert len(max_err) == len(fonts)
        max_errors = max_err
    elif max_err:
        max_errors = [max_err] * len(fonts)
    if isinstance(max_err_em, (list, tuple)):
        assert len(fonts) == len(max_err_em)
        max_errors = [f.info.unitsPerEm * e
                      for f, e in zip(fonts, max_err_em)]
    elif max_err_em:
        max_errors = [f.info.unitsPerEm * max_err_em for f in fonts]
    modified = False
    glyph_errors = {}
    # Convert same-named glyphs from all fonts together so the results
    # remain interpolation-compatible.
    for name in set().union(*(f.keys() for f in fonts)):
        glyphs = []
        cur_max_errors = []
        for font, error in zip(fonts, max_errors):
            if name in font:
                glyphs.append(font[name])
                cur_max_errors.append(error)
        try:
            modified |= _glyphs_to_quadratic(
                glyphs, cur_max_errors, reverse_direction, stats)
        except IncompatibleGlyphsError as exc:
            # Collect all failures and report them together at the end.
            logger.error(exc)
            glyph_errors[name] = exc
    if glyph_errors:
        raise IncompatibleFontsError(glyph_errors)
    if modified and dump_stats:
        spline_lengths = sorted(stats.keys())
        logger.info('New spline lengths: %s' % (', '.join(
            '%s: %d' % (l, stats[l]) for l in spline_lengths)))
    if remember_curve_type:
        # Record the new curve type so future calls can short-circuit.
        for font in fonts:
            curve_type = font.lib.get(CURVE_TYPE_LIB_KEY, "cubic")
            if curve_type != "quadratic":
                font.lib[CURVE_TYPE_LIB_KEY] = "quadratic"
                modified = True
    return modified
def glyph_to_quadratic(glyph, **kwargs):
    """Convert one glyph's curves to quadratic.

    Convenience wrapper around glyphs_to_quadratic; all keyword arguments
    are forwarded.  Return True if the glyph was modified, else False.
    """
    return glyphs_to_quadratic([glyph], **kwargs)
def font_to_quadratic(font, **kwargs):
    """Convert one font's curves to quadratic.

    Convenience wrapper around fonts_to_quadratic; all keyword arguments
    are forwarded.  Return True if the font was modified, else False.
    """
    return fonts_to_quadratic([font], **kwargs)
| {
"content_hash": "b88c293ea66b893432e2c40145df07f8",
"timestamp": "",
"source": "github",
"line_count": 309,
"max_line_length": 80,
"avg_line_length": 34.1294498381877,
"alnum_prop": 0.650104304949744,
"repo_name": "googlei18n/cu2qu",
"id": "0b59187383d958bca19ab1c34559689813e3ba5c",
"size": "11144",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Lib/cu2qu/ufo.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "105382"
}
],
"symlink_target": ""
} |
import gzip
import inspect
import warnings
from io import BytesIO
from testfixtures import LogCapture
from twisted.trial import unittest
from scrapy import signals
from scrapy.settings import Settings
from scrapy.http import Request, Response, TextResponse, XmlResponse, HtmlResponse
from scrapy.spiders.init import InitSpider
from scrapy.spiders import Spider, BaseSpider, CrawlSpider, Rule, XMLFeedSpider, \
CSVFeedSpider, SitemapSpider
from scrapy.linkextractors import LinkExtractor
from scrapy.exceptions import ScrapyDeprecationWarning
from scrapy.utils.trackref import object_ref
from scrapy.utils.test import get_crawler
from tests import mock
class SpiderTest(unittest.TestCase):
    """Tests for the base Spider class.

    Subclasses below rerun this same suite against other spider classes
    by overriding ``spider_class``.
    """
    spider_class = Spider
    def setUp(self):
        # Surface every warning so deprecation checks can see them.
        warnings.simplefilter("always")
    def tearDown(self):
        warnings.resetwarnings()
    def test_base_spider(self):
        """A spider keeps its name and starts with empty start_urls."""
        spider = self.spider_class("example.com")
        self.assertEqual(spider.name, 'example.com')
        self.assertEqual(spider.start_urls, [])
    def test_start_requests(self):
        """start_requests() is a generator, empty without start_urls."""
        spider = self.spider_class('example.com')
        start_requests = spider.start_requests()
        self.assertTrue(inspect.isgenerator(start_requests))
        self.assertEqual(list(start_requests), [])
    def test_spider_args(self):
        """Constructor arguments are assigned to spider attributes"""
        spider = self.spider_class('example.com', foo='bar')
        self.assertEqual(spider.foo, 'bar')
    def test_spider_without_name(self):
        """Constructing a spider without a name raises ValueError."""
        self.assertRaises(ValueError, self.spider_class)
        self.assertRaises(ValueError, self.spider_class, somearg='foo')
    def test_deprecated_set_crawler_method(self):
        """set_crawler() warns but still wires up crawler and settings."""
        spider = self.spider_class('example.com')
        crawler = get_crawler()
        with warnings.catch_warnings(record=True) as w:
            spider.set_crawler(crawler)
            self.assertIn("set_crawler", str(w[0].message))
            self.assertTrue(hasattr(spider, 'crawler'))
            self.assertIs(spider.crawler, crawler)
            self.assertTrue(hasattr(spider, 'settings'))
            self.assertIs(spider.settings, crawler.settings)
    def test_from_crawler_crawler_and_settings_population(self):
        """from_crawler() attaches the crawler and its settings."""
        crawler = get_crawler()
        spider = self.spider_class.from_crawler(crawler, 'example.com')
        self.assertTrue(hasattr(spider, 'crawler'))
        self.assertIs(spider.crawler, crawler)
        self.assertTrue(hasattr(spider, 'settings'))
        self.assertIs(spider.settings, crawler.settings)
    def test_from_crawler_init_call(self):
        """from_crawler() forwards positional and keyword args to __init__."""
        with mock.patch.object(self.spider_class, '__init__',
                               return_value=None) as mock_init:
            self.spider_class.from_crawler(get_crawler(), 'example.com',
                                           foo='bar')
            mock_init.assert_called_once_with('example.com', foo='bar')
    def test_closed_signal_call(self):
        """The spider_closed signal triggers the spider's closed() hook."""
        class TestSpider(self.spider_class):
            closed_called = False
            def closed(self, reason):
                self.closed_called = True
        crawler = get_crawler()
        spider = TestSpider.from_crawler(crawler, 'example.com')
        crawler.signals.send_catch_log(signal=signals.spider_opened,
                                       spider=spider)
        crawler.signals.send_catch_log(signal=signals.spider_closed,
                                       spider=spider, reason=None)
        self.assertTrue(spider.closed_called)
    def test_update_settings(self):
        """custom_settings override project settings via update_settings()."""
        spider_settings = {'TEST1': 'spider', 'TEST2': 'spider'}
        project_settings = {'TEST1': 'project', 'TEST3': 'project'}
        self.spider_class.custom_settings = spider_settings
        settings = Settings(project_settings, priority='project')
        self.spider_class.update_settings(settings)
        self.assertEqual(settings.get('TEST1'), 'spider')
        self.assertEqual(settings.get('TEST2'), 'spider')
        self.assertEqual(settings.get('TEST3'), 'project')
    def test_logger(self):
        """spider.logger logs under the spider's name and tags the record."""
        spider = self.spider_class('example.com')
        with LogCapture() as l:
            spider.logger.info('test log msg')
        l.check(('example.com', 'INFO', 'test log msg'))
        record = l.records[0]
        self.assertIn('spider', record.__dict__)
        self.assertIs(record.spider, spider)
    def test_log(self):
        """spider.log() delegates to the spider's logger."""
        spider = self.spider_class('example.com')
        with mock.patch('scrapy.spiders.Spider.logger') as mock_logger:
            spider.log('test log msg', 'INFO')
        mock_logger.log.assert_called_once_with('INFO', 'test log msg')
class InitSpiderTest(SpiderTest):
    """Re-run the base Spider test suite against InitSpider."""
    spider_class = InitSpider
class XMLFeedSpiderTest(SpiderTest):
    """Base Spider tests against XMLFeedSpider, plus namespace handling."""
    spider_class = XMLFeedSpider
    def test_register_namespace(self):
        """Registered namespace prefixes are usable in parse_node xpaths
        for both the 'iternodes' and 'xml' iterators."""
        # NOTE(review): the markup below looks intentionally sloppy
        # (mismatched </loc> closing tags) -- presumably to exercise the
        # iterators' tolerance; confirm before "fixing" it.
        body = b"""<?xml version="1.0" encoding="UTF-8"?>
    <urlset xmlns:x="http://www.google.com/schemas/sitemap/0.84"
            xmlns:y="http://www.example.com/schemas/extras/1.0">
    <url><x:loc>http://www.example.com/Special-Offers.html</loc><y:updated>2009-08-16</updated><other value="bar" y:custom="fuu"/></url>
    <url><loc>http://www.example.com/</loc><y:updated>2009-08-16</updated><other value="foo"/></url>
    </urlset>"""
        response = XmlResponse(url='http://example.com/sitemap.xml', body=body)
        class _XMLSpider(self.spider_class):
            itertag = 'url'
            namespaces = (
                ('a', 'http://www.google.com/schemas/sitemap/0.84'),
                ('b', 'http://www.example.com/schemas/extras/1.0'),
            )
            def parse_node(self, response, selector):
                yield {
                    'loc': selector.xpath('a:loc/text()').extract(),
                    'updated': selector.xpath('b:updated/text()').extract(),
                    'other': selector.xpath('other/@value').extract(),
                    'custom': selector.xpath('other/@b:custom').extract(),
                }
        for iterator in ('iternodes', 'xml'):
            spider = _XMLSpider('example', iterator=iterator)
            output = list(spider.parse(response))
            self.assertEqual(len(output), 2, iterator)
            self.assertEqual(output, [
                {'loc': [u'http://www.example.com/Special-Offers.html'],
                 'updated': [u'2009-08-16'],
                 'custom': [u'fuu'],
                 'other': [u'bar']},
                {'loc': [],
                 'updated': [u'2009-08-16'],
                 'other': [u'foo'],
                 'custom': []},
            ], iterator)
class CSVFeedSpiderTest(SpiderTest):
    """Re-run the base Spider test suite against CSVFeedSpider."""
    spider_class = CSVFeedSpider
class CrawlSpiderTest(SpiderTest):
    """Tests for CrawlSpider's rule-based link extraction/following."""
    # Shared fixture page with three links (one in a "nofollow" section).
    test_body = b"""<html><head><title>Page title<title>
    <body>
    <p><a href="item/12.html">Item 12</a></p>
    <div class='links'>
    <p><a href="/about.html">About us</a></p>
    </div>
    <div>
    <p><a href="/nofollow.html">This shouldn't be followed</a></p>
    </div>
    </body></html>"""
    spider_class = CrawlSpider
    def test_process_links(self):
        """An identity process_links callback keeps all extracted links."""
        response = HtmlResponse("http://example.org/somepage/index.html",
                                body=self.test_body)
        class _CrawlSpider(self.spider_class):
            name="test"
            allowed_domains=['example.org']
            rules = (
                Rule(LinkExtractor(), process_links="dummy_process_links"),
            )
            def dummy_process_links(self, links):
                return links
        spider = _CrawlSpider()
        output = list(spider._requests_to_follow(response))
        self.assertEqual(len(output), 3)
        self.assertTrue(all(map(lambda r: isinstance(r, Request), output)))
        self.assertEqual([r.url for r in output],
                         ['http://example.org/somepage/item/12.html',
                          'http://example.org/about.html',
                          'http://example.org/nofollow.html'])
    def test_process_links_filter(self):
        """A filtering process_links callback drops the links it rejects."""
        response = HtmlResponse("http://example.org/somepage/index.html",
                                body=self.test_body)
        class _CrawlSpider(self.spider_class):
            import re
            name="test"
            allowed_domains=['example.org']
            rules = (
                Rule(LinkExtractor(), process_links="filter_process_links"),
            )
            _test_regex = re.compile('nofollow')
            def filter_process_links(self, links):
                return [link for link in links
                        if not self._test_regex.search(link.url)]
        spider = _CrawlSpider()
        output = list(spider._requests_to_follow(response))
        self.assertEqual(len(output), 2)
        self.assertTrue(all(map(lambda r: isinstance(r, Request), output)))
        self.assertEqual([r.url for r in output],
                         ['http://example.org/somepage/item/12.html',
                          'http://example.org/about.html'])
    def test_process_links_generator(self):
        """process_links may be a generator; its yielded links are used."""
        response = HtmlResponse("http://example.org/somepage/index.html",
                                body=self.test_body)
        class _CrawlSpider(self.spider_class):
            name="test"
            allowed_domains=['example.org']
            rules = (
                Rule(LinkExtractor(), process_links="dummy_process_links"),
            )
            def dummy_process_links(self, links):
                for link in links:
                    yield link
        spider = _CrawlSpider()
        output = list(spider._requests_to_follow(response))
        self.assertEqual(len(output), 3)
        self.assertTrue(all(map(lambda r: isinstance(r, Request), output)))
        self.assertEqual([r.url for r in output],
                         ['http://example.org/somepage/item/12.html',
                          'http://example.org/about.html',
                          'http://example.org/nofollow.html'])
    def test_follow_links_attribute_population(self):
        """The CRAWLSPIDER_FOLLOW_LINKS setting drives _follow_links."""
        crawler = get_crawler()
        spider = self.spider_class.from_crawler(crawler, 'example.com')
        self.assertTrue(hasattr(spider, '_follow_links'))
        self.assertTrue(spider._follow_links)
        settings_dict = {'CRAWLSPIDER_FOLLOW_LINKS': False}
        crawler = get_crawler(settings_dict=settings_dict)
        spider = self.spider_class.from_crawler(crawler, 'example.com')
        self.assertTrue(hasattr(spider, '_follow_links'))
        self.assertFalse(spider._follow_links)
    def test_follow_links_attribute_deprecated_population(self):
        """The deprecated set_crawler() path populates _follow_links too."""
        spider = self.spider_class('example.com')
        self.assertFalse(hasattr(spider, '_follow_links'))
        spider.set_crawler(get_crawler())
        self.assertTrue(hasattr(spider, '_follow_links'))
        self.assertTrue(spider._follow_links)
        spider = self.spider_class('example.com')
        settings_dict = {'CRAWLSPIDER_FOLLOW_LINKS': False}
        spider.set_crawler(get_crawler(settings_dict=settings_dict))
        self.assertTrue(hasattr(spider, '_follow_links'))
        self.assertFalse(spider._follow_links)
class SitemapSpiderTest(SpiderTest):
    """Tests for SitemapSpider's sitemap parsing helpers."""
    spider_class = SitemapSpider
    # Plain sitemap payload plus a gzip-compressed copy of it, built once
    # at class-creation time.
    BODY = b"SITEMAP"
    f = BytesIO()
    g = gzip.GzipFile(fileobj=f, mode='w+b')
    g.write(BODY)
    g.close()
    GZBODY = f.getvalue()
    def assertSitemapBody(self, response, body):
        """Assert that _get_sitemap_body(response) returns *body*."""
        spider = self.spider_class("example.com")
        self.assertEqual(spider._get_sitemap_body(response), body)
    def test_get_sitemap_body(self):
        """Only XML responses yield a sitemap body; HTML and generic
        responses yield None."""
        r = XmlResponse(url="http://www.example.com/", body=self.BODY)
        self.assertSitemapBody(r, self.BODY)
        r = HtmlResponse(url="http://www.example.com/", body=self.BODY)
        self.assertSitemapBody(r, None)
        r = Response(url="http://www.example.com/favicon.ico", body=self.BODY)
        self.assertSitemapBody(r, None)
    def test_get_sitemap_body_gzip_headers(self):
        """A gzip Content-Type triggers decompression of the body."""
        r = Response(url="http://www.example.com/sitemap", body=self.GZBODY,
                     headers={"content-type": "application/gzip"})
        self.assertSitemapBody(r, self.BODY)
    def test_get_sitemap_body_xml_url(self):
        """A .xml URL is accepted even for a plain text response."""
        r = TextResponse(url="http://www.example.com/sitemap.xml", body=self.BODY)
        self.assertSitemapBody(r, self.BODY)
def test_get_sitemap_body_xml_url_compressed(self):
r = Response(url="http://www.example.com/sitemap.xml.gz", body=self.GZBODY)
self.assertSitemapBody(r, self.BODY)
# .xml.gz but body decoded by HttpCompression middleware already
r = Response(url="http://www.example.com/sitemap.xml.gz", body=self.BODY)
self.assertSitemapBody(r, self.BODY)
def test_get_sitemap_urls_from_robotstxt(self):
robots = b"""# Sitemap files
Sitemap: http://example.com/sitemap.xml
Sitemap: http://example.com/sitemap-product-index.xml
Sitemap: HTTP://example.com/sitemap-uppercase.xml
Sitemap: /sitemap-relative-url.xml
"""
r = TextResponse(url="http://www.example.com/robots.txt", body=robots)
spider = self.spider_class("example.com")
self.assertEqual([req.url for req in spider._parse_sitemap(r)],
['http://example.com/sitemap.xml',
'http://example.com/sitemap-product-index.xml',
'http://example.com/sitemap-uppercase.xml',
'http://www.example.com/sitemap-relative-url.xml'])
def test_alternate_url_locs(self):
sitemap = b"""<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"
xmlns:xhtml="http://www.w3.org/1999/xhtml">
<url>
<loc>http://www.example.com/english/</loc>
<xhtml:link rel="alternate" hreflang="de"
href="http://www.example.com/deutsch/"/>
<xhtml:link rel="alternate" hreflang="de-ch"
href="http://www.example.com/schweiz-deutsch/"/>
<xhtml:link rel="alternate" hreflang="it"
href="http://www.example.com/italiano/"/>
<xhtml:link rel="alternate" hreflang="it"/><!-- wrong tag without href -->
</url>
</urlset>"""
r = TextResponse(url="http://www.example.com/sitemap.xml", body=sitemap)
spider = self.spider_class("example.com")
self.assertEqual([req.url for req in spider._parse_sitemap(r)],
['http://www.example.com/english/'])
spider.sitemap_alternate_links = True
self.assertEqual([req.url for req in spider._parse_sitemap(r)],
['http://www.example.com/english/',
'http://www.example.com/deutsch/',
'http://www.example.com/schweiz-deutsch/',
'http://www.example.com/italiano/'])
class DeprecationTest(unittest.TestCase):
    """Checks for the BaseSpider -> Spider rename compatibility shims."""

    def test_basespider_is_deprecated(self):
        """Subclassing BaseSpider emits exactly one ScrapyDeprecationWarning."""
        with warnings.catch_warnings(record=True) as w:
            class MySpider1(BaseSpider):
                pass

            self.assertEqual(len(w), 1)
            self.assertEqual(w[0].category, ScrapyDeprecationWarning)
            # The warning must point at the subclass definition line itself.
            self.assertEqual(w[0].lineno, inspect.getsourcelines(MySpider1)[1])

    def test_basespider_issubclass(self):
        """issubclass(..., BaseSpider) matches Spider subclasses only."""
        class MySpider2(Spider):
            pass

        class MySpider2a(MySpider2):
            pass

        class Foo(object):
            pass

        class Foo2(object_ref):
            pass

        assert issubclass(MySpider2, BaseSpider)
        assert issubclass(MySpider2a, BaseSpider)
        assert not issubclass(Foo, BaseSpider)
        assert not issubclass(Foo2, BaseSpider)

    def test_basespider_isinstance(self):
        """isinstance(..., BaseSpider) matches Spider instances only."""
        class MySpider3(Spider):
            name = 'myspider3'

        class MySpider3a(MySpider3):
            pass

        class Foo(object):
            pass

        class Foo2(object_ref):
            pass

        assert isinstance(MySpider3(), BaseSpider)
        assert isinstance(MySpider3a(), BaseSpider)
        assert not isinstance(Foo(), BaseSpider)
        assert not isinstance(Foo2(), BaseSpider)

    def test_crawl_spider(self):
        """CrawlSpider participates in both the new and legacy hierarchies."""
        assert issubclass(CrawlSpider, Spider)
        assert issubclass(CrawlSpider, BaseSpider)
        assert isinstance(CrawlSpider(name='foo'), Spider)
        assert isinstance(CrawlSpider(name='foo'), BaseSpider)

    def test_make_requests_from_url_deprecated(self):
        """Overriding make_requests_from_url warns but keeps working."""
        class MySpider4(Spider):
            name = 'spider1'
            start_urls = ['http://example.com']

        class MySpider5(Spider):
            name = 'spider2'
            start_urls = ['http://example.com']

            def make_requests_from_url(self, url):
                return Request(url + "/foo", dont_filter=True)

        with warnings.catch_warnings(record=True) as w:
            # spider without overridden make_requests_from_url method
            # doesn't issue a warning
            spider1 = MySpider4()
            self.assertEqual(len(list(spider1.start_requests())), 1)
            self.assertEqual(len(w), 0)

            # spider with overridden make_requests_from_url issues a warning,
            # but the method still works
            spider2 = MySpider5()
            requests = list(spider2.start_requests())
            self.assertEqual(len(requests), 1)
            self.assertEqual(requests[0].url, 'http://example.com/foo')
            self.assertEqual(len(w), 1)
class NoParseMethodSpiderTest(unittest.TestCase):
    """A bare Spider must refuse to handle responses until parse() is defined."""

    spider_class = Spider

    def test_undefined_parse_method(self):
        response = TextResponse(url="http://www.example.com/random_url",
                                body=b'Random text')
        spider = self.spider_class('example.com')
        with self.assertRaisesRegexp(NotImplementedError,
                                     'Spider.parse callback is not defined'):
            spider.parse(response)
| {
"content_hash": "4b87cc86f8b527c20576d60d7fabfbc5",
"timestamp": "",
"source": "github",
"line_count": 472,
"max_line_length": 140,
"avg_line_length": 38.02542372881356,
"alnum_prop": 0.601181190104747,
"repo_name": "Parlin-Galanodel/scrapy",
"id": "929e0fea84e91fbe27ca5ae7b67833b60b29d98b",
"size": "17948",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "tests/test_spider.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "2076"
},
{
"name": "Python",
"bytes": "1328016"
},
{
"name": "Roff",
"bytes": "2010"
},
{
"name": "Shell",
"bytes": "258"
}
],
"symlink_target": ""
} |
import ipaddress
from typing import Mapping, NoReturn, Optional, Sequence, Type, TypeVar, Union, overload
import types
import logging
import psycopg2
import unittest
import unittest.mock
import dataclasses as dc
from . import db
from .table import Table, TSchema
from .common import OrderParam, ResultDict, ResultsDict, SupportedTypes, ValueParam, WhereParam
from .schemadb import DB
# The real psycopg2 module, saved before the mock patch is installed.
_orig_psycopg2: Optional[types.ModuleType] = None
# Active unittest.mock patchers, kept referenced so they stay in effect
# for the lifetime of the process.
_mocks: dict[str, object] = {}

# A single table row; Rows is the full contents of a table.
Row = Sequence[SupportedTypes]
Rows = list[Row]
T = TypeVar('T')
def fail() -> NoReturn:
    """Sentinel used as a mock side effect: any call is an error."""
    raise Exception('Should not have been called')
def _patch_psycopg2() -> None:
    """Replace the db module's psycopg2 attribute with a mock; idempotent.

    The original module is stashed in _orig_psycopg2 and the patcher object
    in _mocks so the patch stays active for the rest of the process.
    """
    global _orig_psycopg2
    if _orig_psycopg2 is not None:
        # Already patched; do nothing.
        return
    _orig_psycopg2 = psycopg2
    # Any call through db.psycopg2 will raise via fail().
    connect = unittest.mock.patch.object(db, 'psycopg2', side_effect=fail, spec=True)
    connect.start()
    _mocks['connect'] = connect
class TestDB(DB):
    """In-memory fake of the schema DB, for unit tests.

    All data lives in plain Python structures; psycopg2 is mocked out so no
    real database connection is ever attempted.
    """

    __test__ = False

    # A dict of table -> column_names
    _columns: dict[str, Sequence[str]]
    # A dict of table -> contents where contents is a list(rows) of lists(columns)
    _data: dict[str, Rows]
    # A dict of seq_name -> value
    _sequences: dict[str, int]

    def __init__(self) -> None:
        # Patch psycopg2 before the parent class tries to "connect".
        _patch_psycopg2()
        super().__init__('NoDB', 'NoUser', 'NoPass', 'NoHost')
        self._columns = {}
        self._data = {}
        self._sequences = {}

    def _connect(self, dbname: str, dbuser: Optional[str], dbpass: Optional[str], dbhost: Optional[str],
                 dbport: Optional[int]) -> psycopg2.extensions.connection:
        """Return a MagicMock standing in for a real connection."""
        assert _orig_psycopg2 is not None
        ret = unittest.mock.MagicMock(_orig_psycopg2.extensions.connection)
        return ret

    def set_data(self, table: str, columns: Sequence[str], data: Rows) -> None:
        """Set both the schema (column names) and the contents of a table."""
        self.set_table_schema(table, columns)
        self._data[table] = [list(x) for x in data]

    def set_data_only(self, table: str, data: Rows) -> None:
        """Replace a table's contents without touching its schema."""
        self._data[table] = [list(x) for x in data]

    def get_data(self, table: str) -> list[tuple[SupportedTypes, ...]]:
        """Returns all rows of a table as tuples."""
        return [tuple(x) for x in self._data[table]]

    @overload
    def set_table_schema(self, table: str, columns: Sequence[str]) -> None:
        ...

    @overload
    def set_table_schema(self, table: str, columns: Type[T]) -> Table[T]:
        ...

    def set_table_schema(self, table: str, columns: Union[Sequence[str], Type[T]] = ()) -> Optional[Table[T]]:
        """Sets the table schema either from a dataclass or from a sequence of field names."""
        ret: Optional[Table[T]]
        if isinstance(columns, Sequence):
            if not columns:
                raise Exception('Empty columns list')
            self._columns[table] = columns
            # NOTE(review): this setdefault is repeated after the if/elif
            # chain below; the duplicate is harmless but redundant.
            self._data.setdefault(table, [])
            ret = None
        elif dc.is_dataclass(columns):
            # self._columns[table] = [x.name for x in dc.fields(columns)]
            # Get table sets the schema
            ret = self.get_table(table, columns)
        else:
            raise Exception(f'Bad column definition: {type(columns)}: {columns}')
        self._data.setdefault(table, [])
        return ret

    def get_table(self, table: str, schema: Type[TSchema]) -> Table[TSchema]:
        """Return a typed Table wrapper, recording its schema as a side effect."""
        # Use the opportunity to set the schema on get_table()
        self._columns[table] = [x.name for x in dc.fields(schema)]
        self._data.setdefault(table, [])
        return super().get_table(table, schema)

    def _read_raw(self, query: str, kwargs: Optional[dict[str, str]] = None) -> ResultsDict:
        # Raw SQL is not supported by the fake; tests must use the typed API.
        fail()

    def _exec(self, query: str, args: Optional[Mapping[str, SupportedTypes]] = None) -> psycopg2.extensions.cursor:
        # Raw SQL is not supported by the fake; tests must use the typed API.
        fail()

    def begin(self) -> None:
        # Transactions are a no-op in the fake.
        pass

    def commit(self) -> None:
        # Transactions are a no-op in the fake.
        pass

    def rollback(self) -> None:
        # Transactions are a no-op in the fake.
        pass

    def read_q(self, query: str, args: Optional[Mapping[str, SupportedTypes]] = None) -> ResultsDict:
        # Raw SQL is not supported by the fake; tests must use the typed API.
        fail()

    def _matches_where(self, row: ResultDict, where: Optional[WhereParam]) -> bool:
        """Return True when *row* satisfies every equality in *where*."""
        if not where:
            return True
        for k, v in where.items():
            if k not in row:
                raise Exception(f'Missing column {k}: {row}')
            if row[k] != v:
                return False
        return True

    def _sort_results(self, data: ResultsDict, sort: Optional[OrderParam]) -> None:
        """Sort rows in place by the given column names (no-op when sort is None)."""
        if sort is None:
            return

        def _key(x: ResultDict) -> tuple:
            assert sort is not None
            ret: list[Union[SupportedTypes, tuple]] = []
            for k in sort:
                # NOTE(review): 'x' is the whole row dict, which read_flat
                # never produces as None - this branch looks like it was
                # meant to handle x[k] being None; confirm intent.
                if x is None:
                    ret.append(None)
                # elif dc.is_dataclass(x):
                #     ret.append(getattr(x, k))
                elif isinstance(x, dict):
                    dt: Union[SupportedTypes, tuple] = x[k]
                    # Prefix IP values with their version so IPv4 and IPv6
                    # entries are mutually comparable during the sort.
                    if isinstance(dt, (ipaddress.IPv4Interface, ipaddress.IPv6Interface)):
                        dt = (dt.ip.version, dt)
                    ret.append(dt)
                else:
                    raise Exception('WTF?')
            return tuple(ret)

        data.sort(key=_key)

    def read_flat(self, table: str, where: Optional[WhereParam] = None,
                  sort: Optional[OrderParam] = None) -> ResultsDict:
        """Return matching rows as column-name dicts, optionally sorted."""
        if table not in self._columns:
            raise Exception(f'Unknown table: {table}')
        if table not in self._data:
            return []
        ret: ResultsDict = []
        for row in self._data[table]:
            zipped: dict[str, SupportedTypes] = dict(zip(self._columns[table], row))
            logging.debug('zipped %s: %s', table, zipped)
            if not self._matches_where(zipped, where):
                continue
            ret.append(zipped)
        logging.debug('Read from %s where %s: %s', table, where, ret)
        self._sort_results(ret, sort)
        return ret

    def table_exists(self, table: str) -> bool:
        """A table exists once its schema has been set."""
        return table in self._columns

    def get_seq(self, sequence: str) -> int:
        """Peek at a sequence's current value (defaults to 1)."""
        return self._sequences.get(sequence, 1)

    def get_seq_next(self, sequence: str) -> int:
        """Return the sequence's current value and advance it by one."""
        if sequence not in self._sequences:
            ret = 1
            self._sequences[sequence] = 2
        else:
            ret = self._sequences[sequence]
            self._sequences[sequence] += 1
        return ret

    def insert(self, table: str, values: ValueParam) -> int:
        """Append one row; unspecified columns are stored as None."""
        assert not (set(values.keys()) - set(self._columns[table])), \
            f'Bad columns in values: {values}, expected: {self._columns[table]}'
        logging.debug('FakeDB Insert to %s: %s', table, values)
        self._data[table].append([values.get(x, None) for x in self._columns[table]])
        return 1

    def update(self, table: str, values: ValueParam, where: WhereParam) -> int:
        """Update matching rows in place; return the number of rows changed."""
        if table not in self._columns:
            raise Exception(f'Unknown table: {table}')
        if table not in self._data:
            return 0
        ret = 0
        for idx, row in enumerate(self._data[table]):
            zipped = dict(zip(self._columns[table], row))
            if not self._matches_where(zipped, where):
                continue
            # The copy ensures that it's a list and not a tuple
            new_row = list(row)
            for idx2, column in enumerate(self._columns[table]):
                if column in values:
                    new_row[idx2] = values[column]
            self._data[table][idx] = new_row
            ret += 1
        return ret

    def delete(self, table: str, where: WhereParam) -> int:
        """Delete matching rows; return the number of rows removed."""
        data = []
        ret: int = 0
        for row in self._data[table]:
            zipped = dict(zip(self._columns[table], row))
            if self._matches_where(zipped, where):
                ret += 1
            else:
                data.append(row)
        self._data[table] = data
        return ret
| {
"content_hash": "dc777bcf061f43a713388e2ebbf15b70",
"timestamp": "",
"source": "github",
"line_count": 236,
"max_line_length": 115,
"avg_line_length": 33.601694915254235,
"alnum_prop": 0.5723833543505674,
"repo_name": "sharhalakis/vdns",
"id": "ca50d19eea2f0e798416ce9bc4394b447154e4b4",
"size": "9115",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/vdb/src/_vdb/db_testlib.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "646"
},
{
"name": "PLpgSQL",
"bytes": "13029"
},
{
"name": "Python",
"bytes": "278807"
}
],
"symlink_target": ""
} |
'''
author: Ross Kinsella
date: 2014/feb/11
'''
from django.db import models
from django.forms import ModelForm
from django.utils import timezone
from django.contrib.auth.models import User
from autoslug import AutoSlugField
class Society(models.Model):
    '''
    A master class which Accounts, Communication classes,
    Meetings, Minutes and Society members belong to.
    '''
    # NOTE: field declaration order matters for Django forms/admin ordering;
    # do not reorder these.
    name = models.CharField(max_length=50)
    members = models.ManyToManyField(User)
    # Users who have requested to join but are not members yet.
    member_requests = models.ManyToManyField(User, related_name='join_requests')
    creation_date = models.DateTimeField(default=timezone.now, editable=False)
    # URL-friendly identifier generated from the society name.
    slug = AutoSlugField('slug', populate_from='name', unique=True)

    def get_members(self):
        """Return all member usernames joined by ",\\n" (used in the admin)."""
        return ",\n".join([t.username for t in self.members.all()])
    get_members.short_description = 'Members'

    def __unicode__(self):
        # Python 2 string representation (Django 1.x era codebase).
        return self.name
class SocietyForm(ModelForm):
class Meta:
model = Society
fields = [ 'name' ] | {
"content_hash": "0169029587e26e6364b390095ca13197",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 76,
"avg_line_length": 27.38235294117647,
"alnum_prop": 0.748657357679914,
"repo_name": "CS2014/USM",
"id": "a994a37b20a19e3ad9a5024881ad3de670be6843",
"size": "931",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "usm/main/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "543131"
},
{
"name": "JavaScript",
"bytes": "2084691"
},
{
"name": "PHP",
"bytes": "172065"
},
{
"name": "Python",
"bytes": "159122"
}
],
"symlink_target": ""
} |
"""Generate metrics for a Github org's pull request status hooks.
Monitoring CI tools, by tracking latency of status on a pull request,
and pending counts.
cat requirements.txt
click
dateparser
c7n
requests
"""
import click
import dateparser
from collections import Counter
from datetime import datetime
from dateutil.parser import parse as parse_date
from dateutil.tz import tzutc
import jmespath
import logging
import requests
from c7n.config import Bag
from c7n.credentials import SessionFactory
from c7n.output import MetricsOutput
from c7n.utils import dumps
log = logging.getLogger('c7nops.cimetrics')
query = """
query($organization: String!) {
organization(login: $organization) {
repositories(first: 100, orderBy: {field: UPDATED_AT, direction: DESC}) {
nodes {
name
updatedAt
pullRequests(states: OPEN, last: 10) {
edges {
node {
commits(last: 1) {
nodes {
commit {
authoredDate
committedDate
pushedDate
status {
contexts {
targetUrl
description
createdAt
context
state
}
state
}
}
}
}
title
updatedAt
}
}
}
}
}
}
}
"""
class RepoMetrics(MetricsOutput):
    """MetricsOutput variant that tags every metric with per-repo dimensions.

    ``dims`` is set/cleared by the caller while iterating repositories so the
    same instance can emit metrics for many repos and for org-wide totals.
    """

    BUF_SIZE = 1000
    # Extra dimensions applied to every metric; None -> no extra dimensions.
    dims = None

    def _default_dimensions(self):
        return self.dims or {}

    def _format_metric(self, key, value, unit, dimensions):
        """Build a CloudWatch metric datum.

        Fix: merge the default dimensions into a copy instead of mutating the
        caller-supplied *dimensions* dict (the old ``dimensions.update(...)``
        leaked state back into the caller). Defaults still win on conflict
        and key order is preserved, so the emitted datum is unchanged.
        """
        merged = dict(dimensions)
        merged.update(self._default_dimensions())
        return {
            "MetricName": key,
            "Timestamp": self.get_timestamp(),
            "Value": value,
            'Dimensions': [{"Name": k, "Value": v} for k, v in merged.items()],
            "Unit": unit}
def process_commit(c, r, metrics, stats, since, now):
    """Record hook latency (or pending time) for one pull-request head commit."""
    # Find the oldest of the pr commit/dates
    # TODO find commit dates
    candidate_dates = (c['authoredDate'], c['committedDate'], c['pushedDate'])
    commit_date = parse_date(max(d for d in candidate_dates if d))
    if commit_date < since:
        return

    # Keep the timestamp of the last status context matching our hook,
    # if any was reported for this commit.
    status_time = None
    for status_ctx in c['status']['contexts']:
        if status_ctx['context'] == metrics.dims['Hook']:
            status_time = parse_date(status_ctx['createdAt'])

    if status_time is None:
        # Hook never reported: track how long the commit has been waiting.
        stats['missing'] += 1
        stats['missing_time'] += (now - commit_date).total_seconds()
    else:
        tdelta = (status_time - commit_date).total_seconds()
        metrics.put_metric('RepoHookLatency', tdelta, 'Seconds')
        stats['runtime'] += tdelta
        stats['count'] += 1
@click.group('status-metrics')
def cli():
    """github organization repository status hook metrics"""
    # Empty on purpose: click group entry point; subcommands attach below.
@cli.command()
@click.option('--organization', envvar="GITHUB_ORG",
              required=True, help="Github Organization")
@click.option('--hook-context', envvar="GITHUB_HOOK",
              required=True, help="Webhook context name")
@click.option('--github-url', envvar="GITHUB_API_URL",
              default='https://api.github.com/graphql')
@click.option('--github-token', envvar='GITHUB_TOKEN',
              help="Github credential token")
# NOTE(review): -v and -m are not declared with is_flag=True, so both
# currently require a value on the command line - confirm intent.
@click.option('-v', '--verbose', help="Verbose output")
@click.option('-m', '--metrics', help="Publish metrics")
@click.option('--assume', help="Assume a role for publishing metrics")
@click.option('--region', help="Region to target for metrics")
@click.option('--since', help="Look at pull requests/commits younger than",
              default="1 week")
def run(organization, hook_context, github_url, github_token,
        verbose, metrics=False, since=None, assume=None, region=None):
    """scan org repo status hooks"""
    # NOTE(review): 'verbose' is accepted but never used; logging is always
    # configured at DEBUG level.
    logging.basicConfig(level=logging.DEBUG)

    # Parse the human-friendly --since value into a timezone-aware UTC datetime.
    since = dateparser.parse(
        since, settings={
            'RETURN_AS_TIMEZONE_AWARE': True, 'TO_TIMEZONE': 'UTC'})
    headers = {"Authorization": "token {}".format(github_token)}

    response = requests.post(
        github_url, headers=headers,
        json={'query': query, 'variables': {'organization': organization}})
    result = response.json()
    # GraphQL can return HTTP 200 with an 'errors' payload, so check both.
    if response.status_code != 200 or 'errors' in result:
        raise Exception(
            "Query failed to run by returning code of {}. {}".format(
                response.status_code, response.content))

    now = datetime.utcnow().replace(tzinfo=tzutc())
    stats = Counter()
    repo_metrics = RepoMetrics(
        Bag(session_factory=SessionFactory(region, assume_role=assume)))

    for r in result['data']['organization']['repositories']['nodes']:
        commits = jmespath.search(
            'pullRequests.edges[].node[].commits[].nodes[].commit[]', r)
        if not commits:
            continue
        log.debug("processing repo: %s prs: %d", r['name'], len(commits))
        # Tag every metric from this repo with the hook and repo name.
        repo_metrics.dims = {
            'Hook': hook_context,
            'Repo': '{}/{}'.format(organization, r['name'])}
        # Each commit represents a separate pr
        for c in commits:
            process_commit(c, r, repo_metrics, stats, since, now)

    repo_metrics.dims = None
    # Org-wide aggregates for commits whose hook never reported a status.
    if stats['missing']:
        repo_metrics.put_metric(
            'RepoHookPending', stats['missing'], 'Count',
            Hook=hook_context)
        repo_metrics.put_metric(
            'RepoHookLatency', stats['missing_time'], 'Seconds',
            Hook=hook_context)

    if not metrics:
        # Dry run: dump the buffered metric data instead of publishing it.
        print(dumps(repo_metrics.buf, indent=2))
        return
    else:
        repo_metrics.BUF_SIZE = 20
        repo_metrics.flush()
if __name__ == '__main__':
    # Drop into a post-mortem debugger on any unhandled error.
    try:
        cli()
    except Exception:
        import pdb
        import sys
        import traceback
        traceback.print_exc()
        pdb.post_mortem(sys.exc_info()[-1])
| {
"content_hash": "89489f989fa83f86180981c74423b65c",
"timestamp": "",
"source": "github",
"line_count": 205,
"max_line_length": 77,
"avg_line_length": 29.458536585365852,
"alnum_prop": 0.5712866368604074,
"repo_name": "taohungyang/cloud-custodian",
"id": "3879abd3770c84a5eee80c1f689147355615036c",
"size": "6039",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/ops/org-pr-monitor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "517"
},
{
"name": "Go",
"bytes": "131325"
},
{
"name": "HTML",
"bytes": "31"
},
{
"name": "Makefile",
"bytes": "10146"
},
{
"name": "Python",
"bytes": "3444793"
},
{
"name": "Shell",
"bytes": "2294"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
import flask_socketio
from myapp.webserver.webapp import webapp
# Single shared Socket.IO server bound to the Flask application.
socketio = flask_socketio.SocketIO(webapp)
@socketio.on('connect', namespace='/')
def on_connect():
    """Log client connections on the default namespace."""
    print('SocketIO connect /')
@socketio.on('disconnect', namespace='/')
def on_disconnect():
    """Log client disconnections on the default namespace."""
    print('SocketIO disconnect /')
@socketio.on('connect', namespace='/browser')
def on_connect_browser():
    """Log browser-client connections on the /browser namespace."""
    print('SocketIO connect /browser')
@socketio.on('disconnect', namespace='/browser')
def on_disconnect_browser():
    """Log browser-client disconnections on the /browser namespace."""
    print('SocketIO disconnect /browser')
@socketio.on('connect', namespace='/worker')
def on_connect_worker():
    """Log worker-client connections on the /worker namespace."""
    print('SocketIO connect /worker')
@socketio.on('disconnect', namespace='/worker')
def on_disconnect_worker():
    """Log worker-client disconnections on the /worker namespace."""
    print('SocketIO disconnect /worker')
| {
"content_hash": "df3a60cd0659478fa16f42ff114d5c8f",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 48,
"avg_line_length": 21.72972972972973,
"alnum_prop": 0.7126865671641791,
"repo_name": "lukeyeager/flask-sqlalchemy-socketio-demo",
"id": "6997383082218f58e1d63a8a24e52eeffbaa58c9",
"size": "804",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "myapp/webserver/socketio.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "792"
},
{
"name": "Python",
"bytes": "10628"
}
],
"symlink_target": ""
} |
import os
# try/except added for compatibility with python < 3.8
try:
from unittest import mock
from unittest.mock import AsyncMock # pragma: NO COVER
except ImportError: # pragma: NO COVER
import mock
from collections.abc import Iterable
import json
import math
from google.api_core import (
future,
gapic_v1,
grpc_helpers,
grpc_helpers_async,
path_template,
)
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import extended_operation # type: ignore
import google.auth
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.oauth2 import service_account
from google.protobuf import json_format
import grpc
from grpc.experimental import aio
from proto.marshal.rules import wrappers
from proto.marshal.rules.dates import DurationRule, TimestampRule
import pytest
from requests import PreparedRequest, Request, Response
from requests.sessions import Session
from google.cloud.compute_v1.services.target_http_proxies import (
TargetHttpProxiesClient,
pagers,
transports,
)
from google.cloud.compute_v1.types import compute
def client_cert_source_callback():
    """Stub mTLS callback returning a dummy (cert, key) byte pair."""
    return (b"cert bytes", b"key bytes")
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
    """Return a sentinel endpoint for localhost clients so mTLS-endpoint
    derivation can be exercised; otherwise keep the client's default."""
    if "localhost" in client.DEFAULT_ENDPOINT:
        return "foo.googleapis.com"
    return client.DEFAULT_ENDPOINT
def test__get_default_mtls_endpoint():
    """_get_default_mtls_endpoint maps endpoints to their mTLS counterparts."""
    api_endpoint = "example.googleapis.com"
    api_mtls_endpoint = "example.mtls.googleapis.com"
    sandbox_endpoint = "example.sandbox.googleapis.com"
    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
    non_googleapi = "api.example.com"

    assert TargetHttpProxiesClient._get_default_mtls_endpoint(None) is None

    # (input, expected) pairs; mtls inputs and non-google hosts pass through.
    cases = [
        (api_endpoint, api_mtls_endpoint),
        (api_mtls_endpoint, api_mtls_endpoint),
        (sandbox_endpoint, sandbox_mtls_endpoint),
        (sandbox_mtls_endpoint, sandbox_mtls_endpoint),
        (non_googleapi, non_googleapi),
    ]
    for given, expected in cases:
        assert TargetHttpProxiesClient._get_default_mtls_endpoint(given) == expected
@pytest.mark.parametrize(
    "client_class,transport_name",
    [
        (TargetHttpProxiesClient, "rest"),
    ],
)
def test_target_http_proxies_client_from_service_account_info(
    client_class, transport_name
):
    """from_service_account_info() must adopt the parsed credentials and
    use the transport's default host."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(
        service_account.Credentials, "from_service_account_info"
    ) as factory:
        factory.return_value = creds
        info = {"valid": True}
        client = client_class.from_service_account_info(info, transport=transport_name)
        # The client must hold exactly the credentials the factory produced.
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        assert client.transport._host == (
            "compute.googleapis.com:443"
            if transport_name in ["grpc", "grpc_asyncio"]
            else "https://compute.googleapis.com"
        )
@pytest.mark.parametrize(
    "transport_class,transport_name",
    [
        (transports.TargetHttpProxiesRestTransport, "rest"),
    ],
)
def test_target_http_proxies_client_service_account_always_use_jwt(
    transport_class, transport_name
):
    """The always_use_jwt_access flag must be forwarded to the credentials."""
    with mock.patch.object(
        service_account.Credentials, "with_always_use_jwt_access", create=True
    ) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=True)
        use_jwt.assert_called_once_with(True)

    # When the flag is off, the credentials must be left untouched.
    with mock.patch.object(
        service_account.Credentials, "with_always_use_jwt_access", create=True
    ) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=False)
        use_jwt.assert_not_called()
@pytest.mark.parametrize(
    "client_class,transport_name",
    [
        (TargetHttpProxiesClient, "rest"),
    ],
)
def test_target_http_proxies_client_from_service_account_file(
    client_class, transport_name
):
    """Both from_service_account_file() and its _json alias must work."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(
        service_account.Credentials, "from_service_account_file"
    ) as factory:
        factory.return_value = creds
        client = client_class.from_service_account_file(
            "dummy/file/path.json", transport=transport_name
        )
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        # from_service_account_json is an alias of from_service_account_file.
        client = client_class.from_service_account_json(
            "dummy/file/path.json", transport=transport_name
        )
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        assert client.transport._host == (
            "compute.googleapis.com:443"
            if transport_name in ["grpc", "grpc_asyncio"]
            else "https://compute.googleapis.com"
        )
def test_target_http_proxies_client_get_transport_class():
    """Both the default and the explicit "rest" lookup resolve to REST."""
    default_transport = TargetHttpProxiesClient.get_transport_class()
    assert default_transport in [
        transports.TargetHttpProxiesRestTransport,
    ]

    rest_transport = TargetHttpProxiesClient.get_transport_class("rest")
    assert rest_transport == transports.TargetHttpProxiesRestTransport
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [
        (TargetHttpProxiesClient, transports.TargetHttpProxiesRestTransport, "rest"),
    ],
)
@mock.patch.object(
    TargetHttpProxiesClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(TargetHttpProxiesClient),
)
def test_target_http_proxies_client_client_options(
    client_class, transport_class, transport_name
):
    """Exercise client_options and environment-variable handling at
    client construction time."""
    # Check that if channel is provided we won't create a new one.
    with mock.patch.object(TargetHttpProxiesClient, "get_transport_class") as gtc:
        transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
        client = client_class(transport=transport)
        gtc.assert_not_called()

    # Check that if channel is provided via str we will create a new one.
    with mock.patch.object(TargetHttpProxiesClient, "get_transport_class") as gtc:
        client = client_class(transport=transport_name)
        gtc.assert_called()

    # Check the case api_endpoint is provided.
    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "never".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "always".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_MTLS_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
    # unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError):
            client = client_class(transport=transport_name)

    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
    ):
        with pytest.raises(ValueError):
            client = client_class(transport=transport_name)

    # Check the case quota_project_id is provided
    options = client_options.ClientOptions(quota_project_id="octopus")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id="octopus",
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )

    # Check the case api_endpoint is provided
    options = client_options.ClientOptions(
        api_audience="https://language.googleapis.com"
    )
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience="https://language.googleapis.com",
        )
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name,use_client_cert_env",
    [
        (
            TargetHttpProxiesClient,
            transports.TargetHttpProxiesRestTransport,
            "rest",
            "true",
        ),
        (
            TargetHttpProxiesClient,
            transports.TargetHttpProxiesRestTransport,
            "rest",
            "false",
        ),
    ],
)
@mock.patch.object(
    TargetHttpProxiesClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(TargetHttpProxiesClient),
)
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_target_http_proxies_client_mtls_env_auto(
    client_class, transport_class, transport_name, use_client_cert_env
):
    """With GOOGLE_API_USE_MTLS_ENDPOINT=auto, endpoint and cert selection
    follow GOOGLE_API_USE_CLIENT_CERTIFICATE and cert availability."""
    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.

    # Check the case client_cert_source is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        options = client_options.ClientOptions(
            client_cert_source=client_cert_source_callback
        )
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(client_options=options, transport=transport_name)

            if use_client_cert_env == "false":
                expected_client_cert_source = None
                expected_host = client.DEFAULT_ENDPOINT
            else:
                expected_client_cert_source = client_cert_source_callback
                expected_host = client.DEFAULT_MTLS_ENDPOINT

            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=expected_host,
                scopes=None,
                client_cert_source_for_mtls=expected_client_cert_source,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )

    # Check the case ADC client cert is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        with mock.patch.object(transport_class, "__init__") as patched:
            with mock.patch(
                "google.auth.transport.mtls.has_default_client_cert_source",
                return_value=True,
            ):
                with mock.patch(
                    "google.auth.transport.mtls.default_client_cert_source",
                    return_value=client_cert_source_callback,
                ):
                    if use_client_cert_env == "false":
                        expected_host = client.DEFAULT_ENDPOINT
                        expected_client_cert_source = None
                    else:
                        expected_host = client.DEFAULT_MTLS_ENDPOINT
                        expected_client_cert_source = client_cert_source_callback

                    patched.return_value = None
                    client = client_class(transport=transport_name)
                    patched.assert_called_once_with(
                        credentials=None,
                        credentials_file=None,
                        host=expected_host,
                        scopes=None,
                        client_cert_source_for_mtls=expected_client_cert_source,
                        quota_project_id=None,
                        client_info=transports.base.DEFAULT_CLIENT_INFO,
                        always_use_jwt_access=True,
                        api_audience=None,
                    )

    # Check the case client_cert_source and ADC client cert are not provided.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        with mock.patch.object(transport_class, "__init__") as patched:
            with mock.patch(
                "google.auth.transport.mtls.has_default_client_cert_source",
                return_value=False,
            ):
                patched.return_value = None
                client = client_class(transport=transport_name)
                patched.assert_called_once_with(
                    credentials=None,
                    credentials_file=None,
                    host=client.DEFAULT_ENDPOINT,
                    scopes=None,
                    client_cert_source_for_mtls=None,
                    quota_project_id=None,
                    client_info=transports.base.DEFAULT_CLIENT_INFO,
                    always_use_jwt_access=True,
                    api_audience=None,
                )
@pytest.mark.parametrize("client_class", [TargetHttpProxiesClient])
@mock.patch.object(
    TargetHttpProxiesClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(TargetHttpProxiesClient),
)
def test_target_http_proxies_client_get_mtls_endpoint_and_cert_source(client_class):
    """Exercise get_mtls_endpoint_and_cert_source across the env-var matrix.

    Covers GOOGLE_API_USE_CLIENT_CERTIFICATE "true"/"false" with explicit
    client options, and GOOGLE_API_USE_MTLS_ENDPOINT "never"/"always"/"auto"
    (the "auto" cases both with and without a default client cert available).
    """
    mock_client_cert_source = mock.Mock()

    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        mock_api_endpoint = "foo"
        options = client_options.ClientOptions(
            client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint
        )
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(
            options
        )
        assert api_endpoint == mock_api_endpoint
        assert cert_source == mock_client_cert_source

    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
        mock_client_cert_source = mock.Mock()
        mock_api_endpoint = "foo"
        options = client_options.ClientOptions(
            client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint
        )
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(
            options
        )
        assert api_endpoint == mock_api_endpoint
        # "false" must suppress the client cert even though one was supplied.
        assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
        assert api_endpoint == client_class.DEFAULT_ENDPOINT
        assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
        assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
        assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch(
            "google.auth.transport.mtls.has_default_client_cert_source",
            return_value=False,
        ):
            api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
            assert api_endpoint == client_class.DEFAULT_ENDPOINT
            assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch(
            "google.auth.transport.mtls.has_default_client_cert_source",
            return_value=True,
        ):
            with mock.patch(
                "google.auth.transport.mtls.default_client_cert_source",
                return_value=mock_client_cert_source,
            ):
                (
                    api_endpoint,
                    cert_source,
                ) = client_class.get_mtls_endpoint_and_cert_source()
                assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
                assert cert_source == mock_client_cert_source
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [
        (TargetHttpProxiesClient, transports.TargetHttpProxiesRestTransport, "rest"),
    ],
)
def test_target_http_proxies_client_client_options_scopes(
    client_class, transport_class, transport_name
):
    """Scopes passed via client_options must be forwarded to the transport."""
    # Check the case scopes are provided.
    options = client_options.ClientOptions(
        scopes=["1", "2"],
    )
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=["1", "2"],
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name,grpc_helpers",
    [
        (
            TargetHttpProxiesClient,
            transports.TargetHttpProxiesRestTransport,
            "rest",
            None,
        ),
    ],
)
def test_target_http_proxies_client_client_options_credentials_file(
    client_class, transport_class, transport_name, grpc_helpers
):
    """A credentials file given via client_options must reach the transport."""
    # Check the case credentials file is provided.
    options = client_options.ClientOptions(credentials_file="credentials.json")

    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )
@pytest.mark.parametrize(
    "request_type",
    [
        compute.AggregatedListTargetHttpProxiesRequest,
        dict,
    ],
)
def test_aggregated_list_rest(request_type):
    """Happy-path aggregated_list over REST.

    Fakes a 200 response at the session level and checks that the returned
    pager exposes the top-level list fields from the faked response body.
    """
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {"project": "sample1"}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.TargetHttpProxyAggregatedList(
            id="id_value",
            kind="kind_value",
            next_page_token="next_page_token_value",
            self_link="self_link_value",
            unreachables=["unreachables_value"],
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.TargetHttpProxyAggregatedList.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.aggregated_list(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.AggregatedListPager)
    assert response.id == "id_value"
    assert response.kind == "kind_value"
    assert response.next_page_token == "next_page_token_value"
    assert response.self_link == "self_link_value"
    assert response.unreachables == ["unreachables_value"]
def test_aggregated_list_rest_required_fields(
    request_type=compute.AggregatedListTargetHttpProxiesRequest,
):
    """Required-field plumbing for aggregated_list.

    Verifies that default-valued fields are dropped from the JSON request,
    that "project" survives once set, and that a transcode-mocked call sends
    no unexpected query params.
    """
    transport_class = transports.TargetHttpProxiesRestTransport

    request_init = {}
    request_init["project"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(
        json_format.MessageToJson(
            pb_request,
            including_default_value_fields=False,
            use_integers_for_enums=False,
        )
    )

    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).aggregated_list._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present
    jsonified_request["project"] = "project_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).aggregated_list._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(
        (
            "filter",
            "include_all_scopes",
            "max_results",
            "order_by",
            "page_token",
            "return_partial_success",
        )
    )
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"

    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.TargetHttpProxyAggregatedList()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "get",
                "query_params": pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            pb_return_value = compute.TargetHttpProxyAggregatedList.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.aggregated_list(request)

            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_aggregated_list_rest_unset_required_fields():
    """aggregated_list's optional query params must not overlap path params."""
    transport = transports.TargetHttpProxiesRestTransport(
        # Fix: pass an AnonymousCredentials *instance*, not the class —
        # consistent with every other transport construction in this file.
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.aggregated_list._get_unset_required_fields({})
    # Intersection of the optional query params with the required path
    # param ("project") must be empty.
    assert set(unset_fields) == (
        set(
            (
                "filter",
                "includeAllScopes",
                "maxResults",
                "orderBy",
                "pageToken",
                "returnPartialSuccess",
            )
        )
        & set(("project",))
    )
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_aggregated_list_rest_interceptors(null_interceptor):
    """pre/post interceptor hooks must each fire exactly once per aggregated_list call."""
    transport = transports.TargetHttpProxiesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None
        if null_interceptor
        else transports.TargetHttpProxiesRestInterceptor(),
    )
    client = TargetHttpProxiesClient(transport=transport)
    with mock.patch.object(
        type(client.transport._session), "request"
    ) as req, mock.patch.object(
        path_template, "transcode"
    ) as transcode, mock.patch.object(
        transports.TargetHttpProxiesRestInterceptor, "post_aggregated_list"
    ) as post, mock.patch.object(
        transports.TargetHttpProxiesRestInterceptor, "pre_aggregated_list"
    ) as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.AggregatedListTargetHttpProxiesRequest.pb(
            compute.AggregatedListTargetHttpProxiesRequest()
        )
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.TargetHttpProxyAggregatedList.to_json(
            compute.TargetHttpProxyAggregatedList()
        )

        request = compute.AggregatedListTargetHttpProxiesRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.TargetHttpProxyAggregatedList()

        client.aggregated_list(
            request,
            metadata=[
                ("key", "val"),
                ("cephalopod", "squid"),
            ],
        )

        pre.assert_called_once()
        post.assert_called_once()
def test_aggregated_list_rest_bad_request(
    transport: str = "rest", request_type=compute.AggregatedListTargetHttpProxiesRequest
):
    """An HTTP 400 from the server must surface as core_exceptions.BadRequest."""
    client = TargetHttpProxiesClient(
        transport=transport,
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # A request that satisfies URL transcoding.
    request = request_type(**{"project": "sample1"})

    # Fake a 400 at the session level and expect the mapped exception.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.aggregated_list(request)
def test_aggregated_list_rest_flattened():
    """Flattened args must be transcoded into the expected aggregated-list URL."""
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.TargetHttpProxyAggregatedList()

        # get arguments that satisfy an http rule for this method
        sample_request = {"project": "sample1"}

        # get truthy value for each flattened field
        mock_args = dict(
            project="project_value",
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.TargetHttpProxyAggregatedList.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value

        client.aggregated_list(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "%s/compute/v1/projects/{project}/aggregated/targetHttpProxies"
            % client.transport._host,
            args[1],
        )
def test_aggregated_list_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = TargetHttpProxiesClient(
        transport=transport,
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Supplying both forms at once is ambiguous and rejected up front.
    with pytest.raises(ValueError):
        client.aggregated_list(
            compute.AggregatedListTargetHttpProxiesRequest(),
            project="project_value",
        )
def test_aggregated_list_rest_pager(transport: str = "rest"):
    """Pagination over aggregated_list: item flattening, key lookup, and page tokens.

    Feeds four fake pages twice (two logical calls) and checks that the pager
    yields (key, scoped-list) tuples, that dict-style get() reflects only the
    current page, and that raw page tokens come back in order.
    """
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # TODO(kbandes): remove this mock unless there's a good reason for it.
        # with mock.patch.object(path_template, 'transcode') as transcode:
        # Set the response as a series of pages
        response = (
            compute.TargetHttpProxyAggregatedList(
                items={
                    "a": compute.TargetHttpProxiesScopedList(),
                    "b": compute.TargetHttpProxiesScopedList(),
                    "c": compute.TargetHttpProxiesScopedList(),
                },
                next_page_token="abc",
            ),
            compute.TargetHttpProxyAggregatedList(
                items={},
                next_page_token="def",
            ),
            compute.TargetHttpProxyAggregatedList(
                items={
                    "g": compute.TargetHttpProxiesScopedList(),
                },
                next_page_token="ghi",
            ),
            compute.TargetHttpProxyAggregatedList(
                items={
                    "h": compute.TargetHttpProxiesScopedList(),
                    "i": compute.TargetHttpProxiesScopedList(),
                },
            ),
        )
        # Two responses for two calls
        response = response + response

        # Wrap the values into proper Response objs
        response = tuple(
            compute.TargetHttpProxyAggregatedList.to_json(x) for x in response
        )
        return_values = tuple(Response() for i in response)
        for return_val, response_val in zip(return_values, response):
            return_val._content = response_val.encode("UTF-8")
            return_val.status_code = 200
        req.side_effect = return_values

        sample_request = {"project": "sample1"}

        pager = client.aggregated_list(request=sample_request)
        # Before iteration the pager sits on the first page: "a" present, "h" not yet.
        assert isinstance(pager.get("a"), compute.TargetHttpProxiesScopedList)
        assert pager.get("h") is None

        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(i, tuple) for i in results)
        for result in results:
            assert isinstance(result, tuple)
            assert tuple(type(t) for t in result) == (
                str,
                compute.TargetHttpProxiesScopedList,
            )

        # After exhausting the pager it sits on the last page: "h" present, "a" gone.
        assert pager.get("a") is None
        assert isinstance(pager.get("h"), compute.TargetHttpProxiesScopedList)

        pages = list(client.aggregated_list(request=sample_request).pages)
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
@pytest.mark.parametrize(
    "request_type",
    [
        compute.DeleteTargetHttpProxyRequest,
        dict,
    ],
)
def test_delete_rest(request_type):
    """Happy-path delete over REST.

    Fakes a 200 Operation response and checks that every Operation field is
    surfaced on the returned ExtendedOperation.
    """
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "target_http_proxy": "sample2"}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.delete(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, extended_operation.ExtendedOperation)
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_delete_rest_required_fields(request_type=compute.DeleteTargetHttpProxyRequest):
    """Required-field plumbing for delete.

    Verifies that defaults are dropped, "project"/"targetHttpProxy" survive
    once set, only "request_id" remains optional, and a transcode-mocked call
    sends no unexpected query params.
    """
    transport_class = transports.TargetHttpProxiesRestTransport

    request_init = {}
    request_init["project"] = ""
    request_init["target_http_proxy"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(
        json_format.MessageToJson(
            pb_request,
            including_default_value_fields=False,
            use_integers_for_enums=False,
        )
    )

    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).delete._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present
    jsonified_request["project"] = "project_value"
    jsonified_request["targetHttpProxy"] = "target_http_proxy_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).delete._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id",))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "targetHttpProxy" in jsonified_request
    assert jsonified_request["targetHttpProxy"] == "target_http_proxy_value"

    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "delete",
                "query_params": pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            pb_return_value = compute.Operation.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.delete(request)

            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_delete_rest_unset_required_fields():
    """delete's optional query param must not overlap the required path params."""
    transport = transports.TargetHttpProxiesRestTransport(
        # Fix: pass an AnonymousCredentials *instance*, not the class —
        # consistent with every other transport construction in this file.
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.delete._get_unset_required_fields({})
    # Intersection of the optional query param with the required path
    # params must be empty.
    assert set(unset_fields) == (
        set(("requestId",))
        & set(
            (
                "project",
                "targetHttpProxy",
            )
        )
    )
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_delete_rest_interceptors(null_interceptor):
    """pre/post interceptor hooks must each fire exactly once per delete call."""
    transport = transports.TargetHttpProxiesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None
        if null_interceptor
        else transports.TargetHttpProxiesRestInterceptor(),
    )
    client = TargetHttpProxiesClient(transport=transport)
    with mock.patch.object(
        type(client.transport._session), "request"
    ) as req, mock.patch.object(
        path_template, "transcode"
    ) as transcode, mock.patch.object(
        transports.TargetHttpProxiesRestInterceptor, "post_delete"
    ) as post, mock.patch.object(
        transports.TargetHttpProxiesRestInterceptor, "pre_delete"
    ) as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.DeleteTargetHttpProxyRequest.pb(
            compute.DeleteTargetHttpProxyRequest()
        )
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.Operation.to_json(compute.Operation())

        request = compute.DeleteTargetHttpProxyRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Operation()

        client.delete(
            request,
            metadata=[
                ("key", "val"),
                ("cephalopod", "squid"),
            ],
        )

        pre.assert_called_once()
        post.assert_called_once()
def test_delete_rest_bad_request(
    transport: str = "rest", request_type=compute.DeleteTargetHttpProxyRequest
):
    """An HTTP 400 from the server must surface as core_exceptions.BadRequest."""
    client = TargetHttpProxiesClient(
        transport=transport,
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # A request that satisfies URL transcoding.
    request = request_type(**{"project": "sample1", "target_http_proxy": "sample2"})

    # Fake a 400 at the session level and expect the mapped exception.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.delete(request)
def test_delete_rest_flattened():
    """Flattened args must be transcoded into the expected delete URL."""
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()

        # get arguments that satisfy an http rule for this method
        sample_request = {"project": "sample1", "target_http_proxy": "sample2"}

        # get truthy value for each flattened field
        mock_args = dict(
            project="project_value",
            target_http_proxy="target_http_proxy_value",
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value

        client.delete(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "%s/compute/v1/projects/{project}/global/targetHttpProxies/{target_http_proxy}"
            % client.transport._host,
            args[1],
        )
def test_delete_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = TargetHttpProxiesClient(
        transport=transport,
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Supplying both forms at once is ambiguous and rejected up front.
    with pytest.raises(ValueError):
        client.delete(
            compute.DeleteTargetHttpProxyRequest(),
            project="project_value",
            target_http_proxy="target_http_proxy_value",
        )
def test_delete_rest_error():
    """Smoke test: a REST-transport client can be constructed."""
    creds = ga_credentials.AnonymousCredentials()
    client = TargetHttpProxiesClient(credentials=creds, transport="rest")
@pytest.mark.parametrize(
    "request_type",
    [
        compute.DeleteTargetHttpProxyRequest,
        dict,
    ],
)
def test_delete_unary_rest(request_type):
    """Happy-path delete_unary over REST: returns the raw compute.Operation."""
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "target_http_proxy": "sample2"}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.delete_unary(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)
def test_delete_unary_rest_required_fields(
    request_type=compute.DeleteTargetHttpProxyRequest,
):
    """Required-field plumbing for delete_unary (same contract as delete).

    Verifies that defaults are dropped, "project"/"targetHttpProxy" survive
    once set, only "request_id" remains optional, and a transcode-mocked call
    sends no unexpected query params.
    """
    transport_class = transports.TargetHttpProxiesRestTransport

    request_init = {}
    request_init["project"] = ""
    request_init["target_http_proxy"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(
        json_format.MessageToJson(
            pb_request,
            including_default_value_fields=False,
            use_integers_for_enums=False,
        )
    )

    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).delete._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present
    jsonified_request["project"] = "project_value"
    jsonified_request["targetHttpProxy"] = "target_http_proxy_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).delete._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id",))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "targetHttpProxy" in jsonified_request
    assert jsonified_request["targetHttpProxy"] == "target_http_proxy_value"

    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "delete",
                "query_params": pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            pb_return_value = compute.Operation.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.delete_unary(request)

            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_delete_unary_rest_unset_required_fields():
    """delete_unary's optional query param must not overlap the required path params."""
    transport = transports.TargetHttpProxiesRestTransport(
        # Fix: pass an AnonymousCredentials *instance*, not the class —
        # consistent with every other transport construction in this file.
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.delete._get_unset_required_fields({})
    # Intersection of the optional query param with the required path
    # params must be empty.
    assert set(unset_fields) == (
        set(("requestId",))
        & set(
            (
                "project",
                "targetHttpProxy",
            )
        )
    )
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_delete_unary_rest_interceptors(null_interceptor):
    """pre/post interceptor hooks must each fire exactly once per delete_unary call."""
    transport = transports.TargetHttpProxiesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None
        if null_interceptor
        else transports.TargetHttpProxiesRestInterceptor(),
    )
    client = TargetHttpProxiesClient(transport=transport)
    with mock.patch.object(
        type(client.transport._session), "request"
    ) as req, mock.patch.object(
        path_template, "transcode"
    ) as transcode, mock.patch.object(
        transports.TargetHttpProxiesRestInterceptor, "post_delete"
    ) as post, mock.patch.object(
        transports.TargetHttpProxiesRestInterceptor, "pre_delete"
    ) as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.DeleteTargetHttpProxyRequest.pb(
            compute.DeleteTargetHttpProxyRequest()
        )
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.Operation.to_json(compute.Operation())

        request = compute.DeleteTargetHttpProxyRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Operation()

        client.delete_unary(
            request,
            metadata=[
                ("key", "val"),
                ("cephalopod", "squid"),
            ],
        )

        pre.assert_called_once()
        post.assert_called_once()
def test_delete_unary_rest_bad_request(
    transport: str = "rest", request_type=compute.DeleteTargetHttpProxyRequest
):
    """An HTTP 400 from the server must surface as core_exceptions.BadRequest."""
    client = TargetHttpProxiesClient(
        transport=transport,
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # A request that satisfies URL transcoding.
    request = request_type(**{"project": "sample1", "target_http_proxy": "sample2"})

    # Fake a 400 at the session level and expect the mapped exception.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.delete_unary(request)
def test_delete_unary_rest_flattened():
    """Flattened kwargs for ``delete_unary`` must hit the expected URL."""
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    with mock.patch.object(type(client.transport._session), "request") as req:
        # Canned Operation payload served by the mocked HTTP layer.
        payload = json_format.MessageToJson(compute.Operation.pb(compute.Operation()))
        ok_response = Response()
        ok_response.status_code = 200
        ok_response._content = payload.encode("UTF-8")
        req.return_value = ok_response

        # Truthy flattened fields, then override with path params that
        # satisfy the http rule for this method.
        call_kwargs = dict(
            project="project_value",
            target_http_proxy="target_http_proxy_value",
        )
        call_kwargs.update({"project": "sample1", "target_http_proxy": "sample2"})
        client.delete_unary(**call_kwargs)

        # Exactly one HTTP call, routed through the expected URI template.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "%s/compute/v1/projects/{project}/global/targetHttpProxies/{target_http_proxy}"
            % client.transport._host,
            args[1],
        )
def test_delete_unary_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    request = compute.DeleteTargetHttpProxyRequest()
    with pytest.raises(ValueError):
        # Supplying both calling conventions at once is rejected.
        client.delete_unary(
            request,
            project="project_value",
            target_http_proxy="target_http_proxy_value",
        )
def test_delete_unary_rest_error():
    """Smoke test: constructing a REST client must not raise."""
    TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
@pytest.mark.parametrize(
    "request_type",
    [
        compute.GetTargetHttpProxyRequest,
        dict,
    ],
)
def test_get_rest(request_type):
    """Happy-path ``get`` over the REST transport.

    The HTTP session is mocked (no network traffic); the test checks that
    the canned JSON payload deserializes into a ``TargetHttpProxy`` with
    every field preserved.
    """
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "target_http_proxy": "sample2"}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.TargetHttpProxy(
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            fingerprint="fingerprint_value",
            id=205,
            kind="kind_value",
            name="name_value",
            proxy_bind=True,
            region="region_value",
            self_link="self_link_value",
            url_map="url_map_value",
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.TargetHttpProxy.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.get(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.TargetHttpProxy)
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.fingerprint == "fingerprint_value"
    assert response.id == 205
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.proxy_bind is True
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.url_map == "url_map_value"
def test_get_rest_required_fields(request_type=compute.GetTargetHttpProxyRequest):
    """Check required-field handling for ``get``.

    Verifies that unset required fields are dropped from the serialized
    request, that re-populated values survive the round trip, and that the
    final (mock-transcoded) HTTP call carries no unexpected query params.
    """
    transport_class = transports.TargetHttpProxiesRestTransport

    request_init = {}
    request_init["project"] = ""
    request_init["target_http_proxy"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(
        json_format.MessageToJson(
            pb_request,
            including_default_value_fields=False,
            use_integers_for_enums=False,
        )
    )

    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).get._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present
    jsonified_request["project"] = "project_value"
    jsonified_request["targetHttpProxy"] = "target_http_proxy_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).get._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "targetHttpProxy" in jsonified_request
    assert jsonified_request["targetHttpProxy"] == "target_http_proxy_value"

    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.TargetHttpProxy()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "get",
                "query_params": pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            pb_return_value = compute.TargetHttpProxy.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)
            response_value._content = json_return_value.encode("UTF-8")

            req.return_value = response_value

            response = client.get(request)

            # No query params are expected beyond what transcode produced.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_get_rest_unset_required_fields():
    """``get`` reports an empty set of unset optional query params.

    The reported set is the intersection of the method's optional query
    parameters (none for ``get``) with its required fields, i.e. empty.
    """
    transport = transports.TargetHttpProxiesRestTransport(
        # Fix: instantiate the credentials. The original passed the class
        # object itself (missing parentheses), inconsistent with every
        # other call site in this file.
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.get._get_unset_required_fields({})
    assert set(unset_fields) == (
        set(())
        & set(
            (
                "project",
                "targetHttpProxy",
            )
        )
    )
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_get_rest_interceptors(null_interceptor):
transport = transports.TargetHttpProxiesRestTransport(
credentials=ga_credentials.AnonymousCredentials(),
interceptor=None
if null_interceptor
else transports.TargetHttpProxiesRestInterceptor(),
)
client = TargetHttpProxiesClient(transport=transport)
with mock.patch.object(
type(client.transport._session), "request"
) as req, mock.patch.object(
path_template, "transcode"
) as transcode, mock.patch.object(
transports.TargetHttpProxiesRestInterceptor, "post_get"
) as post, mock.patch.object(
transports.TargetHttpProxiesRestInterceptor, "pre_get"
) as pre:
pre.assert_not_called()
post.assert_not_called()
pb_message = compute.GetTargetHttpProxyRequest.pb(
compute.GetTargetHttpProxyRequest()
)
transcode.return_value = {
"method": "post",
"uri": "my_uri",
"body": pb_message,
"query_params": pb_message,
}
req.return_value = Response()
req.return_value.status_code = 200
req.return_value.request = PreparedRequest()
req.return_value._content = compute.TargetHttpProxy.to_json(
compute.TargetHttpProxy()
)
request = compute.GetTargetHttpProxyRequest()
metadata = [
("key", "val"),
("cephalopod", "squid"),
]
pre.return_value = request, metadata
post.return_value = compute.TargetHttpProxy()
client.get(
request,
metadata=[
("key", "val"),
("cephalopod", "squid"),
],
)
pre.assert_called_once()
post.assert_called_once()
def test_get_rest_bad_request(
    transport: str = "rest", request_type=compute.GetTargetHttpProxyRequest
):
    """An HTTP 400 from the server must surface as ``BadRequest``."""
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Build a request the transcoder can route to a URL.
    request = request_type(
        **{"project": "sample1", "target_http_proxy": "sample2"}
    )

    # Stub the HTTP session so the call sees a 400 response.
    bad_response = Response()
    bad_response.status_code = 400
    bad_response.request = Request()
    with mock.patch.object(Session, "request") as req:
        req.return_value = bad_response
        with pytest.raises(core_exceptions.BadRequest):
            client.get(request)
def test_get_rest_flattened():
    """Flattened kwargs for ``get`` must hit the expected URL."""
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    with mock.patch.object(type(client.transport._session), "request") as req:
        # Canned TargetHttpProxy payload served by the mocked HTTP layer.
        payload = json_format.MessageToJson(
            compute.TargetHttpProxy.pb(compute.TargetHttpProxy())
        )
        ok_response = Response()
        ok_response.status_code = 200
        ok_response._content = payload.encode("UTF-8")
        req.return_value = ok_response

        # Truthy flattened fields, then override with path params that
        # satisfy the http rule for this method.
        call_kwargs = dict(
            project="project_value",
            target_http_proxy="target_http_proxy_value",
        )
        call_kwargs.update({"project": "sample1", "target_http_proxy": "sample2"})
        client.get(**call_kwargs)

        # Exactly one HTTP call, routed through the expected URI template.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "%s/compute/v1/projects/{project}/global/targetHttpProxies/{target_http_proxy}"
            % client.transport._host,
            args[1],
        )
def test_get_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    request = compute.GetTargetHttpProxyRequest()
    with pytest.raises(ValueError):
        # Supplying both calling conventions at once is rejected.
        client.get(
            request,
            project="project_value",
            target_http_proxy="target_http_proxy_value",
        )
def test_get_rest_error():
    """Smoke test: constructing a REST client must not raise."""
    TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
@pytest.mark.parametrize(
    "request_type",
    [
        compute.InsertTargetHttpProxyRequest,
        dict,
    ],
)
def test_insert_rest(request_type):
    """Happy-path ``insert`` over the REST transport.

    The mocked HTTP layer returns a fully-populated ``Operation``; the
    test asserts the client wraps it in an ``ExtendedOperation`` with all
    fields intact.
    """
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {"project": "sample1"}
    request_init["target_http_proxy_resource"] = {
        "creation_timestamp": "creation_timestamp_value",
        "description": "description_value",
        "fingerprint": "fingerprint_value",
        "id": 205,
        "kind": "kind_value",
        "name": "name_value",
        "proxy_bind": True,
        "region": "region_value",
        "self_link": "self_link_value",
        "url_map": "url_map_value",
    }
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.insert(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, extended_operation.ExtendedOperation)
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_insert_rest_required_fields(request_type=compute.InsertTargetHttpProxyRequest):
    """Check required-field handling for ``insert``.

    Verifies unset required fields are dropped, only ``request_id`` may
    remain as an optional query param, and the final (mock-transcoded)
    HTTP call carries no unexpected query parameters.
    """
    transport_class = transports.TargetHttpProxiesRestTransport

    request_init = {}
    request_init["project"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(
        json_format.MessageToJson(
            pb_request,
            including_default_value_fields=False,
            use_integers_for_enums=False,
        )
    )

    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).insert._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present
    jsonified_request["project"] = "project_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).insert._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id",))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"

    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": pb_request,
            }
            transcode_result["body"] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            pb_return_value = compute.Operation.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)
            response_value._content = json_return_value.encode("UTF-8")

            req.return_value = response_value

            response = client.insert(request)

            # No query params are expected beyond what transcode produced.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_insert_rest_unset_required_fields():
    """``insert`` reports ``requestId`` as its only optional query param.

    The reported set is the intersection of the method's optional query
    parameters with its required fields — empty here, since ``requestId``
    is not required.
    """
    transport = transports.TargetHttpProxiesRestTransport(
        # Fix: instantiate the credentials. The original passed the class
        # object itself (missing parentheses), inconsistent with every
        # other call site in this file.
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.insert._get_unset_required_fields({})
    assert set(unset_fields) == (
        set(("requestId",))
        & set(
            (
                "project",
                "targetHttpProxyResource",
            )
        )
    )
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_insert_rest_interceptors(null_interceptor):
transport = transports.TargetHttpProxiesRestTransport(
credentials=ga_credentials.AnonymousCredentials(),
interceptor=None
if null_interceptor
else transports.TargetHttpProxiesRestInterceptor(),
)
client = TargetHttpProxiesClient(transport=transport)
with mock.patch.object(
type(client.transport._session), "request"
) as req, mock.patch.object(
path_template, "transcode"
) as transcode, mock.patch.object(
transports.TargetHttpProxiesRestInterceptor, "post_insert"
) as post, mock.patch.object(
transports.TargetHttpProxiesRestInterceptor, "pre_insert"
) as pre:
pre.assert_not_called()
post.assert_not_called()
pb_message = compute.InsertTargetHttpProxyRequest.pb(
compute.InsertTargetHttpProxyRequest()
)
transcode.return_value = {
"method": "post",
"uri": "my_uri",
"body": pb_message,
"query_params": pb_message,
}
req.return_value = Response()
req.return_value.status_code = 200
req.return_value.request = PreparedRequest()
req.return_value._content = compute.Operation.to_json(compute.Operation())
request = compute.InsertTargetHttpProxyRequest()
metadata = [
("key", "val"),
("cephalopod", "squid"),
]
pre.return_value = request, metadata
post.return_value = compute.Operation()
client.insert(
request,
metadata=[
("key", "val"),
("cephalopod", "squid"),
],
)
pre.assert_called_once()
post.assert_called_once()
def test_insert_rest_bad_request(
    transport: str = "rest", request_type=compute.InsertTargetHttpProxyRequest
):
    """An HTTP 400 from the server must surface as ``BadRequest``."""
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # A request (path param + body resource) the transcoder can route.
    resource = {
        "creation_timestamp": "creation_timestamp_value",
        "description": "description_value",
        "fingerprint": "fingerprint_value",
        "id": 205,
        "kind": "kind_value",
        "name": "name_value",
        "proxy_bind": True,
        "region": "region_value",
        "self_link": "self_link_value",
        "url_map": "url_map_value",
    }
    request = request_type(
        **{"project": "sample1", "target_http_proxy_resource": resource}
    )

    # Stub the HTTP session so the call sees a 400 response.
    bad_response = Response()
    bad_response.status_code = 400
    bad_response.request = Request()
    with mock.patch.object(Session, "request") as req:
        req.return_value = bad_response
        with pytest.raises(core_exceptions.BadRequest):
            client.insert(request)
def test_insert_rest_flattened():
    """Flattened kwargs for ``insert`` must hit the expected URL."""
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    with mock.patch.object(type(client.transport._session), "request") as req:
        # Canned Operation payload served by the mocked HTTP layer.
        payload = json_format.MessageToJson(compute.Operation.pb(compute.Operation()))
        ok_response = Response()
        ok_response.status_code = 200
        ok_response._content = payload.encode("UTF-8")
        req.return_value = ok_response

        # Truthy flattened fields, then override with path params that
        # satisfy the http rule for this method.
        call_kwargs = dict(
            project="project_value",
            target_http_proxy_resource=compute.TargetHttpProxy(
                creation_timestamp="creation_timestamp_value"
            ),
        )
        call_kwargs.update({"project": "sample1"})
        client.insert(**call_kwargs)

        # Exactly one HTTP call, routed through the expected URI template.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "%s/compute/v1/projects/{project}/global/targetHttpProxies"
            % client.transport._host,
            args[1],
        )
def test_insert_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    request = compute.InsertTargetHttpProxyRequest()
    with pytest.raises(ValueError):
        # Supplying both calling conventions at once is rejected.
        client.insert(
            request,
            project="project_value",
            target_http_proxy_resource=compute.TargetHttpProxy(
                creation_timestamp="creation_timestamp_value"
            ),
        )
def test_insert_rest_error():
    """Smoke test: constructing a REST client must not raise."""
    TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
@pytest.mark.parametrize(
    "request_type",
    [
        compute.InsertTargetHttpProxyRequest,
        dict,
    ],
)
def test_insert_unary_rest(request_type):
    """Happy-path ``insert_unary`` over the REST transport.

    Unlike ``insert``, the unary variant returns the raw ``Operation``
    (not an ``ExtendedOperation``), so only the type is asserted.
    """
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {"project": "sample1"}
    request_init["target_http_proxy_resource"] = {
        "creation_timestamp": "creation_timestamp_value",
        "description": "description_value",
        "fingerprint": "fingerprint_value",
        "id": 205,
        "kind": "kind_value",
        "name": "name_value",
        "proxy_bind": True,
        "region": "region_value",
        "self_link": "self_link_value",
        "url_map": "url_map_value",
    }
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.insert_unary(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)
def test_insert_unary_rest_required_fields(
    request_type=compute.InsertTargetHttpProxyRequest,
):
    """Check required-field handling for ``insert_unary``.

    Verifies unset required fields are dropped, only ``request_id`` may
    remain as an optional query param, and the final (mock-transcoded)
    HTTP call carries no unexpected query parameters.
    """
    transport_class = transports.TargetHttpProxiesRestTransport

    request_init = {}
    request_init["project"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(
        json_format.MessageToJson(
            pb_request,
            including_default_value_fields=False,
            use_integers_for_enums=False,
        )
    )

    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).insert._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present
    jsonified_request["project"] = "project_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).insert._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id",))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"

    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": pb_request,
            }
            transcode_result["body"] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            pb_return_value = compute.Operation.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)
            response_value._content = json_return_value.encode("UTF-8")

            req.return_value = response_value

            response = client.insert_unary(request)

            # No query params are expected beyond what transcode produced.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_insert_unary_rest_unset_required_fields():
    """``insert`` (unary) reports ``requestId`` as its only optional query param.

    The reported set is the intersection of the method's optional query
    parameters with its required fields — empty here, since ``requestId``
    is not required.
    """
    transport = transports.TargetHttpProxiesRestTransport(
        # Fix: instantiate the credentials. The original passed the class
        # object itself (missing parentheses), inconsistent with every
        # other call site in this file.
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.insert._get_unset_required_fields({})
    assert set(unset_fields) == (
        set(("requestId",))
        & set(
            (
                "project",
                "targetHttpProxyResource",
            )
        )
    )
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_insert_unary_rest_interceptors(null_interceptor):
transport = transports.TargetHttpProxiesRestTransport(
credentials=ga_credentials.AnonymousCredentials(),
interceptor=None
if null_interceptor
else transports.TargetHttpProxiesRestInterceptor(),
)
client = TargetHttpProxiesClient(transport=transport)
with mock.patch.object(
type(client.transport._session), "request"
) as req, mock.patch.object(
path_template, "transcode"
) as transcode, mock.patch.object(
transports.TargetHttpProxiesRestInterceptor, "post_insert"
) as post, mock.patch.object(
transports.TargetHttpProxiesRestInterceptor, "pre_insert"
) as pre:
pre.assert_not_called()
post.assert_not_called()
pb_message = compute.InsertTargetHttpProxyRequest.pb(
compute.InsertTargetHttpProxyRequest()
)
transcode.return_value = {
"method": "post",
"uri": "my_uri",
"body": pb_message,
"query_params": pb_message,
}
req.return_value = Response()
req.return_value.status_code = 200
req.return_value.request = PreparedRequest()
req.return_value._content = compute.Operation.to_json(compute.Operation())
request = compute.InsertTargetHttpProxyRequest()
metadata = [
("key", "val"),
("cephalopod", "squid"),
]
pre.return_value = request, metadata
post.return_value = compute.Operation()
client.insert_unary(
request,
metadata=[
("key", "val"),
("cephalopod", "squid"),
],
)
pre.assert_called_once()
post.assert_called_once()
def test_insert_unary_rest_bad_request(
    transport: str = "rest", request_type=compute.InsertTargetHttpProxyRequest
):
    """An HTTP 400 from the server must surface as ``BadRequest``."""
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # A request (path param + body resource) the transcoder can route.
    resource = {
        "creation_timestamp": "creation_timestamp_value",
        "description": "description_value",
        "fingerprint": "fingerprint_value",
        "id": 205,
        "kind": "kind_value",
        "name": "name_value",
        "proxy_bind": True,
        "region": "region_value",
        "self_link": "self_link_value",
        "url_map": "url_map_value",
    }
    request = request_type(
        **{"project": "sample1", "target_http_proxy_resource": resource}
    )

    # Stub the HTTP session so the call sees a 400 response.
    bad_response = Response()
    bad_response.status_code = 400
    bad_response.request = Request()
    with mock.patch.object(Session, "request") as req:
        req.return_value = bad_response
        with pytest.raises(core_exceptions.BadRequest):
            client.insert_unary(request)
def test_insert_unary_rest_flattened():
    """Flattened kwargs for ``insert_unary`` must hit the expected URL."""
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    with mock.patch.object(type(client.transport._session), "request") as req:
        # Canned Operation payload served by the mocked HTTP layer.
        payload = json_format.MessageToJson(compute.Operation.pb(compute.Operation()))
        ok_response = Response()
        ok_response.status_code = 200
        ok_response._content = payload.encode("UTF-8")
        req.return_value = ok_response

        # Truthy flattened fields, then override with path params that
        # satisfy the http rule for this method.
        call_kwargs = dict(
            project="project_value",
            target_http_proxy_resource=compute.TargetHttpProxy(
                creation_timestamp="creation_timestamp_value"
            ),
        )
        call_kwargs.update({"project": "sample1"})
        client.insert_unary(**call_kwargs)

        # Exactly one HTTP call, routed through the expected URI template.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "%s/compute/v1/projects/{project}/global/targetHttpProxies"
            % client.transport._host,
            args[1],
        )
def test_insert_unary_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    request = compute.InsertTargetHttpProxyRequest()
    with pytest.raises(ValueError):
        # Supplying both calling conventions at once is rejected.
        client.insert_unary(
            request,
            project="project_value",
            target_http_proxy_resource=compute.TargetHttpProxy(
                creation_timestamp="creation_timestamp_value"
            ),
        )
def test_insert_unary_rest_error():
    """Smoke test: constructing a REST client must not raise."""
    TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
@pytest.mark.parametrize(
    "request_type",
    [
        compute.ListTargetHttpProxiesRequest,
        dict,
    ],
)
def test_list_rest(request_type):
    """Happy-path ``list`` over the REST transport.

    The mocked HTTP layer returns a ``TargetHttpProxyList`` page; the test
    asserts the client wraps it in a ``ListPager`` that exposes the page
    metadata fields.
    """
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {"project": "sample1"}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.TargetHttpProxyList(
            id="id_value",
            kind="kind_value",
            next_page_token="next_page_token_value",
            self_link="self_link_value",
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.TargetHttpProxyList.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.list(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListPager)
    assert response.id == "id_value"
    assert response.kind == "kind_value"
    assert response.next_page_token == "next_page_token_value"
    assert response.self_link == "self_link_value"
def test_list_rest_required_fields(request_type=compute.ListTargetHttpProxiesRequest):
    """Exercise required-field handling for list().

    Default-valued fields must be dropped from the transcoded request, the
    required ``project`` field must survive, and the final mocked call must
    carry no unexpected query params.
    """
    transport_class = transports.TargetHttpProxiesRestTransport

    request_init = {}
    request_init["project"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(
        json_format.MessageToJson(
            pb_request,
            including_default_value_fields=False,
            use_integers_for_enums=False,
        )
    )
    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).list._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)
    # verify required fields with default values are now present
    jsonified_request["project"] = "project_value"
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).list._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(
        (
            "filter",
            "max_results",
            "order_by",
            "page_token",
            "return_partial_success",
        )
    )
    jsonified_request.update(unset_fields)
    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request = request_type(**request_init)
    # Designate an appropriate value for the returned response.
    return_value = compute.TargetHttpProxyList()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "get",
                "query_params": pb_request,
            }
            transcode.return_value = transcode_result
            response_value = Response()
            response_value.status_code = 200
            pb_return_value = compute.TargetHttpProxyList.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value
            response = client.list(request)
            # No query params are expected for this transcoded request.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_list_rest_unset_required_fields():
    """Check the transport's unset-field bookkeeping for list().

    ``_get_unset_required_fields({})`` must report exactly the optional
    query parameters (camelCase wire names) intersected with the required
    set for the method.
    """
    transport = transports.TargetHttpProxiesRestTransport(
        # Fix: instantiate the credentials. The original passed the
        # AnonymousCredentials *class* (missing parens), unlike every other
        # call site in this file.
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.list._get_unset_required_fields({})
    assert set(unset_fields) == (
        set(
            (
                "filter",
                "maxResults",
                "orderBy",
                "pageToken",
                "returnPartialSuccess",
            )
        )
        & set(("project",))
    )
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_list_rest_interceptors(null_interceptor):
    """pre_list/post_list interceptor hooks fire exactly once around list().

    Runs with and without an interceptor instance on the transport; the
    class-level mock patches catch the hook calls in both cases.
    """
    transport = transports.TargetHttpProxiesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None
        if null_interceptor
        else transports.TargetHttpProxiesRestInterceptor(),
    )
    client = TargetHttpProxiesClient(transport=transport)
    with mock.patch.object(
        type(client.transport._session), "request"
    ) as req, mock.patch.object(
        path_template, "transcode"
    ) as transcode, mock.patch.object(
        transports.TargetHttpProxiesRestInterceptor, "post_list"
    ) as post, mock.patch.object(
        transports.TargetHttpProxiesRestInterceptor, "pre_list"
    ) as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.ListTargetHttpProxiesRequest.pb(
            compute.ListTargetHttpProxiesRequest()
        )
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }
        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.TargetHttpProxyList.to_json(
            compute.TargetHttpProxyList()
        )
        request = compute.ListTargetHttpProxiesRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        # pre_list returns the (request, metadata) pair the transport uses.
        pre.return_value = request, metadata
        post.return_value = compute.TargetHttpProxyList()
        client.list(
            request,
            metadata=[
                ("key", "val"),
                ("cephalopod", "squid"),
            ],
        )
        pre.assert_called_once()
        post.assert_called_once()
def test_list_rest_bad_request(
    transport: str = "rest", request_type=compute.ListTargetHttpProxiesRequest
):
    """An HTTP 400 from the session surfaces as core_exceptions.BadRequest."""
    rest_client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Minimal request that the transcoder accepts.
    request = request_type(**{"project": "sample1"})

    # Fake a 400 at the requests.Session level and expect the mapped error.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        rest_client.list(request)
def test_list_rest_flattened():
    """list() accepts flattened kwargs and hits the expected REST URL."""
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.TargetHttpProxyList()
        # get arguments that satisfy an http rule for this method
        sample_request = {"project": "sample1"}
        # get truthy value for each flattened field
        mock_args = dict(
            project="project_value",
        )
        mock_args.update(sample_request)
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.TargetHttpProxyList.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        client.list(**mock_args)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "%s/compute/v1/projects/{project}/global/targetHttpProxies"
            % client.transport._host,
            args[1],
        )
def test_list_rest_flattened_error(transport: str = "rest"):
    """list() must refuse a request object combined with flattened arguments."""
    rest_client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport
    )
    # Mixing the two calling conventions is ambiguous -> ValueError.
    with pytest.raises(ValueError):
        rest_client.list(
            compute.ListTargetHttpProxiesRequest(), project="project_value"
        )
def test_list_rest_pager(transport: str = "rest"):
    """ListPager walks multiple mocked pages and exposes per-page tokens.

    Feeds four pages (3 + 0 + 1 + 2 items) through the mocked session and
    checks both item iteration and the raw ``next_page_token`` sequence.
    """
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # TODO(kbandes): remove this mock unless there's a good reason for it.
        # with mock.patch.object(path_template, 'transcode') as transcode:
        # Set the response as a series of pages
        response = (
            compute.TargetHttpProxyList(
                items=[
                    compute.TargetHttpProxy(),
                    compute.TargetHttpProxy(),
                    compute.TargetHttpProxy(),
                ],
                next_page_token="abc",
            ),
            compute.TargetHttpProxyList(
                items=[],
                next_page_token="def",
            ),
            compute.TargetHttpProxyList(
                items=[
                    compute.TargetHttpProxy(),
                ],
                next_page_token="ghi",
            ),
            compute.TargetHttpProxyList(
                items=[
                    compute.TargetHttpProxy(),
                ],
            ),
        )
        # Two responses for two calls
        response = response + response
        # Wrap the values into proper Response objs
        response = tuple(compute.TargetHttpProxyList.to_json(x) for x in response)
        return_values = tuple(Response() for i in response)
        for return_val, response_val in zip(return_values, response):
            return_val._content = response_val.encode("UTF-8")
            return_val.status_code = 200
        req.side_effect = return_values
        sample_request = {"project": "sample1"}
        pager = client.list(request=sample_request)
        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(i, compute.TargetHttpProxy) for i in results)
        pages = list(client.list(request=sample_request).pages)
        # The final page reports an empty token, ending iteration.
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
@pytest.mark.parametrize(
    "request_type",
    [
        compute.PatchTargetHttpProxyRequest,
        dict,
    ],
)
def test_patch_rest(request_type):
    """patch() over REST returns an ExtendedOperation built from the mocked body.

    Parametrized over both the proto request type and a plain dict to cover
    the client's request-coercion path.
    """
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "target_http_proxy": "sample2"}
    request_init["target_http_proxy_resource"] = {
        "creation_timestamp": "creation_timestamp_value",
        "description": "description_value",
        "fingerprint": "fingerprint_value",
        "id": 205,
        "kind": "kind_value",
        "name": "name_value",
        "proxy_bind": True,
        "region": "region_value",
        "self_link": "self_link_value",
        "url_map": "url_map_value",
    }
    request = request_type(**request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.patch(request)
    # Establish that the response is the type that we expect.
    assert isinstance(response, extended_operation.ExtendedOperation)
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_patch_rest_required_fields(request_type=compute.PatchTargetHttpProxyRequest):
    """Exercise required-field handling for patch().

    Both path params (``project``, ``targetHttpProxy``) must survive
    transcoding; the only optional query param is ``request_id``; the final
    mocked call must carry no unexpected query params.
    """
    transport_class = transports.TargetHttpProxiesRestTransport

    request_init = {}
    request_init["project"] = ""
    request_init["target_http_proxy"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(
        json_format.MessageToJson(
            pb_request,
            including_default_value_fields=False,
            use_integers_for_enums=False,
        )
    )
    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).patch._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)
    # verify required fields with default values are now present
    jsonified_request["project"] = "project_value"
    jsonified_request["targetHttpProxy"] = "target_http_proxy_value"
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).patch._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id",))
    jsonified_request.update(unset_fields)
    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "targetHttpProxy" in jsonified_request
    assert jsonified_request["targetHttpProxy"] == "target_http_proxy_value"
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request = request_type(**request_init)
    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "patch",
                "query_params": pb_request,
            }
            transcode_result["body"] = pb_request
            transcode.return_value = transcode_result
            response_value = Response()
            response_value.status_code = 200
            pb_return_value = compute.Operation.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value
            response = client.patch(request)
            # No query params are expected for this transcoded request.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_patch_rest_unset_required_fields():
    """Check the transport's unset-field bookkeeping for patch().

    ``_get_unset_required_fields({})`` must report the optional ``requestId``
    query parameter intersected with the method's required set.
    """
    transport = transports.TargetHttpProxiesRestTransport(
        # Fix: instantiate the credentials. The original passed the
        # AnonymousCredentials *class* (missing parens), unlike every other
        # call site in this file.
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.patch._get_unset_required_fields({})
    assert set(unset_fields) == (
        set(("requestId",))
        & set(
            (
                "project",
                "targetHttpProxy",
                "targetHttpProxyResource",
            )
        )
    )
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_patch_rest_interceptors(null_interceptor):
    """pre_patch/post_patch interceptor hooks fire exactly once around patch().

    Runs with and without an interceptor instance on the transport; the
    class-level mock patches catch the hook calls in both cases.
    """
    transport = transports.TargetHttpProxiesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None
        if null_interceptor
        else transports.TargetHttpProxiesRestInterceptor(),
    )
    client = TargetHttpProxiesClient(transport=transport)
    with mock.patch.object(
        type(client.transport._session), "request"
    ) as req, mock.patch.object(
        path_template, "transcode"
    ) as transcode, mock.patch.object(
        transports.TargetHttpProxiesRestInterceptor, "post_patch"
    ) as post, mock.patch.object(
        transports.TargetHttpProxiesRestInterceptor, "pre_patch"
    ) as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.PatchTargetHttpProxyRequest.pb(
            compute.PatchTargetHttpProxyRequest()
        )
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }
        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.Operation.to_json(compute.Operation())
        request = compute.PatchTargetHttpProxyRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        # pre_patch returns the (request, metadata) pair the transport uses.
        pre.return_value = request, metadata
        post.return_value = compute.Operation()
        client.patch(
            request,
            metadata=[
                ("key", "val"),
                ("cephalopod", "squid"),
            ],
        )
        pre.assert_called_once()
        post.assert_called_once()
def test_patch_rest_bad_request(
    transport: str = "rest", request_type=compute.PatchTargetHttpProxyRequest
):
    """An HTTP 400 from the session surfaces as core_exceptions.BadRequest."""
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "target_http_proxy": "sample2"}
    request_init["target_http_proxy_resource"] = {
        "creation_timestamp": "creation_timestamp_value",
        "description": "description_value",
        "fingerprint": "fingerprint_value",
        "id": 205,
        "kind": "kind_value",
        "name": "name_value",
        "proxy_bind": True,
        "region": "region_value",
        "self_link": "self_link_value",
        "url_map": "url_map_value",
    }
    request = request_type(**request_init)
    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.patch(request)
def test_patch_rest_flattened():
    """patch() accepts flattened kwargs and hits the expected REST URL."""
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()
        # get arguments that satisfy an http rule for this method
        sample_request = {"project": "sample1", "target_http_proxy": "sample2"}
        # get truthy value for each flattened field
        mock_args = dict(
            project="project_value",
            target_http_proxy="target_http_proxy_value",
            target_http_proxy_resource=compute.TargetHttpProxy(
                creation_timestamp="creation_timestamp_value"
            ),
        )
        mock_args.update(sample_request)
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        client.patch(**mock_args)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "%s/compute/v1/projects/{project}/global/targetHttpProxies/{target_http_proxy}"
            % client.transport._host,
            args[1],
        )
def test_patch_rest_flattened_error(transport: str = "rest"):
    """patch() must refuse a request object combined with flattened arguments."""
    flattened_kwargs = dict(
        project="project_value",
        target_http_proxy="target_http_proxy_value",
        target_http_proxy_resource=compute.TargetHttpProxy(
            creation_timestamp="creation_timestamp_value"
        ),
    )
    rest_client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport
    )
    # Mixing the two calling conventions is ambiguous -> ValueError.
    with pytest.raises(ValueError):
        rest_client.patch(compute.PatchTargetHttpProxyRequest(), **flattened_kwargs)
def test_patch_rest_error():
    """Smoke-test: a REST-transport client can be constructed for patch."""
    # Construction alone must succeed; no RPC is issued here.
    TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
@pytest.mark.parametrize(
    "request_type",
    [
        compute.PatchTargetHttpProxyRequest,
        dict,
    ],
)
def test_patch_unary_rest(request_type):
    """patch_unary() over REST returns a plain compute.Operation.

    Unlike test_patch_rest, the unary variant is only checked for response
    type, not per-field values.
    """
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "target_http_proxy": "sample2"}
    request_init["target_http_proxy_resource"] = {
        "creation_timestamp": "creation_timestamp_value",
        "description": "description_value",
        "fingerprint": "fingerprint_value",
        "id": 205,
        "kind": "kind_value",
        "name": "name_value",
        "proxy_bind": True,
        "region": "region_value",
        "self_link": "self_link_value",
        "url_map": "url_map_value",
    }
    request = request_type(**request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.patch_unary(request)
    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)
def test_patch_unary_rest_required_fields(
    request_type=compute.PatchTargetHttpProxyRequest,
):
    """Exercise required-field handling for patch_unary().

    NOTE: the unset-field inspection goes through the transport's ``patch``
    method, which patch_unary shares.
    """
    transport_class = transports.TargetHttpProxiesRestTransport

    request_init = {}
    request_init["project"] = ""
    request_init["target_http_proxy"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(
        json_format.MessageToJson(
            pb_request,
            including_default_value_fields=False,
            use_integers_for_enums=False,
        )
    )
    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).patch._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)
    # verify required fields with default values are now present
    jsonified_request["project"] = "project_value"
    jsonified_request["targetHttpProxy"] = "target_http_proxy_value"
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).patch._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id",))
    jsonified_request.update(unset_fields)
    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "targetHttpProxy" in jsonified_request
    assert jsonified_request["targetHttpProxy"] == "target_http_proxy_value"
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request = request_type(**request_init)
    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "patch",
                "query_params": pb_request,
            }
            transcode_result["body"] = pb_request
            transcode.return_value = transcode_result
            response_value = Response()
            response_value.status_code = 200
            pb_return_value = compute.Operation.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value
            response = client.patch_unary(request)
            # No query params are expected for this transcoded request.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_patch_unary_rest_unset_required_fields():
    """Check the transport's unset-field bookkeeping for patch_unary().

    patch_unary shares the transport's ``patch`` method, so the same
    ``requestId``-vs-required-set intersection applies.
    """
    transport = transports.TargetHttpProxiesRestTransport(
        # Fix: instantiate the credentials. The original passed the
        # AnonymousCredentials *class* (missing parens), unlike every other
        # call site in this file.
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.patch._get_unset_required_fields({})
    assert set(unset_fields) == (
        set(("requestId",))
        & set(
            (
                "project",
                "targetHttpProxy",
                "targetHttpProxyResource",
            )
        )
    )
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_patch_unary_rest_interceptors(null_interceptor):
    """pre_patch/post_patch interceptor hooks fire exactly once around patch_unary().

    patch_unary shares the patch interceptor hooks; runs with and without an
    interceptor instance on the transport.
    """
    transport = transports.TargetHttpProxiesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None
        if null_interceptor
        else transports.TargetHttpProxiesRestInterceptor(),
    )
    client = TargetHttpProxiesClient(transport=transport)
    with mock.patch.object(
        type(client.transport._session), "request"
    ) as req, mock.patch.object(
        path_template, "transcode"
    ) as transcode, mock.patch.object(
        transports.TargetHttpProxiesRestInterceptor, "post_patch"
    ) as post, mock.patch.object(
        transports.TargetHttpProxiesRestInterceptor, "pre_patch"
    ) as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.PatchTargetHttpProxyRequest.pb(
            compute.PatchTargetHttpProxyRequest()
        )
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }
        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.Operation.to_json(compute.Operation())
        request = compute.PatchTargetHttpProxyRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        # pre_patch returns the (request, metadata) pair the transport uses.
        pre.return_value = request, metadata
        post.return_value = compute.Operation()
        client.patch_unary(
            request,
            metadata=[
                ("key", "val"),
                ("cephalopod", "squid"),
            ],
        )
        pre.assert_called_once()
        post.assert_called_once()
def test_patch_unary_rest_bad_request(
    transport: str = "rest", request_type=compute.PatchTargetHttpProxyRequest
):
    """An HTTP 400 from the session surfaces as core_exceptions.BadRequest."""
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "target_http_proxy": "sample2"}
    request_init["target_http_proxy_resource"] = {
        "creation_timestamp": "creation_timestamp_value",
        "description": "description_value",
        "fingerprint": "fingerprint_value",
        "id": 205,
        "kind": "kind_value",
        "name": "name_value",
        "proxy_bind": True,
        "region": "region_value",
        "self_link": "self_link_value",
        "url_map": "url_map_value",
    }
    request = request_type(**request_init)
    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.patch_unary(request)
def test_patch_unary_rest_flattened():
    """patch_unary() accepts flattened kwargs and hits the expected REST URL."""
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()
        # get arguments that satisfy an http rule for this method
        sample_request = {"project": "sample1", "target_http_proxy": "sample2"}
        # get truthy value for each flattened field
        mock_args = dict(
            project="project_value",
            target_http_proxy="target_http_proxy_value",
            target_http_proxy_resource=compute.TargetHttpProxy(
                creation_timestamp="creation_timestamp_value"
            ),
        )
        mock_args.update(sample_request)
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        client.patch_unary(**mock_args)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "%s/compute/v1/projects/{project}/global/targetHttpProxies/{target_http_proxy}"
            % client.transport._host,
            args[1],
        )
def test_patch_unary_rest_flattened_error(transport: str = "rest"):
    """patch_unary() must refuse a request object combined with flattened arguments."""
    flattened_kwargs = dict(
        project="project_value",
        target_http_proxy="target_http_proxy_value",
        target_http_proxy_resource=compute.TargetHttpProxy(
            creation_timestamp="creation_timestamp_value"
        ),
    )
    rest_client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport
    )
    # Mixing the two calling conventions is ambiguous -> ValueError.
    with pytest.raises(ValueError):
        rest_client.patch_unary(
            compute.PatchTargetHttpProxyRequest(), **flattened_kwargs
        )
def test_patch_unary_rest_error():
    """Smoke-test: a REST-transport client can be constructed for patch_unary."""
    # Construction alone must succeed; no RPC is issued here.
    TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
@pytest.mark.parametrize(
    "request_type",
    [
        compute.SetUrlMapTargetHttpProxyRequest,
        dict,
    ],
)
def test_set_url_map_rest(request_type):
    """set_url_map() over REST returns an ExtendedOperation from the mocked body.

    Parametrized over both the proto request type and a plain dict to cover
    the client's request-coercion path.
    """
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "target_http_proxy": "sample2"}
    request_init["url_map_reference_resource"] = {"url_map": "url_map_value"}
    request = request_type(**request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.set_url_map(request)
    # Establish that the response is the type that we expect.
    assert isinstance(response, extended_operation.ExtendedOperation)
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_set_url_map_rest_required_fields(
    request_type=compute.SetUrlMapTargetHttpProxyRequest,
):
    """Required fields survive JSON round-tripping and reach the wire.

    Builds a request with empty required fields, checks which ones the
    transport reports as unset, fills them in, then issues the call against
    a mocked session and verifies no unexpected query params are sent.
    """
    transport_class = transports.TargetHttpProxiesRestTransport
    request_init = {}
    request_init["project"] = ""
    request_init["target_http_proxy"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(
        json_format.MessageToJson(
            pb_request,
            including_default_value_fields=False,
            use_integers_for_enums=False,
        )
    )
    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).set_url_map._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)
    # verify required fields with default values are now present
    jsonified_request["project"] = "project_value"
    jsonified_request["targetHttpProxy"] = "target_http_proxy_value"
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).set_url_map._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id",))
    jsonified_request.update(unset_fields)
    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "targetHttpProxy" in jsonified_request
    assert jsonified_request["targetHttpProxy"] == "target_http_proxy_value"
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request = request_type(**request_init)
    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": pb_request,
            }
            transcode_result["body"] = pb_request
            transcode.return_value = transcode_result
            response_value = Response()
            response_value.status_code = 200
            pb_return_value = compute.Operation.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value
            response = client.set_url_map(request)
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_set_url_map_rest_unset_required_fields():
    """The unset-required-fields set is the declared query-param intersection.

    Fix: the original passed the ``AnonymousCredentials`` class itself
    (missing call parentheses); the transport expects a credentials
    *instance*, so instantiate it.
    """
    transport = transports.TargetHttpProxiesRestTransport(
        credentials=ga_credentials.AnonymousCredentials()
    )
    unset_fields = transport.set_url_map._get_unset_required_fields({})
    # Only fields that are both http-option query params and required by the
    # message may appear here.
    assert set(unset_fields) == (
        set(("requestId",))
        & set(
            (
                "project",
                "targetHttpProxy",
                "urlMapReferenceResource",
            )
        )
    )
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_set_url_map_rest_interceptors(null_interceptor):
    """pre_/post_set_url_map interceptors fire exactly once around the call."""
    transport = transports.TargetHttpProxiesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None
        if null_interceptor
        else transports.TargetHttpProxiesRestInterceptor(),
    )
    client = TargetHttpProxiesClient(transport=transport)
    # Patch the session, the transcoder, and both interceptor hooks at once.
    with mock.patch.object(
        type(client.transport._session), "request"
    ) as req, mock.patch.object(
        path_template, "transcode"
    ) as transcode, mock.patch.object(
        transports.TargetHttpProxiesRestInterceptor, "post_set_url_map"
    ) as post, mock.patch.object(
        transports.TargetHttpProxiesRestInterceptor, "pre_set_url_map"
    ) as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.SetUrlMapTargetHttpProxyRequest.pb(
            compute.SetUrlMapTargetHttpProxyRequest()
        )
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }
        # Fake a successful HTTP response carrying a serialized Operation.
        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.Operation.to_json(compute.Operation())
        request = compute.SetUrlMapTargetHttpProxyRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Operation()
        client.set_url_map(
            request,
            metadata=[
                ("key", "val"),
                ("cephalopod", "squid"),
            ],
        )
        pre.assert_called_once()
        post.assert_called_once()
def test_set_url_map_rest_bad_request(
    transport: str = "rest", request_type=compute.SetUrlMapTargetHttpProxyRequest
):
    """An HTTP 400 from the API must surface as ``core_exceptions.BadRequest``."""
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Build a request that satisfies transcoding.
    init_kwargs = {
        "project": "sample1",
        "target_http_proxy": "sample2",
        "url_map_reference_resource": {"url_map": "url_map_value"},
    }
    request = request_type(**init_kwargs)
    # Fake an HTTP 400 at the session layer and expect BadRequest.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.set_url_map(request)
def test_set_url_map_rest_flattened():
    """Flattened keyword arguments are expanded onto the HTTP request URL."""
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()
        # get arguments that satisfy an http rule for this method
        sample_request = {"project": "sample1", "target_http_proxy": "sample2"}
        # get truthy value for each flattened field
        mock_args = dict(
            project="project_value",
            target_http_proxy="target_http_proxy_value",
            url_map_reference_resource=compute.UrlMapReference(url_map="url_map_value"),
        )
        mock_args.update(sample_request)
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        client.set_url_map(**mock_args)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "%s/compute/v1/projects/{project}/targetHttpProxies/{target_http_proxy}/setUrlMap"
            % client.transport._host,
            args[1],
        )
def test_set_url_map_rest_flattened_error(transport: str = "rest"):
    """Passing both a request object and flattened fields raises ValueError."""
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    flattened = dict(
        project="project_value",
        target_http_proxy="target_http_proxy_value",
        url_map_reference_resource=compute.UrlMapReference(url_map="url_map_value"),
    )
    with pytest.raises(ValueError):
        client.set_url_map(compute.SetUrlMapTargetHttpProxyRequest(), **flattened)
def test_set_url_map_rest_error():
    """Smoke test: a REST client can be constructed for this method."""
    _ = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
@pytest.mark.parametrize(
    "request_type",
    [
        compute.SetUrlMapTargetHttpProxyRequest,
        dict,
    ],
)
def test_set_url_map_unary_rest(request_type):
    """The unary variant returns a plain Operation for a mocked 200 reply."""
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "target_http_proxy": "sample2"}
    request_init["url_map_reference_resource"] = {"url_map": "url_map_value"}
    request = request_type(**request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.set_url_map_unary(request)
    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)
def test_set_url_map_unary_rest_required_fields(
    request_type=compute.SetUrlMapTargetHttpProxyRequest,
):
    """Required fields round-trip correctly for the unary call path.

    Mirrors the non-unary required-fields test; the transport method is
    shared, only the client entry point differs.
    """
    transport_class = transports.TargetHttpProxiesRestTransport
    request_init = {}
    request_init["project"] = ""
    request_init["target_http_proxy"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(
        json_format.MessageToJson(
            pb_request,
            including_default_value_fields=False,
            use_integers_for_enums=False,
        )
    )
    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).set_url_map._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)
    # verify required fields with default values are now present
    jsonified_request["project"] = "project_value"
    jsonified_request["targetHttpProxy"] = "target_http_proxy_value"
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).set_url_map._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id",))
    jsonified_request.update(unset_fields)
    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "targetHttpProxy" in jsonified_request
    assert jsonified_request["targetHttpProxy"] == "target_http_proxy_value"
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request = request_type(**request_init)
    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": pb_request,
            }
            transcode_result["body"] = pb_request
            transcode.return_value = transcode_result
            response_value = Response()
            response_value.status_code = 200
            pb_return_value = compute.Operation.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value
            response = client.set_url_map_unary(request)
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_set_url_map_unary_rest_unset_required_fields():
    """Unary variant: unset-required-fields set matches the declared params.

    Fix: the original passed the ``AnonymousCredentials`` class itself
    (missing call parentheses); the transport expects a credentials
    *instance*, so instantiate it.
    """
    transport = transports.TargetHttpProxiesRestTransport(
        credentials=ga_credentials.AnonymousCredentials()
    )
    # The unary client method shares the transport-level set_url_map wrapper.
    unset_fields = transport.set_url_map._get_unset_required_fields({})
    assert set(unset_fields) == (
        set(("requestId",))
        & set(
            (
                "project",
                "targetHttpProxy",
                "urlMapReferenceResource",
            )
        )
    )
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_set_url_map_unary_rest_interceptors(null_interceptor):
    """Interceptor hooks fire once around the unary call as well."""
    transport = transports.TargetHttpProxiesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None
        if null_interceptor
        else transports.TargetHttpProxiesRestInterceptor(),
    )
    client = TargetHttpProxiesClient(transport=transport)
    # Patch the session, the transcoder, and both interceptor hooks at once.
    with mock.patch.object(
        type(client.transport._session), "request"
    ) as req, mock.patch.object(
        path_template, "transcode"
    ) as transcode, mock.patch.object(
        transports.TargetHttpProxiesRestInterceptor, "post_set_url_map"
    ) as post, mock.patch.object(
        transports.TargetHttpProxiesRestInterceptor, "pre_set_url_map"
    ) as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.SetUrlMapTargetHttpProxyRequest.pb(
            compute.SetUrlMapTargetHttpProxyRequest()
        )
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }
        # Fake a successful HTTP response carrying a serialized Operation.
        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.Operation.to_json(compute.Operation())
        request = compute.SetUrlMapTargetHttpProxyRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Operation()
        client.set_url_map_unary(
            request,
            metadata=[
                ("key", "val"),
                ("cephalopod", "squid"),
            ],
        )
        pre.assert_called_once()
        post.assert_called_once()
def test_set_url_map_unary_rest_bad_request(
    transport: str = "rest", request_type=compute.SetUrlMapTargetHttpProxyRequest
):
    """Unary variant: a 400 reply surfaces as ``core_exceptions.BadRequest``."""
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Build a request that satisfies transcoding.
    init_kwargs = {
        "project": "sample1",
        "target_http_proxy": "sample2",
        "url_map_reference_resource": {"url_map": "url_map_value"},
    }
    request = request_type(**init_kwargs)
    # Fake an HTTP 400 at the session layer and expect BadRequest.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.set_url_map_unary(request)
def test_set_url_map_unary_rest_flattened():
    """Unary variant: flattened kwargs are expanded onto the HTTP request URL."""
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()
        # get arguments that satisfy an http rule for this method
        sample_request = {"project": "sample1", "target_http_proxy": "sample2"}
        # get truthy value for each flattened field
        mock_args = dict(
            project="project_value",
            target_http_proxy="target_http_proxy_value",
            url_map_reference_resource=compute.UrlMapReference(url_map="url_map_value"),
        )
        mock_args.update(sample_request)
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        client.set_url_map_unary(**mock_args)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "%s/compute/v1/projects/{project}/targetHttpProxies/{target_http_proxy}/setUrlMap"
            % client.transport._host,
            args[1],
        )
def test_set_url_map_unary_rest_flattened_error(transport: str = "rest"):
    """Unary variant: request object plus flattened fields is rejected."""
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    flattened = dict(
        project="project_value",
        target_http_proxy="target_http_proxy_value",
        url_map_reference_resource=compute.UrlMapReference(url_map="url_map_value"),
    )
    with pytest.raises(ValueError):
        client.set_url_map_unary(
            compute.SetUrlMapTargetHttpProxyRequest(), **flattened
        )
def test_set_url_map_unary_rest_error():
    """Smoke test: REST client construction succeeds for the unary variant."""
    _ = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
def test_credentials_transport_error():
    """Mutually exclusive client options must raise ValueError.

    credentials, credentials_file, api_key and scopes each conflict with a
    pre-built transport instance; api_key also conflicts with credentials.
    """
    # It is an error to provide credentials and a transport instance.
    transport = transports.TargetHttpProxiesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = TargetHttpProxiesClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport,
        )
    # It is an error to provide a credentials file and a transport instance.
    transport = transports.TargetHttpProxiesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = TargetHttpProxiesClient(
            client_options={"credentials_file": "credentials.json"},
            transport=transport,
        )
    # It is an error to provide an api_key and a transport instance.
    transport = transports.TargetHttpProxiesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    options = client_options.ClientOptions()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = TargetHttpProxiesClient(
            client_options=options,
            transport=transport,
        )
    # It is an error to provide an api_key and a credential.
    options = mock.Mock()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = TargetHttpProxiesClient(
            client_options=options, credentials=ga_credentials.AnonymousCredentials()
        )
    # It is an error to provide scopes and a transport instance.
    transport = transports.TargetHttpProxiesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = TargetHttpProxiesClient(
            client_options={"scopes": ["1", "2"]},
            transport=transport,
        )
def test_transport_instance():
    """A client accepts and exposes a caller-supplied transport instance."""
    rest_transport = transports.TargetHttpProxiesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    assert TargetHttpProxiesClient(transport=rest_transport).transport is rest_transport
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.TargetHttpProxiesRestTransport,
    ],
)
def test_transport_adc(transport_class):
    """Transports fall back to application-default credentials when none given."""
    with mock.patch.object(google.auth, "default") as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class()
        adc.assert_called_once()
@pytest.mark.parametrize(
    "transport_name",
    [
        "rest",
    ],
)
def test_transport_kind(transport_name):
    """Each transport reports its registered kind string."""
    transport_cls = TargetHttpProxiesClient.get_transport_class(transport_name)
    transport = transport_cls(credentials=ga_credentials.AnonymousCredentials())
    assert transport.kind == transport_name
def test_target_http_proxies_base_transport_error():
    """Supplying both credentials and credentials_file is rejected."""
    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
        transports.TargetHttpProxiesTransport(
            credentials=ga_credentials.AnonymousCredentials(),
            credentials_file="credentials.json",
        )
def test_target_http_proxies_base_transport():
    """Every abstract method on the base transport raises NotImplementedError."""
    # Instantiate the base transport.
    with mock.patch(
        "google.cloud.compute_v1.services.target_http_proxies.transports.TargetHttpProxiesTransport.__init__"
    ) as Transport:
        Transport.return_value = None
        transport = transports.TargetHttpProxiesTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )
    # Every method on the transport should just blindly
    # raise NotImplementedError.
    methods = (
        "aggregated_list",
        "delete",
        "get",
        "insert",
        "list",
        "patch",
        "set_url_map",
    )
    for method in methods:
        with pytest.raises(NotImplementedError):
            getattr(transport, method)(request=object())
    with pytest.raises(NotImplementedError):
        transport.close()
    # Catch all for all remaining methods and properties
    remainder = [
        "kind",
    ]
    for r in remainder:
        with pytest.raises(NotImplementedError):
            getattr(transport, r)()
def test_target_http_proxies_base_transport_with_credentials_file():
    """A credentials file is loaded with the compute default scopes."""
    # Instantiate the base transport with a credentials file
    with mock.patch.object(
        google.auth, "load_credentials_from_file", autospec=True
    ) as load_creds, mock.patch(
        "google.cloud.compute_v1.services.target_http_proxies.transports.TargetHttpProxiesTransport._prep_wrapped_messages"
    ) as Transport:
        Transport.return_value = None
        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.TargetHttpProxiesTransport(
            credentials_file="credentials.json",
            quota_project_id="octopus",
        )
        # The file, scopes and quota project must be forwarded verbatim.
        load_creds.assert_called_once_with(
            "credentials.json",
            scopes=None,
            default_scopes=(
                "https://www.googleapis.com/auth/compute",
                "https://www.googleapis.com/auth/cloud-platform",
            ),
            quota_project_id="octopus",
        )
def test_target_http_proxies_base_transport_with_adc():
    """ADC is consulted when neither credentials nor a file are supplied."""
    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
        "google.cloud.compute_v1.services.target_http_proxies.transports.TargetHttpProxiesTransport._prep_wrapped_messages"
    ) as Transport:
        Transport.return_value = None
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transports.TargetHttpProxiesTransport()
        adc.assert_called_once()
def test_target_http_proxies_auth_adc():
    """Constructing a client without credentials falls back to ADC."""
    with mock.patch.object(google.auth, "default", autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        TargetHttpProxiesClient()
        expected_default_scopes = (
            "https://www.googleapis.com/auth/compute",
            "https://www.googleapis.com/auth/cloud-platform",
        )
        adc.assert_called_once_with(
            scopes=None,
            default_scopes=expected_default_scopes,
            quota_project_id=None,
        )
def test_target_http_proxies_http_transport_client_cert_source_for_mtls():
    """The mTLS client-cert callback is wired into the authorized session."""
    creds = ga_credentials.AnonymousCredentials()
    patch_target = (
        "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
    )
    with mock.patch(patch_target) as mock_configure_mtls_channel:
        transports.TargetHttpProxiesRestTransport(
            credentials=creds, client_cert_source_for_mtls=client_cert_source_callback
        )
        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
@pytest.mark.parametrize(
    "transport_name",
    [
        "rest",
    ],
)
def test_target_http_proxies_host_no_port(transport_name):
    """An endpoint without a port resolves to the scheme-prefixed default."""
    options = client_options.ClientOptions(api_endpoint="compute.googleapis.com")
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=options,
        transport=transport_name,
    )
    if transport_name in ["grpc", "grpc_asyncio"]:
        expected_host = "compute.googleapis.com:443"
    else:
        expected_host = "https://compute.googleapis.com"
    assert client.transport._host == expected_host
@pytest.mark.parametrize(
    "transport_name",
    [
        "rest",
    ],
)
def test_target_http_proxies_host_with_port(transport_name):
    """An explicit port on the endpoint is preserved in the transport host."""
    options = client_options.ClientOptions(api_endpoint="compute.googleapis.com:8000")
    client = TargetHttpProxiesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=options,
        transport=transport_name,
    )
    if transport_name in ["grpc", "grpc_asyncio"]:
        expected_host = "compute.googleapis.com:8000"
    else:
        expected_host = "https://compute.googleapis.com:8000"
    assert client.transport._host == expected_host
@pytest.mark.parametrize(
    "transport_name",
    [
        "rest",
    ],
)
def test_target_http_proxies_client_transport_session_collision(transport_name):
    """Two clients must not share per-method session objects.

    Replaces seven copy-pasted assertion triples with a loop over the
    transport's RPC wrapper names; the assertions are unchanged.
    """
    creds1 = ga_credentials.AnonymousCredentials()
    creds2 = ga_credentials.AnonymousCredentials()
    client1 = TargetHttpProxiesClient(
        credentials=creds1,
        transport=transport_name,
    )
    client2 = TargetHttpProxiesClient(
        credentials=creds2,
        transport=transport_name,
    )
    # Every RPC wrapper on the transport must carry its own session.
    for method_name in (
        "aggregated_list",
        "delete",
        "get",
        "insert",
        "list",
        "patch",
        "set_url_map",
    ):
        session1 = getattr(client1.transport, method_name)._session
        session2 = getattr(client2.transport, method_name)._session
        assert session1 != session2
def test_common_billing_account_path():
    """common_billing_account_path builds the canonical resource name."""
    account = "squid"
    actual = TargetHttpProxiesClient.common_billing_account_path(account)
    assert actual == f"billingAccounts/{account}"
def test_parse_common_billing_account_path():
    """parse_common_billing_account_path inverts the path builder."""
    fields = {"billing_account": "clam"}
    path = TargetHttpProxiesClient.common_billing_account_path(**fields)
    # Check that the path construction is reversible.
    assert TargetHttpProxiesClient.parse_common_billing_account_path(path) == fields
def test_common_folder_path():
    """common_folder_path builds the canonical folder resource name."""
    folder = "whelk"
    assert TargetHttpProxiesClient.common_folder_path(folder) == f"folders/{folder}"
def test_parse_common_folder_path():
    """parse_common_folder_path inverts the folder path builder."""
    fields = {"folder": "octopus"}
    path = TargetHttpProxiesClient.common_folder_path(**fields)
    # Check that the path construction is reversible.
    assert TargetHttpProxiesClient.parse_common_folder_path(path) == fields
def test_common_organization_path():
    """common_organization_path builds the canonical organization name."""
    org = "oyster"
    actual = TargetHttpProxiesClient.common_organization_path(org)
    assert actual == f"organizations/{org}"
def test_parse_common_organization_path():
    """parse_common_organization_path inverts the organization path builder."""
    fields = {"organization": "nudibranch"}
    path = TargetHttpProxiesClient.common_organization_path(**fields)
    # Check that the path construction is reversible.
    assert TargetHttpProxiesClient.parse_common_organization_path(path) == fields
def test_common_project_path():
    """common_project_path builds the canonical project resource name."""
    project = "cuttlefish"
    assert TargetHttpProxiesClient.common_project_path(project) == f"projects/{project}"
def test_parse_common_project_path():
    """parse_common_project_path inverts the project path builder."""
    fields = {"project": "mussel"}
    path = TargetHttpProxiesClient.common_project_path(**fields)
    # Check that the path construction is reversible.
    assert TargetHttpProxiesClient.parse_common_project_path(path) == fields
def test_common_location_path():
    """common_location_path builds projects/{project}/locations/{location}."""
    project, location = "winkle", "nautilus"
    actual = TargetHttpProxiesClient.common_location_path(project, location)
    assert actual == f"projects/{project}/locations/{location}"
def test_parse_common_location_path():
    """parse_common_location_path inverts the location path builder."""
    fields = {"project": "scallop", "location": "abalone"}
    path = TargetHttpProxiesClient.common_location_path(**fields)
    # Check that the path construction is reversible.
    assert TargetHttpProxiesClient.parse_common_location_path(path) == fields
def test_client_with_default_client_info():
    """client_info reaches _prep_wrapped_messages for both construction paths."""
    client_info = gapic_v1.client_info.ClientInfo()
    # Via the client constructor.
    with mock.patch.object(
        transports.TargetHttpProxiesTransport, "_prep_wrapped_messages"
    ) as prep:
        client = TargetHttpProxiesClient(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)
    # Via direct transport construction.
    with mock.patch.object(
        transports.TargetHttpProxiesTransport, "_prep_wrapped_messages"
    ) as prep:
        transport_class = TargetHttpProxiesClient.get_transport_class()
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)
def test_transport_close():
    """Exiting the client context closes the transport's session resource.

    Fix: the local mapping was named ``transports``, shadowing the imported
    ``transports`` module within this function; renamed to avoid confusion.
    """
    close_targets = {
        "rest": "_session",
    }
    for transport_name, close_name in close_targets.items():
        client = TargetHttpProxiesClient(
            credentials=ga_credentials.AnonymousCredentials(), transport=transport_name
        )
        with mock.patch.object(
            type(getattr(client.transport, close_name)), "close"
        ) as close:
            with client:
                close.assert_not_called()
            close.assert_called_once()
def test_client_ctx():
    """Using the client as a context manager closes its transport on exit.

    Fix: the local list was named ``transports``, shadowing the imported
    ``transports`` module within this function; renamed to avoid confusion.
    """
    transport_names = [
        "rest",
    ]
    for transport_name in transport_names:
        client = TargetHttpProxiesClient(
            credentials=ga_credentials.AnonymousCredentials(), transport=transport_name
        )
        # Test client calls underlying transport.
        with mock.patch.object(type(client.transport), "close") as close:
            close.assert_not_called()
            with client:
                pass
            close.assert_called()
@pytest.mark.parametrize(
    "client_class,transport_class",
    [
        (TargetHttpProxiesClient, transports.TargetHttpProxiesRestTransport),
    ],
)
def test_api_key_credentials(client_class, transport_class):
    """An api_key client option is converted into API-key credentials."""
    with mock.patch.object(
        google.auth._default, "get_api_key_credentials", create=True
    ) as get_api_key_credentials:
        mock_cred = mock.Mock()
        get_api_key_credentials.return_value = mock_cred
        options = client_options.ClientOptions()
        options.api_key = "api_key"
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(client_options=options)
            # The transport must receive the derived credentials, not the key.
            patched.assert_called_once_with(
                credentials=mock_cred,
                credentials_file=None,
                host=client.DEFAULT_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )
| {
"content_hash": "4d9b16834e896097b6e2b0cfb437b0be",
"timestamp": "",
"source": "github",
"line_count": 4667,
"max_line_length": 123,
"avg_line_length": 36.26226698092994,
"alnum_prop": 0.6369094046184027,
"repo_name": "googleapis/python-compute",
"id": "313b38fe1deee0ed25fab5bf155fcc9533fcc115",
"size": "169836",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tests/unit/gapic/compute_v1/test_target_http_proxies.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2050"
},
{
"name": "Python",
"bytes": "32681847"
},
{
"name": "Shell",
"bytes": "30663"
}
],
"symlink_target": ""
} |
""" manage PyTables query interface via Expressions """
import ast
from functools import partial
from typing import Any, Dict, Optional, Tuple
import numpy as np
from pandas._libs.tslibs import Timedelta, Timestamp
from pandas.compat.chainmap import DeepChainMap
from pandas.core.dtypes.common import is_list_like
import pandas as pd
import pandas.core.common as com
from pandas.core.computation import expr, ops, scope as _scope
from pandas.core.computation.common import ensure_decoded
from pandas.core.computation.expr import BaseExprVisitor
from pandas.core.computation.ops import UndefinedVariableError, is_term
from pandas.core.construction import extract_array
from pandas.io.formats.printing import pprint_thing, pprint_thing_encoded
class PyTablesScope(_scope.Scope):
    """Expression scope that also carries the table's queryable columns."""
    __slots__ = ("queryables",)
    # mapping of column name -> queryable metadata, used to validate terms
    queryables: Dict[str, Any]
    def __init__(
        self,
        level: int,
        global_dict=None,
        local_dict=None,
        queryables: Optional[Dict[str, Any]] = None,
    ):
        # level + 1 accounts for this extra frame in the scope chain.
        super().__init__(level + 1, global_dict=global_dict, local_dict=local_dict)
        self.queryables = queryables or {}
class Term(ops.Term):
    """A term in a PyTables expression.

    The left-hand side of a comparison must name a queryable column;
    right-hand-side names are resolved from the scope, falling back to
    their literal string value.
    """
    env: PyTablesScope
    def __new__(cls, name, env, side=None, encoding=None):
        # Non-string names are literals: dispatch to Constant instead.
        if isinstance(name, str):
            klass = cls
        else:
            klass = Constant
        return object.__new__(klass)
    def __init__(self, name, env: PyTablesScope, side=None, encoding=None):
        super().__init__(name, env, side=side, encoding=encoding)
    def _resolve_name(self):
        # must be a queryables
        if self.side == "left":
            # Note: The behavior of __new__ ensures that self.name is a str here
            if self.name not in self.env.queryables:
                raise NameError(f"name {repr(self.name)} is not defined")
            return self.name
        # resolve the rhs (and allow it to be None)
        try:
            return self.env.resolve(self.name, is_local=False)
        except UndefinedVariableError:
            # Unresolvable rhs names are kept as their literal value.
            return self.name
    # read-only property overwriting read/write property
    @property  # type: ignore[misc]
    def value(self):
        return self._value
class Constant(Term):
    """A literal value appearing in a PyTables expression."""

    def __init__(self, value, env: PyTablesScope, side=None, encoding=None):
        # Constants only make sense inside a PyTables scope.
        assert isinstance(env, PyTablesScope), type(env)
        super().__init__(value, env, side=side, encoding=encoding)

    def _resolve_name(self):
        # A constant resolves to itself; no scope lookup is performed.
        return self._name
class BinOp(ops.BinOp):
    """A binary operation between a queryable column (lhs) and a value (rhs),
    specialized into a condition (in-kernel) or a filter (post-read)."""
    # Above this many values, an equality expression becomes a filter instead.
    _max_selectors = 31
    op: str
    queryables: Dict[str, Any]
    condition: Optional[str]
    def __init__(self, op: str, lhs, rhs, queryables: Dict[str, Any], encoding):
        super().__init__(op, lhs, rhs)
        self.queryables = queryables
        self.encoding = encoding
        self.condition = None
    def _disallow_scalar_only_bool_ops(self):
        # Unlike numexpr evaluation, scalar-only boolean ops are allowed here.
        pass
    def prune(self, klass):
        """Recursively specialize this tree into `klass` (ConditionBinOp or
        FilterBinOp) instances, returning the evaluated result or None."""
        def pr(left, right):
            """ create and return a new specialized BinOp from myself """
            if left is None:
                return right
            elif right is None:
                return left
            k = klass
            # Two conditions (or two filters) combine into a "joint" node;
            # a mixed pair keeps whichever side already matches `k`.
            if isinstance(left, ConditionBinOp):
                if isinstance(right, ConditionBinOp):
                    k = JointConditionBinOp
                elif isinstance(left, k):
                    return left
                elif isinstance(right, k):
                    return right
            elif isinstance(left, FilterBinOp):
                if isinstance(right, FilterBinOp):
                    k = JointFilterBinOp
                elif isinstance(left, k):
                    return left
                elif isinstance(right, k):
                    return right
            return k(
                self.op, left, right, queryables=self.queryables, encoding=self.encoding
            ).evaluate()
        left, right = self.lhs, self.rhs
        if is_term(left) and is_term(right):
            res = pr(left.value, right.value)
        elif not is_term(left) and is_term(right):
            res = pr(left.prune(klass), right.value)
        elif is_term(left) and not is_term(right):
            res = pr(left.value, right.prune(klass))
        elif not (is_term(left) or is_term(right)):
            res = pr(left.prune(klass), right.prune(klass))
        return res
    def conform(self, rhs):
        """ inplace conform rhs """
        # Normalize the rhs to a flat list-like of values.
        if not is_list_like(rhs):
            rhs = [rhs]
        if isinstance(rhs, np.ndarray):
            rhs = rhs.ravel()
        return rhs
    @property
    def is_valid(self) -> bool:
        """ return True if this is a valid field """
        return self.lhs in self.queryables
    @property
    def is_in_table(self) -> bool:
        """
        return True if this is a valid column name for generation (e.g. an
        actual column in the table)
        """
        # Indexable-but-not-stored columns map to None in queryables.
        return self.queryables.get(self.lhs) is not None
    @property
    def kind(self):
        """ the kind of my field """
        return getattr(self.queryables.get(self.lhs), "kind", None)
    @property
    def meta(self):
        """ the meta of my field """
        return getattr(self.queryables.get(self.lhs), "meta", None)
    @property
    def metadata(self):
        """ the metadata of my field """
        return getattr(self.queryables.get(self.lhs), "metadata", None)
    def generate(self, v) -> str:
        """ create and return the op string for this TermValue """
        val = v.tostring(self.encoding)
        return f"({self.lhs} {self.op} {val})"
    def convert_value(self, v) -> "TermValue":
        """
        convert the expression that is in the term to something that is
        accepted by pytables
        """
        def stringify(value):
            if self.encoding is not None:
                return pprint_thing_encoded(value, encoding=self.encoding)
            return pprint_thing(value)
        kind = ensure_decoded(self.kind)
        meta = ensure_decoded(self.meta)
        # Dispatch on the column's declared kind/meta to build a TermValue
        # holding both the python value and its pytables representation.
        if kind == "datetime64" or kind == "datetime":
            if isinstance(v, (int, float)):
                v = stringify(v)
            v = ensure_decoded(v)
            v = Timestamp(v)
            if v.tz is not None:
                # tz-aware values are compared in UTC.
                v = v.tz_convert("UTC")
            return TermValue(v, v.value, kind)
        elif kind == "timedelta64" or kind == "timedelta":
            if isinstance(v, str):
                v = Timedelta(v).value
            else:
                # Numeric timedeltas are interpreted as seconds.
                v = Timedelta(v, unit="s").value
            return TermValue(int(v), v, kind)
        elif meta == "category":
            metadata = extract_array(self.metadata, extract_numpy=True)
            result = metadata.searchsorted(v, side="left")
            # result returns 0 if v is first element or if v is not in metadata
            # check that metadata contains v
            if not result and v not in metadata:
                result = -1
            return TermValue(result, result, "integer")
        elif kind == "integer":
            v = int(float(v))
            return TermValue(v, v, kind)
        elif kind == "float":
            v = float(v)
            return TermValue(v, v, kind)
        elif kind == "bool":
            if isinstance(v, str):
                # Common "falsy" spellings parse as False; anything else True.
                v = not v.strip().lower() in [
                    "false",
                    "f",
                    "no",
                    "n",
                    "none",
                    "0",
                    "[]",
                    "{}",
                    "",
                ]
            else:
                v = bool(v)
            return TermValue(v, v, kind)
        elif isinstance(v, str):
            # string quoting
            return TermValue(v, stringify(v), "string")
        else:
            raise TypeError(f"Cannot compare {v} of type {type(v)} to {kind} column")
    def convert_values(self):
        # Hook overridden nowhere visible here; intentionally a no-op.
        pass
class FilterBinOp(BinOp):
    """A binary op realized as a post-read filter (axis.isin / ~axis.isin)."""
    # (lhs column name, filter callable, Index of values) once evaluated.
    filter: Optional[Tuple[Any, Any, pd.Index]] = None
    def __repr__(self) -> str:
        if self.filter is None:
            return "Filter: Not Initialized"
        # NOTE(review): the outer "[" is never closed in this repr string;
        # kept byte-identical since reprs may be relied upon downstream.
        return pprint_thing(f"[Filter : [{self.filter[0]}] -> [{self.filter[1]}]")
    def invert(self):
        """ invert the filter """
        if self.filter is not None:
            self.filter = (
                self.filter[0],
                self.generate_filter_op(invert=True),
                self.filter[2],
            )
        return self
    def format(self):
        """ return the actual filter format """
        return [self.filter]
    def evaluate(self):
        # Returns self when a usable filter was produced, or None when the
        # condition path should be attempted instead.
        if not self.is_valid:
            raise ValueError(f"query term is not valid [{self}]")
        rhs = self.conform(self.rhs)
        values = list(rhs)
        if self.is_in_table:
            # if too many values to create the expression, use a filter instead
            if self.op in ["==", "!="] and len(values) > self._max_selectors:
                filter_op = self.generate_filter_op()
                self.filter = (self.lhs, filter_op, pd.Index(values))
                return self
            return None
        # equality conditions
        if self.op in ["==", "!="]:
            filter_op = self.generate_filter_op()
            self.filter = (self.lhs, filter_op, pd.Index(values))
        else:
            raise TypeError(
                f"passing a filterable condition to a non-table indexer [{self}]"
            )
        return self
    def generate_filter_op(self, invert: bool = False):
        # "!=" is implemented as the inverse of isin; `invert` flips either way.
        if (self.op == "!=" and not invert) or (self.op == "==" and invert):
            return lambda axis, vals: ~axis.isin(vals)
        else:
            return lambda axis, vals: axis.isin(vals)
class JointFilterBinOp(FilterBinOp):
    """Conjunction of two filters; it cannot be collapsed into one filter."""

    def evaluate(self):
        # Nothing to compute here; the joint filter is applied after reading.
        return self

    def format(self):
        raise NotImplementedError("unable to collapse Joint Filters")
class ConditionBinOp(BinOp):
    """A binary op realized as an in-kernel numexpr condition string."""
    def __repr__(self) -> str:
        return pprint_thing(f"[Condition : [{self.condition}]]")
    def invert(self):
        """ invert the condition """
        # if self.condition is not None:
        #     self.condition = "~(%s)" % self.condition
        # return self
        raise NotImplementedError(
            "cannot use an invert condition when passing to numexpr"
        )
    def format(self):
        """ return the actual ne format """
        return self.condition
    def evaluate(self):
        # Returns self with `condition` set, or None when a post-read filter
        # must be used instead (non-table column, or too many values).
        if not self.is_valid:
            raise ValueError(f"query term is not valid [{self}]")
        # convert values if we are in the table
        if not self.is_in_table:
            return None
        rhs = self.conform(self.rhs)
        values = [self.convert_value(v) for v in rhs]
        # equality conditions
        if self.op in ["==", "!="]:
            # too many values to create the expression?
            if len(values) <= self._max_selectors:
                # OR together one equality clause per value.
                vs = [self.generate(v) for v in values]
                self.condition = f"({' | '.join(vs)})"
            # use a filter after reading
            else:
                return None
        else:
            self.condition = self.generate(values[0])
        return self
class JointConditionBinOp(ConditionBinOp):
    """Combine two already-evaluated conditions with a boolean operator."""

    def evaluate(self):
        # Both operands have been evaluated, so their condition strings exist.
        lhs_cond = self.lhs.condition
        rhs_cond = self.rhs.condition
        self.condition = f"({lhs_cond} {self.op} {rhs_cond})"
        return self
class UnaryOp(ops.UnaryOp):
    """Unary inversion ("~") applied to a pruned condition or filter."""
    def prune(self, klass):
        if self.op != "~":
            raise NotImplementedError("UnaryOp only support invert type ops")
        operand = self.operand
        operand = operand.prune(klass)
        # Invert only when the operand actually produced something for the
        # requested kind: a condition string for ConditionBinOp, or a filter
        # tuple for FilterBinOp.  Note: `and` binds tighter than `or` in the
        # expression below.
        if operand is not None and (
            issubclass(klass, ConditionBinOp)
            and operand.condition is not None
            or not issubclass(klass, ConditionBinOp)
            and issubclass(klass, FilterBinOp)
            and operand.filter is not None
        ):
            return operand.invert()
        return None
class PyTablesExprVisitor(BaseExprVisitor):
    """AST visitor translating a query string into Term/Constant/BinOp nodes."""
    const_type = Constant
    term_type = Term
    def __init__(self, env, engine, parser, **kwargs):
        super().__init__(env, engine, parser)
        for bin_op in self.binary_ops:
            bin_node = self.binary_op_nodes_map[bin_op]
            # Each visit_<Node> handler returns a partial that constructs a
            # BinOp (carrying queryables/encoding from **kwargs) — presumably
            # called later with lhs/rhs by the base visitor; confirm against
            # BaseExprVisitor.  `bin_op=bin_op` pins the loop variable at
            # definition time (late-binding closure fix).
            setattr(
                self,
                f"visit_{bin_node}",
                lambda node, bin_op=bin_op: partial(BinOp, bin_op, **kwargs),
            )
    def visit_UnaryOp(self, node, **kwargs):
        # `not`/`~` become inversion; unary minus folds into a negated constant.
        if isinstance(node.op, (ast.Not, ast.Invert)):
            return UnaryOp("~", self.visit(node.operand))
        elif isinstance(node.op, ast.USub):
            return self.const_type(-self.visit(node.operand).value, self.env)
        elif isinstance(node.op, ast.UAdd):
            raise NotImplementedError("Unary addition not supported")
    def visit_Index(self, node, **kwargs):
        return self.visit(node.value).value
    def visit_Assign(self, node, **kwargs):
        # Treat `a = b` in a query as the comparison `a == b`.
        cmpr = ast.Compare(
            ops=[ast.Eq()], left=node.targets[0], comparators=[node.value]
        )
        return self.visit(cmpr)
    def visit_Subscript(self, node, **kwargs):
        # only allow simple subscripts
        value = self.visit(node.value)
        slobj = self.visit(node.slice)
        try:
            value = value.value
        except AttributeError:
            pass
        if isinstance(slobj, Term):
            # In py39 np.ndarray lookups with Term containing int raise
            slobj = slobj.value
        try:
            return self.const_type(value[slobj], self.env)
        except TypeError as err:
            raise ValueError(
                f"cannot subscript {repr(value)} with {repr(slobj)}"
            ) from err
    def visit_Attribute(self, node, **kwargs):
        attr = node.attr
        value = node.value
        ctx = type(node.ctx)
        if ctx == ast.Load:
            # resolve the value
            resolved = self.visit(value)
            # try to get the value to see if we are another expression
            try:
                resolved = resolved.value
            except (AttributeError):
                pass
            try:
                return self.term_type(getattr(resolved, attr), self.env)
            except AttributeError:
                # something like datetime.datetime where scope is overridden
                if isinstance(value, ast.Name) and value.id == attr:
                    return resolved
        raise ValueError(f"Invalid Attribute context {ctx.__name__}")
    def translate_In(self, op):
        # Membership is rewritten as equality for pytables.
        return ast.Eq() if isinstance(op, ast.In) else op
    def _rewrite_membership_op(self, node, left, right):
        return self.visit(node.op), node.op, left, right
def _validate_where(w):
    """
    Validate that the where statement is of the right type.

    The type may either be String, Expr, or list-like of Exprs.

    Parameters
    ----------
    w : String term expression, Expr, or list-like of Exprs.

    Returns
    -------
    where : The original where clause if the check was successful.

    Raises
    ------
    TypeError : An invalid data type was passed in for w (e.g. dict).
    """
    # Accept strings, PyTablesExpr instances, and list-likes of either.
    if isinstance(w, (PyTablesExpr, str)) or is_list_like(w):
        return w
    raise TypeError(
        "where must be passed as a string, PyTablesExpr, "
        "or list-like of PyTablesExpr"
    )
class PyTablesExpr(expr.Expr):
    """
    Hold a pytables-like expression, comprised of possibly multiple 'terms'.
    Parameters
    ----------
    where : string term expression, PyTablesExpr, or list-like of PyTablesExprs
    queryables : a "kinds" map (dict of column name -> kind), or None if column
        is non-indexable
    encoding : an encoding that will encode the query terms
    Returns
    -------
    a PyTablesExpr object
    Examples
    --------
    'index>=date'
    "columns=['A', 'D']"
    'columns=A'
    'columns==A'
    "~(columns=['A','B'])"
    'index>df.index[3] & string="bar"'
    '(index>df.index[3] & index<=df.index[6]) | string="bar"'
    "ts>=Timestamp('2012-02-01')"
    "major_axis>=20130101"
    """
    _visitor: Optional[PyTablesExprVisitor]
    env: PyTablesScope
    def __init__(
        self,
        where,
        queryables: Optional[Dict[str, Any]] = None,
        encoding=None,
        scope_level: int = 0,
    ):
        where = _validate_where(where)
        self.encoding = encoding
        self.condition = None
        self.filter = None
        self.terms = None
        self._visitor = None
        # capture the environment if needed
        local_dict: DeepChainMap[Any, Any] = DeepChainMap()
        if isinstance(where, PyTablesExpr):
            # Reuse the scope and raw expression of an existing expr.
            local_dict = where.env.scope
            _where = where.expr
        elif isinstance(where, (list, tuple)):
            where = list(where)
            for idx, w in enumerate(where):
                if isinstance(w, PyTablesExpr):
                    local_dict = w.env.scope
                else:
                    w = _validate_where(w)
                    where[idx] = w
            # AND together all sub-expressions.
            _where = " & ".join(f"({w})" for w in com.flatten(where))
        else:
            _where = where
        self.expr = _where
        self.env = PyTablesScope(scope_level + 1, local_dict=local_dict)
        # Only parse string expressions; pre-built terms are left as None.
        if queryables is not None and isinstance(self.expr, str):
            self.env.queryables.update(queryables)
            self._visitor = PyTablesExprVisitor(
                self.env,
                queryables=queryables,
                parser="pytables",
                engine="pytables",
                encoding=encoding,
            )
            self.terms = self.parse()
    def __repr__(self) -> str:
        if self.terms is not None:
            return pprint_thing(self.terms)
        return pprint_thing(self.expr)
    def evaluate(self):
        """ create and return the numexpr condition and filter """
        try:
            self.condition = self.terms.prune(ConditionBinOp)
        except AttributeError as err:
            raise ValueError(
                f"cannot process expression [{self.expr}], [{self}] "
                "is not a valid condition"
            ) from err
        try:
            self.filter = self.terms.prune(FilterBinOp)
        except AttributeError as err:
            raise ValueError(
                f"cannot process expression [{self.expr}], [{self}] "
                "is not a valid filter"
            ) from err
        return self.condition, self.filter
class TermValue:
    """Hold a raw value and its pytables-converted form for a condition/filter."""

    def __init__(self, value, converted, kind: str):
        assert isinstance(kind, str), kind
        self.value = value
        self.converted = converted
        self.kind = kind

    def tostring(self, encoding) -> str:
        """ quote the string if not encoded else encode and return """
        if self.kind == "string":
            if encoding is None:
                # Unencoded strings must be quoted for the expression text.
                return f'"{self.converted}"'
            return str(self.converted)
        if self.kind == "float":
            # python 2 str(float) is not always
            # round-trippable so use repr()
            return repr(self.converted)
        return str(self.converted)
def maybe_expression(s) -> bool:
    """ loose checking if s is a pytables-acceptable expression """
    if not isinstance(s, str):
        return False
    operators = PyTablesExprVisitor.binary_ops + PyTablesExprVisitor.unary_ops + ("=",)
    # An expression must contain at least one operator.
    for op in operators:
        if op in s:
            return True
    return False
| {
"content_hash": "83d19c625273d8332dd694baa64b24e8",
"timestamp": "",
"source": "github",
"line_count": 637,
"max_line_length": 88,
"avg_line_length": 30.72056514913658,
"alnum_prop": 0.5570034237825131,
"repo_name": "jreback/pandas",
"id": "b8198866878171c2b175bc527c77b36228150dac",
"size": "19569",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pandas/core/computation/pytables.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "4879"
},
{
"name": "C",
"bytes": "406353"
},
{
"name": "C++",
"bytes": "17193"
},
{
"name": "HTML",
"bytes": "606963"
},
{
"name": "Makefile",
"bytes": "529"
},
{
"name": "Python",
"bytes": "14930989"
},
{
"name": "Shell",
"bytes": "29317"
},
{
"name": "Smarty",
"bytes": "2040"
}
],
"symlink_target": ""
} |
# Packaging metadata for whisper-backup; passed verbatim to setup() below.
setup_args = {
    "name": "whisper-backup",
    "version": "0.1.0",
    "platforms": ["any"],
    "description": "Backup whisper DB files into S3 or Swift",
    "long_description": """\
Whisper-backup stores compressed WSP files in Amazon S3 or OpenStack Swift
from a Graphite setup. It can backup and restore selected metric globs,
has a retention policy setting, and does not stage backups on the local
server.
""",
    "author": "Jack Neely",
    "author_email": "jjneely@42lines.net",
    "maintainer": "Jack Neely",
    "maintainer_email": "jjneely@42lines.net",
    "url": 'https://github.com/jjneely/whisper-backup',
    "license": "Apache Software License",
    "packages": ["whisperbackup"],
    "install_requires": ['lockfile', 'whisper'],
    "classifiers": [
        "Development Status :: 4 - Beta",
        "Intended Audience :: System Administrators",
        "License :: OSI Approved :: Apache Software License",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
        "Topic :: System :: Systems Administration"
    ],
    # Installs a `whisper-backup` console script pointing at main().
    "entry_points": {
        "console_scripts": [
            "whisper-backup = whisperbackup.whisperbackup:main"
        ]
    }
}
# Prefer setuptools; fall back to distutils when it is unavailable.
# NOTE(review): distutils.core.setup does not understand "entry_points" or
# "install_requires" -- confirm the fallback path still produces a usable
# install.
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup
setup(**setup_args)
| {
"content_hash": "b21a2ebab8b9759ed58069677f80ed45",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 74,
"avg_line_length": 33.3,
"alnum_prop": 0.6381381381381381,
"repo_name": "lv0/whisper-backup",
"id": "83f53ec3a857e1a5947c5a0f84577adc741db97c",
"size": "1987",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "38683"
}
],
"symlink_target": ""
} |
from typing import Optional, List
import google.auth.transport.requests
from google.cloud import bigquery
class BackendBQClient:
  """Read-only BigQuery client for data-lineage metadata.

  All queries target the `etl_log` dataset's data-lineage views; results are
  filtered by build datetime, subject area (domain) and target table name.
  """
  # Distinct build timestamps, most recent first.
  BUILD_DATETIMES_QUERY = """
      SELECT DISTINCT dlb.dt_build_datetime
      FROM `etl_log.vw_data_lineage_build` AS dlb
      ORDER BY dlb.dt_build_datetime DESC
  """
  # Build id for an exact build datetime.
  DL_BUILD_ID_QUERY = """
      SELECT data_lineage_build_id
      FROM `etl_log.vw_data_lineage_build` AS dlb
      WHERE dlb.dt_build_datetime = DATETIME ('{datetime}')
  """
  # Subject areas (domains) present in a given build.
  DOMAINS_QUERY = """
      SELECT DISTINCT dlo.subject_area
      FROM `etl_log.vw_data_lineage_object` AS dlo
      WHERE dlo.data_lineage_build_id = '{build_id}'
      ORDER BY dlo.subject_area ASC
  """
  # Target table names for a build/domain pair.
  JOB_BUILD_IDS_QUERY = """
      SELECT DISTINCT target_table_name
      FROM `etl_log.vw_data_lineage_object` AS dlo
      WHERE dlo.data_lineage_build_id = '{build_id}'
        AND dlo.subject_area = '{domain}'
      ORDER BY dlo.target_table_name ASC
  """
  # Lineage objects; LIKE with '%' placeholders widens any filter.
  DLO_QUERY = """
      SELECT
        dlo.subject_area,
        dlo.object_id,
        dlo.parent_object_id,
        dlo.object_type,
        dlo.target_table_name,
        dlo.object_data
      FROM `etl_log.vw_data_lineage_object` AS dlo
      WHERE dlo.data_lineage_build_id LIKE '{build_id}'
        AND dlo.subject_area LIKE '{domain}'
        AND dlo.target_table_name LIKE '{job_build_id}'
        AND dlo.data_lineage_type = '{dl_type}'
  """
  # Connections between lineage objects; same filtering scheme as DLO_QUERY.
  DLC_QUERY = """
      SELECT
        dlc.subject_area,
        dlc.source_object_id,
        dlc.target_object_id,
        dlc.connection_data
      FROM `etl_log.vw_data_lineage_objects_connection` AS dlc
      WHERE dlc.data_lineage_build_id LIKE '{build_id}'
        AND dlc.subject_area LIKE '{domain}'
        AND dlc.target_table_name LIKE '{job_build_id}'
        AND dlc.data_lineage_type = '{dl_type}'
  """
  AUTH_SCOPES = ["https://www.googleapis.com/auth/cloud-platform"]
  def _auth(self) -> None:
    """Perform authentication using default credentials."""
    credentials, _ = google.auth.default(scopes=self._auth_scopes)
    auth_req = google.auth.transport.requests.Request()
    credentials.refresh(auth_req)
  def __init__(self, project, auth_scopes: Optional[List] = None) -> None:
    # auth_scopes falls back to the cloud-platform scope when not provided.
    self.project = project
    self._auth_scopes = auth_scopes or self.AUTH_SCOPES
    self._auth()
    self.client = bigquery.Client(project=project)
  def get_build_datetimes(self) -> List[str]:
    """Return all distinct build datetimes, most recent first."""
    query = self.BUILD_DATETIMES_QUERY.format()
    result = self.client.query(query).result()
    return [r[0] for r in result]
  def get_build_id_by_datetime(self, datetime: str) -> str:
    """Return the single build id recorded for *datetime*.

    Raises IndexError when no build matches.
    """
    query = self.DL_BUILD_ID_QUERY.format(datetime=datetime)
    result = self.client.query(query).result()
    return [r[0] for r in result][0]
  def get_domains(self, datetime: str) -> List[str]:
    """Return the subject areas present in the build at *datetime*."""
    build_id = self.get_build_id_by_datetime(datetime)
    query = self.DOMAINS_QUERY.format(build_id=build_id)
    result = self.client.query(query).result()
    return [r[0] for r in result]
  def get_job_build_ids(self, datetime: str, domain: str) -> List[str]:
    """Return the target table names for *domain* in the build at *datetime*."""
    build_id = self.get_build_id_by_datetime(datetime)
    query = self.JOB_BUILD_IDS_QUERY.format(build_id=build_id,
                                            domain=domain)
    result = self.client.query(query).result()
    return [r[0] for r in result]
  def get_project_level_objects(self, datetime: str) -> List:
    """Return all PROJECT_LEVEL lineage objects for the build at *datetime*."""
    build_id = self.get_build_id_by_datetime(datetime)
    query = self.DLO_QUERY.format(
        build_id=build_id,
        domain="%",
        job_build_id="%",
        dl_type="PROJECT_LEVEL",
    )
    return list(self.client.query(query).result())
  def get_project_level_connections(self, datetime: str) -> List:
    """Return all PROJECT_LEVEL object connections for the build."""
    build_id = self.get_build_id_by_datetime(datetime)
    query = self.DLC_QUERY.format(
        build_id=build_id,
        domain="%",
        job_build_id="%",
        dl_type="PROJECT_LEVEL",
    )
    return list(self.client.query(query).result())
  def get_domain_level_objects(self, datetime: str, domain: str) -> List:
    """Return DOMAIN_LEVEL lineage objects for *domain* in the build."""
    build_id = self.get_build_id_by_datetime(datetime)
    query = self.DLO_QUERY.format(
        build_id=build_id,
        domain=domain,
        job_build_id="%",
        dl_type="DOMAIN_LEVEL",
    )
    return list(self.client.query(query).result())
  def get_domain_level_connections(self, datetime: str, domain: str) -> List:
    """Return DOMAIN_LEVEL object connections for *domain* in the build."""
    build_id = self.get_build_id_by_datetime(datetime)
    query = self.DLC_QUERY.format(
        build_id=build_id,
        domain=domain,
        job_build_id="%",
        dl_type="DOMAIN_LEVEL",
    )
    return list(self.client.query(query).result())
  def get_query_level_objects(self, datetime: str, domain: str,
                              job_build_id: str) -> List:
    """Return QUERY_LEVEL lineage objects for one target table."""
    build_id = self.get_build_id_by_datetime(datetime)
    query = self.DLO_QUERY.format(
        build_id=build_id,
        domain=domain,
        job_build_id=job_build_id,
        dl_type="QUERY_LEVEL",
    )
    return list(self.client.query(query).result())
  def get_query_level_connections(self, datetime: str, domain: str,
                                  job_build_id: str) -> List:
    """Return QUERY_LEVEL object connections for one target table."""
    build_id = self.get_build_id_by_datetime(datetime)
    query = self.DLC_QUERY.format(
        build_id=build_id,
        domain=domain,
        job_build_id=job_build_id,
        dl_type="QUERY_LEVEL",
    )
    return list(self.client.query(query).result())
| {
"content_hash": "f85ef717a21b19f5e8013b7c97c6b94e",
"timestamp": "",
"source": "github",
"line_count": 157,
"max_line_length": 77,
"avg_line_length": 33.72611464968153,
"alnum_prop": 0.6381491973559962,
"repo_name": "google/grizzly",
"id": "6c06c519fd0648720692cec3a9322e78f8a92ecd",
"size": "5871",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "grizzly_data_lineage/backend/bq.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1075"
},
{
"name": "Dockerfile",
"bytes": "1294"
},
{
"name": "HCL",
"bytes": "107097"
},
{
"name": "HTML",
"bytes": "1152"
},
{
"name": "JavaScript",
"bytes": "52626"
},
{
"name": "Jinja",
"bytes": "8031"
},
{
"name": "Python",
"bytes": "569193"
},
{
"name": "Shell",
"bytes": "13761"
}
],
"symlink_target": ""
} |
import json
import os.path
import struct
import typing.io
import research.coding.varbyte
import research.utils
def raise_property_not_found(prop):
    """Raise KeyError reporting that *prop* is missing from the metadata."""
    message = "property '%s' not found" % prop
    raise KeyError(message)
class Metadata:
    """Common metadata (type, name, directory) loaded from a JSON object."""

    f_coding = "coding"
    f_path = "paths"
    f_type = "type"
    f_dir = "dir"
    f_name = "name"

    def __init__(self, properties):
        """
        :param properties: dict-like JSON object with properties
        """
        # Each required field is checked (in order) and copied to an attribute.
        required = (
            (Metadata.f_type, "type"),
            (Metadata.f_name, "name"),
            (Metadata.f_dir, "dir"),
        )
        for field, attribute in required:
            if field not in properties:
                raise_property_not_found(field)
            setattr(self, attribute, properties[field])
class Index:
    """An index whose on-disk layout is described by IndexMetadata."""

    def __init__(self, properties):
        """
        :param properties: dict-like JSON object with index properties
        """
        self.metadata = IndexMetadata(properties)
class IndexWriter:
    """Writes posting counts for the index described by *metadata*.

    ``metadata`` is expected to be an IndexMetadata instance.
    """

    def __init__(self, metadata):
        # Fixed: counts_path is a method on IndexMetadata, so it must be
        # called; the original passed the bound method object itself to
        # open(), which raises TypeError.
        self.counts_stream = open(metadata.counts_path(), 'bw')
        # NOTE(review): `coder_factory` is not defined on IndexMetadata in
        # this file -- presumably attached elsewhere; confirm before use.
        self.counts_encoder = metadata.coder_factory.encoder()

    def close(self):
        """Close the underlying counts stream."""
        self.counts_stream.close()
class IndexMetadata(Metadata):
    """File-system layout of an index: every component file lives in `dir`
    and is named `<name><extension>`."""

    def __init__(self, properties):
        super(IndexMetadata, self).__init__(properties)

    def _file(self, extension):
        # Single place encoding the naming convention for index files.
        return os.path.join(self.dir, self.name + extension)

    def docs_path(self):
        return self._file(".docs")

    def docs_offsets_path(self):
        return self._file(".docs#offsets")

    def counts_path(self):
        return self._file(".counts")

    def counts_offsets_path(self):
        return self._file(".counts#offsets")

    def frequencies_path(self):
        return self._file(".frequencies")

    def terms_path(self):
        return self._file(".terms")
class IndexFactory:
    """Construct index objects from a metadata file or a JSON properties dict."""
    @staticmethod
    def from_path(path):
        # Read the JSON metadata file and instantiate whatever it describes.
        with open(path) as file:
            properties = json.loads(file.read())
            # NOTE(review): "dir" is set to the metadata file's own path, not
            # its parent directory -- confirm downstream path joins expect this.
            properties["dir"] = path
            return IndexFactory.from_json(properties)
    @staticmethod
    def from_json(properties):
        # The "type" property names the fully-qualified class to instantiate.
        if Metadata.f_type not in properties:
            raise_property_not_found(Metadata.f_type)
        return research.utils.get_class_of(properties[Metadata.f_type])(properties)
def write_header(header, stream: typing.BinaryIO) -> int:
    """
    Write a header object to a byte stream.

    The header is serialized as UTF-8 JSON, preceded by its byte length as a
    4-byte big-endian prefix.  (Annotation fixed: the deprecated ``typing.io``
    namespace was removed in Python 3.12; ``typing.BinaryIO`` is equivalent.)

    :param header: a JSON object describing properties of the file, e.g., encoding
    :param stream: a byte stream to write the header to
    :return: the number of bytes written (length prefix included)
    """
    encoded = json.dumps(header).encode()
    encoded_len = len(encoded)
    # The 4-byte big-endian length prefix tells the reader how much to consume.
    stream.write(encoded_len.to_bytes(4, byteorder='big'))
    stream.write(encoded)
    return encoded_len + 4
def read_header(stream: typing.BinaryIO):
    """
    Read a header object from a byte stream written by ``write_header``.

    (Annotation fixed: the deprecated ``typing.io`` namespace was removed in
    Python 3.12; ``typing.BinaryIO`` is equivalent.)

    :param stream: a byte stream positioned at a 4-byte big-endian length
        prefix followed by that many bytes of UTF-8 JSON
    :return: (<a JSON object describing properties of the file, e.g., encoding>,
              <the number of bytes read, prefix included>)
    """
    # '>i' = big-endian 4-byte signed int, matching write_header's prefix.
    length = struct.unpack('>i', stream.read(4))[0]
    return json.loads(stream.read(length).decode()), length + 4
def load_numbers(file_path: str) -> typing.List[int]:
    """
    Load numbers from a file to a list.
    :param file_path: the path to an offset file
    :return: a list of offsets
    """
    with open(file_path, 'br') as f:
        header, l = read_header(f)
        # The header must declare both the coding (decoder class name) and
        # how many numbers follow it in the stream.
        assert Metadata.f_coding in header, \
            "File {0} metadata does not contain encoding information".format(file_path)
        assert 'count' in header, \
            "File {0} metadata does not contain the count".format(file_path)
        decoder = research.utils.get_class_of(header[Metadata.f_coding]).Decoder(f)
        return [decoder.decode() for i in range(header['count'])]
def getp(header, property: str):
    """Return the value of *property* from *header*.

    Fails with AssertionError when the property is absent.  (The parameter
    name shadows the ``property`` builtin but is part of the public
    signature, so it is kept.)
    """
    missing_msg = "could not found property {0} in the header".format(property)
    assert property in header, missing_msg
    return header[property]
def resolve_decoder(header, stream):
    # Look up the coder class named by the header's "coding" property and
    # return its Decoder bound to *stream*.
    return research.utils.get_class_of(getp(header, Metadata.f_coding)).Decoder(stream)
| {
"content_hash": "42b034a39188dc4bf12ad69cf39615f7",
"timestamp": "",
"source": "github",
"line_count": 149,
"max_line_length": 98,
"avg_line_length": 29.496644295302012,
"alnum_prop": 0.6368600682593857,
"repo_name": "west-tandon/ReSearch",
"id": "51a789b9d3d7b84a0eae071d51790a5dc13900c8",
"size": "4395",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "research/index/common.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "2573"
},
{
"name": "Makefile",
"bytes": "1533"
},
{
"name": "Python",
"bytes": "54787"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
import logging
import six
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from sentry.api.base import Endpoint
logger = logging.getLogger("sentry.integrations.cloudflare")
class CloudflareMetadataEndpoint(Endpoint):
    """Expose the authenticated user's identity to the Cloudflare integration."""

    permission_classes = (IsAuthenticated,)

    def get(self, request):
        user = request.user
        logger.info("cloudflare.metadata", extra={"user_id": user.id})
        # The user id is serialized as text for the Cloudflare payload.
        metadata = {
            "username": user.username,
            "userId": six.text_type(user.id),
            "email": user.email,
        }
        return Response({"metadata": metadata})
| {
"content_hash": "76c5c7c01a1218309c3b048d08a34b1f",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 78,
"avg_line_length": 27.48148148148148,
"alnum_prop": 0.6212938005390836,
"repo_name": "beeftornado/sentry",
"id": "788f2bfcee02bdd7a2b19807d85e3e2c53c99ee9",
"size": "742",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/sentry/integrations/cloudflare/metadata.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "157195"
},
{
"name": "HTML",
"bytes": "197026"
},
{
"name": "JavaScript",
"bytes": "380379"
},
{
"name": "Makefile",
"bytes": "2832"
},
{
"name": "Python",
"bytes": "6473603"
}
],
"symlink_target": ""
} |
# Scrapy settings module (presumably; matches Scrapy's settings names).
# Do not follow links more than one hop away from the start URLs.
DEPTH_LIMIT = 1
# Module(s) where spider classes are discovered.
SPIDER_MODULES = ['url_lover.spiders']
# Enabled item pipelines; the value (100) is the pipeline's execution order.
ITEM_PIPELINES = {
    'url_lover.pipelines.DuplicatesPipeline': 100
}
| {
"content_hash": "d262f279d1012f3ae47e513658c4ddf4",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 49,
"avg_line_length": 18.285714285714285,
"alnum_prop": 0.703125,
"repo_name": "m-vdb/url-lover",
"id": "60fd9265da6339f408085f8f19cbb2af1bb268c4",
"size": "128",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "url_lover/settings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "160"
},
{
"name": "Makefile",
"bytes": "447"
},
{
"name": "Python",
"bytes": "1734"
}
],
"symlink_target": ""
} |
from emailoto.token_client import TokenClient
from emailoto.authentication import EmailOtoAuthBackend
import time
from .test_base import EmailOtoTest
from django.core.urlresolvers import reverse
from emailoto.config import EmailOtoConfig, CONFIG
class TokenClientTest(EmailOtoTest):
    """Unit tests for TokenClient's counter and email tokens.

    The expiry tests assume the test configuration uses a ~1 second TTL for
    both token types (hence the 1.1 s sleeps) -- confirm in EmailOtoTest.
    """
    def test_set_counter(self):
        """Setting a token should put it in redis with a count of '0'."""
        tc = TokenClient()
        token = tc._set_counter()
        key = tc._redis_key(token)
        self.assertEqual(tc._redis.get(key), '0')
    def test_validate_counter(self):
        """A valid token with return True and increment the counter."""
        tc = TokenClient()
        token = tc._set_counter()
        key = tc._redis_key(token)
        self.assertTrue(tc._validate_counter(token))
        self.assertEqual(tc._redis.get(key), '1')
    def test_more_than_one_validation(self):
        """Only the first validation should return True."""
        tc = TokenClient()
        token = tc._set_counter()
        self.assertTrue(tc._validate_counter(token))
        self.assertFalse(tc._validate_counter(token))
    def test_invalid_token(self):
        """Non-existant keys should be False."""
        tc = TokenClient()
        self.assertFalse(tc._validate_counter('world'))
    def test_set_email(self):
        """Test setting an email and getting a token for that email."""
        tc = TokenClient()
        token = tc._set_email('test@example.com')
        key = tc._redis_key(token)
        self.assertEqual(tc._redis.get(key), 'test@example.com')
    def test_set_and_validate_email(self):
        """Set and validate an email."""
        tc = TokenClient()
        token = tc._set_email('test@example.com')
        self.assertEqual(tc._validate_email(token), 'test@example.com')
    def test_get_token_pair(self):
        # A pair consists of an email token and a counter token.
        e_token, c_token = TokenClient().get_token_pair('A@B.com')
        self.assertIsNotNone(e_token)
        self.assertIsNotNone(c_token)
    def test_validate_token_pair(self):
        # A fresh pair validates back to the original email address.
        e_token, c_token = TokenClient().get_token_pair('A@B.com')
        email_result = TokenClient().validate_token_pair(e_token, c_token)
        self.assertEqual(email_result, 'A@B.com')
    def test_multiple_validation_attempts(self):
        # A pair is single-use: the second validation must raise.
        e_token, c_token = TokenClient().get_token_pair('A@B.com')
        TokenClient().validate_token_pair(e_token, c_token)
        with self.assertRaises(TokenClient.InvalidTokenPair):
            TokenClient().validate_token_pair(e_token, c_token)
    def test_expired_email(self):
        """An email token should expire."""
        tc = TokenClient()
        token = tc._set_email('test@example.com')
        time.sleep(1.1)
        self.assertIsNone(tc._validate_email(token))
    def test_expired_counter_token(self):
        """A counter token should expire."""
        tc = TokenClient()
        token = tc._set_counter()
        time.sleep(1.1)
        self.assertFalse(tc._validate_counter(token))
    def test_expired_tokens(self):
        """Test both tokens at once for expiration."""
        e_token, c_token = TokenClient().get_token_pair('A@B.com')
        time.sleep(1.1)
        with self.assertRaises(TokenClient.InvalidTokenPair):
            TokenClient().validate_token_pair(e_token, c_token)
class AuthenticationTest(EmailOtoTest):
    """Tests for the EmailOtoAuthBackend token-pair authentication."""
    def test_valid_auth(self):
        """A valid token pair authenticates to a user with the right email."""
        e_token, c_token = TokenClient().get_token_pair('A@B.com')
        backend = EmailOtoAuthBackend()
        user = backend.authenticate(e_token, c_token)
        self.assertEqual(user.email, 'A@B.com')
    def test_invalid_auth(self):
        """Two bogus tokens must not authenticate."""
        backend = EmailOtoAuthBackend()
        user = backend.authenticate('fake-email-token', 'fake-counter-token')
        self.assertFalse(user)
    def test_valid_email_invalid_counter(self):
        """A good email token with a bad counter token must fail."""
        e_token, c_token = TokenClient().get_token_pair('A@B.com')
        backend = EmailOtoAuthBackend()
        user = backend.authenticate(e_token, 'fake-counter-token')
        self.assertFalse(user)
    def test_invalid_email_valid_counter(self):
        """A bad email token with a good counter token must fail."""
        e_token, c_token = TokenClient().get_token_pair('A@B.com')
        backend = EmailOtoAuthBackend()
        user = backend.authenticate('fake-email-token', c_token)
        self.assertFalse(user)
    def test_full_circle_auth(self):
        """Following the emailed auth URL redirects to the login target."""
        url = EmailOtoAuthBackend.get_auth_url('test@example.com')
        response = self.client.get(url)
        self.assertEqual(response.status_code, 302)
        self.assertIn(CONFIG.login_redirect, response['Location'])
class ValidateViewsTest(EmailOtoTest):
    """Integration tests for the emailoto-validate view."""

    def test_valid_get_request(self):
        """A valid token pair redirects (302) to the configured login URL."""
        e_token, c_token = TokenClient().get_token_pair('A@B.com')
        url = reverse('emailoto-validate') + '?a=%s&b=%s' % (e_token, c_token)
        response = self.client.get(url)
        self.assertEqual(response.status_code, 302)
        self.assertIn(CONFIG.login_redirect, response['Location'])

    def test_invalid_get_request(self):
        """Bogus tokens are rejected with a 403."""
        e_token, c_token = 'fake-email-token', 'fake-counter-token'
        url = reverse('emailoto-validate') + '?a=%s&b=%s' % (e_token, c_token)
        # Fixed: the original read `response = response = self.client.get(url)`,
        # a duplicated assignment left over from editing.
        response = self.client.get(url)
        self.assertEqual(response.status_code, 403)
class ConfigTest(EmailOtoTest):
    """Tests for EmailOtoConfig error handling."""
    def test_missing_config(self):
        """Looking up an unknown setting key raises ImproperlyConfigured."""
        with self.assertRaises(EmailOtoConfig.ImproperlyConfigured):
            EmailOtoConfig().get_or_raise('jhbjhbjh')
| {
"content_hash": "913123e4d859573ac37511f1dce1abed",
"timestamp": "",
"source": "github",
"line_count": 141,
"max_line_length": 78,
"avg_line_length": 38.226950354609926,
"alnum_prop": 0.6428571428571429,
"repo_name": "qdonnellan/django_emailoto",
"id": "e8f98d5a66a1af8d1386f2f8ec3ca2a82997e0ad",
"size": "5390",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/tests.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "38"
},
{
"name": "Python",
"bytes": "15419"
}
],
"symlink_target": ""
} |
"""
Sponge Knowledge Base
Processors metadata
"""
from org.openksavi.sponge.examples import PowerEchoMetadataAction
class UpperEchoAction(Action):
    """An action that is hidden from listings and always returns nothing."""
    def onConfigure(self):
        # Mark this action as not visible in action listings.
        hidden_features = {"visibility": False}
        self.withFeatures(hidden_features)
    def onCall(self, text):
        # Deliberately produce no result.
        return None
def onLoad():
    # Register the Java-defined action so it is available alongside the
    # Python-defined ones in this knowledge base.
    sponge.enableJava(PowerEchoMetadataAction)
| {
"content_hash": "9d553e3b81f33674d79b3246fc247510",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 65,
"avg_line_length": 23.066666666666666,
"alnum_prop": 0.7109826589595376,
"repo_name": "softelnet/sponge",
"id": "e4263d633b4c11ad260103522ec90f003ccfd4e3",
"size": "346",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sponge-integration-tests/examples/core/processors_metadata.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "482"
},
{
"name": "Dockerfile",
"bytes": "2389"
},
{
"name": "Groovy",
"bytes": "70914"
},
{
"name": "HTML",
"bytes": "6759"
},
{
"name": "Java",
"bytes": "3300560"
},
{
"name": "JavaScript",
"bytes": "70716"
},
{
"name": "Kotlin",
"bytes": "113542"
},
{
"name": "Mustache",
"bytes": "38"
},
{
"name": "Python",
"bytes": "426240"
},
{
"name": "Ruby",
"bytes": "65491"
},
{
"name": "SCSS",
"bytes": "6217"
},
{
"name": "Shell",
"bytes": "1388"
}
],
"symlink_target": ""
} |
import sys
from utils import describe_data
# The data folder path is taken from the first command-line argument;
# no validation is done here, so a missing argument raises IndexError.
data_folder = sys.argv[1]
describe_data(data_folder)
| {
"content_hash": "ba32b7b00424cee2995a071161006617",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 31,
"avg_line_length": 16.333333333333332,
"alnum_prop": 0.7755102040816326,
"repo_name": "hristo-vrigazov/behavioral-cloning",
"id": "4c7dc10a822813667b78a0f3e774fd775f368a6f",
"size": "98",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "describe_data_folder.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "2618852"
},
{
"name": "Python",
"bytes": "29120"
},
{
"name": "Shell",
"bytes": "263"
}
],
"symlink_target": ""
} |
from .Child import Child
from .Node import Node # noqa: I201
EXPR_NODES = [
# An inout expression.
# &x
Node('InOutExpr', kind='Expr',
children=[
Child('Ampersand', kind='PrefixAmpersandToken'),
Child('Expression', kind='Expr'),
]),
# A #column expression.
Node('PoundColumnExpr', kind='Expr',
children=[
Child('PoundColumn', kind='PoundColumnToken'),
]),
Node('TupleExprElementList', kind='SyntaxCollection',
element='TupleExprElement'),
Node('ArrayElementList', kind='SyntaxCollection',
element='ArrayElement'),
Node('DictionaryElementList', kind='SyntaxCollection',
element='DictionaryElement'),
Node('StringLiteralSegments', kind='SyntaxCollection',
element='Syntax', element_name='Segment',
element_choices=['StringSegment', 'ExpressionSegment']),
# The try operator.
# try foo()
# try? foo()
# try! foo()
Node('TryExpr', kind='Expr',
children=[
Child('TryKeyword', kind='TryToken'),
Child('QuestionOrExclamationMark', kind='Token',
is_optional=True,
token_choices=[
'PostfixQuestionMarkToken',
'ExclamationMarkToken',
]),
Child('Expression', kind='Expr'),
]),
# declname-arguments -> '(' declname-argument-list ')'
# declname-argument-list -> declname-argument*
# declname-argument -> identifier ':'
Node('DeclNameArgument', kind='Syntax',
children=[
Child('Name', kind='Token'),
Child('Colon', kind='ColonToken'),
]),
Node('DeclNameArgumentList', kind='SyntaxCollection',
element='DeclNameArgument'),
Node('DeclNameArguments', kind='Syntax',
traits=['Parenthesized'],
children=[
Child('LeftParen', kind='LeftParenToken'),
Child('Arguments', kind='DeclNameArgumentList',
collection_element_name='Argument'),
Child('RightParen', kind='RightParenToken'),
]),
# An identifier expression.
Node('IdentifierExpr', kind='Expr',
children=[
Child('Identifier', kind='Token',
token_choices=[
'IdentifierToken',
'SelfToken',
'CapitalSelfToken',
'DollarIdentifierToken',
'SpacedBinaryOperatorToken',
]),
Child('DeclNameArguments', kind='DeclNameArguments',
is_optional=True),
]),
# An 'super' expression.
Node('SuperRefExpr', kind='Expr',
children=[
Child('SuperKeyword', kind='SuperToken'),
]),
# A nil expression.
Node('NilLiteralExpr', kind='Expr',
children=[
Child('NilKeyword', kind='NilToken'),
]),
# A _ expression.
Node('DiscardAssignmentExpr', kind='Expr',
children=[
Child('Wildcard', kind='WildcardToken'),
]),
# An = expression.
Node('AssignmentExpr', kind='Expr',
children=[
Child('AssignToken', kind='EqualToken'),
]),
# A flat list of expressions before sequence folding, e.g. 1 + 2 + 3.
Node('SequenceExpr', kind='Expr',
children=[
Child('Elements', kind='ExprList',
collection_element_name='Element'),
]),
Node('ExprList', kind='SyntaxCollection',
element='Expr',
element_name='Expression',
description='''
A list of expressions connected by operators. This list is contained
by a `SequenceExprSyntax`.
'''),
# A #line expression.
Node('PoundLineExpr', kind='Expr',
children=[
Child('PoundLine', kind='PoundLineToken'),
]),
# A #file expression.
Node('PoundFileExpr', kind='Expr',
children=[
Child('PoundFile', kind='PoundFileToken'),
]),
# A #filePath expression.
Node('PoundFilePathExpr', kind='Expr',
children=[
Child('PoundFilePath', kind='PoundFilePathToken'),
]),
# A #function expression.
Node('PoundFunctionExpr', kind='Expr',
children=[
Child('PoundFunction', kind='PoundFunctionToken'),
]),
# A #dsohandle expression.
Node('PoundDsohandleExpr', kind='Expr',
children=[
Child('PoundDsohandle', kind='PoundDsohandleToken'),
]),
# symbolic-reference-expression -> identifier generic-argument-clause?
Node('SymbolicReferenceExpr', kind='Expr',
children=[
Child('Identifier', kind='IdentifierToken'),
Child('GenericArgumentClause', kind='GenericArgumentClause',
is_optional=True),
]),
# A prefix operator expression.
# -x
# !true
Node('PrefixOperatorExpr', kind='Expr',
children=[
Child('OperatorToken', kind='PrefixOperatorToken',
is_optional=True),
Child('PostfixExpression', kind='Expr'),
]),
# An operator like + or -.
# NOTE: This appears only in SequenceExpr.
Node('BinaryOperatorExpr', kind='Expr',
children=[
Child('OperatorToken', kind='BinaryOperatorToken'),
]),
# arrow-expr -> 'throws'? '->'
# NOTE: This appears only in SequenceExpr.
Node('ArrowExpr', kind='Expr',
children=[
Child('ThrowsToken', kind='ThrowsToken',
is_optional=True),
Child('ArrowToken', kind='ArrowToken'),
]),
# A floating-point literal
# 4.0
# -3.9
# +4e20
Node('FloatLiteralExpr', kind='Expr',
children=[
Child('FloatingDigits', kind='FloatingLiteralToken'),
]),
Node('TupleExpr', kind='Expr',
traits=['Parenthesized'],
children=[
Child('LeftParen', kind='LeftParenToken'),
Child('ElementList', kind='TupleExprElementList',
collection_element_name='Element'),
Child('RightParen', kind='RightParenToken'),
]),
# Array literal, e.g. [1, 2, 3]
Node('ArrayExpr', kind='Expr',
children=[
Child('LeftSquare', kind='LeftSquareBracketToken'),
Child('Elements', kind='ArrayElementList',
collection_element_name='Element'),
Child('RightSquare', kind='RightSquareBracketToken'),
]),
# Dictionary literal, e.g. [1:1, 2:2, 3:3]
Node('DictionaryExpr', kind='Expr',
children=[
Child('LeftSquare', kind='LeftSquareBracketToken'),
Child('Content', kind='Syntax',
node_choices=[
Child('Colon', kind='ColonToken'),
Child('Elements', kind='DictionaryElementList'),
]),
Child('RightSquare', kind='RightSquareBracketToken'),
]),
# An element inside a tuple element list
Node('TupleExprElement', kind='Syntax',
traits=['WithTrailingComma'],
children=[
Child('Label', kind='Token',
is_optional=True,
token_choices=[
'IdentifierToken',
'WildcardToken'
]),
Child('Colon', kind='ColonToken',
is_optional=True),
Child('Expression', kind='Expr'),
Child('TrailingComma', kind='CommaToken',
is_optional=True),
]),
# element inside an array expression: expression ','?
Node('ArrayElement', kind='Syntax',
traits=['WithTrailingComma'],
children=[
Child('Expression', kind='Expr'),
Child('TrailingComma', kind='CommaToken', is_optional=True),
]),
# element inside an array expression: expression ','?
Node('DictionaryElement', kind='Syntax',
traits=['WithTrailingComma'],
children=[
Child('KeyExpression', kind='Expr'),
Child('Colon', kind='ColonToken'),
Child('ValueExpression', kind='Expr'),
Child('TrailingComma', kind='CommaToken', is_optional=True),
]),
# An integer literal.
# 3
# +3_400
# +0x4f
Node('IntegerLiteralExpr', kind='Expr',
children=[
Child('Digits', kind='IntegerLiteralToken'),
]),
# true or false
Node('BooleanLiteralExpr', kind='Expr',
children=[
Child("BooleanLiteral", kind='Token',
token_choices=[
'TrueToken',
'FalseToken',
])
]),
# a ? 1 : 0
Node('TernaryExpr', kind='Expr',
children=[
Child("ConditionExpression", kind='Expr'),
Child("QuestionMark", kind='InfixQuestionMarkToken'),
Child("FirstChoice", kind='Expr'),
Child("ColonMark", kind='ColonToken'),
Child("SecondChoice", kind='Expr')
]),
# expr?.name
Node('MemberAccessExpr', kind='Expr',
children=[
# The base needs to be optional to parse expressions in key paths
# like \.a
Child("Base", kind='Expr', is_optional=True),
Child("Dot", kind='Token',
token_choices=[
'PeriodToken', 'PrefixPeriodToken'
]),
# Name could be 'self'
Child("Name", kind='Token'),
Child('DeclNameArguments', kind='DeclNameArguments',
is_optional=True),
]),
# is TypeName
Node('IsExpr', kind='Expr',
children=[
Child("IsTok", kind='IsToken'),
Child("TypeName", kind='Type')
]),
# as TypeName
Node('AsExpr', kind='Expr',
children=[
Child("AsTok", kind='AsToken'),
Child("QuestionOrExclamationMark", kind='Token',
is_optional=True,
token_choices=[
'PostfixQuestionMarkToken',
'ExclamationMarkToken',
]),
Child("TypeName", kind='Type')
]),
# Type
Node('TypeExpr', kind='Expr',
children=[
Child('Type', kind='Type'),
]),
Node('ClosureCaptureItem', kind='Syntax',
traits=['WithTrailingComma'],
children=[
# FIXME: Add a 'CaptureSpecifier' node kind for `Specifier`.
Child("Specifier", kind='TokenList',
collection_element_name='SpecifierToken', is_optional=True),
Child("Name", kind='IdentifierToken', is_optional=True),
Child('AssignToken', kind='EqualToken', is_optional=True),
Child("Expression", kind='Expr'),
Child('TrailingComma', kind='CommaToken', is_optional=True),
]),
Node('ClosureCaptureItemList', kind='SyntaxCollection',
element='ClosureCaptureItem'),
Node('ClosureCaptureSignature', kind='Syntax',
children=[
Child('LeftSquare', kind='LeftSquareBracketToken'),
Child('Items', kind='ClosureCaptureItemList',
collection_element_name='Item', is_optional=True),
Child('RightSquare', kind='RightSquareBracketToken'),
]),
Node('ClosureParam', kind='Syntax',
traits=['WithTrailingComma'],
children=[
Child('Name', kind='Token',
token_choices=[
'IdentifierToken',
'WildcardToken',
]),
Child('TrailingComma', kind='CommaToken', is_optional=True),
]),
# a, b, c
Node('ClosureParamList', kind='SyntaxCollection', element='ClosureParam'),
Node('ClosureSignature', kind='Syntax',
children=[
Child('Capture', kind='ClosureCaptureSignature',
is_optional=True),
Child('Input', kind='Syntax', is_optional=True,
node_choices=[
Child('SimpleInput', kind='ClosureParamList'),
Child('Input', kind='ParameterClause'),
]),
Child('ThrowsTok', kind='ThrowsToken', is_optional=True),
Child('Output', kind='ReturnClause', is_optional=True),
Child('InTok', kind='InToken'),
]),
Node('ClosureExpr', kind='Expr',
traits=['Braced', 'WithStatements'],
children=[
Child('LeftBrace', kind='LeftBraceToken'),
Child('Signature', kind='ClosureSignature', is_optional=True),
Child('Statements', kind='CodeBlockItemList',
collection_element_name='Statement'),
Child('RightBrace', kind='RightBraceToken'),
]),
# unresolved-pattern-expr -> pattern
Node('UnresolvedPatternExpr', kind='Expr',
children=[
Child('Pattern', kind='Pattern'),
]),
# trailing-closure-element -> identifier ':' closure-expression
Node('MultipleTrailingClosureElement', kind='Syntax',
children=[
Child('Label', kind='Token',
token_choices=[
'IdentifierToken',
'WildcardToken'
]),
Child('Colon', kind='ColonToken'),
Child('Closure', kind='ClosureExpr'),
]),
Node('MultipleTrailingClosureElementList', kind='SyntaxCollection',
element='MultipleTrailingClosureElement'),
# call-expr -> expr '(' call-argument-list ')' closure-expr?
# | expr closure-expr
Node('FunctionCallExpr', kind='Expr',
children=[
Child('CalledExpression', kind='Expr'),
Child('LeftParen', kind='LeftParenToken',
is_optional=True),
Child('ArgumentList', kind='TupleExprElementList',
collection_element_name='Argument'),
Child('RightParen', kind='RightParenToken',
is_optional=True),
Child('TrailingClosure', kind='ClosureExpr',
is_optional=True),
Child('AdditionalTrailingClosures',
kind='MultipleTrailingClosureElementList',
collection_element_name='AdditionalTrailingClosure',
is_optional=True),
]),
# subscript-expr -> expr '[' call-argument-list ']' closure-expr?
Node('SubscriptExpr', kind='Expr',
children=[
Child('CalledExpression', kind='Expr'),
Child('LeftBracket', kind='LeftSquareBracketToken'),
Child('ArgumentList', kind='TupleExprElementList',
collection_element_name='Argument'),
Child('RightBracket', kind='RightSquareBracketToken'),
Child('TrailingClosure', kind='ClosureExpr',
is_optional=True),
Child('AdditionalTrailingClosures',
kind='MultipleTrailingClosureElementList',
collection_element_name='AdditionalTrailingClosure',
is_optional=True),
]),
# optional-chaining-expr -> expr '?'
Node('OptionalChainingExpr', kind='Expr',
children=[
Child('Expression', kind='Expr'),
Child('QuestionMark', kind='PostfixQuestionMarkToken'),
]),
# forced-value-expr -> expr '!'
Node('ForcedValueExpr', kind='Expr',
children=[
Child('Expression', kind='Expr'),
Child('ExclamationMark', kind='ExclamationMarkToken'),
]),
# postfix-unary-expr -> expr postfix-operator
Node('PostfixUnaryExpr', kind='Expr',
children=[
Child('Expression', kind='Expr'),
Child('OperatorToken', kind='PostfixOperatorToken'),
]),
# specialize-expr -> expr generic-argument-clause?
Node('SpecializeExpr', kind='Expr',
children=[
Child('Expression', kind='Expr'),
Child('GenericArgumentClause', kind='GenericArgumentClause'),
]),
# string literal segment in a string interpolation expression.
Node('StringSegment', kind='Syntax',
children=[
Child('Content', kind='StringSegmentToken'),
]),
# expression segment in a string interpolation expression.
Node('ExpressionSegment', kind='Syntax',
traits=['Parenthesized'],
children=[
Child('Backslash', kind='BackslashToken'),
Child('Delimiter', kind='RawStringDelimiterToken',
is_optional=True),
Child('LeftParen', kind='LeftParenToken',
classification='StringInterpolationAnchor',
force_classification=True),
Child('Expressions', kind='TupleExprElementList',
collection_element_name='Expression'),
Child('RightParen', kind='StringInterpolationAnchorToken'),
]),
# e.g. "abc \(foo()) def"
Node('StringLiteralExpr', kind='Expr',
children=[
Child('OpenDelimiter', kind='RawStringDelimiterToken',
is_optional=True),
Child('OpenQuote', kind='Token',
token_choices=[
'StringQuoteToken',
'MultilineStringQuoteToken',
]),
Child('Segments', kind='StringLiteralSegments',
collection_element_name='Segment'),
Child('CloseQuote', kind='Token',
token_choices=[
'StringQuoteToken',
'MultilineStringQuoteToken',
]),
Child('CloseDelimiter', kind='RawStringDelimiterToken',
is_optional=True),
]),
# e.g. "\a.b[2].a"
Node('KeyPathExpr', kind='Expr',
children=[
Child('Backslash', kind='BackslashToken'),
Child('RootExpr', kind='Expr', is_optional=True,
node_choices=[
Child('IdentifierExpr', kind='IdentifierExpr'),
Child('SpecializeExpr', kind='SpecializeExpr')
]),
Child('Expression', kind='Expr'),
]),
# The period in the key path serves as the base on which the
# right-hand-side of the key path is evaluated
Node('KeyPathBaseExpr', kind='Expr',
children=[
Child('Period', kind='PeriodToken'),
]),
# e.g. "a." or "a"
Node('ObjcNamePiece', kind='Syntax',
children=[
Child('Name', kind='IdentifierToken'),
Child('Dot', kind='PeriodToken', is_optional=True),
]),
# e.g. "a.b.c"
Node('ObjcName', kind='SyntaxCollection', element='ObjcNamePiece'),
# e.g. "#keyPath(a.b.c)"
Node('ObjcKeyPathExpr', kind='Expr',
traits=['Parenthesized'],
children=[
Child('KeyPath', kind='PoundKeyPathToken'),
Child('LeftParen', kind='LeftParenToken'),
Child('Name', kind='ObjcName',
collection_element_name='NamePiece'),
Child('RightParen', kind='RightParenToken'),
]),
# e.g. "#selector(getter:Foo.bar)"
Node('ObjcSelectorExpr', kind='Expr',
traits=['Parenthesized'],
children=[
Child('PoundSelector', kind='PoundSelectorToken'),
Child('LeftParen', kind='LeftParenToken'),
Child('Kind', kind='ContextualKeywordToken',
text_choices=['getter', 'setter'],
is_optional=True),
Child('Colon', kind='ColonToken',
is_optional=True),
Child('Name', kind='Expr'),
Child('RightParen', kind='RightParenToken'),
]),
# <#content#>
Node('EditorPlaceholderExpr', kind='Expr',
children=[
Child('Identifier', kind='IdentifierToken'),
]),
# #fileLiteral(a, b, c)
Node('ObjectLiteralExpr', kind='Expr',
traits=['Parenthesized'],
children=[
Child('Identifier', kind='Token',
token_choices=[
'PoundColorLiteralToken',
'PoundFileLiteralToken',
'PoundImageLiteralToken',
]),
Child('LeftParen', kind='LeftParenToken'),
Child('Arguments', kind='TupleExprElementList',
collection_element_name='Argument'),
Child('RightParen', kind='RightParenToken'),
]),
]
| {
"content_hash": "f788c26edf2d9afc9f8795575e4ec621",
"timestamp": "",
"source": "github",
"line_count": 592,
"max_line_length": 79,
"avg_line_length": 35.445945945945944,
"alnum_prop": 0.5273065192527641,
"repo_name": "stephentyrone/swift",
"id": "2d2295fec724c136e58ee77fcebb2fa8bcd66e44",
"size": "20984",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "utils/gyb_syntax_support/ExprNodes.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "13584"
},
{
"name": "C",
"bytes": "245193"
},
{
"name": "C++",
"bytes": "38276251"
},
{
"name": "CMake",
"bytes": "542159"
},
{
"name": "D",
"bytes": "1107"
},
{
"name": "DTrace",
"bytes": "2593"
},
{
"name": "Emacs Lisp",
"bytes": "57295"
},
{
"name": "LLVM",
"bytes": "70652"
},
{
"name": "MATLAB",
"bytes": "2576"
},
{
"name": "Makefile",
"bytes": "1841"
},
{
"name": "Objective-C",
"bytes": "425366"
},
{
"name": "Objective-C++",
"bytes": "246376"
},
{
"name": "Python",
"bytes": "1775045"
},
{
"name": "Roff",
"bytes": "3495"
},
{
"name": "Ruby",
"bytes": "2117"
},
{
"name": "Shell",
"bytes": "177384"
},
{
"name": "Swift",
"bytes": "33242293"
},
{
"name": "Vim Script",
"bytes": "19900"
},
{
"name": "sed",
"bytes": "1050"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
import datetime
from django.conf import settings
from django.http import HttpResponse
from django.test import TestCase
from mock import patch
from zerver.lib.test_helpers import MockLDAP
from confirmation.models import Confirmation
from zilencer.models import Deployment
from zerver.forms import HomepageForm
from zerver.views import do_change_password
from zerver.views.invite import get_invitee_emails_set
from zerver.models import (
get_realm, get_prereg_user_by_email, get_user_profile_by_email,
PreregistrationUser, Realm, RealmAlias, Recipient,
Referral, ScheduledJob, UserProfile, UserMessage,
Stream, Subscription, ScheduledJob
)
from zerver.management.commands.deliver_email import send_email_job
from zerver.lib.actions import (
set_default_streams,
do_change_is_admin
)
from zerver.lib.initial_password import initial_password
from zerver.lib.actions import do_deactivate_realm, do_set_realm_default_language, \
add_new_user_history
from zerver.lib.digest import send_digest_email
from zerver.lib.notifications import (
enqueue_welcome_emails, one_click_unsubscribe_link, send_local_email_template_with_delay)
from zerver.lib.test_helpers import find_key_by_email, queries_captured, \
HostRequestMock
from zerver.lib.test_classes import (
ZulipTestCase,
)
from zerver.lib.test_runner import slow
from zerver.lib.session_user import get_session_dict_user
from zerver.context_processors import common_context
import re
import ujson
from six.moves import urllib
from six.moves import range
import six
from typing import Any, Text
import os
class PublicURLTest(ZulipTestCase):
    """
    Account creation URLs are accessible even when not logged in. Authenticated
    URLs redirect to a page.
    """
    def fetch(self, method, urls, expected_status):
        # type: (str, List[str], int) -> None
        """Hit every URL with the named client method; assert the status code."""
        for url in urls:
            # e.g. self.client_post(url) if method is "post"
            response = getattr(self, method)(url)
            self.assertEqual(response.status_code, expected_status,
                             msg="Expected %d, received %d for %s to %s" % (
                                 expected_status, response.status_code, method, url))
    def test_public_urls(self):
        # type: () -> None
        """
        Test which views are accessible when not logged in.
        """
        # FIXME: We should also test the Tornado URLs -- this codepath
        # can't do so because this Django test mechanism doesn't go
        # through Tornado.
        # Bug fix: the comma after "/accounts/login/" was missing, so
        # implicit string-literal concatenation silently merged it with
        # "/en/accounts/home/" into one bogus URL and neither page was
        # actually being tested.
        get_urls = {200: ["/accounts/home/", "/accounts/login/",
                          "/en/accounts/home/", "/ru/accounts/home/",
                          "/en/accounts/login/", "/ru/accounts/login/",
                          "/help/"],
                    302: ["/", "/en/", "/ru/"],
                    401: ["/json/streams/Denmark/members",
                          "/api/v1/users/me/subscriptions",
                          "/api/v1/messages",
                          "/json/messages",
                          "/api/v1/streams",
                          ],
                    404: ["/help/nonexistent"],
                    }
        # Add all files in 'templates/zerver/help' directory (except for 'main.html' and
        # 'index.md') to `get_urls['200']` list.
        for doc in os.listdir('./templates/zerver/help'):
            if doc not in {'main.html', 'index.md', 'include'}:
                get_urls[200].append('/help/' + os.path.splitext(doc)[0]) # Strip the extension.
        post_urls = {200: ["/accounts/login/"],
                     302: ["/accounts/logout/"],
                     401: ["/json/messages",
                           "/json/invite_users",
                           "/json/settings/change",
                           "/json/subscriptions/exists",
                           "/json/subscriptions/property",
                           "/json/fetch_api_key",
                           "/json/users/me/pointer",
                           "/json/users/me/subscriptions",
                           "/api/v1/users/me/subscriptions",
                           ],
                     400: ["/api/v1/external/github",
                           "/api/v1/fetch_api_key",
                           ],
                     }
        put_urls = {401: ["/json/users/me/pointer"],
                    }
        for status_code, url_set in six.iteritems(get_urls):
            self.fetch("client_get", url_set, status_code)
        for status_code, url_set in six.iteritems(post_urls):
            self.fetch("client_post", url_set, status_code)
        for status_code, url_set in six.iteritems(put_urls):
            self.fetch("client_put", url_set, status_code)
    def test_get_gcid_when_not_configured(self):
        # type: () -> None
        """Without GOOGLE_CLIENT_ID set, the endpoint must return a 400 error."""
        with self.settings(GOOGLE_CLIENT_ID=None):
            resp = self.client_get("/api/v1/fetch_google_client_id")
            self.assertEqual(400, resp.status_code,
                             msg="Expected 400, received %d for GET /api/v1/fetch_google_client_id" % (
                                 resp.status_code,))
            data = ujson.loads(resp.content)
            self.assertEqual('error', data['result'])
    def test_get_gcid_when_configured(self):
        # type: () -> None
        """With GOOGLE_CLIENT_ID set, the endpoint must echo it back."""
        with self.settings(GOOGLE_CLIENT_ID="ABCD"):
            resp = self.client_get("/api/v1/fetch_google_client_id")
            self.assertEqual(200, resp.status_code,
                             msg="Expected 200, received %d for GET /api/v1/fetch_google_client_id" % (
                                 resp.status_code,))
            data = ujson.loads(resp.content)
            self.assertEqual('success', data['result'])
            self.assertEqual('ABCD', data['google_client_id'])
class AddNewUserHistoryTest(ZulipTestCase):
    """Regression test for the race between user creation and message delivery."""
    def test_add_new_user_history_race(self):
        # type: () -> None
        """Sends a message during user creation"""
        # Create a user who hasn't had historical messages added
        stream_dict = {
            "Denmark": {"description": "A Scandinavian country", "invite_only": False},
            "Verona": {"description": "A city in Italy", "invite_only": False}
        } # type: Dict[Text, Dict[Text, Any]]
        set_default_streams(get_realm("zulip"), stream_dict)
        # Patch out the usual history backfill during registration to
        # simulate the race where a message arrives before it runs.
        with patch("zerver.lib.actions.add_new_user_history"):
            self.register("test", "test")
        user_profile = get_user_profile_by_email("test@zulip.com")
        # Collect the streams the new user is subscribed to.
        subs = Subscription.objects.select_related("recipient").filter(
            user_profile=user_profile, recipient__type=Recipient.STREAM)
        streams = Stream.objects.filter(id__in=[sub.recipient.type_id for sub in subs])
        # Deliver a message before the backfill runs ...
        self.send_message("hamlet@zulip.com", streams[0].name, Recipient.STREAM, "test")
        # ... then run the backfill; it must cope with that message.
        add_new_user_history(user_profile, streams)
class PasswordResetTest(ZulipTestCase):
    """
    Log in, reset password, log out, log in with new password.
    """
    def test_password_reset(self):
        # type: () -> None
        """Walk the whole password-reset flow end to end."""
        email = 'hamlet@zulip.com'
        old_password = initial_password(email)
        self.login(email)
        # test password reset template
        result = self.client_get('/accounts/password/reset/')
        self.assert_in_response('Reset your password.', result)
        # start the password reset process by supplying an email address
        result = self.client_post('/accounts/password/reset/', {'email': email})
        # check the redirect link telling you to check mail for password reset link
        self.assertEqual(result.status_code, 302)
        self.assertTrue(result["Location"].endswith(
            "/accounts/password/reset/done/"))
        result = self.client_get(result["Location"])
        self.assert_in_response("Check your email to finish the process.", result)
        # Visit the password reset link.
        password_reset_url = self.get_confirmation_url_from_outbox(email, "(\S+)")
        result = self.client_get(password_reset_url)
        self.assertEqual(result.status_code, 200)
        # Reset your password
        result = self.client_post(password_reset_url,
                                  {'new_password1': 'new_password',
                                   'new_password2': 'new_password'})
        # password reset succeeded
        self.assertEqual(result.status_code, 302)
        self.assertTrue(result["Location"].endswith("/password/done/"))
        # log back in with new password
        self.login(email, password='new_password')
        user_profile = get_user_profile_by_email('hamlet@zulip.com')
        self.assertEqual(get_session_dict_user(self.client.session), user_profile.id)
        # make sure old password no longer works
        self.login(email, password=old_password, fails=True)
    def test_redirect_endpoints(self):
        # type: () -> None
        '''
        These tests are mostly designed to give us 100% URL coverage
        in our URL coverage reports. Our mechanism for finding URL
        coverage doesn't handle redirects, so we just have a few quick
        tests here.
        '''
        result = self.client_get('/accounts/password/reset/done/')
        self.assert_in_success_response(["Check your email"], result)
        result = self.client_get('/accounts/password/done/')
        self.assert_in_success_response(["We've reset your password!"], result)
        result = self.client_get('/accounts/send_confirm/alice@example.com')
        self.assert_in_success_response(["Still no email?"], result)
class LoginTest(ZulipTestCase):
    """
    Logging in, registration, and logging out.
    """
    def test_login(self):
        # type: () -> None
        """A valid login attaches the user to the session."""
        self.login("hamlet@zulip.com")
        user_profile = get_user_profile_by_email('hamlet@zulip.com')
        self.assertEqual(get_session_dict_user(self.client.session), user_profile.id)
    def test_login_bad_password(self):
        # type: () -> None
        """A wrong password must leave the session unauthenticated."""
        self.login("hamlet@zulip.com", password="wrongpassword", fails=True)
        self.assertIsNone(get_session_dict_user(self.client.session))
    def test_login_nonexist_user(self):
        # type: () -> None
        """An unknown email gets the generic bad-credentials error."""
        result = self.login_with_return("xxx@zulip.com", "xxx")
        self.assert_in_response("Please enter a correct email and password", result)
    def test_register(self):
        # type: () -> None
        """Registration query count must not scale with the stream count."""
        realm = get_realm("zulip")
        stream_dict = {"stream_"+str(i): {"description": "stream_%s_description" % i, "invite_only": False}
                       for i in range(40)} # type: Dict[Text, Dict[Text, Any]]
        for stream_name in stream_dict.keys():
            self.make_stream(stream_name, realm=realm)
        set_default_streams(realm, stream_dict)
        with queries_captured() as queries:
            self.register("test", "test")
        # Ensure the number of queries we make is not O(streams)
        self.assert_max_length(queries, 69)
        user_profile = get_user_profile_by_email('test@zulip.com')
        self.assertEqual(get_session_dict_user(self.client.session), user_profile.id)
        self.assertFalse(user_profile.enable_stream_desktop_notifications)
    def test_register_deactivated(self):
        # type: () -> None
        """
        If you try to register for a deactivated realm, you get a clear error
        page.
        """
        realm = get_realm("zulip")
        realm.deactivated = True
        realm.save(update_fields=["deactivated"])
        result = self.register("test", "test")
        self.assert_in_response("has been deactivated", result)
        # Registration must not have created the user.
        with self.assertRaises(UserProfile.DoesNotExist):
            get_user_profile_by_email('test@zulip.com')
    def test_login_deactivated(self):
        # type: () -> None
        """
        If you try to log in to a deactivated realm, you get a clear error page.
        """
        realm = get_realm("zulip")
        realm.deactivated = True
        realm.save(update_fields=["deactivated"])
        result = self.login_with_return("hamlet@zulip.com")
        self.assert_in_response("has been deactivated", result)
    def test_logout(self):
        # type: () -> None
        """Logging out clears the session's user."""
        self.login("hamlet@zulip.com")
        self.client_post('/accounts/logout/')
        self.assertIsNone(get_session_dict_user(self.client.session))
    def test_non_ascii_login(self):
        # type: () -> None
        """
        You can log in even if your password contain non-ASCII characters.
        """
        email = "test@zulip.com"
        password = u"hümbüǵ"
        # Registering succeeds.
        self.register("test", password)
        user_profile = get_user_profile_by_email(email)
        self.assertEqual(get_session_dict_user(self.client.session), user_profile.id)
        self.client_post('/accounts/logout/')
        self.assertIsNone(get_session_dict_user(self.client.session))
        # Logging in succeeds.
        self.client_post('/accounts/logout/')
        self.login(email, password)
        self.assertEqual(get_session_dict_user(self.client.session), user_profile.id)
class InviteUserTest(ZulipTestCase):
    def invite(self, users, streams):
        # type: (str, List[Text]) -> HttpResponse
        """
        Invites the specified users to Zulip with the specified streams.
        users should be a string containing the users to invite, comma or
        newline separated.
        streams should be a list of strings.
        Returns the HttpResponse from the /json/invite_users endpoint.
        """
        return self.client_post("/json/invite_users",
                                {"invitee_emails": users,
                                 "stream": streams})
def check_sent_emails(self, correct_recipients):
# type: (List[str]) -> None
from django.core.mail import outbox
self.assertEqual(len(outbox), len(correct_recipients))
email_recipients = [email.recipients()[0] for email in outbox]
self.assertEqual(sorted(email_recipients), sorted(correct_recipients))
    def test_bulk_invite_users(self):
        # type: () -> None
        """The bulk_invite_users code path is for the first user in a realm."""
        self.login('hamlet@zulip.com')
        invitees = ['alice@zulip.com', 'bob@zulip.com']
        # This endpoint takes a JSON-encoded list rather than a comma string.
        params = {
            'invitee_emails': ujson.dumps(invitees)
        }
        result = self.client_post('/json/bulk_invite_users', params)
        self.assert_json_success(result)
        self.check_sent_emails(invitees)
    def test_successful_invite_user(self):
        # type: () -> None
        """
        A call to /json/invite_users with valid parameters causes an invitation
        email to be sent.
        """
        self.login("hamlet@zulip.com")
        invitee = "alice-test@zulip.com"
        self.assert_json_success(self.invite(invitee, ["Denmark"]))
        # A confirmation key must exist for the invitee's address.
        self.assertTrue(find_key_by_email(invitee))
        self.check_sent_emails([invitee])
    def test_successful_invite_user_with_name(self):
        # type: () -> None
        """
        An invitee given in "Full Name <email>" form is parsed correctly and
        the invitation email is sent to the bare address.
        """
        self.login("hamlet@zulip.com")
        email = "alice-test@zulip.com"
        invitee = "Alice Test <{}>".format(email)
        self.assert_json_success(self.invite(invitee, ["Denmark"]))
        self.assertTrue(find_key_by_email(email))
        self.check_sent_emails([email])
def test_successful_invite_user_with_name_and_normal_one(self):
# type: () -> None
"""
A call to /json/invite_users with valid parameters causes an invitation
email to be sent.
"""
self.login("hamlet@zulip.com")
email = "alice-test@zulip.com"
email2 = "bob-test@zulip.com"
invitee = "Alice Test <{}>, {}".format(email, email2)
self.assert_json_success(self.invite(invitee, ["Denmark"]))
self.assertTrue(find_key_by_email(email))
self.assertTrue(find_key_by_email(email2))
self.check_sent_emails([email, email2])
def test_invite_user_signup_initial_history(self):
# type: () -> None
"""
Test that a new user invited to a stream receives some initial
history but only from public streams.
"""
self.login("hamlet@zulip.com")
user_profile = get_user_profile_by_email("hamlet@zulip.com")
private_stream_name = "Secret"
self.make_stream(private_stream_name, invite_only=True)
self.subscribe_to_stream(user_profile.email, private_stream_name)
public_msg_id = self.send_message("hamlet@zulip.com", "Denmark", Recipient.STREAM,
"Public topic", "Public message")
secret_msg_id = self.send_message("hamlet@zulip.com", private_stream_name, Recipient.STREAM,
"Secret topic", "Secret message")
invitee = "alice-test@zulip.com"
self.assert_json_success(self.invite(invitee, [private_stream_name, "Denmark"]))
self.assertTrue(find_key_by_email(invitee))
self.submit_reg_form_for_user("alice-test", "password")
invitee_profile = get_user_profile_by_email(invitee)
invitee_msg_ids = [um.message_id for um in
UserMessage.objects.filter(user_profile=invitee_profile)]
self.assertTrue(public_msg_id in invitee_msg_ids)
self.assertFalse(secret_msg_id in invitee_msg_ids)
def test_multi_user_invite(self):
# type: () -> None
"""
Invites multiple users with a variety of delimiters.
"""
self.login("hamlet@zulip.com")
# Intentionally use a weird string.
self.assert_json_success(self.invite(
"""bob-test@zulip.com, carol-test@zulip.com,
dave-test@zulip.com
earl-test@zulip.com""", ["Denmark"]))
for user in ("bob", "carol", "dave", "earl"):
self.assertTrue(find_key_by_email("%s-test@zulip.com" % (user,)))
self.check_sent_emails(["bob-test@zulip.com", "carol-test@zulip.com",
"dave-test@zulip.com", "earl-test@zulip.com"])
def test_missing_or_invalid_params(self):
# type: () -> None
"""
Tests inviting with various missing or invalid parameters.
"""
self.login("hamlet@zulip.com")
self.assert_json_error(
self.client_post("/json/invite_users", {"invitee_emails": "foo@zulip.com"}),
"You must specify at least one stream for invitees to join.")
for address in ("noatsign.com", "outsideyourdomain@example.net"):
self.assert_json_error(
self.invite(address, ["Denmark"]),
"Some emails did not validate, so we didn't send any invitations.")
self.check_sent_emails([])
def test_invalid_stream(self):
# type: () -> None
"""
Tests inviting to a non-existent stream.
"""
self.login("hamlet@zulip.com")
self.assert_json_error(self.invite("iago-test@zulip.com", ["NotARealStream"]),
"Stream does not exist: NotARealStream. No invites were sent.")
self.check_sent_emails([])
def test_invite_existing_user(self):
# type: () -> None
"""
If you invite an address already using Zulip, no invitation is sent.
"""
self.login("hamlet@zulip.com")
self.assert_json_error(
self.client_post("/json/invite_users",
{"invitee_emails": "hamlet@zulip.com",
"stream": ["Denmark"]}),
"We weren't able to invite anyone.")
self.assertRaises(PreregistrationUser.DoesNotExist,
lambda: PreregistrationUser.objects.get(
email="hamlet@zulip.com"))
self.check_sent_emails([])
def test_invite_some_existing_some_new(self):
# type: () -> None
"""
If you invite a mix of already existing and new users, invitations are
only sent to the new users.
"""
self.login("hamlet@zulip.com")
existing = ["hamlet@zulip.com", "othello@zulip.com"]
new = ["foo-test@zulip.com", "bar-test@zulip.com"]
result = self.client_post("/json/invite_users",
{"invitee_emails": "\n".join(existing + new),
"stream": ["Denmark"]})
self.assert_json_error(result,
"Some of those addresses are already using Zulip, \
so we didn't send them an invitation. We did send invitations to everyone else!")
# We only created accounts for the new users.
for email in existing:
self.assertRaises(PreregistrationUser.DoesNotExist,
lambda: PreregistrationUser.objects.get(
email=email))
for email in new:
self.assertTrue(PreregistrationUser.objects.get(email=email))
# We only sent emails to the new users.
self.check_sent_emails(new)
prereg_user = get_prereg_user_by_email('foo-test@zulip.com')
self.assertEqual(prereg_user.email, 'foo-test@zulip.com')
def test_invite_outside_domain_in_closed_realm(self):
# type: () -> None
"""
In a realm with `restricted_to_domain = True`, you can't invite people
with a different domain from that of the realm or your e-mail address.
"""
zulip_realm = get_realm("zulip")
zulip_realm.restricted_to_domain = True
zulip_realm.save()
self.login("hamlet@zulip.com")
external_address = "foo@example.com"
self.assert_json_error(
self.invite(external_address, ["Denmark"]),
"Some emails did not validate, so we didn't send any invitations.")
def test_invite_outside_domain_in_open_realm(self):
# type: () -> None
"""
In a realm with `restricted_to_domain = False`, you can invite people
with a different domain from that of the realm or your e-mail address.
"""
zulip_realm = get_realm("zulip")
zulip_realm.restricted_to_domain = False
zulip_realm.save()
self.login("hamlet@zulip.com")
external_address = "foo@example.com"
self.assert_json_success(self.invite(external_address, ["Denmark"]))
self.check_sent_emails([external_address])
def test_invite_with_non_ascii_streams(self):
# type: () -> None
"""
Inviting someone to streams with non-ASCII characters succeeds.
"""
self.login("hamlet@zulip.com")
invitee = "alice-test@zulip.com"
stream_name = u"hümbüǵ"
# Make sure we're subscribed before inviting someone.
self.subscribe_to_stream("hamlet@zulip.com", stream_name)
self.assert_json_success(self.invite(invitee, [stream_name]))
def test_refer_friend(self):
# type: () -> None
self.login("hamlet@zulip.com")
user = get_user_profile_by_email('hamlet@zulip.com')
user.invites_granted = 1
user.invites_used = 0
user.save()
invitee = "alice-test@zulip.com"
result = self.client_post('/json/refer_friend', dict(email=invitee))
self.assert_json_success(result)
# verify this works
Referral.objects.get(user_profile=user, email=invitee)
user = get_user_profile_by_email('hamlet@zulip.com')
self.assertEqual(user.invites_used, 1)
def test_invitation_reminder_email(self):
# type: () -> None
from django.core.mail import outbox
current_user_email = "hamlet@zulip.com"
self.login(current_user_email)
invitee = "alice-test@zulip.com"
self.assert_json_success(self.invite(invitee, ["Denmark"]))
self.assertTrue(find_key_by_email(invitee))
self.check_sent_emails([invitee])
data = {"email": invitee, "referrer_email": current_user_email}
invitee = get_prereg_user_by_email(data["email"])
referrer = get_user_profile_by_email(data["referrer_email"])
link = Confirmation.objects.get_link_for_object(invitee, host=referrer.realm.host)
context = common_context(referrer)
context.update({
'activate_url': link,
'referrer': referrer,
'verbose_support_offers': settings.VERBOSE_SUPPORT_OFFERS,
'support_email': settings.ZULIP_ADMINISTRATOR
})
with self.settings(EMAIL_BACKEND='django.core.mail.backends.console.EmailBackend'):
send_local_email_template_with_delay(
[{'email': data["email"], 'name': ""}],
"zerver/emails/invitation/invitation_reminder_email",
context,
datetime.timedelta(days=0),
tags=["invitation-reminders"],
sender={'email': settings.ZULIP_ADMINISTRATOR, 'name': 'Zulip'})
email_jobs_to_deliver = ScheduledJob.objects.filter(
type=ScheduledJob.EMAIL,
scheduled_timestamp__lte=datetime.datetime.utcnow())
self.assertEqual(len(email_jobs_to_deliver), 1)
email_count = len(outbox)
for job in email_jobs_to_deliver:
self.assertTrue(send_email_job(job))
self.assertEqual(len(outbox), email_count + 1)
class InviteeEmailsParserTests(TestCase):
    """Unit tests for get_invitee_emails_set, which parses a raw invitee
    string (comma- and/or newline-delimited, optionally in
    "Name <address>" form) into a set of e-mail addresses."""

    def setUp(self):
        # type: () -> None
        self.email1 = "email1@zulip.com"
        self.email2 = "email2@zulip.com"
        self.email3 = "email3@zulip.com"

    def _assert_parses_to_all_three(self, raw_emails):
        # type: (Text) -> None
        # Every test case expects the same three parsed addresses.
        self.assertEqual(get_invitee_emails_set(raw_emails),
                         {self.email1, self.email2, self.email3})

    def test_if_emails_separated_by_commas_are_parsed_and_striped_correctly(self):
        # type: () -> None
        self._assert_parses_to_all_three(
            "%s ,%s, %s" % (self.email1, self.email2, self.email3))

    def test_if_emails_separated_by_newlines_are_parsed_and_striped_correctly(self):
        # type: () -> None
        self._assert_parses_to_all_three(
            "%s\n %s\n %s " % (self.email1, self.email2, self.email3))

    def test_if_emails_from_email_client_separated_by_newlines_are_parsed_correctly(self):
        # type: () -> None
        self._assert_parses_to_all_three(
            "Email One <%s>\nEmailTwo<%s>\nEmail Three<%s>"
            % (self.email1, self.email2, self.email3))

    def test_if_emails_in_mixed_style_are_parsed_correctly(self):
        # type: () -> None
        self._assert_parses_to_all_three(
            "Email One <%s>,EmailTwo<%s>\n%s"
            % (self.email1, self.email2, self.email3))
class EmailUnsubscribeTests(ZulipTestCase):
    """Tests for the one-click (logged-out) email unsubscribe endpoints
    for missed-message, welcome, and digest emails."""

    def test_error_unsubscribe(self):
        # type: () -> None
        """An unrecognized unsubscribe token yields an error page."""
        result = self.client_get('/accounts/unsubscribe/missed_messages/test123')
        self.assert_in_response('Unknown email unsubscribe request', result)

    def test_missedmessage_unsubscribe(self):
        # type: () -> None
        """
        We provide one-click unsubscribe links in missed message
        e-mails that you can click even when logged out to update your
        email notification settings.
        """
        user_profile = get_user_profile_by_email("hamlet@zulip.com")
        user_profile.enable_offline_email_notifications = True
        user_profile.save()
        unsubscribe_link = one_click_unsubscribe_link(user_profile,
                                                      "missed_messages")
        # Only the path is used, so this works without the full host.
        result = self.client_get(urllib.parse.urlparse(unsubscribe_link).path)
        self.assertEqual(result.status_code, 200)
        # Circumvent user_profile caching.
        user_profile = UserProfile.objects.get(email="hamlet@zulip.com")
        self.assertFalse(user_profile.enable_offline_email_notifications)

    def test_welcome_unsubscribe(self):
        # type: () -> None
        """
        We provide one-click unsubscribe links in welcome e-mails that you can
        click even when logged out to stop receiving them.
        """
        email = "hamlet@zulip.com"
        user_profile = get_user_profile_by_email("hamlet@zulip.com")
        # Simulate a new user signing up, which enqueues 2 welcome e-mails.
        enqueue_welcome_emails(email, "King Hamlet")
        self.assertEqual(2, len(ScheduledJob.objects.filter(
            type=ScheduledJob.EMAIL, filter_string__iexact=email)))
        # Simulate unsubscribing from the welcome e-mails.
        unsubscribe_link = one_click_unsubscribe_link(user_profile, "welcome")
        result = self.client_get(urllib.parse.urlparse(unsubscribe_link).path)
        # The welcome email jobs are no longer scheduled.
        self.assertEqual(result.status_code, 200)
        self.assertEqual(0, len(ScheduledJob.objects.filter(
            type=ScheduledJob.EMAIL, filter_string__iexact=email)))

    def test_digest_unsubscribe(self):
        # type: () -> None
        """
        We provide one-click unsubscribe links in digest e-mails that you can
        click even when logged out to stop receiving them.
        Unsubscribing from these emails also dequeues any digest email jobs that
        have been queued.
        """
        email = "hamlet@zulip.com"
        user_profile = get_user_profile_by_email("hamlet@zulip.com")
        self.assertTrue(user_profile.enable_digest_emails)
        # Enqueue a fake digest email.
        send_digest_email(user_profile, "", "")
        self.assertEqual(1, len(ScheduledJob.objects.filter(
            type=ScheduledJob.EMAIL, filter_string__iexact=email)))
        # Simulate unsubscribing from digest e-mails.
        unsubscribe_link = one_click_unsubscribe_link(user_profile, "digest")
        result = self.client_get(urllib.parse.urlparse(unsubscribe_link).path)
        # The setting is toggled off, and scheduled jobs have been removed.
        self.assertEqual(result.status_code, 200)
        # Circumvent user_profile caching.
        user_profile = UserProfile.objects.get(email="hamlet@zulip.com")
        self.assertFalse(user_profile.enable_digest_emails)
        self.assertEqual(0, len(ScheduledJob.objects.filter(
            type=ScheduledJob.EMAIL, filter_string__iexact=email)))
class RealmCreationTest(ZulipTestCase):
    """Tests for the /create_realm/ signup flow: realm creation with and
    without subdomains, disposable-address rejection, and subdomain
    validation rules."""

    def test_create_realm(self):
        # type: () -> None
        """Creating a realm via email confirmation succeeds and applies the
        expected defaults."""
        username = "user1"
        password = "test"
        string_id = "zuliptest"
        domain = 'test.com'
        email = "user1@test.com"
        realm = get_realm('test')
        # Make sure the realm does not exist
        self.assertIsNone(realm)

        with self.settings(OPEN_REALM_CREATION=True):
            # Create new realm with the email
            result = self.client_post('/create_realm/', {'email': email})
            self.assertEqual(result.status_code, 302)
            self.assertTrue(result["Location"].endswith(
                "/accounts/send_confirm/%s" % (email,)))
            result = self.client_get(result["Location"])
            self.assert_in_response("Check your email so we can get started.", result)

            # Visit the confirmation link.
            confirmation_url = self.get_confirmation_url_from_outbox(email)
            result = self.client_get(confirmation_url)
            self.assertEqual(result.status_code, 200)

            result = self.submit_reg_form_for_user(username, password, domain=domain,
                                                   realm_subdomain = string_id)
            self.assertEqual(result.status_code, 302)

            # Make sure the realm is created
            realm = get_realm(string_id)
            self.assertIsNotNone(realm)
            self.assertEqual(realm.string_id, string_id)
            self.assertEqual(get_user_profile_by_email(email).realm, realm)

            # Check defaults
            self.assertEqual(realm.org_type, Realm.COMMUNITY)
            self.assertEqual(realm.restricted_to_domain, False)
            self.assertEqual(realm.invite_required, True)

            self.assertTrue(result["Location"].endswith("/"))

    def test_create_realm_with_subdomain(self):
        # type: () -> None
        """With REALMS_HAVE_SUBDOMAINS, realm creation records the realm
        name and subdomain."""
        username = "user1"
        password = "test"
        string_id = "zuliptest"
        domain = "test.com"
        email = "user1@test.com"
        realm_name = "Test"

        # Make sure the realm does not exist
        self.assertIsNone(get_realm('test'))

        with self.settings(REALMS_HAVE_SUBDOMAINS=True), self.settings(OPEN_REALM_CREATION=True):
            # Create new realm with the email
            result = self.client_post('/create_realm/', {'email': email})
            self.assertEqual(result.status_code, 302)
            self.assertTrue(result["Location"].endswith(
                "/accounts/send_confirm/%s" % (email,)))
            result = self.client_get(result["Location"])
            self.assert_in_response("Check your email so we can get started.", result)

            # Visit the confirmation link.
            confirmation_url = self.get_confirmation_url_from_outbox(email)
            result = self.client_get(confirmation_url)
            self.assertEqual(result.status_code, 200)

            result = self.submit_reg_form_for_user(username, password, domain=domain,
                                                   realm_subdomain = string_id,
                                                   realm_name=realm_name,
                                                   # Pass HTTP_HOST for the target subdomain
                                                   HTTP_HOST=string_id + ".testserver")
            self.assertEqual(result.status_code, 302)

            # Make sure the realm is created
            realm = get_realm(string_id)
            self.assertIsNotNone(realm)
            self.assertEqual(realm.string_id, string_id)
            self.assertEqual(get_user_profile_by_email(email).realm, realm)

            self.assertEqual(realm.name, realm_name)
            self.assertEqual(realm.subdomain, string_id)

    def test_mailinator_signup(self):
        # type: () -> None
        """Disposable (mailinator.com) addresses are rejected."""
        with self.settings(OPEN_REALM_CREATION=True):
            result = self.client_post('/create_realm/', {'email': "hi@mailinator.com"})
            self.assert_in_response('Please use your real email address.', result)

    def test_subdomain_restrictions(self):
        # type: () -> None
        """Invalid or reserved subdomains are rejected with the appropriate
        error message; a valid one succeeds."""
        username = "user1"
        password = "test"
        domain = "test.com"
        email = "user1@test.com"
        realm_name = "Test"

        with self.settings(REALMS_HAVE_SUBDOMAINS=False), self.settings(OPEN_REALM_CREATION=True):
            result = self.client_post('/create_realm/', {'email': email})
            self.client_get(result["Location"])
            confirmation_url = self.get_confirmation_url_from_outbox(email)
            self.client_get(confirmation_url)

            # Maps each bad subdomain to (a fragment of) its expected error.
            errors = {'id': "at least 3 characters",
                      '-id': "cannot start or end with a",
                      'string-ID': "lowercase letters",
                      'string_id': "lowercase letters",
                      'stream': "unavailable",
                      'streams': "unavailable",
                      'about': "unavailable",
                      'abouts': "unavailable",
                      'mit': "unavailable"}
            for string_id, error_msg in errors.items():
                result = self.submit_reg_form_for_user(username, password, domain = domain,
                                                       realm_subdomain = string_id,
                                                       realm_name = realm_name)
                self.assert_in_response(error_msg, result)

            # test valid subdomain
            result = self.submit_reg_form_for_user(username, password, domain = domain,
                                                   realm_subdomain = 'a-0',
                                                   realm_name = realm_name)
            self.assertEqual(result.status_code, 302)
class UserSignUpTest(ZulipTestCase):
    """Tests for the user registration flow: default language, open
    domains, restricted/invite-required realms, LDAP-backed signup, and
    mirror-dummy user registration."""

    def _get_confirmation_url(self, email):
        # type: (Text) -> Text
        """Return the confirmation URL from the most recent outgoing email
        addressed to `email`.

        Raises ValueError if no such email is in the outbox.  This
        replaces four identical copy-pasted search loops in the tests
        below.
        """
        from django.core.mail import outbox
        # Search newest-first so we find the most recent confirmation.
        for message in reversed(outbox):
            if email in message.to:
                # Raw string: "\S" in a non-raw literal is an invalid
                # escape sequence (DeprecationWarning on modern Python).
                confirmation_link_pattern = re.compile(
                    settings.EXTERNAL_HOST + r"(\S+)>")
                return confirmation_link_pattern.search(
                    message.body).groups()[0]
        raise ValueError("Couldn't find a confirmation email.")

    def test_user_default_language(self):
        # type: () -> None
        """
        Check if the default language of new user is the default language
        of the realm.
        """
        username = "newguy"
        email = "newguy@zulip.com"
        password = "newpassword"
        realm = get_realm('zulip')
        domain = realm.domain
        do_set_realm_default_language(realm, "de")

        result = self.client_post('/accounts/home/', {'email': email})
        self.assertEqual(result.status_code, 302)
        self.assertTrue(result["Location"].endswith(
            "/accounts/send_confirm/%s@%s" % (username, domain)))
        result = self.client_get(result["Location"])
        self.assert_in_response("Check your email so we can get started.", result)

        # Visit the confirmation link.
        confirmation_url = self.get_confirmation_url_from_outbox(email)
        result = self.client_get(confirmation_url)
        self.assertEqual(result.status_code, 200)

        # Pick a password and agree to the ToS.
        result = self.submit_reg_form_for_user(username, password, domain)
        self.assertEqual(result.status_code, 302)

        user_profile = get_user_profile_by_email(email)
        # The new user inherits the realm's default language ("de").
        self.assertEqual(user_profile.default_language, realm.default_language)
        from django.core.mail import outbox
        outbox.pop()

    def test_unique_completely_open_domain(self):
        # type: () -> None
        """Signup works when exactly one realm is completely open (the MIT
        realm is deactivated to make the zulip realm unique)."""
        username = "user1"
        password = "test"
        email = "user1@acme.com"
        subdomain = "zulip"
        realm_name = "Zulip"

        realm = get_realm('zulip')
        realm.restricted_to_domain = False
        realm.invite_required = False
        realm.save()

        realm = get_realm('mit')
        do_deactivate_realm(realm)
        realm.save()

        result = self.client_post('/register/', {'email': email})
        self.assertEqual(result.status_code, 302)
        self.assertTrue(result["Location"].endswith(
            "/accounts/send_confirm/%s" % (email,)))
        result = self.client_get(result["Location"])
        self.assert_in_response("Check your email so we can get started.", result)

        # Visit the confirmation link.
        confirmation_url = self._get_confirmation_url(email)
        result = self.client_get(confirmation_url)
        self.assertEqual(result.status_code, 200)
        result = self.submit_reg_form_for_user(username,
                                               password,
                                               domain='acme.com',
                                               realm_name=realm_name,
                                               realm_subdomain=subdomain,
                                               # Pass HTTP_HOST for the target subdomain
                                               HTTP_HOST=subdomain + ".testserver")
        self.assert_in_success_response(["You're almost there."], result)

    def test_completely_open_domain_success(self):
        # type: () -> None
        """Signup via the realm-specific /register/zulip/ URL succeeds for a
        completely open realm."""
        username = "user1"
        password = "test"
        email = "user1@acme.com"
        subdomain = "zulip"
        realm_name = "Zulip"

        realm = get_realm('zulip')
        realm.restricted_to_domain = False
        realm.invite_required = False
        realm.save()

        result = self.client_post('/register/zulip/', {'email': email})
        self.assertEqual(result.status_code, 302)
        self.assertTrue(result["Location"].endswith(
            "/accounts/send_confirm/%s" % (email,)))
        result = self.client_get(result["Location"])
        self.assert_in_response("Check your email so we can get started.", result)

        # Visit the confirmation link.
        confirmation_url = self._get_confirmation_url(email)
        result = self.client_get(confirmation_url)
        self.assertEqual(result.status_code, 200)
        result = self.submit_reg_form_for_user(username,
                                               password,
                                               domain='acme.com',
                                               realm_name=realm_name,
                                               realm_subdomain=subdomain,
                                               # Pass HTTP_HOST for the target subdomain
                                               HTTP_HOST=subdomain + ".testserver")
        self.assert_in_success_response(["You're almost there."], result)

    def test_failed_signup_due_to_restricted_domain(self):
        # type: () -> None
        """An out-of-domain address is rejected by HomepageForm for a
        domain-restricted realm."""
        realm = get_realm('zulip')
        with self.settings(REALMS_HAVE_SUBDOMAINS = True):
            request = HostRequestMock(host = realm.host)
            request.session = {} # type: ignore
            form = HomepageForm({'email': 'user@acme.com'}, realm=realm)
            self.assertIn("trying to join, zulip, only allows users with e-mail", form.errors['email'][0])

    def test_failed_signup_due_to_invite_required(self):
        # type: () -> None
        """Direct signup is rejected when the realm requires an invitation."""
        realm = get_realm('zulip')
        realm.invite_required = True
        realm.save()
        request = HostRequestMock(host = realm.host)
        request.session = {} # type: ignore
        form = HomepageForm({'email': 'user@zulip.com'}, realm=realm)
        self.assertIn("Please request an invite from", form.errors['email'][0])

    def test_failed_signup_due_to_nonexistent_realm(self):
        # type: () -> None
        """Signup on a subdomain with no corresponding realm is rejected."""
        with self.settings(REALMS_HAVE_SUBDOMAINS = True):
            request = HostRequestMock(host = 'acme.' + settings.EXTERNAL_HOST)
            request.session = {} # type: ignore
            form = HomepageForm({'email': 'user@acme.com'}, realm=None)
            self.assertIn("organization you are trying to join does not exist", form.errors['email'][0])

    def test_registration_through_ldap(self):
        # type: () -> None
        """Registration with LDAP population pre-fills the user's full name
        from the directory; a None attribute (TypeError path) still lets
        registration proceed without the name."""
        username = "newuser"
        password = "testing"
        domain = "zulip.com"
        email = "newuser@zulip.com"
        subdomain = "zulip"
        realm_name = "Zulip"
        ldap_user_attr_map = {'full_name': 'fn', 'short_name': 'sn'}

        ldap_patcher = patch('django_auth_ldap.config.ldap.initialize')
        mock_initialize = ldap_patcher.start()
        mock_ldap = MockLDAP()
        mock_initialize.return_value = mock_ldap

        mock_ldap.directory = {
            'uid=newuser,ou=users,dc=zulip,dc=com': {
                'userPassword': 'testing',
                'fn': ['New User Name']
            }
        }

        with patch('zerver.views.get_subdomain', return_value=subdomain):
            result = self.client_post('/register/', {'email': email})

        self.assertEqual(result.status_code, 302)
        self.assertTrue(result["Location"].endswith(
            "/accounts/send_confirm/%s" % (email,)))
        result = self.client_get(result["Location"])
        self.assert_in_response("Check your email so we can get started.", result)

        # Visit the confirmation link.
        confirmation_url = self._get_confirmation_url(email)

        with self.settings(
                POPULATE_PROFILE_VIA_LDAP=True,
                LDAP_APPEND_DOMAIN='zulip.com',
                AUTH_LDAP_BIND_PASSWORD='',
                AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
                AUTHENTICATION_BACKENDS=('zproject.backends.ZulipLDAPAuthBackend',),
                AUTH_LDAP_USER_DN_TEMPLATE='uid=%(user)s,ou=users,dc=zulip,dc=com'):
            result = self.client_get(confirmation_url)
            self.assertEqual(result.status_code, 200)
            result = self.submit_reg_form_for_user(username,
                                                   password,
                                                   domain=domain,
                                                   realm_name=realm_name,
                                                   realm_subdomain=subdomain,
                                                   from_confirmation='1',
                                                   # Pass HTTP_HOST for the target subdomain
                                                   HTTP_HOST=subdomain + ".testserver")

            self.assert_in_success_response(["You're almost there.",
                                             "New User Name",
                                             "newuser@zulip.com"],
                                            result)

            # Test the TypeError exception handler
            mock_ldap.directory = {
                'uid=newuser,ou=users,dc=zulip,dc=com': {
                    'userPassword': 'testing',
                    'fn': None  # This will raise TypeError
                }
            }
            result = self.submit_reg_form_for_user(username,
                                                   password,
                                                   domain=domain,
                                                   realm_name=realm_name,
                                                   realm_subdomain=subdomain,
                                                   from_confirmation='1',
                                                   # Pass HTTP_HOST for the target subdomain
                                                   HTTP_HOST=subdomain + ".testserver")
            self.assert_in_success_response(["You're almost there.",
                                             "newuser@zulip.com"],
                                            result)

        mock_ldap.reset()
        mock_initialize.stop()

    @patch('DNS.dnslookup', return_value=[['sipbtest:*:20922:101:Fred Sipb,,,:/mit/sipbtest:/bin/athena/tcsh']])
    def test_registration_of_mirror_dummy_user(self, ignored):
        # type: (Any) -> None
        """A deactivated mirror-dummy user can register through the normal
        signup flow and ends up logged in."""
        username = "sipbtest"
        password = "test"
        domain = "mit.edu"
        email = "sipbtest@mit.edu"
        subdomain = "sipb"
        realm_name = "MIT"

        user_profile = get_user_profile_by_email(email)
        user_profile.is_mirror_dummy = True
        user_profile.is_active = False
        user_profile.save()

        result = self.client_post('/register/', {'email': email})
        self.assertEqual(result.status_code, 302)
        self.assertTrue(result["Location"].endswith(
            "/accounts/send_confirm/%s" % (email,)))
        result = self.client_get(result["Location"])
        self.assert_in_response("Check your email so we can get started.", result)

        # Visit the confirmation link.
        confirmation_url = self._get_confirmation_url(email)
        result = self.client_get(confirmation_url)
        self.assertEqual(result.status_code, 200)
        # First submission (from the confirmation page) re-renders the form.
        result = self.submit_reg_form_for_user(username,
                                               password,
                                               domain=domain,
                                               realm_name=realm_name,
                                               realm_subdomain=subdomain,
                                               from_confirmation='1',
                                               # Pass HTTP_HOST for the target subdomain
                                               HTTP_HOST=subdomain + ".testserver")
        self.assertEqual(result.status_code, 200)
        # Second submission completes registration and logs the user in.
        result = self.submit_reg_form_for_user(username,
                                               password,
                                               domain=domain,
                                               realm_name=realm_name,
                                               realm_subdomain=subdomain,
                                               # Pass HTTP_HOST for the target subdomain
                                               HTTP_HOST=subdomain + ".testserver")
        self.assertEqual(result.status_code, 302)
        self.assertEqual(get_session_dict_user(self.client.session), user_profile.id)
class DeactivateUserTest(ZulipTestCase):
    """Tests for self-deactivation via DELETE /json/users/me."""

    def test_deactivate_user(self):
        # type: () -> None
        """A user can deactivate their own account, after which login fails."""
        email = 'hamlet@zulip.com'
        self.login(email)
        profile = get_user_profile_by_email(email)
        self.assertTrue(profile.is_active)
        response = self.client_delete('/json/users/me')
        self.assert_json_success(response)
        # Re-fetch to observe the deactivation.
        profile = get_user_profile_by_email(email)
        self.assertFalse(profile.is_active)
        # A deactivated account must not be able to authenticate.
        self.login(email, fails=True)

    def test_do_not_deactivate_final_admin(self):
        # type: () -> None
        """The only realm administrator cannot deactivate themselves; once a
        second admin exists, self-deactivation succeeds."""
        admin_email = 'iago@zulip.com'
        self.login(admin_email)
        admin = get_user_profile_by_email(admin_email)
        self.assertTrue(admin.is_active)
        response = self.client_delete('/json/users/me')
        self.assert_json_error(response, "Cannot deactivate the only organization administrator")
        # The sole admin is still active and still an admin.
        admin = get_user_profile_by_email(admin_email)
        self.assertTrue(admin.is_active)
        self.assertTrue(admin.is_realm_admin)
        # Promote a second administrator; now self-deactivation is allowed.
        second_admin = get_user_profile_by_email('hamlet@zulip.com')
        do_change_is_admin(second_admin, True)
        self.assertTrue(second_admin.is_realm_admin)
        response = self.client_delete('/json/users/me')
        self.assert_json_success(response)
        # Restore admin status on the (now deactivated) original admin.
        do_change_is_admin(admin, True)
| {
"content_hash": "bf9187736f23108f33a3debb689a0d68",
"timestamp": "",
"source": "github",
"line_count": 1181,
"max_line_length": 114,
"avg_line_length": 43.38865368331922,
"alnum_prop": 0.5740213106436126,
"repo_name": "niftynei/zulip",
"id": "76b38b716420a0cf14a640f6a3603cfd2ac560cb",
"size": "51278",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "zerver/tests/test_signup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "253334"
},
{
"name": "Groovy",
"bytes": "5509"
},
{
"name": "HTML",
"bytes": "484619"
},
{
"name": "JavaScript",
"bytes": "1449321"
},
{
"name": "Nginx",
"bytes": "1280"
},
{
"name": "Pascal",
"bytes": "1113"
},
{
"name": "Perl",
"bytes": "401825"
},
{
"name": "Puppet",
"bytes": "82787"
},
{
"name": "Python",
"bytes": "3153710"
},
{
"name": "Ruby",
"bytes": "249748"
},
{
"name": "Shell",
"bytes": "37195"
}
],
"symlink_target": ""
} |
from kid_readout.utils.time_tools import date_to_unix_time
# Table of cryostat cooldown records, one dict per cooldown.  Each entry
# records the cooldown start date (ISO string), a free-form description of
# the hardware configuration, the optical loading state ('dark' or 'light'),
# and the chip identifier.  Entries are written newest-first here and sorted
# oldest-first below.
by_unix_time_table = [
    dict(date='2017-04-14',
         description='Stanford SiNxonNb01-0404 SiNx on Nb CPW1 medley chip in holder H1 and lid L1 with mu-metal magnetic shield.',
         optical_state='dark',
         chip_id='SiNxonNb01-0404'
         ),
    dict(date='2017-03-03',
         description='Stanford TWB08-20A Nb-SiNx-Nb low-frequency (2-4 GHz) microstrip resonator chip in holder H5 and lid L5 with mu-metal magnetic shield.',
         optical_state='dark',
         chip_id='TWB08-20A'
         ),
    dict(date='2017-02-22',
         description='Stanford MKIDArray01-0101 niobium-over-aluminum 23-pixel filter-off-membrane (version 1) chip in holder H7 and lid L7 with new mu-metal magnetic shield.',
         optical_state='dark',
         chip_id='MKIDArray01-0101'
         ),
    dict(date='2017-02-10',
         description='Stanford OneLayerNb01-0403 niobium v1 CPW medley chip in holder H6 and lid L6 (taped) with new mu-metal magnetic shield.',
         optical_state='dark',
         # NOTE(review): chip_id has a trailing space -- confirm whether any
         # lookup relies on an exact string match before cleaning it up.
         chip_id='OneLayerNb01-0403 '
         ),
    dict(date='2016-10-11',
         description='Stanford OneLayerAlStep01-0403 aluminum v1 CPW medley chip in holder H2 and lid L2, taped.',
         optical_state='dark',
         # NOTE(review): trailing space in chip_id here too -- see above.
         chip_id='OneLayerAlStep01-0403 '
         ),
    dict(date='2016-08-03',
         description='Stanford TwoLayer02-0304 v1 CPW medley chip in holder H1 and lid L1, taped.',
         optical_state='dark',
         chip_id='TwoLayer02-0304'
         ),
    dict(date='2016-07-05',
         description='Stanford TwoLayer01-0202 v1 CPW medley chip in holder H1 and lid L1, taped.',
         optical_state='dark',
         chip_id='TwoLayer01-0202'
         ),
    dict(date='2016-06-21',
         description='Stanford TwoLayer01-0306 v1 CPW medley chip in holder H1 and lid L1, taped and mounted normal to ambient magnetic field.',
         optical_state='dark',
         chip_id='TwoLayer01-0306'
         ),
    dict(date='2016-06-09',
         description='Stanford TwoLayer01-0306 v1 CPW medley chip in holder H1 and lid L1, taped.',
         optical_state='dark',
         chip_id='TwoLayer01-0306'
         ),
    dict(date='2016-04-20',
         description='Stanford Demo05AlMn-0506 v1 CPW coupling chip in holder H1 and lid L1, not taped.',
         optical_state='dark',
         chip_id='Demo05AlMn-0506'
         ),
    dict(date='2016-04-08',
         description='Stanford Demo02AlMn-0506 v1 CPW coupling chip in holder H1 and lid L1, not taped.',
         optical_state='dark',
         chip_id='Demo02AlMn-0506'
         ),
    dict(date='2016-03-04',
         description='STAR Cryo 160105 v1 (almost) CPW coupling chip in holder H3 and lid L3.',
         optical_state='dark',
         chip_id='160105 v1 (almost) CPW coupling chip'
         ),
    # Lots of missing cooldowns here.
    dict(date='2015-07-09',
         description='STAR Cryo 150509 2015-04 nevins dual pol test chip Al horn package, AR chip, aluminum tape over '
                     'horns. 1550 nm LED illumination via fiber.',
         optical_state='light',
         chip_id='150509 2015-04 nevins',
         ),
    dict(date='2015-06-29',
         description='STAR Cryo 150509 2015-04 nevins dual pol test chip Al horn package, AR chip, aluminum tape over '
                     'horns',
         optical_state='dark',
         chip_id='150509 2015-04 nevins',
         ),
    dict(date='2015-06-15',
         description='ASU-1 2015-04 nevins dual pol test chip Al horn package, AR chip, aluminum tape over '
                     'horns',
         optical_state='dark',
         chip_id='ASU-1 2015-04 nevins',
         ),
    dict(date='2015-05-06',
         description='JPL 5x4 array in Al horn package in superdark tamale with magnetic field test coil',
         optical_state='dark',
         chip_id='JPL 5x4',
         ),
    dict(date='2015-04-29',
         description='JPL 5x4 array with Stycast filter and blackbody load',
         optical_state='light',
         chip_id='JPL 5x4',
         ),
    dict(date='2015-02-27',
         description='YBCO 141217-3 4 element frontside illuminated with LEDs',
         optical_state='light',
         chip_id='YBCO 141217-3 4 element',
         ),
    dict(date='2015-01-30',
         description='YBCO 141217-3 9 element frontside illuminated with LEDs',
         optical_state='light',
         chip_id='YBCO 141217-3 9 element',
         ),
    dict(date='2015-01-13',
         description='YBCO 141217-3 9 element backside illuminated with LEDs',
         optical_state='light',
         chip_id='YBCO 141217-3 9 element',
         ),
    dict(date='2014-12-11',
         description='STAR Cryo 4x5 130919 0813f12 ASU Al horn package, AR chip, aluminum tape over horns, '
                     'copper shield soldered shut around package, steelcast coax filters, small hole in copper and '
                     'aluminum shields to let in 1550 nm and red LED light',
         optical_state='light',
         chip_id='130919 0813f12',
         ),
    dict(date='2014-12-01',
         description='STAR Cryo 4x5 130919 0813f12 ASU Al horn package, AR chip, aluminum tape over horns, '
                     'copper shield soldered shut around package, steelcast coax filters',
         optical_state='dark',
         chip_id='130919 0813f12',
         ),
    dict(date='2014-08-11',
         description='STAR Cryo 3x3 140423 0813f4 JPL window package, Al tape with hole for red LED',
         optical_state='dark',
         chip_id='140423 0813f4'),
    dict(date='2014-07-28',
         description='STAR Cryo 5x4 130919 0813f12 Aluminum package, taped horns',
         optical_state='dark',
         chip_id='130919 0813f12'),
    dict(date='2014-04-14',
         description='STAR Cryo 3x3 0813f5 JPL window package, Al tape cover, encased in copper tamale',
         optical_state='dark',
         chip_id='130919 0813f5'),
    dict(date='2014-02-21',
         description='STAR Cryo 3x3 0813f5 JPL window package, Al tape cover part 2',
         optical_state='dark',
         chip_id='130919 0813f5'),
    dict(date='2014-02-10',
         description='STAR Cryo 3x3 0813f5 JPL window package, Al tape cover',
         optical_state='dark',
         chip_id='130919 0813f5'),
]

# Sort the table chronologically by cooldown start date, and build a parallel
# list of Unix timestamps -- presumably for bisection-style lookup of the
# cooldown active at a given time by callers of this module (verify usage).
by_unix_time_table.sort(key=lambda x: date_to_unix_time(x['date']))
_unix_time_index = [date_to_unix_time(x['date']) for x in by_unix_time_table]
| {
"content_hash": "e9606d97ba1131510d56c10f3d99e21e",
"timestamp": "",
"source": "github",
"line_count": 144,
"max_line_length": 176,
"avg_line_length": 44.65277777777778,
"alnum_prop": 0.615552099533437,
"repo_name": "ColumbiaCMB/kid_readout",
"id": "50653babccec2a3b3fa0875f08cd0a3bed402c97",
"size": "6430",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kid_readout/analysis/resources/hpd_experiments.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "13672"
},
{
"name": "Python",
"bytes": "2033932"
}
],
"symlink_target": ""
} |
"""Contains the convolutional layer classes and their functional aliases.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
from six.moves import xrange # pylint: disable=redefined-builtin
import numpy as np
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import standard_ops
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.framework import tensor_shape
from tensorflow.python.layers import base
from tensorflow.python.layers import utils
from tensorflow.python import framework
class _Conv(base.Layer):
  """Abstract nD convolution layer (private, used as implementation base).
  This layer creates a convolution kernel that is convolved
  (actually cross-correlated) with the layer input to produce a tensor of
  outputs. If `use_bias` is True (and a `bias_initializer` is provided),
  a bias vector is created and added to the outputs. Finally, if
  `activation` is not `None`, it is applied to the outputs as well.
  Arguments:
    rank: An integer, the rank of the convolution, e.g. "2" for 2D convolution.
    filters: Integer, the dimensionality of the output space (i.e. the number
      of filters in the convolution).
    kernel_size: An integer or tuple/list of n integers, specifying the
      length of the convolution window.
    strides: An integer or tuple/list of n integers,
      specifying the stride length of the convolution.
      Specifying any stride value != 1 is incompatible with specifying
      any `dilation_rate` value != 1.
    padding: One of `"valid"` or `"same"` (case-insensitive).
    data_format: A string, one of `channels_last` (default) or `channels_first`.
      The ordering of the dimensions in the inputs.
      `channels_last` corresponds to inputs with shape
      `(batch, ..., channels)` while `channels_first` corresponds to
      inputs with shape `(batch, channels, ...)`.
    dilation_rate: An integer or tuple/list of n integers, specifying
      the dilation rate to use for dilated convolution.
      Currently, specifying any `dilation_rate` value != 1 is
      incompatible with specifying any `strides` value != 1.
    activation: Activation function. Set it to None to maintain a
      linear activation.
    use_bias: Boolean, whether the layer uses a bias.
    kernel_initializer: An initializer for the convolution kernel.
    bias_initializer: An initializer for the bias vector. If None, no bias will
      be applied.
    kernel_regularizer: Optional regularizer for the convolution kernel.
    bias_regularizer: Optional regularizer for the bias vector.
    activity_regularizer: Regularizer function for the output.
    trainable: Boolean, if `True` also add variables to the graph collection
      `GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
    name: A string, the name of the layer.
  """
  def __init__(self, rank,
               filters,
               kernel_size,
               strides=1,
               padding='valid',
               data_format='channels_last',
               dilation_rate=1,
               activation=None,
               use_bias=True,
               kernel_initializer=None,
               bias_initializer=init_ops.zeros_initializer(),
               kernel_regularizer=None,
               bias_regularizer=None,
               activity_regularizer=None,
               trainable=True,
               name=None,
               **kwargs):
    super(_Conv, self).__init__(trainable=trainable,
                                name=name, **kwargs)
    self.rank = rank
    self.filters = filters
    # normalize_tuple expands a scalar (e.g. 3) into an n-tuple (3, 3, ...)
    # so downstream code can always index per spatial dimension.
    self.kernel_size = utils.normalize_tuple(kernel_size, rank, 'kernel_size')
    self.strides = utils.normalize_tuple(strides, rank, 'strides')
    self.padding = utils.normalize_padding(padding)
    self.data_format = utils.normalize_data_format(data_format)
    self.dilation_rate = utils.normalize_tuple(
        dilation_rate, rank, 'dilation_rate')
    self.activation = activation
    self.use_bias = use_bias
    self.kernel_initializer = kernel_initializer
    self.bias_initializer = bias_initializer
    self.kernel_regularizer = kernel_regularizer
    self.bias_regularizer = bias_regularizer
    self.activity_regularizer = activity_regularizer
    # Inputs must be rank + 2 dimensional: batch + spatial dims + channels.
    self.input_spec = base.InputSpec(ndim=self.rank + 2)
  def build(self, input_shape):
    """Creates the `kernel` (and optional `bias`) variables.

    The channel dimension of `input_shape` must be statically known; it
    determines the input depth of the convolution kernel.
    """
    input_shape = tensor_shape.TensorShape(input_shape)
    if self.data_format == 'channels_first':
      channel_axis = 1
    else:
      channel_axis = -1
    if input_shape[channel_axis].value is None:
      raise ValueError('The channel dimension of the inputs '
                       'should be defined. Found `None`.')
    input_dim = input_shape[channel_axis].value
    # Kernel layout is spatial dims + (in_channels, out_channels), as
    # expected by nn.convolution.
    kernel_shape = self.kernel_size + (input_dim, self.filters)
    self.kernel = self.add_variable(name='kernel',
                                    shape=kernel_shape,
                                    initializer=self.kernel_initializer,
                                    regularizer=self.kernel_regularizer,
                                    trainable=True,
                                    dtype=self.dtype)
    if self.use_bias:
      self.bias = self.add_variable(name='bias',
                                    shape=(self.filters,),
                                    initializer=self.bias_initializer,
                                    regularizer=self.bias_regularizer,
                                    trainable=True,
                                    dtype=self.dtype)
    else:
      self.bias = None
    # Pin the channel dimension in the input spec so subsequent calls with a
    # different input depth fail fast.
    self.input_spec = base.InputSpec(ndim=self.rank + 2,
                                     axes={channel_axis: input_dim})
    self.built = True
  def call(self, inputs):
    """Applies the convolution, then bias add and activation if configured."""
    outputs = nn.convolution(
        input=inputs,
        filter=self.kernel,
        dilation_rate=self.dilation_rate,
        strides=self.strides,
        padding=self.padding.upper(),
        data_format=utils.convert_data_format(self.data_format, self.rank + 2))
    if self.bias is not None:
      # Bias handling differs by rank for the channels_first layout because
      # nn.bias_add only supports NCHW (4D) inputs natively.
      if self.data_format == 'channels_first':
        if self.rank == 1:
          # nn.bias_add does not accept a 1D input tensor.
          bias = array_ops.reshape(self.bias, (1, self.filters, 1))
          outputs += bias
        if self.rank == 2:
          outputs = nn.bias_add(outputs, self.bias, data_format='NCHW')
        if self.rank == 3:
          # As of Mar 2017, direct addition is significantly slower than
          # bias_add when computing gradients. To use bias_add, we collapse Z
          # and Y into a single dimension to obtain a 4D input tensor.
          outputs_shape = outputs.shape.as_list()
          outputs_4d = array_ops.reshape(outputs,
                                         [outputs_shape[0], outputs_shape[1],
                                          outputs_shape[2] * outputs_shape[3],
                                          outputs_shape[4]])
          outputs_4d = nn.bias_add(outputs_4d, self.bias, data_format='NCHW')
          outputs = array_ops.reshape(outputs_4d, outputs_shape)
      else:
        # channels_last: bias_add handles any rank with NHWC semantics.
        outputs = nn.bias_add(outputs, self.bias, data_format='NHWC')
    if self.activation is not None:
      return self.activation(outputs)
    return outputs
  def _compute_output_shape(self, input_shape):
    """Computes the static output shape for the given static input shape.

    The spatial dimensions are mapped through conv_output_length; the channel
    dimension becomes `self.filters`, positioned according to `data_format`.
    """
    input_shape = tensor_shape.TensorShape(input_shape).as_list()
    if self.data_format == 'channels_last':
      space = input_shape[1:-1]
      new_space = []
      for i in range(len(space)):
        new_dim = utils.conv_output_length(
            space[i],
            self.kernel_size[i],
            padding=self.padding,
            stride=self.strides[i],
            dilation=self.dilation_rate[i])
        new_space.append(new_dim)
      return tensor_shape.TensorShape([input_shape[0]] + new_space +
                                      [self.filters])
    else:
      space = input_shape[2:]
      new_space = []
      for i in range(len(space)):
        new_dim = utils.conv_output_length(
            space[i],
            self.kernel_size[i],
            padding=self.padding,
            stride=self.strides[i],
            dilation=self.dilation_rate[i])
        new_space.append(new_dim)
      return tensor_shape.TensorShape([input_shape[0], self.filters] +
                                      new_space)
class Conv1D(_Conv):
  """1D convolution layer (e.g. temporal convolution).

  This layer creates a convolution kernel that is convolved
  (actually cross-correlated) with the layer input to produce a tensor of
  outputs. If `use_bias` is True (and a `bias_initializer` is provided),
  a bias vector is created and added to the outputs. Finally, if
  `activation` is not `None`, it is applied to the outputs as well.

  Arguments:
    filters: Integer, the dimensionality of the output space (i.e. the number
      of filters in the convolution).
    kernel_size: An integer or tuple/list of a single integer, specifying the
      length of the 1D convolution window.
    strides: An integer or tuple/list of a single integer,
      specifying the stride length of the convolution.
      Specifying any stride value != 1 is incompatible with specifying
      any `dilation_rate` value != 1.
    padding: One of `"valid"` or `"same"` (case-insensitive).
    data_format: A string, one of `channels_last` (default) or `channels_first`.
      The ordering of the dimensions in the inputs.
      `channels_last` corresponds to inputs with shape
      `(batch, length, channels)` while `channels_first` corresponds to
      inputs with shape `(batch, channels, length)`.
    dilation_rate: An integer or tuple/list of a single integer, specifying
      the dilation rate to use for dilated convolution.
      Currently, specifying any `dilation_rate` value != 1 is
      incompatible with specifying any `strides` value != 1.
    activation: Activation function. Set it to None to maintain a
      linear activation.
    use_bias: Boolean, whether the layer uses a bias.
    kernel_initializer: An initializer for the convolution kernel.
    bias_initializer: An initializer for the bias vector. If None, no bias will
      be applied.
    kernel_regularizer: Optional regularizer for the convolution kernel.
    bias_regularizer: Optional regularizer for the bias vector.
    activity_regularizer: Regularizer function for the output.
    trainable: Boolean, if `True` also add variables to the graph collection
      `GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
    name: A string, the name of the layer.
  """
  def __init__(self, filters,
               kernel_size,
               strides=1,
               padding='valid',
               data_format='channels_last',
               dilation_rate=1,
               activation=None,
               use_bias=True,
               kernel_initializer=None,
               bias_initializer=init_ops.zeros_initializer(),
               kernel_regularizer=None,
               bias_regularizer=None,
               activity_regularizer=None,
               trainable=True,
               name=None,
               **kwargs):
    # Bug fix: the super() call previously referenced `Convolution1D` (an
    # alias) instead of this class's own name. That raises NameError unless
    # the alias happens to be defined elsewhere, and is inconsistent with
    # Conv2D/Conv3D, which reference their own names.
    super(Conv1D, self).__init__(
        rank=1,
        filters=filters,
        kernel_size=kernel_size,
        strides=strides,
        padding=padding,
        data_format=data_format,
        dilation_rate=dilation_rate,
        activation=activation,
        use_bias=use_bias,
        kernel_initializer=kernel_initializer,
        bias_initializer=bias_initializer,
        kernel_regularizer=kernel_regularizer,
        bias_regularizer=bias_regularizer,
        activity_regularizer=activity_regularizer,
        trainable=trainable,
        name=name, **kwargs)
def conv1d(inputs,
           filters,
           kernel_size,
           strides=1,
           padding='valid',
           data_format='channels_last',
           dilation_rate=1,
           activation=None,
           use_bias=True,
           kernel_initializer=None,
           bias_initializer=init_ops.zeros_initializer(),
           kernel_regularizer=None,
           bias_regularizer=None,
           activity_regularizer=None,
           trainable=True,
           name=None,
           reuse=None):
  """Functional interface for 1D convolution layer (e.g. temporal convolution).

  Builds a `Conv1D` layer with the given configuration and immediately
  applies it to `inputs`. The kernel is convolved (actually cross-correlated)
  with the input; a bias is added when `use_bias` is True, and `activation`
  is applied last when it is not `None`.

  Arguments:
    inputs: Tensor input.
    filters: Integer, the dimensionality of the output space (number of
      convolution filters).
    kernel_size: An integer or tuple/list of a single integer giving the
      length of the 1D convolution window.
    strides: An integer or tuple/list of a single integer giving the stride
      of the convolution. A stride != 1 is incompatible with a
      `dilation_rate` != 1.
    padding: One of `"valid"` or `"same"` (case-insensitive).
    data_format: `channels_last` (inputs shaped `(batch, length, channels)`,
      the default) or `channels_first` (inputs shaped
      `(batch, channels, length)`).
    dilation_rate: An integer or tuple/list of a single integer giving the
      dilation rate. A `dilation_rate` != 1 is incompatible with a
      stride != 1.
    activation: Activation function; None keeps a linear activation.
    use_bias: Boolean, whether the layer uses a bias.
    kernel_initializer: An initializer for the convolution kernel.
    bias_initializer: An initializer for the bias vector. If None, no bias
      will be applied.
    kernel_regularizer: Optional regularizer for the convolution kernel.
    bias_regularizer: Optional regularizer for the bias vector.
    activity_regularizer: Regularizer function for the output.
    trainable: Boolean, if `True` also add variables to the graph collection
      `GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
    name: A string, the name of the layer.
    reuse: Boolean, whether to reuse the weights of a previous layer
      by the same name.

  Returns:
    Output tensor.
  """
  conv_layer = Conv1D(
      filters=filters,
      kernel_size=kernel_size,
      strides=strides,
      padding=padding,
      data_format=data_format,
      dilation_rate=dilation_rate,
      activation=activation,
      use_bias=use_bias,
      kernel_initializer=kernel_initializer,
      bias_initializer=bias_initializer,
      kernel_regularizer=kernel_regularizer,
      bias_regularizer=bias_regularizer,
      activity_regularizer=activity_regularizer,
      trainable=trainable,
      name=name,
      # Match the layer's variable dtype to the incoming tensor.
      dtype=inputs.dtype.base_dtype,
      _reuse=reuse,
      _scope=name)
  return conv_layer.apply(inputs)
class Conv2D(_Conv):
  """2D convolution layer (e.g. spatial convolution over images).

  Thin specialization of `_Conv` with `rank=2`. The layer owns a convolution
  kernel that is convolved (actually cross-correlated) with the input; when
  `use_bias` is True (and a `bias_initializer` is provided) a bias vector is
  added, and `activation`, if not `None`, is applied to the result.

  Arguments:
    filters: Integer, the dimensionality of the output space (number of
      convolution filters).
    kernel_size: An integer or tuple/list of 2 integers giving the width and
      height of the 2D convolution window. A single integer applies to both
      spatial dimensions.
    strides: An integer or tuple/list of 2 integers giving the strides along
      height and width. A single integer applies to both spatial dimensions.
      A stride != 1 is incompatible with a `dilation_rate` != 1.
    padding: One of `"valid"` or `"same"` (case-insensitive).
    data_format: `channels_last` (inputs shaped
      `(batch, height, width, channels)`, the default) or `channels_first`
      (inputs shaped `(batch, channels, height, width)`).
    dilation_rate: An integer or tuple/list of 2 integers giving the dilation
      rate. A single integer applies to both spatial dimensions. A
      `dilation_rate` != 1 is incompatible with a stride != 1.
    activation: Activation function; None keeps a linear activation.
    use_bias: Boolean, whether the layer uses a bias.
    kernel_initializer: An initializer for the convolution kernel.
    bias_initializer: An initializer for the bias vector. If None, no bias
      will be applied.
    kernel_regularizer: Optional regularizer for the convolution kernel.
    bias_regularizer: Optional regularizer for the bias vector.
    activity_regularizer: Regularizer function for the output.
    trainable: Boolean, if `True` also add variables to the graph collection
      `GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
    name: A string, the name of the layer.
  """
  def __init__(self, filters,
               kernel_size,
               strides=(1, 1),
               padding='valid',
               data_format='channels_last',
               dilation_rate=(1, 1),
               activation=None,
               use_bias=True,
               kernel_initializer=None,
               bias_initializer=init_ops.zeros_initializer(),
               kernel_regularizer=None,
               bias_regularizer=None,
               activity_regularizer=None,
               trainable=True,
               name=None,
               **kwargs):
    # All configuration is forwarded unchanged to _Conv; only the rank is
    # fixed here.
    super(Conv2D, self).__init__(
        rank=2, filters=filters, kernel_size=kernel_size, strides=strides,
        padding=padding, data_format=data_format,
        dilation_rate=dilation_rate, activation=activation,
        use_bias=use_bias, kernel_initializer=kernel_initializer,
        bias_initializer=bias_initializer,
        kernel_regularizer=kernel_regularizer,
        bias_regularizer=bias_regularizer,
        activity_regularizer=activity_regularizer,
        trainable=trainable, name=name, **kwargs)
def conv2d(inputs,
           filters,
           kernel_size,
           strides=(1, 1),
           padding='valid',
           data_format='channels_last',
           dilation_rate=(1, 1),
           activation=None,
           use_bias=True,
           kernel_initializer=None,
           bias_initializer=init_ops.zeros_initializer(),
           kernel_regularizer=None,
           bias_regularizer=None,
           activity_regularizer=None,
           trainable=True,
           name=None,
           reuse=None):
  """Functional interface for the 2D convolution layer.

  Builds a `Conv2D` layer with the given configuration and immediately
  applies it to `inputs`. The kernel is convolved (actually cross-correlated)
  with the input; a bias is added when `use_bias` is True, and `activation`
  is applied last when it is not `None`.

  Arguments:
    inputs: Tensor input.
    filters: Integer, the dimensionality of the output space (number of
      convolution filters).
    kernel_size: An integer or tuple/list of 2 integers giving the width and
      height of the 2D convolution window. A single integer applies to both
      spatial dimensions.
    strides: An integer or tuple/list of 2 integers giving the strides along
      height and width. A single integer applies to both spatial dimensions.
      A stride != 1 is incompatible with a `dilation_rate` != 1.
    padding: One of `"valid"` or `"same"` (case-insensitive).
    data_format: `channels_last` (inputs shaped
      `(batch, height, width, channels)`, the default) or `channels_first`
      (inputs shaped `(batch, channels, height, width)`).
    dilation_rate: An integer or tuple/list of 2 integers giving the dilation
      rate. A single integer applies to both spatial dimensions. A
      `dilation_rate` != 1 is incompatible with a stride != 1.
    activation: Activation function; None keeps a linear activation.
    use_bias: Boolean, whether the layer uses a bias.
    kernel_initializer: An initializer for the convolution kernel.
    bias_initializer: An initializer for the bias vector. If None, no bias
      will be applied.
    kernel_regularizer: Optional regularizer for the convolution kernel.
    bias_regularizer: Optional regularizer for the bias vector.
    activity_regularizer: Regularizer function for the output.
    trainable: Boolean, if `True` also add variables to the graph collection
      `GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
    name: A string, the name of the layer.
    reuse: Boolean, whether to reuse the weights of a previous layer
      by the same name.

  Returns:
    Output tensor.
  """
  conv_layer = Conv2D(
      filters=filters,
      kernel_size=kernel_size,
      strides=strides,
      padding=padding,
      data_format=data_format,
      dilation_rate=dilation_rate,
      activation=activation,
      use_bias=use_bias,
      kernel_initializer=kernel_initializer,
      bias_initializer=bias_initializer,
      kernel_regularizer=kernel_regularizer,
      bias_regularizer=bias_regularizer,
      activity_regularizer=activity_regularizer,
      trainable=trainable,
      name=name,
      # Match the layer's variable dtype to the incoming tensor.
      dtype=inputs.dtype.base_dtype,
      _reuse=reuse,
      _scope=name)
  return conv_layer.apply(inputs)
class Conv3D(_Conv):
  """3D convolution layer (e.g. spatial convolution over volumes).

  Thin specialization of `_Conv` with `rank=3`. The layer owns a convolution
  kernel that is convolved (actually cross-correlated) with the input; when
  `use_bias` is True (and a `bias_initializer` is provided) a bias vector is
  added, and `activation`, if not `None`, is applied to the result.

  Arguments:
    filters: Integer, the dimensionality of the output space (number of
      convolution filters).
    kernel_size: An integer or tuple/list of 3 integers giving the depth,
      height and width of the 3D convolution window. A single integer
      applies to all spatial dimensions.
    strides: An integer or tuple/list of 3 integers giving the strides along
      depth, height and width. A single integer applies to all spatial
      dimensions. A stride != 1 is incompatible with a `dilation_rate` != 1.
    padding: One of `"valid"` or `"same"` (case-insensitive).
    data_format: `channels_last` (inputs shaped
      `(batch, depth, height, width, channels)`, the default) or
      `channels_first` (inputs shaped
      `(batch, channels, depth, height, width)`).
    dilation_rate: An integer or tuple/list of 3 integers giving the dilation
      rate. A single integer applies to all spatial dimensions. A
      `dilation_rate` != 1 is incompatible with a stride != 1.
    activation: Activation function; None keeps a linear activation.
    use_bias: Boolean, whether the layer uses a bias.
    kernel_initializer: An initializer for the convolution kernel.
    bias_initializer: An initializer for the bias vector. If None, no bias
      will be applied.
    kernel_regularizer: Optional regularizer for the convolution kernel.
    bias_regularizer: Optional regularizer for the bias vector.
    activity_regularizer: Regularizer function for the output.
    trainable: Boolean, if `True` also add variables to the graph collection
      `GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
    name: A string, the name of the layer.
  """
  def __init__(self, filters,
               kernel_size,
               strides=(1, 1, 1),
               padding='valid',
               data_format='channels_last',
               dilation_rate=(1, 1, 1),
               activation=None,
               use_bias=True,
               kernel_initializer=None,
               bias_initializer=init_ops.zeros_initializer(),
               kernel_regularizer=None,
               bias_regularizer=None,
               activity_regularizer=None,
               trainable=True,
               name=None,
               **kwargs):
    # All configuration is forwarded unchanged to _Conv; only the rank is
    # fixed here.
    super(Conv3D, self).__init__(
        rank=3, filters=filters, kernel_size=kernel_size, strides=strides,
        padding=padding, data_format=data_format,
        dilation_rate=dilation_rate, activation=activation,
        use_bias=use_bias, kernel_initializer=kernel_initializer,
        bias_initializer=bias_initializer,
        kernel_regularizer=kernel_regularizer,
        bias_regularizer=bias_regularizer,
        activity_regularizer=activity_regularizer,
        trainable=trainable, name=name, **kwargs)
def conv3d(inputs,
           filters,
           kernel_size,
           strides=(1, 1, 1),
           padding='valid',
           data_format='channels_last',
           dilation_rate=(1, 1, 1),
           activation=None,
           use_bias=True,
           kernel_initializer=None,
           bias_initializer=init_ops.zeros_initializer(),
           kernel_regularizer=None,
           bias_regularizer=None,
           activity_regularizer=None,
           trainable=True,
           name=None,
           reuse=None):
  """Functional interface for the 3D convolution layer.

  Builds a `Conv3D` layer with the given configuration and immediately
  applies it to `inputs`. The kernel is convolved (actually cross-correlated)
  with the input; a bias is added when `use_bias` is True, and `activation`
  is applied last when it is not `None`.

  Arguments:
    inputs: Tensor input.
    filters: Integer, the dimensionality of the output space (number of
      convolution filters).
    kernel_size: An integer or tuple/list of 3 integers giving the depth,
      height and width of the 3D convolution window. A single integer
      applies to all spatial dimensions.
    strides: An integer or tuple/list of 3 integers giving the strides along
      depth, height and width. A single integer applies to all spatial
      dimensions. A stride != 1 is incompatible with a `dilation_rate` != 1.
    padding: One of `"valid"` or `"same"` (case-insensitive).
    data_format: `channels_last` (inputs shaped
      `(batch, depth, height, width, channels)`, the default) or
      `channels_first` (inputs shaped
      `(batch, channels, depth, height, width)`).
    dilation_rate: An integer or tuple/list of 3 integers giving the dilation
      rate. A single integer applies to all spatial dimensions. A
      `dilation_rate` != 1 is incompatible with a stride != 1.
    activation: Activation function; None keeps a linear activation.
    use_bias: Boolean, whether the layer uses a bias.
    kernel_initializer: An initializer for the convolution kernel.
    bias_initializer: An initializer for the bias vector. If None, no bias
      will be applied.
    kernel_regularizer: Optional regularizer for the convolution kernel.
    bias_regularizer: Optional regularizer for the bias vector.
    activity_regularizer: Regularizer function for the output.
    trainable: Boolean, if `True` also add variables to the graph collection
      `GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
    name: A string, the name of the layer.
    reuse: Boolean, whether to reuse the weights of a previous layer
      by the same name.

  Returns:
    Output tensor.
  """
  layer = Conv3D(
      filters=filters,
      kernel_size=kernel_size,
      strides=strides,
      padding=padding,
      data_format=data_format,
      dilation_rate=dilation_rate,
      activation=activation,
      use_bias=use_bias,
      kernel_initializer=kernel_initializer,
      bias_initializer=bias_initializer,
      kernel_regularizer=kernel_regularizer,
      bias_regularizer=bias_regularizer,
      activity_regularizer=activity_regularizer,
      trainable=trainable,
      name=name,
      # Bug fix: pass the input's dtype to the layer, consistent with
      # conv1d/conv2d. Without this the layer's variables default to a dtype
      # that may not match `inputs`.
      dtype=inputs.dtype.base_dtype,
      _reuse=reuse,
      _scope=name)
  return layer.apply(inputs)
class SeparableConv2D(Conv2D):
  """Depthwise separable 2D convolution.

  This layer performs a depthwise convolution that acts separately on
  channels, followed by a pointwise convolution that mixes channels.
  If `use_bias` is True and a bias initializer is provided,
  it adds a bias vector to the output.
  It then optionally applies an activation function to produce the final
  output.

  Arguments:
    filters: Integer, the dimensionality of the output space (i.e. the number
      of filters in the convolution).
    kernel_size: A tuple or list of 2 integers specifying the spatial
      dimensions of the filters. Can be a single integer to specify the same
      value for all spatial dimensions.
    strides: A tuple or list of 2 positive integers specifying the strides
      of the convolution. Can be a single integer to specify the same value
      for all spatial dimensions.
      Specifying any `stride` value != 1 is incompatible with specifying
      any `dilation_rate` value != 1.
    padding: One of `"valid"` or `"same"` (case-insensitive).
    data_format: A string, one of `channels_last` (default) or
      `channels_first`. The ordering of the dimensions in the inputs.
      `channels_last` corresponds to inputs with shape
      `(batch, height, width, channels)` while `channels_first` corresponds
      to inputs with shape `(batch, channels, height, width)`.
    dilation_rate: An integer or tuple/list of 2 integers, specifying
      the dilation rate to use for dilated convolution.
      Can be a single integer to specify the same value for
      all spatial dimensions.
      Currently, specifying any `dilation_rate` value != 1 is
      incompatible with specifying any stride value != 1.
    depth_multiplier: The number of depthwise convolution output channels for
      each input channel. The total number of depthwise convolution output
      channels will be equal to `num_filters_in * depth_multiplier`.
    activation: Activation function. Set it to None to maintain a
      linear activation.
    use_bias: Boolean, whether the layer uses a bias.
    depthwise_initializer: An initializer for the depthwise convolution
      kernel.
    pointwise_initializer: An initializer for the pointwise convolution
      kernel.
    bias_initializer: An initializer for the bias vector. If None, no bias
      will be applied.
    depthwise_regularizer: Optional regularizer for the depthwise
      convolution kernel.
    pointwise_regularizer: Optional regularizer for the pointwise
      convolution kernel.
    bias_regularizer: Optional regularizer for the bias vector.
    activity_regularizer: Regularizer function for the output.
    trainable: Boolean, if `True` also add variables to the graph collection
      `GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
    name: A string, the name of the layer.
  """
  def __init__(self, filters,
               kernel_size,
               strides=(1, 1),
               padding='valid',
               data_format='channels_last',
               dilation_rate=(1, 1),
               depth_multiplier=1,
               activation=None,
               use_bias=True,
               depthwise_initializer=None,
               pointwise_initializer=None,
               bias_initializer=init_ops.zeros_initializer(),
               depthwise_regularizer=None,
               pointwise_regularizer=None,
               bias_regularizer=None,
               activity_regularizer=None,
               trainable=True,
               name=None,
               **kwargs):
    super(SeparableConv2D, self).__init__(
        filters=filters,
        kernel_size=kernel_size,
        strides=strides,
        padding=padding,
        data_format=data_format,
        dilation_rate=dilation_rate,
        activation=activation,
        use_bias=use_bias,
        # Bug fix: forward `bias_initializer` to the base class. It was
        # previously dropped, so a user-supplied bias initializer was
        # silently ignored and the base-class default (zeros) was always
        # used when `build` created the bias variable.
        bias_initializer=bias_initializer,
        bias_regularizer=bias_regularizer,
        activity_regularizer=activity_regularizer,
        trainable=trainable,
        name=name,
        **kwargs)
    self.depth_multiplier = depth_multiplier
    self.depthwise_initializer = depthwise_initializer
    self.pointwise_initializer = pointwise_initializer
    self.depthwise_regularizer = depthwise_regularizer
    self.pointwise_regularizer = pointwise_regularizer
  def build(self, input_shape):
    """Creates the depthwise kernel, pointwise kernel and optional bias."""
    if len(input_shape) < 4:
      raise ValueError('Inputs to `SeparableConv2D` should have rank 4. '
                       'Received input shape:', str(input_shape))
    if self.data_format == 'channels_first':
      channel_axis = 1
    else:
      channel_axis = 3
    if input_shape[channel_axis] is None:
      raise ValueError('The channel dimension of the inputs to '
                       '`SeparableConv2D` '
                       'should be defined. Found `None`.')
    input_dim = int(input_shape[channel_axis])
    self.input_spec = base.InputSpec(ndim=4, axes={channel_axis: input_dim})
    # Depthwise kernel: one (kh, kw) filter stack per input channel, each
    # producing `depth_multiplier` channels.
    depthwise_kernel_shape = (self.kernel_size[0],
                              self.kernel_size[1],
                              input_dim,
                              self.depth_multiplier)
    # Pointwise kernel: 1x1 convolution mixing the depthwise outputs down to
    # `filters` channels.
    pointwise_kernel_shape = (1, 1,
                              self.depth_multiplier * input_dim,
                              self.filters)
    self.depthwise_kernel = self.add_variable(
        name='depthwise_kernel',
        shape=depthwise_kernel_shape,
        initializer=self.depthwise_initializer,
        regularizer=self.depthwise_regularizer,
        trainable=True,
        dtype=self.dtype)
    self.pointwise_kernel = self.add_variable(
        name='pointwise_kernel',
        shape=pointwise_kernel_shape,
        initializer=self.pointwise_initializer,
        regularizer=self.pointwise_regularizer,
        trainable=True,
        dtype=self.dtype)
    if self.use_bias:
      self.bias = self.add_variable(name='bias',
                                    shape=(self.filters,),
                                    initializer=self.bias_initializer,
                                    regularizer=self.bias_regularizer,
                                    trainable=True,
                                    dtype=self.dtype)
    else:
      self.bias = None
    self.built = True
  def call(self, inputs):
    """Applies the separable convolution, bias add and activation."""
    # nn.separable_conv2d operates in NHWC, so channels_first inputs are
    # transposed in, and the result is transposed back below.
    if self.data_format == 'channels_first':
      # Reshape to channels last
      inputs = array_ops.transpose(inputs, (0, 2, 3, 1))
    # Apply the actual ops.
    outputs = nn.separable_conv2d(
        inputs,
        self.depthwise_kernel,
        self.pointwise_kernel,
        strides=(1,) + self.strides + (1,),
        padding=self.padding.upper(),
        rate=self.dilation_rate)
    if self.data_format == 'channels_first':
      # Reshape to channels first
      outputs = array_ops.transpose(outputs, (0, 3, 1, 2))
    if self.bias is not None:
      outputs = nn.bias_add(
          outputs,
          self.bias,
          data_format=utils.convert_data_format(self.data_format, ndim=4))
    if self.activation is not None:
      return self.activation(outputs)
    return outputs
  def _compute_output_shape(self, input_shape):
    """Computes the static output shape for the given static input shape."""
    input_shape = tensor_shape.TensorShape(input_shape).as_list()
    if self.data_format == 'channels_first':
      rows = input_shape[2]
      cols = input_shape[3]
    else:
      rows = input_shape[1]
      cols = input_shape[2]
    rows = utils.conv_output_length(rows, self.kernel_size[0],
                                    self.padding, self.strides[0])
    cols = utils.conv_output_length(cols, self.kernel_size[1],
                                    self.padding, self.strides[1])
    if self.data_format == 'channels_first':
      return tensor_shape.TensorShape(
          [input_shape[0], self.filters, rows, cols])
    else:
      return tensor_shape.TensorShape(
          [input_shape[0], rows, cols, self.filters])
def separable_conv2d(inputs,
                     filters,
                     kernel_size,
                     strides=(1, 1),
                     padding='valid',
                     data_format='channels_last',
                     dilation_rate=(1, 1),
                     depth_multiplier=1,
                     activation=None,
                     use_bias=True,
                     depthwise_initializer=None,
                     pointwise_initializer=None,
                     bias_initializer=init_ops.zeros_initializer(),
                     depthwise_regularizer=None,
                     pointwise_regularizer=None,
                     bias_regularizer=None,
                     activity_regularizer=None,
                     trainable=True,
                     name=None,
                     reuse=None):
  """Functional interface for the depthwise separable 2D convolution layer.

  This layer performs a depthwise convolution that acts separately on
  channels, followed by a pointwise convolution that mixes channels.
  If `use_bias` is True and a bias initializer is provided,
  it adds a bias vector to the output.
  It then optionally applies an activation function to produce the final output.

  Arguments:
    inputs: Input tensor.
    filters: Integer, the dimensionality of the output space (i.e. the number
      of filters in the convolution).
    kernel_size: A tuple or list of 2 integers specifying the spatial
      dimensions of the filters. Can be a single integer to specify the same
      value for all spatial dimensions.
    strides: A tuple or list of 2 positive integers specifying the strides
      of the convolution. Can be a single integer to specify the same value for
      all spatial dimensions.
      Specifying any `stride` value != 1 is incompatible with specifying
      any `dilation_rate` value != 1.
    padding: One of `"valid"` or `"same"` (case-insensitive).
    data_format: A string, one of `channels_last` (default) or `channels_first`.
      The ordering of the dimensions in the inputs.
      `channels_last` corresponds to inputs with shape
      `(batch, height, width, channels)` while `channels_first` corresponds to
      inputs with shape `(batch, channels, height, width)`.
    dilation_rate: An integer or tuple/list of 2 integers, specifying
      the dilation rate to use for dilated convolution.
      Can be a single integer to specify the same value for
      all spatial dimensions.
      Currently, specifying any `dilation_rate` value != 1 is
      incompatible with specifying any stride value != 1.
    depth_multiplier: The number of depthwise convolution output channels for
      each input channel. The total number of depthwise convolution output
      channels will be equal to `num_filters_in * depth_multiplier`.
    activation: Activation function. Set it to None to maintain a
      linear activation.
    use_bias: Boolean, whether the layer uses a bias.
    depthwise_initializer: An initializer for the depthwise convolution kernel.
    pointwise_initializer: An initializer for the pointwise convolution kernel.
    bias_initializer: An initializer for the bias vector. If None, no bias will
      be applied.
    depthwise_regularizer: Optional regularizer for the depthwise
      convolution kernel.
    pointwise_regularizer: Optional regularizer for the pointwise
      convolution kernel.
    bias_regularizer: Optional regularizer for the bias vector.
    activity_regularizer: Regularizer function for the output.
    trainable: Boolean, if `True` also add variables to the graph collection
      `GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
    name: A string, the name of the layer.
    reuse: Boolean, whether to reuse the weights of a previous layer
      by the same name.

  Returns:
    Output tensor.
  """
  layer = SeparableConv2D(
      filters=filters,
      kernel_size=kernel_size,
      strides=strides,
      padding=padding,
      data_format=data_format,
      dilation_rate=dilation_rate,
      depth_multiplier=depth_multiplier,
      activation=activation,
      use_bias=use_bias,
      depthwise_initializer=depthwise_initializer,
      pointwise_initializer=pointwise_initializer,
      bias_initializer=bias_initializer,
      depthwise_regularizer=depthwise_regularizer,
      pointwise_regularizer=pointwise_regularizer,
      bias_regularizer=bias_regularizer,
      activity_regularizer=activity_regularizer,
      trainable=trainable,
      name=name,
      # Build the layer's variables with the inputs' dtype, for consistency
      # with the conv2d_transpose functional wrapper in this file (which
      # already passes dtype); otherwise non-float32 inputs hit a variable
      # dtype mismatch.
      dtype=inputs.dtype.base_dtype,
      _reuse=reuse,
      _scope=name)
  return layer.apply(inputs)
class Conv2DTranspose(Conv2D):
  """Transposed 2D convolution layer (sometimes called 2D Deconvolution).

  The need for transposed convolutions generally arises
  from the desire to use a transformation going in the opposite direction
  of a normal convolution, i.e., from something that has the shape of the
  output of some convolution to something that has the shape of its input
  while maintaining a connectivity pattern that is compatible with
  said convolution.

  Arguments:
    filters: Integer, the dimensionality of the output space (i.e. the number
      of filters in the convolution).
    kernel_size: A tuple or list of 2 positive integers specifying the spatial
      dimensions of the filters. Can be a single integer to specify the same
      value for all spatial dimensions.
    strides: A tuple or list of 2 positive integers specifying the strides
      of the convolution. Can be a single integer to specify the same value for
      all spatial dimensions.
    padding: one of `"valid"` or `"same"` (case-insensitive).
    data_format: A string, one of `channels_last` (default) or `channels_first`.
      The ordering of the dimensions in the inputs.
      `channels_last` corresponds to inputs with shape
      `(batch, height, width, channels)` while `channels_first` corresponds to
      inputs with shape `(batch, channels, height, width)`.
    activation: Activation function. Set it to None to maintain a
      linear activation.
    use_bias: Boolean, whether the layer uses a bias.
    kernel_initializer: An initializer for the convolution kernel.
    bias_initializer: An initializer for the bias vector. If None, no bias will
      be applied.
    kernel_regularizer: Optional regularizer for the convolution kernel.
    bias_regularizer: Optional regularizer for the bias vector.
    activity_regularizer: Regularizer function for the output.
    trainable: Boolean, if `True` also add variables to the graph collection
      `GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
    name: A string, the name of the layer.
  """

  def __init__(self, filters,
               kernel_size,
               strides=(1, 1),
               padding='valid',
               data_format='channels_last',
               activation=None,
               use_bias=True,
               kernel_initializer=None,
               bias_initializer=init_ops.zeros_initializer(),
               kernel_regularizer=None,
               bias_regularizer=None,
               activity_regularizer=None,
               trainable=True,
               name=None,
               **kwargs):
    super(Conv2DTranspose, self).__init__(
        filters,
        kernel_size,
        strides=strides,
        padding=padding,
        data_format=data_format,
        activation=activation,
        use_bias=use_bias,
        kernel_initializer=kernel_initializer,
        bias_initializer=bias_initializer,
        kernel_regularizer=kernel_regularizer,
        bias_regularizer=bias_regularizer,
        activity_regularizer=activity_regularizer,
        trainable=trainable,
        name=name,
        **kwargs)
    self.input_spec = base.InputSpec(ndim=4)

  def build(self, input_shape):
    """Creates the transposed-convolution kernel and optional bias."""
    if len(input_shape) != 4:
      # Fixed message: the original concatenated 'rank 4' and 'Received'
      # with no separator ("rank 4Received input shape:").
      raise ValueError('Inputs should have rank 4. '
                       'Received input shape: ' + str(input_shape))
    if self.data_format == 'channels_first':
      channel_axis = 1
    else:
      channel_axis = -1
    if input_shape[channel_axis] is None:
      raise ValueError('The channel dimension of the inputs '
                       'should be defined. Found `None`.')
    input_dim = input_shape[channel_axis]
    self.input_spec = base.InputSpec(ndim=4, axes={channel_axis: input_dim})
    # conv2d_transpose expects the kernel with output channels before input
    # channels: (height, width, filters, in_channels).
    kernel_shape = self.kernel_size + (self.filters, input_dim)

    self.kernel = self.add_variable(name='kernel',
                                    shape=kernel_shape,
                                    initializer=self.kernel_initializer,
                                    regularizer=self.kernel_regularizer,
                                    trainable=True,
                                    dtype=self.dtype)
    if self.use_bias:
      self.bias = self.add_variable(name='bias',
                                    shape=(self.filters,),
                                    initializer=self.bias_initializer,
                                    regularizer=self.bias_regularizer,
                                    trainable=True,
                                    dtype=self.dtype)
    else:
      self.bias = None
    self.built = True

  def call(self, inputs):
    inputs_shape = array_ops.shape(inputs)
    batch_size = inputs_shape[0]
    if self.data_format == 'channels_first':
      c_axis, h_axis, w_axis = 1, 2, 3
    else:
      c_axis, h_axis, w_axis = 3, 1, 2

    height, width = inputs_shape[h_axis], inputs_shape[w_axis]
    kernel_h, kernel_w = self.kernel_size
    stride_h, stride_w = self.strides

    # Infer the dynamic output shape:
    out_height = utils.deconv_output_length(height,
                                            kernel_h,
                                            self.padding,
                                            stride_h)
    out_width = utils.deconv_output_length(width,
                                           kernel_w,
                                           self.padding,
                                           stride_w)
    if self.data_format == 'channels_first':
      output_shape = (batch_size, self.filters, out_height, out_width)
      strides = (1, 1, stride_h, stride_w)
    else:
      output_shape = (batch_size, out_height, out_width, self.filters)
      strides = (1, stride_h, stride_w, 1)

    output_shape_tensor = array_ops.stack(output_shape)
    outputs = nn.conv2d_transpose(
        inputs,
        self.kernel,
        output_shape_tensor,
        strides,
        padding=self.padding.upper(),
        data_format=utils.convert_data_format(self.data_format, ndim=4))

    # Infer the static output shape:
    out_shape = inputs.get_shape().as_list()
    out_shape[c_axis] = self.filters
    out_shape[h_axis] = utils.deconv_output_length(out_shape[h_axis],
                                                   kernel_h,
                                                   self.padding,
                                                   stride_h)
    out_shape[w_axis] = utils.deconv_output_length(out_shape[w_axis],
                                                   kernel_w,
                                                   self.padding,
                                                   stride_w)
    outputs.set_shape(out_shape)

    # Fixed: compare against None explicitly instead of `if self.bias:`.
    # `self.bias` is a TF variable here; relying on its truthiness is
    # fragile and inconsistent with the other layers in this file.
    if self.bias is not None:
      outputs = nn.bias_add(
          outputs,
          self.bias,
          data_format=utils.convert_data_format(self.data_format, ndim=4))

    if self.activation is not None:
      return self.activation(outputs)
    return outputs

  def _compute_output_shape(self, input_shape):
    """Computes the static output shape for a given static input shape."""
    input_shape = tensor_shape.TensorShape(input_shape).as_list()
    output_shape = list(input_shape)
    if self.data_format == 'channels_first':
      c_axis, h_axis, w_axis = 1, 2, 3
    else:
      c_axis, h_axis, w_axis = 3, 1, 2

    kernel_h, kernel_w = self.kernel_size
    stride_h, stride_w = self.strides

    output_shape[c_axis] = self.filters
    output_shape[h_axis] = utils.deconv_output_length(
        output_shape[h_axis], kernel_h, self.padding, stride_h)
    output_shape[w_axis] = utils.deconv_output_length(
        output_shape[w_axis], kernel_w, self.padding, stride_w)
    return tensor_shape.TensorShape(output_shape)
def conv2d_transpose(inputs,
                     filters,
                     kernel_size,
                     strides=(1, 1),
                     padding='valid',
                     data_format='channels_last',
                     activation=None,
                     use_bias=True,
                     kernel_initializer=None,
                     bias_initializer=init_ops.zeros_initializer(),
                     kernel_regularizer=None,
                     bias_regularizer=None,
                     activity_regularizer=None,
                     trainable=True,
                     name=None,
                     reuse=None):
  """Functional interface for transposed 2D convolution layer.

  The need for transposed convolutions generally arises
  from the desire to use a transformation going in the opposite direction
  of a normal convolution, i.e., from something that has the shape of the
  output of some convolution to something that has the shape of its input
  while maintaining a connectivity pattern that is compatible with
  said convolution.

  Arguments:
    inputs: Input tensor.
    filters: Integer, the dimensionality of the output space (i.e. the number
      of filters in the convolution).
    kernel_size: A tuple or list of 2 positive integers specifying the spatial
      dimensions of the filters. Can be a single integer to specify the same
      value for all spatial dimensions.
    strides: A tuple or list of 2 positive integers specifying the strides
      of the convolution. Can be a single integer to specify the same value for
      all spatial dimensions.
    padding: one of `"valid"` or `"same"` (case-insensitive).
    data_format: A string, one of `channels_last` (default) or `channels_first`.
      The ordering of the dimensions in the inputs.
      `channels_last` corresponds to inputs with shape
      `(batch, height, width, channels)` while `channels_first` corresponds to
      inputs with shape `(batch, channels, height, width)`.
    activation: Activation function. Set it to `None` to maintain a
      linear activation.
    use_bias: Boolean, whether the layer uses a bias.
    kernel_initializer: An initializer for the convolution kernel.
    bias_initializer: An initializer for the bias vector. If `None`, then no
      bias will be applied.
    kernel_regularizer: Optional regularizer for the convolution kernel.
    bias_regularizer: Optional regularizer for the bias vector.
    activity_regularizer: Regularizer function for the output.
    trainable: Boolean, if `True` also add variables to the graph collection
      `GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
    name: A string, the name of the layer.
    reuse: Boolean, whether to reuse the weights of a previous layer
      by the same name.

  Returns:
    Output tensor.
  """
  layer = Conv2DTranspose(
      filters=filters,
      kernel_size=kernel_size,
      strides=strides,
      padding=padding,
      data_format=data_format,
      activation=activation,
      use_bias=use_bias,
      kernel_initializer=kernel_initializer,
      bias_initializer=bias_initializer,
      kernel_regularizer=kernel_regularizer,
      bias_regularizer=bias_regularizer,
      activity_regularizer=activity_regularizer,
      trainable=trainable,
      name=name,
      # Variables are created with the inputs' dtype rather than the
      # layer default.
      dtype=inputs.dtype.base_dtype,
      _reuse=reuse,
      _scope=name)
  return layer.apply(inputs)
class Conv3DTranspose(Conv3D):
  """Transposed 3D convolution layer (sometimes called 3D Deconvolution).

  Arguments:
    filters: Integer, the dimensionality of the output space (i.e. the number
      of filters in the convolution).
    kernel_size: An integer or tuple/list of 3 integers, specifying the
      depth, height and width of the 3D convolution window.
      Can be a single integer to specify the same value for all spatial
      dimensions.
    strides: An integer or tuple/list of 3 integers, specifying the strides
      of the convolution along the depth, height and width.
      Can be a single integer to specify the same value for all spatial
      dimensions.
    padding: One of `"valid"` or `"same"` (case-insensitive).
    data_format: A string, one of `channels_last` (default) or `channels_first`.
      The ordering of the dimensions in the inputs.
      `channels_last` corresponds to inputs with shape
      `(batch, depth, height, width, channels)` while `channels_first`
      corresponds to inputs with shape
      `(batch, channels, depth, height, width)`.
    activation: Activation function. Set it to `None` to maintain a
      linear activation.
    use_bias: Boolean, whether the layer uses a bias.
    kernel_initializer: An initializer for the convolution kernel.
    bias_initializer: An initializer for the bias vector. If `None`, then no
      bias will be applied.
    kernel_regularizer: Optional regularizer for the convolution kernel.
    bias_regularizer: Optional regularizer for the bias vector.
    activity_regularizer: Regularizer function for the output.
    trainable: Boolean, if `True` also add variables to the graph collection
      `GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
    name: A string, the name of the layer.
  """

  def __init__(self,
               filters,
               kernel_size,
               strides=(1, 1, 1),
               padding='valid',
               data_format='channels_last',
               activation=None,
               use_bias=True,
               kernel_initializer=None,
               bias_initializer=init_ops.zeros_initializer(),
               kernel_regularizer=None,
               bias_regularizer=None,
               activity_regularizer=None,
               trainable=True,
               name=None,
               **kwargs):
    super(Conv3DTranspose, self).__init__(
        filters=filters,
        kernel_size=kernel_size,
        strides=strides,
        padding=padding,
        data_format=data_format,
        activation=activation,
        use_bias=use_bias,
        kernel_initializer=kernel_initializer,
        bias_initializer=bias_initializer,
        kernel_regularizer=kernel_regularizer,
        bias_regularizer=bias_regularizer,
        activity_regularizer=activity_regularizer,
        trainable=trainable,
        name=name,
        **kwargs)

  def build(self, input_shape):
    """Creates the transposed-convolution kernel and optional bias."""
    if len(input_shape) != 5:
      raise ValueError('Inputs should have rank 5, received input shape:',
                       str(input_shape))
    if self.data_format == 'channels_first':
      channel_axis = 1
    else:
      channel_axis = -1
    if input_shape[channel_axis] is None:
      raise ValueError('The channel dimension of the inputs '
                       'should be defined, found None: ' + str(input_shape))
    input_dim = input_shape[channel_axis]
    # conv3d_transpose expects the kernel with output channels before input
    # channels: (depth, height, width, filters, in_channels).
    kernel_shape = self.kernel_size + (self.filters, input_dim)

    self.kernel = self.add_variable(
        'kernel',
        shape=kernel_shape,
        initializer=self.kernel_initializer,
        regularizer=self.kernel_regularizer,
        trainable=True,
        dtype=self.dtype)
    if self.use_bias:
      self.bias = self.add_variable(
          'bias',
          shape=(self.filters,),
          initializer=self.bias_initializer,
          regularizer=self.bias_regularizer,
          trainable=True,
          dtype=self.dtype)
    else:
      self.bias = None
    # Mark the layer as built, matching the other build() implementations
    # in this file (this line was missing).
    self.built = True

  def call(self, inputs):
    inputs_shape = array_ops.shape(inputs)
    batch_size = inputs_shape[0]
    if self.data_format == 'channels_first':
      c_axis, d_axis, h_axis, w_axis = 1, 2, 3, 4
    else:
      c_axis, d_axis, h_axis, w_axis = 4, 1, 2, 3

    depth = inputs_shape[d_axis]
    height = inputs_shape[h_axis]
    width = inputs_shape[w_axis]

    kernel_d, kernel_h, kernel_w = self.kernel_size
    stride_d, stride_h, stride_w = self.strides

    # Infer the dynamic output shape:
    out_depth = utils.deconv_output_length(depth,
                                           kernel_d,
                                           self.padding,
                                           stride_d)
    out_height = utils.deconv_output_length(height,
                                            kernel_h,
                                            self.padding,
                                            stride_h)
    out_width = utils.deconv_output_length(width,
                                           kernel_w,
                                           self.padding,
                                           stride_w)
    if self.data_format == 'channels_first':
      output_shape = (batch_size, self.filters, out_depth, out_height,
                      out_width)
      strides = (1, 1, stride_d, stride_h, stride_w)
    else:
      output_shape = (batch_size, out_depth, out_height, out_width,
                      self.filters)
      strides = (1, stride_d, stride_h, stride_w, 1)

    output_shape_tensor = array_ops.stack(output_shape)
    outputs = nn.conv3d_transpose(
        inputs,
        self.kernel,
        output_shape_tensor,
        strides,
        data_format=utils.convert_data_format(self.data_format, ndim=5),
        padding=self.padding.upper())

    # Infer the static output shape:
    out_shape = inputs.get_shape().as_list()
    out_shape[c_axis] = self.filters
    out_shape[d_axis] = utils.deconv_output_length(out_shape[d_axis],
                                                   kernel_d,
                                                   self.padding,
                                                   stride_d)
    out_shape[h_axis] = utils.deconv_output_length(out_shape[h_axis],
                                                   kernel_h,
                                                   self.padding,
                                                   stride_h)
    out_shape[w_axis] = utils.deconv_output_length(out_shape[w_axis],
                                                   kernel_w,
                                                   self.padding,
                                                   stride_w)
    outputs.set_shape(out_shape)

    # Fixed: compare against None explicitly instead of `if self.bias:`.
    # `self.bias` is a TF variable here; relying on its truthiness is
    # fragile and inconsistent with the other layers in this file.
    if self.bias is not None:
      # nn.bias_add only supports 4D input, so collapse one spatial
      # dimension, add the bias, and restore the original shape.
      outputs_shape = outputs.shape.as_list()
      if self.data_format == 'channels_first':
        outputs_4d = array_ops.reshape(outputs, [
            outputs_shape[0], outputs_shape[1],
            outputs_shape[2] * outputs_shape[3], outputs_shape[4]
        ])
      else:
        outputs_4d = array_ops.reshape(outputs, [
            outputs_shape[0], outputs_shape[1] * outputs_shape[2],
            outputs_shape[3], outputs_shape[4]
        ])
      outputs_4d = nn.bias_add(
          outputs_4d,
          self.bias,
          data_format=utils.convert_data_format(self.data_format, ndim=4))
      outputs = array_ops.reshape(outputs_4d, outputs_shape)

    if self.activation is not None:
      return self.activation(outputs)
    return outputs
def conv3d_transpose(inputs,
                     filters,
                     kernel_size,
                     strides=(1, 1, 1),
                     padding='valid',
                     data_format='channels_last',
                     activation=None,
                     use_bias=True,
                     kernel_initializer=None,
                     bias_initializer=init_ops.zeros_initializer(),
                     kernel_regularizer=None,
                     bias_regularizer=None,
                     activity_regularizer=None,
                     trainable=True,
                     name=None,
                     reuse=None):
  """Functional interface for transposed 3D convolution layer.

  Arguments:
    inputs: Input tensor.
    filters: Integer, the dimensionality of the output space (i.e. the number
      of filters in the convolution).
    kernel_size: A tuple or list of 3 positive integers specifying the spatial
      dimensions of the filters. Can be a single integer to specify the same
      value for all spatial dimensions.
    strides: A tuple or list of 3 positive integers specifying the strides
      of the convolution. Can be a single integer to specify the same value for
      all spatial dimensions.
    padding: one of `"valid"` or `"same"` (case-insensitive).
    data_format: A string, one of `channels_last` (default) or `channels_first`.
      The ordering of the dimensions in the inputs.
      `channels_last` corresponds to inputs with shape
      `(batch, depth, height, width, channels)` while `channels_first`
      corresponds to inputs with shape
      `(batch, channels, depth, height, width)`.
    activation: Activation function. Set it to None to maintain a
      linear activation.
    use_bias: Boolean, whether the layer uses a bias.
    kernel_initializer: An initializer for the convolution kernel.
    bias_initializer: An initializer for the bias vector. If None, no bias will
      be applied.
    kernel_regularizer: Optional regularizer for the convolution kernel.
    bias_regularizer: Optional regularizer for the bias vector.
    activity_regularizer: Regularizer function for the output.
    trainable: Boolean, if `True` also add variables to the graph collection
      `GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
    name: A string, the name of the layer.
    reuse: Boolean, whether to reuse the weights of a previous layer
      by the same name.

  Returns:
    Output tensor.
  """
  layer = Conv3DTranspose(
      filters=filters,
      kernel_size=kernel_size,
      strides=strides,
      padding=padding,
      data_format=data_format,
      activation=activation,
      use_bias=use_bias,
      kernel_initializer=kernel_initializer,
      bias_initializer=bias_initializer,
      kernel_regularizer=kernel_regularizer,
      bias_regularizer=bias_regularizer,
      activity_regularizer=activity_regularizer,
      trainable=trainable,
      name=name,
      # Build the layer's variables with the inputs' dtype, for consistency
      # with the conv2d_transpose functional wrapper above.
      dtype=inputs.dtype.base_dtype,
      _reuse=reuse,
      _scope=name)
  return layer.apply(inputs)
# Aliases
# Backwards-compatible synonyms for the layer classes and functional
# interfaces above; the "Deconvolution"/"Deconv" names are the historical
# names for transposed convolution.
Convolution1D = Conv1D
Convolution2D = Conv2D
Convolution3D = Conv3D
SeparableConvolution2D = SeparableConv2D
Convolution2DTranspose = Deconvolution2D = Deconv2D = Conv2DTranspose
Convolution3DTranspose = Deconvolution3D = Deconv3D = Conv3DTranspose
convolution1d = conv1d
convolution2d = conv2d
convolution3d = conv3d
separable_convolution2d = separable_conv2d
convolution2d_transpose = deconvolution2d = deconv2d = conv2d_transpose
convolution3d_transpose = deconvolution3d = deconv3d = conv3d_transpose
| {
"content_hash": "0d99b7a40ef5c5eb0598bd68a8c72abd",
"timestamp": "",
"source": "github",
"line_count": 1541,
"max_line_length": 80,
"avg_line_length": 42.06878650227125,
"alnum_prop": 0.6349725427284507,
"repo_name": "jhaux/tensorflow",
"id": "fdf1b134b9cd4b659d519725ed3d71f3f67245be",
"size": "65568",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "tensorflow/python/layers/convolutional.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "7908"
},
{
"name": "C",
"bytes": "186881"
},
{
"name": "C++",
"bytes": "25375441"
},
{
"name": "CMake",
"bytes": "166479"
},
{
"name": "Go",
"bytes": "858855"
},
{
"name": "HTML",
"bytes": "593130"
},
{
"name": "Java",
"bytes": "319061"
},
{
"name": "JavaScript",
"bytes": "1399"
},
{
"name": "Jupyter Notebook",
"bytes": "1833659"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "37393"
},
{
"name": "Objective-C",
"bytes": "7056"
},
{
"name": "Objective-C++",
"bytes": "63700"
},
{
"name": "Protocol Buffer",
"bytes": "227623"
},
{
"name": "Python",
"bytes": "22404212"
},
{
"name": "Ruby",
"bytes": "327"
},
{
"name": "Shell",
"bytes": "338633"
},
{
"name": "TypeScript",
"bytes": "801168"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial migration for the cv app: creates the `CV` model/table."""

    initial = True

    dependencies = [
        # CV links one-to-one to the project's (swappable) user model.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='CV',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # NOTE(review): 'birtdate' looks like a typo for 'birthdate',
                # but it matches the model field and the applied schema --
                # do not rename here; a rename needs a follow-up migration.
                ('birtdate', models.DateField()),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'cv',
                'verbose_name_plural': 'cv',
            },
        ),
    ]
| {
"content_hash": "fa6de25e2aa1121f3807de93263bf18d",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 121,
"avg_line_length": 28.93103448275862,
"alnum_prop": 0.5816448152562574,
"repo_name": "airportmarc/the416life",
"id": "50e407e7b3962d65d0e5bad1a8469b9331aedb9a",
"size": "912",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/apps/cv/migrations/0001_initial.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "18"
},
{
"name": "CSS",
"bytes": "430385"
},
{
"name": "HTML",
"bytes": "174632"
},
{
"name": "JavaScript",
"bytes": "224762"
},
{
"name": "Python",
"bytes": "477212"
},
{
"name": "Shell",
"bytes": "4240"
},
{
"name": "Vue",
"bytes": "80363"
}
],
"symlink_target": ""
} |
import logging
from sqlalchemy.orm import sessionmaker
# Package-level logger: emits bare messages (no timestamp/level prefix)
# to stderr via a StreamHandler.
logger = logging.getLogger('usesthis_crawler')
console_handler = logging.StreamHandler()
console_handler.setFormatter(
    logging.Formatter(u'%(message)s')
)
logger.addHandler(console_handler)

# Shared SQLAlchemy session factory; unbound here -- callers are expected
# to configure/bind it to an engine before use.
Session = sessionmaker()
| {
"content_hash": "bdaa6035de10837b2e7ab0f139eecd34",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 46,
"avg_line_length": 25.09090909090909,
"alnum_prop": 0.7934782608695652,
"repo_name": "gbrener/usesthis_crawler",
"id": "cb37a89be65d96bc36ced877da7f3e728719c761",
"size": "276",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "usesthis_crawler/__init__.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "66777"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """State-only removal of `DashboardWidgetQuery.interval`.

    Wrapped in `SeparateDatabaseAndState` with only `state_operations`,
    so no SQL is emitted; this just updates Django's model state.
    """

    # This flag is used to mark that a migration shouldn't be automatically run in
    # production. We set this to True for operations that we think are risky and want
    # someone from ops to run manually and monitor.
    # General advice is that if in doubt, mark your migration as `is_dangerous`.
    # Some things you should always mark as dangerous:
    # - Large data migrations. Typically we want these to be run manually by ops so that
    #   they can be monitored. Since data migrations will now hold a transaction open
    #   this is even more important.
    # - Adding columns to highly active tables, even ones that are NULL.
    is_dangerous = False

    # This flag is used to decide whether to run this migration in a transaction or not.
    # By default we prefer to run in a transaction, but for migrations where you want
    # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
    # want to create an index concurrently when adding one to an existing table.
    atomic = True

    dependencies = [
        ('sentry', '0137_dashboard_widget_interval'),
    ]

    operations = [
        migrations.SeparateDatabaseAndState(
            state_operations=[
                migrations.RemoveField(
                    model_name='dashboardwidgetquery',
                    name='interval',
                ),
            ]
        )
    ]
| {
"content_hash": "6434331473f14accbe1796d90d17d43e",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 88,
"avg_line_length": 39.526315789473685,
"alnum_prop": 0.6671105193075899,
"repo_name": "beeftornado/sentry",
"id": "c5ae221bd4a9da07a021880642f66cf5ef278b83",
"size": "1576",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/sentry/migrations/0138_widget_query_remove_interval.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "157195"
},
{
"name": "HTML",
"bytes": "197026"
},
{
"name": "JavaScript",
"bytes": "380379"
},
{
"name": "Makefile",
"bytes": "2832"
},
{
"name": "Python",
"bytes": "6473603"
}
],
"symlink_target": ""
} |
from tornado.concurrent import Future
from tornado.httpclient import HTTPError, HTTPRequest
from tornado.log import gen_log
from tornado.testing import AsyncHTTPTestCase, gen_test, bind_unused_port, ExpectLog
from tornado.test.util import unittest
from tornado.web import Application, RequestHandler
from tornado.websocket import WebSocketHandler, websocket_connect, WebSocketError, _websocket_mask_python
try:
from tornado import speedups
except ImportError:
speedups = None
class TestWebSocketHandler(WebSocketHandler):
    """Base class for testing handlers that exposes the on_close event.

    This allows for deterministic cleanup of the associated socket.
    """
    def initialize(self, close_future):
        # Future supplied by the test case; resolved when the socket closes.
        self.close_future = close_future

    def on_close(self):
        self.close_future.set_result(None)
class EchoHandler(TestWebSocketHandler):
    """Echoes each message back, preserving text vs. binary framing."""
    def on_message(self, message):
        # `message` is `bytes` for binary frames and `str` for text frames;
        # the second argument keeps the frame type on the way back.
        self.write_message(message, isinstance(message, bytes))
class HeaderHandler(TestWebSocketHandler):
    """On open, sends back the value of the `X-Test` request header."""
    def open(self):
        self.write_message(self.request.headers.get('X-Test', ''))
class NonWebSocketHandler(RequestHandler):
    """Plain HTTP endpoint used to test connecting to a non-websocket URL."""
    def get(self):
        self.write('ok')
class WebSocketTest(AsyncHTTPTestCase):
    """End-to-end tests for websocket_connect against local handlers."""

    def get_app(self):
        self.close_future = Future()
        return Application([
            ('/echo', EchoHandler, dict(close_future=self.close_future)),
            ('/non_ws', NonWebSocketHandler),
            ('/header', HeaderHandler, dict(close_future=self.close_future)),
        ])

    @gen_test
    def test_websocket_gen(self):
        ws = yield websocket_connect(
            'ws://localhost:%d/echo' % self.get_http_port(),
            io_loop=self.io_loop)
        ws.write_message('hello')
        response = yield ws.read_message()
        self.assertEqual(response, 'hello')
        ws.close()
        yield self.close_future

    def test_websocket_callbacks(self):
        # BUGFIX: this method previously ended with `yield self.close_future`,
        # which turned it into a generator function that unittest never
        # iterated -- the test body silently never ran. The close future is
        # now awaited with the stop/wait machinery appropriate for a
        # callback-style (non-coroutine) test.
        websocket_connect(
            'ws://localhost:%d/echo' % self.get_http_port(),
            io_loop=self.io_loop, callback=self.stop)
        ws = self.wait().result()
        ws.write_message('hello')
        ws.read_message(self.stop)
        response = self.wait().result()
        self.assertEqual(response, 'hello')
        self.close_future.add_done_callback(lambda f: self.stop())
        ws.close()
        self.wait()

    @gen_test
    def test_websocket_http_fail(self):
        # Connecting to a non-existent path should surface a 404.
        with self.assertRaises(HTTPError) as cm:
            yield websocket_connect(
                'ws://localhost:%d/notfound' % self.get_http_port(),
                io_loop=self.io_loop)
        self.assertEqual(cm.exception.code, 404)

    @gen_test
    def test_websocket_http_success(self):
        # A plain-HTTP endpoint must be rejected as not a websocket.
        with self.assertRaises(WebSocketError):
            yield websocket_connect(
                'ws://localhost:%d/non_ws' % self.get_http_port(),
                io_loop=self.io_loop)

    @gen_test
    def test_websocket_network_timeout(self):
        # Bind-then-close yields a port with nothing listening.
        sock, port = bind_unused_port()
        sock.close()
        with self.assertRaises(HTTPError) as cm:
            with ExpectLog(gen_log, ".*"):
                yield websocket_connect(
                    'ws://localhost:%d/' % port,
                    io_loop=self.io_loop,
                    connect_timeout=0.01)
        self.assertEqual(cm.exception.code, 599)

    @gen_test
    def test_websocket_network_fail(self):
        sock, port = bind_unused_port()
        sock.close()
        with self.assertRaises(HTTPError) as cm:
            with ExpectLog(gen_log, ".*"):
                yield websocket_connect(
                    'ws://localhost:%d/' % port,
                    io_loop=self.io_loop,
                    connect_timeout=3600)
        self.assertEqual(cm.exception.code, 599)

    @gen_test
    def test_websocket_close_buffered_data(self):
        ws = yield websocket_connect(
            'ws://localhost:%d/echo' % self.get_http_port())
        ws.write_message('hello')
        ws.write_message('world')
        # Close the underlying stream abruptly with writes still buffered.
        ws.stream.close()
        yield self.close_future

    @gen_test
    def test_websocket_headers(self):
        # Ensure that arbitrary headers can be passed through websocket_connect.
        ws = yield websocket_connect(
            HTTPRequest('ws://localhost:%d/header' % self.get_http_port(),
                        headers={'X-Test': 'hello'}))
        response = yield ws.read_message()
        self.assertEqual(response, 'hello')
        ws.close()
        yield self.close_future
class MaskFunctionMixin(object):
    """Shared test cases for a websocket masking implementation.

    Subclasses must define ``self.mask(mask, data)``.
    """
    def test_mask(self):
        # (mask, data, expected) triples. The cases include empty data,
        # data shorter than the mask, \x00 bytes (to ensure that the C
        # extension isn't depending on null-terminated strings) and bytes
        # with the high bit set (to smoke out signedness issues).
        cases = [
            (b'abcd', b'', b''),
            (b'abcd', b'b', b'\x03'),
            (b'abcd', b'54321', b'TVPVP'),
            (b'ZXCV', b'98765432', b'c`t`olpd'),
            (b'\x00\x01\x02\x03', b'\xff\xfb\xfd\xfc\xfe\xfa',
             b'\xff\xfa\xff\xff\xfe\xfb'),
            (b'\xff\xfb\xfd\xfc', b'\x00\x01\x02\x03\x04\x05',
             b'\xff\xfa\xff\xff\xfb\xfe'),
        ]
        for mask, data, expected in cases:
            self.assertEqual(self.mask(mask, data), expected)
class PythonMaskFunctionTest(MaskFunctionMixin, unittest.TestCase):
    """Runs the shared mask test cases against the pure-Python masker."""
    def mask(self, mask, data):
        return _websocket_mask_python(mask, data)
@unittest.skipIf(speedups is None, "tornado.speedups module not present")
class CythonMaskFunctionTest(MaskFunctionMixin, unittest.TestCase):
    """Runs the shared mask test cases against the C-extension masker."""
    def mask(self, mask, data):
        return speedups.websocket_mask(mask, data)
| {
"content_hash": "7323c5b912b7a3f80d70a392dcbb0f3d",
"timestamp": "",
"source": "github",
"line_count": 158,
"max_line_length": 105,
"avg_line_length": 36.40506329113924,
"alnum_prop": 0.6157858136300417,
"repo_name": "bufferx/tornado",
"id": "3c6e3a9a2cc06a4bb9b865cf59b60188cec133a9",
"size": "5752",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tornado/test/websocket_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1003"
},
{
"name": "CSS",
"bytes": "9014"
},
{
"name": "JavaScript",
"bytes": "6045"
},
{
"name": "Python",
"bytes": "1080288"
},
{
"name": "Ruby",
"bytes": "1889"
},
{
"name": "Shell",
"bytes": "5377"
}
],
"symlink_target": ""
} |
"""
Created on Wed Feb 04 15:20:56 2015
@author: Scott
"""
import numpy as np
from neurosrc.spectral.filter import scfilter
import math
import statsmodels.api as sm
def scpac(x, flo_range, fhi_range, rate, pac_method, filt_method, **kwargs):
    '''
    Calculate phase-amplitude coupling between the phase of a low-frequency-
    bandpass filtered signal and the amplitude of a high-frequency-bandpass
    filtered signal.

    Parameters
    ----------
    x : array-like 1d
        Time series of data
    flo_range : 2-element list
        Low and High cutoff frequencies (Hz)
    fhi_range : 2-element list
        Low and High cutoff frequencies (Hz)
    rate : numeric
        Sampling rate of x
    pac_method : string
        Method to calculate PAC.
        See Tort, 2008 for method 'mi' (used in de Hemptinne, 2013)
        See Tort, 2010 for method 'plv'
        See Penny, 2008 for method 'glm'
        See Canolty, 2006 for method 'mi_canolty'
    filt_method : string
        Filtering method
    **kwargs : dictionary
        Filter parameters

    Returns
    -------
    pac : numeric
        Phase-amplitude coupling value

    Raises
    ------
    ValueError
        If pac_method is not one of 'plv', 'mi', 'glm', 'mi_canolty'.
    '''
    # Bandpass filter raw signal
    xlo = scfilter(x, flo_range, filt_method, rate = rate, **kwargs)
    xhi = scfilter(x, fhi_range, filt_method, rate = rate, **kwargs)

    # Calculate PAC
    if pac_method == 'plv':
        ahi = np.abs(fasthilbert(xhi))
        # BUG FIX: pass rate=rate here as well; this call previously omitted
        # the sampling rate, inconsistent with the two scfilter calls above.
        xlo_ahi = scfilter(ahi, flo_range, filt_method, rate=rate, **kwargs)
        pac = pac_plv(xlo, xlo_ahi)
    elif pac_method == 'mi':
        pac = pac_mi(xlo, xhi)
    elif pac_method == 'glm':
        pac = pac_glm(xlo, xhi)
    elif pac_method == 'mi_canolty':
        pac = pac_mi_canolty(xlo, xhi)
    else:
        raise ValueError('Not a valid PAC method')

    return pac
def power_two(n):
    '''
    Return the smallest power of 2 strictly greater than `n`.

    Note: when `n` is itself a power of 2 the NEXT power is returned
    (e.g. power_two(8) == 16), matching the original behavior relied on
    by fasthilbert's zero-padding.
    '''
    # Integer bit_length avoids the float-precision pitfalls of
    # 2**(int(math.log(n, 2)) + 1), which can mis-round for large n.
    return 1 << int(n).bit_length()
def fasthilbert(x, axis=-1):
    '''
    Redefinition of scipy.signal.hilbert, which is very slow for some lengths
    of the signal. This version zero-pads the signal to the next power of 2
    for speed.

    Parameters
    ----------
    x : array-like
        Signal data; the transform is taken along `axis`.
    axis : int
        Axis along which to compute the analytic signal (default: last).

    Returns
    -------
    array (complex)
        Analytic signal, truncated back to the input length along `axis`.
        Because of the zero-padding this equals scipy.signal.hilbert of the
        zero-padded sequence (truncated), not of the raw input.
    '''
    x = np.asarray(x)
    N = x.shape[axis]
    # Next power of two strictly greater than N; integer arithmetic avoids
    # float-log precision issues.
    N2 = 1 << int(N).bit_length()
    Xf = np.fft.fft(x, N2, axis=axis)

    # Frequency-domain weights, same construction as scipy.signal.hilbert.
    # BUG FIX: the Nyquist bin h[N2 // 2] was previously left at 0; scipy
    # sets it to 1 for even-length FFTs (N2 is always even here).
    h = np.zeros(N2)
    h[0] = 1
    h[N2 // 2] = 1
    h[1:N2 // 2] = 2
    if x.ndim > 1:
        # Broadcast the weights along the transform axis.
        shape = [1] * x.ndim
        shape[axis] = N2
        h = h.reshape(shape)

    analytic = np.fft.ifft(Xf * h, axis=axis)
    # BUG FIX: truncate along `axis`; the original sliced axis 0 (x[:N]),
    # which is wrong for multi-dimensional input with axis=-1.
    index = [slice(None)] * x.ndim
    index[axis] = slice(N)
    return analytic[tuple(index)]
def pac_mi(xlo, xhi):
    '''
    Calculate PAC using the modulation index (MI) method

    Parameters
    ----------
    xlo : array-like 1d
        Low-frequency-bandpass filtered signal
    xhi : array-like 1d
        High-frequency-bandpass filtered signal

    Returns
    -------
    pac : numeric
        PAC value
    '''
    # Instantaneous phase (degrees) of the slow signal; amplitude envelope
    # of the fast signal.
    phase_deg = np.degrees(np.angle(fasthilbert(xlo)))
    amp_env = np.abs(fasthilbert(xhi))

    # Mean amplitude within each 20-degree phase bin (18 bins over 360 deg)
    bin_edges = np.arange(-180, 180, 20)
    n_bins = len(bin_edges)
    mean_amp = np.zeros(n_bins)
    for idx, lo_edge in enumerate(bin_edges):
        in_bin = np.logical_and(phase_deg >= lo_edge, phase_deg < lo_edge + 20)
        mean_amp[idx] = np.mean(amp_env[in_bin])

    # Normalize the binned amplitudes into a distribution and take its entropy
    p_j = np.zeros(n_bins)
    for idx in range(n_bins):
        p_j[idx] = mean_amp[idx] / sum(mean_amp)
    H = -sum(np.multiply(p_j, np.log10(p_j)))

    # Normalized divergence from the uniform distribution over 18 bins
    Hmax = np.log10(18)
    return (Hmax - H) / Hmax
def pac_plv(xlo, xlo_ahi):
    '''
    Calculate PAC using the phase-locking value (PLV) method

    Parameters
    ----------
    xlo : array-like 1d
        Low-frequency-bandpass filtered signal
    xlo_ahi : array-like 1d
        Low-frequency bandpass filtered signal of the Hilbert amplitude of the
        original high-frequency-bandpass filtered signal

    Returns
    -------
    pac : numeric
        PAC value
    '''
    # Phase of the slow oscillation and phase of the filtered amplitude
    # envelope; PAC is the phase-locking value of their difference.
    lo_phase = np.angle(fasthilbert(xlo))
    env_phase = np.angle(fasthilbert(xlo_ahi))
    phase_diff = lo_phase - env_phase
    return np.abs(np.sum(np.exp(1j * phase_diff))) / len(xlo)
def pac_glm(xlo, xhi):
    '''
    Calculate PAC using the generalized linear model (GLM) method

    Parameters
    ----------
    xlo : array-like 1d
        Low-frequency-bandpass filtered signal
    xhi : array-like 1d
        High-frequency-bandpass filtered signal

    Returns
    -------
    pac : numeric
        PAC value
    '''
    # Instantaneous phase of the slow signal; amplitude envelope of the fast
    phase = np.angle(fasthilbert(xlo))
    amp = np.abs(fasthilbert(xhi))

    # Design matrix [cos(phase), sin(phase), 1]; response is the envelope
    design = sm.add_constant(
        np.vstack((np.cos(phase), np.sin(phase))).T, prepend=False)

    fitted = sm.GLM(amp, design).fit()

    # R^2 of the fit. Equivalent to mdl.Rsquared.Ordinary in MATLAB
    ss_resid = np.sum(fitted.resid_deviance ** 2)
    ss_total = np.sum((amp - np.mean(amp)) ** 2)
    return 1 - ss_resid / ss_total
def pac_mi_canolty(xlo, xhi):
    '''
    Calculate PAC using the modulation index (MI) method defined in Canolty,
    2006

    Parameters
    ----------
    xlo : array-like 1d
        Low-frequency-bandpass filtered signal
    xhi : array-like 1d
        High-frequency-bandpass filtered signal

    Returns
    -------
    pac : numeric
        PAC value (modulus of the mean composite signal)
    '''
    # Calculate phase and amplitude
    pha = np.angle(fasthilbert(xlo))
    amp = np.abs(fasthilbert(xhi))

    # Canolty's MI is the modulus of the MEAN composite signal a(t)*e^(i*pha).
    # BUG FIX: the previous code returned the raw complex, unnormalized sum
    # (np.sum(amp * np.exp(1j * pha))), which is not the real-valued
    # "numeric" PAC value documented above.
    pac = np.abs(np.mean(amp * np.exp(1j * pha)))
    return pac
def pac_palette(x, pac_method, filt_method, rate = 1000,
                dp = 2, da = 4, p_range = None, a_range = None, **kwargs):
    '''
    Calculate PAC for many phase and frequency bands

    Parameters
    ----------
    x : array-like 1d
        Time series of data
    pac_method : string
        PAC estimator, forwarded to scpac
    filt_method : string
        Filtering method, forwarded to scpac
    rate : numeric
        Sampling rate of x
    dp : numeric
        Width of each phase-frequency band (Hz)
    da : numeric
        Width of each amplitude-frequency band (Hz)
    p_range : 2-element sequence or None
        (low, high) limits of phase frequencies; defaults to (4, 50)
    a_range : 2-element sequence or None
        (low, high) limits of amplitude frequencies; defaults to (10, 200)
    **kwargs : dictionary
        Filter parameters, forwarded to scpac

    Returns
    -------
    pac : 2d array, shape (#phase bands, #amplitude bands)
        PAC value for every combination of phase band and amplitude band
    '''
    # Use identity comparison with None ("is None"), not "== None": equality
    # can be overloaded (e.g. numpy arrays) and is not the idiomatic check.
    if p_range is None:
        p_range = (4, 50)
    if a_range is None:
        a_range = (10, 200)

    # Lower edges of every band in the palette
    f_phases = np.arange(p_range[0], p_range[1], dp)
    f_amps = np.arange(a_range[0], a_range[1], da)
    P = len(f_phases)
    A = len(f_amps)

    # Calculate PAC for every combination of P and A
    pac = np.zeros((P, A))
    for p in range(P):
        flo = [f_phases[p], f_phases[p] + dp]
        for a in range(A):
            fhi = [f_amps[a], f_amps[a] + da]
            pac[p, a] = scpac(x, flo, fhi, rate, pac_method, filt_method, **kwargs)
    return pac
| {
"content_hash": "0ff6de3c7d16c0239b9638e53d1e61a4",
"timestamp": "",
"source": "github",
"line_count": 243,
"max_line_length": 82,
"avg_line_length": 26.292181069958847,
"alnum_prop": 0.5772421349193927,
"repo_name": "parenthetical-e/neurosrc",
"id": "e54db843fa257fe5411f516c7b1978869832a1fd",
"size": "6413",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spectral/pac.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Matlab",
"bytes": "25492"
},
{
"name": "Python",
"bytes": "14121"
}
],
"symlink_target": ""
} |
def http_exception(response):
    """Raise an :class:`HTTPException` wrapping the given response object."""
    raise HTTPException(response)
class HTTPException(Exception):
    """Exception wrapping an HTTP response object.

    The wrapped response supplies the exception text (its ``message``
    attribute) and the ``is_redirect`` / ``is_client_error`` /
    ``is_server_error`` classification flags, which are re-exposed here
    as read-only properties.
    """

    def __init__(self, response):
        self._resp = response

    def __str__(self):
        # The response's message doubles as the exception text.
        return self._resp.message

    @property
    def is_redirect(self):
        """Forward the wrapped response's ``is_redirect`` flag."""
        return self._resp.is_redirect

    @property
    def is_client_error(self):
        """Forward the wrapped response's ``is_client_error`` flag."""
        return self._resp.is_client_error

    @property
    def is_server_error(self):
        """Forward the wrapped response's ``is_server_error`` flag."""
        return self._resp.is_server_error
| {
"content_hash": "c7ee43ff011dc9b42fc842d0484fc0a7",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 45,
"avg_line_length": 21.608695652173914,
"alnum_prop": 0.647887323943662,
"repo_name": "XDrake99/IDTP",
"id": "12eaf5a84942f1f3eaa499da95d5602ddb355a94",
"size": "497",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "protocol/http/exception.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "99219"
}
],
"symlink_target": ""
} |
from __future__ import division
from __future__ import print_function
import argparse
import pandas
import re
import genhub
def cli():
    """Build and return the argument parser for the summary script."""
    desc = 'Summarize iLocus content of the specified genome(s)'
    arg_parser = argparse.ArgumentParser(description=desc)
    arg_parser.add_argument(
        '-v', '--version', action='version',
        version='GenHub v%s' % genhub.__version__)
    arg_parser.add_argument(
        '-c', '--cfgdir', default=None, metavar='DIR',
        help='directory (or comma-separated list of '
        'directories) from which to load user-supplied genome '
        'configuration files')
    arg_parser.add_argument(
        '-w', '--workdir', metavar='WD', default='./species',
        help='working directory for data files; default is '
        '"./species"')
    arg_parser.add_argument(
        '--outfmt', metavar='FMT', choices=['tsv', 'tex'],
        default='tsv', help='output format; "tsv" for machine '
        'readability, "tex" for typesetting')
    arg_parser.add_argument('species', nargs='+', help='species label(s)')
    return arg_parser
def count_seqs(data):
    """Count distinct sequences from iLocus positions.

    Parameters
    ----------
    data : mapping with a 'LocusPos' column/sequence
        Each entry is an iLocus position string like 'scaffold1_100-200'.

    Returns
    -------
    int
        Number of distinct sequence IDs referenced.

    Raises
    ------
    ValueError
        If a position string does not match the expected format.
    """
    # A set is the idiomatic structure here (the original used a dict with
    # dummy True values purely for key uniqueness).
    seqids = set()
    for locuspos in data['LocusPos']:
        posmatch = re.search(r'(\S+)_(\d+)-(\d+)', locuspos)
        if not posmatch:
            # Raise a real exception instead of `assert`, which is stripped
            # when Python runs with -O.
            raise ValueError('error parsing iLocus position: ' + locuspos)
        seqids.add(posmatch.group(1))
    return len(seqids)
def get_row(data, fmt):
    """Calculate the summary for a row of the table."""
    assert fmt in ['tsv', 'tex']

    def class_total(ilocus_class):
        # Number of iLoci of the given class
        return len(data.loc[data.LocusClass == ilocus_class])

    row = [
        data['Species'][0],
        data['EffectiveLength'].sum() / 1000000,
        count_seqs(data),
    ]
    row.extend(class_total(cls) for cls in
               ('fiLocus', 'iiLocus', 'niLocus', 'siLocus', 'ciLocus'))
    if fmt == 'tex':
        # Typeset-friendly formatting: one decimal for Mb, thousands
        # separators for the counts.
        row[1] = '{:.1f}'.format(row[1])
        row[2:] = ['{:,d}'.format(value) for value in row[2:]]
    return row
def print_row(values, fmt):
    """Emit one table row in the requested output format."""
    assert fmt in ['tsv', 'tex']
    if fmt == 'tsv':
        print('\t'.join(str(value) for value in values))
    elif fmt == 'tex':
        # Right-pad each cell to 12 chars and terminate the LaTeX row.
        cells = ['{:>12}'.format(value) for value in values] + [' \\\\']
        print(' & '.join(cells))
def main(args):
    """Print the iLocus summary table for every requested genome."""
    header = ['Species', 'Mb', '#Seq', 'fiLoci', 'iiLoci', 'niLoci',
              'siLoci', 'ciLoci']
    print_row(header, args.outfmt)

    registry = genhub.registry.Registry()
    if args.cfgdir:
        # Load any user-supplied genome configuration directories.
        for confdir in args.cfgdir.split(','):
            registry.update(confdir)

    for label in args.species:
        genomedb = registry.genome(label, workdir=args.workdir)
        ilocus_table = pandas.read_table(genomedb.ilocustable)
        print_row(get_row(ilocus_table, args.outfmt), args.outfmt)
if __name__ == '__main__':
    # Script entry point: parse command-line arguments and run the summary.
    main(args=cli().parse_args())
| {
"content_hash": "caa09a07043757aba0d7e0e8fff4be72",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 79,
"avg_line_length": 33.73626373626374,
"alnum_prop": 0.5644951140065146,
"repo_name": "standage/genhub",
"id": "265df4b7d21d9b2f763ceb434203c279d1729256",
"size": "3499",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/genhub-ilocus-summary.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "907"
},
{
"name": "Python",
"bytes": "291540"
},
{
"name": "Shell",
"bytes": "961"
}
],
"symlink_target": ""
} |
"""
Command-line application controller for opc-diag
"""
from opcdiag.model import Package
from opcdiag.presenter import DiffPresenter, ItemPresenter
from opcdiag.view import OpcView
_CONTENT_TYPES_URI = '[Content_Types].xml'
class OpcController(object):
    """
    Mediate between the command-line interface and the package model
    entities: build entity objects from the arguments, delegate the work
    to them, and hand the results to the appropriate view for display.
    """
    def browse(self, pkg_path, uri_tail):
        """
        Display pretty-printed XML contained in package item with URI ending
        with *uri_tail* in package at *pkg_path*.
        """
        package = Package.read(pkg_path)
        target_item = package.find_item_by_uri_tail(uri_tail)
        OpcView.pkg_item(ItemPresenter(target_item))

    def diff_item(self, package_1_path, package_2_path, uri_tail):
        """
        Display the meaningful differences between the item identified by
        *uri_tail* in the package at *package_1_path* and its counterpart in
        the package at *package_2_path*. Each path can be either a standard
        zip package (e.g. a .pptx file) or a directory containing an
        extracted package.
        """
        pkg_1 = Package.read(package_1_path)
        pkg_2 = Package.read(package_2_path)
        OpcView.item_diff(
            DiffPresenter.named_item_diff(pkg_1, pkg_2, uri_tail))

    def diff_pkg(self, package_1_path, package_2_path):
        """
        Display the meaningful differences between the packages at
        *package_1_path* and *package_2_path*. Each path can be either a
        standard zip package (e.g. .pptx file) or a directory containing an
        extracted package.
        """
        pkg_1 = Package.read(package_1_path)
        pkg_2 = Package.read(package_2_path)
        types_diff = DiffPresenter.named_item_diff(
            pkg_1, pkg_2, _CONTENT_TYPES_URI)
        rels_diffs = DiffPresenter.rels_diffs(pkg_1, pkg_2)
        part_diffs = DiffPresenter.xml_part_diffs(pkg_1, pkg_2)
        OpcView.package_diff(types_diff, rels_diffs, part_diffs)

    def extract_package(self, package_path, extract_dirpath):
        """
        Extract the contents of the package at *package_path* to individual
        files in a directory at *extract_dirpath*.
        """
        package = Package.read(package_path)
        package.prettify_xml()
        package.save_to_dir(extract_dirpath)

    def repackage(self, package_path, new_package_path):
        """
        Write the contents of the package found at *package_path* to a new
        zip package at *new_package_path*.
        """
        Package.read(package_path).save(new_package_path)

    def substitute(self, uri_tail, src_pkg_path, tgt_pkg_path, new_pkg_path):
        """
        Substitute the package item identified by *uri_tail* from the package
        at *src_pkg_path* into the package at *tgt_pkg_path* and save the
        resulting package at *new_pkg_path*. This can be handy for
        identifying which package item(s) in the source package are causing a
        "repair needed" error when loading the target package in MS Office.
        """
        source_pkg = Package.read(src_pkg_path)
        target_pkg = Package.read(tgt_pkg_path)
        replacement = source_pkg.find_item_by_uri_tail(uri_tail)
        target_pkg.substitute_item(replacement)
        target_pkg.save(new_pkg_path)
        OpcView.substitute(replacement.uri, src_pkg_path, tgt_pkg_path,
                           new_pkg_path)
| {
"content_hash": "fd73d8109144a82f7071b83ebd36b12a",
"timestamp": "",
"source": "github",
"line_count": 89,
"max_line_length": 78,
"avg_line_length": 41.98876404494382,
"alnum_prop": 0.6534653465346535,
"repo_name": "python-openxml/opc-diag",
"id": "40f4bd2933740975edbcf8144b7d8dca0c2ac274",
"size": "3762",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "opcdiag/controller.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "125559"
}
],
"symlink_target": ""
} |
from luigi import Task, Parameter, LocalTarget, WrapperTask
from tasks.base_tasks import (ColumnsTask, TableTask, TagsTask, GeoFile2TempTableTask, SimplifiedTempTableTask,
RepoFile)
from lib.timespan import get_timespan
from tasks.util import shell, classpath, copyfile
from tasks.tags import SectionTags, SubsectionTags, BoundaryTags
from tasks.meta import OBSTable, OBSColumn, GEOM_REF, GEOM_NAME, OBSTag, current_session
from collections import OrderedDict
import os
class DownloadGeometry(Task):
    """Download and unpack a CNIG geometry archive."""
    seq = Parameter()

    # request url: http://centrodedescargas.cnig.es/CentroDescargas/descargaDir
    # arguments:
    #   secDescDirLA:114023
    #   pagActual:1
    #   numTotReg:5
    #   codSerieSel:CAANE
    URL = 'http://centrodedescargas.cnig.es/CentroDescargas/descargaDir?secDescDirLA={seq}&pagActual=1&numTotReg=5&codSerieSel=CAANE'

    def requires(self):
        return RepoFile(resource_id=self.task_id, version=self.version(),
                        url=self.URL.format(seq=self.seq))

    def version(self):
        return 1

    def run(self):
        # Copy the cached archive next to the output path, then extract it.
        target = self.output().path
        copyfile(self.input().path, '{output}.zip'.format(output=target))
        shell('unzip -d {output} {output}.zip'.format(output=target))

    def output(self):
        return LocalTarget(os.path.join('tmp', classpath(self), self.seq).lower())
class ImportGeometry(GeoFile2TempTableTask):
    """Import one downloaded CNIG shapefile into a temp table."""
    resolution = Parameter()
    timestamp = Parameter()
    id_aux = Parameter()  # X for Peninsula and Balearic Islands, Y for Canary Islands

    def requires(self):
        return DownloadGeometry(seq='114023')

    def input_files(self):
        '''
        We don't know precise name of file inside zip archive beforehand, so
        use find to track it down.
        '''
        find_cmd = "find '{dirpath}' -iname *_{resolution}_*_{aux}.shp | grep {timestamp}".format(
            dirpath=self.input().path,
            timestamp=self.timestamp,
            aux=self.id_aux,
            resolution=self.resolution)
        return shell(find_cmd).strip()
class SimplifiedImportGeometry(SimplifiedTempTableTask):
    """Simplified (reduced-vertex) version of the imported geometry table."""
    resolution = Parameter()
    timestamp = Parameter()
    id_aux = Parameter()  # X for Peninsula and Balearic Islands, Y for Canary Islands

    def requires(self):
        return ImportGeometry(resolution=self.resolution, timestamp=self.timestamp, id_aux=self.id_aux)
class LicenseTags(TagsTask):
    """Declare the license tag attached to CNIG-sourced columns."""
    def version(self):
        return 1

    def tags(self):
        license_tag = OBSTag(
            id='cnig-license',
            name='License of CNIG',
            type='license',
            description='Royal Decree 663/2007, more information `here <https://www.cnig.es/propiedadIntelectual.do>`_.')
        return [license_tag]
class SourceTags(TagsTask):
    """Declare the data-source tag attached to CNIG-sourced columns."""
    def version(self):
        return 1

    def tags(self):
        source_tag = OBSTag(
            id='cnig-source',
            name='National Center for Geographic Information (CNIG)',
            type='source',
            description='`The National Center for Geographic Information (CNIG) <https://www.cnig.es/>`_')
        return [source_tag]
class GeometryColumns(ColumnsTask):
    """Geometry columns for the three Spanish administrative levels."""
    def version(self):
        return 5

    def requires(self):
        return {
            'sections': SectionTags(),
            'subsections': SubsectionTags(),
            'source': SourceTags(),
            'license': LicenseTags(),
            'boundary': BoundaryTags(),
        }

    def columns(self):
        input_ = self.input()
        sections = input_['sections']
        subsections = input_['subsections']
        source = input_['source']['cnig-source']
        license = input_['license']['cnig-license']
        boundary_type = input_['boundary']

        columns = OrderedDict()
        columns['ccaa'] = OBSColumn(
            type='Geometry',
            name='Autonomous Community',
            weight=6,
            description='The first-level administrative subdivision of Spain. ',
            tags=[sections['spain'], subsections['boundary']],
        )
        columns['prov'] = OBSColumn(
            type='Geometry',
            name='Province',
            weight=7,
            description='The second-level administrative subdivision of Spain, '
                        'used primarily as electoral districts and geographic '
                        'references. Provinces do not cross between autonomous '
                        'communities.',
            tags=[sections['spain'], subsections['boundary'], boundary_type['cartographic_boundary'], boundary_type['interpolation_boundary']],
        )
        columns['muni'] = OBSColumn(
            type='Geometry',
            name='Municipality',
            weight=8,
            description='The lowest level of territorial organization in Spain. '
                        'Municipal boundaries do not cross between provinces. ',
            tags=[sections['spain'], subsections['boundary'], boundary_type['cartographic_boundary'], boundary_type['interpolation_boundary']],
        )

        # Every geometry column additionally carries the source/license tags.
        for col in columns.values():
            col.tags.extend((source, license))
        return columns
class GeomRefColumns(ColumnsTask):
    """Hidden id columns referencing each geometry column."""
    def version(self):
        return 1

    def requires(self):
        return {
            'geom_cols': GeometryColumns(),
            'subsections': SubsectionTags(),
            'sections': SectionTags(),
        }

    def columns(self):
        # One zero-weight text column per geometry, targeting it as GEOM_REF.
        return OrderedDict(
            ('id_' + colname,
             OBSColumn(type='Text',
                       name='',
                       weight=0,
                       targets={coltarget: GEOM_REF}))
            for colname, coltarget in self.input()['geom_cols'].items())
class GeomNameColumns(ColumnsTask):
    """Proper-name columns attached to each geometry column."""
    def version(self):
        return 1

    def requires(self):
        return {
            'geom_cols': GeometryColumns(),
            'subsections': SubsectionTags(),
            'sections': SectionTags(),
        }

    def columns(self):
        input_ = self.input()
        name_tags = [input_['subsections']['names'], input_['sections']['spain']]
        cols = OrderedDict()
        for colname, coltarget in input_['geom_cols'].items():
            cols['name_' + colname] = OBSColumn(
                id='name_' + colname,
                type='Text',
                name='Proper name of {}'.format(colname),
                tags=list(name_tags),
                weight=1,
                targets={coltarget: GEOM_NAME},
            )
        return cols
class Geometry(TableTask):
    """Load the simplified CNIG geometries for one resolution into a table."""
    resolution = Parameter()
    timestamp = Parameter(default='20150101')

    def version(self):
        return 13

    # TODO: https://github.com/CartoDB/bigmetadata/issues/435
    def targets(self):
        return {
            OBSTable(id='.'.join([self.schema(), self.name()])): GEOM_REF,
        }

    def requires(self):
        return {
            'geom_columns': GeometryColumns(),
            'geomref_columns': GeomRefColumns(),
            'geomname_columns': GeomNameColumns(),
            'peninsula_data': SimplifiedImportGeometry(resolution=self.resolution,
                                                       timestamp=self.timestamp,
                                                       id_aux='x'),
            'canary_data': SimplifiedImportGeometry(resolution=self.resolution,
                                                    timestamp=self.timestamp,
                                                    id_aux='y')
        }

    def table_timespan(self):
        return get_timespan(str(self.timestamp))

    def columns(self):
        return OrderedDict([
            ('geom_ref', self.input()['geomref_columns']['id_' + self.resolution]),
            ('rotulo', self.input()['geomname_columns']['name_' + self.resolution]),
            ('the_geom', self.input()['geom_columns'][self.resolution])
        ])

    def geom_ref_colname(self):
        """Return the source id column name for the configured resolution.

        Raises
        ------
        ValueError
            If resolution is not one of ccaa / prov / muni.
        """
        if self.resolution.lower() == 'ccaa':
            return 'id_ccaa'
        elif self.resolution.lower() == 'prov':
            return 'id_prov'
        elif self.resolution.lower() == 'muni':
            return 'id_ine'
        else:
            # BUG FIX: this was `raise '...'` (raising a plain string), which
            # is itself a TypeError under Python 3 and masks the real error.
            raise ValueError('Unknown resolution {resolution}'.format(resolution=self.resolution))

    def populate(self):
        session = current_session()
        # Geometries arrive in two files (peninsula + Balearic Islands, and
        # Canary Islands); insert both into the same output table.
        for data_key in ('peninsula_data', 'canary_data'):
            query = 'INSERT INTO {output} ' \
                    'SELECT {geom_ref_colname} geom_ref, rotulo, wkb_geometry the_geom ' \
                    'FROM {input}'.format(
                        output=self.output().table,
                        input=self.input()[data_key].table,
                        geom_ref_colname=self.geom_ref_colname())
            session.execute(query)
class AllGeometries(WrapperTask):
    """Wrapper task that requires a Geometry task per resolution."""
    def requires(self):
        # Yield (rather than return a list) to keep the original generator
        # semantics of requires().
        for res in ('ccaa', 'muni', 'prov'):
            yield Geometry(resolution=res)
| {
"content_hash": "aa0f7b910989c8c4d1182f73d77c1143",
"timestamp": "",
"source": "github",
"line_count": 278,
"max_line_length": 143,
"avg_line_length": 33.66546762589928,
"alnum_prop": 0.5650176300886847,
"repo_name": "CartoDB/bigmetadata",
"id": "55ecc3b53a176e9dfdfe81a77bab450f6292ac8d",
"size": "9384",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tasks/es/cnig.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "143"
},
{
"name": "Dockerfile",
"bytes": "2305"
},
{
"name": "HTML",
"bytes": "19058"
},
{
"name": "JavaScript",
"bytes": "5864"
},
{
"name": "Makefile",
"bytes": "27552"
},
{
"name": "PLpgSQL",
"bytes": "32699"
},
{
"name": "Python",
"bytes": "2967442"
},
{
"name": "Shell",
"bytes": "11590"
}
],
"symlink_target": ""
} |
from __future__ import division, print_function, absolute_import
import os
import sys
import functools
from textwrap import TextWrapper
from collections import defaultdict
from plumbum.lib import six, getdoc
from plumbum.cli.terminal import get_terminal_size
from plumbum.cli.switches import (SwitchError, UnknownSwitch, MissingArgument, WrongArgumentType,
MissingMandatorySwitch, SwitchCombinationError, PositionalArgumentsError, switch,
SubcommandError, Flag, CountOf)
from plumbum import colors, local
class ShowHelp(SwitchError):
    """Raised by the --help switch to abort parsing and display help."""
    pass
class ShowHelpAll(SwitchError):
    """Raised by the --help-all switch to display help including subcommands."""
    pass
class ShowVersion(SwitchError):
    """Raised by the --version switch to display the program version."""
    pass
class SwitchParseInfo(object):
    """Record of one parsed switch: its name, argument tuple, and the
    position at which it appeared on the command line."""
    __slots__ = ["swname", "val", "index"]

    def __init__(self, swname, val, index):
        # Populate all three slots in a single unpacking assignment.
        self.swname, self.val, self.index = swname, val, index
class Subcommand(object):
    """Holder for a registered sub-application.

    The sub-application may be given either as a class or as a
    fully-qualified dotted name, in which case the class is imported
    lazily on the first call to :meth:`get` and cached.
    """

    def __init__(self, name, subapplication):
        self.name = name
        self.subapplication = subapplication

    def get(self):
        """Return the sub-application class, importing it on first use."""
        if isinstance(self.subapplication, str):
            module_name, class_name = self.subapplication.rsplit(".", 1)
            module = __import__(module_name, None, None, "*")
            try:
                app_cls = getattr(module, class_name)
            except AttributeError:
                raise ImportError("cannot import name %s" % (class_name,))
            # Cache the resolved class so the import happens only once.
            self.subapplication = app_cls
        return self.subapplication

    def __repr__(self):
        return "Subcommand(%r, %r)" % (self.name, self.subapplication)
#===================================================================================================
# CLI Application base class
#===================================================================================================
class Application(object):
"""
The base class for CLI applications; your "entry point" class should derive from it,
define the relevant switch functions and attributes, and the ``main()`` function.
The class defines two overridable "meta switches" for version (``-v``, ``--version``)
and help (``-h``, ``--help``).
The signature of the main function matters: any positional arguments (e.g., non-switch
arguments) given on the command line are passed to the ``main()`` function; if you wish
to allow unlimited number of positional arguments, use varargs (``*args``). The names
of the arguments will be shown in the help message.
The classmethod ``run`` serves as the entry point of the class. It parses the command-line
arguments, invokes switch functions and enter ``main``. You should **not override** this
method.
Usage::
class FileCopier(Application):
stat = Flag("p", "copy stat info as well")
def main(self, src, dst):
if self.stat:
shutil.copy2(src, dst)
else:
shutil.copy(src, dst)
if __name__ == "__main__":
FileCopier.run()
There are several class-level attributes you may set:
* ``PROGNAME`` - the name of the program; if ``None`` (the default), it is set to the
name of the executable (``argv[0]``), can be in color. If only a color, will be applied to the name.
* ``VERSION`` - the program's version (defaults to ``1.0``, can be in color)
* ``DESCRIPTION`` - a short description of your program (shown in help). If not set,
the class' ``__doc__`` will be used. Can be in color.
* ``USAGE`` - the usage line (shown in help)
* ``COLOR_USAGE`` - The color of the usage line
* ``COLOR_GROUPS`` - A dictionary that sets colors for the groups, like Meta-switches, Switches,
and Subcommands
A note on sub-commands: when an application is the root, its ``parent`` attribute is set to
``None``. When it is used as a nested-command, ``parent`` will point to be its direct ancestor.
Likewise, when an application is invoked with a sub-command, its ``nested_command`` attribute
will hold the chosen sub-application and its command-line arguments (a tuple); otherwise, it
will be set to ``None``
"""
PROGNAME = None
DESCRIPTION = None
VERSION = None
USAGE = None
COLOR_USAGE = None
COLOR_GROUPS = None
CALL_MAIN_IF_NESTED_COMMAND = True
parent = None
nested_command = None
_unbound_switches = ()
def __init__(self, executable):
    """Set up the application instance for the given executable path.

    Normalizes PROGNAME/DESCRIPTION/color settings, then scans the class
    MRO for switch functions and Subcommand attributes, building the
    name -> switch lookup tables used by argument parsing.
    """
    # Filter colors: derive PROGNAME from the executable name, optionally
    # combining it with a color style supplied on the class.
    if self.PROGNAME is None:
        self.PROGNAME = os.path.basename(executable)
    elif isinstance(self.PROGNAME, colors._style):
        self.PROGNAME = self.PROGNAME | os.path.basename(executable)
    elif colors.filter(self.PROGNAME) == '':
        # PROGNAME contained only color codes; apply them to the basename.
        self.PROGNAME = colors.extract(self.PROGNAME) | os.path.basename(executable)
    if self.DESCRIPTION is None:
        self.DESCRIPTION = getdoc(self)
    # Allow None for the colors
    self.COLOR_GROUPS=defaultdict(lambda:colors.do_nothing, dict() if type(self).COLOR_GROUPS is None else type(self).COLOR_GROUPS )
    if type(self).COLOR_USAGE is None:
        self.COLOR_USAGE=colors.do_nothing
    self.executable = executable
    self._switches_by_name = {}
    self._switches_by_func = {}
    self._switches_by_envar = {}
    self._subcommands = {}
    # Walk the MRO base-first so subclasses override inherited definitions.
    for cls in reversed(type(self).mro()):
        for obj in cls.__dict__.values():
            if isinstance(obj, Subcommand):
                name = colors.filter(obj.name)
                if name.startswith("-"):
                    raise SubcommandError("Subcommand names cannot start with '-'")
                # it's okay for child classes to override subcommands set by their parents
                self._subcommands[name] = obj
                continue
            swinfo = getattr(obj, "_switch_info", None)
            if not swinfo:
                continue
            for name in swinfo.names:
                if name in self._unbound_switches:
                    # Explicitly unbound via unbind_switches(); skip.
                    continue
                if name in self._switches_by_name and not self._switches_by_name[name].overridable:
                    raise SwitchError("Switch %r already defined and is not overridable" % (name,))
                self._switches_by_name[name] = swinfo
                self._switches_by_func[swinfo.func] = swinfo
                if swinfo.envname:
                    self._switches_by_envar[swinfo.envname] = swinfo
@property
def root_app(self):
    """The outermost Application in the parent chain (self when unnested)."""
    if self.parent:
        return self.parent.root_app
    return self
@classmethod
def unbind_switches(cls, *switch_names):
    """Unbinds the given switch names from this application, so they will
    not be registered during ``__init__``. Leading dashes are ignored.
    For example::

        class MyApp(cli.Application):
            pass
        MyApp.unbind_switches("--version")
    """
    cls._unbound_switches += tuple(name.lstrip("-") for name in switch_names if name)
@classmethod
def subcommand(cls, name, subapp = None):
    """Registers the given sub-application as a sub-command of this one.
    Usable both as a decorator and as a plain ``classmethod``::

        @MyApp.subcommand("foo")
        class FooApp(cli.Application):
            pass

    Or ::

        MyApp.subcommand("foo", FooApp)

    .. versionadded:: 1.1

    .. versionadded:: 1.3
        The subcommand can also be a string, in which case it is treated as a
        fully-qualified class name and is imported on demand. For examples,

        MyApp.subcommand("foo", "fully.qualified.package.FooApp")
    """
    def wrapper(subapplication):
        # Derive the attribute suffix from either the dotted-name string
        # or the class itself.
        if isinstance(subapplication, str):
            suffix = subapplication
        else:
            suffix = subapplication.__name__
        setattr(cls, "_subcommand_%s" % (suffix,),
                Subcommand(name, subapplication))
        return subapplication
    if subapp:
        return wrapper(subapp)
    return wrapper
def _parse_args(self, argv):
    """Parse *argv* into ``(swfuncs, tailargs)``.

    ``swfuncs`` maps each switch function to a SwitchParseInfo carrying the
    switch name as given, its (converted) argument tuple, and its position;
    ``tailargs`` collects the positional arguments. Environment-variable
    bound switches are folded in last, with negative indices, and never
    override values given on the command line.
    """
    tailargs = []
    swfuncs = {}
    index = 0
    while argv:
        index += 1
        a = argv.pop(0)
        val = None
        if a == "--":
            # end of options, treat the rest as tailargs
            tailargs.extend(argv)
            break
        if a in self._subcommands:
            # Hand the remaining argv to the sub-application and stop here.
            subcmd = self._subcommands[a].get()
            self.nested_command = (subcmd, [self.PROGNAME + " " + self._subcommands[a].name] + argv)
            break
        elif a.startswith("--") and len(a) >= 3:
            # [--name], [--name=XXX], [--name, XXX], [--name, ==, XXX],
            # [--name=, XXX], [--name, =XXX]
            eqsign = a.find("=")
            if eqsign >= 0:
                # Split "--name=val" and push "=val" back for uniform handling.
                name = a[2:eqsign]
                argv.insert(0, a[eqsign:])
            else:
                name = a[2:]
            swname = "--" + name
            if name not in self._switches_by_name:
                raise UnknownSwitch("Unknown switch %s" % (swname,))
            swinfo = self._switches_by_name[name]
            if swinfo.argtype:
                if not argv:
                    raise MissingArgument("Switch %s requires an argument" % (swname,))
                a = argv.pop(0)
                if a and a[0] == "=":
                    if len(a) >= 2:
                        # Value was attached: "=val"
                        val = a[1:]
                    else:
                        # Bare "=", value is the next token.
                        if not argv:
                            raise MissingArgument("Switch %s requires an argument" % (swname))
                        val = argv.pop(0)
                else:
                    val = a
        elif a.startswith("-") and len(a) >= 2:
            # [-a], [-a, XXX], [-aXXX], [-abc]
            name = a[1]
            swname = "-" + name
            if name not in self._switches_by_name:
                raise UnknownSwitch("Unknown switch %s" % (swname,))
            swinfo = self._switches_by_name[name]
            if swinfo.argtype:
                if len(a) >= 3:
                    # Value glued to the flag: "-aXXX"
                    val = a[2:]
                else:
                    if not argv:
                        raise MissingArgument("Switch %s requires an argument" % (swname,))
                    val = argv.pop(0)
            elif len(a) >= 3:
                # Grouped short flags: "-abc" -> handle "-a", requeue "-bc".
                argv.insert(0, "-" + a[2:])
        else:
            if a.startswith("-"):
                raise UnknownSwitch("Unknown switch %s" % (a,))
            tailargs.append(a)
            continue
        # handle argument
        val = self._handle_argument(val, swinfo.argtype, name)
        if swinfo.func in swfuncs:
            if swinfo.list:
                # Repeatable switch: accumulate into the existing list.
                swfuncs[swinfo.func].val[0].append(val)
            else:
                if swfuncs[swinfo.func].swname == swname:
                    raise SwitchError("Switch %r already given" % (swname,))
                else:
                    raise SwitchError("Switch %r already given (%r is equivalent)" % (
                        swfuncs[swinfo.func].swname, swname))
        else:
            if swinfo.list:
                swfuncs[swinfo.func] = SwitchParseInfo(swname, ([val],), index)
            elif val is NotImplemented:
                # Flag-style switch with no argument.
                swfuncs[swinfo.func] = SwitchParseInfo(swname, (), index)
            else:
                swfuncs[swinfo.func] = SwitchParseInfo(swname, (val,), index)
    # Extracting arguments from environment variables
    envindex = 0
    for env, swinfo in self._switches_by_envar.items():
        envindex -= 1
        envval = local.env.get(env)
        if envval is None:
            continue
        if swinfo.func in swfuncs:
            continue  # skip if overridden by command line arguments
        val = self._handle_argument(envval, swinfo.argtype, env)
        envname = "$%s" % (env,)
        if swinfo.list:
            # multiple values over environment variables are not supported,
            # this will require some sort of escaping and separator convention
            swfuncs[swinfo.func] = SwitchParseInfo(envname, ([val],), envindex)
        elif val is NotImplemented:
            swfuncs[swinfo.func] = SwitchParseInfo(envname, (), envindex)
        else:
            swfuncs[swinfo.func] = SwitchParseInfo(envname, (val,), envindex)
    return swfuncs, tailargs
@classmethod
def autocomplete(cls, argv):
    """This is supplied to make subclassing and testing argument completion methods easier"""
    # Intentionally a no-op hook; subclasses override to implement completion.
    pass
@staticmethod
def _handle_argument(val, argtype, name):
    """Convert *val* with *argtype*, or return NotImplemented for flags.

    Raises WrongArgumentType (naming the switch *name*) when conversion
    fails with TypeError or ValueError.
    """
    if not argtype:
        # Flag-style switch: no argument to convert.
        return NotImplemented
    try:
        return argtype(val)
    except (TypeError, ValueError):
        ex = sys.exc_info()[1]  # compat
        raise WrongArgumentType("Argument of %s expected to be %r, not %r:\n %r" % (
            name, argtype, val, ex))
def _validate_args(self, swfuncs, tailargs):
    """Validate the parsed switches and positional arguments.

    Raises the Show* marker exceptions for meta-switches, enforces
    mandatory/requires/excludes constraints, checks positional-argument
    arity against ``main()``'s signature, and applies positional
    validators.  Returns ``(ordered, tailargs)`` where *ordered* is the
    switch callbacks sorted by their parse order.
    """
    # Meta-switches short-circuit via exceptions caught in run()
    if six.get_method_function(self.help) in swfuncs:
        raise ShowHelp()
    if six.get_method_function(self.helpall) in swfuncs:
        raise ShowHelpAll()
    if six.get_method_function(self.version) in swfuncs:
        raise ShowVersion()

    # Collect requires/excludes sets per switch function
    requirements = {}
    exclusions = {}
    for swinfo in self._switches_by_func.values():
        if swinfo.mandatory and not swinfo.func in swfuncs:
            raise MissingMandatorySwitch("Switch %s is mandatory" %
                ("/".join(("-" if len(n) == 1 else "--") + n for n in swinfo.names),))
        requirements[swinfo.func] = set(self._switches_by_name[req] for req in swinfo.requires)
        exclusions[swinfo.func] = set(self._switches_by_name[exc] for exc in swinfo.excludes)

    # TODO: compute topological order
    gotten = set(swfuncs.keys())
    for func in gotten:
        missing = set(f.func for f in requirements[func]) - gotten
        if missing:
            raise SwitchCombinationError("Given %s, the following are missing %r" %
                (swfuncs[func].swname, [self._switches_by_func[f].names[0] for f in missing]))
        invalid = set(f.func for f in exclusions[func]) & gotten
        if invalid:
            raise SwitchCombinationError("Given %s, the following are invalid %r" %
                (swfuncs[func].swname, [swfuncs[f].swname for f in invalid]))

    # Arity check against main()'s signature: *varargs lifts the upper bound
    m = six.getfullargspec(self.main)
    max_args = six.MAXSIZE if m.varargs else len(m.args) - 1
    min_args = len(m.args) - 1 - (len(m.defaults) if m.defaults else 0)
    if len(tailargs) < min_args:
        raise PositionalArgumentsError("Expected at least %d positional arguments, got %r" %
            (min_args, tailargs))
    elif len(tailargs) > max_args:
        raise PositionalArgumentsError("Expected at most %d positional arguments, got %r" %
            (max_args, tailargs))

    # Positional argument validation: explicit `positional` decoration wins,
    # otherwise fall back to main()'s annotations
    if hasattr(self.main, 'positional'):
        tailargs = self._positional_validate(tailargs, self.main.positional, self.main.positional_varargs, m.args[1:], m.varargs)
    elif hasattr(m, 'annotations'):
        args_names = list(m.args[1:])
        positional = [None]*len(args_names)
        varargs = None

        # All args are positional, so convert kargs to positional
        for item in m.annotations:
            if item == m.varargs:
                varargs = m.annotations[item]
            else:
                positional[args_names.index(item)] = m.annotations[item]

        tailargs = self._positional_validate(tailargs, positional, varargs,
            m.args[1:], m.varargs)

    # Replay switch callbacks in the order they were given on the command line
    ordered = [(f, a) for _, f, a in
        sorted([(sf.index, f, sf.val) for f, sf in swfuncs.items()])]
    return ordered, tailargs
def _positional_validate(self, args, validator_list, varargs, argnames, varargname):
    """Apply per-position validators (and the varargs validator) to *args*."""
    validated = list(args)
    n_fixed = len(validator_list)

    # zip() stops at the shorter sequence, covering min(len(args), n_fixed)
    for idx, (value, validator) in enumerate(zip(args, validator_list)):
        if validator is not None:
            validated[idx] = self._handle_argument(value, validator, argnames[idx])

    if len(args) > n_fixed:
        tail = args[n_fixed:]
        if varargs is not None:
            validated[n_fixed:] = [
                self._handle_argument(extra, varargs, varargname) for extra in tail]
        else:
            validated[n_fixed:] = tail

    return validated
@classmethod
def run(cls, argv = None, exit = True):  # @ReservedAssignment
    """
    Runs the application, taking the arguments from ``sys.argv`` by default if
    nothing is passed. If ``exit`` is
    ``True`` (the default), the function will exit with the appropriate return code;
    otherwise it will return a tuple of ``(inst, retcode)``, where ``inst`` is the
    application instance created internally by this function and ``retcode`` is the
    exit code of the application.

    .. note::
       Setting ``exit`` to ``False`` is intended for testing/debugging purposes only -- do
       not override it in other situations.
    """
    if argv is None:
        argv = sys.argv
    cls.autocomplete(argv)
    argv = list(argv)
    # argv[0] is the program name and seeds the instance's PROGNAME
    inst = cls(argv.pop(0))
    retcode = 0
    try:
        swfuncs, tailargs = inst._parse_args(argv)
        ordered, tailargs = inst._validate_args(swfuncs, tailargs)
    # the Show* exceptions are raised by _validate_args for meta-switches
    except ShowHelp:
        inst.help()
    except ShowHelpAll:
        inst.helpall()
    except ShowVersion:
        inst.version()
    except SwitchError:
        ex = sys.exc_info()[1]  # compatibility with python 2.5
        print("Error: %s" % (ex,))
        print("------")
        inst.help()
        retcode = 2
    else:
        # invoke the switch callbacks in command-line order before main()
        for f, a in ordered:
            f(inst, *a)

        cleanup = None
        if not inst.nested_command or inst.CALL_MAIN_IF_NESTED_COMMAND:
            retcode = inst.main(*tailargs)
            cleanup = functools.partial(inst.cleanup, retcode)
        if not retcode and inst.nested_command:
            # delegate to the nested sub-command application
            subapp, argv = inst.nested_command
            subapp.parent = inst
            inst, retcode = subapp.run(argv, exit = False)
        if cleanup:
            cleanup()

        # main() returning None means success
        if retcode is None:
            retcode = 0

    if exit:
        sys.exit(retcode)
    else:
        return inst, retcode
@classmethod
def invoke(cls, *args, **switches):
    """Invoke this application programmatically (as a function), in the same way ``run()``
    would. There are two key differences: the return value of ``main()`` is not converted to
    an integer (returned as-is), and exceptions are not swallowed either.

    :param args: any positional arguments for ``main()``
    :param switches: command-line switches are passed as keyword arguments,
                     e.g., ``foo=5`` for ``--foo=5``
    :returns: a tuple of ``(inst, retcode)``
    """
    inst = cls("")

    swfuncs = inst._parse_kwd_args(switches)
    ordered, tailargs = inst._validate_args(swfuncs, args)
    for f, a in ordered:
        f(inst, *a)

    # Bugfix: initialize retcode so it is always bound.  Previously, when a
    # nested command was present and CALL_MAIN_IF_NESTED_COMMAND was falsy,
    # main() was skipped and the `if not retcode` test below raised
    # UnboundLocalError.  ``run()`` initializes retcode for the same reason.
    retcode = None
    cleanup = None
    if not inst.nested_command or inst.CALL_MAIN_IF_NESTED_COMMAND:
        retcode = inst.main(*tailargs)
        cleanup = functools.partial(inst.cleanup, retcode)
    if not retcode and inst.nested_command:
        subapp, argv = inst.nested_command
        subapp.parent = inst
        inst, retcode = subapp.run(argv, exit = False)
    if cleanup:
        cleanup()

    return inst, retcode
def _parse_kwd_args(self, switches):
    """Parses switches given as keyword arguments (used by :meth:`invoke`).

    Each keyword names a switch attribute on the class; its value is
    converted into the argument tuple that the switch callback expects.
    Returns a ``{switch_func: SwitchParseInfo}`` mapping.
    """
    swfuncs = {}
    for index, (swname, val) in enumerate(switches.items(), 1):
        switch = getattr(type(self), swname)
        swinfo = self._switches_by_func[switch._switch_info.func]
        if isinstance(switch, CountOf):
            # CountOf callbacks receive an iterable whose length is the count
            p = (range(val),)
        elif swinfo.list and not hasattr(val, "__iter__"):
            raise SwitchError("Switch %r must be a sequence (iterable)" % (swname,))
        elif not swinfo.argtype:
            # a flag
            if val not in (True, False, None, Flag):
                raise SwitchError("Switch %r is a boolean flag" % (swname,))
            p = ()
        else:
            p = (val,)
        swfuncs[swinfo.func] = SwitchParseInfo(swname, p, index)
    return swfuncs
def main(self, *args):
    """Default entry point -- implement in subclasses (no need to call super).

    Returns 1 (error) for an unknown or missing sub-command, or when the
    application defines no sub-commands at all; returns None when a nested
    sub-command was parsed and will be dispatched by run().
    """
    if not self._subcommands:
        print("main() not implemented")
        return 1

    if args:
        print("Unknown sub-command %r" % (args[0],))
        print("------")
        self.help()
        return 1

    if not self.nested_command:
        print("No sub-command given")
        print("------")
        self.help()
        return 1
def cleanup(self, retcode):
    """Called after ``main()`` and all subapplications have executed, to perform any necessary cleanup.
    The base implementation does nothing.

    :param retcode: the return code of ``main()``
    """
@switch(["--help-all"], overridable = True, group = "Meta-switches")
def helpall(self):
    """Print help messages of all subcommands and quit"""
    self.help()
    print("")

    if self._subcommands:
        for name, subcls in sorted(self._subcommands.items()):
            # instantiate each subcommand app with "<prog> <subcommand>" as its name
            subapp = (subcls.get())("%s %s" % (self.PROGNAME, name))
            subapp.parent = self
            for si in subapp._switches_by_func.values():
                if si.group == "Meta-switches":
                    # hide the subcommand's own meta-switches in the combined output
                    si.group = "Hidden-switches"
            subapp.helpall()
@switch(["-h", "--help"], overridable = True, group = "Meta-switches")
def help(self):  # @ReservedAssignment
    """Prints this help message and quits"""
    if self._get_prog_version():
        self.version()
        print("")
    if self.DESCRIPTION:
        print(self.DESCRIPTION.strip() + '\n')

    # Build the usage tail from main()'s signature: defaulted parameters
    # render as [name=value], *varargs as "name..."
    m = six.getfullargspec(self.main)
    tailargs = m.args[1:]  # skip self
    if m.defaults:
        for i, d in enumerate(reversed(m.defaults)):
            tailargs[-i - 1] = "[%s=%r]" % (tailargs[-i - 1], d)
    if m.varargs:
        tailargs.append("%s..." % (m.varargs,))
    tailargs = " ".join(tailargs)

    with self.COLOR_USAGE:
        print("Usage:")
        if not self.USAGE:
            if self._subcommands:
                self.USAGE = " %(progname)s [SWITCHES] [SUBCOMMAND [SWITCHES]] %(tailargs)s\n"
            else:
                self.USAGE = " %(progname)s [SWITCHES] %(tailargs)s\n"
        print(self.USAGE % {"progname": colors.filter(self.PROGNAME), "tailargs": tailargs})

    # Bucket switch infos by their display group
    by_groups = {}
    for si in self._switches_by_func.values():
        if si.group not in by_groups:
            by_groups[si.group] = []
        by_groups[si.group].append(si)

    def switchs(by_groups, show_groups):
        # Yields (switch-info, rendered-name-prefix, group-color) for every
        # switch, optionally printing group headers as a side effect
        for grp, swinfos in sorted(by_groups.items(), key = lambda item: item[0]):
            if show_groups:
                print(self.COLOR_GROUPS[grp] | grp)
            for si in sorted(swinfos, key = lambda si: si.names):
                swnames = ", ".join(("-" if len(n) == 1 else "--") + n for n in si.names
                    if n in self._switches_by_name and self._switches_by_name[n] == si)
                if si.argtype:
                    if isinstance(si.argtype, type):
                        typename = si.argtype.__name__
                    else:
                        typename = str(si.argtype)
                    argtype = " %s:%s" % (si.argname.upper(), typename)
                else:
                    argtype = ""
                prefix = swnames + argtype
                yield si, prefix, self.COLOR_GROUPS[grp]
            if show_groups:
                print("")

    # First pass (no group headers) just measures the widest switch prefix
    sw_width = max(len(prefix) for si, prefix, color in switchs(by_groups, False)) + 4
    cols, _ = get_terminal_size()
    description_indent = " %s%s%s"
    wrapper = TextWrapper(width = max(cols - min(sw_width, 60), 50) - 6)
    indentation = "\n" + " " * (cols - wrapper.width)

    # Second pass prints each switch with its wrapped help text
    for si, prefix, color in switchs(by_groups, True):
        help = si.help  # @ReservedAssignment
        if si.list:
            help += "; may be given multiple times"
        if si.mandatory:
            help += "; required"
        if si.requires:
            help += "; requires %s" % (", ".join((("-" if len(s) == 1 else "--") + s) for s in si.requires))
        if si.excludes:
            help += "; excludes %s" % (", ".join((("-" if len(s) == 1 else "--") + s) for s in si.excludes))

        msg = indentation.join(wrapper.wrap(" ".join(l.strip() for l in help.splitlines())))

        if len(prefix) + wrapper.width >= cols:
            padding = indentation
        else:
            padding = " " * max(cols - wrapper.width - len(prefix) - 4, 1)
        print(description_indent % (color | prefix, padding, color | msg))

    if self._subcommands:
        gc = self.COLOR_GROUPS["Subcommands"]
        print(gc | "Subcommands:")
        for name, subcls in sorted(self._subcommands.items()):
            with gc:
                subapp = subcls.get()
                doc = subapp.DESCRIPTION if subapp.DESCRIPTION else getdoc(subapp)
                help = doc + "; " if doc else ""  # @ReservedAssignment
                help += "see '%s %s --help' for more info" % (self.PROGNAME, name)
                msg = indentation.join(wrapper.wrap(" ".join(l.strip() for l in help.splitlines())))
                if len(name) + wrapper.width >= cols:
                    padding = indentation
                else:
                    padding = " " * max(cols - wrapper.width - len(name) - 4, 1)
                print(description_indent % (subcls.name, padding, gc | colors.filter(msg)))
def _get_prog_version(self):
    """Return the first VERSION found walking up the parent-app chain, else None."""
    node = self
    while node is not None:
        version = getattr(node, "VERSION", None)
        if version is not None:
            return version
        node = node.parent
    return None
@switch(["-v", "--version"], overridable = True, group = "Meta-switches")
def version(self):
    """Prints the program's version and quits"""
    prog_version = self._get_prog_version()
    if prog_version is None:
        prog_version = "(version not set)"
    print('{0} {1}'.format(self.PROGNAME, prog_version))
| {
"content_hash": "e8ccbb5c2f1155924701a20b881c30e5",
"timestamp": "",
"source": "github",
"line_count": 687,
"max_line_length": 136,
"avg_line_length": 40.0844250363901,
"alnum_prop": 0.5296317815382381,
"repo_name": "henryiii/plumbum",
"id": "858d3aff09ed26c59f590292ee1e70a38e0eafbf",
"size": "27538",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "plumbum/cli/application.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "355251"
},
{
"name": "Shell",
"bytes": "590"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import frappe, json
from frappe import _
from frappe.utils import cstr
from frappe.model import default_fields
@frappe.whitelist()
def make_mapped_doc(method, source_name, selected_children=None):
    '''Returns the mapped document calling the given mapper method.
    Sets selected_children as flags for the `get_mapped_doc` method.

    Called from `open_mapped_doc` from create_new.js'''
    mapper = frappe.get_attr(method)

    # only whitelisted mapper methods may be invoked from the client
    if mapper not in frappe.whitelisted:
        raise frappe.PermissionError

    children = json.loads(selected_children) if selected_children else selected_children
    frappe.flags.selected_children = children or None

    return mapper(source_name)
@frappe.whitelist()
def map_docs(method, source_names, target_doc):
    '''Returns the mapped document calling the given mapper method
    with each of the given source docs on the target doc'''
    mapper = frappe.get_attr(method)

    # only whitelisted mapper methods may be invoked from the client
    if mapper not in frappe.whitelisted:
        raise frappe.PermissionError

    for source_name in json.loads(source_names):
        target_doc = mapper(source_name, target_doc)
    return target_doc
def get_mapped_doc(from_doctype, from_docname, table_maps, target_doc=None,
    postprocess=None, ignore_permissions=False, ignore_child_tables=False):
    """Map a source document (and optionally its child-table rows) onto a
    new or given target document according to *table_maps*.

    :param table_maps: dict keyed by source doctype; each value describes
        the target doctype plus optional condition/filter/field_map/
        postprocess/validation rules.
    :param postprocess: optional callable run as postprocess(source, target).
    """
    # main
    if not target_doc:
        target_doc = frappe.new_doc(table_maps[from_doctype]["doctype"])
    elif isinstance(target_doc, basestring):
        # NOTE(review): ``basestring`` exists only on Python 2 — this module
        # appears to target a py2 codebase; confirm before porting to py3.
        target_doc = frappe.get_doc(json.loads(target_doc))

    if not ignore_permissions and not target_doc.has_permission("create"):
        target_doc.raise_no_permission_to("create")

    source_doc = frappe.get_doc(from_doctype, from_docname)

    if not ignore_permissions:
        if not source_doc.has_permission("read"):
            source_doc.raise_no_permission_to("read")

    # copy the parent document's fields first
    map_doc(source_doc, target_doc, table_maps[source_doc.doctype])

    row_exists_for_parentfield = {}

    # children
    if not ignore_child_tables:
        for df in source_doc.meta.get_table_fields():
            source_child_doctype = df.options
            table_map = table_maps.get(source_child_doctype)

            # if table_map isn't explicitly specified check if both source
            # and target have the same fieldname and same table options and
            # both of them don't have no_copy
            if not table_map:
                target_df = target_doc.meta.get_field(df.fieldname)
                if target_df:
                    target_child_doctype = target_df.options
                    if target_df and target_child_doctype==source_child_doctype and not df.no_copy and not target_df.no_copy:
                        table_map = {
                            "doctype": target_child_doctype
                        }

            if table_map:
                for source_d in source_doc.get(df.fieldname):
                    if "condition" in table_map:
                        if not table_map["condition"](source_d):
                            continue

                    # if children are selected (checked from UI) for this table type,
                    # and this record is not in the selected children, then continue
                    if (frappe.flags.selected_children
                        and (df.fieldname in frappe.flags.selected_children)
                        and source_d.name not in frappe.flags.selected_children[df.fieldname]):
                        continue

                    target_child_doctype = table_map["doctype"]
                    target_parentfield = target_doc.get_parentfield_of_doctype(target_child_doctype)

                    # does row exist for a parentfield?
                    if target_parentfield not in row_exists_for_parentfield:
                        row_exists_for_parentfield[target_parentfield] = (True
                            if target_doc.get(target_parentfield) else False)

                    # "add_if_empty" rows are only copied when the target table is empty
                    if table_map.get("add_if_empty") and \
                        row_exists_for_parentfield.get(target_parentfield):
                        continue

                    if table_map.get("filter") and table_map.get("filter")(source_d):
                        continue

                    map_child_doc(source_d, target_doc, table_map, source_doc)

    if postprocess:
        postprocess(source_doc, target_doc)

    # tell the client a fresh mapping was performed
    target_doc.set_onload("load_after_mapping", True)

    return target_doc
def map_doc(source_doc, target_doc, table_map, source_parent=None):
    """Validate *source_doc* against the table_map rules, copy its fields
    onto *target_doc*, then run the optional postprocess hook."""
    validations = table_map.get("validation")
    if validations:
        for fieldname, condition in validations.items():
            # only the "=" operator is supported
            if condition[0]=="=":
                if source_doc.get(fieldname) != condition[1]:
                    frappe.throw(_("Cannot map because following condition fails: ")
                        + fieldname + "=" + cstr(condition[1]))

    map_fields(source_doc, target_doc, table_map, source_parent)

    if "postprocess" in table_map:
        table_map["postprocess"](source_doc, target_doc, source_parent)
def map_fields(source_doc, target_doc, table_map, source_parent):
    """Copy values field-by-field from source to target, honouring no_copy,
    explicit field_map entries, and link-field conventions."""
    # fields that must never be copied: no_copy/Table fields on either side,
    # the standard default fields, and anything in table_map["field_no_map"]
    no_copy_fields = set([d.fieldname for d in source_doc.meta.get("fields") if (d.no_copy==1 or d.fieldtype=="Table")]
        + [d.fieldname for d in target_doc.meta.get("fields") if (d.no_copy==1 or d.fieldtype=="Table")]
        + list(default_fields)
        + list(table_map.get("field_no_map", [])))

    for df in target_doc.meta.get("fields"):
        if df.fieldname not in no_copy_fields:
            # map same fields
            val = source_doc.get(df.fieldname)
            if val not in (None, ""):
                target_doc.set(df.fieldname, val)

            elif df.fieldtype == "Link":
                if not target_doc.get(df.fieldname):
                    # map link fields having options == source doctype
                    if df.options == source_doc.doctype:
                        target_doc.set(df.fieldname, source_doc.name)

                    elif source_parent and df.options == source_parent.doctype:
                        target_doc.set(df.fieldname, source_parent.name)

    # map other fields
    field_map = table_map.get("field_map")

    if field_map:
        if isinstance(field_map, dict):
            # {source_fieldname: target_fieldname}
            for source_key, target_key in field_map.items():
                val = source_doc.get(source_key)
                if val not in (None, ""):
                    target_doc.set(target_key, val)
        else:
            # sequence of (source_fieldname, target_fieldname) pairs
            for fmap in field_map:
                val = source_doc.get(fmap[0])
                if val not in (None, ""):
                    target_doc.set(fmap[1], val)

    # map idx
    if source_doc.idx:
        target_doc.idx = source_doc.idx

    # add fetch
    for df in target_doc.meta.get("fields", {"fieldtype": "Link"}):
        if target_doc.get(df.fieldname):
            map_fetch_fields(target_doc, df, no_copy_fields)
def map_fetch_fields(target_doc, df, no_copy_fields):
    """Populate read-only "fetch" fields on *target_doc* that pull values
    through the link field *df*.

    :param df: a Link field whose value names the linked document.
    :param no_copy_fields: fieldnames that must never be populated.
    """
    linked_doc = None

    # options should be like "link_fieldname.fieldname_in_liked_doc"
    for fetch_df in target_doc.meta.get("fields", {"options": "^{0}.".format(df.fieldname)}):
        if not (fetch_df.fieldtype == "Read Only" or fetch_df.read_only):
            continue

        if not target_doc.get(fetch_df.fieldname) and fetch_df.fieldname not in no_copy_fields:
            source_fieldname = fetch_df.options.split(".")[1]

            if not linked_doc:
                try:
                    linked_doc = frappe.get_doc(df.options, target_doc.get(df.fieldname))
                except Exception:
                    # Bugfix: was a bare ``except:``, which also swallowed
                    # SystemExit/KeyboardInterrupt.  A missing or inaccessible
                    # linked doc still aborts fetching silently (best-effort).
                    return

            val = linked_doc.get(source_fieldname)

            if val not in (None, ""):
                target_doc.set(fetch_df.fieldname, val)
def map_child_doc(source_d, target_parent, table_map, source_parent=None):
    """Create a new child row of table_map["doctype"] under *target_parent*,
    map *source_d* onto it, and append it to the matching child table."""
    child_doctype = table_map["doctype"]
    parentfield = target_parent.get_parentfield_of_doctype(child_doctype)
    child = frappe.new_doc(child_doctype, target_parent, parentfield)

    map_doc(source_d, child, table_map, source_parent)

    # let the framework assign the row index on append
    child.idx = None
    target_parent.append(parentfield, child)

    return child
| {
"content_hash": "1fb649c7d6e6e0fa4037d87ae8578d34",
"timestamp": "",
"source": "github",
"line_count": 201,
"max_line_length": 159,
"avg_line_length": 34.398009950248756,
"alnum_prop": 0.7061035579982644,
"repo_name": "bcornwellmott/frappe",
"id": "72a9a77e3c825997349cde6b24f21eb925a998ad",
"size": "7015",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "frappe/model/mapper.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "387069"
},
{
"name": "HTML",
"bytes": "206997"
},
{
"name": "JavaScript",
"bytes": "1664389"
},
{
"name": "Python",
"bytes": "1863544"
},
{
"name": "Shell",
"bytes": "517"
}
],
"symlink_target": ""
} |
import base64
from asyncio import coroutine
from .base import Base
class Target(Base):
    """Target that base64-decodes its input text."""

    # consumes the full text rather than a single value
    unary = False

    @coroutine
    def __call__(self, text, encoding='utf-8'):
        decoded_bytes = base64.b64decode(text)
        return decoded_bytes.decode(encoding)
| {
"content_hash": "1edc983fdbeba17dabacdccaf10288aa",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 54,
"avg_line_length": 18.916666666666668,
"alnum_prop": 0.6872246696035242,
"repo_name": "Answeror/torabot",
"id": "7625c59eedab9b897756abc2144685989529fe6e",
"size": "227",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "torabot/lang/targets/base64_decode.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "174712"
},
{
"name": "JavaScript",
"bytes": "2849805"
},
{
"name": "Python",
"bytes": "552234"
},
{
"name": "Shell",
"bytes": "822"
},
{
"name": "TeX",
"bytes": "3381"
},
{
"name": "XSLT",
"bytes": "5063"
}
],
"symlink_target": ""
} |
"""
Low level deployment operations.
"""
from random import randint, Random
from os import path, makedirs
from spur import LocalShell
from time import time
from signal import SIGKILL
class TimeoutError(Exception):
    """
    Exception dealing with executed commands that timeout.
    """
    # NOTE(review): this name shadows Python 3's builtin TimeoutError;
    # module code raises/documents it by name, so renaming would be a
    # breaking change for callers catching it.
    pass
def execute(cmd, timeout=5, **kwargs):
    """
    Executes the given shell command

    Args:
        cmd: List of command arguments, or a string to run via ``bash -c``
        timeout: maximum allotted time (seconds) for the command
        **kwargs: passes to LocalShell.spawn

    Returns:
        An execution result.

    Raises:
        TimeoutError: if the command exceeds the timeout
        NoSuchCommandError, RunProcessError, FileNotFoundError
    """
    shell = LocalShell()

    #It is unlikely that someone actually intends to supply
    #a string based on how spur works.
    # Bugfix: use isinstance instead of ``type(cmd) == str`` so str
    # subclasses are handled too (idiomatic type check).
    if isinstance(cmd, str):
        cmd = ["bash", "-c"] + [cmd]

    process = shell.spawn(cmd, store_pid=True, **kwargs)
    start_time = time()

    # NOTE(review): this polls in a tight busy-wait; consider a short sleep
    # per iteration if CPU usage becomes a concern.
    while process.is_running():
        delta_time = time() - start_time
        if delta_time > timeout:
            process.send_signal(SIGKILL)
            raise TimeoutError(cmd, timeout)

    return process.wait_for_result()
def create_user(username, home_directory_root="/home/"):
    """
    Creates a user with the given username

    Args:
        username: the username to create
        home_directory_root: the parent directory to create the
                             home directory in. Defaults to /home/

    Returns:
        The new user's home directory
    """
    home_dir = path.join(home_directory_root, username)

    # make sure the home directory exists before useradd points at it
    if not path.isdir(home_dir):
        makedirs(home_dir)

    useradd_cmd = ["useradd", "-s", "/bin/bash", "-m", "-d", home_dir, username]
    execute(useradd_cmd)
    return home_dir
| {
"content_hash": "8a16b0f7866264f30ac5433d8070ecc9",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 81,
"avg_line_length": 25.492753623188406,
"alnum_prop": 0.6378624218305856,
"repo_name": "RitwikGupta/picoCTF-shell-manager",
"id": "4b35a1f3d9f5c65e7cc950a73d5c158fb4bd064f",
"size": "1759",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hacksport/operations.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "2892"
},
{
"name": "JavaScript",
"bytes": "189727"
},
{
"name": "Makefile",
"bytes": "160"
},
{
"name": "Python",
"bytes": "77948"
},
{
"name": "Shell",
"bytes": "449"
}
],
"symlink_target": ""
} |
"""
Tests deploying the presto configuration
"""
from mock import patch
from fabric.api import env
from prestoadmin import deploy
from tests.base_test_case import BaseTestCase
from tests.unit import SudoResult
class TestDeploy(BaseTestCase):
    """Unit tests for prestoadmin.deploy: config rendering and remote deployment."""

    # output_format: dicts become key=value lines, lists join with newlines,
    # scalars are stringified
    def test_output_format_dict(self):
        conf = {'a': 'b', 'c': 'd'}
        self.assertEqual(deploy.output_format(conf),
                         "a=b\nc=d")

    def test_output_format_list(self):
        self.assertEqual(deploy.output_format(['a', 'b']),
                         'a\nb')

    def test_output_format_string(self):
        conf = "A string"
        self.assertEqual(deploy.output_format(conf), conf)

    def test_output_format_int(self):
        conf = 1
        self.assertEqual(deploy.output_format(conf), str(conf))

    @patch('prestoadmin.deploy.configure_presto')
    @patch('prestoadmin.deploy.util.get_coordinator_role')
    @patch('prestoadmin.deploy.env')
    def test_worker_is_coordinator(self, env_mock, coord_mock, configure_mock):
        # a host that is also the coordinator must not get worker config
        env_mock.host = "my.host"
        coord_mock.return_value = ["my.host"]
        deploy.workers()
        assert not configure_mock.called

    @patch('prestoadmin.deploy.w.Worker')
    @patch('prestoadmin.deploy.configure_presto')
    def test_worker_not_coordinator(self, configure_mock, get_conf_mock):
        env.host = "my.host1"
        env.roledefs["worker"] = ["my.host1"]
        env.roledefs["coordinator"] = ["my.host2"]
        deploy.workers()
        assert configure_mock.called

    @patch('prestoadmin.deploy.configure_presto')
    @patch('prestoadmin.deploy.coord.Coordinator')
    def test_coordinator(self, coord_mock, configure_mock):
        env.roledefs['coordinator'] = ['master']
        env.host = 'master'
        deploy.coordinator()
        assert configure_mock.called

    @patch('prestoadmin.deploy.sudo')
    def test_deploy(self, sudo_mock):
        # deploy() must create the remote dir and echo each file's content
        sudo_mock.return_value = SudoResult()
        files = {"jvm.config": "a=b"}
        deploy.deploy(files, "/my/remote/dir")
        sudo_mock.assert_any_call("mkdir -p /my/remote/dir")
        sudo_mock.assert_any_call("echo 'a=b' > /my/remote/dir/jvm.config")

    @patch('__builtin__.open')
    @patch('prestoadmin.deploy.exists')
    @patch('prestoadmin.deploy.files.append')
    @patch('prestoadmin.deploy.sudo')
    def test_deploy_node_properties(self, sudo_mock, append_mock, exists_mock, open_mock):
        sudo_mock.return_value = SudoResult()
        exists_mock.return_value = True
        file_manager = open_mock.return_value.__enter__.return_value
        file_manager.read.return_value = ("key=value")
        # expected remote script: generate node.id once, then strip all
        # other lines before appending the new properties
        command = (
            "if ! ( grep -q -s 'node.id' /my/remote/dir/node.properties ); "
            "then "
            "uuid=$(uuidgen); "
            "echo node.id=$uuid >> /my/remote/dir/node.properties;"
            "fi; "
            "sed -i '/node.id/!d' /my/remote/dir/node.properties; ")
        deploy.deploy_node_properties("key=value", "/my/remote/dir")
        sudo_mock.assert_called_with(command)
        append_mock.assert_called_with("/my/remote/dir/node.properties",
                                       "key=value", True, shell=True)

    @patch('prestoadmin.deploy.sudo')
    @patch('prestoadmin.deploy.secure_create_file')
    def test_deploys_as_presto_user(self, secure_create_file_mock, sudo_mock):
        # files are created with presto:presto ownership and mode 600
        deploy.deploy({'my_file': 'hello!'}, '/remote/path')
        secure_create_file_mock.assert_called_with('/remote/path/my_file', 'presto:presto', 600)
        sudo_mock.assert_called_with("echo 'hello!' > /remote/path/my_file")

    @patch('prestoadmin.deploy.deploy')
    @patch('prestoadmin.deploy.deploy_node_properties')
    def test_configure_presto(self, deploy_node_mock, deploy_mock):
        # node.properties is handled separately from the other config files
        env.host = 'localhost'
        conf = {"node.properties": {"key": "value"}, "jvm.config": ["list"]}
        remote_dir = "/my/remote/dir"
        deploy.configure_presto(conf, remote_dir)
        deploy_mock.assert_called_with({"jvm.config": "list"}, remote_dir)

    def test_escape_quotes_do_nothing(self):
        text = 'basic_text'
        self.assertEqual('basic_text', deploy.escape_single_quotes(text))

    def test_escape_quotes_has_quote(self):
        text = "A quote! ' A quote!"
        self.assertEqual("A quote! '\\'' A quote!",
                         deploy.escape_single_quotes(text))
| {
"content_hash": "e586a011c3e9cabd95efeffd7f315097",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 96,
"avg_line_length": 40.48148148148148,
"alnum_prop": 0.6221408966148216,
"repo_name": "prestodb/presto-admin",
"id": "30a87332a78f9fb2425c453ba766beee6eedfded",
"size": "4939",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/unit/test_deploy.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "7410"
},
{
"name": "Python",
"bytes": "695142"
},
{
"name": "Shell",
"bytes": "6738"
}
],
"symlink_target": ""
} |
from swgpy.object import *
def create(kernel):
    """Build the shared Corellia Beldonna (large2) POI building template."""
    building = Building()

    building.template = "object/building/poi/shared_corellia_beldonna_large2.iff"
    building.attribute_template_id = -1
    building.stfName("poi_n","base_poi_building")

    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####

    return building
"content_hash": "4bd6037af78837d94959410cdd55c54e",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 76,
"avg_line_length": 23.846153846153847,
"alnum_prop": 0.6967741935483871,
"repo_name": "obi-two/Rebelion",
"id": "80f7d02bd37e65623ab37e4860fbd01974a8f486",
"size": "455",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "data/scripts/templates/object/building/poi/shared_corellia_beldonna_large2.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11818"
},
{
"name": "C",
"bytes": "7699"
},
{
"name": "C++",
"bytes": "2293610"
},
{
"name": "CMake",
"bytes": "39727"
},
{
"name": "PLSQL",
"bytes": "42065"
},
{
"name": "Python",
"bytes": "7499185"
},
{
"name": "SQLPL",
"bytes": "41864"
}
],
"symlink_target": ""
} |
import json
from django.urls import reverse
from netaddr import IPNetwork
from rest_framework import status
from dcim.models import Device, DeviceRole, DeviceType, Interface, Manufacturer, Site
from ipam.choices import *
from ipam.models import *
from tenancy.models import Tenant
from utilities.testing import APITestCase, APIViewTestCases, create_test_device, disable_warnings
class AppTest(APITestCase):

    def test_root(self):
        """The IPAM API root must be reachable."""
        root_url = reverse('ipam-api:api-root')
        response = self.client.get('{}?format=api'.format(root_url), **self.header)

        self.assertEqual(response.status_code, 200)
class ASNTest(APIViewTestCases.APIViewTestCase):
    # Standard CRUD API tests for the ASN model.
    model = ASN
    brief_fields = ['asn', 'display', 'id', 'url']
    bulk_update_data = {
        'description': 'New description',
    }

    @classmethod
    def setUpTestData(cls):
        # Two private-use RIRs (RFC 6996 16-bit / RFC 7300 ranges) own the ASNs
        rirs = [
            RIR.objects.create(name='RFC 6996', slug='rfc-6996', description='Private Use', is_private=True),
            RIR.objects.create(name='RFC 7300', slug='rfc-7300', description='IANA Use', is_private=True),
        ]
        sites = [
            Site.objects.create(name='Site 1', slug='site-1'),
            Site.objects.create(name='Site 2', slug='site-2')
        ]
        tenants = [
            Tenant.objects.create(name='Tenant 1', slug='tenant-1'),
            Tenant.objects.create(name='Tenant 2', slug='tenant-2'),
        ]

        asns = (
            ASN(asn=64513, rir=rirs[0], tenant=tenants[0]),
            ASN(asn=65534, rir=rirs[0], tenant=tenants[1]),
            ASN(asn=4200000000, rir=rirs[0], tenant=tenants[0]),
            ASN(asn=4200002301, rir=rirs[1], tenant=tenants[1]),
        )
        ASN.objects.bulk_create(asns)

        # M2M site assignments must happen after the ASNs exist
        asns[0].sites.set([sites[0]])
        asns[1].sites.set([sites[1]])
        asns[2].sites.set([sites[0]])
        asns[3].sites.set([sites[1]])

        cls.create_data = [
            {
                'asn': 64512,
                'rir': rirs[0].pk,
            },
            {
                'asn': 65543,
                'rir': rirs[0].pk,
            },
            {
                'asn': 4294967294,
                'rir': rirs[0].pk,
            },
        ]
class VRFTest(APIViewTestCases.APIViewTestCase):
    # Standard CRUD API tests for the VRF model.
    model = VRF
    brief_fields = ['display', 'id', 'name', 'prefix_count', 'rd', 'url']
    create_data = [
        {
            'name': 'VRF 4',
            'rd': '65000:4',
        },
        {
            'name': 'VRF 5',
            'rd': '65000:5',
        },
        {
            'name': 'VRF 6',
            'rd': '65000:6',
        },
    ]
    bulk_update_data = {
        'description': 'New description',
    }

    @classmethod
    def setUpTestData(cls):
        vrfs = (
            VRF(name='VRF 1', rd='65000:1'),
            VRF(name='VRF 2', rd='65000:2'),
            VRF(name='VRF 3'),  # No RD
        )
        VRF.objects.bulk_create(vrfs)
class RouteTargetTest(APIViewTestCases.APIViewTestCase):
    # Standard CRUD API tests for the RouteTarget model.
    model = RouteTarget
    brief_fields = ['display', 'id', 'name', 'url']
    create_data = [
        {
            'name': '65000:1004',
        },
        {
            'name': '65000:1005',
        },
        {
            'name': '65000:1006',
        },
    ]
    bulk_update_data = {
        'description': 'New description',
    }

    @classmethod
    def setUpTestData(cls):
        route_targets = (
            RouteTarget(name='65000:1001'),
            RouteTarget(name='65000:1002'),
            RouteTarget(name='65000:1003'),
        )
        RouteTarget.objects.bulk_create(route_targets)
class RIRTest(APIViewTestCases.APIViewTestCase):
    # Standard CRUD API tests for the RIR model.
    model = RIR
    brief_fields = ['aggregate_count', 'display', 'id', 'name', 'slug', 'url']
    create_data = [
        {
            'name': 'RIR 4',
            'slug': 'rir-4',
        },
        {
            'name': 'RIR 5',
            'slug': 'rir-5',
        },
        {
            'name': 'RIR 6',
            'slug': 'rir-6',
        },
    ]
    bulk_update_data = {
        'description': 'New description',
    }

    @classmethod
    def setUpTestData(cls):
        rirs = (
            RIR(name='RIR 1', slug='rir-1'),
            RIR(name='RIR 2', slug='rir-2'),
            RIR(name='RIR 3', slug='rir-3'),
        )
        RIR.objects.bulk_create(rirs)
class AggregateTest(APIViewTestCases.APIViewTestCase):
    # Standard CRUD API tests for the Aggregate model.
    model = Aggregate
    brief_fields = ['display', 'family', 'id', 'prefix', 'url']
    bulk_update_data = {
        'description': 'New description',
    }

    @classmethod
    def setUpTestData(cls):
        rirs = (
            RIR(name='RIR 1', slug='rir-1'),
            RIR(name='RIR 2', slug='rir-2'),
        )
        RIR.objects.bulk_create(rirs)

        # Pre-existing aggregates (the RFC 1918 ranges) under the first RIR
        aggregates = (
            Aggregate(prefix=IPNetwork('10.0.0.0/8'), rir=rirs[0]),
            Aggregate(prefix=IPNetwork('172.16.0.0/12'), rir=rirs[0]),
            Aggregate(prefix=IPNetwork('192.168.0.0/16'), rir=rirs[0]),
        )
        Aggregate.objects.bulk_create(aggregates)

        cls.create_data = [
            {
                'prefix': '100.0.0.0/8',
                'rir': rirs[1].pk,
            },
            {
                'prefix': '101.0.0.0/8',
                'rir': rirs[1].pk,
            },
            {
                'prefix': '102.0.0.0/8',
                'rir': rirs[1].pk,
            },
        ]
class RoleTest(APIViewTestCases.APIViewTestCase):
    # Standard CRUD API tests for the (prefix/VLAN) Role model.
    model = Role
    brief_fields = ['display', 'id', 'name', 'prefix_count', 'slug', 'url', 'vlan_count']
    create_data = [
        {
            'name': 'Role 4',
            'slug': 'role-4',
        },
        {
            'name': 'Role 5',
            'slug': 'role-5',
        },
        {
            'name': 'Role 6',
            'slug': 'role-6',
        },
    ]
    bulk_update_data = {
        'description': 'New description',
    }

    @classmethod
    def setUpTestData(cls):
        roles = (
            Role(name='Role 1', slug='role-1'),
            Role(name='Role 2', slug='role-2'),
            Role(name='Role 3', slug='role-3'),
        )
        Role.objects.bulk_create(roles)
class PrefixTest(APIViewTestCases.APIViewTestCase):
    """API view tests for the Prefix model, including the available-prefixes
    and available-ips sub-endpoints."""
    model = Prefix
    brief_fields = ['_depth', 'display', 'family', 'id', 'prefix', 'url']
    create_data = [
        {
            'prefix': '192.168.4.0/24',
        },
        {
            'prefix': '192.168.5.0/24',
        },
        {
            'prefix': '192.168.6.0/24',
        },
    ]
    bulk_update_data = {
        'description': 'New description',
    }
    @classmethod
    def setUpTestData(cls):
        """Seed three existing prefixes for the generic test cases."""
        prefixes = (
            Prefix(prefix=IPNetwork('192.168.1.0/24')),
            Prefix(prefix=IPNetwork('192.168.2.0/24')),
            Prefix(prefix=IPNetwork('192.168.3.0/24')),
        )
        Prefix.objects.bulk_create(prefixes)
    def test_list_available_prefixes(self):
        """
        Test retrieval of all available prefixes within a parent prefix.
        """
        vrf = VRF.objects.create(name='VRF 1')
        prefix = Prefix.objects.create(prefix=IPNetwork('192.0.2.0/24'), vrf=vrf)
        Prefix.objects.create(prefix=IPNetwork('192.0.2.64/26'), vrf=vrf)
        Prefix.objects.create(prefix=IPNetwork('192.0.2.192/27'), vrf=vrf)
        url = reverse('ipam-api:prefix-available-prefixes', kwargs={'pk': prefix.pk})
        self.add_permissions('ipam.view_prefix')
        # Retrieve all available IPs
        response = self.client.get(url, **self.header)
        # The gaps left in 192.0.2.0/24 after carving out the two child prefixes
        available_prefixes = ['192.0.2.0/26', '192.0.2.128/26', '192.0.2.224/27']
        for i, p in enumerate(response.data):
            self.assertEqual(p['prefix'], available_prefixes[i])
    def test_create_single_available_prefix(self):
        """
        Test retrieval of the first available prefix within a parent prefix.
        """
        vrf = VRF.objects.create(name='VRF 1')
        prefix = Prefix.objects.create(prefix=IPNetwork('192.0.2.0/28'), vrf=vrf, is_pool=True)
        url = reverse('ipam-api:prefix-available-prefixes', kwargs={'pk': prefix.pk})
        self.add_permissions('ipam.view_prefix', 'ipam.add_prefix')
        # Create four available prefixes with individual requests
        prefixes_to_be_created = [
            '192.0.2.0/30',
            '192.0.2.4/30',
            '192.0.2.8/30',
            '192.0.2.12/30',
        ]
        for i in range(4):
            data = {
                'prefix_length': 30,
                'description': 'Test Prefix {}'.format(i + 1)
            }
            response = self.client.post(url, data, format='json', **self.header)
            self.assertHttpStatus(response, status.HTTP_201_CREATED)
            self.assertEqual(response.data['prefix'], prefixes_to_be_created[i])
            # The created prefix inherits the parent's VRF
            self.assertEqual(response.data['vrf']['id'], vrf.pk)
            self.assertEqual(response.data['description'], data['description'])
        # Try to create one more prefix
        response = self.client.post(url, {'prefix_length': 30}, format='json', **self.header)
        self.assertHttpStatus(response, status.HTTP_409_CONFLICT)
        self.assertIn('detail', response.data)
        # Try to create invalid prefix type
        response = self.client.post(url, {'prefix_length': '30'}, format='json', **self.header)
        self.assertHttpStatus(response, status.HTTP_400_BAD_REQUEST)
        self.assertIn('prefix_length', response.data[0])
    def test_create_multiple_available_prefixes(self):
        """
        Test the creation of available prefixes within a parent prefix.
        """
        vrf = VRF.objects.create(name='VRF 1')
        prefix = Prefix.objects.create(prefix=IPNetwork('192.0.2.0/28'), vrf=vrf, is_pool=True)
        url = reverse('ipam-api:prefix-available-prefixes', kwargs={'pk': prefix.pk})
        self.add_permissions('ipam.view_prefix', 'ipam.add_prefix')
        # Try to create five /30s (only four are available)
        data = [
            {'prefix_length': 30, 'description': 'Prefix 1'},
            {'prefix_length': 30, 'description': 'Prefix 2'},
            {'prefix_length': 30, 'description': 'Prefix 3'},
            {'prefix_length': 30, 'description': 'Prefix 4'},
            {'prefix_length': 30, 'description': 'Prefix 5'},
        ]
        response = self.client.post(url, data, format='json', **self.header)
        self.assertHttpStatus(response, status.HTTP_409_CONFLICT)
        self.assertIn('detail', response.data)
        # Verify that no prefixes were created (the entire /28 is still available)
        response = self.client.get(url, **self.header)
        self.assertHttpStatus(response, status.HTTP_200_OK)
        self.assertEqual(response.data[0]['prefix'], '192.0.2.0/28')
        # Create four /30s in a single request
        response = self.client.post(url, data[:4], format='json', **self.header)
        self.assertHttpStatus(response, status.HTTP_201_CREATED)
        self.assertEqual(len(response.data), 4)
    def test_list_available_ips(self):
        """
        Test retrieval of all available IP addresses within a parent prefix.
        """
        vrf = VRF.objects.create(name='VRF 1')
        prefix = Prefix.objects.create(prefix=IPNetwork('192.0.2.0/29'), vrf=vrf, is_pool=True)
        url = reverse('ipam-api:prefix-available-ips', kwargs={'pk': prefix.pk})
        self.add_permissions('ipam.view_prefix', 'ipam.view_ipaddress')
        # Retrieve all available IPs
        response = self.client.get(url, **self.header)
        self.assertHttpStatus(response, status.HTTP_200_OK)
        self.assertEqual(len(response.data), 8)  # 8 because prefix.is_pool = True
        # Change the prefix to not be a pool and try again
        prefix.is_pool = False
        prefix.save()
        response = self.client.get(url, **self.header)
        # Network and broadcast addresses are excluded when not a pool
        self.assertEqual(len(response.data), 6)  # 8 - 2 because prefix.is_pool = False
    def test_create_single_available_ip(self):
        """
        Test retrieval of the first available IP address within a parent prefix.
        """
        vrf = VRF.objects.create(name='VRF 1')
        prefix = Prefix.objects.create(prefix=IPNetwork('192.0.2.0/30'), vrf=vrf, is_pool=True)
        url = reverse('ipam-api:prefix-available-ips', kwargs={'pk': prefix.pk})
        self.add_permissions('ipam.view_prefix', 'ipam.add_ipaddress')
        # Create all four available IPs with individual requests
        for i in range(1, 5):
            data = {
                'description': 'Test IP {}'.format(i)
            }
            response = self.client.post(url, data, format='json', **self.header)
            self.assertHttpStatus(response, status.HTTP_201_CREATED)
            self.assertEqual(response.data['vrf']['id'], vrf.pk)
            self.assertEqual(response.data['description'], data['description'])
        # Try to create one more IP
        response = self.client.post(url, {}, **self.header)
        self.assertHttpStatus(response, status.HTTP_409_CONFLICT)
        self.assertIn('detail', response.data)
    def test_create_multiple_available_ips(self):
        """
        Test the creation of available IP addresses within a parent prefix.
        """
        vrf = VRF.objects.create(name='VRF 1')
        prefix = Prefix.objects.create(prefix=IPNetwork('192.0.2.0/29'), vrf=vrf, is_pool=True)
        url = reverse('ipam-api:prefix-available-ips', kwargs={'pk': prefix.pk})
        self.add_permissions('ipam.view_prefix', 'ipam.add_ipaddress')
        # Try to create nine IPs (only eight are available)
        data = [{'description': f'Test IP {i}'} for i in range(1, 10)]  # 9 IPs
        response = self.client.post(url, data, format='json', **self.header)
        self.assertHttpStatus(response, status.HTTP_409_CONFLICT)
        self.assertIn('detail', response.data)
        # Create all eight available IPs in a single request
        data = [{'description': 'Test IP {}'.format(i)} for i in range(1, 9)]  # 8 IPs
        response = self.client.post(url, data, format='json', **self.header)
        self.assertHttpStatus(response, status.HTTP_201_CREATED)
        self.assertEqual(len(response.data), 8)
class IPRangeTest(APIViewTestCases.APIViewTestCase):
    """API view tests for the IPRange model, including the available-ips
    sub-endpoint."""
    model = IPRange
    brief_fields = ['display', 'end_address', 'family', 'id', 'start_address', 'url']
    create_data = [
        {
            'start_address': '192.168.4.10/24',
            'end_address': '192.168.4.50/24',
        },
        {
            'start_address': '192.168.5.10/24',
            'end_address': '192.168.5.50/24',
        },
        {
            'start_address': '192.168.6.10/24',
            'end_address': '192.168.6.50/24',
        },
    ]
    bulk_update_data = {
        'description': 'New description',
    }
    @classmethod
    def setUpTestData(cls):
        """Seed three existing IP ranges for the generic test cases."""
        ip_ranges = (
            IPRange(start_address=IPNetwork('192.168.1.10/24'), end_address=IPNetwork('192.168.1.50/24'), size=51),
            IPRange(start_address=IPNetwork('192.168.2.10/24'), end_address=IPNetwork('192.168.2.50/24'), size=51),
            IPRange(start_address=IPNetwork('192.168.3.10/24'), end_address=IPNetwork('192.168.3.50/24'), size=51),
        )
        IPRange.objects.bulk_create(ip_ranges)
    def test_list_available_ips(self):
        """
        Test retrieval of all available IP addresses within a parent IP range.
        """
        iprange = IPRange.objects.create(
            start_address=IPNetwork('192.0.2.10/24'),
            end_address=IPNetwork('192.0.2.19/24')
        )
        url = reverse('ipam-api:iprange-available-ips', kwargs={'pk': iprange.pk})
        self.add_permissions('ipam.view_iprange', 'ipam.view_ipaddress')
        # Retrieve all available IPs
        response = self.client.get(url, **self.header)
        self.assertHttpStatus(response, status.HTTP_200_OK)
        # .10 through .19 inclusive
        self.assertEqual(len(response.data), 10)
    def test_create_single_available_ip(self):
        """
        Test retrieval of the first available IP address within a parent IP range.
        """
        vrf = VRF.objects.create(name='Test VRF 1', rd='1234')
        iprange = IPRange.objects.create(
            start_address=IPNetwork('192.0.2.1/24'),
            end_address=IPNetwork('192.0.2.3/24'),
            vrf=vrf
        )
        url = reverse('ipam-api:iprange-available-ips', kwargs={'pk': iprange.pk})
        self.add_permissions('ipam.view_iprange', 'ipam.add_ipaddress')
        # Create all three available IPs with individual requests
        for i in range(1, 4):
            data = {
                'description': f'Test IP #{i}'
            }
            response = self.client.post(url, data, format='json', **self.header)
            self.assertHttpStatus(response, status.HTTP_201_CREATED)
            # The created IP inherits the range's VRF
            self.assertEqual(response.data['vrf']['id'], vrf.pk)
            self.assertEqual(response.data['description'], data['description'])
        # Try to create one more IP
        response = self.client.post(url, {}, **self.header)
        self.assertHttpStatus(response, status.HTTP_409_CONFLICT)
        self.assertIn('detail', response.data)
    def test_create_multiple_available_ips(self):
        """
        Test the creation of available IP addresses within a parent IP range.
        """
        iprange = IPRange.objects.create(
            start_address=IPNetwork('192.0.2.1/24'),
            end_address=IPNetwork('192.0.2.8/24')
        )
        url = reverse('ipam-api:iprange-available-ips', kwargs={'pk': iprange.pk})
        self.add_permissions('ipam.view_iprange', 'ipam.add_ipaddress')
        # Try to create nine IPs (only eight are available)
        data = [{'description': f'Test IP #{i}'} for i in range(1, 10)]  # 9 IPs
        response = self.client.post(url, data, format='json', **self.header)
        self.assertHttpStatus(response, status.HTTP_409_CONFLICT)
        self.assertIn('detail', response.data)
        # Create all eight available IPs in a single request
        data = [{'description': f'Test IP #{i}'} for i in range(1, 9)]  # 8 IPs
        response = self.client.post(url, data, format='json', **self.header)
        self.assertHttpStatus(response, status.HTTP_201_CREATED)
        self.assertEqual(len(response.data), 8)
class IPAddressTest(APIViewTestCases.APIViewTestCase):
    """API view tests for the IPAddress model."""
    model = IPAddress
    brief_fields = ['address', 'display', 'family', 'id', 'url']
    # Payloads for the generic create tests: three new addresses.
    create_data = [
        {'address': f'192.168.0.{host}/24'}
        for host in (4, 5, 6)
    ]
    bulk_update_data = {
        'description': 'New description',
    }
    @classmethod
    def setUpTestData(cls):
        """Seed three existing IP addresses for the generic test cases."""
        IPAddress.objects.bulk_create(
            [IPAddress(address=IPNetwork(f'192.168.0.{host}/24')) for host in (1, 2, 3)]
        )
class FHRPGroupTest(APIViewTestCases.APIViewTestCase):
    """API view tests for the FHRPGroup model."""
    model = FHRPGroup
    brief_fields = ['display', 'group_id', 'id', 'protocol', 'url']
    bulk_update_data = {
        'protocol': FHRPGroupProtocolChoices.PROTOCOL_GLBP,
        'group_id': 200,
        'auth_type': FHRPGroupAuthTypeChoices.AUTHENTICATION_MD5,
        'auth_key': 'foobarbaz999',
        'description': 'New description',
    }
    @classmethod
    def setUpTestData(cls):
        """Seed three FHRP groups (with and without authentication)."""
        fhrp_groups = (
            FHRPGroup(protocol=FHRPGroupProtocolChoices.PROTOCOL_VRRP2, group_id=10, auth_type=FHRPGroupAuthTypeChoices.AUTHENTICATION_PLAINTEXT, auth_key='foobar123'),
            FHRPGroup(protocol=FHRPGroupProtocolChoices.PROTOCOL_VRRP3, group_id=20, auth_type=FHRPGroupAuthTypeChoices.AUTHENTICATION_MD5, auth_key='foobar123'),
            FHRPGroup(protocol=FHRPGroupProtocolChoices.PROTOCOL_HSRP, group_id=30),
        )
        FHRPGroup.objects.bulk_create(fhrp_groups)
        cls.create_data = [
            {
                'protocol': FHRPGroupProtocolChoices.PROTOCOL_VRRP2,
                'group_id': 110,
                'auth_type': FHRPGroupAuthTypeChoices.AUTHENTICATION_PLAINTEXT,
                'auth_key': 'foobar123',
            },
            {
                'protocol': FHRPGroupProtocolChoices.PROTOCOL_VRRP3,
                'group_id': 120,
                'auth_type': FHRPGroupAuthTypeChoices.AUTHENTICATION_MD5,
                'auth_key': 'barfoo456',
            },
            {
                'protocol': FHRPGroupProtocolChoices.PROTOCOL_GLBP,
                'group_id': 130,
            },
        ]
class FHRPGroupAssignmentTest(APIViewTestCases.APIViewTestCase):
    """API view tests for assigning FHRP groups to device interfaces."""
    model = FHRPGroupAssignment
    brief_fields = ['display', 'group_id', 'id', 'interface_id', 'interface_type', 'priority', 'url']
    bulk_update_data = {
        'priority': 100,
    }
    @classmethod
    def setUpTestData(cls):
        """Create devices, interfaces, IPs, FHRP groups, and six existing
        assignments; the interfaces on device3 are left unassigned so the
        create tests can use them."""
        device1 = create_test_device('device1')
        device2 = create_test_device('device2')
        device3 = create_test_device('device3')
        interfaces = (
            Interface(device=device1, name='eth0', type='other'),
            Interface(device=device1, name='eth1', type='other'),
            Interface(device=device1, name='eth2', type='other'),
            Interface(device=device2, name='eth0', type='other'),
            Interface(device=device2, name='eth1', type='other'),
            Interface(device=device2, name='eth2', type='other'),
            Interface(device=device3, name='eth0', type='other'),
            Interface(device=device3, name='eth1', type='other'),
            Interface(device=device3, name='eth2', type='other'),
        )
        Interface.objects.bulk_create(interfaces)
        ip_addresses = (
            IPAddress(address=IPNetwork('192.168.0.2/24'), assigned_object=interfaces[0]),
            IPAddress(address=IPNetwork('192.168.1.2/24'), assigned_object=interfaces[1]),
            IPAddress(address=IPNetwork('192.168.2.2/24'), assigned_object=interfaces[2]),
            IPAddress(address=IPNetwork('192.168.0.3/24'), assigned_object=interfaces[3]),
            IPAddress(address=IPNetwork('192.168.1.3/24'), assigned_object=interfaces[4]),
            IPAddress(address=IPNetwork('192.168.2.3/24'), assigned_object=interfaces[5]),
            IPAddress(address=IPNetwork('192.168.0.4/24'), assigned_object=interfaces[6]),
            IPAddress(address=IPNetwork('192.168.1.4/24'), assigned_object=interfaces[7]),
            IPAddress(address=IPNetwork('192.168.2.4/24'), assigned_object=interfaces[8]),
        )
        IPAddress.objects.bulk_create(ip_addresses)
        fhrp_groups = (
            FHRPGroup(protocol=FHRPGroupProtocolChoices.PROTOCOL_VRRP2, group_id=10),
            FHRPGroup(protocol=FHRPGroupProtocolChoices.PROTOCOL_VRRP2, group_id=20),
            FHRPGroup(protocol=FHRPGroupProtocolChoices.PROTOCOL_VRRP2, group_id=30),
        )
        FHRPGroup.objects.bulk_create(fhrp_groups)
        fhrp_group_assignments = (
            FHRPGroupAssignment(group=fhrp_groups[0], interface=interfaces[0], priority=10),
            FHRPGroupAssignment(group=fhrp_groups[1], interface=interfaces[1], priority=10),
            FHRPGroupAssignment(group=fhrp_groups[2], interface=interfaces[2], priority=10),
            FHRPGroupAssignment(group=fhrp_groups[0], interface=interfaces[3], priority=20),
            FHRPGroupAssignment(group=fhrp_groups[1], interface=interfaces[4], priority=20),
            FHRPGroupAssignment(group=fhrp_groups[2], interface=interfaces[5], priority=20),
        )
        FHRPGroupAssignment.objects.bulk_create(fhrp_group_assignments)
        # create_data is assigned here because it references the PKs of
        # objects created above
        cls.create_data = [
            {
                'group': fhrp_groups[0].pk,
                'interface_type': 'dcim.interface',
                'interface_id': interfaces[6].pk,
                'priority': 30,
            },
            {
                'group': fhrp_groups[1].pk,
                'interface_type': 'dcim.interface',
                'interface_id': interfaces[7].pk,
                'priority': 30,
            },
            {
                'group': fhrp_groups[2].pk,
                'interface_type': 'dcim.interface',
                'interface_id': interfaces[8].pk,
                'priority': 30,
            },
        ]
class VLANGroupTest(APIViewTestCases.APIViewTestCase):
    """API view tests for the VLANGroup model, including the available-vlans
    sub-endpoint."""
    model = VLANGroup
    brief_fields = ['display', 'id', 'name', 'slug', 'url', 'vlan_count']
    create_data = [
        {
            'name': 'VLAN Group 4',
            'slug': 'vlan-group-4',
        },
        {
            'name': 'VLAN Group 5',
            'slug': 'vlan-group-5',
        },
        {
            'name': 'VLAN Group 6',
            'slug': 'vlan-group-6',
        },
    ]
    bulk_update_data = {
        'description': 'New description',
    }
    @classmethod
    def setUpTestData(cls):
        """Seed three existing VLAN groups for the generic test cases."""
        vlan_groups = (
            VLANGroup(name='VLAN Group 1', slug='vlan-group-1'),
            VLANGroup(name='VLAN Group 2', slug='vlan-group-2'),
            VLANGroup(name='VLAN Group 3', slug='vlan-group-3'),
        )
        VLANGroup.objects.bulk_create(vlan_groups)
    def test_list_available_vlans(self):
        """
        Test retrieval of all available VLANs within a group.
        """
        self.add_permissions('ipam.view_vlangroup', 'ipam.view_vlan')
        vlangroup = VLANGroup.objects.first()
        vlans = (
            VLAN(vid=10, name='VLAN 10', group=vlangroup),
            VLAN(vid=20, name='VLAN 20', group=vlangroup),
            VLAN(vid=30, name='VLAN 30', group=vlangroup),
        )
        VLAN.objects.bulk_create(vlans)
        # Retrieve all available VLANs
        url = reverse('ipam-api:vlangroup-available-vlans', kwargs={'pk': vlangroup.pk})
        response = self.client.get(url, **self.header)
        # 4094 possible VIDs minus the three already taken
        self.assertEqual(len(response.data), 4094 - len(vlans))
        available_vlans = {vlan['vid'] for vlan in response.data}
        for vlan in vlans:
            self.assertNotIn(vlan.vid, available_vlans)
    def test_create_single_available_vlan(self):
        """
        Test the creation of a single available VLAN.
        """
        self.add_permissions('ipam.view_vlangroup', 'ipam.view_vlan', 'ipam.add_vlan')
        vlangroup = VLANGroup.objects.first()
        VLAN.objects.create(vid=1, name='VLAN 1', group=vlangroup)
        data = {
            "name": "First VLAN",
        }
        url = reverse('ipam-api:vlangroup-available-vlans', kwargs={'pk': vlangroup.pk})
        response = self.client.post(url, data, format='json', **self.header)
        self.assertHttpStatus(response, status.HTTP_201_CREATED)
        self.assertEqual(response.data['name'], data['name'])
        self.assertEqual(response.data['group']['id'], vlangroup.pk)
        # VID 1 is taken, so the first available VID is 2
        self.assertEqual(response.data['vid'], 2)
    def test_create_multiple_available_vlans(self):
        """
        Test the creation of multiple available VLANs.
        """
        self.add_permissions('ipam.view_vlangroup', 'ipam.view_vlan', 'ipam.add_vlan')
        vlangroup = VLANGroup.objects.first()
        # Odd VIDs are taken, leaving 2, 4, 6 as the first available VIDs
        vlans = (
            VLAN(vid=1, name='VLAN 1', group=vlangroup),
            VLAN(vid=3, name='VLAN 3', group=vlangroup),
            VLAN(vid=5, name='VLAN 5', group=vlangroup),
        )
        VLAN.objects.bulk_create(vlans)
        data = (
            {"name": "First VLAN"},
            {"name": "Second VLAN"},
            {"name": "Third VLAN"},
        )
        url = reverse('ipam-api:vlangroup-available-vlans', kwargs={'pk': vlangroup.pk})
        response = self.client.post(url, data, format='json', **self.header)
        self.assertHttpStatus(response, status.HTTP_201_CREATED)
        self.assertEqual(len(response.data), 3)
        self.assertEqual(response.data[0]['name'], data[0]['name'])
        self.assertEqual(response.data[0]['group']['id'], vlangroup.pk)
        self.assertEqual(response.data[0]['vid'], 2)
        self.assertEqual(response.data[1]['name'], data[1]['name'])
        self.assertEqual(response.data[1]['group']['id'], vlangroup.pk)
        self.assertEqual(response.data[1]['vid'], 4)
        self.assertEqual(response.data[2]['name'], data[2]['name'])
        self.assertEqual(response.data[2]['group']['id'], vlangroup.pk)
        self.assertEqual(response.data[2]['vid'], 6)
class VLANTest(APIViewTestCases.APIViewTestCase):
    """API view tests for the VLAN model."""
    model = VLAN
    brief_fields = ['display', 'id', 'name', 'url', 'vid']
    bulk_update_data = {
        'description': 'New description',
    }
    @classmethod
    def setUpTestData(cls):
        """Seed VLAN groups and VLANs for the generic test cases."""
        vlan_groups = (
            VLANGroup(name='VLAN Group 1', slug='vlan-group-1'),
            VLANGroup(name='VLAN Group 2', slug='vlan-group-2'),
        )
        VLANGroup.objects.bulk_create(vlan_groups)
        vlans = (
            VLAN(name='VLAN 1', vid=1, group=vlan_groups[0]),
            VLAN(name='VLAN 2', vid=2, group=vlan_groups[0]),
            VLAN(name='VLAN 3', vid=3, group=vlan_groups[0]),
        )
        VLAN.objects.bulk_create(vlans)
        # create_data is assigned here because it references the PK of the
        # second VLAN group created above
        cls.create_data = [
            {
                'vid': 4,
                'name': 'VLAN 4',
                'group': vlan_groups[1].pk,
            },
            {
                'vid': 5,
                'name': 'VLAN 5',
                'group': vlan_groups[1].pk,
            },
            {
                'vid': 6,
                'name': 'VLAN 6',
                'group': vlan_groups[1].pk,
            },
        ]
    def test_delete_vlan_with_prefix(self):
        """
        Attempt and fail to delete a VLAN with a Prefix assigned to it.
        """
        vlan = VLAN.objects.first()
        Prefix.objects.create(prefix=IPNetwork('192.0.2.0/24'), vlan=vlan)
        self.add_permissions('ipam.delete_vlan')
        url = reverse('ipam-api:vlan-detail', kwargs={'pk': vlan.pk})
        with disable_warnings('django.request'):
            response = self.client.delete(url, **self.header)
        # Deletion is blocked because the prefix protects the VLAN
        self.assertHttpStatus(response, status.HTTP_409_CONFLICT)
        content = json.loads(response.content.decode('utf-8'))
        self.assertIn('detail', content)
        self.assertTrue(content['detail'].startswith('Unable to delete object.'))
class ServiceTemplateTest(APIViewTestCases.APIViewTestCase):
    """API view tests for the ServiceTemplate model."""
    model = ServiceTemplate
    brief_fields = ['display', 'id', 'name', 'ports', 'protocol', 'url']
    bulk_update_data = {
        'description': 'New description',
    }
    @classmethod
    def setUpTestData(cls):
        """Seed three existing service templates for the generic test cases."""
        service_templates = (
            ServiceTemplate(name='Service Template 1', protocol=ServiceProtocolChoices.PROTOCOL_TCP, ports=[1, 2]),
            ServiceTemplate(name='Service Template 2', protocol=ServiceProtocolChoices.PROTOCOL_TCP, ports=[3, 4]),
            ServiceTemplate(name='Service Template 3', protocol=ServiceProtocolChoices.PROTOCOL_TCP, ports=[5, 6]),
        )
        ServiceTemplate.objects.bulk_create(service_templates)
        cls.create_data = [
            {
                'name': 'Service Template 4',
                'protocol': ServiceProtocolChoices.PROTOCOL_TCP,
                'ports': [7, 8],
            },
            {
                'name': 'Service Template 5',
                'protocol': ServiceProtocolChoices.PROTOCOL_TCP,
                'ports': [9, 10],
            },
            {
                'name': 'Service Template 6',
                'protocol': ServiceProtocolChoices.PROTOCOL_TCP,
                'ports': [11, 12],
            },
        ]
class ServiceTest(APIViewTestCases.APIViewTestCase):
    """API view tests for the Service model."""
    model = Service
    brief_fields = ['display', 'id', 'name', 'ports', 'protocol', 'url']
    bulk_update_data = {
        'description': 'New description',
    }
    @classmethod
    def setUpTestData(cls):
        """Create the device scaffolding and three existing services."""
        site = Site.objects.create(name='Site 1', slug='site-1')
        manufacturer = Manufacturer.objects.create(name='Manufacturer 1', slug='manufacturer-1')
        devicetype = DeviceType.objects.create(manufacturer=manufacturer, model='Device Type 1')
        devicerole = DeviceRole.objects.create(name='Device Role 1', slug='device-role-1')
        devices = (
            Device(name='Device 1', site=site, device_type=devicetype, device_role=devicerole),
            Device(name='Device 2', site=site, device_type=devicetype, device_role=devicerole),
        )
        Device.objects.bulk_create(devices)
        services = (
            Service(device=devices[0], name='Service 1', protocol=ServiceProtocolChoices.PROTOCOL_TCP, ports=[1]),
            Service(device=devices[0], name='Service 2', protocol=ServiceProtocolChoices.PROTOCOL_TCP, ports=[2]),
            Service(device=devices[0], name='Service 3', protocol=ServiceProtocolChoices.PROTOCOL_TCP, ports=[3]),
        )
        Service.objects.bulk_create(services)
        # create_data is assigned here because it references the PK of the
        # second device created above
        cls.create_data = [
            {
                'device': devices[1].pk,
                'name': 'Service 4',
                'protocol': ServiceProtocolChoices.PROTOCOL_TCP,
                'ports': [4],
            },
            {
                'device': devices[1].pk,
                'name': 'Service 5',
                'protocol': ServiceProtocolChoices.PROTOCOL_TCP,
                'ports': [5],
            },
            {
                'device': devices[1].pk,
                'name': 'Service 6',
                'protocol': ServiceProtocolChoices.PROTOCOL_TCP,
                'ports': [6],
            },
        ]
| {
"content_hash": "6430b89b5bfa55610f3f9770e2ed7db5",
"timestamp": "",
"source": "github",
"line_count": 916,
"max_line_length": 168,
"avg_line_length": 36.6146288209607,
"alnum_prop": 0.5649542323861773,
"repo_name": "digitalocean/netbox",
"id": "d99de6d207189fd00cc608aefbc281ad2f329d5e",
"size": "33539",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "netbox/ipam/tests/test_api.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "189339"
},
{
"name": "HTML",
"bytes": "570800"
},
{
"name": "JavaScript",
"bytes": "326125"
},
{
"name": "Python",
"bytes": "1815170"
},
{
"name": "Shell",
"bytes": "2786"
}
],
"symlink_target": ""
} |
"""Support for monitoring an SABnzbd NZB client."""
import logging
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
from . import DATA_SABNZBD, SENSOR_TYPES, SIGNAL_SABNZBD_UPDATED
_LOGGER = logging.getLogger(__name__)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up the SABnzbd sensors."""
    # Only set up via discovery from the sabnzbd component.
    if discovery_info is None:
        return
    api_data = hass.data[DATA_SABNZBD]
    entities = []
    for sensor_type in api_data.sensors:
        entities.append(SabnzbdSensor(sensor_type, api_data, api_data.name))
    async_add_entities(entities)
class SabnzbdSensor(Entity):
    """Representation of an SABnzbd sensor."""

    def __init__(self, sensor_type, sabnzbd_api_data, client_name):
        """Initialize the sensor.

        sensor_type is a key into SENSOR_TYPES, whose entries provide
        (display name, unit of measurement, API queue field name).
        """
        self._client_name = client_name
        self._field_name = SENSOR_TYPES[sensor_type][2]
        self._name = SENSOR_TYPES[sensor_type][0]
        self._sabnzbd_api = sabnzbd_api_data
        self._state = None
        self._type = sensor_type
        self._unit_of_measurement = SENSOR_TYPES[sensor_type][1]

    async def async_added_to_hass(self):
        """Call when entity about to be added to hass."""
        # Refresh state whenever the sabnzbd component signals new data.
        async_dispatcher_connect(self.hass, SIGNAL_SABNZBD_UPDATED, self.update_state)

    @property
    def name(self):
        """Return the name of the sensor."""
        return "{} {}".format(self._client_name, self._name)

    @property
    def state(self):
        """Return the state of the sensor."""
        return self._state

    @property
    def should_poll(self):
        """Don't poll. Will be updated by dispatcher signal."""
        # BUG FIX: the @property decorator was missing. Home Assistant reads
        # should_poll as an attribute; a bare bound method is always truthy,
        # which would re-enable polling despite the dispatcher-driven updates.
        return False

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement of this entity, if any."""
        return self._unit_of_measurement

    def update_state(self, args):
        """Get the latest data and updates the states."""
        self._state = self._sabnzbd_api.get_queue_field(self._field_name)
        if self._type == "speed":
            # Scale raw speed down by 1024 (presumably KB/s -> MB/s — confirm
            # against the SABnzbd API docs) and round to one decimal place.
            self._state = round(float(self._state) / 1024, 1)
        elif "size" in self._type:
            self._state = round(float(self._state), 2)
        self.schedule_update_ha_state()
| {
"content_hash": "334989acbdceb7a7c5f9c3d2c2c97aa1",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 86,
"avg_line_length": 32.785714285714285,
"alnum_prop": 0.6405228758169934,
"repo_name": "fbradyirl/home-assistant",
"id": "642dd27b1d82e383814b7bddf7d1d7dd0d1b15b1",
"size": "2295",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "homeassistant/components/sabnzbd/sensor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1175"
},
{
"name": "Dockerfile",
"bytes": "1829"
},
{
"name": "Python",
"bytes": "16494727"
},
{
"name": "Ruby",
"bytes": "745"
},
{
"name": "Shell",
"bytes": "17784"
}
],
"symlink_target": ""
} |
import copy
from auto_gen import DBAbstraction as _DBAbstraction
from auto_gen import DBAbstractionRef, DBModule
from id_scope import IdScope
class DBAbstraction(_DBAbstraction):
    """Domain wrapper around the generated DBAbstraction that maintains its
    own IdScope for allocating object identifiers."""
    def __init__(self, *args, **kwargs):
        """Construct the abstraction and attach a fresh id scope."""
        _DBAbstraction.__init__(self, *args, **kwargs)
        # Abstraction references share the module id space.
        remap = {DBAbstractionRef.vtType: DBModule.vtType}
        self.idScope = IdScope(remap=remap)
        self.idScope.setBeginId('action', 1)
    def __copy__(self):
        """Support copy.copy() by delegating to do_copy()."""
        return DBAbstraction.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a copy of this abstraction, re-tagged with this class and
        carrying its own copy of the id scope."""
        duplicate = _DBAbstraction.do_copy(self, new_ids, id_scope, id_remap)
        duplicate.__class__ = DBAbstraction
        # The copy must not share the original's id scope object.
        duplicate.idScope = copy.copy(self.idScope)
        return duplicate
| {
"content_hash": "7e3f38c5609c0acc9ec35a39e2b94890",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 80,
"avg_line_length": 38.7,
"alnum_prop": 0.6795865633074936,
"repo_name": "CMUSV-VisTrails/WorkflowRecommendation",
"id": "f625642ce6589439919c48428d1b2c85ad12ba1c",
"size": "2617",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "vistrails/db/versions/v0_9_3/domain/abstraction.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "57"
},
{
"name": "PHP",
"bytes": "48730"
},
{
"name": "Python",
"bytes": "12760768"
},
{
"name": "Shell",
"bytes": "33785"
},
{
"name": "XSLT",
"bytes": "1090"
}
],
"symlink_target": ""
} |
from setuptools import setup, find_packages


def _read_long_description():
    """Return the contents of README.rst, closing the file afterwards."""
    # BUG FIX: the file handle from open(...).read() was never closed;
    # use a context manager so it is released deterministically.
    with open('README.rst', 'r') as readme:
        return readme.read()


setup(
    name='django_ajax',
    version='0.2.0',
    description='Utility app to support AJAX development in Django',
    author='David Danier',
    author_email='david.danier@team23.de',
    url='https://github.com/ddanier/django_ajax',
    long_description=_read_long_description(),
    packages=[
        'django_ajax',
        'django_ajax.templatetags',
    ],
    install_requires=[
        'Django >=1.4',
    ],
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Environment :: Web Environment',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Utilities'
    ],
)
| {
"content_hash": "53d1d1cabbadb0136e7b9d77718d2ef9",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 70,
"avg_line_length": 29.517241379310345,
"alnum_prop": 0.5864485981308412,
"repo_name": "ddanier/django_ajax",
"id": "6eedd0cf1b1163d41ee82847d78d4e1c8ff3a7ad",
"size": "856",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "8936"
}
],
"symlink_target": ""
} |
"""
Phil Adams http://philadams.net
PNG file parser.
"""
import logging
import struct
SIGNATURE = ('\211PNG\r\n\032\n')
class Chunk(object):
    """A PNG Chunk is composed of 4 fields:
    - a length (4 byte unsigned int)
    - a type (4 bytes, chars)
    - data (length bytes, various)
    - crc (4 bytes)
    Note: all integer values in PNG streams are big endian"""
    # '>L': big-endian unsigned 32-bit chunk length
    _length = struct.Struct('>L')
    # four single characters forming the chunk type code (e.g. 'IHDR')
    _type = struct.Struct('cccc')
    # size of the CRC field in bytes (read raw, not validated)
    _crc = 4
    # chunk type that terminates the PNG stream
    _end_type = 'IEND'
class PNGFile(object):
    """A PNG Image.

    Parses just enough of the chunk stream to expose the IHDR header and
    therefore the image dimensions; parsing stops at the IHDR chunk.

    :param fp: file-like object positioned at the start of a PNG stream
    :raises ValueError: if the stream does not begin with the PNG signature
    """
    def __init__(self, fp):
        self.fp = fp
        self.chunks = []
        self._load()
        # IHDR is required to be the first chunk, so chunks[0] is the header
        self.header = self.chunks[0]['data']
        self.size = (self.header['width'], self.header['height'])
    def _load(self):
        """Verify the PNG signature and parse chunks until IHDR (or IEND)."""
        # confirm PNG format
        magic = self.fp.read(len(SIGNATURE))
        if magic != SIGNATURE:
            # BUG FIX: previously printed a message and called exit(), which
            # killed the whole interpreter (the original even carried a
            # "TODO: raise appropriate exception"). Raise instead so callers
            # can handle malformed input.
            raise ValueError('%r is not a PNG signature' % (magic,))
        self.content_type = 'image/png'
        # parse chunk stream
        l, t, c = Chunk._length, Chunk._type, Chunk._crc
        while True:
            # parse and store a chunk
            chunk = {}
            chunk['length'] = l.unpack(self.fp.read(l.size))[0]
            chunk['type'] = ''.join(t.unpack(self.fp.read(t.size)))
            # dispatch to a type-specific parser when one exists
            # (e.g. IHDR_data); otherwise keep the raw payload bytes
            parse_data = '%s_data' % chunk['type']
            if hasattr(self, parse_data):
                chunk['data'] = getattr(self, parse_data)()
            else:
                chunk['data'] = self.fp.read(chunk['length'])
            chunk['crc'] = self.fp.read(c)
            self.chunks.append(chunk)
            # stop parsing if... we're at the end of the PNG file
            if chunk['type'] == Chunk._end_type:
                break
            # for now, we just need the IHDR chunk
            if chunk['type'] == 'IHDR':
                break
    def IHDR_data(self):
        """Parse the IHDR chunk payload into a dict of header fields."""
        dim = struct.Struct('>L')
        s = struct.Struct('c')
        data = {}
        data['width'] = dim.unpack(self.fp.read(dim.size))[0]
        data['height'] = dim.unpack(self.fp.read(dim.size))[0]
        # remaining fields are single bytes
        data['bit_depth'] = ord(s.unpack(self.fp.read(s.size))[0])
        data['color_type'] = ord(s.unpack(self.fp.read(s.size))[0])
        data['compression_method'] = ord(s.unpack(self.fp.read(s.size))[0])
        data['filter_method'] = ord(s.unpack(self.fp.read(s.size))[0])
        data['interlace_method'] = ord(s.unpack(self.fp.read(s.size))[0])
        return data
| {
"content_hash": "b35ecb5c84e20c124e6b1daf45f1a3ca",
"timestamp": "",
"source": "github",
"line_count": 82,
"max_line_length": 75,
"avg_line_length": 30.658536585365855,
"alnum_prop": 0.5302307080350039,
"repo_name": "philadams/dimensions",
"id": "90cb52e106e38bc68ea5004b8d0868989c12e0fb",
"size": "2561",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dimensions/PNGFile.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "6157"
},
{
"name": "Shell",
"bytes": "995"
}
],
"symlink_target": ""
} |
import anyjson
from urllib import urlencode
from nose.tools import eq_
from allmychanges.models import Changelog, Preview
from django.test import Client
from django.core.urlresolvers import reverse
from allmychanges.tasks import _task_log
from .utils import (refresh, check_status_code,
create_user, put_json, json)
from hamcrest import (
assert_that,
has_entries)
def test_preview():
    """End-to-end check of the changelog preview workflow: opening the
    add-new page creates a Changelog and a Preview, the preview gets
    parsed versions, and re-pointing it at a bad source resets it to the
    'error' state."""
    _task_log[:] = []
    cl = Client()
    eq_(0, Preview.objects.count())
    eq_(0, Changelog.objects.count())
    # when user opens add-new page, a new changelog and preview
    # are created
    source = 'test+samples/very-simple.md'
    cl.get(reverse('add-new') + '?' + urlencode(dict(url=source)))
    eq_(1, Changelog.objects.count())
    eq_(1, Preview.objects.count())
    preview = Preview.objects.all()[0]
    eq_(None, preview.user)
    assert preview.light_user != None
    eq_([('update_preview_task', (1,), {})], _task_log)
    preview_url = reverse('preview', kwargs=dict(pk=preview.pk))
    response = cl.get(preview_url)
    eq_(200, response.status_code)
    assert 'Some <span class="changelog-highlight-fix">bugfix</span>.' in response.content
    assert 'Initial release.' in response.content
    # at this point the preview object must have versions while the
    # changelog has none
    changelog = Changelog.objects.all()[0]
    eq_(0, changelog.versions.count())
    eq_(2, preview.versions.count())
    # check that the preview has a log field and that it is a list
    preview = refresh(preview)
    eq_(6, len(preview.log))
    # now update the preview to point at a nonexistent source
    response = cl.post(preview_url,
                       data=anyjson.serialize(dict(source='test+another source',
                                                   ignore_list='NEWS',
                                                   search_list='docs')),
                       content_type='application/json')
    eq_(200, response.status_code)
    preview = refresh(preview)
    eq_('test+another source', preview.source)
    eq_('NEWS', preview.ignore_list)
    eq_('docs', preview.search_list)
    # and another preview task was scheduled
    eq_([('update_preview_task', (1,), {}),
         ('update_preview_task', (1,), {})], _task_log)
    # the versions should have been deleted
    eq_(0, changelog.versions.count())
    eq_(0, preview.versions.count())
    # and the preview itself should switch to the 'error' status
    eq_('error', preview.status)
def test_update_package_preview_versions():
    """Preview versions are rebuilt by schedule_update() and honour the
    ignore list: a file put on the ignore list stops contributing versions."""
    changelog = Changelog.objects.create(
        namespace='python', name='pip', source='test+samples/markdown-release-notes')
    preview = changelog.previews.create(light_user='anonymous',
                                        source=changelog.source)
    preview.schedule_update()
    # the changelog itself must not get versions, only the preview does
    eq_(0, changelog.versions.filter(preview=None).count())
    def first_line(version):
        # First line of the rendered HTML for a version.
        return version.processed_text.split('\n', 1)[0]
    def first_lines(versions):
        return map(first_line, versions)
    versions = preview.versions.all()
    # the '<ul>' entry comes from the unrelated file picked up by default
    eq_([
        '<ul>',
        u'<h1>0.1.1</h1>', u'<h1>0.1.0</h1>'],
        first_lines(versions))
    # now we'll check if ignore list works
    preview.set_ignore_list(['docs/unrelated-crap.md'])
    preview.save()
    preview.schedule_update()
    versions = preview.versions.all()
    eq_([u'<h1>0.1.1</h1>', u'<h1>0.1.0</h1>'],
        first_lines(versions))
def test_when_preview_saved_versions_are_copied_to_changelog():
    """Saving a changelog through the API moves the preview's versions
    onto the changelog."""
    # this only should happen when changelog is empty
    user = create_user('art')
    changelog = Changelog.objects.create(
        namespace='python', name='pip', source='test+samples/markdown-release-notes')
    changelog.add_to_moderators(user)
    cl = Client()
    cl.login(username='art', password='art')
    # fetch a page first so the light_user cookie gets set on the client
    response = cl.get(reverse('changelog-list'))
    preview = changelog.previews.create(light_user=response.cookies.get('light_user_id').value,
                                        source=changelog.source)
    preview.schedule_update()
    eq_(0, changelog.versions.count())
    eq_(3, preview.versions.count())
    # saving the changelog via a PUT to the API triggers the copy
    response = put_json(cl,
                        reverse('changelog-detail', kwargs=dict(pk=changelog.pk)),
                        expected_code=200,
                        namespace=changelog.namespace,
                        name=changelog.name,
                        source='http://github.com/svetlyak40wt/django-fields',
                        downloader='git.vcs')
    # versions now moved to the changelog
    eq_(3, changelog.versions.count())
    eq_(0, preview.versions.count())
def test_get_preview_details_via_api():
    """A freshly created preview is exposed via the API with 'created'
    status, empty processing status and an empty log."""
    changelog = Changelog.objects.create(
        namespace='python', name='pip', source='test+samples/markdown-release-notes')
    preview = changelog.previews.create(light_user='anonymous',
                                        source=changelog.source)
    cl = Client()
    response = cl.get(reverse('preview-detail', kwargs=dict(pk=preview.pk)))
    check_status_code(200, response)
    data = json(response)
    # only the listed keys are checked; the payload may contain more fields
    assert_that(data,
                has_entries(
                    status='created',
                    processing_status='',
                    log=[]))
| {
"content_hash": "12eb97ad47feb19e768d24b74919ec50",
"timestamp": "",
"source": "github",
"line_count": 156,
"max_line_length": 95,
"avg_line_length": 33.59615384615385,
"alnum_prop": 0.6077084525853844,
"repo_name": "AllMyChanges/allmychanges.com",
"id": "2db130f5fa7b9d25f98278c24273da0d7f4e9891",
"size": "5407",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "allmychanges/tests/preview.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "147634"
},
{
"name": "Dockerfile",
"bytes": "735"
},
{
"name": "Emacs Lisp",
"bytes": "905"
},
{
"name": "HTML",
"bytes": "96639"
},
{
"name": "JavaScript",
"bytes": "2645620"
},
{
"name": "Makefile",
"bytes": "7806"
},
{
"name": "Python",
"bytes": "752509"
},
{
"name": "Shell",
"bytes": "1426"
},
{
"name": "Stylus",
"bytes": "58519"
}
],
"symlink_target": ""
} |
from typing import Dict, List, Optional, Union
import numpy as np
from ConfigSpace.configuration_space import ConfigurationSpace
from autosklearn.constants import (
BINARY_CLASSIFICATION,
MULTICLASS_CLASSIFICATION,
MULTILABEL_CLASSIFICATION,
MULTIOUTPUT_REGRESSION,
REGRESSION_TASKS,
)
from autosklearn.data.abstract_data_manager import AbstractDataManager
from autosklearn.pipeline.classification import SimpleClassificationPipeline
from autosklearn.pipeline.regression import SimpleRegressionPipeline
__all__ = ["get_configuration_space"]
def get_configuration_space(
    datamanager: AbstractDataManager,
    include: Optional[Dict[str, List[str]]] = None,
    exclude: Optional[Dict[str, List[str]]] = None,
    random_state: Optional[Union[int, np.random.RandomState]] = None,
) -> ConfigurationSpace:
    """Get the configuration space of a pipeline given some dataset info.

    Dispatches to the regression or classification builder depending on the
    task type stored in ``datamanager.info["task"]``.

    Parameters
    ----------
    datamanager: AbstractDataManager
        AbstractDataManager object storing all important information about the dataset
    include: Optional[Dict[str, List[str]]] = None
        A dictionary of what components to include for each pipeline step
    exclude: Optional[Dict[str, List[str]]] = None
        A dictionary of what components to exclude for each pipeline step
    random_state: Optional[Union[int, np.random.RandomState]] = None
        The random state to use for seeding the ConfigSpace

    Returns
    -------
    ConfigurationSpace
        The configuration space for the pipeline
    """
    if datamanager.info["task"] in REGRESSION_TASKS:
        return _get_regression_configuration_space(
            datamanager, include, exclude, random_state
        )
    else:
        return _get_classification_configuration_space(
            datamanager, include, exclude, random_state
        )
def _get_regression_configuration_space(
    datamanager: AbstractDataManager,
    include: Optional[Dict[str, List[str]]],
    exclude: Optional[Dict[str, List[str]]],
    random_state: Optional[Union[int, np.random.RandomState]] = None,
) -> ConfigurationSpace:
    """Build the hyperparameter search space for a regression pipeline.

    Parameters
    ----------
    datamanager: AbstractDataManager
        AbstractDataManager object storing all important information about the dataset
    include: Optional[Dict[str, List[str]]]
        A dictionary of what components to include for each pipeline step
    exclude: Optional[Dict[str, List[str]]]
        A dictionary of what components to exclude for each pipeline step
    random_state: Optional[Union[int, np.random.RandomState]] = None
        The random state to use for seeding the ConfigSpace

    Returns
    -------
    ConfigurationSpace
        The configuration space for the regression pipeline
    """
    info = datamanager.info
    # Derive the dataset properties directly as booleans instead of
    # flag variables mutated by if-statements.
    dataset_properties = {
        "multioutput": info["task"] == MULTIOUTPUT_REGRESSION,
        "sparse": info["is_sparse"] == 1,
    }
    pipeline = SimpleRegressionPipeline(
        feat_type=datamanager.feat_type,
        dataset_properties=dataset_properties,
        include=include,
        exclude=exclude,
        random_state=random_state,
    )
    return pipeline.get_hyperparameter_search_space(feat_type=datamanager.feat_type)
def _get_classification_configuration_space(
    datamanager: AbstractDataManager,
    include: Optional[Dict[str, List[str]]],
    exclude: Optional[Dict[str, List[str]]],
    random_state: Optional[Union[int, np.random.RandomState]] = None,
) -> ConfigurationSpace:
    """Build the hyperparameter search space for a classification pipeline.

    Parameters
    ----------
    datamanager: AbstractDataManager
        AbstractDataManager object storing all important information about the dataset
    include: Optional[Dict[str, List[str]]]
        A dictionary of what components to include for each pipeline step
    exclude: Optional[Dict[str, List[str]]]
        A dictionary of what components to exclude for each pipeline step
    random_state: Optional[Union[int, np.random.RandomState]] = None
        The random state to use for seeding the ConfigSpace

    Returns
    -------
    ConfigurationSpace
        The configuration space for the classification pipeline
    """
    info = datamanager.info
    # Binary classification needs no extra flags: both properties below are
    # simply False in that case.
    dataset_properties = {
        "multilabel": info["task"] == MULTILABEL_CLASSIFICATION,
        "multiclass": info["task"] == MULTICLASS_CLASSIFICATION,
        "sparse": info["is_sparse"] == 1,
    }
    pipeline = SimpleClassificationPipeline(
        feat_type=datamanager.feat_type,
        dataset_properties=dataset_properties,
        include=include,
        exclude=exclude,
        random_state=random_state,
    )
    return pipeline.get_hyperparameter_search_space(feat_type=datamanager.feat_type)
| {
"content_hash": "d6afe08fdf456e04a4cd0e649425aa08",
"timestamp": "",
"source": "github",
"line_count": 160,
"max_line_length": 87,
"avg_line_length": 32.89375,
"alnum_prop": 0.6963708911267338,
"repo_name": "automl/auto-sklearn",
"id": "f0a66a2a86c0f3dcb051b1acf2f0994e412b600d",
"size": "5289",
"binary": false,
"copies": "1",
"ref": "refs/heads/development",
"path": "autosklearn/util/pipeline.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Dockerfile",
"bytes": "950"
},
{
"name": "Makefile",
"bytes": "3513"
},
{
"name": "Python",
"bytes": "2008151"
},
{
"name": "Shell",
"bytes": "4744"
}
],
"symlink_target": ""
} |
from drc_utils import *
| {
"content_hash": "10006e3782a9699eaf76c83af1fed217",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 23,
"avg_line_length": 24,
"alnum_prop": 0.75,
"repo_name": "openhumanoids/oh-distro",
"id": "ff44f8a945114376d77a9bcfcbab7bb0b69aacd3",
"size": "24",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "software/utils/drc_utils/python/drc_utils/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "131738"
},
{
"name": "C++",
"bytes": "2773796"
},
{
"name": "CMake",
"bytes": "1099155"
},
{
"name": "GLSL",
"bytes": "5320"
},
{
"name": "Java",
"bytes": "233603"
},
{
"name": "JavaScript",
"bytes": "232"
},
{
"name": "M",
"bytes": "3971"
},
{
"name": "Makefile",
"bytes": "82095"
},
{
"name": "Matlab",
"bytes": "1946915"
},
{
"name": "Mercury",
"bytes": "1487"
},
{
"name": "Objective-C",
"bytes": "10657"
},
{
"name": "Pascal",
"bytes": "3353"
},
{
"name": "Perl",
"bytes": "18915"
},
{
"name": "Python",
"bytes": "378988"
},
{
"name": "Shell",
"bytes": "35631"
},
{
"name": "XSLT",
"bytes": "73426"
}
],
"symlink_target": ""
} |
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import libya_elections.libya_bread
import libya_elections.phone_numbers
class Migration(migrations.Migration):
    """Initial schema for the polling-reports app.

    Creates StaffPhone, PreliminaryVoteCount, PollingReport, CenterOpen and
    CenterClosedForElection. All models carry a soft-delete flag plus
    creation/modification timestamps; most tie a phone number and a
    registration center (and, where relevant, an election) to a report.
    """
    initial = True
    dependencies = [
        ('register', '0001_initial'),
        ('voting', '__first__'),
    ]
    operations = [
        # Phones registered to polling staff, keyed by registration center.
        migrations.CreateModel(
            name='StaffPhone',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('deleted', models.BooleanField(default=False, verbose_name='deleted')),
                ('creation_date', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='creation date')),
                ('modification_date', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='modification date')),
                ('phone_number', models.CharField(max_length=13, verbose_name='phone number')),
                ('registration_center', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='register.RegistrationCenter', verbose_name='registration center')),
            ],
            options={
                'verbose_name': 'staff phone',
                'verbose_name_plural': 'staff phones',
                'ordering': ['registration_center__center_id', '-modification_date'],
                'permissions': (('read_staffphone', 'Can read staff phone'), ('browse_staffphone', 'Can browse staff phones')),
            },
            bases=(libya_elections.phone_numbers.FormattedPhoneNumberMixin, libya_elections.libya_bread.RegistrationCenterFormatterMixin, models.Model),
        ),
        # Per-option vote totals reported by phone for a center in an election.
        migrations.CreateModel(
            name='PreliminaryVoteCount',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('deleted', models.BooleanField(default=False, verbose_name='deleted')),
                ('creation_date', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='creation date')),
                ('modification_date', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='modification date')),
                ('phone_number', models.CharField(help_text='Received from this phone', max_length=13, verbose_name='phone number')),
                ('option', models.PositiveSmallIntegerField(validators=[django.core.validators.MinValueValidator(1)], verbose_name='option')),
                ('num_votes', models.PositiveIntegerField(verbose_name='number of votes')),
                ('election', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='voting.Election', verbose_name='election')),
                ('registration_center', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='register.RegistrationCenter', verbose_name='registration center')),
            ],
            options={
                'verbose_name': 'preliminary vote count',
                'verbose_name_plural': 'preliminary vote counts',
                'ordering': ['election', 'registration_center', 'option'],
                'permissions': (('read_preliminaryvotecount', 'Can read preliminary vote count'), ('browse_preliminaryvotecount', 'Can browse preliminary vote counts')),
            },
            bases=(libya_elections.phone_numbers.FormattedPhoneNumberMixin, libya_elections.libya_bread.ElectionFormatterMixin, libya_elections.libya_bread.RegistrationCenterFormatterMixin, models.Model),
        ),
        # Voter-turnout reports per reporting period (1-4) during polling.
        migrations.CreateModel(
            name='PollingReport',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('deleted', models.BooleanField(default=False, verbose_name='deleted')),
                ('creation_date', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='creation date')),
                ('modification_date', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='modification date')),
                ('phone_number', models.CharField(help_text='Received from this phone', max_length=13, verbose_name='phone number')),
                ('period_number', models.IntegerField(validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(4)], verbose_name='period number')),
                ('num_voters', models.IntegerField(validators=[django.core.validators.MinValueValidator(0)], verbose_name='number of voters')),
                ('election', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='voting.Election', verbose_name='election')),
                ('registration_center', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='register.RegistrationCenter', verbose_name='registration center')),
            ],
            options={
                'verbose_name': 'polling report',
                'verbose_name_plural': 'polling reports',
                'ordering': ['election_id', 'registration_center__center_id', '-period_number', '-modification_date'],
                'permissions': (('read_pollingreport', 'Can read polling report'), ('browse_pollingreport', 'Can browse polling reports')),
            },
            bases=(libya_elections.phone_numbers.FormattedPhoneNumberMixin, libya_elections.libya_bread.ElectionFormatterMixin, libya_elections.libya_bread.RegistrationCenterFormatterMixin, models.Model),
        ),
        # Record that a center reported itself open for a given election.
        migrations.CreateModel(
            name='CenterOpen',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('deleted', models.BooleanField(default=False, verbose_name='deleted')),
                ('creation_date', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='creation date')),
                ('modification_date', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='modification date')),
                ('phone_number', models.CharField(help_text='Received from this phone', max_length=13, verbose_name='phone number')),
                ('election', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='voting.Election', verbose_name='election')),
                ('registration_center', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='register.RegistrationCenter', verbose_name='registration center')),
            ],
            options={
                'verbose_name': 'center open record',
                'verbose_name_plural': 'center open records',
                'ordering': ['election_id', 'registration_center__center_id'],
                'permissions': (('read_centeropen', 'Can read center open record'), ('browse_centeropen', 'Can browse center open record')),
            },
            bases=(libya_elections.phone_numbers.FormattedPhoneNumberMixin, libya_elections.libya_bread.ElectionFormatterMixin, libya_elections.libya_bread.RegistrationCenterFormatterMixin, models.Model),
        ),
        # Administrative marker: a center is closed for a given election
        # (no phone number here -- this is not a phoned-in report).
        migrations.CreateModel(
            name='CenterClosedForElection',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('deleted', models.BooleanField(default=False, verbose_name='deleted')),
                ('creation_date', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='creation date')),
                ('modification_date', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='modification date')),
                ('election', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='voting.Election', verbose_name='election')),
                ('registration_center', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='register.RegistrationCenter', verbose_name='registration center')),
            ],
            options={
                'verbose_name': 'center closed for election',
                'verbose_name_plural': 'centers closed for election',
                'ordering': ['election_id', 'registration_center__center_id'],
                'permissions': (('read_centerclosedforelection', 'Can read center closed for an election'), ('browse_centerclosedforelection', 'Can browse centers closed for an election')),
            },
            bases=(libya_elections.libya_bread.ElectionFormatterMixin, libya_elections.libya_bread.RegistrationCenterFormatterMixin, models.Model),
        ),
    ]
| {
"content_hash": "3c48be351fcf9a405ae0c712ace9057f",
"timestamp": "",
"source": "github",
"line_count": 116,
"max_line_length": 204,
"avg_line_length": 75.87931034482759,
"alnum_prop": 0.6520109066121336,
"repo_name": "SmartElect/SmartElect",
"id": "9b17a86624286888651dd75e507dd199428596b8",
"size": "8849",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "polling_reports/migrations/0001_initial.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "43928"
},
{
"name": "HTML",
"bytes": "175822"
},
{
"name": "JavaScript",
"bytes": "475284"
},
{
"name": "Python",
"bytes": "1848271"
},
{
"name": "Shell",
"bytes": "1834"
}
],
"symlink_target": ""
} |
import os
from setuptools import setup
# Utility function to read the README file.
# Used for the long_description. It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...
def read(fname):
    """Return the contents of *fname*, resolved relative to this setup script.

    Used to feed README.md into ``long_description`` so the text is not
    duplicated inside ``setup()``.

    Fix: the original ``open(...).read()`` never closed the file handle;
    the ``with`` block guarantees it is closed even if reading fails.
    """
    with open(os.path.join(os.path.dirname(__file__), fname)) as f:
        return f.read()
# Package metadata; long_description is read from README.md at build time.
setup(
    name = "xcal3d",
    version = "0.1.0",
    author = "Emanuele Ruffaldi",
    author_email = "emanuele.ruffaldi@gmail.com",
    description = ("A package to load and manipulate Virtual Human CAL3D files directly in Python"),
    license = "BSD",
    keywords = "cal3d avatar",
    url = "https://github.com/eruffaldi/pyxcal3d",
    packages=['xcal3d'],
    long_description=read('README.md'),
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Topic :: Utilities",
        "License :: OSI Approved :: BSD License",
    ],
)
| {
"content_hash": "f8bd7a6d7d9cd07d3f74e5a60551839e",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 101,
"avg_line_length": 34.7037037037037,
"alnum_prop": 0.6307363927427961,
"repo_name": "eruffaldi/pyxcal3d",
"id": "9a556bbb2e260e8fb10c4ba99f59780c08825257",
"size": "937",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "28567"
}
],
"symlink_target": ""
} |
from __future__ import nested_scopes
from _pydev_imps._pydev_saved_modules import threading
import os
def set_trace_in_qt():
    """Enable debugger tracing in the current (Qt-started) thread, if a
    debugger is currently attached."""
    from _pydevd_bundle.pydevd_comm import get_global_debugger
    active_debugger = get_global_debugger()
    if active_debugger is None:
        return
    # Make sure the threading module has a (dummy) entry for this Qt thread
    # before tracing is switched on.
    threading.current_thread()
    active_debugger.enable_tracing()
# One-shot guard: patch_qt() checks and sets this so the Qt classes are
# monkey-patched at most once per process.
_patched_qt = False
def patch_qt(qt_support_mode):
    '''
    Patch the Qt bindings (PySide, PySide2, PyQt4, PyQt5) so that threads
    started through Qt (QThread / QRunnable) get debugger tracing enabled.

    :param qt_support_mode: one of 'auto', 'pyside', 'pyside2', 'pyqt4',
        'pyqt5', or a falsy value to disable patching. True / 'True' are
        accepted for backward compatibility and treated as 'auto'.

    Bug fix: the auto-detection branch used ``import Pyside`` (wrong case);
    the package is ``PySide``, so PySide installations were never detected
    and detection silently fell through to PyQt5/PyQt4.
    '''
    if not qt_support_mode:
        return

    if qt_support_mode is True or qt_support_mode == 'True':
        # do not break backward compatibility
        qt_support_mode = 'auto'

    if qt_support_mode == 'auto':
        qt_support_mode = os.getenv('PYDEVD_PYQT_MODE', 'auto')

    # Avoid patching more than once
    global _patched_qt
    if _patched_qt:
        return

    _patched_qt = True

    if qt_support_mode == 'auto':
        # Probe the installed bindings in order of preference.
        try:
            import PySide2  # @UnresolvedImport @UnusedImport
            qt_support_mode = 'pyside2'
        except:
            try:
                import PySide  # @UnresolvedImport @UnusedImport (was 'Pyside', which can never import)
                qt_support_mode = 'pyside'
            except:
                try:
                    import PyQt5  # @UnresolvedImport @UnusedImport
                    qt_support_mode = 'pyqt5'
                except:
                    try:
                        import PyQt4  # @UnresolvedImport @UnusedImport
                        qt_support_mode = 'pyqt4'
                    except:
                        return

    if qt_support_mode == 'pyside2':
        try:
            import PySide2.QtCore  # @UnresolvedImport
            _internal_patch_qt(PySide2.QtCore, qt_support_mode)
        except:
            return

    elif qt_support_mode == 'pyside':
        try:
            import PySide.QtCore  # @UnresolvedImport
            _internal_patch_qt(PySide.QtCore, qt_support_mode)
        except:
            return

    elif qt_support_mode == 'pyqt5':
        try:
            import PyQt5.QtCore  # @UnresolvedImport
            _internal_patch_qt(PyQt5.QtCore)
        except:
            return

    elif qt_support_mode == 'pyqt4':
        # Ok, we have an issue here:
        # PyDev-452: Selecting PyQT API version using sip.setapi fails in debug mode
        # http://pyqt.sourceforge.net/Docs/PyQt4/incompatible_apis.html
        # Mostly, if the user uses a different API version (i.e.: v2 instead of v1),
        # that has to be done before importing PyQt4 modules (PySide/PyQt5 don't have this issue
        # as they only implements v2), so defer the patching until PyQt4 is imported.
        def get_qt_core_module():
            import PyQt4.QtCore  # @UnresolvedImport
            return PyQt4.QtCore

        _patch_import_to_patch_pyqt_on_import('PyQt4', get_qt_core_module)

    else:
        raise ValueError('Unexpected qt support mode: %s' % (qt_support_mode,))
def _patch_import_to_patch_pyqt_on_import(patch_qt_on_import, get_qt_core_module):
    """Defer patching of a Qt binding until the user actually imports it.

    Replaces ``builtins.__import__`` with a wrapper that, on the first import
    of `patch_qt_on_import` (or any of its submodules), restores the original
    ``__import__``, cancels the sys-module patches and runs
    ``_internal_patch_qt`` on the QtCore module returned by
    `get_qt_core_module`.
    """
    # I don't like this approach very much as we have to patch __import__, but I like even less
    # asking the user to configure something in the client side...
    # So, our approach is to patch PyQt4 right before the user tries to import it (at which
    # point he should've set the sip api version properly already anyways).
    dotted = patch_qt_on_import + '.'
    original_import = __import__
    from _pydev_imps._pydev_sys_patch import patch_sys_module, patch_reload, cancel_patches_in_sys_module
    patch_sys_module()
    patch_reload()
    def patched_import(name, *args, **kwargs):
        if patch_qt_on_import == name or name.startswith(dotted):
            # The target module is being imported: undo all import hooks first,
            # then patch QtCore once, before handing the import through.
            builtins.__import__ = original_import
            cancel_patches_in_sys_module()
            _internal_patch_qt(get_qt_core_module())  # Patch it only when the user would import the qt module
        return original_import(name, *args, **kwargs)
    import sys
    if sys.version_info[0] >= 3:
        import builtins  # Py3
    else:
        import __builtin__ as builtins
    # Note: `patched_import` closes over `builtins`, which is bound just above,
    # so the binding order here is safe.
    builtins.__import__ = patched_import
def _internal_patch_qt(QtCore, qt_support_mode='auto'):
    """Replace ``QtCore.QThread`` and ``QtCore.QRunnable`` with wrappers that
    call ``set_trace_in_qt()`` inside every thread Qt starts.

    :param QtCore: the QtCore module of the selected Qt binding.
    :param qt_support_mode: binding name ('pyside', 'pyside2', 'pyqt4',
        'pyqt5' or 'auto'); the PySide variants need different signal
        handling below.
    """
    _original_thread_init = QtCore.QThread.__init__
    _original_runnable_init = QtCore.QRunnable.__init__
    _original_QThread = QtCore.QThread
    class FuncWrapper:
        # Wraps a slot so tracing is enabled right before the slot runs.
        def __init__(self, original):
            self._original = original
        def __call__(self, *args, **kwargs):
            set_trace_in_qt()
            return self._original(*args, **kwargs)
    class StartedSignalWrapper(QtCore.QObject):  # Wrapper for the QThread.started signal
        # Declare the forwarding signal with whichever API the binding offers.
        try:
            _signal = QtCore.Signal()  # @UndefinedVariable
        except:
            _signal = QtCore.pyqtSignal()  # @UndefinedVariable
        def __init__(self, thread, original_started):
            QtCore.QObject.__init__(self)
            self.thread = thread
            self.original_started = original_started
            if qt_support_mode in ('pyside', 'pyside2'):
                # PySide: forward the original signal directly; slots are
                # wrapped individually in connect() instead.
                self._signal = original_started
            else:
                self._signal.connect(self._on_call)
                self.original_started.connect(self._signal)
        def connect(self, func, *args, **kwargs):
            if qt_support_mode in ('pyside', 'pyside2'):
                return self._signal.connect(FuncWrapper(func), *args, **kwargs)
            else:
                return self._signal.connect(func, *args, **kwargs)
        def disconnect(self, *args, **kwargs):
            return self._signal.disconnect(*args, **kwargs)
        def emit(self, *args, **kwargs):
            return self._signal.emit(*args, **kwargs)
        def _on_call(self, *args, **kwargs):
            set_trace_in_qt()
    class ThreadWrapper(QtCore.QThread):  # Wrapper for QThread
        def __init__(self, *args, **kwargs):
            _original_thread_init(self, *args, **kwargs)
            # In PyQt5 the program hangs when we try to call original run method of QThread class.
            # So we need to distinguish instances of QThread class and instances of QThread inheritors.
            if self.__class__.run == _original_QThread.run:
                self.run = self._exec_run
            else:
                self._original_run = self.run
                self.run = self._new_run
            self._original_started = self.started
            self.started = StartedSignalWrapper(self, self.started)
        def _exec_run(self):
            # Plain QThread: enter the default event loop with tracing on.
            set_trace_in_qt()
            self.exec_()
            return None
        def _new_run(self):
            # Subclassed QThread: run the user's run() with tracing on.
            set_trace_in_qt()
            return self._original_run()
    class RunnableWrapper(QtCore.QRunnable):  # Wrapper for QRunnable
        def __init__(self, *args):
            _original_runnable_init(self, *args)
            self._original_run = self.run
            self.run = self._new_run
        def _new_run(self):
            set_trace_in_qt()
            return self._original_run()
    QtCore.QThread = ThreadWrapper
    QtCore.QRunnable = RunnableWrapper
| {
"content_hash": "29c525d5af04ed5dc2bfb4353e2cf21e",
"timestamp": "",
"source": "github",
"line_count": 208,
"max_line_length": 109,
"avg_line_length": 34.46153846153846,
"alnum_prop": 0.5862165178571429,
"repo_name": "zdary/intellij-community",
"id": "f9e7e36aff7d59e3396a4237400597745a2e4c7b",
"size": "7168",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "python/helpers/pydev/_pydev_bundle/pydev_monkey_qt.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
from django.core.management.base import BaseCommand, CommandError
from annotations.models import Corpus, Document, Language
from annotations.exports import export_annotations_inline
from core.utils import CSV, XLSX
class Command(BaseCommand):
    """Export Annotations for one Corpus into a single file.

    Positional arguments are the corpus title, the source-language iso code
    and one or more target-language iso codes. Output is CSV by default, or
    XLSX with ``--xlsx``. All inputs are validated against the database and
    bad input is reported via CommandError.
    """
    help = 'Exports Annotations for the given Corpus and Languages (one file for all languages)'
    def add_arguments(self, parser):
        parser.add_argument('corpus', type=str)
        parser.add_argument('source_language', type=str)
        parser.add_argument('languages', nargs='+', type=str)
        # Optional filters and output tweaks
        parser.add_argument('--document')
        parser.add_argument('--formal_structure', dest='formal_structure')
        parser.add_argument('--include_non_targets', action='store_true', dest='include_non_targets', default=False)
        parser.add_argument('--xlsx', action='store_true', dest='format_xlsx', default=False)
    def handle(self, *args, **options):
        # Retrieve the Corpus from the database
        try:
            corpus = Corpus.objects.get(title=options['corpus'])
        except Corpus.DoesNotExist:
            raise CommandError('Corpus with title {} does not exist'.format(options['corpus']))
        # Validate the source-language iso code
        try:
            source_language = Language.objects.get(iso=options['source_language'])
        except Language.DoesNotExist:
            raise CommandError('Language with iso {} does not exist'.format(options['source_language']))
        # Every requested target language must be attached to the corpus
        for language in options['languages']:
            if not corpus.languages.filter(iso=language):
                raise CommandError('Language {} does not exist for Corpus {}'.format(language, corpus))
        languages = Language.objects.filter(iso__in=options['languages']).order_by('iso')
        # Optionally restrict the export to a single document of the corpus
        document = None
        if options['document']:
            try:
                document = Document.objects.get(corpus=corpus, title=options['document'])
            except Document.DoesNotExist:
                raise CommandError('Document with title {} does not exist'.format(options['document']))
        format_ = XLSX if options['format_xlsx'] else CSV
        filename = '{corpus}_{iso}.{ext}'.format(corpus=corpus.title, iso=source_language.iso, ext=format_)
        export_annotations_inline(filename, format_, corpus, source_language, languages,
                                  document=document,
                                  include_non_targets=options['include_non_targets'],
                                  formal_structure=options['formal_structure'])
| {
"content_hash": "28d355065f4a980211ce2765e9ea1c7c",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 116,
"avg_line_length": 50,
"alnum_prop": 0.6464,
"repo_name": "UUDigitalHumanitieslab/timealign",
"id": "413fb4ebcc932a3060098cf2d5fd8f617358b14e",
"size": "2525",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "annotations/management/commands/export_annotations_inline.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "4412"
},
{
"name": "HTML",
"bytes": "156742"
},
{
"name": "JavaScript",
"bytes": "17478"
},
{
"name": "Python",
"bytes": "387902"
},
{
"name": "XSLT",
"bytes": "2171"
}
],
"symlink_target": ""
} |
import unittest
import reactivex
from reactivex import operators as ops
from reactivex.testing import ReactiveTest, TestScheduler
# Short module-level aliases for the ReactiveTest factory helpers used by
# the tests below.
on_next = ReactiveTest.on_next
on_completed = ReactiveTest.on_completed
on_error = ReactiveTest.on_error
subscribe = ReactiveTest.subscribe
subscribed = ReactiveTest.subscribed
disposed = ReactiveTest.disposed
created = ReactiveTest.created
class TestOnErrorResumeNext(unittest.TestCase):
def test_on_error_resume_next_no_errors(self):
scheduler = TestScheduler()
msgs1 = [on_next(150, 1), on_next(210, 2), on_next(220, 3), on_completed(230)]
msgs2 = [on_next(240, 4), on_completed(250)]
o1 = scheduler.create_hot_observable(msgs1)
o2 = scheduler.create_hot_observable(msgs2)
def create():
return o1.pipe(ops.on_error_resume_next(o2))
results = scheduler.start(create)
assert results.messages == [
on_next(210, 2),
on_next(220, 3),
on_next(240, 4),
on_completed(250),
]
def test_on_error_resume_next_error(self):
scheduler = TestScheduler()
msgs1 = [on_next(150, 1), on_next(210, 2), on_next(220, 3), on_error(230, "ex")]
msgs2 = [on_next(240, 4), on_completed(250)]
o1 = scheduler.create_hot_observable(msgs1)
o2 = scheduler.create_hot_observable(msgs2)
def create():
return o1.pipe(ops.on_error_resume_next(o2))
results = scheduler.start(create)
assert results.messages == [
on_next(210, 2),
on_next(220, 3),
on_next(240, 4),
on_completed(250),
]
def test_on_error_resume_next_error_multiple(self):
scheduler = TestScheduler()
msgs1 = [on_next(150, 1), on_next(210, 2), on_error(220, "ex")]
msgs2 = [on_next(230, 4), on_error(240, "ex")]
msgs3 = [on_completed(250)]
o1 = scheduler.create_hot_observable(msgs1)
o2 = scheduler.create_hot_observable(msgs2)
o3 = scheduler.create_hot_observable(msgs3)
def create():
return reactivex.on_error_resume_next(o1, o2, o3)
results = scheduler.start(create)
assert results.messages == [on_next(210, 2), on_next(230, 4), on_completed(250)]
def test_on_error_resume_next_empty_return_throw_and_more(self):
scheduler = TestScheduler()
msgs1 = [on_next(150, 1), on_completed(205)]
msgs2 = [on_next(215, 2), on_completed(220)]
msgs3 = [on_next(225, 3), on_next(230, 4), on_completed(235)]
msgs4 = [on_error(240, "ex")]
msgs5 = [on_next(245, 5), on_completed(250)]
o1 = scheduler.create_hot_observable(msgs1)
o2 = scheduler.create_hot_observable(msgs2)
o3 = scheduler.create_hot_observable(msgs3)
o4 = scheduler.create_hot_observable(msgs4)
o5 = scheduler.create_hot_observable(msgs5)
def create():
return reactivex.on_error_resume_next(o1, o2, o3, o4, o5)
results = scheduler.start(create)
assert results.messages == [
on_next(215, 2),
on_next(225, 3),
on_next(230, 4),
on_next(245, 5),
on_completed(250),
]
def test_on_error_resume_next_empty_return_throw_and_more_ii(self):
ex = "ex"
scheduler = TestScheduler()
msgs1 = [on_next(150, 1), on_next(210, 2), on_completed(220)]
msgs2 = [on_error(230, ex)]
o1 = scheduler.create_hot_observable(msgs1)
o2 = scheduler.create_hot_observable(msgs2)
def create():
return o1.pipe(ops.on_error_resume_next(o2))
results = scheduler.start(create)
assert results.messages == [on_next(210, 2), on_completed(230)]
def test_on_error_resume_next_single_source_throws(self):
ex = "ex"
scheduler = TestScheduler()
msgs1 = [on_error(230, ex)]
o1 = scheduler.create_hot_observable(msgs1)
def create():
return reactivex.on_error_resume_next(o1)
results = scheduler.start(create)
assert results.messages == [on_completed(230)]
def test_on_error_resume_next_end_with_never(self):
scheduler = TestScheduler()
msgs1 = [on_next(150, 1), on_next(210, 2), on_completed(220)]
o1 = scheduler.create_hot_observable(msgs1)
o2 = reactivex.never()
def create():
return reactivex.on_error_resume_next(o1, o2)
results = scheduler.start(create)
assert results.messages == [on_next(210, 2)]
def test_on_error_resume_next_start_with_never(self):
scheduler = TestScheduler()
msgs1 = [on_next(150, 1), on_next(210, 2), on_completed(220)]
o1 = reactivex.never()
o2 = scheduler.create_hot_observable(msgs1)
def create():
return reactivex.on_error_resume_next(o1, o2)
results = scheduler.start(create)
assert results.messages == []
def test_on_error_resume_next_start_with_factory(self):
    """A callable continuation receives the upstream error and supplies
    the observable to resume with."""
    sched = TestScheduler()
    failing = sched.create_hot_observable(
        [on_next(150, 1), on_next(210, 2), on_next(220, 3),
         on_error(230, "ex")])

    def factory(ex: Exception):
        # The factory is handed the exact error raised by the first source.
        assert str(ex) == "ex"
        return sched.create_hot_observable(
            [on_next(240, 4), on_completed(250)])

    def create():
        return reactivex.on_error_resume_next(failing, factory)

    res = sched.start(create)
    assert res.messages == [
        on_next(210, 2),
        on_next(220, 3),
        on_next(240, 4),
        on_completed(250),
    ]
| {
"content_hash": "b8db4997896e8d0a0052aa4d786b175b",
"timestamp": "",
"source": "github",
"line_count": 179,
"max_line_length": 88,
"avg_line_length": 32.134078212290504,
"alnum_prop": 0.5916203059805285,
"repo_name": "ReactiveX/RxPY",
"id": "d672e9e02fca0fc3013710829457574388211dc5",
"size": "5752",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_observable/test_onerrorresumenext.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "1503"
},
{
"name": "Jupyter Notebook",
"bytes": "347338"
},
{
"name": "Python",
"bytes": "1726895"
}
],
"symlink_target": ""
} |
import mox
import shutil
import sys
import tempfile
from nova import context
from nova import db
from nova.db.sqlalchemy import models
from nova import exception
from nova.network import linux_net
from nova.network import manager as network_manager
from nova.openstack.common import importutils
from nova.openstack.common import log as logging
from nova.openstack.common import rpc
import nova.policy
from nova import quota
from nova import test
from nova.tests import fake_network
from nova import utils
LOG = logging.getLogger(__name__)

# Shared fixtures for the network-manager test cases below.
HOST = "testhost"
FAKEUUID = "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa"

# Two fake network records, mirroring rows of the networks table.
networks = [{'id': 0,
             'uuid': FAKEUUID,
             'label': 'test0',
             'injected': False,
             'multi_host': False,
             'cidr': '192.168.0.0/24',
             'cidr_v6': '2001:db8::/64',
             'gateway_v6': '2001:db8::1',
             'netmask_v6': '64',
             'netmask': '255.255.255.0',
             'bridge': 'fa0',
             'bridge_interface': 'fake_fa0',
             'gateway': '192.168.0.1',
             'broadcast': '192.168.0.255',
             'dns1': '192.168.0.1',
             'dns2': '192.168.0.2',
             'vlan': None,
             'host': HOST,
             'project_id': 'fake_project',
             'vpn_public_address': '192.168.0.2'},
            {'id': 1,
             'uuid': 'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb',
             'label': 'test1',
             'injected': False,
             'multi_host': False,
             'cidr': '192.168.1.0/24',
             'cidr_v6': '2001:db9::/64',
             'gateway_v6': '2001:db9::1',
             'netmask_v6': '64',
             'netmask': '255.255.255.0',
             'bridge': 'fa1',
             'bridge_interface': 'fake_fa1',
             'gateway': '192.168.1.1',
             'broadcast': '192.168.1.255',
             'dns1': '192.168.0.1',
             'dns2': '192.168.0.2',
             'vlan': None,
             'host': HOST,
             'project_id': 'fake_project',
             'vpn_public_address': '192.168.1.2'}]

# Fake fixed_ips rows.
# NOTE(review): both entries carry 'id': 0 — the second was presumably
# meant to be id 1; and the first entry's 'network_id' is a UUID string
# while the networks fixture uses integer ids.  Tests only compare the
# fields they stub, so this appears intentional-ish — verify before
# reusing these fixtures elsewhere.
fixed_ips = [{'id': 0,
              'network_id': FAKEUUID,
              'address': '192.168.0.100',
              'instance_uuid': 0,
              'allocated': False,
              'virtual_interface_id': 0,
              'floating_ips': []},
             {'id': 0,
              'network_id': 'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb',
              'address': '192.168.1.100',
              'instance_uuid': 0,
              'allocated': False,
              'virtual_interface_id': 0,
              'floating_ips': []}]

# Minimal instance-type record (not referenced by the visible tests).
flavor = {'id': 0,
          'rxtx_cap': 3}

# Template for a floating-ip row (not referenced by the visible tests).
floating_ip_fields = {'id': 0,
                      'address': '192.168.10.100',
                      'pool': 'nova',
                      'interface': 'eth0',
                      'fixed_ip_id': 0,
                      'project_id': None,
                      'auto_assigned': False}

# Fake virtual-interface rows; the uuid suffix encodes the index so
# tests can format the expected value with '%02d'.
vifs = [{'id': 0,
         'address': 'DE:AD:BE:EF:00:00',
         'uuid': '00000000-0000-0000-0000-0000000000000000',
         'network_id': 0,
         'instance_uuid': 0},
        {'id': 1,
         'address': 'DE:AD:BE:EF:00:01',
         'uuid': '00000000-0000-0000-0000-0000000000000001',
         'network_id': 1,
         'instance_uuid': 0},
        {'id': 2,
         'address': 'DE:AD:BE:EF:00:02',
         'uuid': '00000000-0000-0000-0000-0000000000000002',
         'network_id': 2,
         'instance_uuid': 0}]
class FlatNetworkTestCase(test.TestCase):
    """Tests for nova.network.manager.FlatManager.

    The db layer is stubbed with mox record/replay; the order in which
    expectations are recorded must match the order the manager issues the
    calls, so the recording sequences below are deliberate.
    """

    def setUp(self):
        super(FlatNetworkTestCase, self).setUp()
        self.tempdir = tempfile.mkdtemp()
        self.flags(logdir=self.tempdir)
        self.network = network_manager.FlatManager(host=HOST)
        # Swap in the in-process MiniDNS driver so DNS entries can be
        # inspected without any real DNS backend.
        temp = importutils.import_object('nova.network.minidns.MiniDNS')
        self.network.instance_dns_manager = temp
        self.network.instance_dns_domain = ''
        self.network.db = db
        self.context = context.RequestContext('testuser', 'testproject',
                                              is_admin=False)

    def tearDown(self):
        shutil.rmtree(self.tempdir)
        super(FlatNetworkTestCase, self).tearDown()

    def test_get_instance_nw_info(self):
        """Network info built by the fake helper matches the expected
        per-network layout."""
        fake_get_instance_nw_info = fake_network.fake_get_instance_nw_info

        # Zero networks yields an empty (falsy) nw_info.
        nw_info = fake_get_instance_nw_info(self.stubs, 0, 2)
        self.assertFalse(nw_info)

        nw_info = fake_get_instance_nw_info(self.stubs, 1, 2)
        for i, (nw, info) in enumerate(nw_info):
            nid = i + 1
            check = {'bridge': 'fake_br%d' % nid,
                     'cidr': '192.168.%s.0/24' % nid,
                     'cidr_v6': '2001:db8:0:%x::/64' % nid,
                     'id': '00000000-0000-0000-0000-00000000000000%02d' % nid,
                     'multi_host': False,
                     'injected': False,
                     'bridge_interface': None,
                     'vlan': None}
            self.assertDictMatch(nw, check)

            check = {'broadcast': '192.168.%d.255' % nid,
                     'dhcp_server': '192.168.1.1',
                     'dns': ['192.168.%d.3' % nid, '192.168.%d.4' % nid],
                     'gateway': '192.168.%d.1' % nid,
                     'gateway_v6': 'fe80::def',
                     'ip6s': 'DONTCARE',
                     'ips': 'DONTCARE',
                     'label': 'test%d' % nid,
                     'mac': 'DE:AD:BE:EF:00:%02x' % nid,
                     'rxtx_cap': 30,
                     'vif_uuid':
                         '00000000-0000-0000-0000-00000000000000%02d' % nid,
                     'should_create_vlan': False,
                     'should_create_bridge': False}
            self.assertDictMatch(info, check)

            check = [{'enabled': 'DONTCARE',
                      'ip': '2001:db8:0:1::%x' % nid,
                      'netmask': 64,
                      'gateway': 'fe80::def'}]
            self.assertDictListMatch(info['ip6s'], check)

            num_fixed_ips = len(info['ips'])
            check = [{'enabled': 'DONTCARE',
                      'ip': '192.168.%d.%03d' % (nid, ip_num + 99),
                      'netmask': '255.255.255.0',
                      'gateway': '192.168.%d.1' % nid}
                     for ip_num in xrange(1, num_fixed_ips + 1)]
            self.assertDictListMatch(info['ips'], check)

    def test_validate_networks(self):
        """A valid (uuid, fixed_ip) request passes validation."""
        self.mox.StubOutWithMock(db, 'network_get')
        self.mox.StubOutWithMock(db, 'network_get_all_by_uuids')
        self.mox.StubOutWithMock(db, 'fixed_ip_get_by_address')

        requested_networks = [('bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb',
                               '192.168.1.100')]
        db.network_get_all_by_uuids(mox.IgnoreArg(), mox.IgnoreArg(),
                project_only=mox.IgnoreArg()).AndReturn(networks)
        db.network_get(mox.IgnoreArg(),
                       mox.IgnoreArg(),
                       project_only=mox.IgnoreArg()).AndReturn(networks[1])

        # Copy so the shared module-level fixture is not mutated.
        ip = fixed_ips[1].copy()
        ip['instance_uuid'] = None
        db.fixed_ip_get_by_address(mox.IgnoreArg(),
                                   mox.IgnoreArg()).AndReturn(ip)
        self.mox.ReplayAll()
        self.network.validate_networks(self.context, requested_networks)

    def test_validate_reserved(self):
        """Creating a network reserves some addresses up front."""
        context_admin = context.RequestContext('testuser', 'testproject',
                                               is_admin=True)
        nets = self.network.create_networks(context_admin, 'fake',
                                            '192.168.0.0/24', False, 1,
                                            256, None, None, None, None, None)
        self.assertEqual(1, len(nets))
        network = nets[0]
        # 3 reserved ips — presumably network/gateway/vpn addresses;
        # TODO(review) confirm against FlatManager.create_networks.
        self.assertEqual(3, db.network_count_reserved_ips(context_admin,
                                                          network['id']))

    def test_validate_networks_none_requested_networks(self):
        """None means "no specific request" and is always valid."""
        self.network.validate_networks(self.context, None)

    def test_validate_networks_empty_requested_networks(self):
        """An empty request list is valid and touches no db calls."""
        requested_networks = []
        self.mox.ReplayAll()
        self.network.validate_networks(self.context, requested_networks)

    def test_validate_networks_invalid_fixed_ip(self):
        """A malformed fixed ip string raises FixedIpInvalid."""
        self.mox.StubOutWithMock(db, 'network_get_all_by_uuids')
        requested_networks = [(1, "192.168.0.100.1")]
        db.network_get_all_by_uuids(mox.IgnoreArg(), mox.IgnoreArg(),
                project_only=mox.IgnoreArg()).AndReturn(networks)
        self.mox.ReplayAll()
        self.assertRaises(exception.FixedIpInvalid,
                          self.network.validate_networks, self.context,
                          requested_networks)

    def test_validate_networks_empty_fixed_ip(self):
        """An empty fixed ip string raises FixedIpInvalid."""
        self.mox.StubOutWithMock(db, 'network_get_all_by_uuids')
        requested_networks = [(1, "")]
        db.network_get_all_by_uuids(mox.IgnoreArg(), mox.IgnoreArg(),
                project_only=mox.IgnoreArg()).AndReturn(networks)
        self.mox.ReplayAll()
        self.assertRaises(exception.FixedIpInvalid,
                          self.network.validate_networks,
                          self.context, requested_networks)

    def test_validate_networks_none_fixed_ip(self):
        """A None fixed ip means "allocate for me" and is valid."""
        self.mox.StubOutWithMock(db, 'network_get_all_by_uuids')
        requested_networks = [(1, None)]
        db.network_get_all_by_uuids(mox.IgnoreArg(), mox.IgnoreArg(),
                project_only=mox.IgnoreArg()).AndReturn(networks)
        self.mox.ReplayAll()
        self.network.validate_networks(self.context, requested_networks)

    def test_add_fixed_ip_instance_using_id_without_vpn(self):
        """add_fixed_ip_to_instance works when given an integer network id."""
        self.mox.StubOutWithMock(db, 'network_get')
        self.mox.StubOutWithMock(db, 'network_update')
        self.mox.StubOutWithMock(db, 'fixed_ip_associate_pool')
        self.mox.StubOutWithMock(db, 'instance_get')
        self.mox.StubOutWithMock(db,
                      'virtual_interface_get_by_instance_and_network')
        self.mox.StubOutWithMock(db, 'fixed_ip_update')
        self.mox.StubOutWithMock(quota.QUOTAS, 'reserve')

        db.fixed_ip_update(mox.IgnoreArg(),
                           mox.IgnoreArg(),
                           mox.IgnoreArg())
        db.virtual_interface_get_by_instance_and_network(mox.IgnoreArg(),
                mox.IgnoreArg(), mox.IgnoreArg()).AndReturn({'id': 0})
        db.instance_get(self.context,
                        1).AndReturn({'display_name': HOST,
                                      'uuid': 'test-00001'})
        db.instance_get(mox.IgnoreArg(),
                        mox.IgnoreArg()).AndReturn({'security_groups':
                                                    [{'id': 0}]})
        quota.QUOTAS.reserve(mox.IgnoreArg(),
                             fixed_ips=mox.IgnoreArg()).AndReturn(None)
        db.fixed_ip_associate_pool(mox.IgnoreArg(),
                                   mox.IgnoreArg(),
                                   mox.IgnoreArg()).AndReturn('192.168.0.101')
        db.network_get(mox.IgnoreArg(),
                       mox.IgnoreArg(),
                       project_only=mox.IgnoreArg()).AndReturn(networks[0])
        db.network_update(mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg())
        self.mox.ReplayAll()
        self.network.add_fixed_ip_to_instance(self.context, 1, HOST,
                                              networks[0]['id'])

    def test_add_fixed_ip_instance_using_uuid_without_vpn(self):
        """add_fixed_ip_to_instance works when given a network uuid."""
        self.mox.StubOutWithMock(db, 'network_get_by_uuid')
        self.mox.StubOutWithMock(db, 'network_update')
        self.mox.StubOutWithMock(db, 'fixed_ip_associate_pool')
        self.mox.StubOutWithMock(db, 'instance_get')
        self.mox.StubOutWithMock(db,
                      'virtual_interface_get_by_instance_and_network')
        self.mox.StubOutWithMock(db, 'fixed_ip_update')
        self.mox.StubOutWithMock(quota.QUOTAS, 'reserve')

        db.fixed_ip_update(mox.IgnoreArg(),
                           mox.IgnoreArg(),
                           mox.IgnoreArg())
        db.virtual_interface_get_by_instance_and_network(mox.IgnoreArg(),
                mox.IgnoreArg(), mox.IgnoreArg()).AndReturn({'id': 0})
        db.instance_get(self.context,
                        1).AndReturn({'display_name': HOST,
                                      'uuid': 'test-00001'})
        db.instance_get(mox.IgnoreArg(),
                        mox.IgnoreArg()).AndReturn({'security_groups':
                                                    [{'id': 0}]})
        quota.QUOTAS.reserve(mox.IgnoreArg(),
                             fixed_ips=mox.IgnoreArg()).AndReturn(None)
        db.fixed_ip_associate_pool(mox.IgnoreArg(),
                                   mox.IgnoreArg(),
                                   mox.IgnoreArg()).AndReturn('192.168.0.101')
        db.network_get_by_uuid(mox.IgnoreArg(),
                               mox.IgnoreArg()).AndReturn(networks[0])
        db.network_update(mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg())
        self.mox.ReplayAll()
        self.network.add_fixed_ip_to_instance(self.context, 1, HOST,
                                              networks[0]['uuid'])

    def test_mini_dns_driver(self):
        """Exercise the MiniDNS driver: create/delete/modify entries and
        look them up by address and by name, across two zones."""
        zone1 = "example.org"
        zone2 = "example.com"
        driver = self.network.instance_dns_manager
        driver.create_entry("hostone", "10.0.0.1", "A", zone1)
        driver.create_entry("hosttwo", "10.0.0.2", "A", zone1)
        driver.create_entry("hostthree", "10.0.0.3", "A", zone1)
        driver.create_entry("hostfour", "10.0.0.4", "A", zone1)
        driver.create_entry("hostfive", "10.0.0.5", "A", zone2)

        driver.delete_entry("hostone", zone1)
        driver.modify_address("hostfour", "10.0.0.1", zone1)
        driver.modify_address("hostthree", "10.0.0.1", zone1)
        names = driver.get_entries_by_address("10.0.0.1", zone1)
        self.assertEqual(len(names), 2)
        self.assertIn('hostthree', names)
        self.assertIn('hostfour', names)

        names = driver.get_entries_by_address("10.0.0.5", zone2)
        self.assertEqual(len(names), 1)
        self.assertIn('hostfive', names)

        addresses = driver.get_entries_by_name("hosttwo", zone1)
        self.assertEqual(len(addresses), 1)
        self.assertIn('10.0.0.2', addresses)

        # Unsupported record types are rejected.
        self.assertRaises(exception.InvalidInput,
                          driver.create_entry,
                          "hostname",
                          "10.10.10.10",
                          "invalidtype",
                          zone1)

    def test_instance_dns(self):
        """Allocating a fixed ip registers DNS entries for both the
        instance display name and its uuid."""
        fixedip = '192.168.0.101'
        self.mox.StubOutWithMock(db, 'network_get')
        self.mox.StubOutWithMock(db, 'network_update')
        self.mox.StubOutWithMock(db, 'fixed_ip_associate_pool')
        self.mox.StubOutWithMock(db, 'instance_get')
        self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
        self.mox.StubOutWithMock(db,
                      'virtual_interface_get_by_instance_and_network')
        self.mox.StubOutWithMock(db, 'fixed_ip_update')
        self.mox.StubOutWithMock(quota.QUOTAS, 'reserve')

        db.fixed_ip_update(mox.IgnoreArg(),
                           mox.IgnoreArg(),
                           mox.IgnoreArg())
        db.virtual_interface_get_by_instance_and_network(mox.IgnoreArg(),
                mox.IgnoreArg(), mox.IgnoreArg()).AndReturn({'id': 0})
        db.instance_get(self.context,
                        1).AndReturn({'display_name': HOST,
                                      'uuid': 'test-00001'})
        db.instance_get(mox.IgnoreArg(),
                        mox.IgnoreArg()).AndReturn({'security_groups':
                                                    [{'id': 0}]})
        quota.QUOTAS.reserve(mox.IgnoreArg(),
                             fixed_ips=mox.IgnoreArg()).AndReturn(None)
        db.fixed_ip_associate_pool(mox.IgnoreArg(),
                                   mox.IgnoreArg(),
                                   mox.IgnoreArg()).AndReturn(fixedip)
        db.network_get(mox.IgnoreArg(),
                       mox.IgnoreArg(),
                       project_only=mox.IgnoreArg()).AndReturn(networks[0])
        db.network_update(mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg())
        self.mox.ReplayAll()
        self.network.add_fixed_ip_to_instance(self.context, 1, HOST,
                                              networks[0]['id'])

        instance_manager = self.network.instance_dns_manager
        addresses = instance_manager.get_entries_by_name(HOST,
                self.network.instance_dns_domain)
        self.assertEqual(len(addresses), 1)
        self.assertEqual(addresses[0], fixedip)
        addresses = instance_manager.get_entries_by_name('test-00001',
                self.network.instance_dns_domain)
        self.assertEqual(len(addresses), 1)
        self.assertEqual(addresses[0], fixedip)
class VlanNetworkTestCase(test.TestCase):
def setUp(self):
super(VlanNetworkTestCase, self).setUp()
self.network = network_manager.VlanManager(host=HOST)
self.network.db = db
self.context = context.RequestContext('testuser', 'testproject',
is_admin=False)
def test_vpn_allocate_fixed_ip(self):
self.mox.StubOutWithMock(db, 'instance_get')
self.mox.StubOutWithMock(db, 'fixed_ip_associate')
self.mox.StubOutWithMock(db, 'fixed_ip_update')
self.mox.StubOutWithMock(db,
'virtual_interface_get_by_instance_and_network')
db.instance_get(mox.IgnoreArg(),
mox.IgnoreArg()).AndReturn({'uuid': '42'})
db.fixed_ip_associate(mox.IgnoreArg(),
mox.IgnoreArg(),
mox.IgnoreArg(),
mox.IgnoreArg(),
reserved=True).AndReturn('192.168.0.1')
db.fixed_ip_update(mox.IgnoreArg(),
mox.IgnoreArg(),
mox.IgnoreArg())
db.virtual_interface_get_by_instance_and_network(mox.IgnoreArg(),
mox.IgnoreArg(), mox.IgnoreArg()).AndReturn({'id': 0})
self.mox.ReplayAll()
network = dict(networks[0])
network['vpn_private_address'] = '192.168.0.2'
self.network.allocate_fixed_ip(None, 0, network, vpn=True)
def test_vpn_allocate_fixed_ip_no_network_id(self):
network = dict(networks[0])
network['vpn_private_address'] = '192.168.0.2'
network['id'] = None
instance = db.instance_create(self.context, {})
context_admin = context.RequestContext('testuser', 'testproject',
is_admin=True)
self.assertRaises(exception.FixedIpNotFoundForNetwork,
self.network.allocate_fixed_ip,
context_admin,
instance['id'],
network,
vpn=True)
def test_allocate_fixed_ip(self):
self.mox.StubOutWithMock(db, 'fixed_ip_associate_pool')
self.mox.StubOutWithMock(db, 'fixed_ip_update')
self.mox.StubOutWithMock(db,
'virtual_interface_get_by_instance_and_network')
self.mox.StubOutWithMock(db, 'instance_get')
db.instance_get(mox.IgnoreArg(),
mox.IgnoreArg()).AndReturn({'uuid': FAKEUUID})
db.instance_get(mox.IgnoreArg(),
mox.IgnoreArg()).AndReturn({'security_groups':
[{'id': 0}]})
db.fixed_ip_associate_pool(mox.IgnoreArg(),
mox.IgnoreArg(),
mox.IgnoreArg()).AndReturn('192.168.0.1')
db.fixed_ip_update(mox.IgnoreArg(),
mox.IgnoreArg(),
mox.IgnoreArg())
db.virtual_interface_get_by_instance_and_network(mox.IgnoreArg(),
mox.IgnoreArg(), mox.IgnoreArg()).AndReturn({'id': 0})
self.mox.ReplayAll()
network = dict(networks[0])
network['vpn_private_address'] = '192.168.0.2'
self.network.allocate_fixed_ip(self.context, 0, network)
def test_create_networks_too_big(self):
self.assertRaises(ValueError, self.network.create_networks, None,
num_networks=4094, vlan_start=1)
def test_create_networks_too_many(self):
self.assertRaises(ValueError, self.network.create_networks, None,
num_networks=100, vlan_start=1,
cidr='192.168.0.1/24', network_size=100)
def test_validate_networks(self):
def network_get(_context, network_id, project_only='allow_none'):
return networks[network_id]
self.stubs.Set(db, 'network_get', network_get)
self.mox.StubOutWithMock(db, 'network_get_all_by_uuids')
self.mox.StubOutWithMock(db, "fixed_ip_get_by_address")
requested_networks = [("bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb",
"192.168.1.100")]
db.network_get_all_by_uuids(mox.IgnoreArg(), mox.IgnoreArg(),
project_only=mox.IgnoreArg()).AndReturn(networks)
fixed_ips[1]['network_id'] = networks[1]['id']
fixed_ips[1]['instance_uuid'] = None
db.fixed_ip_get_by_address(mox.IgnoreArg(),
mox.IgnoreArg()).AndReturn(fixed_ips[1])
self.mox.ReplayAll()
self.network.validate_networks(self.context, requested_networks)
def test_validate_networks_none_requested_networks(self):
self.network.validate_networks(self.context, None)
def test_validate_networks_empty_requested_networks(self):
requested_networks = []
self.mox.ReplayAll()
self.network.validate_networks(self.context, requested_networks)
def test_validate_networks_invalid_fixed_ip(self):
self.mox.StubOutWithMock(db, 'network_get_all_by_uuids')
requested_networks = [(1, "192.168.0.100.1")]
db.network_get_all_by_uuids(mox.IgnoreArg(), mox.IgnoreArg(),
project_only=mox.IgnoreArg()).AndReturn(networks)
self.mox.ReplayAll()
self.assertRaises(exception.FixedIpInvalid,
self.network.validate_networks, self.context,
requested_networks)
def test_validate_networks_empty_fixed_ip(self):
self.mox.StubOutWithMock(db, 'network_get_all_by_uuids')
requested_networks = [(1, "")]
db.network_get_all_by_uuids(mox.IgnoreArg(), mox.IgnoreArg(),
project_only=mox.IgnoreArg()).AndReturn(networks)
self.mox.ReplayAll()
self.assertRaises(exception.FixedIpInvalid,
self.network.validate_networks,
self.context, requested_networks)
def test_validate_networks_none_fixed_ip(self):
self.mox.StubOutWithMock(db, 'network_get_all_by_uuids')
requested_networks = [(1, None)]
db.network_get_all_by_uuids(mox.IgnoreArg(), mox.IgnoreArg(),
project_only=mox.IgnoreArg()).AndReturn(networks)
self.mox.ReplayAll()
self.network.validate_networks(self.context, requested_networks)
def test_floating_ip_owned_by_project(self):
ctxt = context.RequestContext('testuser', 'testproject',
is_admin=False)
# raises because floating_ip project_id is None
floating_ip = {'address': '10.0.0.1',
'project_id': None}
self.assertRaises(exception.NotAuthorized,
self.network._floating_ip_owned_by_project,
ctxt,
floating_ip)
# raises because floating_ip project_id is not equal to ctxt project_id
floating_ip = {'address': '10.0.0.1',
'project_id': ctxt.project_id + '1'}
self.assertRaises(exception.NotAuthorized,
self.network._floating_ip_owned_by_project,
ctxt,
floating_ip)
# does not raise (floating ip is owned by ctxt project)
floating_ip = {'address': '10.0.0.1',
'project_id': ctxt.project_id}
self.network._floating_ip_owned_by_project(ctxt, floating_ip)
ctxt = context.RequestContext(None, None,
is_admin=True)
# does not raise (ctxt is admin)
floating_ip = {'address': '10.0.0.1',
'project_id': None}
self.network._floating_ip_owned_by_project(ctxt, floating_ip)
# does not raise (ctxt is admin)
floating_ip = {'address': '10.0.0.1',
'project_id': 'testproject'}
self.network._floating_ip_owned_by_project(ctxt, floating_ip)
def test_allocate_floating_ip(self):
ctxt = context.RequestContext('testuser', 'testproject',
is_admin=False)
def fake_allocate_address(*args, **kwargs):
return {'address': '10.0.0.1', 'project_id': ctxt.project_id}
self.stubs.Set(self.network.db, 'floating_ip_allocate_address',
fake_allocate_address)
self.network.allocate_floating_ip(ctxt, ctxt.project_id)
def test_deallocate_floating_ip(self):
ctxt = context.RequestContext('testuser', 'testproject',
is_admin=False)
def fake1(*args, **kwargs):
pass
def fake2(*args, **kwargs):
return {'address': '10.0.0.1', 'fixed_ip_id': 1}
def fake3(*args, **kwargs):
return {'address': '10.0.0.1', 'fixed_ip_id': None,
'project_id': ctxt.project_id}
self.stubs.Set(self.network.db, 'floating_ip_deallocate', fake1)
self.stubs.Set(self.network, '_floating_ip_owned_by_project', fake1)
# this time should raise because floating ip is associated to fixed_ip
self.stubs.Set(self.network.db, 'floating_ip_get_by_address', fake2)
self.assertRaises(exception.FloatingIpAssociated,
self.network.deallocate_floating_ip,
ctxt,
mox.IgnoreArg())
# this time should not raise
self.stubs.Set(self.network.db, 'floating_ip_get_by_address', fake3)
self.network.deallocate_floating_ip(ctxt, ctxt.project_id)
def test_associate_floating_ip(self):
ctxt = context.RequestContext('testuser', 'testproject',
is_admin=False)
def fake1(*args, **kwargs):
return '10.0.0.1'
# floating ip that's already associated
def fake2(*args, **kwargs):
return {'address': '10.0.0.1',
'pool': 'nova',
'interface': 'eth0',
'fixed_ip_id': 1}
# floating ip that isn't associated
def fake3(*args, **kwargs):
return {'address': '10.0.0.1',
'pool': 'nova',
'interface': 'eth0',
'fixed_ip_id': None}
# fixed ip with remote host
def fake4(*args, **kwargs):
return {'address': '10.0.0.1',
'pool': 'nova',
'interface': 'eth0',
'network_id': 'blah'}
def fake4_network(*args, **kwargs):
return {'multi_host': False, 'host': 'jibberjabber'}
# fixed ip with local host
def fake5(*args, **kwargs):
return {'address': '10.0.0.1',
'pool': 'nova',
'interface': 'eth0',
'network_id': 'blahblah'}
def fake5_network(*args, **kwargs):
return {'multi_host': False, 'host': 'testhost'}
def fake6(*args, **kwargs):
self.local = False
def fake7(*args, **kwargs):
self.local = True
def fake8(*args, **kwargs):
raise exception.ProcessExecutionError('',
'Cannot find device "em0"\n')
def fake9(*args, **kwargs):
raise test.TestingException()
# raises because interface doesn't exist
self.stubs.Set(self.network.db,
'floating_ip_fixed_ip_associate',
fake1)
self.stubs.Set(self.network.db, 'floating_ip_disassociate', fake1)
self.stubs.Set(self.network.driver, 'bind_floating_ip', fake8)
self.assertRaises(exception.NoFloatingIpInterface,
self.network._associate_floating_ip,
ctxt,
mox.IgnoreArg(),
mox.IgnoreArg(),
mox.IgnoreArg())
self.stubs.Set(self.network, '_floating_ip_owned_by_project', fake1)
# raises because floating_ip is already associated to a fixed_ip
self.stubs.Set(self.network.db, 'floating_ip_get_by_address', fake2)
self.stubs.Set(self.network, 'disassociate_floating_ip', fake9)
def fake_fixed_ip_get(context, fixed_ip_id):
return {'instance_uuid': 'fake_uuid'}
self.stubs.Set(self.network.db, 'fixed_ip_get', fake_fixed_ip_get)
self.assertRaises(test.TestingException,
self.network.associate_floating_ip,
ctxt,
mox.IgnoreArg(),
mox.IgnoreArg())
self.stubs.Set(self.network.db, 'floating_ip_get_by_address', fake3)
# does not raise and makes call remotely
self.local = True
self.stubs.Set(self.network.db, 'fixed_ip_get_by_address', fake4)
self.stubs.Set(self.network.db, 'network_get', fake4_network)
self.stubs.Set(rpc, 'call', fake6)
self.network.associate_floating_ip(ctxt, mox.IgnoreArg(),
mox.IgnoreArg())
self.assertFalse(self.local)
# does not raise and makes call locally
self.local = False
self.stubs.Set(self.network.db, 'fixed_ip_get_by_address', fake5)
self.stubs.Set(self.network.db, 'network_get', fake5_network)
self.stubs.Set(self.network, '_associate_floating_ip', fake7)
self.network.associate_floating_ip(ctxt, mox.IgnoreArg(),
mox.IgnoreArg())
self.assertTrue(self.local)
def test_floating_ip_init_host(self):
def get_all_by_host(_context, _host):
return [{'interface': 'foo',
'address': 'foo'},
{'interface': 'fakeiface',
'address': 'fakefloat',
'fixed_ip_id': 1},
{'interface': 'bar',
'address': 'bar',
'fixed_ip_id': 2}]
self.stubs.Set(self.network.db, 'floating_ip_get_all_by_host',
get_all_by_host)
def fixed_ip_get(_context, fixed_ip_id):
if fixed_ip_id == 1:
return {'address': 'fakefixed'}
raise exception.FixedIpNotFound()
self.stubs.Set(self.network.db, 'fixed_ip_get', fixed_ip_get)
self.mox.StubOutWithMock(self.network.l3driver, 'add_floating_ip')
self.flags(public_interface=False)
self.network.l3driver.add_floating_ip('fakefloat',
'fakefixed',
'fakeiface')
self.mox.ReplayAll()
self.network.init_host_floating_ips()
self.mox.UnsetStubs()
self.mox.VerifyAll()
self.mox.StubOutWithMock(self.network.l3driver, 'add_floating_ip')
self.flags(public_interface='fooiface')
self.network.l3driver.add_floating_ip('fakefloat',
'fakefixed',
'fooiface')
self.mox.ReplayAll()
self.network.init_host_floating_ips()
self.mox.UnsetStubs()
self.mox.VerifyAll()
def test_disassociate_floating_ip(self):
ctxt = context.RequestContext('testuser', 'testproject',
is_admin=False)
def fake1(*args, **kwargs):
pass
# floating ip that isn't associated
def fake2(*args, **kwargs):
return {'address': '10.0.0.1',
'pool': 'nova',
'interface': 'eth0',
'fixed_ip_id': None}
# floating ip that is associated
def fake3(*args, **kwargs):
return {'address': '10.0.0.1',
'pool': 'nova',
'interface': 'eth0',
'fixed_ip_id': 1,
'project_id': ctxt.project_id}
# fixed ip with remote host
def fake4(*args, **kwargs):
return {'address': '10.0.0.1',
'pool': 'nova',
'interface': 'eth0',
'network_id': 'blah'}
def fake4_network(*args, **kwargs):
return {'multi_host': False,
'host': 'jibberjabber'}
# fixed ip with local host
def fake5(*args, **kwargs):
return {'address': '10.0.0.1',
'pool': 'nova',
'interface': 'eth0',
'network_id': 'blahblah'}
def fake5_network(*args, **kwargs):
return {'multi_host': False, 'host': 'testhost'}
def fake6(*args, **kwargs):
self.local = False
def fake7(*args, **kwargs):
self.local = True
def fake8(*args, **kwargs):
return {'address': '10.0.0.1',
'pool': 'nova',
'interface': 'eth0',
'fixed_ip_id': 1,
'auto_assigned': True,
'project_id': ctxt.project_id}
self.stubs.Set(self.network, '_floating_ip_owned_by_project', fake1)
# raises because floating_ip is not associated to a fixed_ip
self.stubs.Set(self.network.db, 'floating_ip_get_by_address', fake2)
self.assertRaises(exception.FloatingIpNotAssociated,
self.network.disassociate_floating_ip,
ctxt,
mox.IgnoreArg())
self.stubs.Set(self.network.db, 'floating_ip_get_by_address', fake3)
# does not raise and makes call remotely
self.local = True
self.stubs.Set(self.network.db, 'fixed_ip_get', fake4)
self.stubs.Set(self.network.db, 'network_get', fake4_network)
self.stubs.Set(rpc, 'call', fake6)
self.network.disassociate_floating_ip(ctxt, mox.IgnoreArg())
self.assertFalse(self.local)
# does not raise and makes call locally
self.local = False
self.stubs.Set(self.network.db, 'fixed_ip_get', fake5)
self.stubs.Set(self.network.db, 'network_get', fake5_network)
self.stubs.Set(self.network, '_disassociate_floating_ip', fake7)
self.network.disassociate_floating_ip(ctxt, mox.IgnoreArg())
self.assertTrue(self.local)
# raises because auto_assigned floating IP cannot be disassociated
self.stubs.Set(self.network.db, 'floating_ip_get_by_address', fake8)
self.assertRaises(exception.CannotDisassociateAutoAssignedFloatingIP,
self.network.disassociate_floating_ip,
ctxt,
mox.IgnoreArg())
def test_add_fixed_ip_instance_without_vpn_requested_networks(self):
self.mox.StubOutWithMock(db, 'network_get')
self.mox.StubOutWithMock(db, 'fixed_ip_associate_pool')
self.mox.StubOutWithMock(db, 'instance_get')
self.mox.StubOutWithMock(db,
'virtual_interface_get_by_instance_and_network')
self.mox.StubOutWithMock(db, 'fixed_ip_update')
db.instance_get(mox.IgnoreArg(),
mox.IgnoreArg()).AndReturn({'uuid': FAKEUUID})
db.fixed_ip_update(mox.IgnoreArg(),
mox.IgnoreArg(),
mox.IgnoreArg())
db.virtual_interface_get_by_instance_and_network(mox.IgnoreArg(),
mox.IgnoreArg(), mox.IgnoreArg()).AndReturn({'id': 0})
db.instance_get(mox.IgnoreArg(),
mox.IgnoreArg()).AndReturn({'security_groups':
[{'id': 0}],
'availability_zone': '',
'uuid': FAKEUUID})
db.fixed_ip_associate_pool(mox.IgnoreArg(),
mox.IgnoreArg(),
mox.IgnoreArg()).AndReturn('192.168.0.101')
db.network_get(mox.IgnoreArg(),
mox.IgnoreArg(),
project_only=mox.IgnoreArg()).AndReturn(networks[0])
self.mox.ReplayAll()
self.network.add_fixed_ip_to_instance(self.context, 1, HOST,
networks[0]['id'])
def test_ip_association_and_allocation_of_other_project(self):
"""Makes sure that we cannot deallocaate or disassociate
a public ip of other project"""
def network_get(_context, network_id, project_only="allow_none"):
return networks[network_id]
self.stubs.Set(db, 'network_get', network_get)
context1 = context.RequestContext('user', 'project1')
context2 = context.RequestContext('user', 'project2')
address = '1.2.3.4'
float_addr = db.floating_ip_create(context1.elevated(),
{'address': address,
'project_id': context1.project_id})
instance = db.instance_create(context1,
{'project_id': 'project1'})
fix_addr = db.fixed_ip_associate_pool(context1.elevated(),
1, instance['uuid'])
# Associate the IP with non-admin user context
self.assertRaises(exception.NotAuthorized,
self.network.associate_floating_ip,
context2,
float_addr,
fix_addr)
# Deallocate address from other project
self.assertRaises(exception.NotAuthorized,
self.network.deallocate_floating_ip,
context2,
float_addr)
# Now Associates the address to the actual project
self.network.associate_floating_ip(context1, float_addr, fix_addr)
# Now try dis-associating from other project
self.assertRaises(exception.NotAuthorized,
self.network.disassociate_floating_ip,
context2,
float_addr)
# Clean up the ip addresses
self.network.disassociate_floating_ip(context1, float_addr)
self.network.deallocate_floating_ip(context1, float_addr)
self.network.deallocate_fixed_ip(context1, fix_addr, 'fake')
db.floating_ip_destroy(context1.elevated(), float_addr)
db.fixed_ip_disassociate(context1.elevated(), fix_addr)
def test_deallocate_fixed(self):
"""Verify that release is called properly.
Ensures https://bugs.launchpad.net/nova/+bug/973442 doesn't return"""
def network_get(_context, network_id, project_only="allow_none"):
return networks[network_id]
self.stubs.Set(db, 'network_get', network_get)
def vif_get(_context, _vif_id):
return {'address': 'fake_mac'}
self.stubs.Set(db, 'virtual_interface_get', vif_get)
context1 = context.RequestContext('user', 'project1')
instance = db.instance_create(context1,
{'project_id': 'project1'})
elevated = context1.elevated()
fix_addr = db.fixed_ip_associate_pool(elevated, 1, instance['uuid'])
values = {'allocated': True,
'virtual_interface_id': 3}
db.fixed_ip_update(elevated, fix_addr, values)
fixed = db.fixed_ip_get_by_address(elevated, fix_addr)
network = db.network_get(elevated, fixed['network_id'])
self.flags(force_dhcp_release=True)
self.mox.StubOutWithMock(linux_net, 'release_dhcp')
linux_net.release_dhcp(network['bridge'], fixed['address'], 'fake_mac')
self.mox.ReplayAll()
self.network.deallocate_fixed_ip(context1, fix_addr, 'fake')
fixed = db.fixed_ip_get_by_address(elevated, fix_addr)
self.assertFalse(fixed['allocated'])
def test_deallocate_fixed_deleted(self):
"""Verify doesn't deallocate deleted fixed_ip from deleted network"""
def network_get(_context, network_id, project_only="allow_none"):
return networks[network_id]
def teardown_network_on_host(_context, network):
if network['id'] == 0:
raise test.TestingException()
self.stubs.Set(db, 'network_get', network_get)
self.stubs.Set(self.network, '_teardown_network_on_host',
teardown_network_on_host)
context1 = context.RequestContext('user', 'project1')
instance = db.instance_create(context1,
{'project_id': 'project1'})
elevated = context1.elevated()
fix_addr = db.fixed_ip_associate_pool(elevated, 1, instance['uuid'])
db.fixed_ip_update(elevated, fix_addr, {'deleted': 1})
elevated.read_deleted = 'yes'
delfixed = db.fixed_ip_get_by_address(elevated, fix_addr)
values = {'address': fix_addr,
'network_id': 0,
'instance_uuid': delfixed['instance_uuid']}
db.fixed_ip_create(elevated, values)
elevated.read_deleted = 'no'
newfixed = db.fixed_ip_get_by_address(elevated, fix_addr)
elevated.read_deleted = 'yes'
deallocate = self.network.deallocate_fixed_ip
self.assertRaises(test.TestingException, deallocate, context1,
fix_addr, 'fake')
def test_deallocate_fixed_no_vif(self):
"""Verify that deallocate doesn't raise when no vif is returned.
Ensures https://bugs.launchpad.net/nova/+bug/968457 doesn't return"""
def network_get(_context, network_id, project_only="allow_none"):
return networks[network_id]
self.stubs.Set(db, 'network_get', network_get)
def vif_get(_context, _vif_id):
return None
self.stubs.Set(db, 'virtual_interface_get', vif_get)
context1 = context.RequestContext('user', 'project1')
instance = db.instance_create(context1,
{'project_id': 'project1'})
elevated = context1.elevated()
fix_addr = db.fixed_ip_associate_pool(elevated, 1, instance['uuid'])
values = {'allocated': True,
'virtual_interface_id': 3}
db.fixed_ip_update(elevated, fix_addr, values)
self.flags(force_dhcp_release=True)
self.network.deallocate_fixed_ip(context1, fix_addr, 'fake')
    def test_fixed_ip_cleanup_fail(self):
        """Verify IP is not deallocated if the security group refresh fails."""
        def network_get(_context, network_id, project_only="allow_none"):
            return networks[network_id]

        self.stubs.Set(db, 'network_get', network_get)
        context1 = context.RequestContext('user', 'project1')
        instance = db.instance_create(context1,
                                      {'project_id': 'project1'})
        elevated = context1.elevated()
        fix_addr = db.fixed_ip_associate_pool(elevated, 1, instance['uuid'])
        values = {'allocated': True,
                  'virtual_interface_id': 3}
        db.fixed_ip_update(elevated, fix_addr, values)
        fixed = db.fixed_ip_get_by_address(elevated, fix_addr)
        network = db.network_get(elevated, fixed['network_id'])

        def fake_refresh(instance_uuid):
            # Force the security-group refresh step inside deallocation
            # to fail.
            raise test.TestingException()

        self.stubs.Set(self.network,
            '_do_trigger_security_group_members_refresh_for_instance',
            fake_refresh)
        self.assertRaises(test.TestingException,
                          self.network.deallocate_fixed_ip,
                          context1, fix_addr, 'fake')
        # The failure must leave the fixed IP still marked as allocated.
        fixed = db.fixed_ip_get_by_address(elevated, fix_addr)
        self.assertTrue(fixed['allocated'])
class CommonNetworkTestCase(test.TestCase):
    """Tests for NetworkManager behavior common to all network managers:

    CIDR validation/splitting during network creation, instance-uuid
    lookups by IP filter, and network get/disassociate operations.
    """

    def setUp(self):
        super(CommonNetworkTestCase, self).setUp()
        self.context = context.RequestContext('fake', 'fake')

    def fake_create_fixed_ips(self, context, network_id, fixed_cidr=None):
        """Stub for _create_fixed_ips that skips fixed-IP creation."""
        return None

    def test_deallocate_for_instance_passes_host_info(self):
        manager = fake_network.FakeNetworkManager()
        db = manager.db
        db.instance_get = lambda _x, _y: dict(uuid='ignoreduuid')
        db.virtual_interface_delete_by_instance = lambda _x, _y: None
        ctx = context.RequestContext('igonre', 'igonre')
        db.fixed_ip_get_by_instance = lambda x, y: [dict(address='1.2.3.4')]
        manager.deallocate_for_instance(
            ctx, instance_id='ignore', host='somehost')
        # The host kwarg must be forwarded to deallocate_fixed_ip.
        self.assertEquals([
            (ctx, '1.2.3.4', 'somehost')
        ], manager.deallocate_fixed_ip_calls)

    def test_remove_fixed_ip_from_instance(self):
        manager = fake_network.FakeNetworkManager()
        manager.remove_fixed_ip_from_instance(self.context, 99, HOST,
                                              '10.0.0.1')
        self.assertEquals(manager.deallocate_called, '10.0.0.1')

    def test_remove_fixed_ip_from_instance_bad_input(self):
        manager = fake_network.FakeNetworkManager()
        self.assertRaises(exception.FixedIpNotFoundForSpecificInstance,
                          manager.remove_fixed_ip_from_instance,
                          self.context, 99, HOST, 'bad input')

    def test_validate_cidrs(self):
        manager = fake_network.FakeNetworkManager()
        nets = manager.create_networks(None, 'fake', '192.168.0.0/24',
                                       False, 1, 256, None, None, None,
                                       None, None)
        self.assertEqual(1, len(nets))
        cidrs = [str(net['cidr']) for net in nets]
        self.assertTrue('192.168.0.0/24' in cidrs)

    def test_validate_cidrs_split_exact_in_half(self):
        manager = fake_network.FakeNetworkManager()
        nets = manager.create_networks(None, 'fake', '192.168.0.0/24',
                                       False, 2, 128, None, None, None,
                                       None, None)
        self.assertEqual(2, len(nets))
        cidrs = [str(net['cidr']) for net in nets]
        self.assertTrue('192.168.0.0/25' in cidrs)
        self.assertTrue('192.168.0.128/25' in cidrs)

    def test_validate_cidrs_split_cidr_in_use_middle_of_range(self):
        manager = fake_network.FakeNetworkManager()
        self.mox.StubOutWithMock(manager.db, 'network_get_all')
        ctxt = mox.IgnoreArg()
        manager.db.network_get_all(ctxt).AndReturn([{'id': 1,
                                     'cidr': '192.168.2.0/24'}])
        self.mox.ReplayAll()
        nets = manager.create_networks(None, 'fake', '192.168.0.0/16',
                                       False, 4, 256, None, None, None,
                                       None, None)
        self.assertEqual(4, len(nets))
        cidrs = [str(net['cidr']) for net in nets]
        exp_cidrs = ['192.168.0.0/24', '192.168.1.0/24', '192.168.3.0/24',
                     '192.168.4.0/24']
        for exp_cidr in exp_cidrs:
            self.assertTrue(exp_cidr in cidrs)
        # The in-use subnet must be skipped.
        self.assertFalse('192.168.2.0/24' in cidrs)

    def test_validate_cidrs_smaller_subnet_in_use(self):
        manager = fake_network.FakeNetworkManager()
        self.mox.StubOutWithMock(manager.db, 'network_get_all')
        ctxt = mox.IgnoreArg()
        manager.db.network_get_all(ctxt).AndReturn([{'id': 1,
                                     'cidr': '192.168.2.9/25'}])
        self.mox.ReplayAll()
        # ValueError: requested cidr (192.168.2.0/24) conflicts with
        #             existing smaller cidr
        args = (None, 'fake', '192.168.2.0/24', False, 1, 256, None, None,
                None, None, None)
        self.assertRaises(ValueError, manager.create_networks, *args)

    def test_validate_cidrs_split_smaller_cidr_in_use(self):
        manager = fake_network.FakeNetworkManager()
        self.mox.StubOutWithMock(manager.db, 'network_get_all')
        ctxt = mox.IgnoreArg()
        manager.db.network_get_all(ctxt).AndReturn([{'id': 1,
                                     'cidr': '192.168.2.0/25'}])
        self.mox.ReplayAll()
        nets = manager.create_networks(None, 'fake', '192.168.0.0/16',
                                       False, 4, 256, None, None, None, None,
                                       None)
        self.assertEqual(4, len(nets))
        cidrs = [str(net['cidr']) for net in nets]
        exp_cidrs = ['192.168.0.0/24', '192.168.1.0/24', '192.168.3.0/24',
                     '192.168.4.0/24']
        for exp_cidr in exp_cidrs:
            self.assertTrue(exp_cidr in cidrs)
        self.assertFalse('192.168.2.0/24' in cidrs)

    def test_validate_cidrs_split_smaller_cidr_in_use2(self):
        manager = fake_network.FakeNetworkManager()
        self.mox.StubOutWithMock(manager.db, 'network_get_all')
        ctxt = mox.IgnoreArg()
        manager.db.network_get_all(ctxt).AndReturn([{'id': 1,
                                     'cidr': '192.168.2.9/29'}])
        self.mox.ReplayAll()
        nets = manager.create_networks(None, 'fake', '192.168.2.0/24',
                                       False, 3, 32, None, None, None, None,
                                       None)
        self.assertEqual(3, len(nets))
        cidrs = [str(net['cidr']) for net in nets]
        exp_cidrs = ['192.168.2.32/27', '192.168.2.64/27', '192.168.2.96/27']
        for exp_cidr in exp_cidrs:
            self.assertTrue(exp_cidr in cidrs)
        self.assertFalse('192.168.2.0/27' in cidrs)

    def test_validate_cidrs_split_all_in_use(self):
        manager = fake_network.FakeNetworkManager()
        self.mox.StubOutWithMock(manager.db, 'network_get_all')
        ctxt = mox.IgnoreArg()
        in_use = [{'id': 1, 'cidr': '192.168.2.9/29'},
                  {'id': 2, 'cidr': '192.168.2.64/26'},
                  {'id': 3, 'cidr': '192.168.2.128/26'}]
        manager.db.network_get_all(ctxt).AndReturn(in_use)
        self.mox.ReplayAll()
        args = (None, 'fake', '192.168.2.0/24', False, 3, 64, None, None,
                None, None, None)
        # ValueError: Not enough subnets avail to satisfy requested num_
        #             networks - some subnets in requested range already
        #             in use
        self.assertRaises(ValueError, manager.create_networks, *args)

    def test_validate_cidrs_one_in_use(self):
        manager = fake_network.FakeNetworkManager()
        args = (None, 'fake', '192.168.0.0/24', False, 2, 256, None, None,
                None, None, None)
        # ValueError: network_size * num_networks exceeds cidr size
        self.assertRaises(ValueError, manager.create_networks, *args)

    def test_validate_cidrs_already_used(self):
        manager = fake_network.FakeNetworkManager()
        self.mox.StubOutWithMock(manager.db, 'network_get_all')
        ctxt = mox.IgnoreArg()
        manager.db.network_get_all(ctxt).AndReturn([{'id': 1,
                                     'cidr': '192.168.0.0/24'}])
        self.mox.ReplayAll()
        # ValueError: cidr already in use
        args = (None, 'fake', '192.168.0.0/24', False, 1, 256, None, None,
                None, None, None)
        self.assertRaises(ValueError, manager.create_networks, *args)

    def test_validate_cidrs_too_many(self):
        manager = fake_network.FakeNetworkManager()
        args = (None, 'fake', '192.168.0.0/24', False, 200, 256, None, None,
                None, None, None)
        # ValueError: Not enough subnets avail to satisfy requested
        #             num_networks
        self.assertRaises(ValueError, manager.create_networks, *args)

    def test_validate_cidrs_split_partial(self):
        manager = fake_network.FakeNetworkManager()
        nets = manager.create_networks(None, 'fake', '192.168.0.0/16',
                                       False, 2, 256, None, None, None, None,
                                       None)
        returned_cidrs = [str(net['cidr']) for net in nets]
        self.assertTrue('192.168.0.0/24' in returned_cidrs)
        self.assertTrue('192.168.1.0/24' in returned_cidrs)

    def test_validate_cidrs_conflict_existing_supernet(self):
        manager = fake_network.FakeNetworkManager()
        self.mox.StubOutWithMock(manager.db, 'network_get_all')
        ctxt = mox.IgnoreArg()
        fakecidr = [{'id': 1, 'cidr': '192.168.0.0/8'}]
        manager.db.network_get_all(ctxt).AndReturn(fakecidr)
        self.mox.ReplayAll()
        args = (None, 'fake', '192.168.0.0/24', False, 1, 256, None, None,
                None, None, None)
        # ValueError: requested cidr (192.168.0.0/24) conflicts
        #             with existing supernet
        self.assertRaises(ValueError, manager.create_networks, *args)

    def test_create_networks(self):
        cidr = '192.168.0.0/24'
        manager = fake_network.FakeNetworkManager()
        self.stubs.Set(manager, '_create_fixed_ips',
                       self.fake_create_fixed_ips)
        args = [None, 'foo', cidr, None, 1, 256, 'fd00::/48', None, None,
                None, None, None]
        self.assertTrue(manager.create_networks(*args))

    def test_create_networks_cidr_already_used(self):
        manager = fake_network.FakeNetworkManager()
        self.mox.StubOutWithMock(manager.db, 'network_get_all')
        ctxt = mox.IgnoreArg()
        fakecidr = [{'id': 1, 'cidr': '192.168.0.0/24'}]
        manager.db.network_get_all(ctxt).AndReturn(fakecidr)
        self.mox.ReplayAll()
        args = [None, 'foo', '192.168.0.0/24', None, 1, 256,
                'fd00::/48', None, None, None, None, None]
        self.assertRaises(ValueError, manager.create_networks, *args)

    def test_create_networks_many(self):
        cidr = '192.168.0.0/16'
        manager = fake_network.FakeNetworkManager()
        self.stubs.Set(manager, '_create_fixed_ips',
                       self.fake_create_fixed_ips)
        args = [None, 'foo', cidr, None, 10, 256, 'fd00::/48', None, None,
                None, None, None]
        self.assertTrue(manager.create_networks(*args))

    def test_get_instance_uuids_by_ip_regex(self):
        manager = fake_network.FakeNetworkManager()
        _vifs = manager.db.virtual_interface_get_all(None)
        fake_context = context.RequestContext('user', 'project')

        # Greedily get everything
        res = manager.get_instance_uuids_by_ip_filter(fake_context,
                                                      {'ip': '.*'})
        self.assertEqual(len(res), len(_vifs))

        # Doesn't exist
        res = manager.get_instance_uuids_by_ip_filter(fake_context,
                                                      {'ip': '10.0.0.1'})
        self.assertFalse(res)

        # Get instance 1
        res = manager.get_instance_uuids_by_ip_filter(fake_context,
                                                      {'ip': '172.16.0.2'})
        self.assertTrue(res)
        self.assertEqual(len(res), 1)
        self.assertEqual(res[0]['instance_uuid'], _vifs[1]['instance_uuid'])

        # Get instance 2
        res = manager.get_instance_uuids_by_ip_filter(fake_context,
                                                      {'ip': '173.16.0.2'})
        self.assertTrue(res)
        self.assertEqual(len(res), 1)
        self.assertEqual(res[0]['instance_uuid'], _vifs[2]['instance_uuid'])

        # Get instance 0 and 1
        res = manager.get_instance_uuids_by_ip_filter(fake_context,
                                                      {'ip': '172.16.0.*'})
        self.assertTrue(res)
        self.assertEqual(len(res), 2)
        self.assertEqual(res[0]['instance_uuid'], _vifs[0]['instance_uuid'])
        self.assertEqual(res[1]['instance_uuid'], _vifs[1]['instance_uuid'])

        # Get instance 1 and 2
        res = manager.get_instance_uuids_by_ip_filter(fake_context,
                                                      {'ip': '17..16.0.2'})
        self.assertTrue(res)
        self.assertEqual(len(res), 2)
        self.assertEqual(res[0]['instance_uuid'], _vifs[1]['instance_uuid'])
        self.assertEqual(res[1]['instance_uuid'], _vifs[2]['instance_uuid'])

    def test_get_instance_uuids_by_ipv6_regex(self):
        manager = fake_network.FakeNetworkManager()
        _vifs = manager.db.virtual_interface_get_all(None)
        fake_context = context.RequestContext('user', 'project')

        # Greedily get everything
        res = manager.get_instance_uuids_by_ip_filter(fake_context,
                                                      {'ip6': '.*'})
        self.assertEqual(len(res), len(_vifs))

        # Doesn't exist
        res = manager.get_instance_uuids_by_ip_filter(fake_context,
                                                      {'ip6': '.*1034.*'})
        self.assertFalse(res)

        # Get instance 1
        res = manager.get_instance_uuids_by_ip_filter(fake_context,
                                                      {'ip6': '2001:.*2'})
        self.assertTrue(res)
        self.assertEqual(len(res), 1)
        self.assertEqual(res[0]['instance_uuid'], _vifs[1]['instance_uuid'])

        # Get instance 2
        ip6 = '2001:db8:69:1f:dead:beff:feff:ef03'
        res = manager.get_instance_uuids_by_ip_filter(fake_context,
                                                      {'ip6': ip6})
        self.assertTrue(res)
        self.assertEqual(len(res), 1)
        self.assertEqual(res[0]['instance_uuid'], _vifs[2]['instance_uuid'])

        # Get instance 0 and 1
        res = manager.get_instance_uuids_by_ip_filter(fake_context,
                                                      {'ip6': '.*ef0[1,2]'})
        self.assertTrue(res)
        self.assertEqual(len(res), 2)
        self.assertEqual(res[0]['instance_uuid'], _vifs[0]['instance_uuid'])
        self.assertEqual(res[1]['instance_uuid'], _vifs[1]['instance_uuid'])

        # Get instance 1 and 2
        ip6 = '2001:db8:69:1.:dead:beff:feff:ef0.'
        res = manager.get_instance_uuids_by_ip_filter(fake_context,
                                                      {'ip6': ip6})
        self.assertTrue(res)
        self.assertEqual(len(res), 2)
        self.assertEqual(res[0]['instance_uuid'], _vifs[1]['instance_uuid'])
        self.assertEqual(res[1]['instance_uuid'], _vifs[2]['instance_uuid'])

    def test_get_instance_uuids_by_ip(self):
        manager = fake_network.FakeNetworkManager()
        _vifs = manager.db.virtual_interface_get_all(None)
        fake_context = context.RequestContext('user', 'project')

        # No regex for you!
        res = manager.get_instance_uuids_by_ip_filter(fake_context,
                                                      {'fixed_ip': '.*'})
        self.assertFalse(res)

        # Doesn't exist
        ip = '10.0.0.1'
        res = manager.get_instance_uuids_by_ip_filter(fake_context,
                                                      {'fixed_ip': ip})
        self.assertFalse(res)

        # Get instance 1
        ip = '172.16.0.2'
        res = manager.get_instance_uuids_by_ip_filter(fake_context,
                                                      {'fixed_ip': ip})
        self.assertTrue(res)
        self.assertEqual(len(res), 1)
        self.assertEqual(res[0]['instance_uuid'], _vifs[1]['instance_uuid'])

        # Get instance 2
        ip = '173.16.0.2'
        res = manager.get_instance_uuids_by_ip_filter(fake_context,
                                                      {'fixed_ip': ip})
        self.assertTrue(res)
        self.assertEqual(len(res), 1)
        self.assertEqual(res[0]['instance_uuid'], _vifs[2]['instance_uuid'])

    def test_get_network(self):
        manager = fake_network.FakeNetworkManager()
        fake_context = context.RequestContext('user', 'project')
        self.mox.StubOutWithMock(manager.db, 'network_get_by_uuid')
        manager.db.network_get_by_uuid(mox.IgnoreArg(),
                                       mox.IgnoreArg()).AndReturn(networks[0])
        self.mox.ReplayAll()
        uuid = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        network = manager.get_network(fake_context, uuid)
        self.assertEqual(network['uuid'], uuid)

    def test_get_network_not_found(self):
        manager = fake_network.FakeNetworkManager()
        fake_context = context.RequestContext('user', 'project')
        self.mox.StubOutWithMock(manager.db, 'network_get_by_uuid')
        manager.db.network_get_by_uuid(
                mox.IgnoreArg(),
                mox.IgnoreArg()).AndRaise(exception.NetworkNotFoundForUUID)
        self.mox.ReplayAll()
        uuid = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        self.assertRaises(exception.NetworkNotFound,
                          manager.get_network, fake_context, uuid)

    def test_get_all_networks(self):
        manager = fake_network.FakeNetworkManager()
        fake_context = context.RequestContext('user', 'project')
        self.mox.StubOutWithMock(manager.db, 'network_get_all')
        manager.db.network_get_all(mox.IgnoreArg()).AndReturn(networks)
        self.mox.ReplayAll()
        output = manager.get_all_networks(fake_context)
        # Bug fix: assert on the method's result, not on the module-level
        # `networks` fixture (the original `len(networks)` check passed
        # regardless of what get_all_networks actually returned).
        self.assertEqual(len(output), 2)
        self.assertEqual(output[0]['uuid'],
                         'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa')
        self.assertEqual(output[1]['uuid'],
                         'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb')

    def test_disassociate_network(self):
        manager = fake_network.FakeNetworkManager()
        fake_context = context.RequestContext('user', 'project')
        self.mox.StubOutWithMock(manager.db, 'network_get_by_uuid')
        manager.db.network_get_by_uuid(mox.IgnoreArg(),
                                       mox.IgnoreArg()).AndReturn(networks[0])
        self.mox.ReplayAll()
        uuid = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        manager.disassociate_network(fake_context, uuid)

    def test_disassociate_network_not_found(self):
        manager = fake_network.FakeNetworkManager()
        fake_context = context.RequestContext('user', 'project')
        self.mox.StubOutWithMock(manager.db, 'network_get_by_uuid')
        manager.db.network_get_by_uuid(
                mox.IgnoreArg(),
                mox.IgnoreArg()).AndRaise(exception.NetworkNotFoundForUUID)
        self.mox.ReplayAll()
        uuid = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        self.assertRaises(exception.NetworkNotFound,
                          manager.disassociate_network, fake_context, uuid)
class TestRPCFixedManager(network_manager.RPCAllocateFixedIP,
                          network_manager.NetworkManager):
    """Dummy manager mixing RPCAllocateFixedIP into NetworkManager for tests."""
class RPCAllocateTestCase(test.TestCase):
    """Tests nova.network.manager.RPCAllocateFixedIP."""

    def setUp(self):
        super(RPCAllocateTestCase, self).setUp()
        self.rpc_fixed = TestRPCFixedManager()
        self.context = context.RequestContext('fake', 'fake')

    def test_rpc_allocate(self):
        """Regression test for bug 855030.

        Makes sure _rpc_allocate_fixed_ip returns a value so the call
        returns properly and the greenpool completes.
        """
        expected_address = '10.10.10.10'

        def stub_allocate_fixed_ip(*args, **kwargs):
            return expected_address

        def stub_network_get(*args, **kwargs):
            return {}

        self.stubs.Set(self.rpc_fixed, 'allocate_fixed_ip',
                       stub_allocate_fixed_ip)
        self.stubs.Set(self.rpc_fixed.db, 'network_get', stub_network_get)
        result = self.rpc_fixed._rpc_allocate_fixed_ip(self.context,
                                                       'fake_instance',
                                                       'fake_network')
        self.assertEqual(result, expected_address)
class TestFloatingIPManager(network_manager.FloatingIP,
                            network_manager.NetworkManager):
    """Dummy manager mixing the FloatingIP mixin into NetworkManager for tests."""
class AllocateTestCase(test.TestCase):
    """End-to-end allocate/deallocate_for_instance against live services."""

    def test_allocate_for_instance(self):
        address = "10.10.10.10"
        # With auto_assign_floating_ip set, allocation also grabs a
        # floating IP from the pool created below.
        self.flags(auto_assign_floating_ip=True)
        self.compute = self.start_service('compute')
        self.network = self.start_service('network')

        self.user_id = 'fake'
        self.project_id = 'fake'
        self.context = context.RequestContext(self.user_id,
                                              self.project_id,
                                              is_admin=True)
        db.floating_ip_create(self.context,
                              {'address': address,
                               'pool': 'nova'})
        inst = db.instance_create(self.context, {'host': self.compute.host,
                                                 'instance_type_id': 1})
        networks = db.network_get_all(self.context)
        # Point every network at the running network service host so the
        # allocation is handled locally.
        for network in networks:
            db.network_update(self.context, network['id'],
                              {'host': self.network.host})
        project_id = self.context.project_id
        nw_info = self.network.allocate_for_instance(self.context,
            instance_id=inst['id'], instance_uuid=inst['uuid'],
            host=inst['host'], vpn=None, rxtx_factor=3,
            project_id=project_id)
        self.assertEquals(1, len(nw_info))
        fixed_ip = nw_info.fixed_ips()[0]['address']
        self.assertTrue(utils.is_valid_ipv4(fixed_ip))
        self.network.deallocate_for_instance(self.context,
                instance_id=inst['id'],
                fixed_ips=fixed_ip,
                host=self.network.host,
                project_id=project_id)
class FloatingIPTestCase(test.TestCase):
    """Tests nova.network.manager.FloatingIP"""

    def setUp(self):
        super(FloatingIPTestCase, self).setUp()
        self.tempdir = tempfile.mkdtemp()
        self.flags(logdir=self.tempdir)
        self.network = TestFloatingIPManager()
        # Use the MiniDNS driver so DNS entries are backed by a temp file.
        temp = importutils.import_object('nova.network.minidns.MiniDNS')
        self.network.floating_dns_manager = temp
        self.network.db = db
        self.project_id = 'testproject'
        self.context = context.RequestContext('testuser', self.project_id,
                                              is_admin=False)

    def tearDown(self):
        shutil.rmtree(self.tempdir)
        super(FloatingIPTestCase, self).tearDown()

    def test_double_deallocation(self):
        instance_ref = db.api.instance_create(self.context,
                {"project_id": self.project_id})
        # Run it twice to make it fault if it does not handle
        # instances without fixed networks
        # If this fails in either, it does not handle having no addresses
        self.network.deallocate_for_instance(self.context,
                instance_id=instance_ref['id'])
        self.network.deallocate_for_instance(self.context,
                instance_id=instance_ref['id'])

    def test_deallocation_deleted_instance(self):
        self.stubs.Set(self.network, '_teardown_network_on_host',
                       lambda *args, **kwargs: None)
        # The instance is created already deleted; elevated read_deleted
        # context is needed below to see its fixed IP row.
        instance = db.api.instance_create(self.context, {
                'project_id': self.project_id, 'deleted': True})
        network = db.api.network_create_safe(self.context.elevated(), {
                'project_id': self.project_id})
        addr = db.fixed_ip_create(self.context, {'allocated': True,
                'instance_uuid': instance['uuid'], 'address': '10.1.1.1',
                'network_id': network['id']})
        fixed = db.fixed_ip_get_by_address(
                self.context.elevated(read_deleted='yes'), addr)
        db.api.floating_ip_create(self.context, {
                'address': '10.10.10.10', 'instance_uuid': instance['uuid'],
                'fixed_ip_id': fixed['id'],
                'project_id': self.project_id})
        # Must not raise even though the instance is deleted.
        self.network.deallocate_for_instance(self.context,
                instance_id=instance['id'])

    def test_deallocation_duplicate_floating_ip(self):
        self.stubs.Set(self.network, '_teardown_network_on_host',
                       lambda *args, **kwargs: None)
        instance = db.api.instance_create(self.context, {
                'project_id': self.project_id})
        network = db.api.network_create_safe(self.context.elevated(), {
                'project_id': self.project_id})
        addr = db.fixed_ip_create(self.context, {'allocated': True,
                'instance_uuid': instance['uuid'], 'address': '10.1.1.1',
                'network_id': network['id']})
        fixed = db.fixed_ip_get_by_address(
                self.context.elevated(read_deleted='yes'), addr)
        # A deleted floating IP with the same address must not confuse
        # deallocation of the live one.
        db.api.floating_ip_create(self.context, {
                'address': '10.10.10.10',
                'deleted': True})
        db.api.floating_ip_create(self.context, {
                'address': '10.10.10.10', 'instance_uuid': instance['uuid'],
                'fixed_ip_id': fixed['id'],
                'project_id': self.project_id})
        self.network.deallocate_for_instance(self.context,
                instance_id=instance['id'])

    def test_floating_dns_create_conflict(self):
        zone = "example.org"
        address1 = "10.10.10.11"
        name1 = "foo"
        name2 = "bar"

        self.network.add_dns_entry(self.context, address1, name1, "A", zone)

        # Adding the same name in the same zone again must conflict.
        self.assertRaises(exception.FloatingIpDNSExists,
                          self.network.add_dns_entry, self.context,
                          address1, name1, "A", zone)

    def test_floating_create_and_get(self):
        zone = "example.org"
        address1 = "10.10.10.11"
        name1 = "foo"
        name2 = "bar"
        entries = self.network.get_dns_entries_by_address(self.context,
                                                          address1, zone)
        self.assertFalse(entries)

        self.network.add_dns_entry(self.context, address1, name1, "A", zone)
        self.network.add_dns_entry(self.context, address1, name2, "A", zone)
        entries = self.network.get_dns_entries_by_address(self.context,
                                                          address1, zone)
        # Both names map to the address; name lookup returns the address.
        self.assertEquals(len(entries), 2)
        self.assertEquals(entries[0], name1)
        self.assertEquals(entries[1], name2)

        entries = self.network.get_dns_entries_by_name(self.context,
                                                       name1, zone)
        self.assertEquals(len(entries), 1)
        self.assertEquals(entries[0], address1)

    def test_floating_dns_delete(self):
        zone = "example.org"
        address1 = "10.10.10.11"
        name1 = "foo"
        name2 = "bar"

        self.network.add_dns_entry(self.context, address1, name1, "A", zone)
        self.network.add_dns_entry(self.context, address1, name2, "A", zone)
        self.network.delete_dns_entry(self.context, name1, zone)

        entries = self.network.get_dns_entries_by_address(self.context,
                                                          address1, zone)
        self.assertEquals(len(entries), 1)
        self.assertEquals(entries[0], name2)

        # Deleting an already-deleted entry raises NotFound.
        self.assertRaises(exception.NotFound,
                          self.network.delete_dns_entry, self.context,
                          name1, zone)

    def test_floating_dns_domains_public(self):
        zone1 = "testzone"
        domain1 = "example.org"
        domain2 = "example.com"
        address1 = '10.10.10.10'
        entryname = 'testentry'

        context_admin = context.RequestContext('testuser', 'testproject',
                                               is_admin=True)

        # Public domain creation/deletion is admin-only.
        self.assertRaises(exception.AdminRequired,
                          self.network.create_public_dns_domain, self.context,
                          domain1, zone1)
        self.network.create_public_dns_domain(context_admin, domain1,
                                              'testproject')
        self.network.create_public_dns_domain(context_admin, domain2,
                                              'fakeproject')

        domains = self.network.get_dns_domains(self.context)
        self.assertEquals(len(domains), 2)
        self.assertEquals(domains[0]['domain'], domain1)
        self.assertEquals(domains[1]['domain'], domain2)
        self.assertEquals(domains[0]['project'], 'testproject')
        self.assertEquals(domains[1]['project'], 'fakeproject')

        self.network.add_dns_entry(self.context, address1, entryname,
                                   'A', domain1)
        entries = self.network.get_dns_entries_by_name(self.context,
                                                       entryname, domain1)
        self.assertEquals(len(entries), 1)
        self.assertEquals(entries[0], address1)

        self.assertRaises(exception.AdminRequired,
                          self.network.delete_dns_domain, self.context,
                          domain1)
        self.network.delete_dns_domain(context_admin, domain1)
        self.network.delete_dns_domain(context_admin, domain2)

        # Verify that deleting the domain deleted the associated entry
        entries = self.network.get_dns_entries_by_name(self.context,
                                                       entryname, domain1)
        self.assertFalse(entries)

    def test_delete_all_by_ip(self):
        domain1 = "example.org"
        domain2 = "example.com"
        address = "10.10.10.10"
        name1 = "foo"
        name2 = "bar"

        def fake_domains(context):
            return [{'domain': 'example.org', 'scope': 'public'},
                    {'domain': 'example.com', 'scope': 'public'},
                    {'domain': 'test.example.org', 'scope': 'public'}]

        self.stubs.Set(self.network, 'get_dns_domains', fake_domains)

        context_admin = context.RequestContext('testuser', 'testproject',
                                               is_admin=True)

        self.network.create_public_dns_domain(context_admin, domain1,
                                              'testproject')
        self.network.create_public_dns_domain(context_admin, domain2,
                                              'fakeproject')

        domains = self.network.get_dns_domains(self.context)
        for domain in domains:
            self.network.add_dns_entry(self.context, address,
                                       name1, "A", domain['domain'])
            self.network.add_dns_entry(self.context, address,
                                       name2, "A", domain['domain'])
            entries = self.network.get_dns_entries_by_address(self.context,
                                                              address,
                                                              domain['domain'])
            self.assertEquals(len(entries), 2)

        # One call must remove the entries in every domain.
        self.network._delete_all_entries_for_ip(self.context, address)

        for domain in domains:
            entries = self.network.get_dns_entries_by_address(self.context,
                                                              address,
                                                              domain['domain'])
            self.assertFalse(entries)

        self.network.delete_dns_domain(context_admin, domain1)
        self.network.delete_dns_domain(context_admin, domain2)

    def test_mac_conflicts(self):
        """Make sure MAC collisions are retried"""
        self.flags(create_unique_mac_address_attempts=3)
        ctxt = context.RequestContext('testuser', 'testproject', is_admin=True)
        macs = ['bb:bb:bb:bb:bb:bb', 'aa:aa:aa:aa:aa:aa']

        # Create a VIF with aa:aa:aa:aa:aa:aa
        crash_test_dummy_vif = {
            'address': macs[1],
            'instance_uuid': 'fake_uuid',
            'network_id': 'fake_net',
            'uuid': 'fake_uuid',
            }
        self.network.db.virtual_interface_create(ctxt, crash_test_dummy_vif)

        # Hand out a collision first, then a legit MAC
        def fake_gen_mac():
            return macs.pop()
        self.stubs.Set(utils, 'generate_mac_address', fake_gen_mac)

        # SQLite doesn't seem to honor the uniqueness constraint on the
        # address column, so fake the collision-avoidance here
        def fake_vif_save(vif):
            if vif.address == crash_test_dummy_vif['address']:
                raise exception.DBError("If you're smart, you'll retry!")
        self.stubs.Set(models.VirtualInterface, 'save', fake_vif_save)

        # Attempt to add another and make sure that both MACs are consumed
        # by the retry loop
        self.network.add_virtual_interface(ctxt, 'fake_uuid', 'fake_net')
        self.assertEqual(macs, [])
class NetworkPolicyTestCase(test.TestCase):
    """Checks that network_manager.check_policy enforces nova policy."""

    def setUp(self):
        super(NetworkPolicyTestCase, self).setUp()
        nova.policy.reset()
        nova.policy.init()
        self.context = context.get_admin_context()

    def tearDown(self):
        super(NetworkPolicyTestCase, self).tearDown()
        nova.policy.reset()

    def _set_rules(self, rules):
        """Install the given policy rules into the policy brain."""
        nova.common.policy.set_brain(nova.common.policy.HttpBrain(rules))

    def test_check_policy(self):
        self.mox.StubOutWithMock(nova.policy, 'enforce')
        # check_policy must build the target from the caller's context
        # and enforce the 'network:<action>' rule.
        expected_target = {
            'user_id': self.context.user_id,
            'project_id': self.context.project_id,
        }
        nova.policy.enforce(self.context, 'network:get_all', expected_target)
        self.mox.ReplayAll()
        network_manager.check_policy(self.context, 'get_all')
class InstanceDNSTestCase(test.TestCase):
    """Tests nova.network.manager instance DNS"""

    def setUp(self):
        super(InstanceDNSTestCase, self).setUp()
        self.tempdir = tempfile.mkdtemp()
        self.flags(logdir=self.tempdir)
        self.network = TestFloatingIPManager()
        # Instance DNS uses MiniDNS; floating DNS gets the no-op base driver.
        temp = importutils.import_object('nova.network.minidns.MiniDNS')
        self.network.instance_dns_manager = temp
        temp = importutils.import_object('nova.network.dns_driver.DNSDriver')
        self.network.floating_dns_manager = temp
        self.network.db = db
        self.project_id = 'testproject'
        self.context = context.RequestContext('testuser', self.project_id,
                                              is_admin=False)

    def tearDown(self):
        shutil.rmtree(self.tempdir)
        super(InstanceDNSTestCase, self).tearDown()

    def test_dns_domains_private(self):
        zone1 = 'testzone'
        domain1 = 'example.org'

        context_admin = context.RequestContext('testuser', 'testproject',
                                               is_admin=True)

        # Private domain creation/deletion is admin-only.
        self.assertRaises(exception.AdminRequired,
                          self.network.create_private_dns_domain, self.context,
                          domain1, zone1)
        self.network.create_private_dns_domain(context_admin, domain1, zone1)

        domains = self.network.get_dns_domains(self.context)
        self.assertEquals(len(domains), 1)
        self.assertEquals(domains[0]['domain'], domain1)
        self.assertEquals(domains[0]['availability_zone'], zone1)

        self.assertRaises(exception.AdminRequired,
                          self.network.delete_dns_domain, self.context,
                          domain1)
        self.network.delete_dns_domain(context_admin, domain1)
# Module-level DNS domain fixtures shared by LdapDNSTestCase below.
domain1 = "example.org"
domain2 = "example.com"
class LdapDNSTestCase(test.TestCase):
    """Tests nova.network.ldapdns.LdapDNS"""

    def setUp(self):
        super(LdapDNSTestCase, self).setUp()

        # Swap the real ldap module for the fake one BEFORE importing the
        # driver, so FakeLdapDNS binds against the in-memory fake LDAP;
        # the original module is restored in tearDown.
        self.saved_ldap = sys.modules.get('ldap')
        import nova.auth.fakeldap
        sys.modules['ldap'] = nova.auth.fakeldap

        temp = importutils.import_object('nova.network.ldapdns.FakeLdapDNS')
        self.driver = temp
        self.driver.create_domain(domain1)
        self.driver.create_domain(domain2)

    def tearDown(self):
        self.driver.delete_domain(domain1)
        self.driver.delete_domain(domain2)
        sys.modules['ldap'] = self.saved_ldap
        super(LdapDNSTestCase, self).tearDown()

    def test_ldap_dns_domains(self):
        domains = self.driver.get_domains()
        self.assertEqual(len(domains), 2)
        self.assertIn(domain1, domains)
        self.assertIn(domain2, domains)

    def test_ldap_dns_create_conflict(self):
        address1 = "10.10.10.11"
        name1 = "foo"
        name2 = "bar"

        self.driver.create_entry(name1, address1, "A", domain1)

        # Re-creating the same name in the same domain must conflict.
        self.assertRaises(exception.FloatingIpDNSExists,
                          self.driver.create_entry,
                          name1, address1, "A", domain1)

    def test_ldap_dns_create_and_get(self):
        address1 = "10.10.10.11"
        name1 = "foo"
        name2 = "bar"
        entries = self.driver.get_entries_by_address(address1, domain1)
        self.assertFalse(entries)

        self.driver.create_entry(name1, address1, "A", domain1)
        self.driver.create_entry(name2, address1, "A", domain1)
        entries = self.driver.get_entries_by_address(address1, domain1)
        self.assertEquals(len(entries), 2)
        self.assertEquals(entries[0], name1)
        self.assertEquals(entries[1], name2)

        entries = self.driver.get_entries_by_name(name1, domain1)
        self.assertEquals(len(entries), 1)
        self.assertEquals(entries[0], address1)

    def test_ldap_dns_delete(self):
        address1 = "10.10.10.11"
        name1 = "foo"
        name2 = "bar"

        self.driver.create_entry(name1, address1, "A", domain1)
        self.driver.create_entry(name2, address1, "A", domain1)
        entries = self.driver.get_entries_by_address(address1, domain1)
        self.assertEquals(len(entries), 2)

        self.driver.delete_entry(name1, domain1)
        entries = self.driver.get_entries_by_address(address1, domain1)
        LOG.debug("entries: %s" % entries)
        self.assertEquals(len(entries), 1)
        self.assertEquals(entries[0], name2)

        # Deleting an already-deleted entry raises NotFound.
        self.assertRaises(exception.NotFound,
                          self.driver.delete_entry,
                          name1, domain1)
| {
"content_hash": "77aaa0632b7964228285ace8800c8e9f",
"timestamp": "",
"source": "github",
"line_count": 1949,
"max_line_length": 79,
"avg_line_length": 42.670087224217546,
"alnum_prop": 0.5494444711654081,
"repo_name": "paulmathews/nova",
"id": "f27a17658d3624dff607f973851e5f6efb13ce79",
"size": "83868",
"binary": false,
"copies": "1",
"ref": "refs/heads/stable/folsom",
"path": "nova/tests/network/test_manager.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "16002"
},
{
"name": "JavaScript",
"bytes": "7403"
},
{
"name": "Python",
"bytes": "7293434"
},
{
"name": "Shell",
"bytes": "16910"
}
],
"symlink_target": ""
} |
"""
Support for interfacing with the XBMC/Kodi JSON-RPC API.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/media_player.kodi/
"""
import asyncio
from collections import OrderedDict
from functools import wraps
import logging
import re
import socket
import urllib
import aiohttp
import voluptuous as vol
from homeassistant.components.media_player import (
MediaPlayerDevice, MEDIA_PLAYER_SCHEMA, PLATFORM_SCHEMA)
from homeassistant.components.media_player.const import (
DOMAIN, MEDIA_TYPE_CHANNEL, MEDIA_TYPE_MOVIE,
MEDIA_TYPE_MUSIC, MEDIA_TYPE_PLAYLIST, MEDIA_TYPE_TVSHOW, MEDIA_TYPE_VIDEO,
SUPPORT_NEXT_TRACK, SUPPORT_PAUSE, SUPPORT_PLAY,
SUPPORT_PLAY_MEDIA, SUPPORT_PREVIOUS_TRACK, SUPPORT_SEEK,
SUPPORT_SHUFFLE_SET, SUPPORT_STOP, SUPPORT_TURN_OFF, SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_SET, SUPPORT_VOLUME_STEP)
from homeassistant.const import (
CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_PORT, CONF_PROXY_SSL,
CONF_TIMEOUT, CONF_USERNAME, EVENT_HOMEASSISTANT_STOP, STATE_IDLE,
STATE_OFF, STATE_PAUSED, STATE_PLAYING)
from homeassistant.core import callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers import script
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.template import Template
from homeassistant.util.yaml import dump
import homeassistant.util.dt as dt_util
REQUIREMENTS = ['jsonrpc-async==0.6', 'jsonrpc-websocket==0.6']
_LOGGER = logging.getLogger(__name__)
EVENT_KODI_CALL_METHOD_RESULT = 'kodi_call_method_result'
CONF_TCP_PORT = 'tcp_port'
CONF_TURN_ON_ACTION = 'turn_on_action'
CONF_TURN_OFF_ACTION = 'turn_off_action'
CONF_ENABLE_WEBSOCKET = 'enable_websocket'
DEFAULT_NAME = 'Kodi'
DEFAULT_PORT = 8080
DEFAULT_TCP_PORT = 9090
DEFAULT_TIMEOUT = 5
DEFAULT_PROXY_SSL = False
DEFAULT_ENABLE_WEBSOCKET = True
DEPRECATED_TURN_OFF_ACTIONS = {
None: None,
'quit': 'Application.Quit',
'hibernate': 'System.Hibernate',
'suspend': 'System.Suspend',
'reboot': 'System.Reboot',
'shutdown': 'System.Shutdown'
}
# https://github.com/xbmc/xbmc/blob/master/xbmc/media/MediaType.h
MEDIA_TYPES = {
'music': MEDIA_TYPE_MUSIC,
'artist': MEDIA_TYPE_MUSIC,
'album': MEDIA_TYPE_MUSIC,
'song': MEDIA_TYPE_MUSIC,
'video': MEDIA_TYPE_VIDEO,
'set': MEDIA_TYPE_PLAYLIST,
'musicvideo': MEDIA_TYPE_VIDEO,
'movie': MEDIA_TYPE_MOVIE,
'tvshow': MEDIA_TYPE_TVSHOW,
'season': MEDIA_TYPE_TVSHOW,
'episode': MEDIA_TYPE_TVSHOW,
# Type 'channel' is used for radio or tv streams from pvr
'channel': MEDIA_TYPE_CHANNEL,
# Type 'audio' is used for audio media that Kodi couldn't scrobble
'audio': MEDIA_TYPE_MUSIC,
}
SUPPORT_KODI = SUPPORT_PAUSE | SUPPORT_VOLUME_SET | SUPPORT_VOLUME_MUTE | \
SUPPORT_PREVIOUS_TRACK | SUPPORT_NEXT_TRACK | SUPPORT_SEEK | \
SUPPORT_PLAY_MEDIA | SUPPORT_STOP | SUPPORT_SHUFFLE_SET | \
SUPPORT_PLAY | SUPPORT_VOLUME_STEP
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_TCP_PORT, default=DEFAULT_TCP_PORT): cv.port,
vol.Optional(CONF_PROXY_SSL, default=DEFAULT_PROXY_SSL): cv.boolean,
vol.Optional(CONF_TURN_ON_ACTION): cv.SCRIPT_SCHEMA,
vol.Optional(CONF_TURN_OFF_ACTION):
vol.Any(cv.SCRIPT_SCHEMA, vol.In(DEPRECATED_TURN_OFF_ACTIONS)),
vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
vol.Inclusive(CONF_USERNAME, 'auth'): cv.string,
vol.Inclusive(CONF_PASSWORD, 'auth'): cv.string,
vol.Optional(CONF_ENABLE_WEBSOCKET, default=DEFAULT_ENABLE_WEBSOCKET):
cv.boolean,
})
SERVICE_ADD_MEDIA = 'kodi_add_to_playlist'
SERVICE_CALL_METHOD = 'kodi_call_method'
DATA_KODI = 'kodi'
ATTR_MEDIA_TYPE = 'media_type'
ATTR_MEDIA_NAME = 'media_name'
ATTR_MEDIA_ARTIST_NAME = 'artist_name'
ATTR_MEDIA_ID = 'media_id'
ATTR_METHOD = 'method'
MEDIA_PLAYER_ADD_MEDIA_SCHEMA = MEDIA_PLAYER_SCHEMA.extend({
vol.Required(ATTR_MEDIA_TYPE): cv.string,
vol.Optional(ATTR_MEDIA_ID): cv.string,
vol.Optional(ATTR_MEDIA_NAME): cv.string,
vol.Optional(ATTR_MEDIA_ARTIST_NAME): cv.string,
})
MEDIA_PLAYER_CALL_METHOD_SCHEMA = MEDIA_PLAYER_SCHEMA.extend({
vol.Required(ATTR_METHOD): cv.string,
}, extra=vol.ALLOW_EXTRA)
SERVICE_TO_METHOD = {
SERVICE_ADD_MEDIA: {
'method': 'async_add_media_to_playlist',
'schema': MEDIA_PLAYER_ADD_MEDIA_SCHEMA},
SERVICE_CALL_METHOD: {
'method': 'async_call_method',
'schema': MEDIA_PLAYER_CALL_METHOD_SCHEMA},
}
def _check_deprecated_turn_off(hass, turn_off_action):
    """Create an equivalent script for old turn off actions.

    If *turn_off_action* is one of the deprecated string shortcuts
    (keys of DEPRECATED_TURN_OFF_ACTIONS), translate it into a one-step
    script sequence that calls the kodi_call_method service, log a
    deprecation warning showing the equivalent YAML, and return the new
    sequence.  Any non-string value is returned unchanged.
    """
    if isinstance(turn_off_action, str):
        method = DEPRECATED_TURN_OFF_ACTIONS[turn_off_action]
        new_config = OrderedDict(
            [('service', '{}.{}'.format(DOMAIN, SERVICE_CALL_METHOD)),
             ('data_template', OrderedDict(
                 [('entity_id', '{{ entity_id }}'),
                  ('method', method)]))])
        # Dump the YAML example *before* the strings are replaced with
        # Template objects below, so the warning shows plain text.
        example_conf = dump(OrderedDict(
            [(CONF_TURN_OFF_ACTION, new_config)]))
        _LOGGER.warning(
            "The '%s' action for turn off Kodi is deprecated and "
            "will cease to function in a future release. You need to "
            "change it for a generic Home Assistant script sequence, "
            "which is, for this turn_off action, like this:\n%s",
            turn_off_action, example_conf)
        # Convert raw template strings into Template objects so the
        # script helper can render them at run time.
        new_config['data_template'] = OrderedDict(
            [(key, Template(value, hass))
             for key, value in new_config['data_template'].items()])
        turn_off_action = [new_config]
    return turn_off_action
async def async_setup_platform(hass, config, async_add_entities,
                               discovery_info=None):
    """Set up the Kodi platform.

    Handles both manual YAML configuration and netdisco discovery,
    registers one KodiDevice per IP address, and registers the
    kodi_add_to_playlist / kodi_call_method services once.
    """
    if DATA_KODI not in hass.data:
        hass.data[DATA_KODI] = dict()
    unique_id = None
    # Is this a manual configuration?
    if discovery_info is None:
        name = config.get(CONF_NAME)
        host = config.get(CONF_HOST)
        port = config.get(CONF_PORT)
        tcp_port = config.get(CONF_TCP_PORT)
        encryption = config.get(CONF_PROXY_SSL)
        websocket = config.get(CONF_ENABLE_WEBSOCKET)
    else:
        name = "{} ({})".format(DEFAULT_NAME, discovery_info.get('hostname'))
        host = discovery_info.get('host')
        port = discovery_info.get('port')
        tcp_port = DEFAULT_TCP_PORT
        encryption = DEFAULT_PROXY_SSL
        websocket = DEFAULT_ENABLE_WEBSOCKET
        properties = discovery_info.get('properties')
        if properties is not None:
            unique_id = properties.get('uuid', None)
    # Only add a device once, so discovered devices do not override manual
    # config.
    ip_addr = socket.gethostbyname(host)
    if ip_addr in hass.data[DATA_KODI]:
        return
    # If we got an unique id, check that it does not exist already.
    # This is necessary as netdisco does not deterministically return the same
    # advertisement when the service is offered over multiple IP addresses.
    if unique_id is not None:
        for device in hass.data[DATA_KODI].values():
            if device.unique_id == unique_id:
                return
    entity = KodiDevice(
        hass,
        name=name,
        host=host, port=port, tcp_port=tcp_port, encryption=encryption,
        username=config.get(CONF_USERNAME),
        password=config.get(CONF_PASSWORD),
        turn_on_action=config.get(CONF_TURN_ON_ACTION),
        turn_off_action=config.get(CONF_TURN_OFF_ACTION),
        timeout=config.get(CONF_TIMEOUT), websocket=websocket,
        unique_id=unique_id)
    hass.data[DATA_KODI][ip_addr] = entity
    async_add_entities([entity], update_before_add=True)

    async def async_service_handler(service):
        """Map services to methods on MediaPlayerDevice."""
        method = SERVICE_TO_METHOD.get(service.service)
        if not method:
            return
        # Strip entity_id from the service payload; the rest are kwargs.
        params = {key: value for key, value in service.data.items()
                  if key != 'entity_id'}
        entity_ids = service.data.get('entity_id')
        if entity_ids:
            target_players = [player
                              for player in hass.data[DATA_KODI].values()
                              if player.entity_id in entity_ids]
        else:
            # No entity_id given: target every registered Kodi device.
            target_players = hass.data[DATA_KODI].values()
        update_tasks = []
        for player in target_players:
            await getattr(player, method['method'])(**params)
        # Poll the affected players afterwards so HA state is fresh.
        for player in target_players:
            if player.should_poll:
                update_coro = player.async_update_ha_state(True)
                update_tasks.append(update_coro)
        if update_tasks:
            await asyncio.wait(update_tasks, loop=hass.loop)

    # Services are platform-global; register them only once.
    if hass.services.has_service(DOMAIN, SERVICE_ADD_MEDIA):
        return
    for service in SERVICE_TO_METHOD:
        schema = SERVICE_TO_METHOD[service]['schema']
        hass.services.async_register(
            DOMAIN, service, async_service_handler,
            schema=schema)
def cmd(func):
    """Decorator that logs (instead of raising) jsonrpc transport errors."""
    @wraps(func)
    async def _invoke(target, *args, **kwargs):
        """Run the wrapped command, swallowing TransportError."""
        import jsonrpc_base
        try:
            await func(target, *args, **kwargs)
        except jsonrpc_base.jsonrpc.TransportError as exc:
            # Failures are expected while Kodi is off, so keep those at
            # info level; anything else is a genuine error.
            log = _LOGGER.info if target.state == STATE_OFF else _LOGGER.error
            log("Error calling %s on entity %s: %r",
                func.__name__, target.entity_id, exc)
    return _invoke
class KodiDevice(MediaPlayerDevice):
"""Representation of a XBMC/Kodi device."""
def __init__(self, hass, name, host, port, tcp_port, encryption=False,
             username=None, password=None,
             turn_on_action=None, turn_off_action=None,
             timeout=DEFAULT_TIMEOUT, websocket=True,
             unique_id=None):
    """Initialize the Kodi device.

    Builds the HTTP JSON-RPC client and, when *websocket* is True, a
    websocket client whose Kodi notifications push state into HA.
    """
    import jsonrpc_async
    import jsonrpc_websocket
    self.hass = hass
    self._name = name
    self._unique_id = unique_id
    self._media_position_updated_at = None
    self._media_position = None
    # Client options shared by the HTTP and websocket JSON-RPC servers.
    kwargs = {
        'timeout': timeout,
        'session': async_get_clientsession(hass),
    }
    if username is not None:
        kwargs['auth'] = aiohttp.BasicAuth(username, password)
        # Credentials are embedded in the image URL for thumbnails.
        image_auth_string = "{}:{}@".format(username, password)
    else:
        image_auth_string = ""
    http_protocol = 'https' if encryption else 'http'
    ws_protocol = 'wss' if encryption else 'ws'
    self._http_url = '{}://{}:{}/jsonrpc'.format(http_protocol, host, port)
    self._image_url = '{}://{}{}:{}/image'.format(
        http_protocol, image_auth_string, host, port)
    self._ws_url = '{}://{}:{}/jsonrpc'.format(ws_protocol, host, tcp_port)
    self._http_server = jsonrpc_async.Server(self._http_url, **kwargs)
    if websocket:
        # Setup websocket connection
        self._ws_server = jsonrpc_websocket.Server(self._ws_url, **kwargs)
        # Register notification listeners
        self._ws_server.Player.OnPause = self.async_on_speed_event
        self._ws_server.Player.OnPlay = self.async_on_speed_event
        self._ws_server.Player.OnAVStart = self.async_on_speed_event
        self._ws_server.Player.OnAVChange = self.async_on_speed_event
        self._ws_server.Player.OnResume = self.async_on_speed_event
        self._ws_server.Player.OnSpeedChanged = self.async_on_speed_event
        self._ws_server.Player.OnSeek = self.async_on_speed_event
        self._ws_server.Player.OnStop = self.async_on_stop
        self._ws_server.Application.OnVolumeChanged = \
            self.async_on_volume_changed
        self._ws_server.System.OnQuit = self.async_on_quit
        self._ws_server.System.OnRestart = self.async_on_quit
        self._ws_server.System.OnSleep = self.async_on_quit

        def on_hass_stop(event):
            """Close websocket connection when hass stops."""
            self.hass.async_create_task(self._ws_server.close())

        self.hass.bus.async_listen_once(
            EVENT_HOMEASSISTANT_STOP, on_hass_stop)
    else:
        self._ws_server = None
    # Script creation for the turn on/off config options
    if turn_on_action is not None:
        # NOTE(review): self.async_update_ha_state(True) is *called*
        # here, so a coroutine object (not a callable) is passed as the
        # script's 4th argument — looks wrong; verify against
        # script.Script's change-listener signature.
        turn_on_action = script.Script(
            self.hass, turn_on_action,
            "{} turn ON script".format(self.name),
            self.async_update_ha_state(True))
    if turn_off_action is not None:
        turn_off_action = script.Script(
            self.hass, _check_deprecated_turn_off(hass, turn_off_action),
            "{} turn OFF script".format(self.name))
    self._turn_on_action = turn_on_action
    self._turn_off_action = turn_off_action
    self._enable_websocket = websocket
    # _players semantics: None = Kodi off/unreachable, [] = idle.
    self._players = list()
    self._properties = {}
    self._item = {}
    self._app_properties = {}
@callback
def async_on_speed_event(self, sender, data):
    """Handle player changes between playing and paused.

    Caches the new playback speed and schedules a state refresh; a full
    refresh (re-querying the playing item) is forced when the event does
    not identify an item or when a different item started playing.
    """
    self._properties['speed'] = data['player']['speed']
    # Bug fix: data['item'] is a dict, so hasattr(..., 'id') was always
    # False and every speed event forced a full refresh. Test the key.
    if 'id' not in data['item']:
        # If no item id is given, perform a full update
        force_refresh = True
    else:
        # If a new item is playing, force a complete refresh
        force_refresh = data['item']['id'] != self._item.get('id')
    self.async_schedule_update_ha_state(force_refresh)
@callback
def async_on_stop(self, sender, data):
    """Handle the stop of the player playback."""
    # Prevent stop notifications which are sent after quit notification
    if self._players is None:
        return
    # Empty list (not None) marks the player as idle, not off.
    self._players = []
    self._properties = {}
    self._item = {}
    self._media_position_updated_at = None
    self._media_position = None
    self.async_schedule_update_ha_state()
@callback
def async_on_volume_changed(self, sender, data):
    """Handle the volume changes."""
    self._app_properties['volume'] = data['volume']
    self._app_properties['muted'] = data['muted']
    self.async_schedule_update_ha_state()
@callback
def async_on_quit(self, sender, data):
    """Reset the player state on quit action."""
    # None marks the device as off (see the state property).
    self._players = None
    self._properties = {}
    self._item = {}
    self._app_properties = {}
    self.hass.async_create_task(self._ws_server.close())
async def _get_players(self):
    """Return the active player objects or None."""
    import jsonrpc_base
    try:
        return await self.server.Player.GetActivePlayers()
    except jsonrpc_base.jsonrpc.TransportError:
        # Log at INFO only on the transition to unreachable; while
        # _players is already None we stay quiet at DEBUG level.
        if self._players is not None:
            _LOGGER.info("Unable to fetch kodi data")
        _LOGGER.debug("Unable to fetch kodi data", exc_info=True)
        return None
@property
def unique_id(self):
    """Return the unique id of the device."""
    return self._unique_id
@property
def state(self):
    """Return the state of the device."""
    # None -> off/unreachable, [] -> idle, speed 0 -> paused.
    if self._players is None:
        return STATE_OFF
    if not self._players:
        return STATE_IDLE
    if self._properties['speed'] == 0:
        return STATE_PAUSED
    return STATE_PLAYING
async def async_ws_connect(self):
    """Connect to Kodi via websocket protocol.

    On success, spawns a background task that drives the websocket
    receive loop until Kodi disconnects; connection failures are logged
    and the method returns silently (polling continues over HTTP).
    """
    import jsonrpc_base
    try:
        ws_loop_future = await self._ws_server.ws_connect()
    except jsonrpc_base.jsonrpc.TransportError:
        _LOGGER.info("Unable to connect to Kodi via websocket")
        _LOGGER.debug(
            "Unable to connect to Kodi via websocket", exc_info=True)
        return

    async def ws_loop_wrapper():
        """Catch exceptions from the websocket loop task."""
        try:
            await ws_loop_future
        except jsonrpc_base.TransportError:
            # Kodi abruptly ends ws connection when exiting. We will try
            # to reconnect on the next poll.
            pass
        # Update HA state after Kodi disconnects
        self.async_schedule_update_ha_state()

    # Create a task instead of adding a tracking job, since this task will
    # run until the websocket connection is closed.
    self.hass.loop.create_task(ws_loop_wrapper())
async def async_update(self):
    """Retrieve latest state.

    Polls active players, application properties (volume/mute) and,
    when something is playing, the player position and item metadata.
    """
    self._players = await self._get_players()
    if self._players is None:
        # Kodi unreachable: clear all cached state and bail out.
        self._properties = {}
        self._item = {}
        self._app_properties = {}
        return
    # Reachable over HTTP but websocket down: try to (re)connect it.
    if self._enable_websocket and not self._ws_server.connected:
        self.hass.async_create_task(self.async_ws_connect())
    self._app_properties = \
        await self.server.Application.GetProperties(
            ['volume', 'muted']
        )
    if self._players:
        player_id = self._players[0]['playerid']
        assert isinstance(player_id, int)
        self._properties = await self.server.Player.GetProperties(
            player_id,
            ['time', 'totaltime', 'speed', 'live']
        )
        position = self._properties['time']
        # Refresh the timestamp only when the position actually moved,
        # so media_position_updated_at stays meaningful while paused.
        if self._media_position != position:
            self._media_position_updated_at = dt_util.utcnow()
            self._media_position = position
        self._item = (await self.server.Player.GetItem(
            player_id,
            ['title', 'file', 'uniqueid', 'thumbnail', 'artist',
             'albumartist', 'showtitle', 'album', 'season', 'episode']
        ))['item']
    else:
        # Idle: nothing playing, clear the playback-related caches.
        self._properties = {}
        self._item = {}
        self._app_properties = {}
        self._media_position = None
        self._media_position_updated_at = None
@property
def server(self):
    """Active server for json-rpc requests."""
    # Prefer the websocket transport whenever it is up; otherwise fall
    # back to plain HTTP.
    if not (self._enable_websocket and self._ws_server.connected):
        return self._http_server
    return self._ws_server
@property
def name(self):
    """Return the name of the device."""
    return self._name
@property
def should_poll(self):
    """Return True if entity has to be polled for state."""
    # Polling is unnecessary while push updates arrive over websocket.
    return not (self._enable_websocket and self._ws_server.connected)
@property
def volume_level(self):
    """Volume level of the media player (0..1)."""
    # Kodi reports 0..100; returns None implicitly when unknown.
    if 'volume' in self._app_properties:
        return self._app_properties['volume'] / 100.0
@property
def is_volume_muted(self):
    """Boolean if volume is currently muted."""
    return self._app_properties.get('muted')
@property
def media_content_id(self):
    """Content ID of current playing media."""
    return self._item.get('uniqueid', None)
@property
def media_content_type(self):
    """Content type of current playing media.
    If the media type cannot be detected, the player type is used.
    """
    if MEDIA_TYPES.get(self._item.get('type')) is None and self._players:
        return MEDIA_TYPES.get(self._players[0]['type'])
    return MEDIA_TYPES.get(self._item.get('type'))
@property
def media_duration(self):
    """Duration of current playing media in seconds."""
    # Live streams have no meaningful duration.
    if self._properties.get('live'):
        return None
    total_time = self._properties.get('totaltime')
    if total_time is None:
        return None
    # Fold Kodi's h/m/s dict into plain seconds.
    return total_time['seconds'] + 60 * (
        total_time['minutes'] + 60 * total_time['hours'])
@property
def media_position(self):
    """Position of current playing media in seconds."""
    time = self._properties.get('time')
    if time is None:
        return None
    # Fold Kodi's h/m/s dict into plain seconds.
    return time['seconds'] + 60 * (time['minutes'] + 60 * time['hours'])
@property
def media_position_updated_at(self):
    """Last valid time of media position."""
    return self._media_position_updated_at
@property
def media_image_url(self):
    """Image url of current playing media."""
    thumbnail = self._item.get('thumbnail')
    if thumbnail is None:
        return None
    url_components = urllib.parse.urlparse(thumbnail)
    # Kodi hands out 'image://...' URLs that must be fetched through its
    # HTTP image service; other schemes yield None implicitly.
    if url_components.scheme == 'image':
        return '{}/{}'.format(
            self._image_url,
            urllib.parse.quote_plus(thumbnail))
@property
def media_title(self):
    """Title of current playing media."""
    # find a string we can use as a title
    item = self._item
    return item.get('title') or item.get('label') or item.get('file')
@property
def media_series_title(self):
    """Title of series of current playing media, TV show only."""
    return self._item.get('showtitle')
@property
def media_season(self):
    """Season of current playing media, TV show only."""
    return self._item.get('season')
@property
def media_episode(self):
    """Episode of current playing media, TV show only."""
    return self._item.get('episode')
@property
def media_album_name(self):
    """Album name of current playing media, music track only."""
    return self._item.get('album')
@property
def media_artist(self):
    """Artist of current playing media, music track only."""
    artists = self._item.get('artist', [])
    if artists:
        return artists[0]
    return None
@property
def media_album_artist(self):
    """Album artist of current playing media, music track only."""
    artists = self._item.get('albumartist', [])
    if artists:
        return artists[0]
    return None
@property
def supported_features(self):
    """Flag media player features that are supported."""
    supported_features = SUPPORT_KODI
    # Turn on/off are only offered when a matching script is configured.
    if self._turn_on_action is not None:
        supported_features |= SUPPORT_TURN_ON
    if self._turn_off_action is not None:
        supported_features |= SUPPORT_TURN_OFF
    return supported_features
@cmd
async def async_turn_on(self):
    """Execute turn_on_action to turn on media player."""
    if self._turn_on_action is not None:
        await self._turn_on_action.async_run(
            variables={"entity_id": self.entity_id})
    else:
        _LOGGER.warning("turn_on requested but turn_on_action is none")
@cmd
async def async_turn_off(self):
    """Execute turn_off_action to turn off media player."""
    if self._turn_off_action is not None:
        await self._turn_off_action.async_run(
            variables={"entity_id": self.entity_id})
    else:
        _LOGGER.warning("turn_off requested but turn_off_action is none")
@cmd
async def async_volume_up(self):
    """Volume up the media player."""
    assert (
        await self.server.Input.ExecuteAction('volumeup')) == 'OK'
@cmd
async def async_volume_down(self):
    """Volume down the media player."""
    assert (
        await self.server.Input.ExecuteAction('volumedown')) == 'OK'
@cmd
def async_set_volume_level(self, volume):
    """Set volume level, range 0..1.
    This method must be run in the event loop and returns a coroutine.
    """
    # Kodi expects an integer percentage (0..100).
    return self.server.Application.SetVolume(int(volume * 100))
@cmd
def async_mute_volume(self, mute):
    """Mute (true) or unmute (false) media player.
    This method must be run in the event loop and returns a coroutine.
    """
    return self.server.Application.SetMute(mute)
async def async_set_play_state(self, state):
    """Handle play/pause/toggle.

    *state* is True (play), False (pause) or the string 'toggle'.
    """
    players = await self._get_players()
    if players is not None and players:
        await self.server.Player.PlayPause(
            players[0]['playerid'], state)
@cmd
def async_media_play_pause(self):
    """Pause media on media player.
    This method must be run in the event loop and returns a coroutine.
    """
    return self.async_set_play_state('toggle')
@cmd
def async_media_play(self):
    """Play media.
    This method must be run in the event loop and returns a coroutine.
    """
    return self.async_set_play_state(True)
@cmd
def async_media_pause(self):
    """Pause the media player.
    This method must be run in the event loop and returns a coroutine.
    """
    return self.async_set_play_state(False)
@cmd
async def async_media_stop(self):
    """Stop the media player."""
    players = await self._get_players()
    if players:
        await self.server.Player.Stop(players[0]['playerid'])
async def _goto(self, direction):
    """Handle for previous/next track."""
    players = await self._get_players()
    if not players:
        return
    player_id = players[0]['playerid']
    if direction == 'previous':
        # Kodi restarts the current track unless it is already at the
        # beginning, so seek to 0 first to force a real "previous".
        await self.server.Player.Seek(player_id, 0)
    await self.server.Player.GoTo(player_id, direction)
@cmd
def async_media_next_track(self):
    """Send next track command.
    This method must be run in the event loop and returns a coroutine.
    """
    return self._goto('next')
@cmd
def async_media_previous_track(self):
    """Send previous track command.
    This method must be run in the event loop and returns a coroutine.
    """
    return self._goto('previous')
@cmd
async def async_media_seek(self, position):
    """Send seek command."""
    players = await self._get_players()
    # Split the float position (seconds) into Kodi's h/m/s/ms dict.
    time = {'milliseconds': int((position % 1) * 1000)}
    whole_seconds = int(position)
    minutes, time['seconds'] = divmod(whole_seconds, 60)
    time['hours'], time['minutes'] = divmod(minutes, 60)
    if players:
        await self.server.Player.Seek(players[0]['playerid'], time)
@cmd
def async_play_media(self, media_type, media_id, **kwargs):
    """Send the play_media command to the media player.
    This method must be run in the event loop and returns a coroutine.
    """
    if media_type == "CHANNEL":
        return self.server.Player.Open(
            {"item": {"channelid": int(media_id)}})
    if media_type == "PLAYLIST":
        return self.server.Player.Open(
            {"item": {"playlistid": int(media_id)}})
    # Default: treat media_id as a file path or URL.
    return self.server.Player.Open(
        {"item": {"file": str(media_id)}})
async def async_set_shuffle(self, shuffle):
    """Set shuffle mode, for the first player."""
    if not self._players:
        raise RuntimeError("Error: No active player.")
    await self.server.Player.SetShuffle(
        {"playerid": self._players[0]['playerid'], "shuffle": shuffle})
async def async_call_method(self, method, **kwargs):
    """Run Kodi JSONRPC API method with params.

    Fires a kodi_call_method_result event when the call returns a dict,
    and returns the raw result (or None on transport failure).
    """
    import jsonrpc_base
    _LOGGER.debug("Run API method %s, kwargs=%s", method, kwargs)
    result_ok = False
    try:
        result = await getattr(self.server, method)(**kwargs)
        result_ok = True
    except jsonrpc_base.jsonrpc.ProtocolError as exc:
        # Kodi rejected the call; surface its error payload as result.
        result = exc.args[2]['error']
        _LOGGER.error("Run API method %s.%s(%s) error: %s",
                      self.entity_id, method, kwargs, result)
    except jsonrpc_base.jsonrpc.TransportError:
        result = None
        _LOGGER.warning("TransportError trying to run API method "
                        "%s.%s(%s)", self.entity_id, method, kwargs)
    # Only dict results are broadcast on the event bus.
    if isinstance(result, dict):
        event_data = {'entity_id': self.entity_id,
                      'result': result,
                      'result_ok': result_ok,
                      'input': {'method': method, 'params': kwargs}}
        _LOGGER.debug("EVENT kodi_call_method_result: %s", event_data)
        self.hass.bus.async_fire(EVENT_KODI_CALL_METHOD_RESULT,
                                 event_data=event_data)
    return result
async def async_add_media_to_playlist(
        self, media_type, media_id=None, media_name='ALL', artist_name=''):
    """Add a media to default playlist (i.e. playlistid=0).
    First the media type must be selected, then
    the media can be specified in terms of id or
    name and optionally artist name.
    All the albums of an artist can be added with
    media_name="ALL"
    """
    import jsonrpc_base
    params = {"playlistid": 0}
    if media_type == "SONG":
        if media_id is None:
            # Resolve the song id from its name (and optional artist).
            media_id = await self.async_find_song(
                media_name, artist_name)
        if media_id:
            params["item"] = {"songid": int(media_id)}
    elif media_type == "ALBUM":
        if media_id is None:
            if media_name == "ALL":
                # Special case: queue the artist's whole discography.
                await self.async_add_all_albums(artist_name)
                return
            media_id = await self.async_find_album(
                media_name, artist_name)
        if media_id:
            params["item"] = {"albumid": int(media_id)}
    else:
        raise RuntimeError("Unrecognized media type.")
    if media_id is not None:
        try:
            await self.server.Playlist.Add(params)
        except jsonrpc_base.jsonrpc.ProtocolError as exc:
            result = exc.args[2]['error']
            _LOGGER.error("Run API method %s.Playlist.Add(%s) error: %s",
                          self.entity_id, media_type, result)
        except jsonrpc_base.jsonrpc.TransportError:
            _LOGGER.warning("TransportError trying to add playlist to %s",
                            self.entity_id)
    else:
        # Lookup failed: nothing to add.
        _LOGGER.warning("No media detected for Playlist.Add")
async def async_add_all_albums(self, artist_name):
    """Add all albums of an artist to default playlist (i.e. playlistid=0).
    The artist is specified in terms of name.
    """
    artist_id = await self.async_find_artist(artist_name)
    albums = await self.async_get_albums(artist_id)
    # Queue each album individually; Kodi has no bulk-add call here.
    for alb in albums['albums']:
        await self.server.Playlist.Add(
            {"playlistid": 0, "item": {"albumid": int(alb['albumid'])}})
async def async_clear_playlist(self):
    """Clear default playlist (i.e. playlistid=0)."""
    # Bug fix: the coroutine was returned without being awaited, so the
    # RPC never actually executed when callers awaited this method.
    return await self.server.Playlist.Clear({"playlistid": 0})
async def async_get_artists(self):
    """Get artists list."""
    return await self.server.AudioLibrary.GetArtists()
async def async_get_albums(self, artist_id=None):
    """Get albums list, optionally filtered to one artist."""
    if artist_id is None:
        return await self.server.AudioLibrary.GetAlbums()
    return (await self.server.AudioLibrary.GetAlbums(
        {"filter": {"artistid": int(artist_id)}}))
async def async_find_artist(self, artist_name):
    """Find artist by name; returns the artist id or None."""
    artists = await self.async_get_artists()
    try:
        out = self._find(
            artist_name, [a['artist'] for a in artists['artists']])
        return artists['artists'][out[0][0]]['artistid']
    except KeyError:
        # No 'artists' key in the response: empty library.
        _LOGGER.warning("No artists were found: %s", artist_name)
        return None
async def async_get_songs(self, artist_id=None):
    """Get songs list, optionally filtered to one artist."""
    if artist_id is None:
        return await self.server.AudioLibrary.GetSongs()
    return (await self.server.AudioLibrary.GetSongs(
        {"filter": {"artistid": int(artist_id)}}))
async def async_find_song(self, song_name, artist_name=''):
    """Find song by name and optionally artist name."""
    artist_id = None
    if artist_name != '':
        artist_id = await self.async_find_artist(artist_name)
    songs = await self.async_get_songs(artist_id)
    if songs['limits']['total'] == 0:
        return None
    # Best fuzzy match first (see _find).
    out = self._find(song_name, [a['label'] for a in songs['songs']])
    return songs['songs'][out[0][0]]['songid']
async def async_find_album(self, album_name, artist_name=''):
    """Find album by name and optionally artist name."""
    artist_id = None
    if artist_name != '':
        artist_id = await self.async_find_artist(artist_name)
    albums = await self.async_get_albums(artist_id)
    try:
        out = self._find(
            album_name, [a['label'] for a in albums['albums']])
        return albums['albums'][out[0][0]]['albumid']
    except KeyError:
        _LOGGER.warning("No albums were found with artist: %s, album: %s",
                        artist_name, album_name)
        return None
@staticmethod
def _find(key_word, words):
    """Rank *words* by how many space-separated tokens of *key_word*
    they contain as whole words (case-insensitive).

    Returns a list of [index, match_count] pairs sorted by count,
    best match first (stable order among equal scores).
    """
    tokens = key_word.split(' ')
    patterns = [re.compile(
        '(^| )' + tok + '( |$)', re.IGNORECASE) for tok in tokens]
    scores = []
    for index, word in enumerate(words):
        rate = sum(1 for pattern in patterns if pattern.search(word))
        scores.append([index, rate])
    return sorted(scores, key=lambda pair: pair[1], reverse=True)
| {
"content_hash": "2f33f8e267b4881fc72671fab1b34a3f",
"timestamp": "",
"source": "github",
"line_count": 970,
"max_line_length": 79,
"avg_line_length": 35.59484536082474,
"alnum_prop": 0.5902916558056014,
"repo_name": "PetePriority/home-assistant",
"id": "f8d0cdc5a12cf2ba47bdef3d4500b17dc004d009",
"size": "34527",
"binary": false,
"copies": "4",
"ref": "refs/heads/dev",
"path": "homeassistant/components/media_player/kodi.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1175"
},
{
"name": "Dockerfile",
"bytes": "1073"
},
{
"name": "Python",
"bytes": "13985647"
},
{
"name": "Ruby",
"bytes": "745"
},
{
"name": "Shell",
"bytes": "17364"
}
],
"symlink_target": ""
} |
import re
import os.path
from HTMLParser import HTMLParser
class StudentHTMLParser(HTMLParser):
    """Extract title, visible text and file size from a student HTML page.

    Python 2 code: relies on the old ``HTMLParser`` module and ``print``
    statements.
    """
    def __init__(self, page_path):
        HTMLParser.__init__(self)
        self.content = ""
        # Normalize the fullwidth-tilde variant sometimes found in paths.
        self.page_path = page_path.replace("‾","~")
        self.page_title = ""
        self.page_size = os.path.getsize(page_path)
        # > 0 while inside at least one content-bearing tag.
        self.is_content_cnt = 0
        self.is_title = False
        self.is_spam = False
        # NOTE(review): re.match anchors only at the start, so e.g. the
        # 'p' alternative also matches tags like 'param' — confirm intent.
        self.htmltag_re = re.compile("html|h[1-6]|body|title|center|div|span|section|header|nav|article|section|footer|form|p|ul|ol|li|dl|dt|dd|pre|table|tr|th|td|a")
        self.spam_htmltag_re = re.compile("script|style")
    def increase_parameter(self, tag, param=[1,True,True]):
        # param = (content-count delta, title flag, spam flag).
        # NOTE(review): mutable default list — benign here because it is
        # never mutated, but a tuple would be safer.
        if self.htmltag_re.match(tag):
            self.is_content_cnt+=param[0]
        if tag == "title":
            self.is_title = param[1]
        if self.spam_htmltag_re.match(tag):
            self.is_spam = param[2]
    def handle_starttag(self, tag, attrs):
        self.increase_parameter(tag)
    def handle_endtag(self, tag):
        # Closing tags decrement the counter and clear the flags.
        self.increase_parameter(tag,[-1,False,False])
    def handle_data(self, content):
        # Skip text inside <script>/<style> blocks.
        if self.is_spam: return
        if self.is_content_cnt > 0:
            if self.is_title:
                self.page_title = content.replace('\n','').encode("utf-8")
            self.content += content + " "
    def show(self):
        # Debug dump of the parsed page metadata.
        print "-------------"
        print self.page_title
        print self.page_path
        print self.page_size
        print "-------------"
| {
"content_hash": "8af4c08f410aa3bd8f111049936b1347",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 166,
"avg_line_length": 34.95454545454545,
"alnum_prop": 0.5695708712613784,
"repo_name": "supertask/KSUHack",
"id": "c173bc2ea1c7c16264ce5f097ce34212093404ea",
"size": "1587",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "analyze/student_html_parser.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "305"
},
{
"name": "HTML",
"bytes": "2464"
},
{
"name": "PHP",
"bytes": "6306"
},
{
"name": "Python",
"bytes": "48693"
}
],
"symlink_target": ""
} |
# Pelican site configuration.
AUTHOR = 'Durham 2nd Ward'
SITENAME = 'Scriptures of the Restoration'
DESCRIPTION = ''
SITEURL = ''
PATH = 'content'
TIMEZONE = 'America/New_York'
DEFAULT_DATE_FORMAT = '%Y-%m-%d'
DEFAULT_LANG = 'en'
TYPOGRIFY = True # Nice typographic things
TYPOGRIFY_IGNORE_TAGS = ['h1']
GOOGLE_ANALYTICS = ''
# ---------------
# Site building
# ---------------
DELETE_OUTPUT_DIRECTORY = True
# Theme
THEME = 'theme'
# Folder where everything lives
# NOTE(review): duplicate of the PATH assignment above (same value).
PATH = 'content'
PAGE_URL = '{slug}/'
PAGE_SAVE_AS = '{slug}/index.html'
# NOTE(review): this assignment is overridden further down, which drops
# '.htaccess' and 'robots.txt' from the deployed output — confirm intent.
STATIC_PATHS = ['.htaccess', 'robots.txt', 'files']
READERS = {'html': None} # Don't parse HTML files
# No feeds
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
# None of these pages
AUTHOR_SAVE_AS = ''
AUTHORS_SAVE_AS = ''
ARCHIVES_SAVE_AS = ''
TAGS_SAVE_AS = ''
CATEGORIES_SAVE_AS = ''
# ------------
# Site items
# ------------
MENUITEMS = [('Doctrine and Covenants', '/dc/'),
             ('Book of Mormon', '/bom/'),
             ('Book of Mormon Highlights', '/bom-highlights/'),
             ('Resources', '/resources/')]
DEFAULT_PAGINATION = False
PLUGIN_PATHS = ['plugins']
PLUGINS = ['pelican-bootstrapify']
MARKDOWN = {
    'extension_configs': {
        'markdown.extensions.smarty': {},
        'markdown.extensions.extra': {},
        'markdown.extensions.toc': {},
        'markdown.extensions.meta': {}
    },
    'output_format': 'html5',
}
# NOTE(review): re-assignment silently replaces the STATIC_PATHS above.
STATIC_PATHS = ['files', 'favicon.ico', 'config.php', 'p.php']
# ---------------
# Jinja filters
# ---------------
import jinja2
import os
# Get the final basename or directory name of a path
def get_slug(url):
    """Return the last directory component of *url*.

    E.g. '/bom/page/index.html' -> 'page', '/dc/' -> 'dc'.
    """
    parent = os.path.dirname(url)
    return os.path.basename(parent)
JINJA_FILTERS = {'get_slug': get_slug}
| {
"content_hash": "20290a8e9fb9f44152112d530547d8a3",
"timestamp": "",
"source": "github",
"line_count": 87,
"max_line_length": 63,
"avg_line_length": 20.471264367816094,
"alnum_prop": 0.6047164514317799,
"repo_name": "andrewheiss/scripturesrestoration",
"id": "d9d76540e05ab41afd3f8671442c02f4311eaf27",
"size": "1866",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pelicanconf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "10989"
},
{
"name": "JavaScript",
"bytes": "484"
},
{
"name": "Makefile",
"bytes": "2804"
},
{
"name": "PHP",
"bytes": "5929"
},
{
"name": "Python",
"bytes": "2271"
},
{
"name": "R",
"bytes": "4978"
},
{
"name": "Shell",
"bytes": "2202"
}
],
"symlink_target": ""
} |
import argparse
# pylint: disable=superfluous-parens
# pylint: disable=broad-except
import importlib
import logging
import sys
from rdr_service.tools.tool_libs import GCPProcessContext
from rdr_service.services.gcp_utils import gcp_get_app_access_token, gcp_get_app_host_name
from rdr_service.services.system_utils import make_api_request, setup_logging, setup_i18n
# Shared logger used across the RDR tool suite.
_logger = logging.getLogger("rdr_logger")
# Identity strings picked up by the tool-lib command dispatcher.
tool_cmd = "verify"
tool_desc = "test local environment"
class Verify(object):
    """Sanity-check the local environment for the RDR tools.

    Checks that required python modules are importable and, when a real GCP
    project is targeted, that the app API responds and an OAuth2 access token
    can be obtained.
    """

    def __init__(self, args, gcp_env):
        """
        :param args: parsed command-line arguments.
        :param gcp_env: GCP environment context object.
        """
        self.args = args
        self.gcp_env = gcp_env

    def run(self):
        """
        Main program process
        :return: Exit code value
        """
        result = 0
        requests_mod = False
        #
        # python modules that need to be installed in the local system
        #
        modules = ["requests", "urllib3"]
        for module in modules:
            try:
                mod = importlib.import_module(module)
                _logger.info("found python module [{0}].".format(mod.__name__))
                if module == "requests":
                    requests_mod = True
            except ImportError:
                # BUGFIX: the previous message suggested the invalid command
                # "pip --install <module>"; the correct form is "pip install".
                _logger.error('missing python [{0}] module, please run "pip install {0}"'.format(module))
                result = 1

        if self.args.project in ["localhost", "127.0.0.1"]:
            _logger.warning("unable to perform additional testing unless '--project' parameter is set.")
            return result

        # Try making some API calls to verify OAuth2 token functions.
        if requests_mod:
            host = gcp_get_app_host_name(self.args.project)
            url = "rdr/v1"
            _logger.info("attempting simple api request.")
            code, resp = make_api_request(host, url)
            if code != 200 or "version_id" not in resp:
                _logger.error("simple api request failed")
                return 1
            _logger.info("{0} version is [{1}].".format(host, resp["version_id"]))
            # verify OAuth2 token can be retrieved.
            token = gcp_get_app_access_token()
            if token and token.startswith("ya"):
                _logger.info("verified app authentication token.")
                # TODO: Make an authenticated API call here. What APIs are avail for this?
            else:
                _logger.error("app authentication token verification failed.")
                result = 1
        else:
            _logger.warning("skipping api request tests.")

        return result
def run():
    """Entry point: configure logging/i18n, parse arguments and run Verify."""
    # Set global debug value and setup application logging.
    setup_logging(
        _logger, tool_cmd, "--debug" in sys.argv, "{0}.log".format(tool_cmd) if "--log-file" in sys.argv else None
    )
    setup_i18n()

    # Setup program arguments.
    parser = argparse.ArgumentParser(prog=tool_cmd, description=tool_desc)
    parser.add_argument("--debug", help="Enable debug output", default=False, action="store_true")  # noqa
    parser.add_argument("--log-file", help="write output to a log file", default=False, action="store_true")  # noqa
    parser.add_argument("--project", help="gcp project name", default="localhost")  # noqa
    parser.add_argument("--account", help="pmi-ops account", default=None)  # noqa
    parser.add_argument("--service-account", help="gcp service account", default=None)  # noqa
    parsed_args = parser.parse_args()

    with GCPProcessContext(tool_cmd, parsed_args.project, parsed_args.account, parsed_args.service_account) as gcp_env:
        return Verify(parsed_args, gcp_env).run()
# --- Main Program Call ---
if __name__ == "__main__":
    # Propagate the tool's exit code to the shell.
    sys.exit(run())
| {
"content_hash": "864a2160bdd4e97300b60d32bf629719",
"timestamp": "",
"source": "github",
"line_count": 106,
"max_line_length": 116,
"avg_line_length": 34.698113207547166,
"alnum_prop": 0.6076672104404568,
"repo_name": "all-of-us/raw-data-repository",
"id": "c7f0209504efc83468c0e2d23d44426bc2910bb2",
"size": "3772",
"binary": false,
"copies": "1",
"ref": "refs/heads/devel",
"path": "rdr_service/client/client_libs/verify_environment.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "1866"
},
{
"name": "Mako",
"bytes": "1715"
},
{
"name": "Python",
"bytes": "17040924"
},
{
"name": "R",
"bytes": "2212"
},
{
"name": "Shell",
"bytes": "92213"
}
],
"symlink_target": ""
} |
from google.analytics import data_v1beta
async def sample_get_metadata():
    """Request and print metadata for a GA4 property (generated API sample)."""
    # Create a client
    client = data_v1beta.BetaAnalyticsDataAsyncClient()

    # Initialize request argument(s)
    request = data_v1beta.GetMetadataRequest(name="name_value")

    # Make the request and handle the response
    response = await client.get_metadata(request=request)
    print(response)
# [END analyticsdata_v1beta_generated_BetaAnalyticsData_GetMetadata_async]
| {
"content_hash": "1d81abbea31d45fb3fc0472f2b3bb2a9",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 74,
"avg_line_length": 25.05263157894737,
"alnum_prop": 0.7205882352941176,
"repo_name": "googleapis/python-analytics-data",
"id": "f450aed282b02988fc8dfd4bef7c19e213a56a38",
"size": "1873",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_get_metadata_async.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2050"
},
{
"name": "Python",
"bytes": "704503"
},
{
"name": "Shell",
"bytes": "30684"
}
],
"symlink_target": ""
} |
import numpy as np
from functools import reduce
import scipy.ndimage as nd
from lucid.misc.channel_reducer import ChannelReducer
import lucid.optvis.param as param
import lucid.optvis.objectives as objectives
import lucid.optvis.render as render
import lucid.optvis.transform as transform
from .attribution import get_acts, get_attr
def argmax_nd(x, axes, *, max_rep=np.inf, max_rep_strict=None):
    """Argmax of *x* over *axes*, limiting how often any single index repeats.

    Returns (via np.unravel_index) the coordinates along *axes* of the maximum
    for every remaining position, while trying to keep any one coordinate from
    being chosen more than *max_rep* times.  With *max_rep_strict* the cap is
    enforced even at the cost of picking lower-ranked maxima; otherwise one
    extra relaxation pass is allowed.
    """
    assert max_rep > 0
    assert np.isinf(max_rep) or max_rep_strict is not None
    # Move the reduction axes to the front so they can be flattened together.
    perm = list(range(len(x.shape)))
    for axis in reversed(axes):
        loc = perm.index(axis)
        perm = [axis] + perm[:loc] + perm[loc + 1 :]
    x = x.transpose(perm)
    shape = x.shape
    axes_size = reduce(lambda a, b: a * b, shape[: len(axes)], 1)
    x = x.reshape([axes_size, -1])
    # indices[k] holds, for each column, the k-th best flattened row index.
    indices = np.argsort(-x, axis=0)
    result = indices[0].copy()
    counts = np.zeros(len(indices), dtype=int)
    unique_values, unique_counts = np.unique(result, return_counts=True)
    counts[unique_values] = unique_counts
    # Repeatedly demote the weakest over-represented picks to their next-best
    # alternative until every index is within the repetition cap (or no change
    # is possible).  Order-sensitive: counts are mutated as we sweep.
    for i in range(1, len(indices) + (0 if max_rep_strict else 1)):
        order = np.argsort(x[result, range(len(result))])
        result_in_order = result[order]
        current_counts = counts.copy()
        changed = False
        for j in range(len(order)):
            value = result_in_order[j]
            if current_counts[value] > max_rep:
                pos = order[j]
                new_value = indices[i % len(indices)][pos]
                result[pos] = new_value
                current_counts[value] -= 1
                counts[value] -= 1
                counts[new_value] += 1
                changed = True
        if not changed:
            break
    result = result.reshape(shape[len(axes) :])
    return np.unravel_index(result, shape[: len(axes)])
class LayerNMF:
    """Dimensionality-reduced view of a model layer's activations.

    Computes activations for *obses* at *layer_name* and (unless
    ``features is None``) reduces the channel dimension to *features*
    directions with ChannelReducer.  Provides optimization-based feature
    visualization and dataset-example visualizations for the reduced channels.
    """

    def __init__(
        self,
        model,
        layer_name,
        obses,
        obses_full=None,
        features=10,
        *,
        attr_layer_name=None,
        attr_opts={"integrate_steps": 10},
    ):
        """
        :param model: lucid-compatible model.
        :param layer_name: layer whose activations are reduced.
        :param obses: observations fed to the model.
        :param obses_full: optional full-resolution observations used for
            patch extraction; defaults to *obses*.
        :param features: number of reduced directions, or None to skip
            reduction entirely.
        :param attr_layer_name: if given, fit the reducer on attributions
            from this layer instead of raw activations.
        :param attr_opts: options forwarded to get_attr.
            NOTE(review): mutable default dict; it is only read here.
        """
        self.model = model
        self.layer_name = layer_name
        self.obses = obses
        self.obses_full = obses_full
        if self.obses_full is None:
            self.obses_full = obses
        self.features = features
        # Padding state, filled in lazily by pad_obses().
        self.pad_h = 0
        self.pad_w = 0
        self.padded_obses = self.obses_full
        if self.features is None:
            self.reducer = None
        else:
            self.reducer = ChannelReducer(features)
        acts = get_acts(model, layer_name, obses)
        # Size (in observation pixels) of one activation cell.
        # assumes obses_full is (batch, h, w, c) and acts is (batch, h', w', ch) — TODO confirm
        self.patch_h = self.obses_full.shape[1] / acts.shape[1]
        self.patch_w = self.obses_full.shape[2] / acts.shape[2]
        if self.reducer is None:
            # No reduction: identity channel directions and transforms.
            self.acts_reduced = acts
            self.channel_dirs = np.eye(self.acts_reduced.shape[-1])
            self.transform = lambda acts: acts.copy()
            self.inverse_transform = lambda acts: acts.copy()
        else:
            if attr_layer_name is None:
                self.acts_reduced = self.reducer.fit_transform(acts)
            else:
                # Fit the reducer on signed attributions (positive and
                # negative parts stacked along the batch axis) so directions
                # reflect attribution rather than raw activation magnitude.
                attrs = get_attr(model, attr_layer_name, layer_name, obses, **attr_opts)
                attrs_signed = np.concatenate(
                    [np.maximum(0, attrs), np.maximum(0, -attrs)], axis=0
                )
                self.reducer.fit(attrs_signed)
                self.acts_reduced = self.reducer.transform(acts)
            self.channel_dirs = self.reducer._reducer.components_
            self.transform = lambda acts: self.reducer.transform(acts)
            self.inverse_transform = lambda acts_r: ChannelReducer._apply_flat(
                self.reducer._reducer.inverse_transform, acts_r
            )

    def vis_traditional(
        self,
        feature_list=None,
        *,
        transforms=[transform.jitter(2)],
        l2_coeff=0.0,
        l2_layer_name=None,
    ):
        """Optimization-based feature visualization for the given features.

        :param feature_list: feature index or iterable of indices; defaults
            to all reduced features.
        :param transforms: lucid render transforms.
            NOTE(review): mutable default list; it is only read here.
        :param l2_coeff: if non-zero, subtract an L2 penalty on
            *l2_layer_name* from the objective.
        :return: the last image batch produced by render_vis.
        """
        if feature_list is None:
            feature_list = list(range(self.acts_reduced.shape[-1]))
        try:
            feature_list = list(feature_list)
        except TypeError:
            # A single non-iterable feature index was passed.
            feature_list = [feature_list]
        obj = sum(
            [
                objectives.direction_neuron(
                    self.layer_name, self.channel_dirs[feature], batch=feature
                )
                for feature in feature_list
            ]
        )
        if l2_coeff != 0.0:
            assert (
                l2_layer_name is not None
            ), "l2_layer_name must be specified if l2_coeff is non-zero"
            obj -= objectives.L2(l2_layer_name) * l2_coeff
        param_f = lambda: param.image(64, batch=len(feature_list))
        return render.render_vis(
            self.model, obj, param_f=param_f, transforms=transforms
        )[-1]

    def pad_obses(self, *, expand_mult=1):
        """Surround obses_full with a checkerboard border wide enough for
        patches of *expand_mult* activation cells.

        Idempotent: only recomputes when more padding is needed than is
        already cached in self.padded_obses.
        """
        pad_h = np.ceil(self.patch_h * expand_mult).astype(int)
        pad_w = np.ceil(self.patch_w * expand_mult).astype(int)
        if pad_h > self.pad_h or pad_w > self.pad_w:
            self.pad_h = pad_h
            self.pad_w = pad_w
            # Light gray checkerboard (values 0.75/1.0) as the fill pattern.
            self.padded_obses = (
                np.indices(
                    (
                        self.obses_full.shape[1] + self.pad_h * 2,
                        self.obses_full.shape[2] + self.pad_w * 2,
                    )
                ).sum(axis=0)
                % 2
            )
            self.padded_obses = self.padded_obses * 0.25 + 0.75
            self.padded_obses = self.padded_obses.astype(self.obses_full.dtype)
            self.padded_obses = self.padded_obses[None, ..., None]
            self.padded_obses = self.padded_obses.repeat(
                self.obses_full.shape[0], axis=0
            )
            self.padded_obses = self.padded_obses.repeat(3, axis=-1)
            self.padded_obses[
                :, self.pad_h : -self.pad_h, self.pad_w : -self.pad_w, :
            ] = self.obses_full

    def get_patch(self, obs_index, pos_h, pos_w, *, expand_mult=1):
        """Extract the observation patch centered on activation cell
        (pos_h, pos_w) of observation *obs_index*, scaled by *expand_mult*.

        Requires pad_obses() to have been called with at least this
        expand_mult.
        """
        left_h = self.pad_h + (pos_h - 0.5 * expand_mult) * self.patch_h
        right_h = self.pad_h + (pos_h + 0.5 * expand_mult) * self.patch_h
        left_w = self.pad_w + (pos_w - 0.5 * expand_mult) * self.patch_w
        right_w = self.pad_w + (pos_w + 0.5 * expand_mult) * self.patch_w
        slice_h = slice(int(round(left_h)), int(round(right_h)))
        slice_w = slice(int(round(left_w)), int(round(right_w)))
        return self.padded_obses[obs_index, slice_h, slice_w]

    def vis_dataset(self, feature, *, subdiv_mult=1, expand_mult=1, top_frac=0.1):
        """Grid of highest-activating dataset patches for *feature*.

        Each spatial cell of the (optionally subdivided) activation map is
        filled with the patch from the observation maximizing the feature
        there, alpha-weighted by activation strength.

        :return: (image array with alpha channel, nested list of chosen
            observation indices).
        """
        acts_h, acts_w = self.acts_reduced.shape[1:3]
        zoom_h = subdiv_mult - (subdiv_mult - 1) / (acts_h + 2)
        zoom_w = subdiv_mult - (subdiv_mult - 1) / (acts_w + 2)
        acts_subdiv = self.acts_reduced[..., feature]
        # Edge-pad before zooming so border cells interpolate sensibly,
        # then crop the padding back off.
        acts_subdiv = np.pad(acts_subdiv, [(0, 0), (1, 1), (1, 1)], mode="edge")
        acts_subdiv = nd.zoom(acts_subdiv, [1, zoom_h, zoom_w], order=1, mode="nearest")
        acts_subdiv = acts_subdiv[:, 1:-1, 1:-1]
        if acts_subdiv.size == 0:
            raise RuntimeError(
                f"subdiv_mult of {subdiv_mult} too small for "
                f"{self.acts_reduced.shape[1]}x{self.acts_reduced.shape[2]} "
                "activations"
            )
        # Fractional activation-map coordinates for each subdivided cell.
        poses = np.indices((acts_h + 2, acts_w + 2)).transpose((1, 2, 0))
        poses = nd.zoom(
            poses.astype(float), [zoom_h, zoom_w, 1], order=1, mode="nearest"
        )
        poses = poses[1:-1, 1:-1, :] - 0.5
        with np.errstate(divide="ignore"):
            max_rep = np.ceil(
                np.divide(
                    acts_subdiv.shape[1] * acts_subdiv.shape[2],
                    acts_subdiv.shape[0] * top_frac,
                )
            )
        obs_indices = argmax_nd(
            acts_subdiv, axes=[0], max_rep=max_rep, max_rep_strict=False
        )[0]
        self.pad_obses(expand_mult=expand_mult)
        patches = []
        patch_acts = np.zeros(obs_indices.shape)
        for i in range(obs_indices.shape[0]):
            patches.append([])
            for j in range(obs_indices.shape[1]):
                obs_index = obs_indices[i, j]
                pos_h, pos_w = poses[i, j]
                patch = self.get_patch(obs_index, pos_h, pos_w, expand_mult=expand_mult)
                patches[i].append(patch)
                patch_acts[i, j] = acts_subdiv[obs_index, i, j]
        # Normalize activations into [0, 1] alpha values (guarding all-zero).
        patch_acts_max = patch_acts.max()
        opacities = patch_acts / (1 if patch_acts_max == 0 else patch_acts_max)
        for i in range(obs_indices.shape[0]):
            for j in range(obs_indices.shape[1]):
                opacity = opacities[i, j][None, None, None]
                opacity = opacity.repeat(patches[i][j].shape[0], axis=0)
                opacity = opacity.repeat(patches[i][j].shape[1], axis=1)
                patches[i][j] = np.concatenate([patches[i][j], opacity], axis=-1)
        return (
            np.concatenate(
                [np.concatenate(patches[i], axis=1) for i in range(len(patches))],
                axis=0,
            ),
            obs_indices.tolist(),
        )

    def vis_dataset_thumbnail(
        self, feature, *, num_mult=1, expand_mult=1, max_rep=None
    ):
        """num_mult x num_mult thumbnail of the top-activating patches for
        *feature*, one patch per observation, arranged roughly by spatial
        position of the maximum.

        :return: (image array with alpha channel, nested list of chosen
            observation indices).
        """
        if max_rep is None:
            max_rep = num_mult
        if self.acts_reduced.shape[0] < num_mult ** 2:
            raise RuntimeError(
                f"At least {num_mult ** 2} observations are required to produce"
                " a thumbnail visualization."
            )
        acts_feature = self.acts_reduced[..., feature]
        # Per-observation argmax position, with repetition capped per cell.
        pos_indices = argmax_nd(
            acts_feature, axes=[1, 2], max_rep=max_rep, max_rep_strict=True
        )
        acts_single = acts_feature[
            range(acts_feature.shape[0]), pos_indices[0], pos_indices[1]
        ]
        # Keep the num_mult**2 strongest observations.
        obs_indices = np.argsort(-acts_single, axis=0)[: num_mult ** 2]
        # Sort the grid so patches appear in (row, column) order of their
        # activation-map positions.
        coords = np.array(list(zip(*pos_indices)), dtype=[("h", int), ("w", int)])[
            obs_indices
        ]
        indices_order = np.argsort(coords, axis=0, order=("h", "w"))
        indices_order = indices_order.reshape((num_mult, num_mult))
        for i in range(num_mult):
            indices_order[i] = indices_order[i][
                np.argsort(coords[indices_order[i]], axis=0, order="w")
            ]
        obs_indices = obs_indices[indices_order]
        poses = np.array(pos_indices).transpose()[obs_indices] + 0.5
        self.pad_obses(expand_mult=expand_mult)
        patches = []
        patch_acts = np.zeros((num_mult, num_mult))
        patch_shapes = []
        for i in range(num_mult):
            patches.append([])
            for j in range(num_mult):
                obs_index = obs_indices[i, j]
                pos_h, pos_w = poses[i, j]
                patch = self.get_patch(obs_index, pos_h, pos_w, expand_mult=expand_mult)
                patches[i].append(patch)
                patch_acts[i, j] = acts_single[obs_index]
                patch_shapes.append(patch.shape)
        # Alpha-weight by activation and crop all patches to the common
        # minimum size so they concatenate cleanly.
        patch_acts_max = patch_acts.max()
        opacities = patch_acts / (1 if patch_acts_max == 0 else patch_acts_max)
        patch_min_h = np.array([s[0] for s in patch_shapes]).min()
        patch_min_w = np.array([s[1] for s in patch_shapes]).min()
        for i in range(num_mult):
            for j in range(num_mult):
                opacity = opacities[i, j][None, None, None]
                opacity = opacity.repeat(patches[i][j].shape[0], axis=0)
                opacity = opacity.repeat(patches[i][j].shape[1], axis=1)
                patches[i][j] = np.concatenate([patches[i][j], opacity], axis=-1)
                patches[i][j] = patches[i][j][:patch_min_h, :patch_min_w]
        return (
            np.concatenate(
                [np.concatenate(patches[i], axis=1) for i in range(len(patches))],
                axis=0,
            ),
            obs_indices.tolist(),
        )
def rescale_opacity(
    images, min_opacity=15 / 255, opaque_frac=0.1, max_scale=10, keep_zeros=False
):
    """Rescale the alpha channel of RGBA *images* for display.

    Per image, roughly the top *opaque_frac* fraction of alpha values is
    mapped to full opacity and the rest scaled linearly above a floor of
    *min_opacity*.  With *keep_zeros*, pixels that were fully transparent
    stay fully transparent.  The input array is not modified.
    """
    original = images
    result = original.copy()

    alpha = result[..., 3]
    flat_shape = result.shape[:-3] + (result.shape[-3] * result.shape[-2],)
    alpha_flat = alpha.reshape(flat_shape)

    # Alpha value at which a pixel saturates to fully opaque: the
    # (1 - opaque_frac) percentile, but never less than max / max_scale.
    threshold = np.percentile(alpha_flat, (1 - opaque_frac) * 100, axis=-1)
    threshold = np.maximum(threshold, np.amax(alpha_flat, axis=-1) / max_scale)
    threshold = threshold[..., None, None]
    threshold[threshold == 0] = 1  # avoid dividing by zero on all-transparent images

    scaled = alpha * (1 - min_opacity) / threshold
    result[..., 3] = np.minimum(1, min_opacity + scaled)
    if keep_zeros:
        result[..., 3][original[..., 3] == 0] = 0
    return result
| {
"content_hash": "96465271a60a6d50d74147db5e0b7190",
"timestamp": "",
"source": "github",
"line_count": 302,
"max_line_length": 88,
"avg_line_length": 41.764900662251655,
"alnum_prop": 0.5376991992388805,
"repo_name": "tensorflow/lucid",
"id": "0ff3459d13d0cb721e7600e091e62a663c800378",
"size": "12613",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lucid/scratch/rl_util/nmf.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "4315"
},
{
"name": "JavaScript",
"bytes": "11689"
},
{
"name": "Jupyter Notebook",
"bytes": "184265269"
},
{
"name": "Python",
"bytes": "478323"
},
{
"name": "Shell",
"bytes": "2438"
}
],
"symlink_target": ""
} |
import os
import signal
import subprocess
from behave import given, when, then
from test.behave_utils import utils
def _run_sql(sql, opts=None):
    """Execute *sql* against the postgres database via psql.

    *opts* is an optional mapping of GUC name -> value; when given, the
    settings are passed to the server through the PGOPTIONS environment
    variable as repeated "-c name=value" flags.
    """
    env = None
    if opts is not None:
        env = os.environ.copy()
        flags = ["-c {}={} ".format(name, value) for name, value in opts.items()]
        env['PGOPTIONS'] = "".join(flags)

    subprocess.check_call(["psql", "postgres", "-c", sql], env=env)
@when('the standby host goes down')
def impl(context):
    """
    Fakes a host failure by updating the standby segment entry to point at an
    invalid hostname and address.
    """
    # Catalog edits require utility-mode access with system table mods enabled.
    opts = {
        'gp_role': 'utility',
        'allow_system_table_mods': 'on',
    }

    # Start master-only, repoint the standby entry (content -1, role 'm') at
    # an unresolvable host, then shut back down.
    subprocess.check_call(['gpstart', '-am'])
    _run_sql("""
        UPDATE gp_segment_configuration
        SET hostname = 'standby.invalid',
            address = 'standby.invalid'
        WHERE content = -1 AND role = 'm'
    """, opts=opts)
    subprocess.check_call(['gpstop', '-am'])

    def cleanup(context):
        """
        Reverses the above SQL by starting up in master-only utility mode. Since
        the standby host is incorrect, a regular gpstart call won't work.
        """
        utils.stop_database_if_started(context)

        subprocess.check_call(['gpstart', '-am'])
        # Copy the master's hostname/address back onto the standby entry.
        # NOTE: `opts` is captured from the enclosing impl() scope.
        _run_sql("""
            UPDATE gp_segment_configuration
            SET hostname = master.hostname,
                address = master.address
            FROM (
                SELECT hostname, address
                FROM gp_segment_configuration
                WHERE content = -1 and role = 'p'
            ) master
            WHERE content = -1 AND role = 'm'
        """, opts=opts)
        subprocess.check_call(['gpstop', '-am'])

    context.add_cleanup(cleanup, context)
def _handle_sigpipe():
    """
    Work around https://bugs.python.org/issue1615376, which is not fixed until
    Python 3.2. This bug interferes with Bash pipelines that rely on SIGPIPE to
    exit cleanly.
    """
    # Restore the default (terminate) disposition so the child dies quietly
    # when its stdout pipe closes.
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
@when('gpstart is run with prompts accepted')
def impl(context):
    """
    Runs `yes | gpstart`.
    """
    proc = subprocess.Popen(
        ["bash", "-c", "yes | gpstart"],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        preexec_fn=_handle_sigpipe,
    )
    out, err = proc.communicate()
    context.stdout_message = out
    context.stderr_message = err
    context.ret_code = proc.returncode
| {
"content_hash": "5a72b25e91b83d64940daa71bca4f120",
"timestamp": "",
"source": "github",
"line_count": 92,
"max_line_length": 80,
"avg_line_length": 27.554347826086957,
"alnum_prop": 0.5692307692307692,
"repo_name": "jmcatamney/gpdb",
"id": "47627411dd25bfe5da8f4d149a9e4b5471aa12bb",
"size": "2535",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gpMgmt/test/behave/mgmt_utils/steps/gpstart.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "3724"
},
{
"name": "Awk",
"bytes": "836"
},
{
"name": "Batchfile",
"bytes": "12854"
},
{
"name": "C",
"bytes": "42498841"
},
{
"name": "C++",
"bytes": "14366259"
},
{
"name": "CMake",
"bytes": "38452"
},
{
"name": "Csound Score",
"bytes": "223"
},
{
"name": "DTrace",
"bytes": "3873"
},
{
"name": "Dockerfile",
"bytes": "11932"
},
{
"name": "Emacs Lisp",
"bytes": "3488"
},
{
"name": "Fortran",
"bytes": "14863"
},
{
"name": "GDB",
"bytes": "576"
},
{
"name": "Gherkin",
"bytes": "335208"
},
{
"name": "HTML",
"bytes": "53484"
},
{
"name": "JavaScript",
"bytes": "23969"
},
{
"name": "Lex",
"bytes": "229556"
},
{
"name": "M4",
"bytes": "111147"
},
{
"name": "Makefile",
"bytes": "496239"
},
{
"name": "Objective-C",
"bytes": "38376"
},
{
"name": "PLpgSQL",
"bytes": "8009512"
},
{
"name": "Perl",
"bytes": "798767"
},
{
"name": "PowerShell",
"bytes": "422"
},
{
"name": "Python",
"bytes": "3000118"
},
{
"name": "Raku",
"bytes": "698"
},
{
"name": "Roff",
"bytes": "32437"
},
{
"name": "Ruby",
"bytes": "77585"
},
{
"name": "SCSS",
"bytes": "339"
},
{
"name": "Shell",
"bytes": "451713"
},
{
"name": "XS",
"bytes": "6983"
},
{
"name": "Yacc",
"bytes": "674092"
},
{
"name": "sed",
"bytes": "1231"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Rename Item.username to Item.user (column rename only, no data change)."""

    dependencies = [
        ('todo', '0007_auto_20170825_1748'),
    ]

    operations = [
        migrations.RenameField(
            model_name='item',
            old_name='username',
            new_name='user',
        ),
    ]
| {
"content_hash": "4b328e399c6c54f161dfb2d8b936fc5b",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 44,
"avg_line_length": 19.444444444444443,
"alnum_prop": 0.5657142857142857,
"repo_name": "noah-dev/todo_django",
"id": "5d3665c2f66442537f021178dd09863d827a9ba3",
"size": "423",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "todo/migrations/0008_auto_20170825_1752.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "138847"
},
{
"name": "HTML",
"bytes": "120293"
},
{
"name": "JavaScript",
"bytes": "366216"
},
{
"name": "Python",
"bytes": "20795"
}
],
"symlink_target": ""
} |
"""Special argparse module that allows to bypass abbrev mode."""
from __future__ import absolute_import
from argparse import * # noqa
import sys
if sys.version_info < (3, 5):
    class ArgumentParser(ArgumentParser):  # noqa
        """Backport of argparse's ``allow_abbrev`` option (added in 3.5).

        Shadows the star-imported ArgumentParser with a subclass that accepts
        ``allow_abbrev`` and, when it is False, disables prefix matching of
        long options.
        """

        def __init__(self, *args, **kwargs):
            self.allow_abbrev = kwargs.pop("allow_abbrev", True)
            super(ArgumentParser, self).__init__(*args, **kwargs)

        def _get_option_tuples(self, option_string):
            # Abbreviated option matching funnels through this hook; an empty
            # tuple means "no abbreviation matches".
            if self.allow_abbrev:
                return super(ArgumentParser, self)._get_option_tuples(
                    option_string)
            return ()
| {
"content_hash": "601b0f666e4e8ad1fc06eb3c9963518d",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 70,
"avg_line_length": 34.5,
"alnum_prop": 0.6038647342995169,
"repo_name": "dtroyer/cliff",
"id": "e48dc796e9e1624f9a928ec64adbefcb17898177",
"size": "1176",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cliff/_argparse.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "393"
},
{
"name": "Python",
"bytes": "234798"
},
{
"name": "Shell",
"bytes": "1076"
}
],
"symlink_target": ""
} |
"""
Created by: Lee Bergstrand
Description: Contains an object for representing HMM hits and methods for parsing hmmsearch results into these
objects. Also contains methods filtering these objects by their properties.
Requirements: - This script requires HMMER 3.1 or later.
"""
# Imports & Setup:
import re
import sys
from hashlib import md5
# Matches a per-domain hit row of hmmsearch's domain table: an index number
# followed by the inclusion flag ("!" = passed thresholds, "?" = did not).
# BUGFIX: raw string and \d+ so rows with a domain index >= 10 also match
# (previously only single-digit indices were recognized).
hit_row_regex = re.compile(r"^\s*\d+\s*((\?)|(\!))\s*")
# ========
# Classes:
# ========
class HMMHit(object):
    """A single hmmsearch domain hit against a target protein.

    Attributes derived at construction time:
      hmm_overlap  -- span of the HMM covered by this hit (hmm_to - hmm_from)
      ali_length   -- span of the target alignment (ali_to - ali_from)
      hmm_coverage -- fraction of the full HMM covered by the hit
    """

    def __init__(self, target_protein, hmm_name, score, e_value, hmm_from, hmm_to, ali_from, ali_to, hmm_length):
        # NOTE: assignment order matters — get_md5() hashes __dict__ values
        # in insertion order.
        self.target_protein = str(target_protein)
        self.hmm_name = str(hmm_name)
        self.score = float(score)
        self.e_value = float(e_value)
        self.hmm_from = int(hmm_from)
        self.hmm_to = int(hmm_to)
        self.hmm_overlap = self.hmm_to - self.hmm_from
        self.ali_from = int(ali_from)
        self.ali_to = int(ali_to)
        self.ali_length = self.ali_to - self.ali_from
        self.hmm_coverage = float(self.hmm_overlap) / float(hmm_length)

    def __repr__(self):
        # Multi-line summary used for logging/debugging.
        out_string = """
===========================
Target Protein: %s
HMM Name: %s
Score: %f
e_value: %f
hmm_from: %d
hmm_to: %d
hmm_overlap %d
ali_from: %d
ali_to: %d
ali_length: %d
hmm_coverage: %f
""" % (
            self.target_protein, self.hmm_name, self.score, self.e_value, self.hmm_from, self.hmm_to,
            self.hmm_overlap, self.ali_from, self.ali_to, self.ali_length, self.hmm_coverage)
        return out_string

    def __str__(self):
        return self.__repr__()

    def get_md5(self):
        """Return an MD5 hex digest over all attribute values.

        Values are stringified and concatenated in attribute insertion order,
        giving a stable fingerprint for deduplication.
        """
        concatenated = "".join(str(value) for value in self.__dict__.values())
        return md5(concatenated.encode('utf-8')).hexdigest()
# ==========
# Functions:
# ==========
# ------------------------------------------------------------------------------------------------------------
def parse_hmmsearch_results(hmm_results_string, hmm_name, hmm_length):
    """
    Parses HMM searches text output and generates a two dimensional array of the domain alignments results.

    :param hmm_results_string: hmmsearch text output as a string.
    :param hmm_name: The name of the HMM file.
    :param hmm_length: The length of the HMM file.
    :return: List of HMM hit objects.
    """
    hmm_hit_list = hmm_results_string.split(">>")  # Splits output at domain alignments.
    del hmm_hit_list[0]  # Removed string content above first >> which does not need to be iterated through.

    # Removes alignment visualization by splitting on the visualization header
    # and keeping all that is above it (first element) which is the HMM domain table.
    hmm_hit_list_cleaned = [x.split("Alignments")[0] for x in hmm_hit_list]

    hmm_object_list = []
    for protein in hmm_hit_list_cleaned:
        domain_table = protein.splitlines()
        target_protein_name = domain_table[0].split()[0]  # Gets target protein accession from domain table header row.
        for row in domain_table:
            if hit_row_regex.match(row):  # If row is a domain table line.
                column = row.split()
                # Column positions follow hmmsearch's per-domain table:
                # presumably 2=score, 5=independent E-value, 6/7=HMM coords,
                # 9/10=alignment coords — TODO confirm against hmmsearch output.
                hmm_hit_object = HMMHit(
                    target_protein=target_protein_name,
                    hmm_name=hmm_name,
                    score=column[2],
                    e_value=column[5],
                    hmm_from=column[6],
                    hmm_to=column[7],
                    ali_from=column[9],
                    ali_to=column[10],
                    hmm_length=hmm_length)

                hmm_object_list.append(hmm_hit_object)
    return hmm_object_list
# ------------------------------------------------------------------------------------------------------------
def filter_hmm_hit_list(hmm_hit_list, e_value_cutoff="1e-25", hmm_coverage=0.3, max_align_overlap=0.5):
    """
    Filters HMM hits by E-value, HMM coverage and overlap between hits.

    Hits on the same target protein whose alignments overlap by more than
    *max_align_overlap* (as a fraction of either alignment) are deduplicated,
    keeping the hit with the better (smaller) E-value.  Hits are assumed to be
    grouped by target protein and ordered by alignment position.

    :param hmm_hit_list: List of HMM hit objects.
    :param e_value_cutoff: The E-value cutoff for hits (string or float).
    :param hmm_coverage: Minimum HMM coverage fraction for kept hits.
    :param max_align_overlap: Maximum overlap fraction between adjacent hits.
    :return: List of filtered HMM hit objects.
    """
    max_e_value = float(e_value_cutoff)
    hmm_hit_list = [hit for hit in hmm_hit_list if hit.e_value < max_e_value]

    i = 0
    while i < (len(hmm_hit_list) - 1):
        hit_one = hmm_hit_list[i]      # Current row in hit table.
        hit_two = hmm_hit_list[i + 1]  # Row below.
        if hit_one.target_protein == hit_two.target_protein:
            overlap_between_hits = hit_one.ali_to - hit_two.ali_from
            if overlap_between_hits > 0:
                # If the overlap is greater than max_align_overlap of either alignment.
                if ((float(overlap_between_hits) / float(hit_one.ali_length)) > max_align_overlap) or (
                        (float(overlap_between_hits) / float(hit_two.ali_length)) > max_align_overlap):
                    # Positional delete (rather than .remove()) avoids
                    # accidentally deleting an equal-valued earlier hit.
                    if hit_one.e_value < hit_two.e_value:
                        # BUGFIX: after dropping hit_two, re-test hit_one
                        # against the next remaining hit instead of skipping
                        # past it (overlap chains were leaking through).
                        del hmm_hit_list[i + 1]
                    else:
                        # hit_one loses; the former hit_two shifts into index
                        # i and is compared against its next neighbour.
                        del hmm_hit_list[i]
                    continue  # Re-run the comparison at the same index.
        i += 1

    # Finally, drop hits that cover too little of the HMM.
    return [hit for hit in hmm_hit_list if hit.hmm_coverage > hmm_coverage]
# ------------------------------------------------------------------------------------------------------------
def get_hmm_length(hmm_path):
    """
    Gets HMM Length.

    Scans the HMM file for its "LENG" record (model length in match states).
    Exits the process (status 1) if the file cannot be read or the record is
    missing.

    :param hmm_path: path to the HMM file.
    :return: The length of the hmm.
    """
    # Raw string, and \d+ so a digit-less "LENG" line cannot slip through to
    # the int() conversion below.
    hmm_length_regex = re.compile(r"^LENG\s*\d+$")
    try:
        print(">> Opening HMM file: " + hmm_path)
        # BUGFIX: mode "rU" was removed in Python 3.11; universal newlines are
        # the default in text mode anyway.
        with open(hmm_path, "r") as in_file:
            for current_line in in_file:
                # `$` matches just before the trailing newline, so no rstrip
                # is needed.
                if hmm_length_regex.match(current_line):
                    return int(current_line.split()[1])
    except IOError:
        print("Failed to open " + hmm_path)
        sys.exit(1)
    # BUGFIX: a missing LENG record previously spun forever on readline();
    # fail loudly instead.
    print("No LENG record found in " + hmm_path)
    sys.exit(1)
| {
"content_hash": "3fecef4253e3c4b21733ab03b9b30023",
"timestamp": "",
"source": "github",
"line_count": 172,
"max_line_length": 113,
"avg_line_length": 33.27906976744186,
"alnum_prop": 0.6257861635220126,
"repo_name": "LeeBergstrand/HMMER-DB",
"id": "4bc92a331b74b386d37bbd1e93fdba9141c82131",
"size": "5747",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hmm_parser.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PLSQL",
"bytes": "2493"
},
{
"name": "Python",
"bytes": "31892"
},
{
"name": "R",
"bytes": "14145"
},
{
"name": "Shell",
"bytes": "812"
}
],
"symlink_target": ""
} |
'''
feature selection STRATEGY is implemented here
inputs : X, y, feature ranking vector, indices of the features accroding to the ranking
Author : Akhil P M
'''
def select_features(X, importances, indices, threshold=0.00045):
    """Return the columns of X for features whose importance exceeds *threshold*.

    STRATEGY: select features having importance greater than some threshold.

    :param X: 2-D sample matrix (n_samples x n_features).
    :param importances: importance scores aligned position-wise with *indices*.
    :param indices: feature/column indices ranked according to importance.
    :param threshold: importance cutoff (previously hard-coded as 0.00045;
        kept as the default for backward compatibility).
    :return: X restricted to the selected columns.
    """
    need = (importances > threshold)
    new_indx = indices[need]
    return X[:, new_indx]
def norm_values(norms, importances, indices, threshold=0.0005):
    """Extracts the norm values of selected features powered by their n value.

    NOTE(review): the default threshold (0.0005) differs from the 0.00045 used
    in select_features, so the two selections are not identical — confirm
    whether that is intentional.

    :param norms: per-feature norm values.
    :param importances: importance scores aligned position-wise with *indices*.
    :param indices: feature indices ranked according to importance.
    :param threshold: importance cutoff (previously hard-coded as 0.0005;
        kept as the default for backward compatibility).
    :return: norms of the selected features.
    """
    need = (importances > threshold)
    new_indx = indices[need]
    return norms[new_indx]
| {
"content_hash": "ed26b446128843d6b0251d9459fd1176",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 87,
"avg_line_length": 20.433333333333334,
"alnum_prop": 0.7259380097879282,
"repo_name": "akhilpm/Masters-Project",
"id": "7c111800aea813217e68cbe3f70370dfb51aaa4e",
"size": "613",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kpcaWithTreeFS/mnistBackRandom/feat_select.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "3311"
},
{
"name": "Python",
"bytes": "224741"
},
{
"name": "Shell",
"bytes": "133"
},
{
"name": "TeX",
"bytes": "23696"
}
],
"symlink_target": ""
} |
import copy
import datetime
import mock
from mistral.db.v2 import api as db_api
from mistral.db.v2.sqlalchemy import models
from mistral import exceptions as exc
from mistral.tests.unit.api import base
from mistral import utils
WF_DEFINITION = """
---
version: '2.0'
flow:
type: direct
input:
- param1
tasks:
task1:
action: std.echo output="Hi"
"""
WF_DB = models.WorkflowDefinition(
id='123e4567-e89b-12d3-a456-426655440000',
name='flow',
definition=WF_DEFINITION,
created_at=datetime.datetime(1970, 1, 1),
updated_at=datetime.datetime(1970, 1, 1),
spec={'input': ['param1']}
)
WF = {
'id': '123e4567-e89b-12d3-a456-426655440000',
'name': 'flow',
'definition': WF_DEFINITION,
'created_at': '1970-01-01 00:00:00',
'updated_at': '1970-01-01 00:00:00',
'input': 'param1'
}
WF_DEFINITION_WITH_INPUT = """
---
version: '2.0'
flow:
type: direct
input:
- param1
- param2: 2
tasks:
task1:
action: std.echo output="Hi"
"""
WF_DB_WITH_INPUT = models.WorkflowDefinition(
name='flow',
definition=WF_DEFINITION_WITH_INPUT,
created_at=datetime.datetime(1970, 1, 1),
updated_at=datetime.datetime(1970, 1, 1),
spec={'input': ['param1', {'param2': 2}]}
)
WF_WITH_DEFAULT_INPUT = {
'name': 'flow',
'definition': WF_DEFINITION_WITH_INPUT,
'created_at': '1970-01-01 00:00:00',
'updated_at': '1970-01-01 00:00:00',
'input': 'param1, param2=2'
}
UPDATED_WF_DEFINITION = """
---
version: '2.0'
flow:
type: direct
input:
- param1
- param2
tasks:
task1:
action: std.echo output="Hi"
"""
UPDATED_WF_DB = copy.copy(WF_DB)
UPDATED_WF_DB['definition'] = UPDATED_WF_DEFINITION
UPDATED_WF = copy.copy(WF)
UPDATED_WF['definition'] = UPDATED_WF_DEFINITION
WF_DEF_INVALID_MODEL_EXCEPTION = """
---
version: '2.0'
flow:
type: direct
tasks:
task1:
action: std.echo output="Hi"
workflow: wf1
"""
WF_DEF_DSL_PARSE_EXCEPTION = """
---
%
"""
WF_DEF_YAQL_PARSE_EXCEPTION = """
---
version: '2.0'
flow:
type: direct
tasks:
task1:
action: std.echo output=<% * %>
"""
MOCK_WF = mock.MagicMock(return_value=WF_DB)
MOCK_WF_WITH_INPUT = mock.MagicMock(return_value=WF_DB_WITH_INPUT)
MOCK_WFS = mock.MagicMock(return_value=[WF_DB])
MOCK_UPDATED_WF = mock.MagicMock(return_value=UPDATED_WF_DB)
MOCK_DELETE = mock.MagicMock(return_value=None)
MOCK_EMPTY = mock.MagicMock(return_value=[])
MOCK_NOT_FOUND = mock.MagicMock(side_effect=exc.NotFoundException())
MOCK_DUPLICATE = mock.MagicMock(side_effect=exc.DBDuplicateEntryException())
class TestWorkflowsController(base.FunctionalTest):
    """Functional tests for the /v2/workflows REST API.

    db_api calls are patched with the canned MagicMock objects defined at
    module level, so only the API layer (routing, validation, serialization
    and HTTP error mapping) is exercised -- no real database is involved.
    """

    # --- GET /v2/workflows/<id> ---

    @mock.patch.object(db_api, "get_workflow_definition", MOCK_WF)
    def test_get(self):
        resp = self.app.get('/v2/workflows/123')

        self.assertEqual(resp.status_int, 200)
        self.assertDictEqual(WF, resp.json)

    @mock.patch.object(db_api, "get_workflow_definition", MOCK_WF_WITH_INPUT)
    def test_get_with_input(self):
        # Workflow declares default input values; they must survive the
        # round trip into the REST representation.
        resp = self.app.get('/v2/workflows/123')

        self.maxDiff = None

        self.assertEqual(resp.status_int, 200)
        self.assertDictEqual(WF_WITH_DEFAULT_INPUT, resp.json)

    @mock.patch.object(db_api, "get_workflow_definition", MOCK_NOT_FOUND)
    def test_get_not_found(self):
        resp = self.app.get('/v2/workflows/123', expect_errors=True)

        self.assertEqual(resp.status_int, 404)

    # --- PUT /v2/workflows (update from DSL text) ---

    @mock.patch.object(
        db_api, "update_workflow_definition", MOCK_UPDATED_WF
    )
    def test_put(self):
        resp = self.app.put(
            '/v2/workflows',
            UPDATED_WF_DEFINITION,
            headers={'Content-Type': 'text/plain'}
        )

        self.maxDiff = None

        self.assertEqual(resp.status_int, 200)
        self.assertDictEqual({'workflows': [UPDATED_WF]}, resp.json)

    @mock.patch.object(
        db_api, "update_workflow_definition", MOCK_WF_WITH_INPUT
    )
    def test_put_with_input(self):
        resp = self.app.put(
            '/v2/workflows',
            WF_DEFINITION_WITH_INPUT,
            headers={'Content-Type': 'text/plain'}
        )

        self.maxDiff = None

        self.assertEqual(resp.status_int, 200)
        self.assertDictEqual({'workflows': [WF_WITH_DEFAULT_INPUT]}, resp.json)

    @mock.patch.object(
        db_api, "update_workflow_definition", MOCK_NOT_FOUND
    )
    def test_put_not_found(self):
        resp = self.app.put(
            '/v2/workflows',
            UPDATED_WF_DEFINITION,
            headers={'Content-Type': 'text/plain'},
            expect_errors=True,
        )

        self.assertEqual(resp.status_int, 404)

    def test_put_invalid(self):
        # Invalid DSL must be rejected by the API layer before any db call.
        resp = self.app.put(
            '/v2/workflows',
            WF_DEF_INVALID_MODEL_EXCEPTION,
            headers={'Content-Type': 'text/plain'},
            expect_errors=True
        )

        self.assertEqual(resp.status_int, 400)
        self.assertIn("Invalid DSL", resp.body)

    # --- POST /v2/workflows (create from DSL text) ---

    @mock.patch.object(db_api, "create_workflow_definition")
    def test_post(self, mock_mtd):
        mock_mtd.return_value = WF_DB

        resp = self.app.post(
            '/v2/workflows',
            WF_DEFINITION,
            headers={'Content-Type': 'text/plain'}
        )

        self.assertEqual(resp.status_int, 201)
        self.assertDictEqual({'workflows': [WF]}, resp.json)

        self.assertEqual(1, mock_mtd.call_count)

        # The parsed spec must be stored alongside the definition.
        spec = mock_mtd.call_args[0][0]['spec']

        self.assertIsNotNone(spec)
        self.assertEqual(WF_DB.name, spec['name'])

    @mock.patch.object(db_api, "create_workflow_definition", MOCK_DUPLICATE)
    def test_post_dup(self):
        # Duplicate DB entry maps to HTTP 409 Conflict.
        resp = self.app.post(
            '/v2/workflows',
            WF_DEFINITION,
            headers={'Content-Type': 'text/plain'},
            expect_errors=True
        )

        self.assertEqual(resp.status_int, 409)

    def test_post_invalid(self):
        resp = self.app.post(
            '/v2/workflows',
            WF_DEF_INVALID_MODEL_EXCEPTION,
            headers={'Content-Type': 'text/plain'},
            expect_errors=True
        )

        self.assertEqual(resp.status_int, 400)
        self.assertIn("Invalid DSL", resp.body)

    # --- DELETE /v2/workflows/<id> ---

    @mock.patch.object(db_api, "delete_workflow_definition", MOCK_DELETE)
    def test_delete(self):
        resp = self.app.delete('/v2/workflows/123')

        self.assertEqual(resp.status_int, 204)

    @mock.patch.object(db_api, "delete_workflow_definition", MOCK_NOT_FOUND)
    def test_delete_not_found(self):
        resp = self.app.delete('/v2/workflows/123', expect_errors=True)

        self.assertEqual(resp.status_int, 404)

    # --- GET /v2/workflows (listing, pagination, field selection) ---

    @mock.patch.object(db_api, "get_workflow_definitions", MOCK_WFS)
    def test_get_all(self):
        resp = self.app.get('/v2/workflows')

        self.assertEqual(resp.status_int, 200)

        self.assertEqual(len(resp.json['workflows']), 1)
        self.assertDictEqual(WF, resp.json['workflows'][0])

    @mock.patch.object(db_api, "get_workflow_definitions", MOCK_EMPTY)
    def test_get_all_empty(self):
        resp = self.app.get('/v2/workflows')

        self.assertEqual(resp.status_int, 200)

        self.assertEqual(len(resp.json['workflows']), 0)

    @mock.patch.object(db_api, "get_workflow_definitions", MOCK_WFS)
    def test_get_all_pagination(self):
        resp = self.app.get(
            '/v2/workflows?limit=1&sort_keys=id,name')

        self.assertEqual(resp.status_int, 200)
        # A 'next' link must be present and carry marker/limit/sort params.
        self.assertIn('next', resp.json)

        self.assertEqual(len(resp.json['workflows']), 1)
        self.assertDictEqual(WF, resp.json['workflows'][0])

        param_dict = utils.get_dict_from_string(
            resp.json['next'].split('?')[1],
            delimiter='&'
        )

        expected_dict = {
            'marker': '123e4567-e89b-12d3-a456-426655440000',
            'limit': 1,
            'sort_keys': 'id,name',
            'sort_dirs': 'asc,asc',
        }

        self.assertDictEqual(expected_dict, param_dict)

    def test_get_all_pagination_limit_negative(self):
        resp = self.app.get(
            '/v2/workflows?limit=-1&sort_keys=id,name&sort_dirs=asc,asc',
            expect_errors=True
        )

        self.assertEqual(resp.status_int, 400)
        self.assertIn("Limit must be positive", resp.body)

    def test_get_all_pagination_limit_not_integer(self):
        resp = self.app.get(
            '/v2/workflows?limit=1.1&sort_keys=id,name&sort_dirs=asc,asc',
            expect_errors=True
        )

        self.assertEqual(resp.status_int, 400)
        self.assertIn("unable to convert to int", resp.body)

    def test_get_all_pagination_invalid_sort_dirs_length(self):
        resp = self.app.get(
            '/v2/workflows?limit=1&sort_keys=id,name&sort_dirs=asc,asc,asc',
            expect_errors=True
        )

        self.assertEqual(resp.status_int, 400)
        self.assertIn(
            "Length of sort_keys must be equal or greater than sort_dirs",
            resp.body
        )

    def test_get_all_pagination_unknown_direction(self):
        resp = self.app.get(
            '/v2/workflows?limit=1&sort_keys=id&sort_dirs=nonexist',
            expect_errors=True
        )

        self.assertEqual(resp.status_int, 400)
        self.assertIn("Unknown sort direction", resp.body)

    @mock.patch('mistral.db.v2.api.get_workflow_definitions')
    def test_get_all_with_fields_filter(self, mock_get_db_wfs):
        # When ?fields= is given, db_api returns bare tuples (id + fields)
        # instead of full definition objects.
        mock_get_db_wfs.return_value = [
            ('123e4567-e89b-12d3-a456-426655440000', 'fake_name')
        ]

        resp = self.app.get('/v2/workflows?fields=name')

        self.assertEqual(resp.status_int, 200)
        self.assertEqual(len(resp.json['workflows']), 1)

        expected_dict = {
            'id': '123e4567-e89b-12d3-a456-426655440000',
            'name': 'fake_name'
        }

        self.assertDictEqual(expected_dict, resp.json['workflows'][0])

    def test_get_all_with_invalid_field(self):
        resp = self.app.get(
            '/v2/workflows?fields=name,nonexist',
            expect_errors=True
        )

        self.assertEqual(resp.status_int, 400)
        self.assertIn(
            "nonexist are invalid",
            resp.body
        )

    # --- POST /v2/workflows/validate ---
    # Note: validation failures still return HTTP 200; the outcome is
    # reported in the 'valid'/'error' fields of the JSON body.

    def test_validate(self):
        resp = self.app.post(
            '/v2/workflows/validate',
            WF_DEFINITION,
            headers={'Content-Type': 'text/plain'}
        )

        self.assertEqual(resp.status_int, 200)
        self.assertTrue(resp.json['valid'])

    def test_validate_invalid_model_exception(self):
        resp = self.app.post(
            '/v2/workflows/validate',
            WF_DEF_INVALID_MODEL_EXCEPTION,
            headers={'Content-Type': 'text/plain'},
            expect_errors=True
        )

        self.assertEqual(resp.status_int, 200)
        self.assertFalse(resp.json['valid'])
        self.assertIn("Invalid DSL", resp.json['error'])

    def test_validate_dsl_parse_exception(self):
        resp = self.app.post(
            '/v2/workflows/validate',
            WF_DEF_DSL_PARSE_EXCEPTION,
            headers={'Content-Type': 'text/plain'},
            expect_errors=True
        )

        self.assertEqual(resp.status_int, 200)
        self.assertFalse(resp.json['valid'])
        self.assertIn("Definition could not be parsed", resp.json['error'])

    def test_validate_yaql_parse_exception(self):
        resp = self.app.post(
            '/v2/workflows/validate',
            WF_DEF_YAQL_PARSE_EXCEPTION,
            headers={'Content-Type': 'text/plain'},
            expect_errors=True
        )

        self.assertEqual(resp.status_int, 200)
        self.assertFalse(resp.json['valid'])
        self.assertIn("unexpected '*' at position 1",
                      resp.json['error'])

    def test_validate_empty(self):
        resp = self.app.post(
            '/v2/workflows/validate',
            '',
            headers={'Content-Type': 'text/plain'},
            expect_errors=True
        )

        self.assertEqual(resp.status_int, 200)
        self.assertFalse(resp.json['valid'])
        self.assertIn("Invalid DSL", resp.json['error'])
| {
"content_hash": "482fa63f7b60cd7e04938566e08151a9",
"timestamp": "",
"source": "github",
"line_count": 439,
"max_line_length": 79,
"avg_line_length": 27.624145785876994,
"alnum_prop": 0.5982518347489074,
"repo_name": "dennybaa/mistral",
"id": "66fdd7fe32fab5e7aa1a3dbfc232b1084f37b937",
"size": "12771",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mistral/tests/unit/api/v2/test_workflows.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "951"
},
{
"name": "Python",
"bytes": "1037769"
},
{
"name": "Shell",
"bytes": "18657"
}
],
"symlink_target": ""
} |
import array
import zipfile
import numpy as np
import scipy.sparse as sp
from binge.data import _common
# Repackaged Movielens 100k zip hosted on GitHub.
# NOTE(review): keep fetch_movielens_100k() pointed at this constant rather
# than repeating the literal inline, so the location is defined once.
URL_100K = ('https://github.com/maciejkula/'
            'lightfm_datasets/releases/'
            'download/v0.1.0/movielens.zip')

# Official Grouplens download for the Movielens 1M dataset.
URL_1M = ('http://files.grouplens.org/datasets/movielens/ml-1m.zip')
def _read_raw_data_100k(path):
    """
    Return the newline-separated lines of the 100k ratings file
    ('ml-100k/u.data') extracted from the zip archive at *path*.
    """
    with zipfile.ZipFile(path) as archive:
        contents = archive.read('ml-100k/u.data')

    return contents.decode().split('\n')
def _read_raw_data_1M(path):
    """
    Return the newline-separated lines of the 1M ratings file
    ('ml-1m/ratings.dat') extracted from the zip archive at *path*.
    """
    with zipfile.ZipFile(path) as archive:
        raw = archive.read('ml-1m/ratings.dat').decode()

    return raw.split('\n')
def _parse(data, separator='\t'):
    """Yield (uid, iid, rating, timestamp) tuples parsed from raw lines.

    Blank lines are skipped. Each remaining line must contain exactly four
    separator-delimited fields: user id, item id, rating, timestamp.

    :param data: iterable of raw text lines.
    :param separator: field delimiter ('\\t' for 100k, '::' for 1M).
    """
    for line in data:
        if not line:
            continue

        # split() already returns a list; the original wrapped it in a
        # redundant ``[x for x in ...]`` copy.
        uid, iid, rating, timestamp = line.split(separator)

        yield int(uid), int(iid), float(rating), int(timestamp)
def _load_data(data, separator='\t'):
    """Parse raw lines into contiguous user/item index arrays.

    External user and item ids are remapped to dense 0-based indices in
    order of first appearance. Ratings are parsed but discarded; the
    interactions are treated as implicit feedback.

    Returns a tuple of numpy arrays: (user indices, item indices,
    timestamps), all of equal length.
    """
    user_index = {}
    item_index = {}

    # array.array('i') keeps the intermediate storage compact and fixes
    # the resulting numpy dtype to a C int.
    user_ids = array.array('i')
    item_ids = array.array('i')
    timestamps = array.array('i')

    for uid, iid, _rating, timestamp in _parse(data, separator):
        user_ids.append(user_index.setdefault(uid, len(user_index)))
        item_ids.append(item_index.setdefault(iid, len(item_index)))
        timestamps.append(timestamp)

    return (np.array(user_ids),
            np.array(item_ids),
            np.array(timestamps))
def fetch_movielens_100k(data_home=None, download_if_missing=True, random_seed=None):
    """
    Fetch the `Movielens 100k dataset <http://grouplens.org/datasets/movielens/100k/>`_.

    The dataset contains 100,000 interactions from 1000 users on 1700 movies,
    and is exhaustively described in its
    `README <http://files.grouplens.org/datasets/movielens/ml-100k-README.txt>`_.

    Parameters
    ----------
    data_home: path, optional
        Path to the directory in which the downloaded data should be placed.
        Defaults to ``~/lightfm_data/``.
    download_if_missing: bool, optional
        Download the data if not present. Raises an IOError if False and data is missing.
    random_seed: int, optional
        Seed used to shuffle the interactions before splitting.

    Returns
    -------
    (train, test, validation): tuple of scipy.sparse.coo_matrix
        Binary interaction matrices of shape (num_users, num_items),
        holding roughly 80%/10%/10% of the shuffled interactions.
    """
    random_state = np.random.RandomState(random_seed)

    validation_percentage = 0.1
    test_percentage = 0.1

    # Use the module-level URL_100K constant instead of repeating the URL
    # literal inline, so the download location is defined in one place.
    zip_path = _common.get_data(data_home,
                                URL_100K,
                                'movielens100k',
                                'movielens.zip',
                                download_if_missing)

    # Load raw data and remap external ids to dense 0-based indices.
    raw = _read_raw_data_100k(zip_path)
    uids, iids, timestamps = _load_data(raw)

    num_users = uids.max() + 1
    num_items = iids.max() + 1

    # Shuffle, then carve off the first 10% as validation and the next 10%
    # as test; the remainder is the training set.
    sort_indices = np.arange(len(timestamps))
    random_state.shuffle(sort_indices)

    uids = uids[sort_indices]
    iids = iids[sort_indices]
    timestamps = timestamps[sort_indices]

    validation_index = int(validation_percentage * len(uids))
    test_index = int((validation_percentage + test_percentage) * len(uids))

    validation = sp.coo_matrix((np.ones_like(uids[:validation_index]),
                                (uids[:validation_index],
                                 iids[:validation_index])),
                               shape=(num_users, num_items))
    test = sp.coo_matrix((np.ones_like(uids[validation_index:test_index]),
                          (uids[validation_index:test_index],
                           iids[validation_index:test_index])),
                         shape=(num_users, num_items))
    train = sp.coo_matrix((np.ones_like(uids[test_index:]),
                           (uids[test_index:],
                            iids[test_index:])),
                          shape=(num_users, num_items))

    return train, test, validation
def fetch_movielens_1M(data_home=None, download_if_missing=True, random_seed=None):
    """
    Fetch the Movielens 1M dataset and split it into train/test/validation.

    Parameters
    ----------
    data_home: path, optional
        Path to the directory in which the downloaded data should be placed.
        Defaults to ``~/lightfm_data/``.
    download_if_missing: bool, optional
        Download the data if not present. Raises an IOError if False and data is missing.
    random_seed: int, optional
        Seed used to shuffle the interactions before splitting.

    Returns
    -------
    (train, test, validation): tuple of scipy.sparse.coo_matrix
        Binary interaction matrices of shape (num_users, num_items),
        holding roughly 80%/10%/10% of the shuffled interactions.
    """
    random_state = np.random.RandomState(random_seed)

    validation_percentage = 0.1
    test_percentage = 0.1

    zip_path = _common.get_data(data_home,
                                URL_1M,
                                'movielens1M',
                                'movielens.zip',
                                download_if_missing)

    # Load raw data ('::'-delimited) and remap ids to dense indices.
    raw = _read_raw_data_1M(zip_path)
    uids, iids, timestamps = _load_data(raw, '::')

    num_users = uids.max() + 1
    num_items = iids.max() + 1

    # Shuffle, then take the first 10% as validation and the next 10%
    # as test; the remainder is the training set.
    sort_indices = np.arange(len(timestamps))
    random_state.shuffle(sort_indices)

    uids = uids[sort_indices]
    iids = iids[sort_indices]
    timestamps = timestamps[sort_indices]

    validation_index = int(validation_percentage * len(uids))
    test_index = int((validation_percentage + test_percentage) * len(uids))

    validation = sp.coo_matrix((np.ones_like(uids[:validation_index]),
                                (uids[:validation_index],
                                 iids[:validation_index])),
                               shape=(num_users, num_items))
    test = sp.coo_matrix((np.ones_like(uids[validation_index:test_index]),
                          (uids[validation_index:test_index],
                           iids[validation_index:test_index])),
                         shape=(num_users, num_items))
    train = sp.coo_matrix((np.ones_like(uids[test_index:]),
                           (uids[test_index:],
                            iids[test_index:])),
                          shape=(num_users, num_items))

    return train, test, validation
| {
"content_hash": "2967cfb77ee2da17851dd60c966ac845",
"timestamp": "",
"source": "github",
"line_count": 203,
"max_line_length": 89,
"avg_line_length": 30.72906403940887,
"alnum_prop": 0.5564283424174414,
"repo_name": "maciejkula/binge",
"id": "d95fea3d25f7d77fd7d6f2f12e0d204d593624e5",
"size": "6238",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "binge/data/movielens.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "4259"
},
{
"name": "C++",
"bytes": "13845"
},
{
"name": "Makefile",
"bytes": "1034"
},
{
"name": "Python",
"bytes": "55572"
},
{
"name": "TeX",
"bytes": "105547"
}
],
"symlink_target": ""
} |
'''
@author: FangSun
'''
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.test_state as test_state
import hot_plug_common
import zstackwoodpecker.operations.vm_operations as vm_ops
import time
# Woodpecker test-case configuration: overall timeout (seconds) and whether
# the case may run in parallel with other cases.
_config_ = {
    'timeout': 1000,
    'noparallel': False
}

# Shared project helpers: the test stub and a state dict tracking created
# resources so error_cleanup() can tear them down on failure.
test_stub = test_lib.lib_get_test_stub()
test_obj_dict = test_state.TestStateDict()
def test():
    """Hot-plug 1G of memory into a running VM and verify the accounting.

    After the hot-plug the assertions check that:
      * host available memory dropped by exactly 1G,
      * the VM's memory (seen both from the management side and from
        inside the guest) grew by exactly 1G,
      * CPU counts and host CPU availability are untouched.
    """
    vm = hot_plug_common.create_onlinechange_vm(test_stub, test_obj_dict)

    (available_cpu_before, available_memory_before, vm_outer_cpu_before,
     vm_outer_mem_before, vm_interal_cpu_before,
     vm_interal_mem_before) = hot_plug_common.check_cpu_mem(vm)

    MEM1G = 1024 * 1024 * 1024

    vm_instance_offering = test_lib.lib_get_instance_offering_by_uuid(
        vm.get_vm().instanceOfferingUuid)

    # BUG FIX: this memory hot-plug case previously added a CPU
    # (cpuNum + 1, memory=None) while every assertion below checks a 1G
    # memory delta and an unchanged CPU count. Hot-plug 1G of memory
    # instead and leave the CPU count alone.
    # NOTE(review): assumes update_vm(uuid, cpu, memory) semantics and that
    # the instance offering exposes memorySize -- confirm against vm_ops.
    vm_ops.update_vm(vm.get_vm().uuid, None,
                     vm_instance_offering.memorySize + MEM1G)
    vm.update()
    # Give the guest time to bring the new memory online.
    time.sleep(10)

    (available_cpu_after, available_memory_after, vm_outer_cpu_after,
     vm_outer_mem_after, vm_interal_cpu_after,
     vm_internal_mem_after) = hot_plug_common.check_cpu_mem(vm)

    # CPU must be untouched; memory must shift by exactly 1G everywhere.
    assert available_cpu_before == available_cpu_after
    assert available_memory_before == available_memory_after + MEM1G
    assert vm_outer_cpu_before == vm_outer_cpu_after
    assert vm_outer_mem_before == vm_outer_mem_after - MEM1G
    assert vm_interal_cpu_before == vm_interal_cpu_after
    assert vm_interal_mem_before == vm_internal_mem_after - MEM1G
def error_cleanup():
    # Woodpecker hook: tear down every resource recorded in test_obj_dict
    # when test() fails part-way through.
    test_lib.lib_error_cleanup(test_obj_dict)
"content_hash": "92a9941634e28031eca21e59167f0acd",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 103,
"avg_line_length": 35.642857142857146,
"alnum_prop": 0.7120908483633934,
"repo_name": "zstackio/zstack-woodpecker",
"id": "ef0e7ad22279657cc382e4fa41f6c17f10867fff",
"size": "1497",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "integrationtest/vm/virt_plus/Hot_plug/test_hotplugin_memory.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2356"
},
{
"name": "Go",
"bytes": "49822"
},
{
"name": "Makefile",
"bytes": "687"
},
{
"name": "Puppet",
"bytes": "875"
},
{
"name": "Python",
"bytes": "13070596"
},
{
"name": "Shell",
"bytes": "177861"
}
],
"symlink_target": ""
} |
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South migration: add Event.registration_is_open / registration_url.

    Auto-generated by South's schemamigration command; the ``models`` dict
    below is the frozen ORM state after this migration and should not be
    edited by hand.
    """

    def forwards(self, orm):
        # Adding field 'Event.registration_is_open'
        db.add_column(u'sked_event', 'registration_is_open',
                      self.gf('django.db.models.fields.BooleanField')(default=False),
                      keep_default=False)

        # Adding field 'Event.registration_url'
        db.add_column(u'sked_event', 'registration_url',
                      self.gf('django.db.models.fields.URLField')(default='', max_length=200, blank=True),
                      keep_default=False)

    def backwards(self, orm):
        # Deleting field 'Event.registration_is_open'
        db.delete_column(u'sked_event', 'registration_is_open')

        # Deleting field 'Event.registration_url'
        db.delete_column(u'sked_event', 'registration_url')

    # Frozen ORM model definitions (generated; used by South to build the
    # ``orm`` object passed to forwards()/backwards()).
    models = {
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'sked.event': {
            'Meta': {'ordering': "('-start_date',)", 'object_name': 'Event'},
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sked_events'", 'to': u"orm['auth.User']"}),
            'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'label': ('django.db.models.fields.CharField', [], {'default': "'event'", 'max_length': '64'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'registration_is_open': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'registration_url': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
            'session_label': ('django.db.models.fields.CharField', [], {'default': "'session'", 'max_length': '64'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
            'start_date': ('django.db.models.fields.DateField', [], {}),
            'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'sked.session': {
            'Meta': {'ordering': "('start_time',)", 'unique_together': "(('event', 'slug'),)", 'object_name': 'Session'},
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'end_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sessions'", 'to': u"orm['sked.Event']"}),
            'extra_data': ('jsonfield.fields.JSONField', [], {'default': "'{}'", 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'location': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
            'published_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'approved_sked_sessions'", 'to': u"orm['auth.User']"}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
            'speakers': ('jsonfield.fields.JSONField', [], {'default': "'[]'"}),
            'start_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'taggit.tag': {
            'Meta': {'object_name': 'Tag'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
        },
        u'taggit.taggeditem': {
            'Meta': {'object_name': 'TaggedItem'},
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_tagged_items'", 'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
            'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_items'", 'to': u"orm['taggit.Tag']"})
        }
    }

    complete_apps = ['sked']
"content_hash": "05cc9839217db3df1b11e152c380ec72",
"timestamp": "",
"source": "github",
"line_count": 114,
"max_line_length": 187,
"avg_line_length": 71.56140350877193,
"alnum_prop": 0.5519735229222849,
"repo_name": "sunlightlabs/tcamp",
"id": "1c59da304c69bebfc7e1f0fd3167da8ba738c8ce",
"size": "8182",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tcamp/sked/migrations/0002_auto__add_field_event_registration_is_open__add_field_event_registrati.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "191488"
},
{
"name": "HTML",
"bytes": "832187"
},
{
"name": "JavaScript",
"bytes": "86789"
},
{
"name": "Python",
"bytes": "703083"
},
{
"name": "Shell",
"bytes": "623"
}
],
"symlink_target": ""
} |
"""Create accounts needed on this GCE instance.
Creates accounts based on the contents of ACCOUNTS_URL, which should contain a
newline-delimited file of accounts and SSH public keys. Each line represents a
SSH public key which should be allowed to log in to that account.
If the account does not already exist on the system, it is created and added
to /etc/sudoers to allow that account to administer the machine without needing
a password.
"""
import errno
import grp
import logging
import os
import pwd
import re
import stat
import tempfile
import time
def EnsureTrailingNewline(line):
    """Return line unchanged if it ends with '\\n', else with '\\n' appended."""
    return line if line.endswith('\n') else line + '\n'
def IsUserSudoerInLines(user, sudoer_lines):
    """Return the sudoers lines that start with an entry for user.

    The result is truthy iff the user already has at least one entry
    ('<user>' followed by whitespace at the start of a line).
    """
    user_entry = re.compile(r'^%s\s+' % user)
    return [line for line in sudoer_lines if user_entry.match(line)]
class Accounts(object):
"""Manage accounts on a machine."""
# Comes from IEEE Std 1003.1-2001. Characters from the portable
# filename character set. The hyphen should not be the first char
# of a portable user name.
VALID_USERNAME_CHARS = set(
map(chr, range(ord('A'), ord('Z') + 1)) +
map(chr, range(ord('a'), ord('z') + 1)) +
map(chr, range(ord('0'), ord('9') + 1)) +
['_', '-', '.'])
def __init__(self, grp_module=grp, os_module=os,
pwd_module=pwd, system_module=None,
urllib2_module=None, time_module=time):
"""Construct an Accounts given the module injections."""
self.system_module = system_module
self.grp = grp_module
self.os = os_module
self.pwd = pwd_module
self.system = system_module
self.time_module = time_module
self.urllib2 = urllib2_module
self.default_user_groups = self.GroupsThatExist(
['adm', 'video', 'dip', 'plugdev'])
def CreateUser(self, username, ssh_keys):
"""Create username on the system, with authorized ssh_keys."""
if not self.IsValidUsername(username):
logging.warning(
'Not creating account for user %s. Usernames must comprise'
' characters [A-Za-z0-9._-] and not start with \'-\'.', username)
return
if not self.UserExists(username):
self.system.UserAdd(username, self.default_user_groups)
if self.UserExists(username):
self.MakeUserSudoer(username)
self.AuthorizeSshKeys(username, ssh_keys)
def IsValidUsername(self, username):
"""Return whether username looks like a valid user name."""
def InvalidCharacterFilter(c):
return c not in Accounts.VALID_USERNAME_CHARS
if filter(InvalidCharacterFilter, username):
# There's an invalid character in it.
return False
if username.startswith('-'):
return False
return True
def GroupsThatExist(self, groups_list):
"""Return all the groups in groups_list that exist on the machine."""
def GroupExists(group):
try:
self.grp.getgrnam(group)
return True
except KeyError:
return False
return filter(GroupExists, groups_list)
def GetUserInfo(self, user):
"""Return a tuple of the user's (home_dir, pid, gid)."""
pwent = self.pwd.getpwnam(user)
return (pwent.pw_dir, pwent.pw_uid, pwent.pw_gid)
def UserExists(self, user):
"""Test whether a given user exists or not."""
try:
self.pwd.getpwnam(user)
return True
except KeyError:
return False
def LockSudoers(self):
"""Create an advisory lock on /etc/sudoers.tmp.
Returns:
True if successful, False if not.
"""
try:
f = self.os.open('/etc/sudoers.tmp', os.O_EXCL|os.O_CREAT)
self.os.close(f)
return True
except OSError as e:
if e.errno == errno.EEXIST:
logging.warning('/etc/sudoers.tmp lock file already exists')
else:
logging.warning('Could not create /etc/sudoers.tmp lock file: %s', e)
return False
def UnlockSudoers(self):
"""Remove the advisory lock on /etc/sudoers.tmp."""
try:
self.os.unlink('/etc/sudoers.tmp')
return True
except OSError as e:
if e.errno == errno.ENOENT:
return True
logging.warning('Could not remove /etc/sudoers.tmp: %s', e)
return False
def MakeUserSudoer(self, user):
"""Add user to the sudoers file."""
# If the user has no sudoers file, don't add an entry.
if not self.os.path.isfile('/etc/sudoers'):
logging.info('Did not grant admin access to %s. /etc/sudoers not found.',
user)
return
with self.system.OpenFile('/etc/sudoers', 'r') as sudoer_f:
sudoer_lines = sudoer_f.readlines()
if IsUserSudoerInLines(user, sudoer_lines):
# User is already sudoer. Done. We don't have to check for a lock
# file.
return
# Lock sudoers.
if not self.LockSudoers():
logging.warning('Did not grant admin access to %s. /etc/sudoers locked.',
user)
return
try:
# First read in the sudoers file (this time under the lock).
with self.system.OpenFile('/etc/sudoers', 'r') as sudoer_f:
sudoer_lines = sudoer_f.readlines()
if IsUserSudoerInLines(user, sudoer_lines):
# User is already sudoer. Done.
return
# Create a temporary sudoers file with the contents we want.
sudoer_lines.append('%s ALL=NOPASSWD: ALL' % user)
sudoer_lines = [EnsureTrailingNewline(line) for line in sudoer_lines]
(tmp_sudoers_fd, tmp_sudoers_fname) = tempfile.mkstemp()
with self.os.fdopen(tmp_sudoers_fd, 'w+') as tmp_sudoer_f:
# Put the old lines.
tmp_sudoer_f.writelines(sudoer_lines)
tmp_sudoer_f.seek(0)
try:
# Validate our result.
if not self.system.IsValidSudoersFile(tmp_sudoers_fname):
logging.warning(
'Did not grant admin access to %s. Sudoers was invalid.', user)
return
self.os.chmod('/etc/sudoers', 0640)
with self.system.OpenFile('/etc/sudoers', 'w') as sudoer_f:
sudoer_f.writelines(sudoer_lines)
# Make sure we're still 0640.
self.os.fchmod(sudoer_f.fileno(), stat.S_IWUSR | 0640)
try:
self.os.fchmod(sudoer_f.fileno(), 0440)
except (IOError, OSError) as e:
logging.warning('Could not restore perms to /etc/sudoers: %s', e)
finally:
# Clean up the temp file.
try:
self.os.unlink(tmp_sudoers_fname)
except (IOError, OSError) as e:
pass
except (IOError, OSError) as e:
logging.warning('Could not grant %s admin access: %s', user, e)
finally:
self.UnlockSudoers()
def AuthorizeSshKeys(self, user, ssh_keys):
"""Add ssh_keys to the user's ssh authorized_keys.gce file."""
(home_dir, uid, gid) = self.GetUserInfo(user)
ssh_dir = os.path.join(home_dir, '.ssh')
if not self.os.path.isdir(ssh_dir):
# Create a user's ssh directory, with u+rwx as the only permissions.
# There's proper handling and logging of OSError within EnsureDir(),
# so neither of these calls needs th handle that.
if not self.EnsureHomeDir(home_dir, uid, gid):
return False
if not self.EnsureDir(ssh_dir, uid, gid, 0700):
return False
# Not all sshd's support mulitple authorized_keys files. We have to
# share one with the user. We add our entries as follows:
# # Added by Google
# authorized_key_entry
authorized_keys_file = os.path.join(ssh_dir, 'authorized_keys')
try:
self.WriteAuthorizedSshKeysFile(authorized_keys_file, ssh_keys, uid, gid)
except IOError as e:
logging.warning('Could not update %s due to %s', authorized_keys_file, e)
def SetSELinuxContext(self, path):
"""Set the appropriate SELinux context, if SELinux tools are installed.
Calls /sbin/restorecon on the provided path to set the SELinux context as
specified by policy. This call does not operate recursively.
Only some OS configurations use SELinux. It is therefore acceptable for
restorecon to be missing, in which case we do nothing.
Arguments:
path: The path on which to fix the SELinux context.
Returns:
True if successful or if restorecon is missing, False in case of error.
"""
if self.system.IsExecutable('/sbin/restorecon'):
result = self.system.RunCommand(['/sbin/restorecon', path])
if self.system.RunCommandFailed(result):
logging.error('Unable to set SELinux context for %s', path)
return False
else:
return True
else:
logging.debug('restorecon missing; not setting SELinux context for %s',
path)
return True
def EnsureHomeDir(self, home_dir, uid, gid):
"""Make sure user's home directory exists.
Create the directory and its ancestor directories if necessary.
No changes are made to the ownership or permissions of a directory which
already exists.
Arguments:
home_dir: The path to the home directory.
uid: user ID to own the home dir.
gid: group ID to own the home dir.
Returns:
True if successful, False if not.
"""
if self.os.path.isdir(home_dir):
return True
# Use root as owner when creating ancestor directories.
if not self.EnsureDir(home_dir, 0, 0, 0755):
return False
self.os.chown(home_dir, uid, gid)
return True
def EnsureDir(self, dir_path, uid, gid, mode):
"""Make sure the specified directory exists.
If dir doesn't exist, create it and its ancestor directories, if necessary.
No changes are made to the ownership or permissions of a directory which
already exists.
Arguments:
dir_path: The path to the dir.
uid: user ID of the owner.
gid: group ID of the owner.
mode: Permissions for the dir, as an integer (e.g. 0755).
Returns:
True if successful, False if not.
"""
if self.os.path.isdir(dir_path):
return True # We are done
parent_dir = self.os.path.dirname(dir_path)
if not parent_dir == dir_path:
if not self.EnsureDir(parent_dir, uid, gid, 0755):
return False
try:
self.os.mkdir(dir_path, mode)
self.os.chown(dir_path, uid, gid)
self.SetSELinuxContext(dir_path)
except OSError as e:
if self.os.path.isdir(dir_path):
logging.warning('Could not prepare %s: %s', dir_path, e)
return True
logging.error('Could not create %s: %s', dir_path, e)
return False
return True
def WriteAuthorizedSshKeysFile(
    self, authorized_keys_file, ssh_keys, uid, gid):
  """Update the authorized_keys_file to contain the given ssh_keys.

  Lines previously added by this daemon (a '# Added by Google' marker and
  the key line following it) are replaced with the current Google-managed
  keys; all other (user-managed) lines are preserved in order.

  Arguments:
    authorized_keys_file: The name of the authorized keys file.
    ssh_keys: The google added ssh keys for the file.
    uid: The uid for the user.
    gid: The gid for the user.
  """
  # Build the new contents in a temp file first, then swap it into place
  # below so readers never see a partially written keys file.
  with self.system.CreateTempFile(delete=False) as keys_file:
    new_keys_path = keys_file.name
    # Read all the ssh keys in the original key file if it exists.
    if self.os.path.exists(authorized_keys_file):
      with self.system.OpenFile(authorized_keys_file, 'r') as original_keys:
        original_keys.seek(0)
        lines = original_keys.readlines()
    else:
      lines = []
    # Collect the indexes of the '# Added by Google' markers plus the key
    # line following each.  Scanning every line (the original stopped at
    # len(lines) - 1) also drops a stray trailing marker with no key
    # after it, instead of preserving it as a "user" line.
    google_added_ixs = set()
    for i, line in enumerate(lines):
      if line.startswith('# Added by Google'):
        google_added_ixs.add(i)
        google_added_ixs.add(i + 1)
    # First write user's entries.
    for i, user_line in enumerate(lines):
      if i not in google_added_ixs:
        keys_file.write(EnsureTrailingNewline(user_line))
    # Put google entries at the end, each preceded by Added by Google.
    for ssh_key in ssh_keys:
      keys_file.write('# Added by Google\n')
      keys_file.write(EnsureTrailingNewline(ssh_key))
  # Override the old authorized keys file with the new one.
  self.system.MoveFile(new_keys_path, authorized_keys_file)
  # Make sure the authorized_keys_file has the right perms (u+rw).
  # (0o600 replaces the Python-2-only literal 0600; identical value.)
  self.os.chmod(authorized_keys_file, 0o600)
  self.os.chown(authorized_keys_file, uid, gid)
  # Set SELinux context, if applicable to this system.
  self.SetSELinuxContext(authorized_keys_file)
| {
"content_hash": "aa1fa9f2f1296e946c036474b46b9ef9",
"timestamp": "",
"source": "github",
"line_count": 383,
"max_line_length": 79,
"avg_line_length": 32.59791122715405,
"alnum_prop": 0.6470164197036443,
"repo_name": "coreos/compute-image-packages",
"id": "6f970c41e303ab3fdc8e1a336bc7a672a09662dc",
"size": "13096",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "google-daemon/usr/share/google/google_daemon/accounts.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "168786"
},
{
"name": "Shell",
"bytes": "54236"
}
],
"symlink_target": ""
} |
# WSGI entry point for the Mistral API.  WSGI servers (gunicorn, uWSGI,
# mod_wsgi, ...) look for a module-level ``application`` object; building
# it here at import time lets them serve the API directly.
from mistral.api import app
application = app.init_wsgi()
| {
"content_hash": "ea5b1451071dba1a8dc6fa1c9b1a76ff",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 29,
"avg_line_length": 19.666666666666668,
"alnum_prop": 0.7627118644067796,
"repo_name": "StackStorm/mistral",
"id": "27866f0c1a82e0a2d2b9d50087c297ae5be8f37c",
"size": "669",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "mistral/api/wsgi.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1494"
},
{
"name": "Mako",
"bytes": "951"
},
{
"name": "Python",
"bytes": "2249335"
},
{
"name": "Shell",
"bytes": "31326"
}
],
"symlink_target": ""
} |
from traitlets import default
from .baseapp import NbGrader, nbgrader_aliases, nbgrader_flags
from ..exchange import Exchange, ExchangeFetchAssignment, ExchangeError
# Command-line aliases: start from the standard nbgrader aliases and add
# shortcuts for the exchange timezone and the course id.
aliases = {}
aliases.update(nbgrader_aliases)
aliases.update({
    "timezone": "Exchange.timezone",
    "course": "CourseDirectory.course_id",
})
# Command-line flags: the standard nbgrader flags plus ``--replace``,
# which restores missing files even if the assignment was already fetched.
flags = {}
flags.update(nbgrader_flags)
flags.update({
    'replace': (
        {'ExchangeFetchAssignment': {'replace_missing_files': True}},
        "replace missing files, even if the assignment has already been fetched"
    ),
})
class FetchAssignmentApp(NbGrader):
    """nbgrader application that downloads released assignments.

    Thin command-line wrapper around ``ExchangeFetchAssignment``;
    intended to be run by students.
    """

    name = u'nbgrader-fetch-assignment'
    description = u'Fetch an assignment from the nbgrader exchange'

    aliases = aliases
    flags = flags

    examples = """
        Fetch an assignment that an instructor has released. For the usage of students.
        You can run this command from any directory, but usually, you will have a
        directory where you are keeping your course assignments.
        To fetch an assignment by name into the current directory:
            nbgrader fetch_assignment assignment1
        To fetch an assignment for a specific course, you must first know the
        `course_id` for your course. If you don't know it, ask your instructor.
        Then, simply include the argument with the '--course' flag.
            nbgrader fetch_assignment assignment1 --course=phys101
        This will create an new directory named `assignment1` where you can work
        on the assignment. When you are done, use the `nbgrader submit` command
        to turn in the assignment.
        """

    @default("classes")
    def _classes_default(self):
        # Register the exchange classes so their configurable traits show
        # up in --help-all and are picked up from nbgrader_config.py.
        classes = super(FetchAssignmentApp, self)._classes_default()
        classes.extend([Exchange, ExchangeFetchAssignment])
        return classes

    def _load_config(self, cfg, **kwargs):
        # Backwards compatibility: migrate configuration written for the
        # old ``FetchApp`` section into ``ExchangeFetchAssignment``
        # (warning the user) before normal config loading runs.
        if 'FetchApp' in cfg:
            self.log.warning(
                "Use ExchangeFetchAssignment in config, not FetchApp. Outdated config:\n%s",
                '\n'.join(
                    'FetchApp.{key} = {value!r}'.format(key=key, value=value)
                    for key, value in cfg.FetchApp.items()
                )
            )
            cfg.ExchangeFetchAssignment.merge(cfg.FetchApp)
            del cfg.FetchApp
        super(FetchAssignmentApp, self)._load_config(cfg, **kwargs)

    def start(self):
        """Fetch each requested assignment, failing the app on any error."""
        super(FetchAssignmentApp, self).start()
        # Assignments may be named via positional arguments or via the
        # CourseDirectory.assignment_id trait; at least one is required.
        if len(self.extra_args) == 0 and self.coursedir.assignment_id == "":
            self.fail("Must provide assignment name:\nnbgrader fetch_assignment ASSIGNMENT [ --course COURSE ]")
        if self.coursedir.assignment_id != "":
            # Single assignment configured via the trait.
            fetch = self.exchange.FetchAssignment(
                coursedir=self.coursedir,
                authenticator=self.authenticator,
                parent=self)
            try:
                fetch.start()
            except ExchangeError:
                self.fail("nbgrader fetch_assignment failed")
        else:
            # One fetch per positional argument; keep going after an
            # error and report the failure once at the end.
            failed = False
            for arg in self.extra_args:
                self.coursedir.assignment_id = arg
                fetch = self.exchange.FetchAssignment(
                    coursedir=self.coursedir,
                    authenticator=self.authenticator,
                    parent=self)
                try:
                    fetch.start()
                except ExchangeError:
                    failed = True
            if failed:
                self.fail("nbgrader fetch_assignment failed")
| {
"content_hash": "7b62c3595b9adbdeb673a7f5c9c59097",
"timestamp": "",
"source": "github",
"line_count": 104,
"max_line_length": 112,
"avg_line_length": 34.33653846153846,
"alnum_prop": 0.6118734248109773,
"repo_name": "jhamrick/nbgrader",
"id": "a2edcbcccbbde7ea2ba84fb05c4e311b35ccfb77",
"size": "3588",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "nbgrader/apps/fetchassignmentapp.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "7271"
},
{
"name": "CSS",
"bytes": "22778"
},
{
"name": "HTML",
"bytes": "559352"
},
{
"name": "JavaScript",
"bytes": "291787"
},
{
"name": "Jinja",
"bytes": "17019"
},
{
"name": "Jupyter Notebook",
"bytes": "988310"
},
{
"name": "Makefile",
"bytes": "7065"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "1314410"
},
{
"name": "Smarty",
"bytes": "21035"
},
{
"name": "TeX",
"bytes": "4226"
},
{
"name": "TypeScript",
"bytes": "183093"
}
],
"symlink_target": ""
} |
"""
Platform for the Garadget cover component.
For more details about this platform, please refer to the documentation
https://home-assistant.io/components/garadget/
"""
import logging
import requests
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.components.cover import CoverDevice, PLATFORM_SCHEMA
from homeassistant.helpers.event import track_utc_time_change
from homeassistant.const import (
CONF_DEVICE, CONF_USERNAME, CONF_PASSWORD, CONF_ACCESS_TOKEN, CONF_NAME,
STATE_UNKNOWN, STATE_CLOSED, STATE_OPEN, CONF_COVERS)
_LOGGER = logging.getLogger(__name__)

# Keys for the extra attributes exposed via device_state_attributes.
ATTR_AVAILABLE = "available"
ATTR_SENSOR_STRENGTH = "sensor reflection rate"
ATTR_SIGNAL_STRENGTH = "wifi signal strength (dB)"
ATTR_TIME_IN_STATE = "time in state"

# Name used when none is configured and the device cannot be queried.
DEFAULT_NAME = 'Garadget'

# States reported by Garadget beyond the standard open/closed constants.
STATE_CLOSING = 'closing'
STATE_OFFLINE = 'offline'
STATE_OPENING = 'opening'
STATE_STOPPED = 'stopped'

# Mapping from Garadget API status strings to Home Assistant states.
STATES_MAP = {
    'open': STATE_OPEN,
    'opening': STATE_OPENING,
    'closed': STATE_CLOSED,
    'closing': STATE_CLOSING,
    'stopped': STATE_STOPPED
}

# Per-cover options: either a ready-made Particle access token, or a
# username/password pair with which a token is requested at startup.
COVER_SCHEMA = vol.Schema({
    vol.Optional(CONF_ACCESS_TOKEN): cv.string,
    vol.Optional(CONF_DEVICE): cv.string,
    vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
    vol.Optional(CONF_PASSWORD): cv.string,
    vol.Optional(CONF_USERNAME): cv.string,
})

# Platform config: a mapping of cover slugs to COVER_SCHEMA entries.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_COVERS): vol.Schema({cv.slug: COVER_SCHEMA}),
})
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up the Garadget covers."""
    cover_configs = config.get(CONF_COVERS)
    covers = [
        GaradgetCover(hass, {
            'name': device_config.get(CONF_NAME),
            'device_id': device_config.get(CONF_DEVICE, device_id),
            'username': device_config.get(CONF_USERNAME),
            'password': device_config.get(CONF_PASSWORD),
            'access_token': device_config.get(CONF_ACCESS_TOKEN),
        })
        for device_id, device_config in cover_configs.items()
    ]
    add_devices(covers)
class GaradgetCover(CoverDevice):
    """Representation of a Garadget garage-door cover.

    Talks to the device through the Particle cloud REST API.  An access
    token can be supplied directly via configuration; otherwise one is
    requested with the configured username/password and revoked again
    when the object is deleted.
    """

    # pylint: disable=no-self-use
    def __init__(self, hass, args):
        """Initialize the cover from the args mapping built in setup."""
        self.particle_url = 'https://api.particle.io'
        self.hass = hass
        self._name = args['name']
        self.device_id = args['device_id']
        self.access_token = args['access_token']
        # Tracks whether *we* created the token, so __del__ knows to
        # revoke it.  Fixes the original misspelled ``obtained_token``
        # assignment, which left ``_obtained_token`` undefined and made
        # __del__ raise AttributeError whenever a token was configured.
        self._obtained_token = False
        self._username = args['username']
        self._password = args['password']
        self._state = STATE_UNKNOWN
        self.time_in_state = None
        self.signal = None
        self.sensor = None
        self._unsub_listener_cover = None
        self._available = True

        if self.access_token is None:
            self.access_token = self.get_token()
            self._obtained_token = True

        try:
            # Fall back to the name stored on the device itself.
            if self._name is None:
                doorconfig = self._get_variable('doorConfig')
                if doorconfig['nme'] is not None:
                    self._name = doorconfig['nme']
            self.update()
        except requests.exceptions.ConnectionError as ex:
            _LOGGER.error(
                "Unable to connect to server: %(reason)s", dict(reason=ex))
            self._state = STATE_OFFLINE
            self._available = False
            self._name = DEFAULT_NAME
        except KeyError:
            _LOGGER.warning("Garadget device %(device)s seems to be offline",
                            dict(device=self.device_id))
            self._name = DEFAULT_NAME
            self._state = STATE_OFFLINE
            self._available = False

    def __del__(self):
        """Revoke the access token if this instance obtained it."""
        if self._obtained_token is True:
            if self.access_token is not None:
                self.remove_token()

    @property
    def name(self):
        """Return the name of the cover."""
        return self._name

    @property
    def should_poll(self):
        """Return True; state is refreshed by polling the cloud API."""
        return True

    @property
    def available(self):
        """Return True if entity is available."""
        return self._available

    @property
    def device_state_attributes(self):
        """Return the device state attributes."""
        data = {}

        if self.signal is not None:
            data[ATTR_SIGNAL_STRENGTH] = self.signal

        if self.time_in_state is not None:
            data[ATTR_TIME_IN_STATE] = self.time_in_state

        if self.sensor is not None:
            data[ATTR_SENSOR_STRENGTH] = self.sensor

        if self.access_token is not None:
            data[CONF_ACCESS_TOKEN] = self.access_token

        return data

    @property
    def is_closed(self):
        """Return if the cover is closed (None when state is unknown)."""
        if self._state == STATE_UNKNOWN:
            return None
        return self._state == STATE_CLOSED

    @property
    def device_class(self):
        """Return the class of this device, from component DEVICE_CLASSES."""
        return 'garage'

    def get_token(self):
        """Request a new OAuth token from the Particle cloud.

        Returns the token string, or None if retrieval failed.
        """
        args = {
            'grant_type': 'password',
            'username': self._username,
            'password': self._password
        }
        url = '{}/oauth/token'.format(self.particle_url)
        ret = requests.post(
            url, auth=('particle', 'particle'), data=args, timeout=10)

        try:
            return ret.json()['access_token']
        except KeyError:
            _LOGGER.error("Unable to retrieve access token")

    def remove_token(self):
        """Remove authorization token from API."""
        url = '{}/v1/access_tokens/{}'.format(
            self.particle_url, self.access_token)
        ret = requests.delete(
            url, auth=(self._username, self._password), timeout=10)
        return ret.text

    def _start_watcher(self, command):
        """Start a per-second poller to track an in-flight operation."""
        _LOGGER.debug("Starting Watcher for command: %s ", command)
        if self._unsub_listener_cover is None:
            self._unsub_listener_cover = track_utc_time_change(
                self.hass, self._check_state)

    def _check_state(self, now):
        """Check the state of the service during an operation."""
        self.schedule_update_ha_state(True)

    def close_cover(self):
        """Close the cover unless it is already closed or closing."""
        # The original guard compared against 'close', which is never a
        # state value (STATE_CLOSED == 'closed'), so the command was
        # re-sent even when the door was already closed.
        if self._state not in [STATE_CLOSED, STATE_CLOSING]:
            ret = self._put_command('setState', 'close')
            self._start_watcher('close')
            return ret.get('return_value') == 1

    def open_cover(self):
        """Open the cover unless it is already open or opening."""
        if self._state not in [STATE_OPEN, STATE_OPENING]:
            ret = self._put_command('setState', 'open')
            self._start_watcher('open')
            return ret.get('return_value') == 1

    def stop_cover(self):
        """Stop the door where it is."""
        if self._state not in [STATE_STOPPED]:
            ret = self._put_command('setState', 'stop')
            self._start_watcher('stop')
            # .get() avoids a KeyError when the API returns an error
            # body, matching open_cover/close_cover.
            return ret.get('return_value') == 1

    def update(self):
        """Get updated status from API."""
        try:
            status = self._get_variable('doorStatus')
            _LOGGER.debug("Current Status: %s", status['status'])
            self._state = STATES_MAP.get(status['status'], STATE_UNKNOWN)
            self.time_in_state = status['time']
            self.signal = status['signal']
            self.sensor = status['sensor']
            self._available = True
        except requests.exceptions.ConnectionError as ex:
            _LOGGER.error(
                "Unable to connect to server: %(reason)s", dict(reason=ex))
            self._state = STATE_OFFLINE
        except KeyError:
            _LOGGER.warning("Garadget device %(device)s seems to be offline",
                            dict(device=self.device_id))
            self._state = STATE_OFFLINE

        # Once the door has settled, stop the fast-polling watcher.
        if self._state not in [STATE_CLOSING, STATE_OPENING]:
            if self._unsub_listener_cover is not None:
                self._unsub_listener_cover()
                self._unsub_listener_cover = None

    def _get_variable(self, var):
        """Fetch a device variable and parse its 'k=v|k=v|...' payload."""
        url = '{}/v1/devices/{}/{}?access_token={}'.format(
            self.particle_url, self.device_id, var, self.access_token)
        ret = requests.get(url, timeout=10)
        result = {}
        for pairs in ret.json()['result'].split('|'):
            key = pairs.split('=')
            result[key[0]] = key[1]
        return result

    def _put_command(self, func, arg=None):
        """POST a device function call to the API; return the JSON reply."""
        params = {'access_token': self.access_token}
        if arg:
            params['command'] = arg
        url = '{}/v1/devices/{}/{}'.format(
            self.particle_url, self.device_id, func)
        ret = requests.post(url, data=params, timeout=10)
        return ret.json()
| {
"content_hash": "fa3a9d005dc46dd9f9e250df58413e35",
"timestamp": "",
"source": "github",
"line_count": 269,
"max_line_length": 77,
"avg_line_length": 33.405204460966544,
"alnum_prop": 0.5835744491431115,
"repo_name": "happyleavesaoc/home-assistant",
"id": "457312c8a4a58271163b9cf4adb244852c58977e",
"size": "8986",
"binary": false,
"copies": "6",
"ref": "refs/heads/dev",
"path": "homeassistant/components/cover/garadget.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "1952235"
},
{
"name": "Python",
"bytes": "6672532"
},
{
"name": "Ruby",
"bytes": "517"
},
{
"name": "Shell",
"bytes": "14949"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.