text stringlengths 4 1.02M | meta dict |
|---|---|
import django.utils.timezone
from django.db import migrations, models
# Shared help strings for the two audited timestamp columns.
_CREATED_HELP = "When this item was originally created"
_MODIFIED_HELP = "When this item was last modified"


def _timestamp_field(help_text):
    """Return the non-editable DateTimeField (default: django.utils.timezone.now)
    shared by every created_on/modified_on column altered below."""
    return models.DateTimeField(
        blank=True,
        default=django.utils.timezone.now,
        editable=False,
        help_text=help_text,
    )


class Migration(migrations.Migration):
    """Re-declare the audit timestamp columns on the org models."""

    dependencies = [("orgs", "0017_auto_20161026_1513")]

    # One AlterField per (model, audit column); order matches the original
    # migration: invitation, org, orgbackground -- created_on then modified_on.
    operations = [
        migrations.AlterField(
            model_name=model,
            name=column,
            field=_timestamp_field(
                _CREATED_HELP if column == "created_on" else _MODIFIED_HELP
            ),
        )
        for model in ("invitation", "org", "orgbackground")
        for column in ("created_on", "modified_on")
    ]
| {
"content_hash": "804cbd5023762b09ef530d9f92ae1acf",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 66,
"avg_line_length": 31.9,
"alnum_prop": 0.5015673981191222,
"repo_name": "rapidpro/dash",
"id": "a6564fd4252696065a38694edacf2a76406af79f",
"size": "2308",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "dash/orgs/migrations/0018_auto_20170301_0914.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "2916"
},
{
"name": "Haml",
"bytes": "9669"
},
{
"name": "Python",
"bytes": "387905"
}
],
"symlink_target": ""
} |
from collections import OrderedDict
import lasagne.init as LI
import lasagne.layers as L
import lasagne.nonlinearities as LN
import lasagne.nonlinearities as nl
import numpy as np
import theano
from visual_dynamics.predictors import layers_theano as LT
class VggConvNetwork(object):
    """Siamese VGG16 feature encoder followed by a trainable conv/dense head.

    The input holds two images stacked along the channel axis; each half is
    pushed through pretrained VGG16 encoding layers (weights loaded from an
    HDF5 snapshot and frozen), downsampled to the servoing resolution
    (``xd_dim``), standarized, and the two feature streams are concatenated
    before the trainable convolutional and dense layers.

    NOTE(review): shared weights between the two streams are implemented by
    queueing theano shared variables created for the first stream in
    ``params_kwargs_list`` and popping them, in the same order, for the
    second stream.
    """

    def __init__(self, input_shape, output_dim, hidden_sizes,
                 conv_filters, conv_filter_sizes, conv_strides, conv_pads,
                 encoding_levels=None, num_encoding_levels=5, xd_dim=32,
                 hidden_W_init=LI.GlorotUniform(), hidden_b_init=LI.Constant(0.),
                 output_W_init=LI.GlorotUniform(), output_b_init=LI.Constant(0.),
                 hidden_nonlinearity=LN.rectify, output_nonlinearity=None,
                 name=None, input_var=None):
        # Prefix applied to the names of the trainable layers only.
        if name is None:
            prefix = ""
        else:
            prefix = name + "_"
        # Normalize the input into a 4-D (batch, channels, height, width) layout.
        if len(input_shape) == 3:
            l_in = L.InputLayer(shape=(None, np.prod(input_shape)), input_var=input_var)
            l_hid = L.reshape(l_in, ([0],) + input_shape)
        elif len(input_shape) == 2:
            l_in = L.InputLayer(shape=(None, np.prod(input_shape)), input_var=input_var)
            input_shape = (1,) + input_shape
            l_hid = L.reshape(l_in, ([0],) + input_shape)
        else:
            l_in = L.InputLayer(shape=(None,) + input_shape, input_var=input_var)
            l_hid = l_in
        # The channel axis carries two images stacked together; split them
        # into the two siamese streams.
        assert input_shape[0] % 2 == 0
        l_hid0 = L.SliceLayer(l_hid, slice(None, input_shape[0] // 2), axis=1)
        l_hid1 = L.SliceLayer(l_hid, slice(input_shape[0] // 2, None), axis=1)
        l_hids = [l_hid0, l_hid1]
        if encoding_levels is None:
            encoding_levels = [num_encoding_levels]
        else:
            assert max(encoding_levels) == num_encoding_levels
        # Channel dimension of each VGG encoding level; level 0 is the raw image.
        xlevels_c_dim = OrderedDict(zip(range(num_encoding_levels + 1), [3, 64, 128, 256, 512, 512]))
        import h5py
        # Pretrained VGG16 weight snapshot (relative path -- assumes the
        # process runs from the project root; TODO confirm).
        params_file = h5py.File("models/theano/vgg16_levelsall_nodyn_model.h5", 'r')
        # Queue of shared parameters: filled by stream 0, drained by stream 1.
        params_kwargs_list = []
        # encoding
        for ihid, l_hid in enumerate(l_hids):
            l_xlevels = OrderedDict()
            l_xdlevels = OrderedDict()  # downsampled version of l_xlevels at the resolution for servoing
            for level in range(num_encoding_levels + 1):
                if level == 0:
                    l_xlevel = l_hid
                elif level < 3:
                    l_xlevelm1 = l_xlevels[level - 1]
                    if level == 1:
                        # change from BGR to RGB and subtract mean pixel values
                        # (X - mean_pixel_bgr[None, :, None, None])[:, ::-1, :, :]
                        # X[:, ::-1, :, :] - mean_pixel_rgb[None, :, None, None]
                        if ihid == 0:
                            mean_pixel_bgr = np.array([103.939, 116.779, 123.68], dtype=np.float32)
                            mean_pixel_rgb = mean_pixel_bgr[::-1]
                            # 1x1 convolution whose kernel reverses the channel order.
                            W = np.eye(3)[::-1, :].reshape((3, 3, 1, 1)).astype(np.float32)
                            b = -mean_pixel_rgb
                            params_kwargs = dict(W=W, b=b)
                            for k, v in params_kwargs.items():
                                # Broadcast over every singleton dimension.
                                bcast = tuple(s == 1 for s in v.shape)
                                params_kwargs[k] = theano.shared(v, broadcastable=bcast)
                            params_kwargs_list.append(params_kwargs)
                        else:
                            params_kwargs = params_kwargs_list.pop(0)
                        l_xlevelm1 = L.Conv2DLayer(l_xlevelm1, num_filters=3, filter_size=1,
                                                   nonlinearity=nl.identity,
                                                   **params_kwargs)
                        # The preprocessing convolution is fixed, never trained.
                        l_xlevelm1.W.name = 'x0.W'
                        l_xlevelm1.params[l_xlevelm1.W].remove('trainable')
                        l_xlevelm1.b.name = 'x0.b'
                        l_xlevelm1.params[l_xlevelm1.b].remove('trainable')
                    if ihid == 0:
                        conv1_W = params_file['conv%d_1.W' % level][()]
                        conv1_b = params_file['conv%d_1.b' % level][()]
                        conv2_W = params_file['conv%d_2.W' % level][()]
                        conv2_b = params_file['conv%d_2.b' % level][()]
                        params_kwargs = dict(conv1_W=conv1_W, conv1_b=conv1_b,
                                             conv2_W=conv2_W, conv2_b=conv2_b)
                        for k, v in params_kwargs.items():
                            bcast = tuple(s == 1 for s in v.shape)
                            params_kwargs[k] = theano.shared(v, broadcastable=bcast)
                        params_kwargs_list.append(params_kwargs)
                    else:
                        params_kwargs = params_kwargs_list.pop(0)
                    l_xlevel = LT.VggEncodingLayer(l_xlevelm1, xlevels_c_dim[level], level=str(level),
                                                   **params_kwargs)
                else:
                    # Levels >= 3 use the 3-convolution VGG block with dilation.
                    if ihid == 0:
                        conv1_W = params_file['conv%d_1.W' % level][()]
                        conv1_b = params_file['conv%d_1.b' % level][()]
                        conv2_W = params_file['conv%d_2.W' % level][()]
                        conv2_b = params_file['conv%d_2.b' % level][()]
                        conv3_W = params_file['conv%d_3.W' % level][()]
                        conv3_b = params_file['conv%d_3.b' % level][()]
                        params_kwargs = dict(conv1_W=conv1_W, conv1_b=conv1_b,
                                             conv2_W=conv2_W, conv2_b=conv2_b,
                                             conv3_W=conv3_W, conv3_b=conv3_b)
                        for k, v in params_kwargs.items():
                            bcast = tuple(s == 1 for s in v.shape)
                            params_kwargs[k] = theano.shared(v, broadcastable=bcast)
                        params_kwargs_list.append(params_kwargs)
                    else:
                        params_kwargs = params_kwargs_list.pop(0)
                    l_xlevel = LT.VggEncoding3Layer(l_xlevels[level - 1], xlevels_c_dim[level],
                                                    dilation=(2 ** (level - 3),) * 2, level=str(level),
                                                    **params_kwargs)
                # TODO:
                # NOTE(review): freezes the parameters of every encoding level
                # (a no-op for level 0, which has no parameters of its own).
                LT.set_layer_param_tags(l_xlevel, trainable=False)
                # downsample to servoing resolution
                xlevel_shape = L.get_output_shape(l_xlevel)
                xlevel_dim = xlevel_shape[-1]
                # Only square feature maps are supported here.
                assert xlevel_shape[-2] == xlevel_dim
                scale_factor = xlevel_dim // xd_dim
                if scale_factor > 1:
                    l_xdlevel = LT.Downscale2DLayer(l_xlevel, scale_factor=scale_factor, name='x%dd' % level)
                elif scale_factor == 1:
                    l_xdlevel = l_xlevel
                else:
                    # Feature map smaller than the servoing resolution.
                    raise NotImplementedError
                if 0 < level < 3:
                    # VGG pooling between the early encoding blocks.
                    l_xlevel = L.MaxPool2DLayer(l_xlevel, pool_size=2, stride=2, pad=0,
                                                name='pool%d' % level)
                l_xlevels[level] = l_xlevel
                l_xdlevels[level] = l_xdlevel
            l_ylevels = OrderedDict()  # standarized version of l_xdlevels used as the feature for servoing
            for level in encoding_levels:
                if ihid == 0:
                    offset = params_file['y%d.offset' % level][()]
                    scale = params_file['y%d.scale' % level][()]
                    params_kwargs = dict(offset=offset, scale=scale)
                    for k, v in params_kwargs.items():
                        bcast = tuple(s == 1 for s in v.shape)
                        params_kwargs[k] = theano.shared(v, broadcastable=bcast)
                    params_kwargs_list.append(params_kwargs)
                else:
                    params_kwargs = params_kwargs_list.pop(0)
                l_ylevels[level] = LT.StandarizeLayer(l_xdlevels[level], name='y%d' % level,
                                                      **params_kwargs)
            # Concatenate the requested encoding levels for this stream.
            l_hids[ihid] = L.ConcatLayer([l_ylevels[level] for level in encoding_levels], axis=1)
        # Every parameter queued by stream 0 must have been consumed by stream 1.
        assert not params_kwargs_list
        # Merge the two feature streams and apply the trainable head.
        l_hid = L.ConcatLayer(l_hids, axis=1)
        for idx, conv_filter, filter_size, stride, pad in zip(
                range(len(conv_filters)),
                conv_filters,
                conv_filter_sizes,
                conv_strides,
                conv_pads,
        ):
            l_hid = L.Conv2DLayer(
                l_hid,
                num_filters=conv_filter,
                filter_size=filter_size,
                stride=(stride, stride),
                pad=pad,
                nonlinearity=hidden_nonlinearity,
                name="%sconv_hidden_%d" % (prefix, idx),
            )
        conv_out = l_hid
        for idx, hidden_size in enumerate(hidden_sizes):
            l_hid = L.DenseLayer(
                l_hid,
                num_units=hidden_size,
                nonlinearity=hidden_nonlinearity,
                name="%shidden_%d" % (prefix, idx),
                W=hidden_W_init,
                b=hidden_b_init,
            )
        l_out = L.DenseLayer(
            l_hid,
            num_units=output_dim,
            nonlinearity=output_nonlinearity,
            name="%soutput" % (prefix,),
            W=output_W_init,
            b=output_b_init,
        )
        self._l_in = l_in
        self._l_out = l_out
        self._input_var = l_in.input_var
        self._conv_out = conv_out

    @property
    def input_layer(self):
        """Lasagne input layer of the network."""
        return self._l_in

    @property
    def output_layer(self):
        """Final dense output layer."""
        return self._l_out

    @property
    def input_var(self):
        """Theano variable feeding the input layer."""
        return self._l_in.input_var

    @property
    def conv_output_layer(self):
        """Last convolutional layer of the trainable head."""
        return self._conv_out
| {
"content_hash": "cc52a6006e57d995385dbde02ce51a97",
"timestamp": "",
"source": "github",
"line_count": 212,
"max_line_length": 109,
"avg_line_length": 47.20283018867924,
"alnum_prop": 0.4701708803837314,
"repo_name": "alexlee-gk/visual_dynamics",
"id": "8ee1b20da60fac5f8d1b28677586fd28b2f38bc3",
"size": "10007",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "visual_dynamics/policies/vgg_conv_network.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "926637"
}
],
"symlink_target": ""
} |
import logging
import os
import time
from parsl.channels import LocalChannel
from parsl.providers.cluster_provider import ClusterProvider
from parsl.providers.grid_engine.template import template_string
from parsl.launchers import SingleNodeLauncher
from parsl.providers.provider_base import JobState, JobStatus
from parsl.utils import RepresentationMixin
logger = logging.getLogger(__name__)

# Map of qstat job-state codes (lower-cased) to parsl JobStates.
translate_table = {
    'qw': JobState.PENDING,
    'hqw': JobState.PENDING,
    'hrwq': JobState.PENDING,
    'r': JobState.RUNNING,
    's': JobState.FAILED,  # obsuspended
    'ts': JobState.FAILED,
    't': JobState.FAILED,  # Suspended by alarm
    'eqw': JobState.FAILED,  # Error states
    'ehqw': JobState.FAILED,  # ..
    'ehrqw': JobState.FAILED,  # ..
    'd': JobState.COMPLETED,  # deletion states are treated as completed
    'dr': JobState.COMPLETED,
    'dt': JobState.COMPLETED,
    'drt': JobState.COMPLETED,
    'ds': JobState.COMPLETED,
    'drs': JobState.COMPLETED,
}
class GridEngineProvider(ClusterProvider, RepresentationMixin):
    """A provider for the Grid Engine scheduler.

    Parameters
    ----------
    channel : Channel
        Channel for accessing this provider. Possible channels include
        :class:`~parsl.channels.LocalChannel` (the default),
        :class:`~parsl.channels.SSHChannel`, or
        :class:`~parsl.channels.SSHInteractiveLoginChannel`.
    nodes_per_block : int
        Nodes to provision per block.
    init_blocks : int
        Number of blocks to request at start.
    min_blocks : int
        Minimum number of blocks to maintain.
    max_blocks : int
        Maximum number of blocks to maintain.
    parallelism : float
        Ratio of provisioned task slots to active tasks. A parallelism value of 1 represents aggressive
        scaling where as many resources as possible are used; parallelism close to 0 represents
        the opposite situation in which as few resources as possible (i.e., min_blocks) are used.
    walltime : str
        Walltime requested per block in HH:MM:SS.
    scheduler_options : str
        String to prepend to the #$$ blocks in the submit script to the scheduler.
    worker_init : str
        Command to be run before starting a worker, such as 'module load Anaconda; source activate env'.
    launcher : Launcher
        Launcher for this provider. Possible launchers include
        :class:`~parsl.launchers.SingleNodeLauncher` (the default),
    cmd_timeout : int
        Timeout for commands made to the scheduler in seconds
    queue : str
        Name of the queue to submit to (passed to ``qsub -q``); submit to the
        site default queue when None.
    """

    def __init__(self,
                 channel=LocalChannel(),
                 nodes_per_block=1,
                 init_blocks=1,
                 min_blocks=0,
                 max_blocks=1,
                 parallelism=1,
                 walltime="00:10:00",
                 scheduler_options='',
                 worker_init='',
                 launcher=SingleNodeLauncher(),
                 cmd_timeout: int = 60,
                 queue=None):
        label = 'grid_engine'
        super().__init__(label,
                         channel,
                         nodes_per_block,
                         init_blocks,
                         min_blocks,
                         max_blocks,
                         parallelism,
                         walltime,
                         launcher,
                         cmd_timeout=cmd_timeout)
        self.scheduler_options = scheduler_options
        self.worker_init = worker_init
        self.queue = queue

        # BUG FIX: `launcher` is a Launcher instance, so the original check
        # `launcher in ['srun', 'srun_mpi']` could never match and the warning
        # below was dead code. Compare by class name instead.
        if type(launcher).__name__ in ('SrunLauncher', 'SrunMPILauncher'):
            logger.warning("Use of {} launcher is usually appropriate for Slurm providers. "
                           "Recommended options include 'single_node' or 'aprun'.".format(launcher))

    def get_configs(self, command, tasks_per_node):
        """Compose a dictionary with information for writing the submit script."""

        logger.debug("Requesting one block with {} nodes per block and {} tasks per node".format(
            self.nodes_per_block, tasks_per_node))

        job_config = {}
        job_config["submit_script_dir"] = self.channel.script_dir
        job_config["nodes"] = self.nodes_per_block
        job_config["walltime"] = self.walltime
        job_config["scheduler_options"] = self.scheduler_options
        job_config["worker_init"] = self.worker_init
        job_config["user_script"] = command

        # Wrap the raw command with the launcher before it goes into the template.
        job_config["user_script"] = self.launcher(command,
                                                  tasks_per_node,
                                                  self.nodes_per_block)
        return job_config

    def submit(self, command, tasks_per_node, job_name="parsl.sge"):
        ''' The submit method takes the command string to be executed upon
        instantiation of a resource most often to start a pilot (such as IPP engine
        or even Swift-T engines).

        Args :
             - command (str) : The bash command string to be executed.
             - tasks_per_node (int) : command invocations to be launched per node

        KWargs:
             - job_name (str) : Human friendly name to be assigned to the job request

        Returns:
             - A job identifier, this could be an integer, string etc,
               or None on submission failure.

        Raises:
             - ExecutionProviderException or its subclasses
        '''

        # Set job name; the timestamp keeps repeated submissions distinct.
        job_name = "{0}.{1}".format(job_name, time.time())

        # Set script path
        script_path = "{0}/{1}.submit".format(self.script_dir, job_name)
        script_path = os.path.abspath(script_path)

        job_config = self.get_configs(command, tasks_per_node)

        logger.debug("Writing submit script")
        self._write_submit_script(template_string, script_path, job_name, job_config)

        channel_script_path = self.channel.push_file(script_path, self.channel.script_dir)
        # -terse makes qsub print only the job id on stdout.
        if self.queue is not None:
            cmd = "qsub -q {0} -terse {1}".format(self.queue, channel_script_path)
        else:
            cmd = "qsub -terse {0}".format(channel_script_path)
        retcode, stdout, stderr = self.execute_wait(cmd)

        if retcode == 0:
            for line in stdout.split('\n'):
                job_id = line.strip()
                if not job_id:
                    continue
                self.resources[job_id] = {'job_id': job_id, 'status': JobStatus(JobState.PENDING)}
                return job_id
        else:
            logger.error("Submit command failed")
            logger.error("Retcode:%s STDOUT:%s STDERR:%s", retcode, stdout.strip(), stderr.strip())
            # Explicitly signal submission failure to the caller.
            return None

    def _status(self):
        ''' Get the status of a list of jobs identified by the job identifiers
        returned from the submit request.

        Returns:
             - A list of JobStatus objects corresponding to each job_id in the job_ids list.

        Raises:
             - ExecutionProviderException or its subclasses
        '''
        cmd = "qstat"

        retcode, stdout, stderr = self.execute_wait(cmd)

        # Execute_wait failed. Do no update
        if retcode != 0:
            return

        jobs_missing = list(self.resources.keys())
        for line in stdout.split('\n'):
            parts = line.split()
            # Skip the qstat header ("job-ID ...") and the dashed separator.
            # (Fixed a redundant duplicated .lower() call here.)
            if parts and parts[0].lower() != 'job-id' \
                    and not parts[0].startswith('----'):
                job_id = parts[0]
                # Column 5 of qstat output holds the job state code.
                state = translate_table.get(parts[4].lower(), JobState.UNKNOWN)
                if job_id in self.resources:
                    self.resources[job_id]['status'] = JobStatus(state)
                    jobs_missing.remove(job_id)

        # Filling in missing blanks for jobs that might have gone missing
        # we might lose some information about why the jobs failed.
        for missing_job in jobs_missing:
            self.resources[missing_job]['status'] = JobStatus(JobState.COMPLETED)

    def cancel(self, job_ids):
        ''' Cancels the resources identified by the job_ids provided by the user.

        Args:
             - job_ids (list): A list of job identifiers

        Returns:
             - A list of status from cancelling the job which can be True, False

        Raises:
             - ExecutionProviderException or its subclasses
        '''

        job_id_list = ' '.join(job_ids)
        cmd = "qdel {}".format(job_id_list)
        retcode, stdout, stderr = self.execute_wait(cmd)

        rets = None
        if retcode == 0:
            for jid in job_ids:
                # Mark every cancelled job as completed locally.
                self.resources[jid]['status'] = JobStatus(JobState.COMPLETED)
            rets = [True for i in job_ids]
        else:
            rets = [False for i in job_ids]

        return rets

    @property
    def status_polling_interval(self):
        """Seconds between scheduler status polls."""
        return 60
| {
"content_hash": "4d371834d46fc26ea35599fda4e2dd10",
"timestamp": "",
"source": "github",
"line_count": 232,
"max_line_length": 104,
"avg_line_length": 37.19827586206897,
"alnum_prop": 0.5855156431054461,
"repo_name": "Parsl/parsl",
"id": "65547c92ccdf4aff3ca94a9def1797b1398ad23f",
"size": "8630",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "parsl/providers/grid_engine/grid_engine.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1263"
},
{
"name": "CSS",
"bytes": "337"
},
{
"name": "HTML",
"bytes": "12706"
},
{
"name": "Makefile",
"bytes": "4908"
},
{
"name": "Python",
"bytes": "1173869"
},
{
"name": "Shell",
"bytes": "12057"
}
],
"symlink_target": ""
} |
import pymysql
import os
from time import sleep
from getpass import getpass
from queue import Queue
# Module-level playback state: `q` is the queue consumed by play_music(),
# `pl` keeps a parallel list of every link loaded from the database.
q = Queue()
pl = []
def create_playlist():
    """Load every track link from the LANWE table into the queue and list.

    Prompts for the MySQL root password interactively.
    """
    secret = getpass()
    with pymysql.connect(user='root', db='music', passwd=secret) as cur:
        cur.execute('SELECT LINK FROM LANWE')
        rows = cur.fetchall()
        for row in rows:
            q.put(row)
            pl.append(row)
def play_music():
    """Play every queued link with mpv until the queue is drained.

    Playback is best effort: explicit interrupts propagate, any other error
    just prints a message and stops playback.
    """
    # Local import keeps the module's top-level import block untouched.
    import subprocess
    try:
        while not q.empty():
            link = q.get()
            # Rows from cursor.fetchall() are 1-tuples; the old '%s'
            # formatting unpacked them implicitly -- do it explicitly here.
            if isinstance(link, (list, tuple)):
                link = link[0]
            # SECURITY FIX: build the argv list directly (no shell), so a
            # crafted LINK value from the database cannot inject shell
            # commands the way os.system('mpv %s ...' % link) allowed.
            subprocess.call(['mpv', str(link), '--no-video'])
    except (KeyboardInterrupt, SystemExit):
        raise
    except Exception:
        print('exiting')
if __name__ == '__main__':
    # Build the playlist from the database, then play it straight through.
    create_playlist()
    play_music()
| {
"content_hash": "133405d0d249b420ec0d32ea00ae5703",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 72,
"avg_line_length": 20.424242424242426,
"alnum_prop": 0.5845697329376854,
"repo_name": "MrLanwe/Laplayer",
"id": "d46a656e5929a828af90feb7a6550ccf6e1260f5",
"size": "674",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Laplayer.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "5002"
}
],
"symlink_target": ""
} |
import os
class Workspace():
    """A project workspace: a directory holding a 'project.bluep' config file."""

    def __init__(self, directory):
        # `directory` may be None or "" when the user has not chosen one yet.
        self.directory = directory
        self.config_file = None
        if directory is not None:
            self.config_file = directory + "/project.bluep"

    def check_workspace(self):
        """Check if workspace dir and workspace config exist.

        Create them if not.
        Return True if ok, an error message string if not.
        """
        feed_back = True
        # BUG FIX: the original used `self.directory is ""`, an identity test
        # that only worked by the accident of string interning, and it let
        # None fall through to os.makedirs(None) (TypeError). Any falsy
        # directory now means "not specified".
        if not self.directory:
            feed_back = "Please, specify a directory"
        else:
            try:
                os.makedirs(self.directory, exist_ok=True)
                if not os.path.exists(self.directory):
                    feed_back = "Could not resolve " + self.directory
                else:
                    # Touch the config file so it exists even when empty.
                    open(self.config_file, 'a').close()
                    if not os.path.exists(self.config_file):
                        feed_back = "Can't create " + self.config_file
            except PermissionError:
                feed_back = "Can't create " + self.directory
            except FileNotFoundError:
                feed_back = "Can't create " + self.config_file
        return feed_back
| {
"content_hash": "fa048350f102c16b59c883578511c112",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 70,
"avg_line_length": 33.371428571428574,
"alnum_prop": 0.5351027397260274,
"repo_name": "X-Scapin/BlueP",
"id": "45e60239b202649d13083f4cb95ebd73425f5805",
"size": "1168",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "back/workspace.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "4537"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the companies app: Companies and their Cars."""

    initial = True

    dependencies = [
        # Provides the City/Country models referenced by the FKs below.
        ('cities_light', '0006_compensate_for_0003_bytestring_bug'),
    ]

    operations = [
        migrations.CreateModel(
            name='Cars',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('description', models.CharField(max_length=150)),
                ('is_active', models.BooleanField(default=True)),
                ('is_reserved', models.BooleanField(default=False)),
            ],
            options={
                'verbose_name': 'Car',
                'verbose_name_plural': 'Company cars',
            },
        ),
        migrations.CreateModel(
            name='Companies',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('name', models.CharField(max_length=50)),
                ('slug_name', models.SlugField()),
                ('is_active', models.BooleanField(default=True)),
                ('cities', models.ManyToManyField(blank=True, to='cities_light.City')),
                ('country', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='cities_light.Country')),
            ],
            options={
                'verbose_name': 'A company',
                'verbose_name_plural': 'List of companies',
            },
        ),
        # Car FKs are added after both models exist.
        migrations.AddField(
            model_name='cars',
            name='company',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='companies.Companies'),
        ),
        migrations.AddField(
            model_name='cars',
            name='location',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='cities_light.City'),
        ),
    ]
| {
"content_hash": "b1c8fad553950c93531a0cf7e6601e52",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 119,
"avg_line_length": 39.41379310344828,
"alnum_prop": 0.5520559930008749,
"repo_name": "moas/carbooking",
"id": "a4f8b0d2b042696289e400747f1ad8debf058151",
"size": "2358",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "booking/companies/migrations/0001_initial.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "2742"
},
{
"name": "Python",
"bytes": "34505"
}
],
"symlink_target": ""
} |
"""
[+] problem description
========================
Given an array of integers, return indices of the two numbers such that they add up to a specific target.
You may assume that each input would have exactly one solution, and you may not use the same element twice.
Example:
Given nums = [2, 7, 11, 15], target = 9,
Because nums[0] + nums[1] = 2 + 7 = 9,
return [0, 1].
[+] reference
=============
- leetcode-001: https://leetcode.com/problems/two-sum/
"""
class Solution(object):
    def two_sum(self, nums, target):
        """
        description: single pass with a complement lookup table.
        time complexity: O(n), space complexity: O(n)
        :param nums: List[int]
        :param target: int
        :return: List[int] -- [earlier_index, later_index], or [None, None]
                 when no pair sums to target
        """
        seen = {}  # value -> index of its first occurrence
        for idx, value in enumerate(nums):
            wanted = target - value
            # Check before storing so an element is never paired with itself.
            if wanted in seen:
                return [seen[wanted], idx]
            seen[value] = idx
        # condition where target not found
        return [None, None]
def main():
    # Quick manual check of Solution.two_sum.
    # NOTE: Python 2 print-statement syntax -- this script targets Python 2.
    s = Solution()
    print s.two_sum([12, 7, 11, 15], 18)
    print s.two_sum([], 9)


if __name__ == '__main__':
    main()
| {
"content_hash": "0fc3f771a37954d110b8b9d76beb1227",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 107,
"avg_line_length": 25.90909090909091,
"alnum_prop": 0.5740350877192982,
"repo_name": "greyshell/Guff",
"id": "54b696e3fcf82924e6fcf93e7e73ce4563644400",
"size": "1465",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "my-code/two_sum.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "6576"
}
],
"symlink_target": ""
} |
"""
Prints a comparison between different serializers.
Compares results based on size of the output, and time taken to (de)serialize.
"""
from __future__ import print_function
from timeit import default_timer as perf_timer
import sys
import datetime
import decimal
import uuid
import Pyro4.util
import Pyro4.errors
import Pyro4.core
# Benchmark payload: one entry per data "category", each repeated enough
# times that (de)serialization work is measurable.
data = {
    "bytes": b"0123456789abcdefghijklmnopqrstuvwxyz" * 2000,
    "bytearray": bytearray(b"0123456789abcdefghijklmnopqrstuvwxyz") * 2000,
    "str": "\"0123456789\"\n'abcdefghijklmnopqrstuvwxyz'\t" * 2000,
    "unicode": u"abcdefghijklmnopqrstuvwxyz\u20ac\u20ac\u20ac\u20ac\u20ac" * 2000,
    "int": [123456789] * 1000,
    "double": [12345.987654321] * 1000,
    "long": [123456789123456789123456789123456789] * 1000,
    "tuple": [(x * x, "tuple", (300, 400, (500, 600, (x * x, x * x)))) for x in range(200)],
    "list": [[x * x, "list", [300, 400, [500, 600, [x * x]]]] for x in range(200)],
    "set": set(x * x for x in range(1000)),
    "dict": {str(i * i): {str(1000 + j): chr(j + 65) for j in range(5)} for i in range(100)},
    "exception": [ZeroDivisionError("test exeception", x * x) for x in range(1000)],
    "class": [Pyro4.core.URI("PYRO:obj@addr:9999") for x in range(1000)],
    "datetime": [datetime.datetime.now() for x in range(1000)],
    "complex": [complex(x + x, x * x) for x in range(1000)],
    "decimal": [decimal.Decimal("1122334455667788998877665544332211.9876543212345678987654321123456789") for x in range(1000)],
    "uuid": uuid.uuid4()
}
# Sentinel recorded (and sorted last) when a serializer cannot handle a category.
no_result = 9999999999
def run():
    """Benchmark every registered Pyro4 serializer against each payload.

    Returns a dict: serializer name -> {"sizes", "ser-times", "deser-times"},
    where times are the best-of-`repeat` averages in microseconds per call
    and unsupported categories are recorded as `no_result`.
    """
    results = {}
    number = 10  # calls per timed batch
    repeat = 3   # batches; the minimum batch time is kept
    for serializername, ser in Pyro4.util._serializers.items():
        print("\nserializer:", serializername)
        results[serializername] = {"sizes": {}, "ser-times": {}, "deser-times": {}}
        for key in sorted(data):
            print(key, end="; ")
            sys.stdout.flush()
            try:
                serialized = ser.dumps(data[key])
            except (TypeError, ValueError, OverflowError, Pyro4.errors.SerializeError) as x:
                print("error!")
                print(x, key)
                results[serializername]["sizes"][key] = no_result
                results[serializername]["ser-times"][key] = no_result
                results[serializername]["deser-times"][key] = no_result
            else:
                results[serializername]["sizes"][key] = len(serialized)
                durations_ser = []
                durations_deser = []
                # FIX: reuse `serialized` from above for the deserialization
                # timing instead of serializing the payload a second time.
                for _ in range(repeat):
                    start = perf_timer()
                    for _ in range(number):
                        ser.dumps(data[key])
                    durations_ser.append(perf_timer() - start)
                for _ in range(repeat):
                    start = perf_timer()
                    for _ in range(number):
                        ser.loads(serialized)
                    durations_deser.append(perf_timer() - start)
                duration_ser = min(durations_ser)
                duration_deser = min(durations_deser)
                results[serializername]["ser-times"][key] = round(duration_ser * 1e6 / number, 2)
                results[serializername]["deser-times"][key] = round(duration_deser * 1e6 / number, 2)
        print()
    return results
def tables_size(results):
    """Print, per datatype, the serializers ranked by output size (smallest first)."""
    print("\nSIZE RESULTS\n")
    per_type = {}
    for serializer_name, result in results.items():
        for datatype, size in result["sizes"].items():
            per_type.setdefault(datatype, []).append((size, serializer_name))
    for datatype in sorted(per_type):
        print(datatype)
        for rank, (size, serializer_name) in enumerate(sorted(per_type[datatype]), start=1):
            size_text = "unsupported" if size == no_result else "%8d" % size
            print(" %2d: %-8s %s" % (rank, serializer_name, size_text))
        print()
def tables_speed(results, what_times, header):
    """Print, per datatype, the serializers ranked by the chosen timing column.

    `what_times` selects "ser-times" or "deser-times"; `header` is the title line.
    """
    print("\n%s\n" % header)
    per_type = {}
    for serializer_name, result in results.items():
        # Iterate the size keys so unsupported categories are included too.
        for datatype in result["sizes"]:
            duration = result[what_times][datatype]
            per_type.setdefault(datatype, []).append((duration, serializer_name))
    for datatype in sorted(per_type):
        print(datatype)
        for rank, (duration, serializer_name) in enumerate(sorted(per_type[datatype]), start=1):
            duration_text = "unsupported" if duration == no_result else "%8d" % duration
            print(" %2d: %-8s %s" % (rank, serializer_name, duration_text))
        print()
if __name__ == "__main__":
results = run()
tables_size(results)
tables_speed(results, "ser-times", "SPEED RESULTS (SERIALIZATION)")
tables_speed(results, "deser-times", "SPEED RESULTS (DESERIALIZATION)")
| {
"content_hash": "2837893778d075e868b67fff3b73f582",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 127,
"avg_line_length": 41.62992125984252,
"alnum_prop": 0.582750141857386,
"repo_name": "irmen/Pyro4",
"id": "9c6d5ac7153defb5043a23c91d98c4f8f301e020",
"size": "5287",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/run_ser_performance.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1283"
},
{
"name": "Python",
"bytes": "618799"
},
{
"name": "Shell",
"bytes": "2394"
}
],
"symlink_target": ""
} |
from __future__ import annotations
from collections import defaultdict
from pants.engine.addresses import Address
class PackageRootedDependencyMap:
    """A utility class for mapping Java FQTs and packages to owning source Addresses.

    This class treats Java packages as logically opaque strings, ignoring the
    apparent hierarchy (which is itself misleading). For example, the packages
    "org.pantsbuild" and "org.pantsbuild.foo" are treated as completely unrelated
    packages by this mapping implementation.

    We keep track of two things:

    * Which Address provides a fully qualified symbol
    * The set of Addresses that provide a given package
    """

    class ConflictingTypeOwnershipError(Exception):
        """Raised when a Java FQT appears to be provided by more than one Address."""

    def __init__(self):
        # FQT -> the single Address that provides it.
        self._type_map: dict[str, Address] = {}
        # package -> all Addresses that provide (part of) that package.
        self._package_map: dict[str, set[Address]] = defaultdict(set)

    def add_top_level_type(self, package: str, type_: str, address: Address):
        """Declare a single Address as the provider of a top level type.

        Raises ConflictingTypeOwnershipError if another address is already the provider
        of the passed FQT.

        This method also associates the address with the type's package, and there can
        be more than one address associated with a given package.
        """
        fqt = ".".join([package, type_])
        if fqt in self._type_map and self._type_map[fqt] != address:
            raise PackageRootedDependencyMap.ConflictingTypeOwnershipError(
                f"Attempted register '{address}' as provider of fully qualified type"
                f" '{fqt}', but it is already provided by '{self._type_map[fqt]}'"
            )
        self._type_map[fqt] = address
        self._package_map[package].add(address)

    def add_package(self, package: str, address: Address):
        """Add an address as one of the providers of a package."""
        self._package_map[package].add(address)

    def addresses_for_symbol(self, symbol: str) -> frozenset[Address]:
        """Returns the set of addresses that provide the passed symbol.

        `symbol` should be a fully qualified Java type (FQT) (e.g. `foo.bar.Thing`),
        or a Java package (e.g. `foo.bar`).

        We first check if the symbol has an exact matching provider address for the FQT.
        If it does, only that address is returned. We then check if the symbol is
        actually a package, in which case we return the set of addresses that provide
        that package.

        We then chop off the rightmost part of the symbol (e.g. `foo.bar.Thing` becomes
        `foo.bar`) and repeat the above process until there is nothing left. If nothing
        is found, an empty set is returned.
        """
        parts = symbol.split(".")
        for num_parts in range(len(parts), 0, -1):
            prefix = ".".join(parts[:num_parts])
            # An exact type match wins over any package match.
            if prefix in self._type_map:
                return frozenset([self._type_map[prefix]])
            if prefix in self._package_map:
                return frozenset(self._package_map[prefix])
        return frozenset()

    def merge(self, other: PackageRootedDependencyMap):
        """Merge 'other' into this dependency map.

        Raises ConflictingTypeOwnershipError if 'other' has an FQT mapped to an address that
        conflicts with this dep map.
        """
        for type_, address in other._type_map.items():
            if type_ in self._type_map and self._type_map[type_] != address:
                # BUG FIX: these strings were missing the f-prefix, so the
                # placeholders were emitted literally instead of interpolated.
                raise PackageRootedDependencyMap.ConflictingTypeOwnershipError(
                    f'Conflicting ownership of FQT "{type_}": both {self._type_map[type_]}'
                    f" and {address} appear to provide this type."
                )
            self._type_map[type_] = address
        for package, addresses in other._package_map.items():
            self._package_map[package] |= addresses

    def to_json_dict(self):
        """Return a JSON-serializable view with addresses rendered as strings."""
        return {
            "type_map": {ty: str(addr) for ty, addr in self._type_map.items()},
            "package_map": {
                pkg: [str(addr) for addr in addrs] for pkg, addrs in self._package_map.items()
            },
        }

    def __repr__(self) -> str:
        type_map = ", ".join(f"{ty}:{addr}" for ty, addr in self._type_map.items())
        package_map = ", ".join(
            f"{pkg}:{', '.join(str(addr) for addr in addrs)}"
            for pkg, addrs in self._package_map.items()
        )
        return f"PackageRootedDependencyMap(type_map={type_map}, package_map={package_map})"
| {
"content_hash": "cef44827dc699ff03050cd6803e69980",
"timestamp": "",
"source": "github",
"line_count": 105,
"max_line_length": 94,
"avg_line_length": 44.05714285714286,
"alnum_prop": 0.6277561608300908,
"repo_name": "patricklaw/pants",
"id": "4e1f8e9a8780b639f56f8cae9b5b6f4614485b90",
"size": "4758",
"binary": false,
"copies": "1",
"ref": "refs/heads/scala",
"path": "src/python/pants/backend/java/dependency_inference/package_prefix_tree.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
import json
from io import BytesIO
from rest_framework.parsers import JSONParser
from django.conf import settings
from django.test import Client
from equipment.models import Equipment, EquipmentItem
from equipment.serializers import EquipmentItemSerializer, EquipmentSerializer
from emstrack.tests.util import date2iso
from login.tests.setup_data import TestSetup
class TestEquipmentItemGetList(TestSetup):
    """Read access to equipment items.

    Covers the serializer output, the item detail endpoint and the item list
    endpoint, exercising the admin user as well as regular users with and
    without access to each equipment holder.
    """

    def _retrieve_item(self, client, equipmentholder_id, equipment_id):
        """GET the item detail endpoint and return the response."""
        return client.get(
            '/en/api/equipment/{}/item/{}/'.format(str(equipmentholder_id), str(equipment_id)),
            follow=True)

    def _assert_item(self, client, equipmentholder_id, equipment_id):
        """Retrieve one item and check it matches the serialized DB record."""
        response = self._retrieve_item(client, equipmentholder_id, equipment_id)
        self.assertEqual(response.status_code, 200)
        result = JSONParser().parse(BytesIO(response.content))
        answer = EquipmentItemSerializer(
            EquipmentItem.objects.get(equipmentholder=equipmentholder_id,
                                      equipment=equipment_id)).data
        self.assertDictEqual(result, answer)

    def _assert_item_error(self, client, equipmentholder_id, equipment_id, status_code):
        """Retrieve one item and check the expected error status code."""
        response = self._retrieve_item(client, equipmentholder_id, equipment_id)
        self.assertEqual(response.status_code, status_code)

    def _assert_item_list(self, client, equipmentholder_id, equipment_ids):
        """Retrieve a holder's item list and check it against the DB records."""
        response = client.get(
            '/en/api/equipment/{}/item/'.format(str(equipmentholder_id)),
            follow=True)
        self.assertEqual(response.status_code, 200)
        result = JSONParser().parse(BytesIO(response.content))
        answer = [
            EquipmentItemSerializer(
                EquipmentItem.objects.get(equipmentholder=equipmentholder_id,
                                          equipment=equipment_id)).data
            for equipment_id in equipment_ids
        ]
        self.assertCountEqual(result, answer)

    def _assert_item_list_error(self, client, equipmentholder_id, status_code):
        """Retrieve a holder's item list and check the expected error code."""
        response = client.get(
            '/en/api/equipment/{}/item/'.format(str(equipmentholder_id)),
            follow=True)
        self.assertEqual(response.status_code, status_code)

    def test_equipment_item_serializer(self):
        """Serializer emits the expected flat representation of an item."""
        for he in (self.he1, self.he2, self.he3, self.he4):
            serializer = EquipmentItemSerializer(he)
            result = {
                'equipmentholder_id': he.equipmentholder.id,
                'equipment_id': he.equipment.id,
                'equipment_name': he.equipment.name,
                'equipment_type': he.equipment.type,
                'value': he.value,
                'comment': he.comment,
                'updated_by': he.updated_by.id,
                'updated_on': date2iso(he.updated_on)
            }
            self.assertDictEqual(serializer.data, result)

    def test_equipment_item_get_viewset(self):
        """Detail endpoint: admin sees everything, users only their own items."""
        client = Client()

        # admin can retrieve any existing hospital equipment
        client.login(username=settings.MQTT['USERNAME'], password=settings.MQTT['PASSWORD'])
        self._assert_item(client, self.h1.equipmentholder.id, self.e1.id)
        self._assert_item(client, self.h1.equipmentholder.id, self.e2.id)
        self._assert_item(client, self.h2.equipmentholder.id, self.e1.id)
        self._assert_item(client, self.h2.equipmentholder.id, self.e3.id)
        self._assert_item(client, self.h3.equipmentholder.id, self.e1.id)
        # inexistent item -> 404
        self._assert_item_error(client, self.h3.equipmentholder.id, self.e2.id, 404)
        client.logout()

        # testuser1: 403 on someone else's, 200 on own hospital equipment
        client.login(username='testuser1', password='top_secret')
        self._assert_item_error(client, self.h3.equipmentholder.id, self.e1.id, 403)
        self._assert_item(client, self.h1.equipmentholder.id, self.e1.id)
        self._assert_item(client, self.h1.equipmentholder.id, self.e2.id)
        self._assert_item(client, self.h2.equipmentholder.id, self.e1.id)
        self._assert_item(client, self.h2.equipmentholder.id, self.e3.id)
        client.logout()

        # testuser2 has no access to any of these items
        client.login(username='testuser2', password='very_secret')
        for holder_id, equipment_id in (
                (self.h3.equipmentholder.id, self.e1.id),
                (self.h1.equipmentholder.id, self.e1.id),
                (self.h1.equipmentholder.id, self.e2.id),
                (self.h2.equipmentholder.id, self.e1.id),
                (self.h2.equipmentholder.id, self.e3.id)):
            self._assert_item_error(client, holder_id, equipment_id, 403)
        client.logout()

    def test_equipment_item_list_viewset(self):
        """List endpoint: admin sees everything, users only their own holders."""
        client = Client()

        # admin can list the equipment of every holder
        client.login(username=settings.MQTT['USERNAME'], password=settings.MQTT['PASSWORD'])
        self._assert_item_list(client, self.h1.equipmentholder.id, (self.e1.id, self.e2.id))
        self._assert_item_list(client, self.h2.equipmentholder.id, (self.e1.id, self.e3.id))
        self._assert_item_list(client, self.h3.equipmentholder.id, (self.e1.id,))
        # inexistent holder -> permission denied
        self._assert_item_list_error(client, 1000, 403)
        client.logout()

        # testuser1 can list own holders only
        client.login(username='testuser1', password='top_secret')
        self._assert_item_list(client, self.h1.equipmentholder.id, (self.e1.id, self.e2.id))
        self._assert_item_list(client, self.h2.equipmentholder.id, (self.e1.id, self.e3.id))
        self._assert_item_list_error(client, self.h3.equipmentholder.id, 403)
        client.logout()

        # testuser2 cannot list any of these holders
        client.login(username='testuser2', password='very_secret')
        for holder_id in (self.h1.equipmentholder.id,
                          self.h2.equipmentholder.id,
                          self.h3.equipmentholder.id):
            self._assert_item_list_error(client, holder_id, 403)
        client.logout()
class TestEquipmentItemUpdate(TestSetup):
    """Write access to equipment items via PATCH."""

    def _patch_item(self, client, equipmentholder_id, equipment_id, data):
        """PATCH an item with a JSON payload and return the response."""
        return client.patch(
            '/en/api/equipment/{}/item/{}/'.format(str(equipmentholder_id), str(equipment_id)),
            content_type='application/json',
            data=json.dumps(data)
        )

    def _assert_patch_ok(self, client, equipmentholder_id, equipment_id, data):
        """PATCH an item and check the response reflects the updated record."""
        response = self._patch_item(client, equipmentholder_id, equipment_id, data)
        self.assertEqual(response.status_code, 200)
        result = JSONParser().parse(BytesIO(response.content))
        answer = EquipmentItemSerializer(
            EquipmentItem.objects.get(equipmentholder=equipmentholder_id,
                                      equipment=equipment_id)).data
        self.assertDictEqual(result, answer)

    def _get_item(self, client, equipmentholder_id, equipment_id):
        """GET an item, check for 200 and return the parsed payload."""
        response = client.get(
            '/en/api/equipment/{}/item/{}/'.format(str(equipmentholder_id), str(equipment_id)),
            follow=True)
        self.assertEqual(response.status_code, 200)
        return JSONParser().parse(BytesIO(response.content))

    def test_equipment_item_update_viewset(self):
        """PATCH permissions and persistence of value/comment updates."""
        client = Client()

        # login as admin
        client.login(username=settings.MQTT['USERNAME'], password=settings.MQTT['PASSWORD'])

        # set and read back equipment value
        value = 'True'
        self._assert_patch_ok(client, self.h1.equipmentholder.id, self.e1.id,
                              {'value': value})
        result = self._get_item(client, self.h1.equipmentholder.id, self.e1.id)
        self.assertEqual(result['value'], value)

        # set equipment comment; value must be preserved
        comment = 'some comment'
        self._assert_patch_ok(client, self.h1.equipmentholder.id, self.e1.id,
                              {'comment': comment})
        result = self._get_item(client, self.h1.equipmentholder.id, self.e1.id)
        self.assertEqual(result['value'], value)
        self.assertEqual(result['comment'], comment)

        # set inexistent equipment -> 404
        response = self._patch_item(client, self.h1.equipmentholder.id, self.e3.id,
                                    {'comment': comment})
        self.assertEqual(response.status_code, 404)

        # set wrong equipmentholder id -> 403
        response = self._patch_item(client, self.h1.equipmentholder.id + 100, self.e1.id,
                                    {'comment': comment})
        self.assertEqual(response.status_code, 403)

        # set wrong equipment id -> 404
        response = self._patch_item(client, self.h1.equipmentholder.id, -1,
                                    {'comment': comment})
        self.assertEqual(response.status_code, 404)

        client.logout()

        # login as testuser1: may update own hospital equipment
        client.login(username='testuser1', password='top_secret')

        value = 'False'
        self._assert_patch_ok(client, self.h2.equipmentholder.id, self.e1.id,
                              {'value': value})
        result = self._get_item(client, self.h2.equipmentholder.id, self.e1.id)
        self.assertEqual(result['value'], value)

        comment = 'some new comment'
        self._assert_patch_ok(client, self.h2.equipmentholder.id, self.e1.id,
                              {'comment': comment})
        result = self._get_item(client, self.h2.equipmentholder.id, self.e1.id)
        self.assertEqual(result['value'], value)
        self.assertEqual(result['comment'], comment)

        # not permitted to write someone else's equipment
        response = self._patch_item(client, self.h1.equipmentholder.id, self.e1.id,
                                    {'value': value})
        self.assertEqual(response.status_code, 403)

        client.logout()

        # login as testuser2: no write access at all
        client.login(username='testuser2', password='very_secret')
        for equipment_id in (self.e1.id, self.e2.id):
            response = self._patch_item(client, self.h1.equipmentholder.id, equipment_id,
                                        {'value': value})
            self.assertEqual(response.status_code, 403)
        client.logout()
class TestEquipmentMetadata(TestSetup):
    """Access to per-holder equipment metadata."""

    def _retrieve_metadata(self, client, equipmentholder_id):
        """GET the metadata endpoint for one equipment holder."""
        return client.get(
            '/en/api/equipment/{}/metadata/'.format(str(equipmentholder_id)),
            follow=True)

    def _assert_metadata(self, client, equipmentholder_id, equipment_ids):
        """Check the metadata payload lists exactly the given equipments."""
        response = self._retrieve_metadata(client, equipmentholder_id)
        self.assertEqual(response.status_code, 200)
        result = JSONParser().parse(BytesIO(response.content))
        answer = [
            EquipmentSerializer(Equipment.objects.get(id=equipment_id)).data
            for equipment_id in equipment_ids
        ]
        self.assertCountEqual(result, answer)

    def test_equipment_metadata_viewset(self):
        """Metadata endpoint: admin sees all holders, users only their own."""
        client = Client()

        # admin can retrieve the metadata of every holder
        client.login(username=settings.MQTT['USERNAME'], password=settings.MQTT['PASSWORD'])
        self._assert_metadata(client, self.h1.equipmentholder.id, (self.e1.id, self.e2.id))
        self._assert_metadata(client, self.h2.equipmentholder.id, (self.e1.id, self.e3.id))
        self._assert_metadata(client, self.h3.equipmentholder.id, (self.e1.id,))
        client.logout()

        # testuser1 can retrieve the metadata of own holders only
        client.login(username='testuser1', password='top_secret')
        self._assert_metadata(client, self.h1.equipmentholder.id, (self.e1.id, self.e2.id))
        self._assert_metadata(client, self.h2.equipmentholder.id, (self.e1.id, self.e3.id))
        response = self._retrieve_metadata(client, self.h3.equipmentholder.id)
        self.assertEqual(response.status_code, 404)
        client.logout()

        # testuser2 gets 404 for every holder
        client.login(username='testuser2', password='very_secret')
        for holder_id in (self.h1.equipmentholder.id,
                          self.h2.equipmentholder.id,
                          self.h3.equipmentholder.id):
            response = self._retrieve_metadata(client, holder_id)
            self.assertEqual(response.status_code, 404)
        client.logout()
| {
"content_hash": "4c7060b4520821376d4bd22c3dbecc85",
"timestamp": "",
"source": "github",
"line_count": 536,
"max_line_length": 134,
"avg_line_length": 46.076492537313435,
"alnum_prop": 0.5918937522776045,
"repo_name": "EMSTrack/WebServerAndClient",
"id": "5da5063f4f967a2e09b74fcf7973d42af9c84c6c",
"size": "24697",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "equipment/tests/test_equipment.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "10055"
},
{
"name": "HTML",
"bytes": "105332"
},
{
"name": "JavaScript",
"bytes": "169499"
},
{
"name": "Python",
"bytes": "609216"
}
],
"symlink_target": ""
} |
"""Provide methods to bootstrap a Home Assistant instance."""
import asyncio
import contextlib
from datetime import datetime
import logging
import logging.handlers
import os
import sys
import threading
from time import monotonic
from typing import TYPE_CHECKING, Any, Dict, Optional, Set
import voluptuous as vol
import yarl
from homeassistant import config as conf_util, config_entries, core, loader
from homeassistant.components import http
from homeassistant.const import REQUIRED_NEXT_PYTHON_DATE, REQUIRED_NEXT_PYTHON_VER
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import area_registry, device_registry, entity_registry
from homeassistant.helpers.typing import ConfigType
from homeassistant.setup import (
DATA_SETUP,
DATA_SETUP_STARTED,
async_set_domains_to_be_loaded,
async_setup_component,
)
from homeassistant.util.async_ import gather_with_concurrency
from homeassistant.util.logging import async_activate_log_queue_handler
from homeassistant.util.package import async_get_user_site, is_virtual_env
if TYPE_CHECKING:
from .runner import RuntimeConfig
_LOGGER = logging.getLogger(__name__)

# File name of the error log written to the config directory.
ERROR_LOG_FILENAME = "home-assistant.log"

# hass.data key for logging information.
DATA_LOGGING = "logging"

# Seconds between "waiting on integrations" warnings
# (used by _async_log_pending_setups).
LOG_SLOW_STARTUP_INTERVAL = 60

# Startup staging timeouts/cooldown; not referenced in this part of the
# module — presumably seconds, consumed by the staged-setup code. TODO confirm.
STAGE_1_TIMEOUT = 120
STAGE_2_TIMEOUT = 300
WRAP_UP_TIMEOUT = 300
COOLDOWN_TIME = 60

# Concurrency cap; note the visible resolution loop below uses
# loader.MAX_LOAD_CONCURRENTLY, not this constant.
MAX_LOAD_CONCURRENTLY = 6

# Set up before everything else so debuggers can attach (see the
# "Start up debuggers" step in _async_set_up_integrations).
DEBUGGER_INTEGRATIONS = {"debugpy"}

# Always set up first; async_from_config_dict aborts if any of these fail.
CORE_INTEGRATIONS = ("homeassistant", "persistent_notification")

# Set up as early as possible so later failures get captured.
LOGGING_INTEGRATIONS = {
    # Set log levels
    "logger",
    # Error logging
    "system_log",
    "sentry",
    # To record data
    "recorder",
}

STAGE_1_INTEGRATIONS = {
    # To make sure we forward data to other instances
    "mqtt_eventstream",
    # To provide account link implementations
    "cloud",
    # Ensure supervisor is available
    "hassio",
    # Get the frontend up and running as soon
    # as possible so problem integrations can
    # be removed
    "frontend",
}
async def async_setup_hass(
    runtime_config: "RuntimeConfig",
) -> Optional[core.HomeAssistant]:
    """Set up Home Assistant.

    Returns the initialized instance, or None when a default configuration
    could not be created.  If parsing configuration.yaml fails, the core
    integrations fail, or the frontend does not load, the instance is
    (re)started in safe mode with only the safe_mode and http integrations.
    """
    hass = core.HomeAssistant()
    hass.config.config_dir = runtime_config.config_dir

    # Logging must be up before anything else so early failures are captured.
    async_enable_logging(
        hass,
        runtime_config.verbose,
        runtime_config.log_rotate_days,
        runtime_config.log_file,
        runtime_config.log_no_color,
    )

    hass.config.skip_pip = runtime_config.skip_pip
    if runtime_config.skip_pip:
        _LOGGER.warning(
            "Skipping pip installation of required modules. This may cause issues"
        )

    # Write a default configuration if none exists yet; bail out if we can't.
    if not await conf_util.async_ensure_config_exists(hass):
        _LOGGER.error("Error getting configuration path")
        return None

    _LOGGER.info("Config directory: %s", runtime_config.config_dir)

    config_dict = None
    basic_setup_success = False
    safe_mode = runtime_config.safe_mode

    if not safe_mode:
        await hass.async_add_executor_job(conf_util.process_ha_config_upgrade, hass)

        try:
            config_dict = await conf_util.async_hass_config_yaml(hass)
        except HomeAssistantError as err:
            # Leaves config_dict as None, which triggers safe mode below.
            _LOGGER.error(
                "Failed to parse configuration.yaml: %s. Activating safe mode",
                err,
            )
        else:
            if not is_virtual_env():
                await async_mount_local_lib_path(runtime_config.config_dir)

            basic_setup_success = (
                await async_from_config_dict(config_dict, hass) is not None
            )

    if config_dict is None:
        safe_mode = True

    elif not basic_setup_success:
        _LOGGER.warning("Unable to set up core integrations. Activating safe mode")
        safe_mode = True

    elif (
        "frontend" in hass.data.get(DATA_SETUP, {})
        and "frontend" not in hass.config.components
    ):
        # Frontend was attempted but never finished loading.
        _LOGGER.warning("Detected that frontend did not load. Activating safe mode")
        # Ask integrations to shut down. It's messy but we can't
        # do a clean stop without knowing what is broken
        with contextlib.suppress(asyncio.TimeoutError):
            async with hass.timeout.async_timeout(10):
                await hass.async_stop()

        safe_mode = True
        # Replace the (partially started, possibly broken) instance with a
        # fresh one, carrying over only the core config settings.
        old_config = hass.config

        hass = core.HomeAssistant()
        hass.config.skip_pip = old_config.skip_pip
        hass.config.internal_url = old_config.internal_url
        hass.config.external_url = old_config.external_url
        hass.config.config_dir = old_config.config_dir

    if safe_mode:
        _LOGGER.info("Starting in safe mode")
        hass.config.safe_mode = True

        # Reuse the last known working http configuration, if any.
        http_conf = (await http.async_get_last_config(hass)) or {}

        await async_from_config_dict(
            {"safe_mode": {}, "http": http_conf},
            hass,
        )

    if runtime_config.open_ui:
        hass.add_job(open_hass_ui, hass)

    return hass
def open_hass_ui(hass: core.HomeAssistant) -> None:
    """Open the Home Assistant frontend in the default web browser."""
    import webbrowser  # pylint: disable=import-outside-toplevel

    api = hass.config.api
    if api is None or "frontend" not in hass.config.components:
        _LOGGER.warning("Cannot launch the UI because frontend not loaded")
        return

    scheme = "https" if api.use_ssl else "http"
    url = str(yarl.URL.build(scheme=scheme, host="127.0.0.1", port=api.port))

    if not webbrowser.open(url):
        _LOGGER.warning(
            "Unable to open the Home Assistant UI in a browser. Open it yourself at %s",
            url,
        )
async def async_from_config_dict(
    config: ConfigType, hass: core.HomeAssistant
) -> Optional[core.HomeAssistant]:
    """Try to configure Home Assistant from a configuration dictionary.

    Dynamically loads required components and its dependencies.
    This method is a coroutine.

    Returns ``hass`` on success, or ``None`` when the core integrations or
    the core config section fail to set up.
    """
    start = monotonic()

    hass.config_entries = config_entries.ConfigEntries(hass, config)
    await hass.config_entries.async_initialize()

    # Set up core.
    _LOGGER.debug("Setting up %s", CORE_INTEGRATIONS)
    if not all(
        await asyncio.gather(
            *(
                async_setup_component(hass, domain, config)
                for domain in CORE_INTEGRATIONS
            )
        )
    ):
        # Any core integration failing is fatal.
        _LOGGER.error("Home Assistant core failed to initialize. ")
        return None

    _LOGGER.debug("Home Assistant core initialized")

    # Process the [homeassistant] section of the configuration.
    core_config = config.get(core.DOMAIN, {})

    try:
        await conf_util.async_process_ha_core_config(hass, core_config)
    except vol.Invalid as config_err:
        conf_util.async_log_exception(config_err, "homeassistant", core_config, hass)
        return None
    except HomeAssistantError:
        _LOGGER.error(
            "Home Assistant core failed to initialize. "
            "Further initialization aborted"
        )
        return None

    # Set up everything else (logging/debugger/stage integrations).
    await _async_set_up_integrations(hass, config)

    stop = monotonic()
    _LOGGER.info("Home Assistant initialized in %.2fs", stop - start)

    # Warn (and post a persistent notification) when running on a Python
    # version that is scheduled for removal.
    if REQUIRED_NEXT_PYTHON_DATE and sys.version_info[:3] < REQUIRED_NEXT_PYTHON_VER:
        msg = (
            "Support for the running Python version "
            f"{'.'.join(str(x) for x in sys.version_info[:3])} is deprecated and will "
            f"be removed in the first release after {REQUIRED_NEXT_PYTHON_DATE}. "
            "Please upgrade Python to "
            f"{'.'.join(str(x) for x in REQUIRED_NEXT_PYTHON_VER)} or "
            "higher."
        )
        _LOGGER.warning(msg)
        hass.components.persistent_notification.async_create(
            msg, "Python version", "python_version"
        )

    return hass
@core.callback
def async_enable_logging(
    hass: core.HomeAssistant,
    verbose: bool = False,
    log_rotate_days: Optional[int] = None,
    log_file: Optional[str] = None,
    log_no_color: bool = False,
) -> None:
    """Set up the logging.

    This method must be run in the event loop.

    Args:
        hass: instance to attach the error-log path to (stored under
            ``hass.data[DATA_LOGGING]`` when file logging is possible).
        verbose: log at INFO instead of WARNING to the error-log file.
        log_rotate_days: rotate the error log at midnight, keeping this
            many backups; when falsy, a single file is rewritten on start.
        log_file: explicit error-log path; defaults to ERROR_LOG_FILENAME
            inside the config directory.
        log_no_color: skip the optional colorlog-based console coloring.
    """
    fmt = "%(asctime)s %(levelname)s (%(threadName)s) [%(name)s] %(message)s"
    datefmt = "%Y-%m-%d %H:%M:%S"
    if not log_no_color:
        try:
            # pylint: disable=import-outside-toplevel
            from colorlog import ColoredFormatter

            # basicConfig must be called after importing colorlog in order to
            # ensure that the handlers it sets up wraps the correct streams.
            logging.basicConfig(level=logging.INFO)

            colorfmt = f"%(log_color)s{fmt}%(reset)s"
            logging.getLogger().handlers[0].setFormatter(
                ColoredFormatter(
                    colorfmt,
                    datefmt=datefmt,
                    reset=True,
                    log_colors={
                        "DEBUG": "cyan",
                        "INFO": "green",
                        "WARNING": "yellow",
                        "ERROR": "red",
                        "CRITICAL": "red",
                    },
                )
            )
        except ImportError:
            # colorlog is optional; plain formatting is configured below.
            pass

    # If the above initialization failed for any reason, setup the default
    # formatting. If the above succeeds, this will result in a no-op.
    logging.basicConfig(format=fmt, datefmt=datefmt, level=logging.INFO)

    # Suppress overly verbose logs from libraries that aren't helpful
    logging.getLogger("requests").setLevel(logging.WARNING)
    logging.getLogger("urllib3").setLevel(logging.WARNING)
    logging.getLogger("aiohttp.access").setLevel(logging.WARNING)

    # Route uncaught exceptions (main thread and worker threads) through
    # the logging system instead of bare stderr.
    sys.excepthook = lambda *args: logging.getLogger(None).exception(
        "Uncaught exception", exc_info=args  # type: ignore
    )
    threading.excepthook = lambda args: logging.getLogger(None).exception(
        "Uncaught thread exception",
        exc_info=(args.exc_type, args.exc_value, args.exc_traceback),  # type: ignore[arg-type]
    )

    # Log errors to a file if we have write access to file or config dir
    if log_file is None:
        err_log_path = hass.config.path(ERROR_LOG_FILENAME)
    else:
        err_log_path = os.path.abspath(log_file)

    err_path_exists = os.path.isfile(err_log_path)
    err_dir = os.path.dirname(err_log_path)

    # Check if we can write to the error log if it exists or that
    # we can create files in the containing directory if not.
    if (err_path_exists and os.access(err_log_path, os.W_OK)) or (
        not err_path_exists and os.access(err_dir, os.W_OK)
    ):
        if log_rotate_days:
            err_handler: logging.FileHandler = (
                logging.handlers.TimedRotatingFileHandler(
                    err_log_path, when="midnight", backupCount=log_rotate_days
                )
            )
        else:
            # delay=True: the file is only created on the first record.
            err_handler = logging.FileHandler(err_log_path, mode="w", delay=True)

        err_handler.setLevel(logging.INFO if verbose else logging.WARNING)
        err_handler.setFormatter(logging.Formatter(fmt, datefmt=datefmt))

        logger = logging.getLogger("")
        logger.addHandler(err_handler)
        logger.setLevel(logging.INFO if verbose else logging.WARNING)

        # Save the log file location for access by other components.
        hass.data[DATA_LOGGING] = err_log_path
    else:
        _LOGGER.error("Unable to set up error log %s (access denied)", err_log_path)

    # Activate the queue-based log handler.
    async_activate_log_queue_handler(hass)
async def async_mount_local_lib_path(config_dir: str) -> str:
    """Add local library to Python Path.

    Returns the ``deps`` directory under ``config_dir``.  As a side effect,
    the user-site directory inside it is prepended to ``sys.path`` when it
    is not already present.

    This function is a coroutine.
    """
    deps_dir = os.path.join(config_dir, "deps")
    user_site = await async_get_user_site(deps_dir)
    if user_site not in sys.path:
        sys.path.insert(0, user_site)
    return deps_dir
@core.callback
def _get_domains(hass: core.HomeAssistant, config: Dict[str, Any]) -> Set[str]:
    """Compute the set of integration domains that should be set up."""
    # Config keys may carry a suffix ("domain extra"); keep only the domain
    # part and drop the common [homeassistant] section.
    domains = {key.partition(" ")[0] for key in config if key != core.DOMAIN}

    # Domains backed by config entries (skipped entirely in safe mode).
    if not hass.config.safe_mode:
        domains.update(hass.config_entries.async_domains())

    # When running under the supervisor, hassio must always be loaded.
    if "HASSIO" in os.environ:
        domains.add("hassio")

    return domains
async def _async_log_pending_setups(
    hass: core.HomeAssistant, domains: Set[str], setup_started: Dict[str, datetime]
) -> None:
    """Periodic log of setups that are pending for longer than LOG_SLOW_STARTUP_INTERVAL.

    Runs until cancelled by the caller.
    """
    while True:
        await asyncio.sleep(LOG_SLOW_STARTUP_INTERVAL)
        pending = [domain for domain in domains if domain in setup_started]
        if not pending:
            continue
        _LOGGER.warning(
            "Waiting on integrations to complete setup: %s",
            ", ".join(pending),
        )
        _LOGGER.debug("Running timeout Zones: %s", hass.timeout.zones)
async def async_setup_multi_components(
    hass: core.HomeAssistant,
    domains: Set[str],
    config: Dict[str, Any],
    setup_started: Dict[str, datetime],
) -> None:
    """Set up multiple domains concurrently, logging any that fail."""
    tasks = {
        domain: hass.async_create_task(async_setup_component(hass, domain, config))
        for domain in domains
    }
    # Side task that periodically reports domains still pending.
    watcher = asyncio.create_task(
        _async_log_pending_setups(hass, domains, setup_started)
    )
    await asyncio.wait(tasks.values())
    watcher.cancel()
    for domain, task in tasks.items():
        exception = task.exception()
        if exception is None:
            continue
        _LOGGER.error(
            "Error setting up integration %s - received exception",
            domain,
            exc_info=(type(exception), exception, exception.__traceback__),
        )
async def _async_set_up_integrations(
    hass: core.HomeAssistant, config: Dict[str, Any]
) -> None:
    """Set up all the integrations.

    Resolves the full dependency graph first, then sets integrations up in
    phases: logging, debuggers, stage 1, stage 2 — each stage guarded by
    its own timeout so a hung integration cannot stall bootstrap forever.
    """
    # Shared dict used by the setup machinery to record when each domain
    # began setting up (read by the slow-startup logger).
    setup_started = hass.data[DATA_SETUP_STARTED] = {}
    domains_to_setup = _get_domains(hass, config)
    # Resolve all dependencies so we know all integrations
    # that will have to be loaded and start rightaway
    integration_cache: Dict[str, loader.Integration] = {}
    to_resolve = domains_to_setup
    while to_resolve:
        old_to_resolve = to_resolve
        to_resolve = set()
        # Load integration manifests concurrently; exceptions returned by
        # gather are filtered out here and surface later during setup.
        integrations_to_process = [
            int_or_exc
            for int_or_exc in await gather_with_concurrency(
                loader.MAX_LOAD_CONCURRENTLY,
                *(
                    loader.async_get_integration(hass, domain)
                    for domain in old_to_resolve
                ),
                return_exceptions=True,
            )
            if isinstance(int_or_exc, loader.Integration)
        ]
        resolve_dependencies_tasks = [
            itg.resolve_dependencies()
            for itg in integrations_to_process
            if not itg.all_dependencies_resolved
        ]
        if resolve_dependencies_tasks:
            await asyncio.gather(*resolve_dependencies_tasks)
        # Queue any newly discovered dependency for the next round.
        for itg in integrations_to_process:
            integration_cache[itg.domain] = itg
            for dep in itg.all_dependencies:
                if dep in domains_to_setup:
                    continue
                domains_to_setup.add(dep)
                to_resolve.add(dep)
    _LOGGER.info("Domains to be set up: %s", domains_to_setup)
    logging_domains = domains_to_setup & LOGGING_INTEGRATIONS
    # Load logging as soon as possible
    if logging_domains:
        _LOGGER.info("Setting up logging: %s", logging_domains)
        await async_setup_multi_components(hass, logging_domains, config, setup_started)
    # Start up debuggers. Start these first in case they want to wait.
    debuggers = domains_to_setup & DEBUGGER_INTEGRATIONS
    if debuggers:
        _LOGGER.debug("Setting up debuggers: %s", debuggers)
        await async_setup_multi_components(hass, debuggers, config, setup_started)
    # calculate what components to setup in what stage
    stage_1_domains = set()
    # Find all dependencies of any dependency of any stage 1 integration that
    # we plan on loading and promote them to stage 1
    deps_promotion = STAGE_1_INTEGRATIONS
    while deps_promotion:
        old_deps_promotion = deps_promotion
        deps_promotion = set()
        for domain in old_deps_promotion:
            if domain not in domains_to_setup or domain in stage_1_domains:
                continue
            stage_1_domains.add(domain)
            dep_itg = integration_cache.get(domain)
            if dep_itg is None:
                continue
            deps_promotion.update(dep_itg.all_dependencies)
    # Everything not already handled by an earlier phase runs in stage 2.
    stage_2_domains = domains_to_setup - logging_domains - debuggers - stage_1_domains
    # Load the registries
    await asyncio.gather(
        device_registry.async_load(hass),
        entity_registry.async_load(hass),
        area_registry.async_load(hass),
    )
    # Start setup
    if stage_1_domains:
        _LOGGER.info("Setting up stage 1: %s", stage_1_domains)
        try:
            async with hass.timeout.async_timeout(
                STAGE_1_TIMEOUT, cool_down=COOLDOWN_TIME
            ):
                await async_setup_multi_components(
                    hass, stage_1_domains, config, setup_started
                )
        except asyncio.TimeoutError:
            _LOGGER.warning("Setup timed out for stage 1 - moving forward")
    # Enables after dependencies
    async_set_domains_to_be_loaded(hass, stage_2_domains)
    if stage_2_domains:
        _LOGGER.info("Setting up stage 2: %s", stage_2_domains)
        try:
            async with hass.timeout.async_timeout(
                STAGE_2_TIMEOUT, cool_down=COOLDOWN_TIME
            ):
                await async_setup_multi_components(
                    hass, stage_2_domains, config, setup_started
                )
        except asyncio.TimeoutError:
            _LOGGER.warning("Setup timed out for stage 2 - moving forward")
    # Wrap up startup
    _LOGGER.debug("Waiting for startup to wrap up")
    try:
        async with hass.timeout.async_timeout(WRAP_UP_TIMEOUT, cool_down=COOLDOWN_TIME):
            await hass.async_block_till_done()
    except asyncio.TimeoutError:
        _LOGGER.warning("Setup timed out for bootstrap - moving forward")
| {
"content_hash": "6f20dcc6a84d62b8094695c0cae2ebbf",
"timestamp": "",
"source": "github",
"line_count": 552,
"max_line_length": 95,
"avg_line_length": 33.19384057971015,
"alnum_prop": 0.6298095290072586,
"repo_name": "partofthething/home-assistant",
"id": "6d334ac8953d391cae334037585d6d36fa178623",
"size": "18323",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "homeassistant/bootstrap.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1720"
},
{
"name": "Python",
"bytes": "31051838"
},
{
"name": "Shell",
"bytes": "4832"
}
],
"symlink_target": ""
} |
class TestSetupOneArgumentPlugin:
def __init__(self, cardinal):
self.cardinal = cardinal
def setup(cardinal):
return TestSetupOneArgumentPlugin(cardinal)
| {
"content_hash": "1dc2196ec8e0092fb1de70a336832a25",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 47,
"avg_line_length": 24.571428571428573,
"alnum_prop": 0.7325581395348837,
"repo_name": "JohnMaguire/Cardinal",
"id": "62ee28e75fcbea298dabad802ae15ae4430c704f",
"size": "172",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cardinal/fixtures/fake_plugins/setup_one_argument/plugin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "256"
},
{
"name": "Python",
"bytes": "318314"
}
],
"symlink_target": ""
} |
"""
Coursera's wrapper for data exports API.
"""
import requests
from courseraoauth2client import oauth2
from courseraresearchexports.models.utils import requests_response_to_model
from courseraresearchexports.constants.api_constants import \
RESEARCH_EXPORTS_APP, RESEARCH_EXPORTS_API, CLICKSTREAM_API
from courseraresearchexports.models.ExportRequestWithMetadata import \
ExportRequestWithMetadata
@requests_response_to_model(ExportRequestWithMetadata.from_response)
def get(export_job_id):
    """Fetch a single data export job from Coursera's Research Export
    Resource.

    :param export_job_id: id of the export job to look up
    :return export_request_with_metadata: [ExportRequestWithMetaData]
    """
    url = requests.compat.urljoin(RESEARCH_EXPORTS_API, export_job_id)
    authorizer = oauth2.build_oauth2(app=RESEARCH_EXPORTS_APP).build_authorizer()
    return requests.get(url=url, auth=authorizer)
@requests_response_to_model(ExportRequestWithMetadata.from_response)
def get_all():
    """Fetch all export job requests created by the caller from Coursera's
    Research Exports Resource. Limited to the 100 most recent requests.

    :return export_requests: [ExportRequestWithMetaData]
    """
    authorizer = oauth2.build_oauth2(app=RESEARCH_EXPORTS_APP).build_authorizer()
    # 'q=my' scopes the listing to the authenticated user's own requests.
    return requests.get(
        url=RESEARCH_EXPORTS_API,
        auth=authorizer,
        params={'q': 'my'})
@requests_response_to_model(ExportRequestWithMetadata.from_response)
def post(export_request):
    """Create a data export job from a formatted request object.

    :param export_request: request whose ``to_json()`` forms the POST body
    :return export_request_with_metadata: [ExportRequestWithMetadata]
    """
    authorizer = oauth2.build_oauth2(app=RESEARCH_EXPORTS_APP).build_authorizer()
    payload = export_request.to_json()
    return requests.post(url=RESEARCH_EXPORTS_API, json=payload, auth=authorizer)
@requests_response_to_model(lambda response: response.json())
def get_clickstream_download_links(clickstream_download_links_request):
    """Return the download links for clickstream exports in a given scope.

    :param clickstream_download_links_request: ClickstreamDownloadLinksRequest
    """
    authorizer = oauth2.build_oauth2(app=RESEARCH_EXPORTS_APP).build_authorizer()
    query = clickstream_download_links_request.to_url_params()
    return requests.post(url=CLICKSTREAM_API, params=query, auth=authorizer)
| {
"content_hash": "98c67ec185a5f63711a8df779d29aeb2",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 78,
"avg_line_length": 33.513513513513516,
"alnum_prop": 0.7294354838709678,
"repo_name": "coursera/courseraresearchexports",
"id": "a3d60bf63c3cb3c02b83c6e3059deda0513fce2d",
"size": "3053",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "courseraresearchexports/exports/api.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "97250"
}
],
"symlink_target": ""
} |
class InterpolationError(Exception):
    """Raised when a point list cannot be interpolated: the list is empty
    or a coordinate has no non-null values to interpolate against."""
    pass
def interpolate_using_previous(pointlist):
    '''
    Fill gaps (None values) in a list of [x, y] points by carrying the
    previous non-null value forward. Leading gaps are filled with the
    first non-null value found for that coordinate.

    Throw
        InterpolationError
            if the list is empty or a coordinate has no non-null values

    Return
        a new pointlist without gaps (the input points are not modified)
    '''
    if not pointlist:
        raise InterpolationError('Empty list cannot be interpolated')

    # Find the first non-null value for each coordinate (0 = x, 1 = y).
    first_nonnull = [None, None]
    for i in (0, 1):
        first_nonnull[i] = next(
            (p[i] for p in pointlist if p[i] is not None), None)
    if first_nonnull[0] is None or first_nonnull[1] is None:
        # At least one coordinate has no usable values at all.
        raise InterpolationError(
            'No non-null values to interpolate against: ' + str(first_nonnull))

    interpolated = []
    # Seeding with the first non-null values back-fills any leading gaps.
    prev_nonnull = first_nonnull
    for p in pointlist:
        np = list(p)  # copy so the original point is left untouched
        for k in (0, 1):
            if np[k] is None:
                np[k] = prev_nonnull[k]
            else:
                prev_nonnull[k] = np[k]
        interpolated.append(np)
    return interpolated
| {
"content_hash": "846f34d531c8e8be71eceb8f1f9578e6",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 100,
"avg_line_length": 26.046511627906977,
"alnum_prop": 0.5598214285714286,
"repo_name": "infant-cognition-tampere/saccademodel-py",
"id": "330f91e8367a69d8360947433d4c6c149a33b98c",
"size": "1121",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "saccademodel/interpolate.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "1823967"
},
{
"name": "Python",
"bytes": "23376"
}
],
"symlink_target": ""
} |
# Blender script: builds a stack of alternating string grids (weave-like
# lattice) sized for 3D printing. NOTE(review): np and math are imported
# but unused here; V comes from the vraag star import — presumably the
# scene-construction entry point, verify against the vraag package.
import numpy as np
import math
from vraag import *
import bpy
# Nozzle diameter. The "fudge factor" tricks Cura into actually slicing the lines
d = 0.4
# Number of grid/column layers, the actual number of layers is double this number
layers = 5
# String width
xs, ys = 1*d,1*d # strings
# Gap width
xh, yh = 1.5, 1.5
x_spacing, y_spacing = xs+xh, ys+yh
#x_size = x_spacing*n
#y_size = y_spacing*m
x_size = 130
y_size = 70
# Count of gaps that fit the fixed plate size in each direction.
n, m = round(y_size/y_spacing),round(x_size/x_spacing)
layer_height = 0.2
# Drop Blender's default cube, then build the lattice from scratch.
V("#Cube").remove()
root = V.construct()
for layer in range(layers):
    # Even sub-layer: strings running along X, spread across Y.
    pos = root.translate((0, -y_size/2, 2*layer*layer_height))
    for i in range(n+1):
        pos.translate((0,i*y_spacing,0)).cube((x_size,ys+0.1,layer_height))
    # Odd sub-layer (one layer_height up): strings along Y, spread across X.
    pos = root.translate((-x_size/2, 0, 2*layer*layer_height+layer_height))
    for i in range(m+1):
        pos.translate((i*x_spacing,0,0)).cube((xs+0.1,y_size,layer_height))
| {
"content_hash": "2fe19e0c176ee1a973bbb0230b666a1c",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 81,
"avg_line_length": 24.72972972972973,
"alnum_prop": 0.6622950819672131,
"repo_name": "akloster/blender-vraag",
"id": "18ebacb8c26dea43ddd793bd678657a8dc17c7f9",
"size": "915",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/mesher/mesher3.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "54631"
}
],
"symlink_target": ""
} |
from django.conf.urls import patterns, include, url
from django.contrib import admin
# Root URLconf: Django admin plus everything else delegated to restapp.
# NOTE(review): ``patterns()`` is the pre-Django-1.10 style; it was removed
# in later Django releases — fine only for the version this project pins.
urlpatterns = patterns(
    '',
    url(r'^admin/', include(admin.site.urls)),
    # Catch-all: any remaining path is handled by the restapp URLconf.
    url(r'', include('restapp.urls')),
)
| {
"content_hash": "e6d78e838ac50043983499dc9bcaacb0",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 51,
"avg_line_length": 23,
"alnum_prop": 0.6763285024154589,
"repo_name": "rodrigorn/rest_demo",
"id": "985c458252db9f729d0772d172a399f35cf8fdf2",
"size": "207",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rest_demo/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "8527"
}
],
"symlink_target": ""
} |
# Test context shim: make the package importable from the tests directory
# without installing it, by putting the repository root on sys.path.
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
# These imports must come after the path tweak so they resolve to the
# in-repo package rather than any installed copy.
import epysteme
import epysteme.exploration as epex
import epysteme.helpers as helpers
"content_hash": "c2403b658c91493d5fc51eb4edaf0f49",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 41,
"avg_line_length": 21.428571428571427,
"alnum_prop": 0.7933333333333333,
"repo_name": "sjpet/epysteme",
"id": "2898fb60f5e09aa2d96ae311094cc6f277c32c4b",
"size": "150",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/context.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "203484"
}
],
"symlink_target": ""
} |
import random
import asyncio
from logging import getLogger
from rpcudp.protocol import RPCProtocol
from kademlia.node import Node
from kademlia.routing import RoutingTable
from kademlia.utils import digest
class KademliaProtocol(RPCProtocol):
    """RPC protocol implementing the four Kademlia messages (ping, store,
    find_node, find_value) plus the matching outbound call helpers."""

    def __init__(self, sourceNode, storage, ksize):
        RPCProtocol.__init__(self)
        self.router = RoutingTable(self, ksize, sourceNode)
        self.storage = storage
        self.sourceNode = sourceNode
        self.log = getLogger("kademlia-protocol")

    def getRefreshIDs(self):
        """
        Get ids to search for to keep old buckets up to date.
        """
        ids = []
        for bucket in self.router.getLonelyBuckets():
            # A random 160-bit id inside the bucket's range.
            ids.append(random.randint(*bucket.range).to_bytes(20, byteorder='big'))
        return ids

    def rpc_stun(self, sender):
        """Echo the caller's observed (ip, port) back to it (STUN-style)."""
        return sender

    def rpc_ping(self, sender, nodeid):
        """Answer a ping with this node's id; learn about the sender."""
        source = Node(nodeid, sender[0], sender[1])
        self.welcomeIfNewNode(source)
        return self.sourceNode.id

    def rpc_store(self, sender, nodeid, key, value):
        """Store a key/value pair locally on behalf of the sender."""
        source = Node(nodeid, sender[0], sender[1])
        self.welcomeIfNewNode(source)
        self.log.debug("got a store request from %s, storing value" % str(sender))
        self.storage[key] = value
        return True

    def rpc_find_node(self, sender, nodeid, key):
        """Return the neighbors of ``key`` from the local routing table."""
        self.log.info("finding neighbors of %i in local table" % int(nodeid.hex(), 16))
        source = Node(nodeid, sender[0], sender[1])
        self.welcomeIfNewNode(source)
        node = Node(key)
        return list(map(tuple, self.router.findNeighbors(node, exclude=source)))

    def rpc_find_value(self, sender, nodeid, key):
        """Return the stored value for ``key``, or fall back to find_node."""
        source = Node(nodeid, sender[0], sender[1])
        self.welcomeIfNewNode(source)
        value = self.storage.get(key, None)
        if value is None:
            return self.rpc_find_node(sender, nodeid, key)
        return { 'value': value }

    async def callFindNode(self, nodeToAsk, nodeToFind):
        """Send a find_node RPC to ``nodeToAsk``."""
        address = (nodeToAsk.ip, nodeToAsk.port)
        result = await self.find_node(address, self.sourceNode.id, nodeToFind.id)
        return self.handleCallResponse(result, nodeToAsk)

    async def callFindValue(self, nodeToAsk, nodeToFind):
        """Send a find_value RPC to ``nodeToAsk``."""
        address = (nodeToAsk.ip, nodeToAsk.port)
        result = await self.find_value(address, self.sourceNode.id, nodeToFind.id)
        return self.handleCallResponse(result, nodeToAsk)

    async def callPing(self, nodeToAsk):
        """Send a ping RPC to ``nodeToAsk``."""
        address = (nodeToAsk.ip, nodeToAsk.port)
        result = await self.ping(address, self.sourceNode.id)
        return self.handleCallResponse(result, nodeToAsk)

    async def callStore(self, nodeToAsk, key, value):
        """Send a store RPC to ``nodeToAsk``."""
        address = (nodeToAsk.ip, nodeToAsk.port)
        result = await self.store(address, self.sourceNode.id, key, value)
        return self.handleCallResponse(result, nodeToAsk)

    def welcomeIfNewNode(self, node):
        """
        Given a new node, send it all the keys/values it should be storing,
        then add it to the routing table.
        @param node: A new node that just joined (or that we just found out
        about).
        Process:
        For each key in storage, get k closest nodes. If newnode is closer
        than the furtherst in that list, and the node for this server
        is closer than the closest in that list, then store the key/value
        on the new node (per section 2.5 of the paper)
        """
        if not self.router.isNewNode(node):
            return
        self.log.info("never seen %s before, adding to router and setting nearby " % node)
        for key, value in self.storage.items():
            keynode = Node(digest(key))
            neighbors = self.router.findNeighbors(keynode)
            if len(neighbors) > 0:
                newNodeClose = node.distanceTo(keynode) < neighbors[-1].distanceTo(keynode)
                thisNodeClosest = self.sourceNode.distanceTo(keynode) < neighbors[0].distanceTo(keynode)
            # With no neighbors the short-circuit below stores unconditionally
            # without reading the two flags above.
            if len(neighbors) == 0 or (newNodeClose and thisNodeClosest):
                asyncio.ensure_future(self.callStore(node, key, value))
        self.router.addContact(node)

    def handleCallResponse(self, result, node):
        """
        If we get a response, add the node to the routing table. If
        we get no response, make sure it's removed from the routing table.
        """
        if not result[0]:
            self.log.warning("no response from %s, removing from router" % node)
            self.router.removeContact(node)
            return result
        # Fix: the node was missing from the format arguments, so this line
        # previously logged a literal "%s".
        self.log.info("got successful response from %s" % node)
        self.welcomeIfNewNode(node)
        return result
| {
"content_hash": "3ca65ed19626128a7d0a250dd74e4c28",
"timestamp": "",
"source": "github",
"line_count": 119,
"max_line_length": 104,
"avg_line_length": 39.403361344537814,
"alnum_prop": 0.6427809767541054,
"repo_name": "faisalburhanudin/kademlia",
"id": "c7fa9d3207b2d5d539158a79bf7028b25f6eee95",
"size": "4689",
"binary": false,
"copies": "1",
"ref": "refs/heads/asyncio",
"path": "kademlia/protocol.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "203"
},
{
"name": "Python",
"bytes": "42370"
}
],
"symlink_target": ""
} |
import unittest
class TestMaxVersionsGCRule(unittest.TestCase):
    """Tests for ``column_family.MaxVersionsGCRule``."""

    @staticmethod
    def _get_target_class():
        from google.cloud.bigtable.column_family import MaxVersionsGCRule
        return MaxVersionsGCRule

    def _make_one(self, *args, **kwargs):
        return self._get_target_class()(*args, **kwargs)

    def test___eq__type_differ(self):
        rule = self._make_one(10)
        self.assertNotEqual(rule, object())

    def test___eq__same_value(self):
        one, other = self._make_one(2), self._make_one(2)
        self.assertEqual(one, other)

    def test___ne__same_value(self):
        one, other = self._make_one(99), self._make_one(99)
        self.assertFalse(one != other)

    def test_to_pb(self):
        limit = 1337
        rule = self._make_one(max_num_versions=limit)
        self.assertEqual(rule.to_pb(), _GcRulePB(max_num_versions=limit))
class TestMaxAgeGCRule(unittest.TestCase):
    """Tests for ``column_family.MaxAgeGCRule``."""

    @staticmethod
    def _get_target_class():
        from google.cloud.bigtable.column_family import MaxAgeGCRule
        return MaxAgeGCRule

    def _make_one(self, *args, **kwargs):
        return self._get_target_class()(*args, **kwargs)

    def test___eq__type_differ(self):
        rule = self._make_one(max_age=object())
        self.assertNotEqual(rule, object())

    def test___eq__same_value(self):
        age = object()
        self.assertEqual(self._make_one(max_age=age), self._make_one(max_age=age))

    def test___ne__same_value(self):
        age = object()
        self.assertFalse(self._make_one(max_age=age) != self._make_one(max_age=age))

    def test_to_pb(self):
        import datetime
        from google.protobuf import duration_pb2

        rule = self._make_one(max_age=datetime.timedelta(seconds=1))
        expected = _GcRulePB(max_age=duration_pb2.Duration(seconds=1))
        self.assertEqual(rule.to_pb(), expected)
class TestGCRuleUnion(unittest.TestCase):
    """Tests for ``column_family.GCRuleUnion``."""

    @staticmethod
    def _get_target_class():
        from google.cloud.bigtable.column_family import GCRuleUnion
        return GCRuleUnion

    def _make_one(self, *args, **kwargs):
        return self._get_target_class()(*args, **kwargs)

    def _sub_rules(self):
        # Build a max-versions rule and a max-age rule together with their
        # protobuf equivalents, shared by the to_pb tests below.
        import datetime
        from google.protobuf import duration_pb2
        from google.cloud.bigtable.column_family import MaxAgeGCRule
        from google.cloud.bigtable.column_family import MaxVersionsGCRule

        versions_rule = MaxVersionsGCRule(42)
        versions_pb = _GcRulePB(max_num_versions=42)
        age_rule = MaxAgeGCRule(datetime.timedelta(seconds=1))
        age_pb = _GcRulePB(max_age=duration_pb2.Duration(seconds=1))
        return versions_rule, versions_pb, age_rule, age_pb

    def test_constructor(self):
        rules = object()
        self.assertIs(self._make_one(rules).rules, rules)

    def test___eq__(self):
        rules = object()
        self.assertEqual(self._make_one(rules), self._make_one(rules))

    def test___eq__type_differ(self):
        union = self._make_one(object())
        self.assertNotEqual(union, object())

    def test___ne__same_value(self):
        rules = object()
        self.assertFalse(self._make_one(rules) != self._make_one(rules))

    def test_to_pb(self):
        versions_rule, versions_pb, age_rule, age_pb = self._sub_rules()
        union = self._make_one(rules=[versions_rule, age_rule])
        expected = _GcRulePB(union=_GcRuleUnionPB(rules=[versions_pb, age_pb]))
        self.assertEqual(union.to_pb(), expected)

    def test_to_pb_nested(self):
        from google.cloud.bigtable.column_family import MaxVersionsGCRule

        versions_rule, versions_pb, age_rule, age_pb = self._sub_rules()
        inner = self._make_one(rules=[versions_rule, age_rule])
        inner_pb = _GcRulePB(union=_GcRuleUnionPB(rules=[versions_pb, age_pb]))
        extra_rule = MaxVersionsGCRule(1337)
        extra_pb = _GcRulePB(max_num_versions=1337)
        outer = self._make_one(rules=[inner, extra_rule])
        expected = _GcRulePB(union=_GcRuleUnionPB(rules=[inner_pb, extra_pb]))
        self.assertEqual(outer.to_pb(), expected)
class TestGCRuleIntersection(unittest.TestCase):
    """Tests for ``column_family.GCRuleIntersection``."""

    @staticmethod
    def _get_target_class():
        from google.cloud.bigtable.column_family import GCRuleIntersection
        return GCRuleIntersection

    def _make_one(self, *args, **kwargs):
        return self._get_target_class()(*args, **kwargs)

    def _sub_rules(self):
        # Build a max-versions rule and a max-age rule together with their
        # protobuf equivalents, shared by the to_pb tests below.
        import datetime
        from google.protobuf import duration_pb2
        from google.cloud.bigtable.column_family import MaxAgeGCRule
        from google.cloud.bigtable.column_family import MaxVersionsGCRule

        versions_rule = MaxVersionsGCRule(42)
        versions_pb = _GcRulePB(max_num_versions=42)
        age_rule = MaxAgeGCRule(datetime.timedelta(seconds=1))
        age_pb = _GcRulePB(max_age=duration_pb2.Duration(seconds=1))
        return versions_rule, versions_pb, age_rule, age_pb

    def test_constructor(self):
        rules = object()
        self.assertIs(self._make_one(rules).rules, rules)

    def test___eq__(self):
        rules = object()
        self.assertEqual(self._make_one(rules), self._make_one(rules))

    def test___eq__type_differ(self):
        intersection = self._make_one(object())
        self.assertNotEqual(intersection, object())

    def test___ne__same_value(self):
        rules = object()
        self.assertFalse(self._make_one(rules) != self._make_one(rules))

    def test_to_pb(self):
        versions_rule, versions_pb, age_rule, age_pb = self._sub_rules()
        intersection = self._make_one(rules=[versions_rule, age_rule])
        expected = _GcRulePB(
            intersection=_GcRuleIntersectionPB(rules=[versions_pb, age_pb]))
        self.assertEqual(intersection.to_pb(), expected)

    def test_to_pb_nested(self):
        from google.cloud.bigtable.column_family import MaxVersionsGCRule

        versions_rule, versions_pb, age_rule, age_pb = self._sub_rules()
        inner = self._make_one(rules=[versions_rule, age_rule])
        inner_pb = _GcRulePB(
            intersection=_GcRuleIntersectionPB(rules=[versions_pb, age_pb]))
        extra_rule = MaxVersionsGCRule(1337)
        extra_pb = _GcRulePB(max_num_versions=1337)
        outer = self._make_one(rules=[inner, extra_rule])
        expected = _GcRulePB(
            intersection=_GcRuleIntersectionPB(rules=[inner_pb, extra_pb]))
        self.assertEqual(outer.to_pb(), expected)
class TestColumnFamily(unittest.TestCase):
    """Tests for ``column_family.ColumnFamily``.

    The create/update/delete tests drive the API through a fake gRPC stub
    and assert the exact request protobufs and call sequence, so statement
    order inside the helpers is significant.
    """
    @staticmethod
    def _get_target_class():
        from google.cloud.bigtable.column_family import ColumnFamily
        return ColumnFamily
    def _make_one(self, *args, **kwargs):
        # Convenience constructor for the class under test.
        return self._get_target_class()(*args, **kwargs)
    def test_constructor(self):
        column_family_id = u'column-family-id'
        table = object()
        gc_rule = object()
        column_family = self._make_one(
            column_family_id, table, gc_rule=gc_rule)
        self.assertEqual(column_family.column_family_id, column_family_id)
        self.assertIs(column_family._table, table)
        self.assertIs(column_family.gc_rule, gc_rule)
    def test_name_property(self):
        column_family_id = u'column-family-id'
        table_name = 'table_name'
        table = _Table(table_name)
        column_family = self._make_one(column_family_id, table)
        expected_name = table_name + '/columnFamilies/' + column_family_id
        self.assertEqual(column_family.name, expected_name)
    def test___eq__(self):
        column_family_id = 'column_family_id'
        table = object()
        gc_rule = object()
        column_family1 = self._make_one(column_family_id, table,
                                        gc_rule=gc_rule)
        column_family2 = self._make_one(column_family_id, table,
                                        gc_rule=gc_rule)
        self.assertEqual(column_family1, column_family2)
    def test___eq__type_differ(self):
        column_family1 = self._make_one('column_family_id', None)
        column_family2 = object()
        self.assertNotEqual(column_family1, column_family2)
    def test___ne__same_value(self):
        column_family_id = 'column_family_id'
        table = object()
        gc_rule = object()
        column_family1 = self._make_one(column_family_id, table,
                                        gc_rule=gc_rule)
        column_family2 = self._make_one(column_family_id, table,
                                        gc_rule=gc_rule)
        comparison_val = (column_family1 != column_family2)
        self.assertFalse(comparison_val)
    def test___ne__(self):
        column_family1 = self._make_one('column_family_id1', None)
        column_family2 = self._make_one('column_family_id2', None)
        self.assertNotEqual(column_family1, column_family2)
    def test_to_pb_no_rules(self):
        column_family = self._make_one('column_family_id', None)
        pb_val = column_family.to_pb()
        expected = _ColumnFamilyPB()
        self.assertEqual(pb_val, expected)
    def test_to_pb_with_rule(self):
        from google.cloud.bigtable.column_family import MaxVersionsGCRule
        gc_rule = MaxVersionsGCRule(1)
        column_family = self._make_one('column_family_id', None,
                                       gc_rule=gc_rule)
        pb_val = column_family.to_pb()
        expected = _ColumnFamilyPB(gc_rule=gc_rule.to_pb())
        self.assertEqual(pb_val, expected)
    def _create_test_helper(self, gc_rule=None):
        """Drive ``create()`` through a fake stub and verify the request."""
        from google.cloud.bigtable._generated import (
            bigtable_table_admin_pb2 as table_admin_v2_pb2)
        from unit_tests._testing import _FakeStub
        project_id = 'project-id'
        zone = 'zone'
        cluster_id = 'cluster-id'
        table_id = 'table-id'
        column_family_id = 'column-family-id'
        table_name = ('projects/' + project_id + '/zones/' + zone +
                      '/clusters/' + cluster_id + '/tables/' + table_id)
        client = _Client()
        table = _Table(table_name, client=client)
        column_family = self._make_one(
            column_family_id, table, gc_rule=gc_rule)
        # Create request_pb
        if gc_rule is None:
            column_family_pb = _ColumnFamilyPB()
        else:
            column_family_pb = _ColumnFamilyPB(gc_rule=gc_rule.to_pb())
        request_pb = table_admin_v2_pb2.ModifyColumnFamiliesRequest(
            name=table_name)
        request_pb.modifications.add(
            id=column_family_id,
            create=column_family_pb,
        )
        # Create response_pb
        response_pb = _ColumnFamilyPB()
        # Patch the stub used by the API method.
        client._table_stub = stub = _FakeStub(response_pb)
        # Create expected_result.
        expected_result = None  # create() has no return value.
        # Perform the method and check the result.
        self.assertEqual(stub.results, (response_pb,))
        result = column_family.create()
        self.assertEqual(stub.results, ())
        self.assertEqual(result, expected_result)
        self.assertEqual(stub.method_calls, [(
            'ModifyColumnFamilies',
            (request_pb,),
            {},
        )])
    def test_create(self):
        self._create_test_helper(gc_rule=None)
    def test_create_with_gc_rule(self):
        from google.cloud.bigtable.column_family import MaxVersionsGCRule
        gc_rule = MaxVersionsGCRule(1337)
        self._create_test_helper(gc_rule=gc_rule)
    def _update_test_helper(self, gc_rule=None):
        """Drive ``update()`` through a fake stub and verify the request."""
        from unit_tests._testing import _FakeStub
        from google.cloud.bigtable._generated import (
            bigtable_table_admin_pb2 as table_admin_v2_pb2)
        project_id = 'project-id'
        zone = 'zone'
        cluster_id = 'cluster-id'
        table_id = 'table-id'
        column_family_id = 'column-family-id'
        table_name = ('projects/' + project_id + '/zones/' + zone +
                      '/clusters/' + cluster_id + '/tables/' + table_id)
        client = _Client()
        table = _Table(table_name, client=client)
        column_family = self._make_one(
            column_family_id, table, gc_rule=gc_rule)
        # Create request_pb
        if gc_rule is None:
            column_family_pb = _ColumnFamilyPB()
        else:
            column_family_pb = _ColumnFamilyPB(gc_rule=gc_rule.to_pb())
        request_pb = table_admin_v2_pb2.ModifyColumnFamiliesRequest(
            name=table_name)
        request_pb.modifications.add(
            id=column_family_id,
            update=column_family_pb,
        )
        # Create response_pb
        response_pb = _ColumnFamilyPB()
        # Patch the stub used by the API method.
        client._table_stub = stub = _FakeStub(response_pb)
        # Create expected_result.
        expected_result = None  # update() has no return value.
        # Perform the method and check the result.
        self.assertEqual(stub.results, (response_pb,))
        result = column_family.update()
        self.assertEqual(stub.results, ())
        self.assertEqual(result, expected_result)
        self.assertEqual(stub.method_calls, [(
            'ModifyColumnFamilies',
            (request_pb,),
            {},
        )])
    def test_update(self):
        self._update_test_helper(gc_rule=None)
    def test_update_with_gc_rule(self):
        from google.cloud.bigtable.column_family import MaxVersionsGCRule
        gc_rule = MaxVersionsGCRule(1337)
        self._update_test_helper(gc_rule=gc_rule)
    def test_delete(self):
        from google.protobuf import empty_pb2
        from google.cloud.bigtable._generated import (
            bigtable_table_admin_pb2 as table_admin_v2_pb2)
        from unit_tests._testing import _FakeStub
        project_id = 'project-id'
        zone = 'zone'
        cluster_id = 'cluster-id'
        table_id = 'table-id'
        column_family_id = 'column-family-id'
        table_name = ('projects/' + project_id + '/zones/' + zone +
                      '/clusters/' + cluster_id + '/tables/' + table_id)
        client = _Client()
        table = _Table(table_name, client=client)
        column_family = self._make_one(column_family_id, table)
        # Create request_pb
        request_pb = table_admin_v2_pb2.ModifyColumnFamiliesRequest(
            name=table_name)
        request_pb.modifications.add(
            id=column_family_id,
            drop=True)
        # Create response_pb
        response_pb = empty_pb2.Empty()
        # Patch the stub used by the API method.
        client._table_stub = stub = _FakeStub(response_pb)
        # Create expected_result.
        expected_result = None  # delete() has no return value.
        # Perform the method and check the result.
        self.assertEqual(stub.results, (response_pb,))
        result = column_family.delete()
        self.assertEqual(stub.results, ())
        self.assertEqual(result, expected_result)
        self.assertEqual(stub.method_calls, [(
            'ModifyColumnFamilies',
            (request_pb,),
            {},
        )])
class Test__gc_rule_from_pb(unittest.TestCase):
    """Tests for the ``_gc_rule_from_pb`` factory helper."""

    def _call_fut(self, *args, **kwargs):
        from google.cloud.bigtable.column_family import _gc_rule_from_pb
        return _gc_rule_from_pb(*args, **kwargs)

    def test_empty(self):
        self.assertIsNone(self._call_fut(_GcRulePB()))

    def test_max_num_versions(self):
        from google.cloud.bigtable.column_family import MaxVersionsGCRule

        original = MaxVersionsGCRule(1)
        recovered = self._call_fut(original.to_pb())
        self.assertIsInstance(recovered, MaxVersionsGCRule)
        self.assertEqual(recovered, original)

    def test_max_age(self):
        import datetime
        from google.cloud.bigtable.column_family import MaxAgeGCRule

        original = MaxAgeGCRule(datetime.timedelta(seconds=1))
        recovered = self._call_fut(original.to_pb())
        self.assertIsInstance(recovered, MaxAgeGCRule)
        self.assertEqual(recovered, original)

    def test_union(self):
        import datetime
        from google.cloud.bigtable.column_family import GCRuleUnion
        from google.cloud.bigtable.column_family import MaxAgeGCRule
        from google.cloud.bigtable.column_family import MaxVersionsGCRule

        original = GCRuleUnion([
            MaxVersionsGCRule(1),
            MaxAgeGCRule(datetime.timedelta(seconds=1)),
        ])
        recovered = self._call_fut(original.to_pb())
        self.assertIsInstance(recovered, GCRuleUnion)
        self.assertEqual(recovered, original)

    def test_intersection(self):
        import datetime
        from google.cloud.bigtable.column_family import GCRuleIntersection
        from google.cloud.bigtable.column_family import MaxAgeGCRule
        from google.cloud.bigtable.column_family import MaxVersionsGCRule

        original = GCRuleIntersection([
            MaxVersionsGCRule(1),
            MaxAgeGCRule(datetime.timedelta(seconds=1)),
        ])
        recovered = self._call_fut(original.to_pb())
        self.assertIsInstance(recovered, GCRuleIntersection)
        self.assertEqual(recovered, original)

    def test_unknown_field_name(self):
        class MockProto(object):
            # Records the oneof names asked for; always answers 'unknown'.
            names = []

            @classmethod
            def WhichOneof(cls, name):
                cls.names.append(name)
                return 'unknown'

        self.assertEqual(MockProto.names, [])
        self.assertRaises(ValueError, self._call_fut, MockProto)
        self.assertEqual(MockProto.names, ['rule'])
def _GcRulePB(*args, **kw):
    """Construct a ``GcRule`` protobuf from the generated table module."""
    from google.cloud.bigtable._generated import table_pb2
    return table_pb2.GcRule(*args, **kw)
def _GcRuleIntersectionPB(*args, **kw):
    """Construct a ``GcRule.Intersection`` protobuf for tests."""
    from google.cloud.bigtable._generated import table_pb2
    return table_pb2.GcRule.Intersection(*args, **kw)
def _GcRuleUnionPB(*args, **kw):
    """Construct a ``GcRule.Union`` protobuf for tests."""
    from google.cloud.bigtable._generated import table_pb2
    return table_pb2.GcRule.Union(*args, **kw)
def _ColumnFamilyPB(*args, **kw):
    """Construct a ``ColumnFamily`` protobuf for tests."""
    from google.cloud.bigtable._generated import table_pb2
    return table_pb2.ColumnFamily(*args, **kw)
class _Instance(object):
    """Minimal test double for an Instance; records only its client."""

    def __init__(self, client=None):
        self._client = client
class _Client(object):
    """Bare test double for a Client; tests only need an object identity."""
    pass
class _Table(object):
    """Test double for a Table: a name plus an owning ``_Instance``."""

    def __init__(self, name, client=None):
        self.name = name
        self._instance = _Instance(client)
| {
"content_hash": "33a5548a7e9e98d84f89d2f67619f796",
"timestamp": "",
"source": "github",
"line_count": 617,
"max_line_length": 74,
"avg_line_length": 33.416531604538086,
"alnum_prop": 0.6103889805024736,
"repo_name": "daspecster/google-cloud-python",
"id": "126a18da3003f5eb43f603bdc3ccac77bc64e11d",
"size": "21195",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bigtable/unit_tests/test_column_family.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3366"
},
{
"name": "PowerShell",
"bytes": "7195"
},
{
"name": "Protocol Buffer",
"bytes": "62009"
},
{
"name": "Python",
"bytes": "4033334"
},
{
"name": "Shell",
"bytes": "7548"
}
],
"symlink_target": ""
} |
""":mod:`gitconfig_parser.parser` -- Parser implementation
"""
from pyparsing import (
OneOrMore, restOfLine, Group, ZeroOrMore,
CharsNotIn, Suppress, Word, alphanums, Literal, pythonStyleComment)
def build_parser():
    """Assemble and return the pyparsing grammar for a gitconfig file."""
    key_expr = Word(alphanums).setResultsName('key')
    # The value is everything up to end-of-line, whitespace-stripped.
    value_expr = restOfLine.setParseAction(
        lambda s, loc, toks: toks[0].strip()
    ).setResultsName('value')
    entry = Group(key_expr + Suppress(Literal('=')) + value_expr)
    entry_list = Group(OneOrMore(entry)).setResultsName('properties')
    # Section header: any run of non-']' characters between brackets.
    header = (Suppress('[') + OneOrMore(CharsNotIn(']')) +
              Suppress(']')).setResultsName('section')
    section_expr = Group(header + entry_list)
    grammar = ZeroOrMore(section_expr).setResultsName('sections')
    grammar.ignore(pythonStyleComment)
    return grammar
def parse_file(file_):
    """Parse *file_* with the gitconfig grammar, requiring full consumption."""
    grammar = build_parser().parseWithTabs()
    return grammar.parseFile(file_, parseAll=True)
| {
"content_hash": "4c64a218d55cdeed854b1edfbf8ca35a",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 73,
"avg_line_length": 36.61538461538461,
"alnum_prop": 0.6838235294117647,
"repo_name": "seanfisk/gitconfig-parser",
"id": "d40cd5794c364cbfbd47cd7ffe66063081da15e8",
"size": "952",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gitconfig_parser/parser.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "24038"
},
{
"name": "Shell",
"bytes": "5123"
}
],
"symlink_target": ""
} |
"""
Tests for states.py.
"""
from __init__ import DocutilsTestSupport
def suite():
    """Build the parser test suite from the module-level ``totest`` table."""
    test_suite = DocutilsTestSupport.ParserTestSuite()
    test_suite.generateTests(totest)
    return test_suite
# Mapping of test-group name -> list of (rst input, expected pseudo-XML)
# pairs, consumed by suite() via ParserTestSuite.generateTests.
totest = {}
totest['option_lists'] = [
["""\
Short options:
-a option -a
-b file option -b
-c name option -c
""",
"""\
<document source="test data">
<paragraph>
Short options:
<option_list>
<option_list_item>
<option_group>
<option>
<option_string>
-a
<description>
<paragraph>
option -a
<option_list_item>
<option_group>
<option>
<option_string>
-b
<option_argument delimiter=" ">
file
<description>
<paragraph>
option -b
<option_list_item>
<option_group>
<option>
<option_string>
-c
<option_argument delimiter=" ">
name
<description>
<paragraph>
option -c
"""],
["""\
Long options:
--aaaa option --aaaa
--bbbb=file option --bbbb
--cccc name option --cccc
--d-e-f-g option --d-e-f-g
--h_i_j_k option --h_i_j_k
""",
"""\
<document source="test data">
<paragraph>
Long options:
<option_list>
<option_list_item>
<option_group>
<option>
<option_string>
--aaaa
<description>
<paragraph>
option --aaaa
<option_list_item>
<option_group>
<option>
<option_string>
--bbbb
<option_argument delimiter="=">
file
<description>
<paragraph>
option --bbbb
<option_list_item>
<option_group>
<option>
<option_string>
--cccc
<option_argument delimiter=" ">
name
<description>
<paragraph>
option --cccc
<option_list_item>
<option_group>
<option>
<option_string>
--d-e-f-g
<description>
<paragraph>
option --d-e-f-g
<option_list_item>
<option_group>
<option>
<option_string>
--h_i_j_k
<description>
<paragraph>
option --h_i_j_k
"""],
["""\
Old GNU-style options:
+a option +a
+b file option +b
+c name option +c
""",
"""\
<document source="test data">
<paragraph>
Old GNU-style options:
<option_list>
<option_list_item>
<option_group>
<option>
<option_string>
+a
<description>
<paragraph>
option +a
<option_list_item>
<option_group>
<option>
<option_string>
+b
<option_argument delimiter=" ">
file
<description>
<paragraph>
option +b
<option_list_item>
<option_group>
<option>
<option_string>
+c
<option_argument delimiter=" ">
name
<description>
<paragraph>
option +c
"""],
["""\
VMS/DOS-style options:
/A option /A
/B file option /B
/CCC option /CCC
/DDD string option /DDD
/EEE=int option /EEE
""",
"""\
<document source="test data">
<paragraph>
VMS/DOS-style options:
<option_list>
<option_list_item>
<option_group>
<option>
<option_string>
/A
<description>
<paragraph>
option /A
<option_list_item>
<option_group>
<option>
<option_string>
/B
<option_argument delimiter=" ">
file
<description>
<paragraph>
option /B
<option_list_item>
<option_group>
<option>
<option_string>
/CCC
<description>
<paragraph>
option /CCC
<option_list_item>
<option_group>
<option>
<option_string>
/DDD
<option_argument delimiter=" ">
string
<description>
<paragraph>
option /DDD
<option_list_item>
<option_group>
<option>
<option_string>
/EEE
<option_argument delimiter="=">
int
<description>
<paragraph>
option /EEE
"""],
["""\
Mixed short, long, and VMS/DOS options:
-a option -a
--bbbb=file option -bbbb
/C option /C
--dddd name option --dddd
-e string option -e
/F file option /F
""",
"""\
<document source="test data">
<paragraph>
Mixed short, long, and VMS/DOS options:
<option_list>
<option_list_item>
<option_group>
<option>
<option_string>
-a
<description>
<paragraph>
option -a
<option_list_item>
<option_group>
<option>
<option_string>
--bbbb
<option_argument delimiter="=">
file
<description>
<paragraph>
option -bbbb
<option_list_item>
<option_group>
<option>
<option_string>
/C
<description>
<paragraph>
option /C
<option_list_item>
<option_group>
<option>
<option_string>
--dddd
<option_argument delimiter=" ">
name
<description>
<paragraph>
option --dddd
<option_list_item>
<option_group>
<option>
<option_string>
-e
<option_argument delimiter=" ">
string
<description>
<paragraph>
option -e
<option_list_item>
<option_group>
<option>
<option_string>
/F
<option_argument delimiter=" ">
file
<description>
<paragraph>
option /F
"""],
["""\
Aliased options:
-a, --aaaa, /A option -a, --aaaa, /A
-b file, --bbbb=file, /B file option -b, --bbbb, /B
""",
"""\
<document source="test data">
<paragraph>
Aliased options:
<option_list>
<option_list_item>
<option_group>
<option>
<option_string>
-a
<option>
<option_string>
--aaaa
<option>
<option_string>
/A
<description>
<paragraph>
option -a, --aaaa, /A
<option_list_item>
<option_group>
<option>
<option_string>
-b
<option_argument delimiter=" ">
file
<option>
<option_string>
--bbbb
<option_argument delimiter="=">
file
<option>
<option_string>
/B
<option_argument delimiter=" ">
file
<description>
<paragraph>
option -b, --bbbb, /B
"""],
["""\
Multiple lines in descriptions, aligned:
-a option -a, line 1
line 2
-b file option -b, line 1
line 2
""",
"""\
<document source="test data">
<paragraph>
Multiple lines in descriptions, aligned:
<option_list>
<option_list_item>
<option_group>
<option>
<option_string>
-a
<description>
<paragraph>
option -a, line 1
line 2
<option_list_item>
<option_group>
<option>
<option_string>
-b
<option_argument delimiter=" ">
file
<description>
<paragraph>
option -b, line 1
line 2
"""],
["""\
Multiple lines in descriptions, not aligned:
-a option -a, line 1
line 2
-b file option -b, line 1
line 2
""",
"""\
<document source="test data">
<paragraph>
Multiple lines in descriptions, not aligned:
<option_list>
<option_list_item>
<option_group>
<option>
<option_string>
-a
<description>
<paragraph>
option -a, line 1
line 2
<option_list_item>
<option_group>
<option>
<option_string>
-b
<option_argument delimiter=" ">
file
<description>
<paragraph>
option -b, line 1
line 2
"""],
["""\
Descriptions begin on next line:
-a
option -a, line 1
line 2
-b file
option -b, line 1
line 2
""",
"""\
<document source="test data">
<paragraph>
Descriptions begin on next line:
<option_list>
<option_list_item>
<option_group>
<option>
<option_string>
-a
<description>
<paragraph>
option -a, line 1
line 2
<option_list_item>
<option_group>
<option>
<option_string>
-b
<option_argument delimiter=" ">
file
<description>
<paragraph>
option -b, line 1
line 2
"""],
["""\
Multiple body elements in descriptions:
-a option -a, para 1
para 2
-b file
option -b, para 1
para 2
""",
"""\
<document source="test data">
<paragraph>
Multiple body elements in descriptions:
<option_list>
<option_list_item>
<option_group>
<option>
<option_string>
-a
<description>
<paragraph>
option -a, para 1
<paragraph>
para 2
<option_list_item>
<option_group>
<option>
<option_string>
-b
<option_argument delimiter=" ">
file
<description>
<paragraph>
option -b, para 1
<paragraph>
para 2
"""],
["""\
--option
empty item above, no blank line
""",
"""\
<document source="test data">
<paragraph>
--option
empty item above, no blank line
"""],
["""\
An option list using equals:
--long1=arg1 Description 1
--long2=arg2 Description 2
An option list using spaces:
--long1 arg1 Description 1
--long2 arg2 Description 2
An option list using mixed delimiters:
--long1=arg1 Description 1
--long2 arg2 Description 2
An option list using mixed delimiters in one line:
--long1=arg1, --long2 arg2 Description
""",
"""\
<document source="test data">
<paragraph>
An option list using equals:
<option_list>
<option_list_item>
<option_group>
<option>
<option_string>
--long1
<option_argument delimiter="=">
arg1
<description>
<paragraph>
Description 1
<option_list_item>
<option_group>
<option>
<option_string>
--long2
<option_argument delimiter="=">
arg2
<description>
<paragraph>
Description 2
<paragraph>
An option list using spaces:
<option_list>
<option_list_item>
<option_group>
<option>
<option_string>
--long1
<option_argument delimiter=" ">
arg1
<description>
<paragraph>
Description 1
<option_list_item>
<option_group>
<option>
<option_string>
--long2
<option_argument delimiter=" ">
arg2
<description>
<paragraph>
Description 2
<paragraph>
An option list using mixed delimiters:
<option_list>
<option_list_item>
<option_group>
<option>
<option_string>
--long1
<option_argument delimiter="=">
arg1
<description>
<paragraph>
Description 1
<option_list_item>
<option_group>
<option>
<option_string>
--long2
<option_argument delimiter=" ">
arg2
<description>
<paragraph>
Description 2
<paragraph>
An option list using mixed delimiters in one line:
<option_list>
<option_list_item>
<option_group>
<option>
<option_string>
--long1
<option_argument delimiter="=">
arg1
<option>
<option_string>
--long2
<option_argument delimiter=" ">
arg2
<description>
<paragraph>
Description
"""],
["""\
Some edge cases:
--option=arg arg too many arguments
--option=arg,arg not supported (yet?)
--option=arg=arg too many arguments
--option arg arg too many arguments
-a letter arg2 too many arguments
/A letter arg2 too many arguments
--option= argument missing
--=argument option missing
-- everything missing
- this should be a bullet list item
These next ones should be simple paragraphs:
-1
--option
--1
-1 and this one too.
""",
"""\
<document source="test data">
<paragraph>
Some edge cases:
<paragraph>
--option=arg arg too many arguments
<paragraph>
--option=arg,arg not supported (yet?)
<paragraph>
--option=arg=arg too many arguments
<paragraph>
--option arg arg too many arguments
<paragraph>
-a letter arg2 too many arguments
<paragraph>
/A letter arg2 too many arguments
<paragraph>
--option= argument missing
<paragraph>
--=argument option missing
<paragraph>
-- everything missing
<bullet_list bullet="-">
<list_item>
<paragraph>
this should be a bullet list item
<paragraph>
These next ones should be simple paragraphs:
<paragraph>
-1
<paragraph>
--option
<paragraph>
--1
<paragraph>
-1 and this one too.
"""],
["""\
Complex optargs:
--source-url=<URL> Use the supplied <URL> verbatim.
--output-encoding=<name[:handler]>, -o<name[:handler]>
Specify the text encoding for output.
-f <[path]filename> Send output to file.
-d <src dest> Use diff from <src> to <dest>.
--bogus=<x y z> Bogus 3D coordinates.
""",
"""\
<document source="test data">
<paragraph>
Complex optargs:
<option_list>
<option_list_item>
<option_group>
<option>
<option_string>
--source-url
<option_argument delimiter="=">
<URL>
<description>
<paragraph>
Use the supplied <URL> verbatim.
<option_list_item>
<option_group>
<option>
<option_string>
--output-encoding
<option_argument delimiter="=">
<name[:handler]>
<option>
<option_string>
-o
<option_argument delimiter="">
<name[:handler]>
<description>
<paragraph>
Specify the text encoding for output.
<option_list_item>
<option_group>
<option>
<option_string>
-f
<option_argument delimiter=" ">
<[path]filename>
<description>
<paragraph>
Send output to file.
<option_list_item>
<option_group>
<option>
<option_string>
-d
<option_argument delimiter=" ">
<src dest>
<description>
<paragraph>
Use diff from <src> to <dest>.
<option_list_item>
<option_group>
<option>
<option_string>
--bogus
<option_argument delimiter="=">
<x y z>
<description>
<paragraph>
Bogus 3D coordinates.
"""],
]
if __name__ == '__main__':
    # Run this module's suite() directly when executed as a script.
    import unittest
    unittest.main(defaultTest='suite')
| {
"content_hash": "43ed322ae845df2ee39cc8eb097ba8a9",
"timestamp": "",
"source": "github",
"line_count": 748,
"max_line_length": 61,
"avg_line_length": 26.0427807486631,
"alnum_prop": 0.39835728952772076,
"repo_name": "spreeker/democracygame",
"id": "15115bf7ba3b7a4a6ae27b63042a26f0f20cd15f",
"size": "19677",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "external_apps/docutils-snapshot/test/test_parsers/test_rst/test_option_lists.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Emacs Lisp",
"bytes": "147372"
},
{
"name": "JavaScript",
"bytes": "98320"
},
{
"name": "Python",
"bytes": "4363362"
},
{
"name": "Shell",
"bytes": "6663"
}
],
"symlink_target": ""
} |
import yaml
import os
import logging
import sys
__author__ = 'johlyh'

# Default config file name, overridable via the LIBSOLACE_CONFIG env var.
# BUGFIX: the original line ended with a stray comma, making this a 1-tuple
# ('libsolace.yaml',); it only worked by accident through "%s" % tuple
# formatting below.
primary_config = 'libsolace.yaml'
try:
    primary_config = os.environ['LIBSOLACE_CONFIG']
except KeyError:
    # Env var not set -- keep the default above.
    pass

# Candidate config locations, searched in order; the first existing file wins.
__yamlfiles__ = [
    primary_config,
    '/etc/libsolace/libsolace.yaml',
    '/opt/libsolace/libsolace.yaml'
]
# Module docstring assigned explicitly (the file opens with imports, so a
# leading string literal would not become the docstring).
__doc__ = """
The settingsloader searches for a libsolace.yaml file in:
- libsolace.yaml
- /etc/libsolace/libsolace.yaml
- /opt/libsolace/libsolace.yaml
The environment variable: :envvar:`LIBSOLACE_CONFIG` can also be used to specify another file. e.g
LIBSOLACE_CONFIG="/tmp/my.yaml" ./bin/solace-provision.py ....
Examples:
>>> import libsolace.settingsloader as settings
>>> settings.CMDB_URL
'http://mydomain.com/path'
"""
# Root logger: compact location prefix, INFO level, writing to stdout.
logging.basicConfig(format='[%(module)s] %(filename)s:%(lineno)s %(asctime)s %(levelname)s %(message)s',
                    stream=sys.stdout)
logging.getLogger().setLevel(logging.INFO)
# NOTE(review): this rebinds the name ``logging`` from the stdlib module to a
# module-specific Logger; every later ``logging.xxx()`` call in this file goes
# through that Logger and the stdlib module is no longer reachable here.
logging = logging.getLogger(__name__)
# Set True once a config file has been found and loaded (checked at the end).
yaml_loaded = False
# defaults which are set / could not be present in the yaml file; each is
# written into module globals before the yaml values overwrite them.
defaults = {
    "UPDATE_MOCK_TESTS": False,
    "CMDB_URL": "http://someurl/site.xml",
    "CMDB_FILE": "provision-example.yaml",
    "CMDB_USER": "",
    "CMDB_PASS": "",
    "SOLACE_QUEUE_PLUGIN": "SolaceQueue"
}
# Load the FIRST existing config file; the trailing ``break`` means later
# candidates are ignored once one loads successfully.
for yaml_file in __yamlfiles__:
    if not os.path.exists(yaml_file):
        continue
    logging.info("Using yaml file %s" % yaml_file)
    # NOTE(review): ``stream`` is never closed; a with-block would be safer.
    stream = open(yaml_file, 'r')
    # NOTE(review): yaml.load without an explicit Loader executes arbitrary
    # tags -- unsafe if the config file is not trusted.
    yaml_settings = yaml.load(stream)
    # set the defaults: publish every default as a module-level global first,
    # so the yaml values below can overwrite them.
    for default in defaults:
        logging.info("Setting default %s:%s" % (default, defaults[default]))
        globals()[default] = defaults[default]
    # TODO FIXME
    # get each plugins "default" variables and add to globals
    # get the real values if any: each yaml key becomes a module global.
    for variable in yaml_settings.keys():
        logging.info("Setting config %s:%s" % (variable, yaml_settings[variable]))
        globals()[variable] = yaml_settings[variable]
    yaml_loaded = True
    logging.debug("Yaml loaded successful")
    logging.info("Loading plugins...")
    # Import every plugin module named under the PLUGINS config key; a
    # failed import aborts the whole load.
    for p in globals()['PLUGINS']:
        try:
            __import__(p, globals())
        except Exception, e:
            logging.error("Failed to import plugin %s" % p)
            raise
    break
# Fail loudly when none of the candidate config files existed.
# BUGFIX: the message previously named "libpipeline.yaml", but the files
# searched (see __yamlfiles__) are libsolace.yaml variants.
if not yaml_loaded:
    msg = "Failed to find libsolace.yaml in any of these locations: %s" % ",".join(__yamlfiles__)
    logging.error(msg)
    raise Exception(msg)
| {
"content_hash": "93a4d3da9e2d799cf50eb489019340a5",
"timestamp": "",
"source": "github",
"line_count": 93,
"max_line_length": 104,
"avg_line_length": 26.397849462365592,
"alnum_prop": 0.6419551934826884,
"repo_name": "unixunion/python-libsolace",
"id": "97c81f4ee239002ce559196e72a841885c58ad99",
"size": "2455",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "libsolace/settingsloader.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "8090"
},
{
"name": "Python",
"bytes": "400777"
},
{
"name": "Shell",
"bytes": "745"
}
],
"symlink_target": ""
} |
"""Examine success/fail history for Chrome/ium OS builds.
Used to check in a LKGM version for Chrome OS for other consumers.
"""
from __future__ import print_function
import distutils.version
import os
from chromite.cbuildbot import archive_lib
from chromite.cbuildbot import cbuildbot_config
from chromite.cbuildbot import constants
from chromite.cbuildbot import manifest_version
from chromite.cbuildbot import tree_status
from chromite.lib import commandline
from chromite.lib import cros_build_lib
from chromite.lib import cros_logging as logging
from chromite.lib import gclient
from chromite.lib import gs
from chromite.lib import osutils
from chromite.lib import parallel
class LKGMNotFound(Exception):
    """Raised when no acceptable newer LKGM candidate can be found."""
class LKGMNotCommitted(Exception):
    """Raised when the new LKGM could not be committed/landed."""
class ChromeCommitter(object):
  """Committer object responsible for obtaining a new LKGM and committing it."""

  _COMMIT_MSG = ('Automated Commit: Committing new LKGM version %(version)s '
                 'for chromeos.')
  # Only the newest N canary versions are scored as LKGM candidates.
  _CANDIDATES_TO_CONSIDER = 10

  # Poll interval / overall deadline (seconds) when waiting for the tree.
  _SLEEP_TIMEOUT = 30
  _TREE_TIMEOUT = 7200

  def __init__(self, checkout_dir, dryrun):
    """Initialize the committer.

    Args:
      checkout_dir: Path where chromium/src is (or will be) checked out.
      dryrun: If True, prepare the commit but never land it.
    """
    self._checkout_dir = checkout_dir
    self._dryrun = dryrun
    self._lkgm = None
    self._old_lkgm = None

  def CheckoutChromeLKGM(self):
    """Checkout chromeos LKGM file for chrome into tmp checkout dir."""
    # Clone on first use; afterwards just refresh the existing checkout.
    if not os.path.exists(self._checkout_dir):
      cros_build_lib.RunCommand(
          ['git', 'clone', constants.CHROMIUM_GOB_URL,
           self._checkout_dir])
    else:
      cros_build_lib.RunCommand(
          ['git', 'fetch', 'origin'], cwd=self._checkout_dir)
      cros_build_lib.RunCommand(
          ['git', 'checkout', '-f', 'origin/master'], cwd=self._checkout_dir)

    # Recreate the working branch from origin/master; the delete may fail
    # harmlessly when the branch does not exist yet (error_code_ok).
    cros_build_lib.RunCommand(
        ['git', 'branch', '-D', 'lkgm-roll'], cwd=self._checkout_dir,
        error_code_ok=True)
    cros_build_lib.RunCommand(
        ['git', 'checkout', '-b', 'lkgm-roll', 'origin/master'],
        cwd=self._checkout_dir)

    # Remember the currently-committed LKGM for newer-than comparisons.
    self._old_lkgm = osutils.ReadFile(
        os.path.join(self._checkout_dir, constants.PATH_TO_CHROME_LKGM))

  @cros_build_lib.MemoizedSingleCall
  def _GetLatestCanaryVersions(self):
    """Returns the latest CANDIDATES_TO_CONSIDER canary versions."""
    gs_handle = gs.GSContext()
    version_paths = gs_handle.LS(manifest_version.BUILD_STATUS_URL)

    # Strip gs://<path> prefix and trailing /'s.
    versions = [os.path.basename(v.rstrip('/')) for v in version_paths]

    lv = distutils.version.LooseVersion
    # We only care about canary versions which always end in 0.0.
    canary_versions = [v for v in versions if v.endswith('.0.0')]
    new_canary_versions = [v for v in canary_versions
                           if lv(v) > lv(self._old_lkgm)]
    return sorted(new_canary_versions, key=lv,
                  reverse=True)[0:self._CANDIDATES_TO_CONSIDER]

  def FindNewLKGM(self):
    """Finds a new LKGM for chrome from previous chromeos releases."""
    versions = self._GetLatestCanaryVersions()

    if not versions:
      raise LKGMNotFound('No valid LKGM found newer than the old LKGM.')

    canaries = cbuildbot_config.GetCanariesForChromeLKGM()
    logging.info('Considering the following versions: %s', ' '.join(versions))
    logging.info('Using scores from the following canaries: %s',
                 ' '.join(canaries))

    # Scores are based on passing builders.
    version_scores = {}
    for version in versions:
      for builder in canaries:
        status = manifest_version.BuildSpecsManager.GetBuildStatus(
            builder, version, retries=0)
        if status:
          if status.Passed():
            version_scores[version] = version_scores.get(version, 0) + 1
          elif status.Failed():
            # We don't consider builds with any reporting failures.
            version_scores[version] = 0
            break

      logging.info('Version %s had score %d', version,
                   version_scores.get(version, 0))

    # We want to get the version with the highest score. In case of a tie, we
    # want to choose the highest version.
    lkgm = max((v, k) for k, v in version_scores.iteritems())[1]
    if not version_scores[lkgm] > 0:
      raise LKGMNotFound('No valid LKGM found. Scores are too low.')

    self._lkgm = lkgm

  def CommitNewLKGM(self):
    """Commits the new LKGM file using our template commit message."""
    lv = distutils.version.LooseVersion
    # BUGFIX: the original guard used `and`, which could never fire once an
    # LKGM was set and crashed on LooseVersion(None) when it was not. Raise
    # when there is no candidate or it is older than the current LKGM.
    if not self._lkgm or lv(self._lkgm) < lv(self._old_lkgm):
      raise LKGMNotFound('No valid LKGM found. Did you run FindNewLKGM?')

    commit_msg = self._COMMIT_MSG % dict(version=self._lkgm)

    try:
      # Add the new versioned file.
      osutils.WriteFile(
          os.path.join(self._checkout_dir, constants.PATH_TO_CHROME_LKGM),
          self._lkgm)
      cros_build_lib.RunCommand(
          ['git', 'add', constants.PATH_TO_CHROME_LKGM], cwd=self._checkout_dir)

      # Commit it!
      cros_build_lib.RunCommand(
          ['git', 'commit', '-m', commit_msg],
          cwd=self._checkout_dir)
    except cros_build_lib.RunCommandError as e:
      raise LKGMNotCommitted(
          'Could not create git commit with new LKGM: %r' % e)

    # Wait (up to _TREE_TIMEOUT) for the Chromium tree to open before landing.
    if not tree_status.IsTreeOpen(status_url=gclient.STATUS_URL,
                                  period=self._SLEEP_TIMEOUT,
                                  timeout=self._TREE_TIMEOUT):
      raise LKGMNotCommitted('Chromium Tree is closed')

    if not self._dryrun:
      try:
        cros_build_lib.RunCommand(
            ['git', 'cl', 'land', '-f', '--bypass-hooks', '-m', commit_msg],
            cwd=self._checkout_dir)
      except cros_build_lib.RunCommandError as e:
        raise LKGMNotCommitted('Could not submit LKGM: %r' % e)

  def UpdateLatestFilesForBot(self, config, versions):
    """Update the LATEST files, for a given bot, in Google Storage.

    Args:
      config: The builder config to update.
      versions: Versions of ChromeOS to look at, sorted in descending order.
    """
    base_url = archive_lib.GetBaseUploadURI(config)
    acl = archive_lib.GetUploadACL(config)
    latest_url = None
    # gs.GSContext skips over all commands (including read-only checks)
    # when dry_run is True, so we have to create two context objects.
    # TODO(davidjames): Fix this.
    gs_ctx = gs.GSContext()
    copy_ctx = gs.GSContext(dry_run=self._dryrun)
    # Walk oldest-to-newest, forward-filling each missing LATEST-<version>
    # marker from the most recent one that exists.
    for version in reversed(versions):
      url = os.path.join(base_url, 'LATEST-%s' % version)
      found = gs_ctx.Exists(url, print_cmd=False)
      if not found and latest_url:
        try:
          copy_ctx.Copy(latest_url, url, version=0, acl=acl)
          logging.info('Copied %s -> %s', latest_url, url)
        except gs.GSContextPreconditionFailed:
          # Another writer created it concurrently; treat as found.
          found = True

      if found:
        logging.info('Found %s', url)
        latest_url = url

  def UpdateLatestFiles(self):
    """Update the LATEST files since LKGM, in Google Storage."""
    ext_cfgs, int_cfgs = cbuildbot_config.FindFullConfigsForBoard(board=None)
    versions = self._GetLatestCanaryVersions() + [self._old_lkgm]
    tasks = [[cfg, versions] for cfg in ext_cfgs + int_cfgs]
    parallel.RunTasksInProcessPool(self.UpdateLatestFilesForBot, tasks,
                                   processes=100)
def _GetParser():
  """Construct and return the command-line parser for this module."""
  arg_parser = commandline.ArgumentParser(usage=__doc__, caching=True)
  arg_parser.add_argument('--dryrun', action='store_true', default=False,
                          help="Find the next LKGM but don't commit it.")
  arg_parser.add_argument('--workdir', default=os.path.join(os.getcwd(), 'src'),
                          help=('Path to a checkout of chromium/src. '
                                'Defaults to PWD/src'))
  return arg_parser
def main(argv):
  """Entry point: find the freshest valid LKGM and commit it for Chrome."""
  opts = _GetParser().parse_args(argv)

  committer = ChromeCommitter(opts.workdir, dryrun=opts.dryrun)
  committer.CheckoutChromeLKGM()
  committer.UpdateLatestFiles()
  committer.FindNewLKGM()
  committer.CommitNewLKGM()
  return 0
| {
"content_hash": "0875c2bce0bea7de43b50e1b1baf3dc8",
"timestamp": "",
"source": "github",
"line_count": 219,
"max_line_length": 80,
"avg_line_length": 36.94063926940639,
"alnum_prop": 0.6517923362175525,
"repo_name": "guorendong/iridium-browser-ubuntu",
"id": "ab53d2cd0566cb9f3ff9c1e0eaca7302bb9744ec",
"size": "8260",
"binary": false,
"copies": "1",
"ref": "refs/heads/ubuntu/precise",
"path": "third_party/chromite/scripts/cros_best_revision.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "AppleScript",
"bytes": "8402"
},
{
"name": "Assembly",
"bytes": "256197"
},
{
"name": "Batchfile",
"bytes": "34966"
},
{
"name": "C",
"bytes": "15445429"
},
{
"name": "C++",
"bytes": "276628399"
},
{
"name": "CMake",
"bytes": "27829"
},
{
"name": "CSS",
"bytes": "867238"
},
{
"name": "Emacs Lisp",
"bytes": "3348"
},
{
"name": "Go",
"bytes": "13628"
},
{
"name": "Groff",
"bytes": "7777"
},
{
"name": "HTML",
"bytes": "20250399"
},
{
"name": "Java",
"bytes": "9950308"
},
{
"name": "JavaScript",
"bytes": "13873772"
},
{
"name": "LLVM",
"bytes": "1169"
},
{
"name": "Logos",
"bytes": "6893"
},
{
"name": "Lua",
"bytes": "16189"
},
{
"name": "Makefile",
"bytes": "179129"
},
{
"name": "Objective-C",
"bytes": "1871766"
},
{
"name": "Objective-C++",
"bytes": "9674498"
},
{
"name": "PHP",
"bytes": "42038"
},
{
"name": "PLpgSQL",
"bytes": "163248"
},
{
"name": "Perl",
"bytes": "63937"
},
{
"name": "Protocol Buffer",
"bytes": "474121"
},
{
"name": "Python",
"bytes": "11646662"
},
{
"name": "Ragel in Ruby Host",
"bytes": "104923"
},
{
"name": "Scheme",
"bytes": "10604"
},
{
"name": "Shell",
"bytes": "1151673"
},
{
"name": "Standard ML",
"bytes": "5034"
},
{
"name": "VimL",
"bytes": "4075"
},
{
"name": "nesC",
"bytes": "18347"
}
],
"symlink_target": ""
} |
import serial
from codinghyde.ant.exceptions import DriverException
class Driver(object):
def __init__(self, device, debug=False):
self.device = device
self.debug = debug
self.is_open = False
def isOpen(self):
return self.is_open
def open(self):
if self.isOpen():
raise DriverException("Could not open device (already open).")
self._open()
self.is_open = True
def close(self):
if not self.isOpen():
raise DriverException("Could not close device (not open).")
self._close()
self.is_open = False
def read(self, count):
if not self.isOpen():
raise DriverException("Could not read from device (not open).")
if count <= 0:
raise DriverException("Could not read from device (zero request).")
data = self._read(count)
if self.debug:
self._dump(data, 'READ')
return data
def write(self, data):
if not self.isOpen():
raise DriverException("Could not write to device (not open).")
if len(data) <= 0:
raise DriverException("Could not write to device (no data).")
if self.debug:
self._dump(data, 'WRITE')
return self._write(data)
def _dump(self, data, title):
if len(data) == 0:
return
print '========== [{0}] =========='.format(title)
length = 8
line = 0
while data:
row = data[:length]
data = data[length:]
hex_data = ['%02X' % ord(byte) for byte in row]
print '%04X' % line, ' '.join(hex_data)
print ''
def _open(self):
raise DriverException("Not Implemented")
def _close(self):
raise DriverException("Not Implemented")
def _read(self, count):
raise DriverException("Not Implemented")
def _write(self, data):
raise DriverException("Not Implemented")
class USB1Driver(Driver):
def __init__(self, device, baud_rate=115200, debug=False):
Driver.__init__(self, device, debug)
self.baud = baud_rate
def _open(self):
try:
dev = serial.Serial(self.device, self.baud)
except serial.SerialException, e:
raise DriverException(str(e))
if not dev.isOpen():
raise DriverException('Could not open device')
self._serial = dev
self._serial.timeout = 0.01
def _close(self):
self._serial.close()
def _read(self, count):
return self._serial.read(count)
def _write(self, data):
try:
count = self._serial.write(data)
self._serial.flush()
except serial.SerialTimeoutException, e:
raise DriverException(str(e))
return count
| {
"content_hash": "b2c55292fcaf755102e8f9844389032e",
"timestamp": "",
"source": "github",
"line_count": 110,
"max_line_length": 79,
"avg_line_length": 25.836363636363636,
"alnum_prop": 0.555594651653765,
"repo_name": "mvillalba/codinghyde.ant",
"id": "07ea185fe6db34ceb08c72a5dd647e6f4a696880",
"size": "4131",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "src/codinghyde/ant/driver.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "118395"
}
],
"symlink_target": ""
} |
from flask import Flask, render_template, request
import db
from bson import ObjectId
app = Flask(__name__)
def v(k, default=None):
    """Shorthand: read key *k* from the combined request values."""
    values = request.values
    return values.get(k, default)
@app.route("/", methods=['POST', 'GET'])
def index():
    # FIXME(security): ``query`` is built from raw request parameters and
    # passed to eval() below -- arbitrary code execution for anyone who can
    # reach this endpoint. Replace eval with an explicit dispatch to the
    # find/insert calls.
    # NOTE(review): if ``command`` is neither 'find' nor 'insert', ``query``
    # is never bound and eval() raises NameError.
    result = ''
    if request.method == 'POST':
        mongodb = 'mongodb://%s:%s/%s'%(v('server', "localhost"), v('port', "27017"), v('db'))
        db.mongo.connect(mongodb)
        if v('command') == 'find':
            query = 'db.mongo.%s.%s.find(%s)'% (v('db'), v('collection'), v('query'))
        elif v('command') == 'insert':
            query = 'db.mongo.%s.%s.insert(%s)'% (v('db'), v('collection'), v('query'))
        result = eval(query)
    # Echo the submitted form values back into the template.
    return render_template('index.html', result=result, server=v('server'),
                           db=v('db'), collection=v('collection'),
                           query=v('query'))
if __name__ == "__main__":
    # Listen on all interfaces; debug=True is unsafe outside development.
    app.run('0.0.0.0', debug=True)
| {
"content_hash": "a468c7f6723a10d0da66c229ce5361be",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 88,
"avg_line_length": 35.38461538461539,
"alnum_prop": 0.5413043478260869,
"repo_name": "Bloodevil/easy_mongo",
"id": "b202688003a2d15969c9553d10582254ba554356",
"size": "920",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "view.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1267"
}
],
"symlink_target": ""
} |
a = 4  # module-level binding; shadowed (not modified) inside print_func
def print_func():
    # Local assignment: this ``a`` shadows the global one for this call only.
    a = 17
    print "in print_func a = ", a
# Demonstrates that the global ``a`` is untouched by the call above.
print_func()
print "a = ", a,"which is global variable assigned prior to the function print_func"
| {
"content_hash": "4674a107e04864a801fdb4ece7e31633",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 84,
"avg_line_length": 21.375,
"alnum_prop": 0.6257309941520468,
"repo_name": "janusnic/py-21v",
"id": "602b85dae4a0148efb83cb2571101ee1b644e6d7",
"size": "171",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scope/1.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "11871"
}
],
"symlink_target": ""
} |
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
"""
import socket
import socketserver
import struct
import threading
# from adynaton.constants import *
# from adynaton.utility import *
class AnyProtocol(object):
    """
    Common Object for Any Protocol

    Holds the shared configuration (ports, listen addresses, timeout)
    plus the wire format and the decoded packet representation used by
    both the client and server subclasses.
    """

    def __init__(self):
        self.TCP_port = 12345
        self.TCP_TLS_port = 12345
        self.UDP_port = 12345
        self.IPv4_listen_address = '127.0.0.1'
        self.IPv6_listen_address = '::1'
        self.target_server = '127.0.0.1'
        self.timeout = 5
        # NOTE(review): '!H 2B 4H' describes 7 struct fields, while
        # default_packet() carries only 5 value slots after the header --
        # confirm the intended wire layout.
        self.packet_format = '!H 2B 4H'
        self.structure = None
        self.packet = []

    @staticmethod
    def default_packet():
        """Return the template packet: a list of per-field dicts.

        If this ever gets perfected it could move to constants.py so
        others can reuse it.
        """
        field_specs = (
            ('AP', 0, 0, 'Any Protocol'),
            ('id', 32, 1, 'Identifier'),
            ('op', 32, 2, 'Operation Mode'),
            ('v', 32, 3, 'Version'),
            ('c', 32, 4, 'Count'),
            ('d', 512, 5, 'Data'),
        )
        return [
            {'RFC_name': rfc, 'bits': bits, 'order': order,
             'name': label, 'value': 0}
            for rfc, bits, order, label in field_specs
        ]

    def stucture_to_packet(self):
        """Decode self.structure into self.packet (fields 1..5).

        NOTE(review): the method name misspells 'structure'; kept as-is
        because callers use this exact name.
        """
        unpacked = struct.unpack(self.packet_format, self.structure)
        self.packet = self.default_packet()
        # Only the first five unpacked values are mapped; any trailing
        # fields in the format are ignored, matching prior behaviour.
        for slot, value in enumerate(unpacked[:5], start=1):
            self.packet[slot]['value'] = value
        return self.packet

    def packet_to_struct(self):
        """Encode self.packet fields 1..5 into self.structure.

        NOTE(review): the format declares 7 fields but only 5 values are
        supplied, so struct.pack() raises struct.error -- confirm the
        intended format before relying on this.
        """
        values = [field['value'] for field in self.packet[1:6]]
        self.structure = struct.pack(self.packet_format, *values)
        return self.structure

    def set_TCP_port(self, TCP_port):
        """Set the plain-TCP listening port."""
        self.TCP_port = TCP_port

    def set_TLS_port(self, TCP_TLS_port):
        """Set the TCP-with-TLS listening port."""
        self.TCP_TLS_port = TCP_TLS_port

    def set_UDP_port(self, UDP_port):
        """Set the UDP listening port."""
        self.UDP_port = UDP_port

    def set_listen_address(self, IPv4_address=None, IPv6_address=None):
        """Set the listen addresses; omitted families fall back to the
        wildcard address ('0.0.0.0' / '::0')."""
        self.IPv4_listen_address = IPv4_address if IPv4_address else '0.0.0.0'
        self.IPv6_listen_address = IPv6_address if IPv6_address else '::0'
class AnyProtocolClient(AnyProtocol):
    """
    Client Object for Any Protocol
    """

    def __init__(self):
        AnyProtocol.__init__(self)
        self.query = "blah"

    def create_query(self, request=None):
        """Record *request* as the outgoing query and return it."""
        self.query = request
        return self.query

    def query_UDP_server(self):
        """Send the current query over UDP and decode the reply.

        query is not the wrong word but there are likely better terms.

        NOTE(review): create_query() is invoked with no argument, which
        resets self.query to None before sending; sendto() also expects
        bytes, and index [8] exceeds the 7 fields that '!H 2B 4H'
        unpacks -- confirm the intended packet layout.
        """
        self.create_query()
        udp_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        udp_sock.settimeout(self.timeout)
        udp_sock.sendto(
            self.query, (self.target_server, self.UDP_port))
        reply = udp_sock.recvfrom(1024)[0]
        udp_sock.close()
        return struct.unpack(self.packet_format, reply)[8] >> 32
class AnyProtocolServer(AnyProtocol):
    """
    Server Object for Any Protocol

    Owns one threading server (and its worker thread) per transport --
    plain TCP, TCP with TLS, and UDP.  ``status_msg`` holds a
    human-readable state string per transport.
    """
    def __init__(self):
        AnyProtocol.__init__(self)
        self.status_msg = {
            'TCP': 'off',
            'TCP_TLS': 'off',
            'UDP': 'off',
        }
        self.log_level = 0
        # Server/thread slots are populated by the start_* methods and
        # reset to None by stop().
        self.TCP_server = False
        self.TCP_server_thread = False
        self.TCP_TLS_server = False
        self.TCP_TLS_server_thread = False
        self.UDP_server = False
        self.UDP_server_thread = False
    def status(self):
        """Return the per-transport status-message dict.

        Bug fix: this previously returned ``self.status`` -- the bound
        method itself -- instead of the status data.
        """
        return self.status_msg
    def start(self):
        """Start all three transports."""
        self.start_TCP()
        self.start_TCP_TLS()
        self.start_UDP()
    def start_TCP(self):
        """
        Add/Confirm dual stack IPv4 and IPv6
        """
        self.TCP_server = AnyProtocolServerTCPThreading(
            (self.IPv4_listen_address, self.TCP_port),
            AnyProtocolServerTCPHandler)
        self.TCP_server_thread = threading.Thread(
            target=self.TCP_server.serve_forever)
        # daemon=True so a hung server thread cannot block interpreter
        # exit (Thread.setDaemon() is deprecated for the attribute).
        self.TCP_server_thread.daemon = True
        self.TCP_server_thread.start()
        self.status_msg['TCP'] = "\tTCP is running\n"
    def start_TCP_TLS(self):
        """
        Add/Confirm dual stack IPv4 and IPv6

        NOTE(review): no TLS wrapping is applied here -- this is a plain
        TCPServer bound to the TLS port; confirm whether ssl wrapping is
        intended.
        """
        self.TCP_TLS_server = AnyProtocolServerTCPTLSThreading(
            (self.IPv4_listen_address, self.TCP_TLS_port),
            AnyProtocolServerTCPTLSHandler)
        self.TCP_TLS_server_thread = threading.Thread(
            target=self.TCP_TLS_server.serve_forever)
        self.TCP_TLS_server_thread.daemon = True
        self.TCP_TLS_server_thread.start()
        self.status_msg['TCP_TLS'] = "\tTCP with TLS is running\n"
    def start_UDP(self):
        """
        Add/Confirm dual stack IPv4 and IPv6
        Note the udp autoselect if port 0 is chosen used in unit testing
        """
        self.UDP_server = AnyProtocolServerUDPThreading(
            (self.IPv4_listen_address, self.UDP_port),
            AnyProtocolServerUDPHandler)
        # Record the actual bound port (kernel-assigned when UDP_port
        # was 0, e.g. in unit tests).
        host, port = self.UDP_server.server_address
        self.UDP_port = port
        self.UDP_server_thread = threading.Thread(
            target=self.UDP_server.serve_forever)
        self.UDP_server_thread.daemon = True
        self.UDP_server_thread.start()
        self.status_msg['UDP'] = "serving on %s at port %d " % (host, port)
    def stop(self):
        """Shut down all transports and join their worker threads."""
        self.TCP_server.shutdown()
        self.TCP_server.server_close()
        self.TCP_server_thread.join()
        self.TCP_server_thread = None
        self.status_msg['TCP'] = "TCP is off"
        self.TCP_TLS_server.shutdown()
        self.TCP_TLS_server.server_close()
        self.TCP_TLS_server_thread.join()
        self.TCP_TLS_server_thread = None
        self.status_msg['TCP_TLS'] = "TCP with TLS is off"
        self.UDP_server.shutdown()
        self.UDP_server.server_close()
        self.UDP_server_thread.join()
        self.UDP_server_thread = None
        self.status_msg['UDP'] = "UDP is off"
class AnyProtocolServerTCPHandler(
        socketserver.BaseRequestHandler, AnyProtocol):
    """
    Object for Any Protocol
    """

    def setup(self):
        """Re-initialise AnyProtocol defaults for each new connection."""
        AnyProtocol.__init__(self)

    def handle(self):
        """Print the stripped request payload and the peer address."""
        payload = self.request.recv(1024).strip()
        print(payload)
        peer = str(self.client_address[0])
        print(peer)
class AnyProtocolServerTCPTLSHandler(
        socketserver.BaseRequestHandler, AnyProtocol):
    """
    Object for Any Protocol
    """

    def setup(self):
        """Re-initialise AnyProtocol defaults for each new connection."""
        AnyProtocol.__init__(self)

    def handle(self):
        """Print the stripped request payload and the peer address."""
        payload = self.request.recv(1024).strip()
        print(payload)
        peer = str(self.client_address[0])
        print(peer)
class AnyProtocolServerUDPHandler(
        socketserver.BaseRequestHandler, AnyProtocol):
    """
    Object for Any Protocol
    """

    def setup(self):
        """Re-initialise AnyProtocol defaults for each datagram."""
        AnyProtocol.__init__(self)

    def handle(self):
        """Print the datagram payload and the sender address."""
        datagram = self.request[0]
        print(datagram)
        sender = self.client_address[0]
        print(sender)
class AnyProtocolServerTCPThreading(
    socketserver.ThreadingMixIn, socketserver.TCPServer):
    """
    Threaded TCP server for Any Protocol; ThreadingMixIn handles each
    connection in its own thread.  Paired with
    AnyProtocolServerTCPHandler by AnyProtocolServer.start_TCP().
    """
    pass
class AnyProtocolServerTCPTLSThreading(
    socketserver.ThreadingMixIn, socketserver.TCPServer):
    """
    Threaded TCP server used for the TLS port; ThreadingMixIn handles
    each connection in its own thread.  Paired with
    AnyProtocolServerTCPTLSHandler by AnyProtocolServer.start_TCP_TLS().
    """
    pass
class AnyProtocolServerUDPThreading(
    socketserver.ThreadingMixIn, socketserver.UDPServer):
    """
    Threaded UDP server for Any Protocol; ThreadingMixIn handles each
    datagram in its own thread.  Paired with
    AnyProtocolServerUDPHandler by AnyProtocolServer.start_UDP().
    """
    pass
| {
"content_hash": "5cd627d48c5b465aa1c6f94f18eca25b",
"timestamp": "",
"source": "github",
"line_count": 331,
"max_line_length": 75,
"avg_line_length": 28.229607250755286,
"alnum_prop": 0.5688142123287672,
"repo_name": "lathama/Adynaton",
"id": "0f0e331ae89a7d3f546c4208fbe1142856cc406a",
"size": "9344",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "adynaton/pattern.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "168176"
}
],
"symlink_target": ""
} |
from zechat.app import create_app, create_manager
# Script entry point: build the Flask application, wrap it in the CLI
# manager, and run the manager's command loop.  Note there is no
# __main__ guard, so importing this module also runs the manager.
app = create_app()
manager = create_manager(app)
manager.run()
| {
"content_hash": "f438510e92110af6e32cfd0e7f0b2307",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 49,
"avg_line_length": 22.8,
"alnum_prop": 0.7543859649122807,
"repo_name": "mgax/zechat",
"id": "36e635ab35242302adc7c24a69660183a617325e",
"size": "137",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "manage.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "759"
},
{
"name": "CoffeeScript",
"bytes": "28933"
},
{
"name": "JavaScript",
"bytes": "246321"
},
{
"name": "Python",
"bytes": "26757"
}
],
"symlink_target": ""
} |
from Plex import *
# Token building blocks for the example scanner, composed from Plex
# pattern combinators (Range/Rep/Rep1/Any).
letter = Range("AZaz")
digit = Range("09")
name = letter + Rep(letter | digit)
number = Rep1(digit)
space = Any(" \t\n")
class MyScanner(Scanner):
    # Track nesting of (* ... *) comments: enter the scanner's
    # 'comment' state on the outermost opener and leave it only when
    # the matching outermost closer is seen.
    def begin_comment(self, text):
        if self.nesting_level == 0:
            self.begin('comment')
        self.nesting_level = self.nesting_level + 1
    def end_comment(self, text):
        self.nesting_level = self.nesting_level - 1
        if self.nesting_level == 0:
            self.begin('')
# Token specification: identifiers, integers, ignored whitespace, and a
# nested-comment state driven by begin_comment/end_comment.
# NOTE(review): this appears at module level but references
# begin_comment/end_comment, which are MyScanner methods -- it looks
# like a class attribute whose indentation was lost; confirm against
# the upstream Plex example7.
lexicon = Lexicon([
    (name, 'ident'),
    (number, 'int'),
    (space, IGNORE),
    (Str("(*"), begin_comment),
    State('comment', [
        (Str("(*"), begin_comment),
        (Str("*)"), end_comment),
        (AnyChar, IGNORE)
    ])
])
# NOTE(review): takes 'self' and reads self.lexicon, so this appears to
# be MyScanner.__init__ dedented to module level -- confirm intended
# indentation against the upstream Plex example7.
def __init__(self, file, name):
    Scanner.__init__(self, self.lexicon, file, name)
    self.nesting_level = 0
# Driver (Python 2 syntax): scan example7.in and print each token until
# the scanner reports end-of-input (token type None).
filename = "example7.in"
f = open(filename, "r")
scanner = MyScanner(f, filename)
while 1:
    token = scanner.read()
    print token
    if token[0] is None:
        break
| {
"content_hash": "3f8b96caaba5aa768b26e0a83676eaa2",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 52,
"avg_line_length": 21.804347826086957,
"alnum_prop": 0.5712861415752741,
"repo_name": "probcomp/bayeslite",
"id": "1cb158b4edd97908b00e0529e3d3ab9ff34a59e6",
"size": "1022",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "external/plex/dist/examples/example7.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "2007"
},
{
"name": "Python",
"bytes": "1079798"
},
{
"name": "Shell",
"bytes": "2560"
},
{
"name": "Yacc",
"bytes": "42578"
}
],
"symlink_target": ""
} |
"""Test case-sensitivity (PEP 235)."""
import sys
from test.test_importlib import util
importlib = util.import_importlib('importlib')
machinery = util.import_importlib('importlib.machinery')
import os
from test.support import os_helper
import unittest
import warnings
@util.case_insensitive_tests
class CaseSensitivityTest(util.CASEOKTestBase):

    """PEP 235 dictates that on case-preserving, case-insensitive file systems
    that imports are case-sensitive unless the PYTHONCASEOK environment
    variable is set."""

    # A name that differs from its lower-cased spelling, so sensitive and
    # insensitive searches can disagree.
    name = 'MoDuLe'
    assert name != name.lower()

    def finder(self, path):
        """Build a FileFinder recognising source and bytecode modules."""
        source_details = (self.machinery.SourceFileLoader,
                          self.machinery.SOURCE_SUFFIXES)
        bytecode_details = (self.machinery.SourcelessFileLoader,
                            self.machinery.BYTECODE_SUFFIXES)
        return self.machinery.FileFinder(path, source_details,
                                         bytecode_details)

    def sensitivity_test(self):
        """Look for a module with matching and non-matching sensitivity."""
        sensitive_pkg = 'sensitive.{0}'.format(self.name)
        insensitive_pkg = 'insensitive.{0}'.format(self.name.lower())
        with util.create_modules(insensitive_pkg, sensitive_pkg) as mapping:
            root = mapping['.root']
            sensitive_finder = self.finder(os.path.join(root, 'sensitive'))
            insensitive_finder = self.finder(os.path.join(root,
                                                          'insensitive'))
            return self.find(sensitive_finder), self.find(insensitive_finder)

    @unittest.skipIf(sys.flags.ignore_environment, 'ignore_environment flag was set')
    def test_sensitive(self):
        # Without PYTHONCASEOK only the exactly-matching spelling is found.
        with os_helper.EnvironmentVarGuard() as env:
            env.unset('PYTHONCASEOK')
            self.caseok_env_changed(should_exist=False)
            found_exact, found_lower = self.sensitivity_test()
            self.assertIsNotNone(found_exact)
            self.assertIn(self.name, found_exact.get_filename(self.name))
            self.assertIsNone(found_lower)

    @unittest.skipIf(sys.flags.ignore_environment, 'ignore_environment flag was set')
    def test_insensitive(self):
        # With PYTHONCASEOK set, both spellings resolve to the module.
        with os_helper.EnvironmentVarGuard() as env:
            env.set('PYTHONCASEOK', '1')
            self.caseok_env_changed(should_exist=True)
            found_exact, found_lower = self.sensitivity_test()
            self.assertIsNotNone(found_exact)
            self.assertIn(self.name, found_exact.get_filename(self.name))
            self.assertIsNotNone(found_lower)
            self.assertIn(self.name, found_lower.get_filename(self.name))
class CaseSensitivityTestPEP302(CaseSensitivityTest):

    def find(self, finder):
        """Locate the module via the legacy PEP 302 find_module() API."""
        with warnings.catch_warnings():
            # find_module() is deprecated; keep the test output clean.
            warnings.simplefilter("ignore", DeprecationWarning)
            loader = finder.find_module(self.name)
        return loader
# util.test_both() parametrises the class against both the frozen and
# the source implementations of importlib.
(Frozen_CaseSensitivityTestPEP302,
 Source_CaseSensitivityTestPEP302
 ) = util.test_both(CaseSensitivityTestPEP302, importlib=importlib,
                    machinery=machinery)
class CaseSensitivityTestPEP451(CaseSensitivityTest):

    def find(self, finder):
        """Locate the module via the PEP 451 find_spec() API."""
        spec = finder.find_spec(self.name)
        if spec is None:
            return None
        return spec.loader
# util.test_both() parametrises the class against both the frozen and
# the source implementations of importlib.
(Frozen_CaseSensitivityTestPEP451,
 Source_CaseSensitivityTestPEP451
 ) = util.test_both(CaseSensitivityTestPEP451, importlib=importlib,
                    machinery=machinery)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| {
"content_hash": "b0e673ad7fbc0411b3a5309c841772fc",
"timestamp": "",
"source": "github",
"line_count": 92,
"max_line_length": 85,
"avg_line_length": 38.41304347826087,
"alnum_prop": 0.6624221844934918,
"repo_name": "brython-dev/brython",
"id": "9d472707abe840e72301988a460163aa2838c49b",
"size": "3534",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "www/src/Lib/test/test_importlib/source/test_case_sensitivity.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "24308"
},
{
"name": "HTML",
"bytes": "5144999"
},
{
"name": "JavaScript",
"bytes": "4143100"
},
{
"name": "PLSQL",
"bytes": "22886"
},
{
"name": "Python",
"bytes": "22236375"
},
{
"name": "Roff",
"bytes": "21126"
},
{
"name": "VBScript",
"bytes": "481"
}
],
"symlink_target": ""
} |
import copy
from itertools import chain as iter_chain
from itertools import combinations as iter_combinations
import eventlet
import mock
import netaddr
from neutron_lib import constants as lib_constants
from neutron_lib import exceptions as exc
from oslo_config import cfg
from oslo_log import log
import oslo_messaging
from oslo_utils import timeutils
from oslo_utils import uuidutils
import six
from testtools import matchers
from neutron.agent.common import config as agent_config
from neutron.agent.l3 import agent as l3_agent
from neutron.agent.l3 import dvr_edge_router as dvr_router
from neutron.agent.l3 import dvr_snat_ns
from neutron.agent.l3 import legacy_router
from neutron.agent.l3 import link_local_allocator as lla
from neutron.agent.l3 import namespaces
from neutron.agent.l3 import router_info as l3router
from neutron.agent.l3 import router_processing_queue
from neutron.agent.linux import dibbler
from neutron.agent.linux import external_process
from neutron.agent.linux import interface
from neutron.agent.linux import iptables_manager
from neutron.agent.linux import pd
from neutron.agent.linux import ra
from neutron.agent.metadata import driver as metadata_driver
from neutron.agent import rpc as agent_rpc
from neutron.common import constants as n_const
from neutron.common import exceptions as n_exc
from neutron.conf.agent.l3 import config as l3_config
from neutron.conf.agent.l3 import ha as ha_conf
from neutron.conf import common as base_config
from neutron.extensions import portbindings
from neutron.plugins.common import constants as p_const
from neutron.tests import base
from neutron.tests.common import l3_test_common
# Test-wide fixtures: a short alias for UUID generation, the fake agent
# hostname, two pre-generated ids, and the floating-IP rule priority.
_uuid = uuidutils.generate_uuid
HOSTNAME = 'myhost'
FAKE_ID = _uuid()
FAKE_ID_2 = _uuid()
FIP_PRI = 32768
class BasicRouterOperationsFramework(base.BaseTestCase):
    """Shared fixture for L3 agent tests.

    setUp() registers every config option group the agent reads and
    patches out all external interaction (interface driver, ip_lib,
    external processes, the L3 plugin RPC API) so router/agent logic
    can run without touching the system.  The patch/registration order
    below is preserved deliberately.
    """
    def setUp(self):
        super(BasicRouterOperationsFramework, self).setUp()
        # Keep greenthreads from actually spawning during tests.
        mock.patch('eventlet.spawn').start()
        self.conf = agent_config.setup_conf()
        self.conf.register_opts(base_config.core_opts)
        log.register_options(self.conf)
        self.conf.register_opts(agent_config.AGENT_STATE_OPTS, 'AGENT')
        l3_config.register_l3_agent_config_opts(l3_config.OPTS, self.conf)
        ha_conf.register_l3_agent_ha_opts(self.conf)
        agent_config.register_interface_driver_opts_helper(self.conf)
        agent_config.register_process_monitor_opts(self.conf)
        agent_config.register_availability_zone_opts_helper(self.conf)
        self.conf.register_opts(interface.OPTS)
        self.conf.register_opts(external_process.OPTS)
        self.conf.register_opts(pd.OPTS)
        self.conf.register_opts(ra.OPTS)
        self.conf.set_override('interface_driver',
                               'neutron.agent.linux.interface.NullDriver')
        self.conf.set_override('send_arp_for_ha', 1)
        self.conf.set_override('state_path', cfg.CONF.state_path)
        self.conf.set_override('pd_dhcp_driver', '')
        # Patch out filesystem/command helpers the agent would invoke.
        self.device_exists_p = mock.patch(
            'neutron.agent.linux.ip_lib.device_exists')
        self.device_exists = self.device_exists_p.start()
        self.ensure_dir = mock.patch('neutron.common.utils.ensure_dir').start()
        mock.patch('neutron.agent.linux.keepalived.KeepalivedManager'
                   '.get_full_config_file_path').start()
        self.utils_exec_p = mock.patch(
            'neutron.agent.linux.utils.execute')
        self.utils_exec = self.utils_exec_p.start()
        self.utils_replace_file_p = mock.patch(
            'neutron.common.utils.replace_file')
        self.utils_replace_file = self.utils_replace_file_p.start()
        self.external_process_p = mock.patch(
            'neutron.agent.linux.external_process.ProcessManager')
        self.external_process = self.external_process_p.start()
        self.process_monitor = mock.patch(
            'neutron.agent.linux.external_process.ProcessMonitor').start()
        self.send_adv_notif_p = mock.patch(
            'neutron.agent.linux.ip_lib.send_ip_addr_adv_notif')
        self.send_adv_notif = self.send_adv_notif_p.start()
        # The interface driver and the ip_lib wrappers become MagicMocks
        # so tests can assert on plug/unplug/init_router_port calls.
        self.dvr_cls_p = mock.patch('neutron.agent.linux.interface.NullDriver')
        driver_cls = self.dvr_cls_p.start()
        self.mock_driver = mock.MagicMock()
        self.mock_driver.DEV_NAME_LEN = (
            interface.LinuxInterfaceDriver.DEV_NAME_LEN)
        driver_cls.return_value = self.mock_driver
        self.ip_cls_p = mock.patch('neutron.agent.linux.ip_lib.IPWrapper')
        ip_cls = self.ip_cls_p.start()
        self.mock_ip = mock.MagicMock()
        ip_cls.return_value = self.mock_ip
        ip_rule = mock.patch('neutron.agent.linux.ip_lib.IPRule').start()
        self.mock_rule = mock.MagicMock()
        ip_rule.return_value = self.mock_rule
        ip_dev = mock.patch('neutron.agent.linux.ip_lib.IPDevice').start()
        self.mock_ip_dev = mock.MagicMock()
        ip_dev.return_value = self.mock_ip_dev
        # RPC to the L3 plugin is fully mocked; tests program its
        # return values / side effects per scenario.
        self.l3pluginApi_cls_p = mock.patch(
            'neutron.agent.l3.agent.L3PluginApi')
        l3pluginApi_cls = self.l3pluginApi_cls_p.start()
        self.plugin_api = mock.MagicMock()
        l3pluginApi_cls.return_value = self.plugin_api
        self.looping_call_p = mock.patch(
            'oslo_service.loopingcall.FixedIntervalLoopingCall')
        self.looping_call_p.start()
        # Two pre-built SNAT ports on distinct subnets for DVR tests.
        subnet_id_1 = _uuid()
        subnet_id_2 = _uuid()
        self.snat_ports = [{'subnets': [{'cidr': '152.2.0.0/16',
                                         'gateway_ip': '152.2.0.1',
                                         'id': subnet_id_1}],
                           'network_id': _uuid(),
                           'device_owner':
                           lib_constants.DEVICE_OWNER_ROUTER_SNAT,
                           'mac_address': 'fa:16:3e:80:8d:80',
                           'fixed_ips': [{'subnet_id': subnet_id_1,
                                          'ip_address': '152.2.0.13',
                                          'prefixlen': 16}],
                           'id': _uuid(), 'device_id': _uuid()},
                          {'subnets': [{'cidr': '152.10.0.0/16',
                                        'gateway_ip': '152.10.0.1',
                                        'id': subnet_id_2}],
                           'network_id': _uuid(),
                           'device_owner':
                           lib_constants.DEVICE_OWNER_ROUTER_SNAT,
                           'mac_address': 'fa:16:3e:80:8d:80',
                           'fixed_ips': [{'subnet_id': subnet_id_2,
                                          'ip_address': '152.10.0.13',
                                          'prefixlen': 16}],
                           'id': _uuid(), 'device_id': _uuid()}]
        self.ri_kwargs = {'agent_conf': self.conf,
                          'interface_driver': self.mock_driver}
    def _process_router_instance_for_agent(self, agent, ri, router):
        """Attach *router* to *ri*, lazily create its radvd monitor, and
        run a full ri.process() pass under *agent*."""
        ri.router = router
        if not ri.radvd:
            ri.radvd = ra.DaemonMonitor(router['id'],
                                        ri.ns_name,
                                        agent.process_monitor,
                                        ri.get_internal_device_name,
                                        self.conf)
        ri.process(agent)
class TestBasicRouterOperations(BasicRouterOperationsFramework):
    def test_request_id_changes(self):
        # Each access to context.request_id must produce a fresh id.
        a = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        self.assertNotEqual(a.context.request_id, a.context.request_id)
    def test_init_ha_conf(self):
        # Agent startup must create the HA configuration directory.
        with mock.patch('os.path.dirname', return_value='/etc/ha/'):
            l3_agent.L3NATAgent(HOSTNAME, self.conf)
            self.ensure_dir.assert_called_once_with('/etc/ha/')
    def test_enqueue_state_change_router_not_found(self):
        # A state change for an unknown router id must not raise.
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        non_existent_router = 42
        # Make sure the exceptional code path has coverage
        agent.enqueue_state_change(non_existent_router, 'master')
    def test_enqueue_state_change_metadata_disable(self):
        # When the metadata proxy is disabled, a state change must not
        # touch the metadata proxy at all.
        self.conf.set_override('enable_metadata_proxy', False)
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        router = mock.Mock()
        router_info = mock.MagicMock()
        agent.router_info[router.id] = router_info
        agent._update_metadata_proxy = mock.Mock()
        agent.enqueue_state_change(router.id, 'master')
        self.assertFalse(agent._update_metadata_proxy.call_count)
    def test_periodic_sync_routers_task_raise_exception(self):
        # A failing sync must propagate the error and flag a full resync.
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        self.plugin_api.get_router_ids.return_value = ['fake_id']
        self.plugin_api.get_routers.side_effect = ValueError
        self.assertRaises(ValueError,
                          agent.periodic_sync_routers_task,
                          agent.context)
        self.assertTrue(agent.fullsync)
    def test_l3_initial_report_state_done(self):
        # The very first report_state carries start_flag, which must be
        # cleared after the agent has started.
        with mock.patch.object(l3_agent.L3NATAgentWithStateReport,
                               'periodic_sync_routers_task'),\
                mock.patch.object(agent_rpc.PluginReportStateAPI,
                                  'report_state') as report_state,\
                mock.patch.object(eventlet, 'spawn_n'):
            agent = l3_agent.L3NATAgentWithStateReport(host=HOSTNAME,
                                                       conf=self.conf)
            self.assertTrue(agent.agent_state['start_flag'])
            agent.after_start()
            report_state.assert_called_once_with(agent.context,
                                                 agent.agent_state,
                                                 True)
            self.assertIsNone(agent.agent_state.get('start_flag'))
    def test_report_state_revival_logic(self):
        # An AGENT_REVIVED reply must trigger a full resync; AGENT_ALIVE
        # must leave the flag untouched.
        with mock.patch.object(agent_rpc.PluginReportStateAPI,
                               'report_state') as report_state:
            agent = l3_agent.L3NATAgentWithStateReport(host=HOSTNAME,
                                                       conf=self.conf)
            report_state.return_value = n_const.AGENT_REVIVED
            agent._report_state()
            self.assertTrue(agent.fullsync)
            agent.fullsync = False
            report_state.return_value = n_const.AGENT_ALIVE
            agent._report_state()
            self.assertFalse(agent.fullsync)
    def test_periodic_sync_routers_task_call_clean_stale_namespaces(self):
        # A successful sync clears the stale-namespace cleanup flag.
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        self.plugin_api.get_routers.return_value = []
        agent.periodic_sync_routers_task(agent.context)
        self.assertFalse(agent.namespaces_manager._clean_stale)
    def test_periodic_sync_routers_task_call_clean_stale_meta_proxies(self):
        # Metadata proxies belonging to namespaces whose router no
        # longer exists must be destroyed during the periodic sync.
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        stale_router_ids = [_uuid(), _uuid()]
        active_routers = [{'id': _uuid()}, {'id': _uuid()}]
        self.plugin_api.get_router_ids.return_value = [r['id'] for r
                                                       in active_routers]
        self.plugin_api.get_routers.return_value = active_routers
        namespace_list = [namespaces.NS_PREFIX + r_id
                          for r_id in stale_router_ids]
        namespace_list += [namespaces.NS_PREFIX + r['id']
                           for r in active_routers]
        self.mock_ip.get_namespaces.return_value = namespace_list
        driver = metadata_driver.MetadataDriver
        with mock.patch.object(
                driver, 'destroy_monitored_metadata_proxy') as destroy_proxy:
            agent.periodic_sync_routers_task(agent.context)
            expected_calls = [mock.call(mock.ANY, r_id, agent.conf)
                              for r_id in stale_router_ids]
            self.assertEqual(len(stale_router_ids), destroy_proxy.call_count)
            destroy_proxy.assert_has_calls(expected_calls, any_order=True)
def test_router_info_create(self):
id = _uuid()
ri = l3router.RouterInfo(id, {}, **self.ri_kwargs)
self.assertTrue(ri.ns_name.endswith(id))
    def test_router_info_create_with_router(self):
        # RouterInfo must keep the router dict it is given and embed the
        # passed id in its namespace name.
        ns_id = _uuid()
        subnet_id = _uuid()
        ex_gw_port = {'id': _uuid(),
                      'network_id': _uuid(),
                      'fixed_ips': [{'ip_address': '19.4.4.4',
                                     'prefixlen': 24,
                                     'subnet_id': subnet_id}],
                      'subnets': [{'id': subnet_id,
                                   'cidr': '19.4.4.0/24',
                                   'gateway_ip': '19.4.4.1'}]}
        router = {
            'id': _uuid(),
            'enable_snat': True,
            'routes': [],
            'gw_port': ex_gw_port}
        ri = l3router.RouterInfo(ns_id, router, **self.ri_kwargs)
        self.assertTrue(ri.ns_name.endswith(ns_id))
        self.assertEqual(router, ri.router)
    def test_agent_create(self):
        # Smoke test: constructing the agent alone must succeed.
        l3_agent.L3NATAgent(HOSTNAME, self.conf)
    def _test_internal_network_action(self, action):
        """Drive internal_network_added/_removed on a legacy router and
        assert the expected driver plug/unplug calls.  *action* is
        'add' or 'remove'; anything else raises."""
        router = l3_test_common.prepare_router_data(num_internal_ports=2)
        router_id = router['id']
        ri = l3router.RouterInfo(router_id, router, **self.ri_kwargs)
        port = {'network_id': _uuid(),
                'id': _uuid(),
                'mac_address': 'ca:fe:de:ad:be:ef',
                'fixed_ips': [{'subnet_id': _uuid(),
                               'ip_address': '99.0.1.9',
                               'prefixlen': 24}]}
        interface_name = ri.get_internal_device_name(port['id'])
        if action == 'add':
            self.device_exists.return_value = False
            ri.internal_network_added(port)
            self.assertEqual(1, self.mock_driver.plug.call_count)
            self.assertEqual(1, self.mock_driver.init_router_port.call_count)
            # Adding the port must announce its address via gratuitous ARP.
            self.send_adv_notif.assert_called_once_with(ri.ns_name,
                                                        interface_name,
                                                        '99.0.1.9', mock.ANY)
        elif action == 'remove':
            self.device_exists.return_value = True
            ri.internal_network_removed(port)
            self.assertEqual(1, self.mock_driver.unplug.call_count)
        else:
            raise Exception("Invalid action %s" % action)
@staticmethod
def _fixed_ip_cidr(fixed_ip):
return '%s/%s' % (fixed_ip['ip_address'], fixed_ip['prefixlen'])
    def _test_internal_network_action_dist(self, action):
        """Drive internal_network_added/_removed on a DVR edge router in
        dvr_snat mode and assert the SNAT redirect / ARP bookkeeping.
        *action* is 'add' or 'remove'."""
        router = l3_test_common.prepare_router_data(num_internal_ports=2)
        router_id = router['id']
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        ri = dvr_router.DvrEdgeRouter(
            agent, HOSTNAME, router_id, router, **self.ri_kwargs)
        subnet_id = _uuid()
        port = {'network_id': _uuid(),
                'id': _uuid(),
                'mac_address': 'ca:fe:de:ad:be:ef',
                'fixed_ips': [{'subnet_id': subnet_id,
                               'ip_address': '99.0.1.9',
                               'prefixlen': 24}],
                'subnets': [{'id': subnet_id}]}
        ri.router['gw_port_host'] = HOSTNAME
        agent.host = HOSTNAME
        agent.conf.agent_mode = 'dvr_snat'
        sn_port = {'fixed_ips': [{'ip_address': '20.0.0.31',
                                  'subnet_id': _uuid()}],
                   'subnets': [{'gateway_ip': '20.0.0.1'}],
                   'extra_subnets': [{'cidr': '172.16.0.0/24'}],
                   'id': _uuid(),
                   'network_id': _uuid(),
                   'mac_address': 'ca:fe:de:ad:be:ef'}
        ex_gw_port = {'fixed_ips': [{'ip_address': '20.0.0.30',
                                     'prefixlen': 24,
                                     'subnet_id': _uuid()}],
                      'subnets': [{'gateway_ip': '20.0.0.1'}],
                      'extra_subnets': [{'cidr': '172.16.0.0/24'}],
                      'id': _uuid(),
                      portbindings.HOST_ID: HOSTNAME,
                      'network_id': _uuid(),
                      'mac_address': 'ca:fe:de:ad:be:ef'}
        ri.snat_ports = sn_port
        ri.ex_gw_port = ex_gw_port
        ri.snat_namespace = mock.Mock()
        if action == 'add':
            self.device_exists.return_value = False
            ri.get_snat_port_for_internal_port = mock.Mock(
                return_value=sn_port)
            ri._snat_redirect_add = mock.Mock()
            ri._set_subnet_arp_info = mock.Mock()
            ri._internal_network_added = mock.Mock()
            ri._set_subnet_arp_info = mock.Mock()
            ri.internal_network_added(port)
            self.assertEqual(1, ri._snat_redirect_add.call_count)
            # Called once for the qrouter side and once for the snat ns.
            self.assertEqual(2, ri._internal_network_added.call_count)
            ri._set_subnet_arp_info.assert_called_once_with(subnet_id)
            ri._internal_network_added.assert_called_with(
                dvr_snat_ns.SnatNamespace.get_snat_ns_name(ri.router['id']),
                sn_port['network_id'],
                sn_port['id'],
                sn_port['fixed_ips'],
                sn_port['mac_address'],
                ri._get_snat_int_device_name(sn_port['id']),
                dvr_snat_ns.SNAT_INT_DEV_PREFIX,
                mtu=None)
        elif action == 'remove':
            self.device_exists.return_value = False
            ri.get_snat_port_for_internal_port = mock.Mock(
                return_value=sn_port)
            ri._delete_arp_cache_for_internal_port = mock.Mock()
            ri._snat_redirect_modify = mock.Mock()
            ri.internal_network_removed(port)
            self.assertEqual(
                1, ri._delete_arp_cache_for_internal_port.call_count)
            ri._snat_redirect_modify.assert_called_with(
                sn_port, port,
                ri.get_internal_device_name(port['id']),
                is_add=False)
    def test_agent_add_internal_network(self):
        # Legacy router: adding an internal network plugs the interface.
        self._test_internal_network_action('add')
    def test_agent_add_internal_network_dist(self):
        # DVR router: adding an internal network sets up SNAT redirects.
        self._test_internal_network_action_dist('add')
    def test_agent_remove_internal_network(self):
        # Legacy router: removing an internal network unplugs the device.
        self._test_internal_network_action('remove')
    def test_agent_remove_internal_network_dist(self):
        # DVR router: removing an internal network tears down redirects.
        self._test_internal_network_action_dist('remove')
    def _add_external_gateway(self, ri, router, ex_gw_port, interface_name,
                              use_fake_fip=False,
                              no_subnet=False, no_sub_gw=None,
                              dual_stack=False):
        """Call external_gateway_added() and assert the driver calls and
        gratuitous-ARP notifications expected for the given topology
        (legacy vs distributed, with/without subnet, single/dual stack)."""
        self.device_exists.return_value = False
        if no_sub_gw is None:
            no_sub_gw = []
        if use_fake_fip:
            fake_fip = {'floatingips': [{'id': _uuid(),
                                         'floating_ip_address': '192.168.1.34',
                                         'fixed_ip_address': '192.168.0.1',
                                         'port_id': _uuid()}]}
            router[lib_constants.FLOATINGIP_KEY] = fake_fip['floatingips']
        ri.external_gateway_added(ex_gw_port, interface_name)
        if not router.get('distributed'):
            self.assertEqual(1, self.mock_driver.plug.call_count)
            self.assertEqual(1, self.mock_driver.init_router_port.call_count)
            if no_subnet and not dual_stack:
                # No subnet means no address to announce via ARP.
                self.assertEqual(0, self.send_adv_notif.call_count)
                ip_cidrs = []
                kwargs = {'preserve_ips': [],
                          'namespace': 'qrouter-' + router['id'],
                          'extra_subnets': [],
                          'clean_connections': True}
            else:
                exp_arp_calls = [mock.call(ri.ns_name, interface_name,
                                           '20.0.0.30', mock.ANY)]
                if dual_stack and not no_sub_gw:
                    exp_arp_calls += [mock.call(ri.ns_name, interface_name,
                                                '2001:192:168:100::2',
                                                mock.ANY)]
                self.send_adv_notif.assert_has_calls(exp_arp_calls)
                ip_cidrs = ['20.0.0.30/24']
                if dual_stack:
                    if not no_sub_gw:
                        ip_cidrs.append('2001:192:168:100::2/64')
                kwargs = {'preserve_ips': ['192.168.1.34/32'],
                          'namespace': 'qrouter-' + router['id'],
                          'extra_subnets': [{'cidr': '172.16.0.0/24'}],
                          'clean_connections': True}
            self.mock_driver.init_router_port.assert_called_with(
                interface_name, ip_cidrs, **kwargs)
        else:
            # Distributed routers delegate to the DVR gateway helper.
            ri._create_dvr_gateway.assert_called_once_with(
                ex_gw_port, interface_name)
    def _test_external_gateway_action(self, action, router, dual_stack=False):
        """Exercise external gateway add/remove variants on *router*.

        *action* is one of 'add', 'add_no_sub', 'add_no_sub_v6_gw' or
        'remove'; anything else raises.  Handles both legacy and
        distributed routers (the latter via a DvrEdgeRouter with a real
        SNAT namespace and a mocked DVR gateway).
        """
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        ex_net_id = _uuid()
        sn_port = self.snat_ports[1]
        # Special setup for dvr routers
        if router.get('distributed'):
            agent.conf.agent_mode = 'dvr_snat'
            agent.host = HOSTNAME
            ri = dvr_router.DvrEdgeRouter(agent,
                                          HOSTNAME,
                                          router['id'],
                                          router,
                                          **self.ri_kwargs)
            ri._create_dvr_gateway = mock.Mock()
            ri.get_snat_interfaces = mock.Mock(return_value=self.snat_ports)
            ri.snat_ports = self.snat_ports
            ri._create_snat_namespace()
            ri.fip_ns = agent.get_fip_ns(ex_net_id)
            ri.internal_ports = self.snat_ports
        else:
            ri = l3router.RouterInfo(
                router['id'], router,
                **self.ri_kwargs)
        ri.use_ipv6 = False
        subnet_id = _uuid()
        fixed_ips = [{'subnet_id': subnet_id,
                      'ip_address': '20.0.0.30',
                      'prefixlen': 24}]
        subnets = [{'id': subnet_id,
                    'cidr': '20.0.0.0/24',
                    'gateway_ip': '20.0.0.1'}]
        if dual_stack:
            ri.use_ipv6 = True
            subnet_id_v6 = _uuid()
            fixed_ips.append({'subnet_id': subnet_id_v6,
                              'ip_address': '2001:192:168:100::2',
                              'prefixlen': 64})
            subnets.append({'id': subnet_id_v6,
                            'cidr': '2001:192:168:100::/64',
                            'gateway_ip': '2001:192:168:100::1'})
        ex_gw_port = {'fixed_ips': fixed_ips,
                      'subnets': subnets,
                      'extra_subnets': [{'cidr': '172.16.0.0/24'}],
                      'id': _uuid(),
                      'network_id': ex_net_id,
                      'mac_address': 'ca:fe:de:ad:be:ef'}
        ex_gw_port_no_sub = {'fixed_ips': [],
                             'id': _uuid(),
                             'network_id': ex_net_id,
                             'mac_address': 'ca:fe:de:ad:be:ef'}
        interface_name = ri.get_external_device_name(ex_gw_port['id'])
        if action == 'add':
            self._add_external_gateway(ri, router, ex_gw_port, interface_name,
                                       use_fake_fip=True,
                                       dual_stack=dual_stack)
        elif action == 'add_no_sub':
            ri.use_ipv6 = True
            self._add_external_gateway(ri, router, ex_gw_port_no_sub,
                                       interface_name,
                                       no_subnet=True)
        elif action == 'add_no_sub_v6_gw':
            ri.use_ipv6 = True
            self.conf.set_override('ipv6_gateway',
                                   'fe80::f816:3eff:fe2e:1')
            if dual_stack:
                use_fake_fip = True
                # Remove v6 entries
                del ex_gw_port['fixed_ips'][-1]
                del ex_gw_port['subnets'][-1]
            else:
                use_fake_fip = False
                ex_gw_port = ex_gw_port_no_sub
            self._add_external_gateway(ri, router, ex_gw_port,
                                       interface_name, no_subnet=True,
                                       no_sub_gw='fe80::f816:3eff:fe2e:1',
                                       use_fake_fip=use_fake_fip,
                                       dual_stack=dual_stack)
        elif action == 'remove':
            self.device_exists.return_value = True
            ri.get_snat_port_for_internal_port = mock.Mock(
                return_value=sn_port)
            ri._snat_redirect_remove = mock.Mock()
            ri.router['gw_port'] = ""
            ri.external_gateway_removed(ex_gw_port, interface_name)
            if not router.get('distributed'):
                self.mock_driver.unplug.assert_called_once_with(
                    interface_name,
                    bridge=agent.conf.external_network_bridge,
                    namespace=mock.ANY,
                    prefix=mock.ANY)
            else:
                ri._snat_redirect_remove.assert_called_with(
                    sn_port, sn_port,
                    ri.get_internal_device_name(sn_port['id']))
                ri.get_snat_port_for_internal_port.assert_called_with(
                    mock.ANY, ri.snat_ports)
        else:
            raise Exception("Invalid action %s" % action)
def _test_external_gateway_updated(self, dual_stack=False):
router = l3_test_common.prepare_router_data(num_internal_ports=2)
ri = l3router.RouterInfo(router['id'], router, **self.ri_kwargs)
ri.use_ipv6 = False
interface_name, ex_gw_port = l3_test_common.prepare_ext_gw_test(
self, ri, dual_stack=dual_stack)
fake_fip = {'floatingips': [{'id': _uuid(),
'floating_ip_address': '192.168.1.34',
'fixed_ip_address': '192.168.0.1',
'port_id': _uuid()}]}
router[lib_constants.FLOATINGIP_KEY] = fake_fip['floatingips']
ri.external_gateway_updated(ex_gw_port, interface_name)
self.assertEqual(1, self.mock_driver.plug.call_count)
self.assertEqual(1, self.mock_driver.init_router_port.call_count)
exp_arp_calls = [mock.call(ri.ns_name, interface_name,
'20.0.0.30', mock.ANY)]
if dual_stack:
ri.use_ipv6 = True
exp_arp_calls += [mock.call(ri.ns_name, interface_name,
'2001:192:168:100::2', mock.ANY)]
self.send_adv_notif.assert_has_calls(exp_arp_calls)
ip_cidrs = ['20.0.0.30/24']
gateway_ips = ['20.0.0.1']
if dual_stack:
ip_cidrs.append('2001:192:168:100::2/64')
gateway_ips.append('2001:192:168:100::1')
kwargs = {'preserve_ips': ['192.168.1.34/32'],
'namespace': 'qrouter-' + router['id'],
'extra_subnets': [{'cidr': '172.16.0.0/24'}],
'clean_connections': True}
self.mock_driver.init_router_port.assert_called_with(interface_name,
ip_cidrs,
**kwargs)
def test_external_gateway_updated(self):
self._test_external_gateway_updated()
def test_external_gateway_updated_dual_stack(self):
self._test_external_gateway_updated(dual_stack=True)
def test_dvr_edge_router_init_for_snat_namespace_object(self):
router = {'id': _uuid()}
ri = dvr_router.DvrEdgeRouter(mock.Mock(),
HOSTNAME,
router['id'],
router,
**self.ri_kwargs)
# Make sure that ri.snat_namespace object is created when the
# router is initialized
self.assertIsNotNone(ri.snat_namespace)
    def test_ext_gw_updated_calling_snat_ns_delete_if_gw_port_host_none(
            self):
        """Test to check the impact of snat_namespace object.

        This function specifically checks the impact of the snat
        namespace object value on external_gateway_removed for deleting
        snat_namespace when the gw_port_host mismatches or none.
        """
        router = l3_test_common.prepare_router_data(num_internal_ports=2)
        ri = dvr_router.DvrEdgeRouter(mock.Mock(),
                                      HOSTNAME,
                                      router['id'],
                                      router,
                                      **self.ri_kwargs)
        with mock.patch.object(dvr_snat_ns.SnatNamespace,
                               'delete') as snat_ns_delete:
            interface_name, ex_gw_port = l3_test_common.prepare_ext_gw_test(
                self, ri)
            # An empty gw_port_host can never match this agent's host, so
            # the gateway update must tear down the snat namespace.
            router['gw_port_host'] = ''
            ri._snat_redirect_remove = mock.Mock()
            ri.external_gateway_updated(ex_gw_port, interface_name)
            if router['gw_port_host'] != ri.host:
                # Only the namespace delete is expected; no snat redirect
                # rules should be touched on this (non-hosting) agent.
                self.assertFalse(ri._snat_redirect_remove.called)
                self.assertEqual(1, snat_ns_delete.call_count)
    @mock.patch.object(namespaces.Namespace, 'delete')
    def test_snat_ns_delete_not_called_when_snat_namespace_does_not_exist(
            self, mock_ns_del):
        """Test to check the impact of snat_namespace object.

        This function specifically checks the impact of the snat
        namespace object initialization without the actual creation
        of snat_namespace. When deletes are issued to the snat
        namespace based on the snat namespace object existence, it
        should be checking for the valid namespace existence before
        it tries to delete.
        """
        router = l3_test_common.prepare_router_data(num_internal_ports=2)
        ri = dvr_router.DvrEdgeRouter(mock.Mock(),
                                      HOSTNAME,
                                      router['id'],
                                      router,
                                      **self.ri_kwargs)
        # Make sure we set a return value to emulate the non existence
        # of the namespace.
        self.mock_ip.netns.exists.return_value = False
        self.assertIsNotNone(ri.snat_namespace)
        interface_name, ex_gw_port = l3_test_common.prepare_ext_gw_test(self,
                                                                        ri)
        ri._external_gateway_removed = mock.Mock()
        ri.external_gateway_removed(ex_gw_port, interface_name)
        # The namespace does not exist, so Namespace.delete must be skipped.
        self.assertFalse(mock_ns_del.called)
    def _test_ext_gw_updated_dvr_edge_router(self, host_match,
                                             snat_hosted_before=True):
        """Helper to test external gw update for edge router on dvr_snat
        agent.

        :param host_match: True if new gw host should be the same as agent
            host
        :param snat_hosted_before: True if agent has already been hosting
            snat for the router
        """
        router = l3_test_common.prepare_router_data(num_internal_ports=2)
        ri = dvr_router.DvrEdgeRouter(mock.Mock(),
                                      HOSTNAME,
                                      router['id'],
                                      router,
                                      **self.ri_kwargs)
        if snat_hosted_before:
            ri._create_snat_namespace()
            snat_ns_name = ri.snat_namespace.name
        interface_name, ex_gw_port = l3_test_common.prepare_ext_gw_test(self,
                                                                        ri)
        ri._external_gateway_added = mock.Mock()
        # Pick a gw host that either matches or mismatches this agent.
        router['gw_port_host'] = ri.host if host_match else (ri.host + 'foo')
        ri.external_gateway_updated(ex_gw_port, interface_name)
        if not host_match:
            self.assertFalse(ri._external_gateway_added.called)
            if snat_hosted_before:
                # host mismatch means that snat was rescheduled to another
                # agent, hence need to verify that gw port was unplugged and
                # snat namespace was deleted
                self.mock_driver.unplug.assert_called_with(
                    interface_name,
                    bridge=self.conf.external_network_bridge,
                    namespace=snat_ns_name,
                    prefix=l3_agent.EXTERNAL_DEV_PREFIX)
        else:
            if not snat_hosted_before:
                self.assertIsNotNone(ri.snat_namespace)
            self.assertTrue(ri._external_gateway_added.called)
def test_ext_gw_updated_dvr_edge_router(self):
self._test_ext_gw_updated_dvr_edge_router(host_match=True)
def test_ext_gw_updated_dvr_edge_router_host_mismatch(self):
self._test_ext_gw_updated_dvr_edge_router(host_match=False)
def test_ext_gw_updated_dvr_dvr_edge_router_snat_rescheduled(self):
self._test_ext_gw_updated_dvr_edge_router(host_match=True,
snat_hosted_before=False)
def test_agent_add_external_gateway(self):
router = l3_test_common.prepare_router_data(num_internal_ports=2)
self._test_external_gateway_action('add', router)
def test_agent_add_external_gateway_dual_stack(self):
router = l3_test_common.prepare_router_data(num_internal_ports=2)
self._test_external_gateway_action('add', router, dual_stack=True)
def test_agent_add_external_gateway_dist(self):
router = l3_test_common.prepare_router_data(num_internal_ports=2)
router['distributed'] = True
router['gw_port_host'] = HOSTNAME
self._test_external_gateway_action('add', router)
def test_agent_add_external_gateway_dist_dual_stack(self):
router = l3_test_common.prepare_router_data(num_internal_ports=2)
router['distributed'] = True
router['gw_port_host'] = HOSTNAME
self._test_external_gateway_action('add', router, dual_stack=True)
def test_agent_add_external_gateway_no_subnet(self):
router = l3_test_common.prepare_router_data(num_internal_ports=2,
v6_ext_gw_with_sub=False)
self._test_external_gateway_action('add_no_sub', router)
def test_agent_add_external_gateway_no_subnet_with_ipv6_gw(self):
router = l3_test_common.prepare_router_data(num_internal_ports=2,
v6_ext_gw_with_sub=False)
self._test_external_gateway_action('add_no_sub_v6_gw', router)
def test_agent_add_external_gateway_dual_stack_no_subnet_w_ipv6_gw(self):
router = l3_test_common.prepare_router_data(num_internal_ports=2,
v6_ext_gw_with_sub=False)
self._test_external_gateway_action('add_no_sub_v6_gw',
router, dual_stack=True)
def test_agent_remove_external_gateway(self):
router = l3_test_common.prepare_router_data(num_internal_ports=2)
self._test_external_gateway_action('remove', router)
def test_agent_remove_external_gateway_dual_stack(self):
router = l3_test_common.prepare_router_data(num_internal_ports=2)
self._test_external_gateway_action('remove', router, dual_stack=True)
def test_agent_remove_external_gateway_dist(self):
router = l3_test_common.prepare_router_data(num_internal_ports=2)
router['distributed'] = True
router['gw_port_host'] = HOSTNAME
self._test_external_gateway_action('remove', router)
def test_agent_remove_external_gateway_dist_dual_stack(self):
router = l3_test_common.prepare_router_data(num_internal_ports=2)
router['distributed'] = True
router['gw_port_host'] = HOSTNAME
self._test_external_gateway_action('remove', router, dual_stack=True)
def _verify_snat_mangle_rules(self, nat_rules, mangle_rules, router,
negate=False):
interfaces = router[lib_constants.INTERFACE_KEY]
source_cidrs = []
for iface in interfaces:
for subnet in iface['subnets']:
prefix = subnet['cidr'].split('/')[1]
source_cidr = "%s/%s" % (iface['fixed_ips'][0]['ip_address'],
prefix)
source_cidrs.append(source_cidr)
source_nat_ip = router['gw_port']['fixed_ips'][0]['ip_address']
interface_name = ('qg-%s' % router['gw_port']['id'])[:14]
expected_rules = [
'! -i %s ! -o %s -m conntrack ! --ctstate DNAT -j ACCEPT' %
(interface_name, interface_name),
'-o %s -j SNAT --to-source %s' % (interface_name, source_nat_ip),
'-m mark ! --mark 0x2/%s -m conntrack --ctstate DNAT '
'-j SNAT --to-source %s' %
(n_const.ROUTER_MARK_MASK, source_nat_ip)]
for r in nat_rules:
if negate:
self.assertNotIn(r.rule, expected_rules)
else:
self.assertIn(r.rule, expected_rules)
expected_rules = [
'-i %s -j MARK --set-xmark 0x2/%s' %
(interface_name, n_const.ROUTER_MARK_MASK),
'-o %s -m connmark --mark 0x0/%s -j CONNMARK '
'--save-mark --nfmask %s --ctmask %s' %
(interface_name,
l3router.ADDRESS_SCOPE_MARK_MASK,
l3router.ADDRESS_SCOPE_MARK_MASK,
l3router.ADDRESS_SCOPE_MARK_MASK)]
for r in mangle_rules:
if negate:
self.assertNotIn(r.rule, expected_rules)
else:
self.assertIn(r.rule, expected_rules)
    def test_get_snat_port_for_internal_port(self):
        """get_snat_port_for_internal_port matches snat ports by subnet_id.

        A snat interface whose fixed IP shares the internal port's subnet
        is returned; after mutating the subnet_id so nothing matches, the
        lookup must return None.
        """
        router = l3_test_common.prepare_router_data(num_internal_ports=4)
        ri = dvr_router.DvrEdgeRouter(mock.sentinel.agent,
                                      HOSTNAME,
                                      router['id'],
                                      router,
                                      **self.ri_kwargs)
        test_port = {
            'mac_address': '00:12:23:34:45:56',
            'fixed_ips': [{'subnet_id': l3_test_common.get_subnet_id(
                router[lib_constants.INTERFACE_KEY][0]),
                'ip_address': '101.12.13.14'}]}
        internal_ports = ri.router.get(lib_constants.INTERFACE_KEY, [])
        # test valid case
        with mock.patch.object(ri, 'get_snat_interfaces') as get_interfaces:
            get_interfaces.return_value = [test_port]
            res_port = ri.get_snat_port_for_internal_port(internal_ports[0])
            self.assertEqual(test_port, res_port)
            # test invalid case
            test_port['fixed_ips'][0]['subnet_id'] = 1234
            res_ip = ri.get_snat_port_for_internal_port(internal_ports[0])
            self.assertNotEqual(test_port, res_ip)
            self.assertIsNone(res_ip)
def test_process_cent_router(self):
router = l3_test_common.prepare_router_data()
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
ri = l3router.RouterInfo(router['id'], router, **self.ri_kwargs)
self._test_process_router(ri, agent)
def test_process_dist_router(self):
router = l3_test_common.prepare_router_data()
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
ri = dvr_router.DvrEdgeRouter(agent,
HOSTNAME,
router['id'],
router,
**self.ri_kwargs)
subnet_id = l3_test_common.get_subnet_id(
router[lib_constants.INTERFACE_KEY][0])
ri.router['distributed'] = True
ri.router['_snat_router_interfaces'] = [{
'fixed_ips': [{'subnet_id': subnet_id,
'ip_address': '1.2.3.4'}]}]
ri.router['gw_port_host'] = None
self._test_process_router(ri, agent)
    def _test_process_router(self, ri, agent):
        """Exercise RouterInfo.process through several lifecycle steps.

        Runs process() repeatedly while mutating the router dict: initial
        add, floating IP remap, gateway IP change, floating IP removal,
        and final teardown — asserting which mocked sub-steps fire at
        each stage.
        """
        router = ri.router
        agent.host = HOSTNAME
        fake_fip_id = 'fake_fip_id'
        ri.create_dvr_fip_interfaces = mock.Mock()
        ri.process_floating_ip_addresses = mock.Mock()
        ri.process_floating_ip_nat_rules = mock.Mock()
        ri.process_floating_ip_addresses.return_value = {
            fake_fip_id: 'ACTIVE'}
        ri.external_gateway_added = mock.Mock()
        ri.external_gateway_updated = mock.Mock()
        ri.process_address_scope = mock.Mock()
        fake_floatingips1 = {'floatingips': [
            {'id': fake_fip_id,
             'floating_ip_address': '8.8.8.8',
             'fixed_ip_address': '7.7.7.7',
             'port_id': _uuid(),
             'host': HOSTNAME}]}
        ri.process(agent)
        ri.process_floating_ip_addresses.assert_called_with(mock.ANY)
        ri.process_floating_ip_addresses.reset_mock()
        ri.process_floating_ip_nat_rules.assert_called_with()
        ri.process_floating_ip_nat_rules.reset_mock()
        ri.external_gateway_added.reset_mock()
        # remap floating IP to a new fixed ip
        fake_floatingips2 = copy.deepcopy(fake_floatingips1)
        fake_floatingips2['floatingips'][0]['fixed_ip_address'] = '7.7.7.8'
        router[lib_constants.FLOATINGIP_KEY] = fake_floatingips2['floatingips']
        ri.process(agent)
        ri.process_floating_ip_addresses.assert_called_with(mock.ANY)
        ri.process_floating_ip_addresses.reset_mock()
        ri.process_floating_ip_nat_rules.assert_called_with()
        ri.process_floating_ip_nat_rules.reset_mock()
        # A FIP remap must not re-add or update the external gateway.
        self.assertEqual(0, ri.external_gateway_added.call_count)
        self.assertEqual(0, ri.external_gateway_updated.call_count)
        ri.external_gateway_added.reset_mock()
        ri.external_gateway_updated.reset_mock()
        # change the ex_gw_port a bit to test gateway update
        new_gw_port = copy.deepcopy(ri.router['gw_port'])
        ri.router['gw_port'] = new_gw_port
        old_ip = (netaddr.IPAddress(ri.router['gw_port']
                                    ['fixed_ips'][0]['ip_address']))
        ri.router['gw_port']['fixed_ips'][0]['ip_address'] = str(old_ip + 1)
        ri.process(agent)
        ri.process_floating_ip_addresses.reset_mock()
        ri.process_floating_ip_nat_rules.reset_mock()
        # A changed gateway IP is an update, not a fresh add.
        self.assertEqual(0, ri.external_gateway_added.call_count)
        self.assertEqual(1, ri.external_gateway_updated.call_count)
        # remove just the floating ips
        del router[lib_constants.FLOATINGIP_KEY]
        ri.process(agent)
        ri.process_floating_ip_addresses.assert_called_with(mock.ANY)
        ri.process_floating_ip_addresses.reset_mock()
        ri.process_floating_ip_nat_rules.assert_called_with()
        ri.process_floating_ip_nat_rules.reset_mock()
        # now no ports so state is torn down
        del router[lib_constants.INTERFACE_KEY]
        del router['gw_port']
        ri.process(agent)
        self.assertEqual(1, self.send_adv_notif.call_count)
        distributed = ri.router.get('distributed', False)
        # On teardown, FIP processing only happens for distributed routers.
        self.assertEqual(distributed, ri.process_floating_ip_addresses.called)
        self.assertEqual(distributed, ri.process_floating_ip_nat_rules.called)
    def _test_process_floating_ip_addresses_add(self, ri, agent):
        """Drive process_floating_ip_addresses for a router with one FIP.

        Expects the FIP to be reported ACTIVE and add_floating_ip to be
        invoked exactly once with the external device.
        """
        floating_ips = ri.get_floating_ips()
        fip_id = floating_ips[0]['id']
        device = self.mock_ip_dev
        # No addresses on the device yet, so the FIP counts as new.
        device.addr.list.return_value = []
        ri.iptables_manager.ipv4['nat'] = mock.MagicMock()
        ex_gw_port = {'id': _uuid(), 'network_id': mock.sentinel.ext_net_id}
        ri.add_floating_ip = mock.Mock(
            return_value=lib_constants.FLOATINGIP_STATUS_ACTIVE)
        with mock.patch.object(lla.LinkLocalAllocator, '_write'):
            if ri.router['distributed']:
                # DVR needs the fip namespace wired up before processing.
                ri.fip_ns = agent.get_fip_ns(ex_gw_port['network_id'])
                ri.create_dvr_fip_interfaces(ex_gw_port)
            fip_statuses = ri.process_floating_ip_addresses(
                mock.sentinel.interface_name)
        self.assertEqual({fip_id: lib_constants.FLOATINGIP_STATUS_ACTIVE},
                         fip_statuses)
        ri.add_floating_ip.assert_called_once_with(
            floating_ips[0], mock.sentinel.interface_name, device)
    @mock.patch.object(lla.LinkLocalAllocator, '_write')
    def test_create_dvr_fip_interfaces_if_fipnamespace_exist(self, lla_write):
        """FIP namespace is created once; later calls only re-link.

        Calls create_dvr_fip_interfaces twice and checks fip_ns.create()
        runs only on the first pass while the rtr-2-fip link is rebuilt
        both times; also exercises dist_fip_count bookkeeping.
        """
        fake_network_id = _uuid()
        subnet_id = _uuid()
        fake_floatingips = {'floatingips': [
            {'id': _uuid(),
             'floating_ip_address': '20.0.0.3',
             'fixed_ip_address': '192.168.0.1',
             'floating_network_id': _uuid(),
             'port_id': _uuid(),
             'host': HOSTNAME}]}
        agent_gateway_port = (
            [{'fixed_ips': [
                {'ip_address': '20.0.0.30',
                 'prefixlen': 24,
                 'subnet_id': subnet_id}],
             'subnets': [
                 {'id': subnet_id,
                  'cidr': '20.0.0.0/24',
                  'gateway_ip': '20.0.0.1'}],
             'id': _uuid(),
             'network_id': fake_network_id,
             'mac_address': 'ca:fe:de:ad:be:ef'}]
        )
        router = l3_test_common.prepare_router_data(enable_snat=True)
        router[lib_constants.FLOATINGIP_KEY] = fake_floatingips['floatingips']
        router[n_const.FLOATINGIP_AGENT_INTF_KEY] = agent_gateway_port
        router['distributed'] = True
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        ri = dvr_router.DvrEdgeRouter(
            agent, HOSTNAME, router['id'], router, **self.ri_kwargs)
        ext_gw_port = ri.router.get('gw_port')
        ri.fip_ns = agent.get_fip_ns(ext_gw_port['network_id'])
        ri.dist_fip_count = 0
        agent.process_router_add = mock.Mock()
        ri.fip_ns.create_rtr_2_fip_link = mock.Mock()
        with mock.patch.object(ri, 'get_floating_ips') as fips, \
                mock.patch.object(ri.fip_ns,
                                  'create') as create_fip, \
                mock.patch.object(ri, 'get_floating_agent_gw_interface'
                                  ) as fip_gw_port:
            fips.return_value = fake_floatingips
            fip_gw_port.return_value = agent_gateway_port[0]
            ri.create_dvr_fip_interfaces(ext_gw_port)
            self.assertTrue(fip_gw_port.called)
            self.assertTrue(fips.called)
            self.assertTrue(create_fip.called)
            self.assertEqual(agent_gateway_port[0],
                             ri.fip_ns.agent_gateway_port)
            # Now let us associate the fip to the router
            ri.floating_ip_added_dist(fips, "192.168.0.1/32")
            self.assertEqual(1, ri.dist_fip_count)
            # Now let us disassociate the fip from the router
            ri.floating_ip_removed_dist("192.168.0.1/32")
            self.assertEqual(0, ri.dist_fip_count)
            # Calling create_dvr_fip_interfaces again to make sure
            # that the fip namespace create is not called again.
            # If the create is not called again, that would contain
            # the duplicate rules configuration in the fip namespace.
            ri.create_dvr_fip_interfaces(ext_gw_port)
            self.assertTrue(fip_gw_port.called)
            self.assertTrue(fips.called)
            create_fip.assert_called_once_with()
            self.assertEqual(2, ri.fip_ns.create_rtr_2_fip_link.call_count)
    @mock.patch.object(lla.LinkLocalAllocator, '_write')
    def test_create_dvr_fip_interfaces_for_late_binding(self, lla_write):
        """With no pre-bound agent gateway port, it is fetched over RPC.

        The router arrives with an empty FLOATINGIP_AGENT_INTF_KEY, so
        create_dvr_fip_interfaces must ask the plugin for the agent
        gateway port and store it on the fip namespace.
        """
        fake_network_id = _uuid()
        fake_subnet_id = _uuid()
        fake_floatingips = {'floatingips': [
            {'id': _uuid(),
             'floating_ip_address': '20.0.0.3',
             'fixed_ip_address': '192.168.0.1',
             'floating_network_id': _uuid(),
             'port_id': _uuid(),
             'host': HOSTNAME}]}
        agent_gateway_port = (
            {'fixed_ips': [
                {'ip_address': '20.0.0.30',
                 'prefixlen': 24,
                 'subnet_id': fake_subnet_id}],
             'subnets': [
                 {'id': fake_subnet_id,
                  'cidr': '20.0.0.0/24',
                  'gateway_ip': '20.0.0.1'}],
             'id': _uuid(),
             'network_id': fake_network_id,
             'mac_address': 'ca:fe:de:ad:be:ef'}
        )
        router = l3_test_common.prepare_router_data(enable_snat=True)
        router[lib_constants.FLOATINGIP_KEY] = fake_floatingips['floatingips']
        # No agent gateway port known yet: forces the late-binding path.
        router[n_const.FLOATINGIP_AGENT_INTF_KEY] = []
        router['distributed'] = True
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        ri = dvr_router.DvrEdgeRouter(
            agent, HOSTNAME, router['id'], router, **self.ri_kwargs)
        ext_gw_port = ri.router.get('gw_port')
        ri.fip_ns = agent.get_fip_ns(ext_gw_port['network_id'])
        ri.dist_fip_count = 0
        ri.fip_ns.subscribe = mock.Mock()
        with mock.patch.object(agent.plugin_rpc,
                               'get_agent_gateway_port') as fip_gw_port:
            fip_gw_port.return_value = agent_gateway_port
            ri.create_dvr_fip_interfaces(ext_gw_port)
            self.assertTrue(fip_gw_port.called)
            self.assertEqual(agent_gateway_port,
                             ri.fip_ns.agent_gateway_port)
    @mock.patch.object(lla.LinkLocalAllocator, '_write')
    def test_create_dvr_fip_interfaces(self, lla_write):
        """Happy path: local agent gw port found, rtr-fip subnet created."""
        fake_network_id = _uuid()
        subnet_id = _uuid()
        fake_floatingips = {'floatingips': [
            {'id': _uuid(),
             'floating_ip_address': '20.0.0.3',
             'fixed_ip_address': '192.168.0.1',
             'floating_network_id': _uuid(),
             'port_id': _uuid(),
             'host': HOSTNAME}]}
        agent_gateway_port = (
            [{'fixed_ips': [
                {'ip_address': '20.0.0.30',
                 'prefixlen': 24,
                 'subnet_id': subnet_id}],
             'subnets': [
                 {'id': subnet_id,
                  'cidr': '20.0.0.0/24',
                  'gateway_ip': '20.0.0.1'}],
             'id': _uuid(),
             'network_id': fake_network_id,
             'mac_address': 'ca:fe:de:ad:be:ef'}]
        )
        router = l3_test_common.prepare_router_data(enable_snat=True)
        router[lib_constants.FLOATINGIP_KEY] = fake_floatingips['floatingips']
        router[n_const.FLOATINGIP_AGENT_INTF_KEY] = agent_gateway_port
        router['distributed'] = True
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        ri = dvr_router.DvrEdgeRouter(
            agent, HOSTNAME, router['id'], router, **self.ri_kwargs)
        ext_gw_port = ri.router.get('gw_port')
        ri.fip_ns = agent.get_fip_ns(ext_gw_port['network_id'])
        ri.dist_fip_count = 0
        ri.fip_ns.subscribe = mock.Mock()
        ri.fip_ns.agent_router_gateway = mock.Mock()
        agent.process_router_add = mock.Mock()
        with mock.patch.object(ri, 'get_floating_ips') as fips, \
                mock.patch.object(ri, 'get_floating_agent_gw_interface'
                                  ) as fip_gw_port:
            fips.return_value = fake_floatingips
            fip_gw_port.return_value = agent_gateway_port[0]
            ri.create_dvr_fip_interfaces(ext_gw_port)
            self.assertTrue(fip_gw_port.called)
            self.assertTrue(fips.called)
            self.assertEqual(agent_gateway_port[0],
                             ri.fip_ns.agent_gateway_port)
            # The router-to-fip link subnet must have been allocated.
            self.assertTrue(ri.rtr_fip_subnet)
    @mock.patch.object(lla.LinkLocalAllocator, '_write')
    def test_create_dvr_fip_interfaces_for_restart_l3agent_case(self,
                                                                lla_write):
        """Restart case: fip_ns.subscribe reports an existing namespace.

        Even when subscribe() returns True (namespace already known, e.g.
        after an agent restart), the agent gateway port and the rtr-fip
        subnet must still be (re)established.
        """
        fake_floatingips = {'floatingips': [
            {'id': _uuid(),
             'floating_ip_address': '20.0.0.3',
             'fixed_ip_address': '192.168.0.1',
             'floating_network_id': _uuid(),
             'port_id': _uuid(),
             'host': HOSTNAME}]}
        agent_gateway_port = (
            [{'fixed_ips': [
                {'ip_address': '20.0.0.30',
                 'prefixlen': 24,
                 'subnet_id': 'subnet_id'}],
             'subnets': [
                 {'id': 'subnet_id',
                  'cidr': '20.0.0.0/24',
                  'gateway_ip': '20.0.0.1'}],
             'id': _uuid(),
             'network_id': 'fake_network_id',
             'mac_address': 'ca:fe:de:ad:be:ef'}]
        )
        router = l3_test_common.prepare_router_data(enable_snat=True)
        router[lib_constants.FLOATINGIP_KEY] = fake_floatingips['floatingips']
        router[n_const.FLOATINGIP_AGENT_INTF_KEY] = agent_gateway_port
        router['distributed'] = True
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        ri = dvr_router.DvrEdgeRouter(
            agent, HOSTNAME, router['id'], router, **self.ri_kwargs)
        ext_gw_port = ri.router.get('gw_port')
        ri.fip_ns = agent.get_fip_ns(ext_gw_port['network_id'])
        # subscribe -> True emulates the namespace surviving a restart.
        ri.fip_ns.subscribe = mock.Mock(return_value=True)
        ri.fip_ns.agent_router_gateway = mock.Mock()
        ri.rtr_fip_subnet = None
        ri.dist_fip_count = 0
        with mock.patch.object(ri, 'get_floating_ips') as fips,\
                mock.patch.object(ri, 'get_floating_agent_gw_interface'
                                  ) as fip_gw_port:
            fips.return_value = fake_floatingips
            fip_gw_port.return_value = agent_gateway_port[0]
            ri.create_dvr_fip_interfaces(ext_gw_port)
            self.assertTrue(fip_gw_port.called)
            self.assertTrue(fips.called)
            self.assertEqual(agent_gateway_port[0],
                             ri.fip_ns.agent_gateway_port)
            self.assertTrue(ri.rtr_fip_subnet)
def test_process_router_cent_floating_ip_add(self):
fake_floatingips = {'floatingips': [
{'id': _uuid(),
'floating_ip_address': '15.1.2.3',
'fixed_ip_address': '192.168.0.1',
'status': 'DOWN',
'floating_network_id': _uuid(),
'port_id': _uuid(),
'host': HOSTNAME}]}
router = l3_test_common.prepare_router_data(enable_snat=True)
router[lib_constants.FLOATINGIP_KEY] = fake_floatingips['floatingips']
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
ri = l3router.RouterInfo(router['id'], router, **self.ri_kwargs)
ri.iptables_manager.ipv4['nat'] = mock.MagicMock()
ri.get_external_device_name = mock.Mock(return_value='exgw')
self._test_process_floating_ip_addresses_add(ri, agent)
    def test_process_router_snat_disabled(self):
        """Disabling SNAT removes exactly one nat and one mangle rule."""
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        router = l3_test_common.prepare_router_data(enable_snat=True)
        ri = l3router.RouterInfo(router['id'], router, **self.ri_kwargs)
        ri.external_gateway_added = mock.Mock()
        # Process with NAT
        ri.process(agent)
        orig_nat_rules = ri.iptables_manager.ipv4['nat'].rules[:]
        orig_mangle_rules = ri.iptables_manager.ipv4['mangle'].rules[:]
        # Reprocess without NAT
        router['enable_snat'] = False
        # Reassign the router object to RouterInfo
        ri.router = router
        ri.process(agent)
        # For some reason set logic does not work well with
        # IpTablesRule instances
        nat_rules_delta = [r for r in orig_nat_rules
                           if r not in ri.iptables_manager.ipv4['nat'].rules]
        self.assertEqual(1, len(nat_rules_delta))
        mangle_rules_delta = [
            r for r in orig_mangle_rules
            if r not in ri.iptables_manager.ipv4['mangle'].rules]
        self.assertEqual(1, len(mangle_rules_delta))
        # The rules that disappeared must be the SNAT/mark rules for the
        # gateway device.
        self._verify_snat_mangle_rules(nat_rules_delta, mangle_rules_delta,
                                       router)
        self.assertEqual(1, self.send_adv_notif.call_count)
    def test_process_router_snat_enabled(self):
        """Enabling SNAT adds exactly one nat and one mangle rule."""
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        router = l3_test_common.prepare_router_data(enable_snat=False)
        ri = l3router.RouterInfo(router['id'], router, **self.ri_kwargs)
        ri.external_gateway_added = mock.Mock()
        # Process without NAT
        ri.process(agent)
        orig_nat_rules = ri.iptables_manager.ipv4['nat'].rules[:]
        orig_mangle_rules = ri.iptables_manager.ipv4['mangle'].rules[:]
        # Reprocess with NAT
        router['enable_snat'] = True
        # Reassign the router object to RouterInfo
        ri.router = router
        ri.process(agent)
        # For some reason set logic does not work well with
        # IpTablesRule instances
        nat_rules_delta = [r for r in ri.iptables_manager.ipv4['nat'].rules
                           if r not in orig_nat_rules]
        self.assertEqual(1, len(nat_rules_delta))
        mangle_rules_delta = [
            r for r in ri.iptables_manager.ipv4['mangle'].rules
            if r not in orig_mangle_rules]
        self.assertEqual(1, len(mangle_rules_delta))
        # The rules that appeared must be the SNAT/mark rules for the
        # gateway device.
        self._verify_snat_mangle_rules(nat_rules_delta, mangle_rules_delta,
                                       router)
        self.assertEqual(1, self.send_adv_notif.call_count)
    def _test_update_routing_table(self, is_snat_host=True):
        """Routes go to the snat namespace only when hosting snat.

        NOTE(review): in the is_snat_host branch this test invokes the
        mocked ri._update_routing_table directly before asserting on it,
        which makes the snat-namespace expectation self-fulfilling —
        consider tightening so only update_routing_table() produces the
        recorded calls.
        """
        router = l3_test_common.prepare_router_data()
        uuid = router['id']
        s_netns = 'snat-' + uuid
        q_netns = 'qrouter-' + uuid
        fake_route1 = {'destination': '135.207.0.0/16',
                       'nexthop': '19.4.4.200'}
        # The qrouter namespace update is always expected.
        calls = [mock.call('replace', fake_route1, q_netns)]
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        ri = dvr_router.DvrEdgeRouter(
            agent,
            HOSTNAME,
            uuid,
            router,
            **self.ri_kwargs)
        ri._update_routing_table = mock.Mock()
        with mock.patch.object(ri, '_is_this_snat_host') as snat_host:
            snat_host.return_value = is_snat_host
            ri.update_routing_table('replace', fake_route1)
            if is_snat_host:
                ri._update_routing_table('replace', fake_route1, s_netns)
                calls += [mock.call('replace', fake_route1, s_netns)]
            ri._update_routing_table.assert_has_calls(calls, any_order=True)
def test_process_update_snat_routing_table(self):
self._test_update_routing_table()
def test_process_not_update_snat_routing_table(self):
self._test_update_routing_table(is_snat_host=False)
def test_process_router_interface_added(self):
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
router = l3_test_common.prepare_router_data()
ri = l3router.RouterInfo(router['id'], router, **self.ri_kwargs)
ri.external_gateway_added = mock.Mock()
# Process with NAT
ri.process(agent)
# Add an interface and reprocess
l3_test_common.router_append_interface(router)
# Reassign the router object to RouterInfo
ri.router = router
ri.process(agent)
# send_ip_addr_adv_notif is called both times process is called
self.assertEqual(2, self.send_adv_notif.call_count)
    def _test_process_ipv6_only_or_dual_stack_gw(self, dual_stack=False):
        """IPv4 NAT rules change only when the gateway carries IPv4.

        Processes the router once without gw_port to snapshot the NAT
        table, then again with it; for an IPv6-only gateway the v4 NAT
        helpers must not run and the rules stay identical.
        """
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        router = l3_test_common.prepare_router_data(ip_version=6,
                                                    dual_stack=dual_stack)
        # Get NAT rules without the gw_port
        gw_port = router['gw_port']
        router['gw_port'] = None
        ri = l3router.RouterInfo(router['id'], router, **self.ri_kwargs)
        ri.external_gateway_added = mock.Mock()
        self._process_router_instance_for_agent(agent, ri, router)
        orig_nat_rules = ri.iptables_manager.ipv4['nat'].rules[:]
        # Get NAT rules with the gw_port
        router['gw_port'] = gw_port
        ri = l3router.RouterInfo(router['id'], router, **self.ri_kwargs)
        # Keep references to the real rule generators so the mocks can
        # delegate to them via side_effect while recording calls.
        p = ri.external_gateway_nat_fip_rules
        s = ri.external_gateway_nat_snat_rules
        attrs_to_mock = dict(
            [(a, mock.DEFAULT) for a in
             ['external_gateway_nat_fip_rules',
              'external_gateway_nat_snat_rules']]
        )
        with mock.patch.multiple(ri, **attrs_to_mock) as mocks:
            mocks['external_gateway_nat_fip_rules'].side_effect = p
            mocks['external_gateway_nat_snat_rules'].side_effect = s
            self._process_router_instance_for_agent(agent, ri, router)
            new_nat_rules = ri.iptables_manager.ipv4['nat'].rules[:]
            # NAT rules should only change for dual_stack operation
            if dual_stack:
                self.assertTrue(
                    mocks['external_gateway_nat_fip_rules'].called)
                self.assertTrue(
                    mocks['external_gateway_nat_snat_rules'].called)
                self.assertNotEqual(orig_nat_rules, new_nat_rules)
            else:
                self.assertFalse(
                    mocks['external_gateway_nat_fip_rules'].called)
                self.assertFalse(
                    mocks['external_gateway_nat_snat_rules'].called)
                self.assertEqual(orig_nat_rules, new_nat_rules)
def test_process_ipv6_only_gw(self):
self._test_process_ipv6_only_or_dual_stack_gw()
def test_process_dual_stack_gw(self):
self._test_process_ipv6_only_or_dual_stack_gw(dual_stack=True)
def _process_router_ipv6_interface_added(
self, router, ra_mode=None, addr_mode=None):
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
ri = l3router.RouterInfo(router['id'], router, **self.ri_kwargs)
ri.external_gateway_added = mock.Mock()
# Process with NAT
ri.process(agent)
orig_nat_rules = ri.iptables_manager.ipv4['nat'].rules[:]
# Add an IPv6 interface and reprocess
l3_test_common.router_append_interface(router, count=1,
ip_version=6, ra_mode=ra_mode,
addr_mode=addr_mode)
# Reassign the router object to RouterInfo
self._process_router_instance_for_agent(agent, ri, router)
# IPv4 NAT rules should not be changed by adding an IPv6 interface
nat_rules_delta = [r for r in ri.iptables_manager.ipv4['nat'].rules
if r not in orig_nat_rules]
self.assertFalse(nat_rules_delta)
return ri
def _radvd_expected_call_external_process(self, ri, enable=True):
expected_calls = [mock.call(uuid=ri.router['id'],
service='radvd',
default_cmd_callback=mock.ANY,
namespace=ri.ns_name,
conf=mock.ANY,
run_as_root=True)]
if enable:
expected_calls.append(mock.call().enable(reload_cfg=True))
else:
expected_calls.append(mock.call().disable())
return expected_calls
def _process_router_ipv6_subnet_added(self, router,
ipv6_subnet_modes=None, dns_nameservers=None, network_mtu=0):
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
ri = l3router.RouterInfo(router['id'], router, **self.ri_kwargs)
agent.external_gateway_added = mock.Mock()
self._process_router_instance_for_agent(agent, ri, router)
# Add an IPv6 interface with len(ipv6_subnet_modes) subnets
# and reprocess
l3_test_common.router_append_subnet(
router,
count=len(ipv6_subnet_modes),
ip_version=6,
ipv6_subnet_modes=ipv6_subnet_modes,
dns_nameservers=dns_nameservers,
network_mtu=network_mtu)
# Reassign the router object to RouterInfo
self._process_router_instance_for_agent(agent, ri, router)
return ri
def _assert_ri_process_enabled(self, ri):
"""Verify that process was enabled for a router instance."""
expected_calls = self._radvd_expected_call_external_process(ri)
self.assertEqual(expected_calls, self.external_process.mock_calls)
def _assert_ri_process_disabled(self, ri):
"""Verify that process was disabled for a router instance."""
expected_calls = self._radvd_expected_call_external_process(ri, False)
self.assertEqual(expected_calls, self.external_process.mock_calls)
def test_process_router_ipv6_interface_added(self):
router = l3_test_common.prepare_router_data()
ri = self._process_router_ipv6_interface_added(router)
self._assert_ri_process_enabled(ri)
# Expect radvd configured without prefix
self.assertNotIn('prefix', self.utils_replace_file.call_args[0][1])
def test_process_router_ipv6_slaac_interface_added(self):
router = l3_test_common.prepare_router_data()
ri = self._process_router_ipv6_interface_added(
router, ra_mode=lib_constants.IPV6_SLAAC)
self._assert_ri_process_enabled(ri)
# Expect radvd configured with prefix
radvd_config_str = self.utils_replace_file.call_args[0][1]
self.assertIn('prefix', radvd_config_str)
self.assertIn('AdvAutonomous on', radvd_config_str)
def test_process_router_ipv6_dhcpv6_stateful_interface_added(self):
router = l3_test_common.prepare_router_data()
ri = self._process_router_ipv6_interface_added(
router, ra_mode=lib_constants.DHCPV6_STATEFUL)
self._assert_ri_process_enabled(ri)
# Expect radvd configured with prefix
radvd_config_str = self.utils_replace_file.call_args[0][1]
self.assertIn('prefix', radvd_config_str)
self.assertIn('AdvAutonomous off', radvd_config_str)
def test_process_router_ipv6_subnets_added(self):
router = l3_test_common.prepare_router_data()
ri = self._process_router_ipv6_subnet_added(router, ipv6_subnet_modes=[
{'ra_mode': lib_constants.IPV6_SLAAC,
'address_mode': lib_constants.IPV6_SLAAC},
{'ra_mode': lib_constants.DHCPV6_STATELESS,
'address_mode': lib_constants.DHCPV6_STATELESS},
{'ra_mode': lib_constants.DHCPV6_STATEFUL,
'address_mode': lib_constants.DHCPV6_STATEFUL}])
self._assert_ri_process_enabled(ri)
radvd_config_str = self.utils_replace_file.call_args[0][1]
# Assert we have a prefix from IPV6_SLAAC and a prefix from
# DHCPV6_STATELESS on one interface
self.assertEqual(3, radvd_config_str.count("prefix"))
self.assertEqual(1, radvd_config_str.count("interface"))
self.assertEqual(2, radvd_config_str.count("AdvAutonomous on"))
self.assertEqual(1, radvd_config_str.count("AdvAutonomous off"))
    def test_process_router_ipv6_subnets_added_to_existing_port(self):
        """Adding a second IPv6 subnet to an existing port re-enables radvd.

        The radvd config should grow a second prefix under the same
        interface stanza, and the port's subnets/fixed_ips should each
        reach a count of two.
        """
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        router = l3_test_common.prepare_router_data()
        ri = l3router.RouterInfo(router['id'], router, **self.ri_kwargs)
        agent.external_gateway_added = mock.Mock()
        self._process_router_instance_for_agent(agent, ri, router)
        # Add the first subnet on a new interface
        l3_test_common.router_append_subnet(
            router, count=1,
            ip_version=6, ipv6_subnet_modes=[
                {'ra_mode': lib_constants.IPV6_SLAAC,
                 'address_mode': lib_constants.IPV6_SLAAC}])
        self._process_router_instance_for_agent(agent, ri, router)
        self._assert_ri_process_enabled(ri)
        radvd_config = self.utils_replace_file.call_args[0][1].split()
        self.assertEqual(1, len(ri.internal_ports[1]['subnets']))
        self.assertEqual(1, len(ri.internal_ports[1]['fixed_ips']))
        self.assertEqual(1, radvd_config.count("prefix"))
        self.assertEqual(1, radvd_config.count("interface"))
        # Reset mocks to verify radvd enabled and configured correctly
        # after second subnet added to interface
        self.external_process.reset_mock()
        self.utils_replace_file.reset_mock()
        # Add the second subnet on the same interface
        interface_id = router[lib_constants.INTERFACE_KEY][1]['id']
        l3_test_common.router_append_subnet(
            router, count=1,
            ip_version=6,
            ipv6_subnet_modes=[
                {'ra_mode': lib_constants.IPV6_SLAAC,
                 'address_mode': lib_constants.IPV6_SLAAC}],
            interface_id=interface_id)
        self._process_router_instance_for_agent(agent, ri, router)
        # radvd should have been enabled again and the interface
        # should have two prefixes
        self._assert_ri_process_enabled(ri)
        radvd_config = self.utils_replace_file.call_args[0][1].split()
        self.assertEqual(2, len(ri.internal_ports[1]['subnets']))
        self.assertEqual(2, len(ri.internal_ports[1]['fixed_ips']))
        self.assertEqual(2, radvd_config.count("prefix"))
        self.assertEqual(1, radvd_config.count("interface"))
def test_process_router_ipv6v4_interface_added(self):
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
router = l3_test_common.prepare_router_data()
ri = l3router.RouterInfo(router['id'], router, **self.ri_kwargs)
ri.external_gateway_added = mock.Mock()
# Process with NAT
ri.process(agent)
# Add an IPv4 and IPv6 interface and reprocess
l3_test_common.router_append_interface(router, count=1, ip_version=4)
l3_test_common.router_append_interface(router, count=1, ip_version=6)
# Reassign the router object to RouterInfo
self._process_router_instance_for_agent(agent, ri, router)
self._assert_ri_process_enabled(ri)
def test_process_router_interface_removed(self):
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
router = l3_test_common.prepare_router_data(num_internal_ports=2)
ri = l3router.RouterInfo(router['id'], router, **self.ri_kwargs)
ri.external_gateway_added = mock.Mock()
# Process with NAT
ri.process(agent)
# Add an interface and reprocess
del router[lib_constants.INTERFACE_KEY][1]
# Reassign the router object to RouterInfo
ri.router = router
ri.process(agent)
# send_ip_addr_adv_notif is called both times process is called
self.assertEqual(2, self.send_adv_notif.call_count)
def test_process_router_ipv6_interface_removed(self):
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
router = l3_test_common.prepare_router_data()
ri = l3router.RouterInfo(router['id'], router, **self.ri_kwargs)
ri.external_gateway_added = mock.Mock()
self._process_router_instance_for_agent(agent, ri, router)
# Add an IPv6 interface and reprocess
l3_test_common.router_append_interface(router, count=1, ip_version=6)
self._process_router_instance_for_agent(agent, ri, router)
self._assert_ri_process_enabled(ri)
# Reset the calls so we can check for disable radvd
self.external_process.reset_mock()
self.process_monitor.reset_mock()
# Remove the IPv6 interface and reprocess
del router[lib_constants.INTERFACE_KEY][1]
self._process_router_instance_for_agent(agent, ri, router)
self._assert_ri_process_disabled(ri)
    def test_process_router_ipv6_subnet_removed(self):
        """Removing one of two IPv6 subnets leaves one prefix in radvd.conf.

        radvd should be (re-)enabled after the removal, and the remaining
        interface should carry exactly one prefix.
        """
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        router = l3_test_common.prepare_router_data()
        ri = l3router.RouterInfo(router['id'], router, **self.ri_kwargs)
        agent.external_gateway_added = mock.Mock()
        self._process_router_instance_for_agent(agent, ri, router)
        # Add an IPv6 interface with two subnets and reprocess
        l3_test_common.router_append_subnet(
            router, count=2, ip_version=6,
            ipv6_subnet_modes=([{'ra_mode': lib_constants.IPV6_SLAAC,
                                 'address_mode': lib_constants.IPV6_SLAAC}]
                               * 2))
        self._process_router_instance_for_agent(agent, ri, router)
        self._assert_ri_process_enabled(ri)
        # Reset mocks to check for modified radvd config
        self.utils_replace_file.reset_mock()
        self.external_process.reset_mock()
        # Remove one subnet from the interface and reprocess.
        # deepcopy so the dict held by ri is not mutated in place.
        interfaces = copy.deepcopy(router[lib_constants.INTERFACE_KEY])
        del interfaces[1]['subnets'][0]
        del interfaces[1]['fixed_ips'][0]
        router[lib_constants.INTERFACE_KEY] = interfaces
        self._process_router_instance_for_agent(agent, ri, router)
        # Assert radvd was enabled again and that we only have one
        # prefix on the interface
        self._assert_ri_process_enabled(ri)
        radvd_config = self.utils_replace_file.call_args[0][1].split()
        self.assertEqual(1, len(ri.internal_ports[1]['subnets']))
        self.assertEqual(1, len(ri.internal_ports[1]['fixed_ips']))
        self.assertEqual(1, radvd_config.count("interface"))
        self.assertEqual(1, radvd_config.count("prefix"))
    def test_process_router_internal_network_added_unexpected_error(self):
        """A failed internal_network_added is retried on the next process().

        The port must not be recorded in ri.internal_ports while the add
        fails, and must appear there once the add succeeds.
        """
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        router = l3_test_common.prepare_router_data()
        ri = l3router.RouterInfo(router['id'], router, **self.ri_kwargs)
        ri.external_gateway_added = mock.Mock()
        with mock.patch.object(
                ri,
                'internal_network_added') as internal_network_added:
            # raise RuntimeError to simulate that an unexpected exception
            # occurs
            internal_network_added.side_effect = RuntimeError
            self.assertRaises(RuntimeError, ri.process, agent)
            self.assertNotIn(
                router[lib_constants.INTERFACE_KEY][0], ri.internal_ports)

            # The unexpected exception has been fixed manually
            internal_network_added.side_effect = None

            # periodic_sync_routers_task finds out that _rpc_loop failed to
            # process the router last time, it will retry in the next run.
            ri.process(agent)
            # We were able to add the port to ri.internal_ports
            self.assertIn(
                router[lib_constants.INTERFACE_KEY][0], ri.internal_ports)
    def test_process_router_internal_network_removed_unexpected_error(self):
        """A failed internal_network_removed is retried on the next process().

        The port stays in ri.internal_ports while the removal fails, and
        disappears once the removal succeeds.
        """
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        router = l3_test_common.prepare_router_data()
        ri = l3router.RouterInfo(router['id'], router, **self.ri_kwargs)
        ri.external_gateway_added = mock.Mock()
        # add an internal port
        ri.process(agent)

        with mock.patch.object(
                ri,
                'internal_network_removed') as internal_net_removed:
            # raise RuntimeError to simulate that an unexpected exception
            # occurs
            internal_net_removed.side_effect = RuntimeError
            ri.internal_ports[0]['admin_state_up'] = False
            # The above port is set to down state, remove it.
            self.assertRaises(RuntimeError, ri.process, agent)
            self.assertIn(
                router[lib_constants.INTERFACE_KEY][0], ri.internal_ports)

            # The unexpected exception has been fixed manually
            internal_net_removed.side_effect = None

            # periodic_sync_routers_task finds out that _rpc_loop failed to
            # process the router last time, it will retry in the next run.
            ri.process(agent)
            # We were able to remove the port from ri.internal_ports
            self.assertNotIn(
                router[lib_constants.INTERFACE_KEY][0], ri.internal_ports)
    def test_process_router_floatingip_nochange(self):
        """Only floating IPs not already configured get a status update.

        fip1's /32 is reported as an existing router cidr, so only fip2
        should be sent to update_floatingip_statuses.
        """
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        router = l3_test_common.prepare_router_data(num_internal_ports=1)
        fip1 = {'id': _uuid(), 'floating_ip_address': '8.8.8.8',
                'fixed_ip_address': '7.7.7.7', 'status': 'ACTIVE',
                'port_id': router[lib_constants.INTERFACE_KEY][0]['id']}
        fip2 = copy.copy(fip1)
        fip2.update({'id': _uuid(), 'status': 'DOWN',
                     'floating_ip_address': '9.9.9.9'})
        router[lib_constants.FLOATINGIP_KEY] = [fip1, fip2]

        ri = legacy_router.LegacyRouter(router['id'], router,
                                        **self.ri_kwargs)
        ri.external_gateway_added = mock.Mock()
        with mock.patch.object(
            agent.plugin_rpc, 'update_floatingip_statuses'
        ) as mock_update_fip_status,\
                mock.patch.object(ri, 'get_router_cidrs') as mock_get_cidrs:
            # fip1 is already present on the router, fip2 is not.
            mock_get_cidrs.return_value = set(
                [fip1['floating_ip_address'] + '/32'])
            ri.process(agent)
        # make sure only the one that wasn't in existing cidrs was sent
        mock_update_fip_status.assert_called_once_with(
            mock.ANY, ri.router_id, {fip2['id']: 'ACTIVE'})
    def test_process_router_floatingip_status_update_if_processed(self):
        """Both floating IPs get a status update when neither is configured.

        get_router_cidrs reports no existing cidrs, so both fips are newly
        processed and both are reported ACTIVE.
        """
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        router = l3_test_common.prepare_router_data(num_internal_ports=1)
        fip1 = {'id': _uuid(), 'floating_ip_address': '8.8.8.8',
                'fixed_ip_address': '7.7.7.7', 'status': 'ACTIVE',
                'port_id': router[lib_constants.INTERFACE_KEY][0]['id']}
        fip2 = copy.copy(fip1)
        fip2.update({'id': _uuid(), 'status': 'DOWN', })
        router[lib_constants.FLOATINGIP_KEY] = [fip1, fip2]

        ri = legacy_router.LegacyRouter(router['id'], router,
                                        **self.ri_kwargs)
        ri.external_gateway_added = mock.Mock()
        with mock.patch.object(
            agent.plugin_rpc, 'update_floatingip_statuses'
        ) as mock_update_fip_status,\
                mock.patch.object(ri, 'get_router_cidrs') as mock_get_cidrs:
            mock_get_cidrs.return_value = set()
            ri.process(agent)
        # make sure both was sent since not existed in existing cidrs
        mock_update_fip_status.assert_called_once_with(
            mock.ANY, ri.router_id, {fip1['id']: 'ACTIVE',
                                     fip2['id']: 'ACTIVE'})
    def test_process_router_floatingip_disabled(self):
        """A removed floating IP is reported DOWN on the next process().

        First pass reports the fip ACTIVE; after it is removed from the
        router dict, the second pass reports it DOWN.
        """
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        with mock.patch.object(
                agent.plugin_rpc,
                'update_floatingip_statuses') as mock_update_fip_status:
            fip_id = _uuid()
            router = l3_test_common.prepare_router_data(num_internal_ports=1)
            router[lib_constants.FLOATINGIP_KEY] = [
                {'id': fip_id,
                 'floating_ip_address': '8.8.8.8',
                 'fixed_ip_address': '7.7.7.7',
                 'status': 'DOWN',
                 'port_id': router[lib_constants.INTERFACE_KEY][0]['id']}]
            ri = legacy_router.LegacyRouter(router['id'],
                                            router,
                                            **self.ri_kwargs)
            ri.external_gateway_added = mock.Mock()
            ri.process(agent)
            # Assess the call for putting the floating IP up was performed
            mock_update_fip_status.assert_called_once_with(
                mock.ANY, ri.router_id,
                {fip_id: lib_constants.FLOATINGIP_STATUS_ACTIVE})
            mock_update_fip_status.reset_mock()
            # Process the router again, this time without floating IPs
            router[lib_constants.FLOATINGIP_KEY] = []
            ri.router = router
            ri.process(agent)
            # Assess the call for putting the floating IP down was performed
            mock_update_fip_status.assert_called_once_with(
                mock.ANY, ri.router_id,
                {fip_id: lib_constants.FLOATINGIP_STATUS_DOWN})
    def test_process_router_floatingip_exception(self):
        """A failure in floating IP processing reports the fip as ERROR."""
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        with mock.patch.object(
                agent.plugin_rpc,
                'update_floatingip_statuses') as mock_update_fip_status:
            fip_id = _uuid()
            router = l3_test_common.prepare_router_data(num_internal_ports=1)
            router[lib_constants.FLOATINGIP_KEY] = [
                {'id': fip_id,
                 'floating_ip_address': '8.8.8.8',
                 'fixed_ip_address': '7.7.7.7',
                 'port_id': router[lib_constants.INTERFACE_KEY][0]['id']}]

            ri = l3router.RouterInfo(router['id'], router, **self.ri_kwargs)
            # Force the fip-processing step itself to blow up.
            ri.process_floating_ip_addresses = mock.Mock(
                side_effect=RuntimeError)
            ri.external_gateway_added = mock.Mock()
            ri.process(agent)
            # Assess the call for putting the floating IP into Error
            # was performed
            mock_update_fip_status.assert_called_once_with(
                mock.ANY, ri.router_id,
                {fip_id: lib_constants.FLOATINGIP_STATUS_ERROR})
    def test_process_external_iptables_exception(self):
        """An iptables apply failure reports the floating IP as ERROR."""
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        with mock.patch.object(
                agent.plugin_rpc,
                'update_floatingip_statuses') as mock_update_fip_status:
            fip_id = _uuid()
            router = l3_test_common.prepare_router_data(num_internal_ports=1)
            router[lib_constants.FLOATINGIP_KEY] = [
                {'id': fip_id,
                 'floating_ip_address': '8.8.8.8',
                 'fixed_ip_address': '7.7.7.7',
                 'port_id': router[lib_constants.INTERFACE_KEY][0]['id']}]

            ri = l3router.RouterInfo(router['id'], router, **self.ri_kwargs)
            # Make the iptables apply step fail.
            ri.iptables_manager._apply = mock.Mock(side_effect=Exception)
            ri.process_external(agent)
            # Assess the call for putting the floating IP into Error
            # was performed
            mock_update_fip_status.assert_called_once_with(
                mock.ANY, ri.router_id,
                {fip_id: lib_constants.FLOATINGIP_STATUS_ERROR})
            self.assertEqual(1, ri.iptables_manager._apply.call_count)
    def test_handle_router_snat_rules_distributed_without_snat_manager(self):
        """DVR edge router with no gateway port leaves SNAT untouched.

        With get_ex_gw_port returning None, no snat iptables manager is
        created and the router's iptables manager is never invoked.
        """
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        ri = dvr_router.DvrEdgeRouter(
            agent,
            HOSTNAME,
            'foo_router_id',
            {},
            **self.ri_kwargs)
        ri.iptables_manager = mock.MagicMock()
        ri._is_this_snat_host = mock.Mock(return_value=True)
        ri.get_ex_gw_port = mock.Mock(return_value=None)

        ri._handle_router_snat_rules(None, mock.ANY)
        self.assertIsNone(ri.snat_iptables_manager)
        self.assertFalse(ri.iptables_manager.called)
    def test_handle_router_snat_rules_add_back_jump(self):
        """The float-snat jump is re-added first after the snat chain is
        emptied.

        Scans the mock call list and checks the first add_rule call (the
        loop breaks at the first match) is the float-snat jump.
        """
        ri = l3router.RouterInfo(_uuid(), {}, **self.ri_kwargs)
        ri.iptables_manager = mock.MagicMock()
        port = {'fixed_ips': [{'ip_address': '192.168.1.4'}]}

        ri._handle_router_snat_rules(port, "iface")

        nat = ri.iptables_manager.ipv4['nat']
        nat.empty_chain.assert_any_call('snat')
        nat.add_rule.assert_any_call('snat', '-j $float-snat')
        for call in nat.mock_calls:
            name, args, kwargs = call
            if name == 'add_rule':
                self.assertEqual(('snat', '-j $float-snat'), args)
                self.assertEqual({}, kwargs)
                break
    def test_handle_router_snat_rules_add_rules(self):
        """SNAT/mangle rules for a legacy router are generated as expected.

        Verifies the float-snat jump precedes the SNAT rule, that both the
        plain and the conntrack-marked SNAT rules are present, and that the
        mangle chain marks traffic arriving on the external interface.
        """
        ri = l3router.RouterInfo(_uuid(), {}, **self.ri_kwargs)
        ex_gw_port = {'fixed_ips': [{'ip_address': '192.168.1.4'}]}
        ri.router = {'distributed': False}
        ri._handle_router_snat_rules(ex_gw_port, "iface")
        nat_rules = list(map(str, ri.iptables_manager.ipv4['nat'].rules))
        wrap_name = ri.iptables_manager.wrap_name

        jump_float_rule = "-A %s-snat -j %s-float-snat" % (wrap_name,
                                                           wrap_name)
        snat_rule1 = ("-A %s-snat -o iface -j SNAT --to-source %s") % (
            wrap_name, ex_gw_port['fixed_ips'][0]['ip_address'])
        snat_rule2 = ("-A %s-snat -m mark ! --mark 0x2/%s "
                      "-m conntrack --ctstate DNAT "
                      "-j SNAT --to-source %s") % (
            wrap_name, n_const.ROUTER_MARK_MASK,
            ex_gw_port['fixed_ips'][0]['ip_address'])

        self.assertIn(jump_float_rule, nat_rules)

        self.assertIn(snat_rule1, nat_rules)
        self.assertIn(snat_rule2, nat_rules)
        # Floating-IP SNAT must be consulted before the generic SNAT rule.
        self.assertThat(nat_rules.index(jump_float_rule),
                        matchers.LessThan(nat_rules.index(snat_rule1)))

        mangle_rules = list(map(str, ri.iptables_manager.ipv4['mangle'].rules))
        mangle_rule = ("-A %s-mark -i iface "
                       "-j MARK --set-xmark 0x2/%s" %
                       (wrap_name, n_const.ROUTER_MARK_MASK))
        self.assertIn(mangle_rule, mangle_rules)
    def test_process_router_delete_stale_internal_devices(self):
        """Stale qr- devices found in the namespace are unplugged.

        Two stale internal devices are reported by the IP wrapper; both
        should be unplugged with the internal device prefix, while the one
        legitimate internal port is added normally.
        """
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        stale_devlist = [l3_test_common.FakeDev('qr-a1b2c3d4-e5'),
                         l3_test_common.FakeDev('qr-b2c3d4e5-f6')]
        stale_devnames = [dev.name for dev in stale_devlist]

        get_devices_return = []
        get_devices_return.extend(stale_devlist)
        self.mock_ip.get_devices.return_value = get_devices_return

        router = l3_test_common.prepare_router_data(enable_snat=True,
                                                    num_internal_ports=1)
        ri = l3router.RouterInfo(router['id'], router, **self.ri_kwargs)

        internal_ports = ri.router.get(lib_constants.INTERFACE_KEY, [])
        self.assertEqual(1, len(internal_ports))
        internal_port = internal_ports[0]

        with mock.patch.object(ri, 'internal_network_removed'
                               ) as internal_network_removed,\
                mock.patch.object(ri, 'internal_network_added'
                                  ) as internal_network_added,\
                mock.patch.object(ri, 'external_gateway_removed'
                                  ) as external_gateway_removed,\
                mock.patch.object(ri, 'external_gateway_added'
                                  ) as external_gateway_added:

            ri.process(agent)

            self.assertEqual(1, external_gateway_added.call_count)
            self.assertFalse(external_gateway_removed.called)
            self.assertFalse(internal_network_removed.called)
            internal_network_added.assert_called_once_with(internal_port)
            self.assertEqual(len(stale_devnames),
                             self.mock_driver.unplug.call_count)
            calls = [mock.call(stale_devname,
                               namespace=ri.ns_name,
                               prefix=l3_agent.INTERNAL_DEV_PREFIX)
                     for stale_devname in stale_devnames]
            self.mock_driver.unplug.assert_has_calls(calls, any_order=True)
    def test_process_router_delete_stale_external_devices(self):
        """A stale qg- device is unplugged when the router has no gw_port."""
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        stale_devlist = [l3_test_common.FakeDev('qg-a1b2c3d4-e5')]
        stale_devnames = [dev.name for dev in stale_devlist]

        router = l3_test_common.prepare_router_data(enable_snat=True,
                                                    num_internal_ports=1)
        # No gateway port means any external device found is stale.
        del router['gw_port']
        ri = l3router.RouterInfo(router['id'], router, **self.ri_kwargs)

        self.mock_ip.get_devices.return_value = stale_devlist

        ri.process(agent)

        self.mock_driver.unplug.assert_called_with(
            stale_devnames[0],
            bridge="",
            namespace=ri.ns_name,
            prefix=l3_agent.EXTERNAL_DEV_PREFIX)
def test_router_deleted(self):
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
agent._queue = mock.Mock()
agent.router_deleted(None, FAKE_ID)
self.assertEqual(1, agent._queue.add.call_count)
def test_routers_updated(self):
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
agent._queue = mock.Mock()
agent.routers_updated(None, [FAKE_ID])
self.assertEqual(1, agent._queue.add.call_count)
def test_removed_from_agent(self):
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
agent._queue = mock.Mock()
agent.router_removed_from_agent(None, {'router_id': FAKE_ID})
self.assertEqual(1, agent._queue.add.call_count)
def test_added_to_agent(self):
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
agent._queue = mock.Mock()
agent.router_added_to_agent(None, [FAKE_ID])
self.assertEqual(1, agent._queue.add.call_count)
    def test_destroy_namespace(self):
        """Deleting a router namespace unplugs qr- devices and removes veths.

        The qr- device goes through the interface driver's unplug; the
        rfp- device is a veth and is deleted directly.
        """
        namespace = 'qrouter-bar'

        self.mock_ip.get_namespaces.return_value = [namespace]
        self.mock_ip.get_devices.return_value = [
            l3_test_common.FakeDev('qr-aaaa'),
            l3_test_common.FakeDev('rfp-aaaa')]

        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)

        ns = namespaces.RouterNamespace(
            'bar', self.conf, agent.driver, agent.use_ipv6)
        ns.create()

        ns.delete()
        self.mock_driver.unplug.assert_called_once_with('qr-aaaa',
                                                        prefix='qr-',
                                                        namespace='qrouter'
                                                        '-bar')
        self.mock_ip.del_veth.assert_called_once_with('rfp-aaaa')
def test_destroy_router_namespace(self):
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
ns = namespaces.Namespace(
'qrouter-bar', self.conf, agent.driver, agent.use_ipv6)
ns.create()
ns.delete()
self.mock_ip.netns.delete.assert_called_once_with("qrouter-bar")
    def _configure_metadata_proxy(self, enableflag=True):
        """Exercise metadata proxy spawn/destroy around router add/remove.

        :param enableflag: when True, expects the monitored metadata proxy
            to be spawned on router add and destroyed on router removal;
            when False, overrides enable_metadata_proxy to False and
            expects neither call to happen.
        """
        if not enableflag:
            self.conf.set_override('enable_metadata_proxy', False)
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        router_id = _uuid()
        router = {'id': router_id,
                  'external_gateway_info': {},
                  'routes': [],
                  'distributed': False}
        driver = metadata_driver.MetadataDriver
        with mock.patch.object(
            driver, 'destroy_monitored_metadata_proxy') as destroy_proxy:
            with mock.patch.object(
                driver, 'spawn_monitored_metadata_proxy') as spawn_proxy:
                agent._process_added_router(router)
                if enableflag:
                    spawn_proxy.assert_called_with(
                        mock.ANY,
                        mock.ANY,
                        self.conf.metadata_port,
                        mock.ANY,
                        router_id=router_id
                    )
                else:
                    self.assertFalse(spawn_proxy.call_count)
                agent._router_removed(router_id)
                if enableflag:
                    destroy_proxy.assert_called_with(mock.ANY,
                                                     router_id,
                                                     mock.ANY)
                else:
                    self.assertFalse(destroy_proxy.call_count)
    def test_enable_metadata_proxy(self):
        """Metadata proxy is spawned/destroyed when the feature is enabled."""
        self._configure_metadata_proxy()
    def test_disable_metadata_proxy_spawn(self):
        """No metadata proxy is spawned when the feature is disabled."""
        self._configure_metadata_proxy(enableflag=False)
    def _test_process_routers_update_rpc_timeout(self, ext_net_call=False,
                                                 ext_net_call_failed=False):
        """Verify a MessagingTimeout during an update triggers a resync.

        :param ext_net_call: whether _process_router_if_compatible is
            expected to have been called.
        :param ext_net_call_failed: when True, make that call itself raise
            MessagingTimeout (the update carries no router data either way).
        """
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        agent.fullsync = False
        agent._process_router_if_compatible = mock.Mock()
        if ext_net_call_failed:
            agent._process_router_if_compatible.side_effect = (
                oslo_messaging.MessagingTimeout)
        agent._queue = mock.Mock()
        agent._resync_router = mock.Mock()
        update = mock.Mock()
        update.router = None
        agent._queue.each_update_to_next_router.side_effect = [
            [(None, update)]]
        agent._process_router_update()
        # A timeout must resync just this router, not trigger a full sync.
        self.assertFalse(agent.fullsync)
        self.assertEqual(ext_net_call,
                         agent._process_router_if_compatible.called)
        agent._resync_router.assert_called_with(update)
    def test_process_routers_update_rpc_timeout_on_get_routers(self):
        """A timeout fetching routers resyncs without a full sync."""
        self.plugin_api.get_routers.side_effect = (
            oslo_messaging.MessagingTimeout)
        self._test_process_routers_update_rpc_timeout()
    def test_process_routers_update_resyncs_failed_router(self):
        """A router that fails to process is refetched on the retry pass."""
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)

        # Attempting to configure the router will fail
        agent._process_router_if_compatible = mock.MagicMock()
        agent._process_router_if_compatible.side_effect = RuntimeError()

        # Queue an update from a full sync
        update = router_processing_queue.RouterUpdate(
            42,
            router_processing_queue.PRIORITY_SYNC_ROUTERS_TASK,
            router=mock.Mock(),
            timestamp=timeutils.utcnow())
        agent._queue.add(update)
        agent._process_router_update()

        # The update contained the router object, get_routers won't be called
        self.assertFalse(agent.plugin_rpc.get_routers.called)

        # The update failed, assert that get_routers was called
        agent._process_router_update()
        self.assertTrue(agent.plugin_rpc.get_routers.called)
    def test_process_routers_update_rpc_timeout_on_get_ext_net(self):
        """A timeout inside router processing resyncs that router."""
        self._test_process_routers_update_rpc_timeout(ext_net_call=True,
                                                      ext_net_call_failed=True)
    def _test_process_routers_update_router_deleted(self, error=False):
        """Exercise handling of a ROUTER_DELETED update.

        :param error: when True, _safe_router_removed reports failure and
            the update must be resynced without being marked processed;
            when False, the router info is deleted and the update is
            marked fetched-and-processed.
        """
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        agent._queue = mock.Mock()
        update = mock.Mock()
        update.router = None
        update.action = 1  # ROUTER_DELETED
        router_info = mock.MagicMock()
        agent.router_info[update.id] = router_info
        router_processor = mock.Mock()
        agent._queue.each_update_to_next_router.side_effect = [
            [(router_processor, update)]]
        agent._resync_router = mock.Mock()
        if error:
            agent._safe_router_removed = mock.Mock()
            agent._safe_router_removed.return_value = False
        agent._process_router_update()
        if error:
            self.assertFalse(router_processor.fetched_and_processed.called)
            agent._resync_router.assert_called_with(update)
        else:
            router_info.delete.assert_called_once_with(agent)
            self.assertFalse(agent.router_info)
            self.assertFalse(agent._resync_router.called)
            router_processor.fetched_and_processed.assert_called_once_with(
                update.timestamp)
    def test_process_routers_update_router_deleted_success(self):
        """A successful router removal marks the update processed."""
        self._test_process_routers_update_router_deleted()
    def test_process_routers_update_router_deleted_error(self):
        """A failed router removal resyncs the update instead."""
        self._test_process_routers_update_router_deleted(True)
    def test_process_router_if_compatible_with_no_ext_net_in_conf(self):
        """With no configured ext net id, the agent asks the plugin for it."""
        self.conf.set_override('external_network_bridge', 'br-ex')
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        self.plugin_api.get_external_network_id.return_value = 'aaa'

        router = {'id': _uuid(),
                  'routes': [],
                  'admin_state_up': True,
                  'external_gateway_info': {'network_id': 'aaa'}}

        agent._process_router_if_compatible(router)
        self.assertIn(router['id'], agent.router_info)
        self.plugin_api.get_external_network_id.assert_called_with(
            agent.context)
def test_process_router_if_compatible_with_cached_ext_net(self):
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
self.plugin_api.get_external_network_id.return_value = 'aaa'
agent.target_ex_net_id = 'aaa'
router = {'id': _uuid(),
'routes': [],
'admin_state_up': True,
'external_gateway_info': {'network_id': 'aaa'}}
agent._process_router_if_compatible(router)
self.assertIn(router['id'], agent.router_info)
self.assertFalse(self.plugin_api.get_external_network_id.called)
    def test_process_router_if_compatible_with_stale_cached_ext_net(self):
        """A stale cached external net id forces a fresh RPC lookup."""
        self.conf.set_override('external_network_bridge', 'br-ex')
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        self.plugin_api.get_external_network_id.return_value = 'aaa'
        # Cached value disagrees with the router's network, so the agent
        # must re-query the plugin.
        agent.target_ex_net_id = 'bbb'

        router = {'id': _uuid(),
                  'routes': [],
                  'admin_state_up': True,
                  'external_gateway_info': {'network_id': 'aaa'}}

        agent._process_router_if_compatible(router)
        self.assertIn(router['id'], agent.router_info)
        self.plugin_api.get_external_network_id.assert_called_with(
            agent.context)
    def test_process_router_if_compatible_w_no_ext_net_and_2_net_plugin(self):
        """TooManyExternalNetworks from the plugin propagates to the caller.

        The router must not be recorded in router_info when the external
        network cannot be determined.
        """
        self.conf.set_override('external_network_bridge', 'br-ex')
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)

        router = {'id': _uuid(),
                  'routes': [],
                  'admin_state_up': True,
                  'external_gateway_info': {'network_id': 'aaa'}}

        agent.router_info = {}
        self.plugin_api.get_external_network_id.side_effect = (
            exc.TooManyExternalNetworks())
        self.assertRaises(exc.TooManyExternalNetworks,
                          agent._process_router_if_compatible,
                          router)
        self.assertNotIn(router['id'], agent.router_info)
    def test_process_router_if_compatible_with_ext_net_in_conf(self):
        """A router on a different ext net than configured is rejected.

        gateway_external_network_id is 'aaa' but the router's gateway is on
        'bbb', so the agent must refuse it and not record it.
        """
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        self.plugin_api.get_external_network_id.return_value = 'aaa'

        router = {'id': _uuid(),
                  'routes': [],
                  'admin_state_up': True,
                  'external_gateway_info': {'network_id': 'bbb'}}

        agent.router_info = {}
        self.conf.set_override('gateway_external_network_id', 'aaa')
        self.assertRaises(n_exc.RouterNotCompatibleWithAgent,
                          agent._process_router_if_compatible,
                          router)
        self.assertNotIn(router['id'], agent.router_info)
def test_process_router_if_compatible_with_no_bridge_no_ext_net(self):
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
self.plugin_api.get_external_network_id.return_value = 'aaa'
router = {'id': _uuid(),
'routes': [],
'admin_state_up': True,
'external_gateway_info': {'network_id': 'aaa'}}
agent.router_info = {}
agent._process_router_if_compatible(router)
self.assertIn(router['id'], agent.router_info)
def test_nonexistent_interface_driver(self):
self.conf.set_override('interface_driver', None)
self.assertRaises(SystemExit, l3_agent.L3NATAgent,
HOSTNAME, self.conf)
self.conf.set_override('interface_driver', 'wrong.driver')
self.assertRaises(SystemExit, l3_agent.L3NATAgent,
HOSTNAME, self.conf)
    @mock.patch.object(namespaces.RouterNamespace, 'delete')
    @mock.patch.object(dvr_snat_ns.SnatNamespace, 'delete')
    def _cleanup_namespace_test(self,
                                stale_namespace_list,
                                router_list,
                                other_namespaces,
                                mock_snat_ns,
                                mock_router_ns):
        """Verify the namespaces manager deletes only stale namespaces.

        :param stale_namespace_list: qrouter-/snat- namespaces that have no
            corresponding router and should be deleted.
        :param router_list: routers whose namespaces must be kept.
        :param other_namespaces: unrelated namespaces that must be ignored.
        :param mock_snat_ns: patched SnatNamespace.delete (decorator).
        :param mock_router_ns: patched RouterNamespace.delete (decorator).
        """

        good_namespace_list = [namespaces.NS_PREFIX + r['id']
                               for r in router_list]
        good_namespace_list += [dvr_snat_ns.SNAT_NS_PREFIX + r['id']
                                for r in router_list]
        self.mock_ip.get_namespaces.return_value = (stale_namespace_list +
                                                    good_namespace_list +
                                                    other_namespaces)

        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)

        self.assertTrue(agent.namespaces_manager._clean_stale)

        pm = self.external_process.return_value
        pm.reset_mock()

        with agent.namespaces_manager as ns_manager:
            for r in router_list:
                ns_manager.keep_router(r['id'])
        # Router namespaces are deleted via RouterNamespace.delete,
        # the remaining stale ones via SnatNamespace.delete.
        qrouters = [n for n in stale_namespace_list
                    if n.startswith(namespaces.NS_PREFIX)]
        self.assertEqual(len(qrouters), mock_router_ns.call_count)
        self.assertEqual(
            len(stale_namespace_list) - len(qrouters),
            mock_snat_ns.call_count)

        self.assertFalse(agent.namespaces_manager._clean_stale)
def test_cleanup_namespace(self):
stale_namespaces = [namespaces.NS_PREFIX + 'foo',
namespaces.NS_PREFIX + 'bar',
dvr_snat_ns.SNAT_NS_PREFIX + 'foo']
other_namespaces = ['unknown']
self._cleanup_namespace_test(stale_namespaces,
[],
other_namespaces)
def test_cleanup_namespace_with_registered_router_ids(self):
stale_namespaces = [namespaces.NS_PREFIX + 'cccc',
namespaces.NS_PREFIX + 'eeeee',
dvr_snat_ns.SNAT_NS_PREFIX + 'fffff']
router_list = [{'id': 'foo', 'distributed': False},
{'id': 'aaaa', 'distributed': False}]
other_namespaces = ['qdhcp-aabbcc', 'unknown']
self._cleanup_namespace_test(stale_namespaces,
router_list,
other_namespaces)
    def test_create_dvr_gateway(self):
        """Creating the DVR gateway plugs snat ports and the ext-gw port.

        With two snat interfaces returned by get_snat_interfaces, three
        devices in total should be plugged and initialized.
        """
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        router = l3_test_common.prepare_router_data()
        ri = dvr_router.DvrEdgeRouter(agent,
                                      HOSTNAME,
                                      router['id'],
                                      router,
                                      **self.ri_kwargs)

        port_id = _uuid()
        subnet_id = _uuid()
        dvr_gw_port = {'fixed_ips': [{'ip_address': '20.0.0.30',
                                      'prefixlen': 24,
                                      'subnet_id': subnet_id}],
                       'subnets': [{'id': subnet_id,
                                    'cidr': '20.0.0.0/24',
                                    'gateway_ip': '20.0.0.1'}],
                       'id': port_id,
                       'network_id': _uuid(),
                       'mac_address': 'ca:fe:de:ad:be:ef'}

        interface_name = ri._get_snat_int_device_name(port_id)
        self.device_exists.return_value = False

        with mock.patch.object(ri, 'get_snat_interfaces') as get_interfaces:
            get_interfaces.return_value = self.snat_ports
            ri._create_dvr_gateway(dvr_gw_port, interface_name)

        # check 2 internal ports are plugged
        # check 1 ext-gw-port is plugged
        self.assertEqual(3, self.mock_driver.plug.call_count)
        self.assertEqual(3, self.mock_driver.init_router_port.call_count)
    def test_process_address_scope(self):
        """process_address_scope tolerates a missing snat iptables manager.

        First call runs with snat_iptables_manager=None (must not crash);
        then with a real manager, _add_address_scope_mark is invoked for
        both processing passes.
        """
        router = l3_test_common.prepare_router_data()
        router['distributed'] = True
        router['gw_port_host'] = HOSTNAME

        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        ri = dvr_router.DvrEdgeRouter(agent,
                                      HOSTNAME,
                                      router['id'],
                                      router,
                                      **self.ri_kwargs)
        ri.get_ex_gw_port = mock.Mock(return_value=None)

        # Make sure the code doesn't crash if ri.snat_iptables_manager is
        # None.
        ri.process_address_scope()

        with mock.patch.object(ri, '_add_address_scope_mark') as mocked_func:
            ri.snat_iptables_manager = iptables_manager.IptablesManager(
                namespace=mock.ANY, use_ipv6=False)
            ri.snat_iptables_manager.defer_apply_off = mock.Mock()

            ri.process_address_scope()
            self.assertEqual(2, mocked_func.call_count)
def test_get_service_plugin_list(self):
service_plugins = [p_const.L3_ROUTER_NAT]
self.plugin_api.get_service_plugin_list.return_value = service_plugins
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
self.assertEqual(service_plugins, agent.neutron_service_plugins)
self.assertTrue(self.plugin_api.get_service_plugin_list.called)
def test_get_service_plugin_list_failed(self):
raise_rpc = oslo_messaging.RemoteError()
self.plugin_api.get_service_plugin_list.side_effect = raise_rpc
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
self.assertIsNone(agent.neutron_service_plugins)
self.assertTrue(self.plugin_api.get_service_plugin_list.called)
def test_get_service_plugin_list_retried(self):
raise_timeout = oslo_messaging.MessagingTimeout()
# Raise a timeout the first 2 times it calls
# get_service_plugin_list then return a empty tuple
self.plugin_api.get_service_plugin_list.side_effect = (
raise_timeout, tuple()
)
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
self.assertEqual(tuple(), agent.neutron_service_plugins)
    def test_external_gateway_removed_ext_gw_port_no_fip_ns(self):
        """Gateway removal with no fip namespace skips floating IP cleanup."""
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        agent.conf.agent_mode = 'dvr_snat'
        router = l3_test_common.prepare_router_data(num_internal_ports=2)
        router['gw_port_host'] = HOSTNAME
        self.mock_driver.unplug.reset_mock()

        external_net_id = router['gw_port']['network_id']
        ri = dvr_router.DvrEdgeRouter(
            agent, HOSTNAME, router['id'], router, **self.ri_kwargs)
        ri.remove_floating_ip = mock.Mock()
        agent._fetch_external_net_id = mock.Mock(return_value=external_net_id)
        # Simulate the gateway port already gone from the router dict but
        # still cached on the router info.
        ri.ex_gw_port = ri.router['gw_port']
        del ri.router['gw_port']
        ri.fip_ns = None
        nat = ri.iptables_manager.ipv4['nat']
        nat.clear_rules_by_tag = mock.Mock()
        nat.add_rule = mock.Mock()
        ri.snat_namespace = mock.Mock()

        ri.external_gateway_removed(
            ri.ex_gw_port,
            ri.get_external_device_name(ri.ex_gw_port['id']))

        self.assertFalse(ri.remove_floating_ip.called)
def test_spawn_radvd(self):
router = l3_test_common.prepare_router_data(ip_version=6)
conffile = '/fake/radvd.conf'
pidfile = '/fake/radvd.pid'
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
# we don't want the whole process manager to be mocked to be
# able to catch execute() calls
self.external_process_p.stop()
self.ip_cls_p.stop()
get_conf_file_name = 'neutron.agent.linux.utils.get_conf_file_name'
get_pid_file_name = ('neutron.agent.linux.external_process.'
'ProcessManager.get_pid_file_name')
utils_execute = 'neutron.agent.common.utils.execute'
mock.patch(get_conf_file_name).start().return_value = conffile
mock.patch(get_pid_file_name).start().return_value = pidfile
execute = mock.patch(utils_execute).start()
radvd = ra.DaemonMonitor(
router['id'],
namespaces.RouterNamespace._get_ns_name(router['id']),
agent.process_monitor,
l3_test_common.FakeDev,
self.conf)
radvd.enable(router['_interfaces'])
cmd = execute.call_args[0][0]
self.assertIn('radvd', cmd)
_join = lambda *args: ' '.join(args)
cmd = _join(*cmd)
self.assertIn(_join('-C', conffile), cmd)
self.assertIn(_join('-p', pidfile), cmd)
self.assertIn(_join('-m', 'syslog'), cmd)
    def test_generate_radvd_mtu_conf(self):
        """AdvLinkMTU appears in radvd.conf only when advertise_mtu is on."""
        router = l3_test_common.prepare_router_data()
        ipv6_subnet_modes = [{'ra_mode': lib_constants.IPV6_SLAAC,
                             'address_mode': lib_constants.IPV6_SLAAC}]
        network_mtu = '1446'
        ri = self._process_router_ipv6_subnet_added(router,
                                                    ipv6_subnet_modes,
                                                    None,
                                                    network_mtu)
        expected = "AdvLinkMTU 1446"
        # MTU must NOT be advertised when advertise_mtu is False...
        ri.agent_conf.set_override('advertise_mtu', False)
        ri.radvd._generate_radvd_conf(router[lib_constants.INTERFACE_KEY])
        self.assertNotIn(expected, self.utils_replace_file.call_args[0][1])
        # Verify that MTU is advertised when advertise_mtu is True
        ri.agent_conf.set_override('advertise_mtu', True)
        ri.radvd._generate_radvd_conf(router[lib_constants.INTERFACE_KEY])
        self.assertIn(expected, self.utils_replace_file.call_args[0][1])
    def test_generate_radvd_conf_other_and_managed_flag(self):
        """AdvOtherConfigFlag/AdvManagedFlag follow the subnets' RA modes."""
        # expected = {ra_mode: (AdvOtherConfigFlag, AdvManagedFlag), ...}
        expected = {lib_constants.IPV6_SLAAC: (False, False),
                    lib_constants.DHCPV6_STATELESS: (True, False),
                    lib_constants.DHCPV6_STATEFUL: (False, True)}
        modes = [lib_constants.IPV6_SLAAC, lib_constants.DHCPV6_STATELESS,
                 lib_constants.DHCPV6_STATEFUL]
        # Every non-empty combination of the three RA modes.
        mode_combos = list(iter_chain(*[[list(combo) for combo in
            iter_combinations(modes, i)] for i in range(1, len(modes) + 1)]))
        for mode_list in mode_combos:
            ipv6_subnet_modes = [{'ra_mode': mode, 'address_mode': mode}
                                 for mode in mode_list]
            router = l3_test_common.prepare_router_data()
            ri = self._process_router_ipv6_subnet_added(router,
                                                        ipv6_subnet_modes)
            ri.radvd._generate_radvd_conf(router[lib_constants.INTERFACE_KEY])
            def assertFlag(flag):
                # Pick the assertion matching whether the flag is expected.
                return (self.assertIn if flag else self.assertNotIn)
            # A flag must be set if ANY mode in the combination requires it.
            other_flag, managed_flag = (
                any(expected[mode][0] for mode in mode_list),
                any(expected[mode][1] for mode in mode_list))
            assertFlag(other_flag)('AdvOtherConfigFlag on;',
                                   self.utils_replace_file.call_args[0][1])
            assertFlag(managed_flag)('AdvManagedFlag on;',
                                     self.utils_replace_file.call_args[0][1])
    def test_generate_radvd_intervals(self):
        """Configured min/max RA intervals end up in the radvd config."""
        self.conf.set_override('min_rtr_adv_interval', 22)
        self.conf.set_override('max_rtr_adv_interval', 66)
        router = l3_test_common.prepare_router_data()
        ipv6_subnet_modes = [{'ra_mode': lib_constants.IPV6_SLAAC,
                             'address_mode': lib_constants.IPV6_SLAAC}]
        ri = self._process_router_ipv6_subnet_added(router,
                                                    ipv6_subnet_modes)
        ri.radvd._generate_radvd_conf(router[lib_constants.INTERFACE_KEY])
        self.assertIn("MinRtrAdvInterval 22",
                      self.utils_replace_file.call_args[0][1])
        self.assertIn("MaxRtrAdvInterval 66",
                      self.utils_replace_file.call_args[0][1])
    def test_generate_radvd_rdnss_conf(self):
        """DNS servers appear as RDNSS entries, capped at MAX_RDNSS_ENTRIES."""
        router = l3_test_common.prepare_router_data()
        ipv6_subnet_modes = [{'ra_mode': lib_constants.IPV6_SLAAC,
                             'address_mode': lib_constants.IPV6_SLAAC}]
        dns_list = ['fd01:1::100', 'fd01:1::200', 'fd01::300', 'fd01::400']
        ri = self._process_router_ipv6_subnet_added(router,
                                                    ipv6_subnet_modes,
                                                    dns_nameservers=dns_list)
        ri.radvd._generate_radvd_conf(router[lib_constants.INTERFACE_KEY])
        # Verify that radvd configuration file includes RDNSS entries
        expected = "RDNSS  "
        for dns in dns_list[0:ra.MAX_RDNSS_ENTRIES]:
            expected += "%s  " % dns
        self.assertIn(expected, self.utils_replace_file.call_args[0][1])
def _pd_expected_call_external_process(self, requestor, ri, enable=True):
expected_calls = []
if enable:
expected_calls.append(mock.call(uuid=requestor,
service='dibbler',
default_cmd_callback=mock.ANY,
namespace=ri.ns_name,
conf=mock.ANY,
pid_file=mock.ANY))
expected_calls.append(mock.call().enable(reload_cfg=False))
else:
expected_calls.append(mock.call(uuid=requestor,
service='dibbler',
namespace=ri.ns_name,
conf=mock.ANY,
pid_file=mock.ANY))
expected_calls.append(mock.call().disable(
get_stop_command=mock.ANY))
return expected_calls
    def _pd_setup_agent_router(self):
        """Create an L3 agent plus a processed router ready for PD tests.

        Returns the ``(agent, router, ri)`` triple used by all pd tests.
        """
        router = l3_test_common.prepare_router_data()
        ri = l3router.RouterInfo(router['id'], router, **self.ri_kwargs)
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        agent.external_gateway_added = mock.Mock()
        ri.process(agent)
        agent._router_added(router['id'], router)
        # Make sure radvd monitor is created
        if not ri.radvd:
            ri.radvd = ra.DaemonMonitor(router['id'],
                                        ri.ns_name,
                                        agent.process_monitor,
                                        ri.get_internal_device_name,
                                        self.conf)
        return agent, router, ri
    def _pd_remove_gw_interface(self, intfs, agent, router, ri):
        """Remove the gateway and check PD clients stop and prefixes reset.

        For each interface a dibbler disable call pair is expected, and each
        subnet's prefix must be reset to the provisional PD prefix.
        """
        expected_pd_update = {}
        expected_calls = []
        for intf in intfs:
            requestor_id = self._pd_get_requestor_id(intf, router, ri)
            expected_calls += (self._pd_expected_call_external_process(
                requestor_id, ri, False))
            for subnet in intf['subnets']:
                expected_pd_update[subnet['id']] = (
                    n_const.PROVISIONAL_IPV6_PD_PREFIX)
        # Implement the prefix update notifier
        # Keep track of the updated prefix
        self.pd_update = {}
        def pd_notifier(context, prefix_update):
            # Record the notification and mirror it into the router data.
            self.pd_update = prefix_update
            for subnet_id, prefix in six.iteritems(prefix_update):
                for intf in intfs:
                    for subnet in intf['subnets']:
                        if subnet['id'] == subnet_id:
                            # Update the prefix
                            subnet['cidr'] = prefix
                            break
        # Remove the gateway interface
        agent.pd.notifier = pd_notifier
        agent.pd.remove_gw_interface(router['id'])
        self._pd_assert_dibbler_calls(expected_calls,
            self.external_process.mock_calls[-len(expected_calls):])
        self.assertEqual(expected_pd_update, self.pd_update)
    def _pd_remove_interfaces(self, intfs, agent, router, ri):
        """Remove router interfaces, expect dibbler stop and radvd kill."""
        expected_pd_update = []
        expected_calls = []
        for intf in intfs:
            # Remove the router interface
            router[lib_constants.INTERFACE_KEY].remove(intf)
            requestor_id = self._pd_get_requestor_id(intf, router, ri)
            expected_calls += (self._pd_expected_call_external_process(
                requestor_id, ri, False))
            for subnet in intf['subnets']:
                expected_pd_update += [{subnet['id']:
                    n_const.PROVISIONAL_IPV6_PD_PREFIX}]
        # Implement the prefix update notifier
        # Keep track of the updated prefix
        self.pd_update = []
        def pd_notifier(context, prefix_update):
            # One notification per removed interface is accumulated here.
            self.pd_update.append(prefix_update)
            for intf in intfs:
                for subnet in intf['subnets']:
                    if subnet['id'] in prefix_update:
                        # Update the prefix
                        subnet['cidr'] = prefix_update[subnet['id']]
        # Process the router for removed interfaces
        agent.pd.notifier = pd_notifier
        ri.process(agent)
        # The number of external process calls takes radvd into account.
        # This is because there is no ipv6 interface any more after removing
        # the interfaces, and radvd will be killed because of that
        self._pd_assert_dibbler_calls(expected_calls,
            self.external_process.mock_calls[-len(expected_calls) - 2:])
        self._pd_assert_radvd_calls(ri, False)
        self.assertEqual(expected_pd_update, self.pd_update)
def _pd_get_requestor_id(self, intf, router, ri):
ifname = ri.get_internal_device_name(intf['id'])
for subnet in intf['subnets']:
return dibbler.PDDibbler(router['id'],
subnet['id'], ifname).requestor_id
def _pd_assert_dibbler_calls(self, expected, actual):
'''Check the external process calls for dibbler are expected
in the case of multiple pd-enabled router ports, the exact sequence
of these calls are not deterministic. It's known, though, that each
external_process call is followed with either an enable() or disable()
'''
num_ext_calls = len(expected) // 2
expected_ext_calls = []
actual_ext_calls = []
expected_action_calls = []
actual_action_calls = []
for c in range(num_ext_calls):
expected_ext_calls.append(expected[c * 2])
actual_ext_calls.append(actual[c * 2])
expected_action_calls.append(expected[c * 2 + 1])
actual_action_calls.append(actual[c * 2 + 1])
self.assertEqual(expected_action_calls, actual_action_calls)
for exp in expected_ext_calls:
for act in actual_ext_calls:
if exp == act:
break
else:
msg = "Unexpected dibbler external process call."
self.fail(msg)
    def _pd_assert_radvd_calls(self, ri, enable=True):
        """Check that the most recent external process calls (en|dis)able radvd."""
        exp_calls = self._radvd_expected_call_external_process(ri, enable)
        self.assertEqual(exp_calls,
                         self.external_process.mock_calls[-len(exp_calls):])
    def _pd_get_prefixes(self, agent, router, ri,
                         existing_intfs, new_intfs, mock_get_prefix):
        """Run a PD update cycle and check new interfaces get their prefixes.

        ``mock_get_prefix`` is the patched PDDibbler.get_prefix; it is wired
        to serve a deterministic per-interface prefix.
        """
        # First generate the prefixes that will be used for each interface
        prefixes = {}
        expected_pd_update = {}
        expected_calls = []
        for ifno, intf in enumerate(existing_intfs + new_intfs):
            requestor_id = self._pd_get_requestor_id(intf, router, ri)
            prefixes[requestor_id] = "2001:cafe:cafe:%d::/64" % ifno
            if intf in new_intfs:
                # Only the new interfaces trigger a dibbler enable call and
                # a prefix notification.
                subnet_id = (intf['subnets'][0]['id'] if intf['subnets']
                             else None)
                expected_pd_update[subnet_id] = prefixes[requestor_id]
                expected_calls += (
                    self._pd_expected_call_external_process(requestor_id, ri))
        # Implement the prefix update notifier
        # Keep track of the updated prefix
        self.pd_update = {}
        def pd_notifier(context, prefix_update):
            self.pd_update = prefix_update
            for subnet_id, prefix in six.iteritems(prefix_update):
                for intf in new_intfs:
                    for subnet in intf['subnets']:
                        if subnet['id'] == subnet_id:
                            # Update the prefix
                            subnet['cidr'] = prefix
                            break
        # Start the dibbler client
        agent.pd.notifier = pd_notifier
        agent.pd.process_prefix_update()
        # Get the prefix and check that the neutron server is notified
        def get_prefix(pdo):
            key = '%s:%s:%s' % (pdo.router_id, pdo.subnet_id, pdo.ri_ifname)
            return prefixes[key]
        mock_get_prefix.side_effect = get_prefix
        agent.pd.process_prefix_update()
        # Make sure that the updated prefixes are expected
        self._pd_assert_dibbler_calls(expected_calls,
            self.external_process.mock_calls[-len(expected_calls):])
        self.assertEqual(expected_pd_update, self.pd_update)
def _pd_add_gw_interface(self, agent, router, ri):
gw_ifname = ri.get_external_device_name(router['gw_port']['id'])
agent.pd.add_gw_interface(router['id'], gw_ifname)
    @mock.patch.object(dibbler.PDDibbler, 'get_prefix', autospec=True)
    @mock.patch.object(dibbler.os, 'getpid', return_value=1234)
    @mock.patch.object(pd.PrefixDelegation, '_is_lla_active',
                       return_value=True)
    @mock.patch.object(dibbler.os, 'chmod')
    @mock.patch.object(dibbler.shutil, 'rmtree')
    @mock.patch.object(pd.PrefixDelegation, '_get_sync_data')
    def test_pd_add_remove_subnet(self, mock1, mock2, mock3, mock4,
                                  mock_getpid, mock_get_prefix):
        '''Add and remove one pd-enabled subnet.

        The interface is removed by deleting it from the router.
        '''
        # Initial setup
        agent, router, ri = self._pd_setup_agent_router()
        # Create one pd-enabled subnet and add router interface
        intfs = l3_test_common.router_append_pd_enabled_subnet(router)
        ri.process(agent)
        # No client should be started since there is no gateway port
        self.assertFalse(self.external_process.call_count)
        self.assertFalse(mock_get_prefix.call_count)
        # Add the gateway interface
        self._pd_add_gw_interface(agent, router, ri)
        # Get one prefix
        self._pd_get_prefixes(agent, router, ri, [], intfs, mock_get_prefix)
        # Update the router with the new prefix
        ri.process(agent)
        # Check that radvd is started and the router port is configured
        # with the new prefix
        self._pd_assert_radvd_calls(ri)
        # Now remove the interface
        self._pd_remove_interfaces(intfs, agent, router, ri)
    @mock.patch.object(dibbler.PDDibbler, 'get_prefix', autospec=True)
    @mock.patch.object(dibbler.os, 'getpid', return_value=1234)
    @mock.patch.object(pd.PrefixDelegation, '_is_lla_active',
                       return_value=True)
    @mock.patch.object(dibbler.os, 'chmod')
    @mock.patch.object(dibbler.shutil, 'rmtree')
    @mock.patch.object(pd.PrefixDelegation, '_get_sync_data')
    def test_pd_remove_gateway(self, mock1, mock2, mock3, mock4,
                               mock_getpid, mock_get_prefix):
        '''Add one pd-enabled subnet and remove the gateway port.

        Removing the gateway must reset the delegated prefix.
        '''
        # Initial setup
        agent, router, ri = self._pd_setup_agent_router()
        # Create one pd-enabled subnet and add router interface
        intfs = l3_test_common.router_append_pd_enabled_subnet(router)
        ri.process(agent)
        # Add the gateway interface
        self._pd_add_gw_interface(agent, router, ri)
        # Get one prefix
        self._pd_get_prefixes(agent, router, ri, [], intfs, mock_get_prefix)
        # Update the router with the new prefix
        ri.process(agent)
        # Check that radvd is started
        self._pd_assert_radvd_calls(ri)
        # Now remove the gw interface
        self._pd_remove_gw_interface(intfs, agent, router, ri)
        # There will be a router update
        ri.process(agent)
    @mock.patch.object(dibbler.PDDibbler, 'get_prefix', autospec=True)
    @mock.patch.object(dibbler.os, 'getpid', return_value=1234)
    @mock.patch.object(pd.PrefixDelegation, '_is_lla_active',
                       return_value=True)
    @mock.patch.object(dibbler.os, 'chmod')
    @mock.patch.object(dibbler.shutil, 'rmtree')
    @mock.patch.object(pd.PrefixDelegation, '_get_sync_data')
    def test_pd_add_remove_2_subnets(self, mock1, mock2, mock3, mock4,
                                     mock_getpid, mock_get_prefix):
        '''Add and remove two pd-enabled subnets.

        The interfaces are removed by deleting them from the router.
        '''
        # Initial setup
        agent, router, ri = self._pd_setup_agent_router()
        # Create 2 pd-enabled subnets and add router interfaces
        intfs = l3_test_common.router_append_pd_enabled_subnet(router, count=2)
        ri.process(agent)
        # No client should be started
        self.assertFalse(self.external_process.call_count)
        self.assertFalse(mock_get_prefix.call_count)
        # Add the gateway interface
        self._pd_add_gw_interface(agent, router, ri)
        # Get prefixes
        self._pd_get_prefixes(agent, router, ri, [], intfs, mock_get_prefix)
        # Update the router with the new prefix
        ri.process(agent)
        # Check that radvd is started and the router port is configured
        # with the new prefix
        self._pd_assert_radvd_calls(ri)
        # Now remove the interface
        self._pd_remove_interfaces(intfs, agent, router, ri)
    @mock.patch.object(dibbler.PDDibbler, 'get_prefix', autospec=True)
    @mock.patch.object(dibbler.os, 'getpid', return_value=1234)
    @mock.patch.object(pd.PrefixDelegation, '_is_lla_active',
                       return_value=True)
    @mock.patch.object(dibbler.os, 'chmod')
    @mock.patch.object(dibbler.shutil, 'rmtree')
    @mock.patch.object(pd.PrefixDelegation, '_get_sync_data')
    def test_pd_remove_gateway_2_subnets(self, mock1, mock2, mock3, mock4,
                                         mock_getpid, mock_get_prefix):
        '''Add one pd-enabled subnet, followed by adding another one.

        Then remove the gateway port and check both prefixes are removed.
        '''
        # Initial setup
        agent, router, ri = self._pd_setup_agent_router()
        # Add the gateway interface
        self._pd_add_gw_interface(agent, router, ri)
        # Create 1 pd-enabled subnet and add router interface
        intfs = l3_test_common.router_append_pd_enabled_subnet(router, count=1)
        ri.process(agent)
        # Get prefixes
        self._pd_get_prefixes(agent, router, ri, [], intfs, mock_get_prefix)
        # Update the router with the new prefix
        ri.process(agent)
        # Check that radvd is started
        self._pd_assert_radvd_calls(ri)
        # Now add another interface
        # Create one pd-enabled subnet and add router interface
        intfs1 = l3_test_common.router_append_pd_enabled_subnet(router,
                                                                count=1)
        ri.process(agent)
        # Get prefixes
        self._pd_get_prefixes(agent, router, ri, intfs,
                              intfs1, mock_get_prefix)
        # Update the router with the new prefix
        ri.process(agent)
        # Check that radvd is notified for the new prefix
        self._pd_assert_radvd_calls(ri)
        # Now remove the gw interface
        self._pd_remove_gw_interface(intfs + intfs1, agent, router, ri)
        ri.process(agent)
    def _verify_address_scopes_iptables_rule(self, mock_iptables_manager):
        """Check the filter/mangle chains and rules set up for address scopes."""
        filter_calls = [mock.call.add_chain('scope'),
                        mock.call.add_rule('FORWARD', '-j $scope')]
        v6_mangle_calls = [mock.call.add_chain('scope'),
                           mock.call.add_rule('PREROUTING', '-j $scope'),
                           mock.call.add_rule(
                               'PREROUTING',
                               '-m connmark ! --mark 0x0/0xffff0000 '
                               '-j CONNMARK --restore-mark '
                               '--nfmask 0xffff0000 --ctmask 0xffff0000')]
        # IPv4 additionally gets the floating-ip marking chains/rules.
        v4_mangle_calls = (v6_mangle_calls +
                           [mock.call.add_chain('floatingip'),
                            mock.call.add_chain('float-snat'),
                            mock.call.add_rule('PREROUTING', '-j $floatingip'),
                            mock.call.add_rule(
                                'float-snat',
                                '-m connmark --mark 0x0/0xffff0000 '
                                '-j CONNMARK --save-mark '
                                '--nfmask 0xffff0000 --ctmask 0xffff0000')])
        mock_iptables_manager.ipv4['filter'].assert_has_calls(filter_calls)
        mock_iptables_manager.ipv6['filter'].assert_has_calls(filter_calls)
        mock_iptables_manager.ipv4['mangle'].assert_has_calls(v4_mangle_calls,
                                                              any_order=True)
        mock_iptables_manager.ipv6['mangle'].assert_has_calls(v6_mangle_calls,
                                                              any_order=True)
def test_initialize_address_scope_iptables_rules(self):
id = _uuid()
with mock.patch('neutron.agent.linux.iptables_manager.'
'IptablesManager'):
ri = l3router.RouterInfo(id, {}, **self.ri_kwargs)
self._verify_address_scopes_iptables_rule(ri.iptables_manager)
    def test_initialize_address_scope_iptables_rules_dvr(self):
        """A DVR edge router applies address-scope rules to both the router
        and the snat iptables managers."""
        router = l3_test_common.prepare_router_data()
        with mock.patch('neutron.agent.linux.iptables_manager.'
                        'IptablesManager'):
            ri = dvr_router.DvrEdgeRouter(mock.Mock(),
                                          HOSTNAME,
                                          router['id'],
                                          router,
                                          **self.ri_kwargs)
            self._verify_address_scopes_iptables_rule(ri.iptables_manager)
            interface_name, ex_gw_port = l3_test_common.prepare_ext_gw_test(
                self, ri)
            router['gw_port_host'] = ri.host
            ri._external_gateway_added = mock.Mock()
            # Creating the DVR gateway must set up the snat manager too.
            ri._create_dvr_gateway(ex_gw_port, interface_name)
            self._verify_address_scopes_iptables_rule(
                ri.snat_iptables_manager)
| {
"content_hash": "9dc1fcd0bf7b01e18c35f7debeee5f38",
"timestamp": "",
"source": "github",
"line_count": 2844,
"max_line_length": 79,
"avg_line_length": 46.13045007032349,
"alnum_prop": 0.5627577270475247,
"repo_name": "sebrandon1/neutron",
"id": "e0ba242648b6440ed50278ac2435d6d80fbffa29",
"size": "131823",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "neutron/tests/unit/agent/l3/test_agent.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "1047"
},
{
"name": "Python",
"bytes": "9903006"
},
{
"name": "Shell",
"bytes": "14339"
}
],
"symlink_target": ""
} |
import unittest
import json
from math import sqrt
import msgpackrpc
from jubatus.recommender.client import Recommender
from jubatus.recommender.types import *
from jubatus_test.test_util import TestUtil
from jubatus.common import Datum
host = "127.0.0.1"
port = 21003
timeout = 10
class RecommenderTest(unittest.TestCase):
    """Smoke tests for the jubatus Recommender RPC client.

    ``setUp`` forks a recommender server configured with the
    ``inverted_index`` method and connects a client to it; ``tearDown``
    closes the client and kills the server process.
    """

    def setUp(self):
        """Write the server config, fork the server, connect the client."""
        # Minimal inverted_index config: every string key gets binary
        # weights, every numeric key is used as-is.
        self.config = {
            "method": "inverted_index",
            "converter": {
                "string_filter_types": {},
                "string_filter_rules": [],
                "num_filter_types": {},
                "num_filter_rules": [],
                "string_types": {},
                "string_rules": [{"key": "*", "type": "str", "sample_weight": "bin", "global_weight": "bin"}],
                "num_types": {},
                "num_rules": [{"key": "*", "type": "num"}]
            },
            "parameter": {}
        }

        TestUtil.write_file('config_recommender.json', json.dumps(self.config))
        self.srv = TestUtil.fork_process('recommender', port, 'config_recommender.json')
        try:
            self.cli = Recommender(host, port, "name")
        except:
            # Connecting failed: don't leak the forked server process.
            TestUtil.kill_process(self.srv)
            raise

    def tearDown(self):
        if self.cli:
            self.cli.get_client().close()
        TestUtil.kill_process(self.srv)

    def test_get_client(self):
        self.assertTrue(isinstance(self.cli.get_client(), msgpackrpc.client.Client))

    def test_get_config(self):
        # Compare canonicalized JSON so key ordering does not matter.
        config = self.cli.get_config()
        self.assertEqual(json.dumps(json.loads(config), sort_keys=True), json.dumps(self.config, sort_keys=True))

    def test_complete_row(self):
        self.cli.clear_row("complete_row")
        d = Datum({"skey1": "val1", "skey2": "val2", "nkey1": 1.0, "nkey2": 2.0})
        self.cli.update_row("complete_row", d)
        # Smoke-check both completion entry points (results not asserted).
        self.cli.complete_row_from_id("complete_row")
        self.cli.complete_row_from_datum(d)

    def test_similar_row(self):
        self.cli.clear_row("similar_row")
        d = Datum({"skey1": "val1", "skey2": "val2", "nkey1": 1.0, "nkey2": 2.0})
        self.cli.update_row("similar_row", d)
        # Smoke-check both similarity entry points (results not asserted).
        self.cli.similar_row_from_id("similar_row", 10)
        self.cli.similar_row_from_datum(d, 10)

    def test_decode_row(self):
        self.cli.clear_row("decode_row")
        d = Datum({"skey1": "val1", "skey2": "val2", "nkey1": 1.0, "nkey2": 2.0})
        self.cli.update_row("decode_row", d)
        decoded_row = self.cli.decode_row("decode_row")
        self.assertEqual(json.dumps(d.string_values), json.dumps(decoded_row.string_values))
        self.assertEqual(json.dumps(d.num_values), json.dumps(decoded_row.num_values))

    def test_get_row(self):
        self.cli.clear()
        d = Datum({"skey1": "val1", "skey2": "val2", "nkey1": 1.0, "nkey2": 2.0})
        self.cli.update_row("get_row", d)
        row_names = self.cli.get_all_rows()
        self.assertEqual(row_names, ["get_row"])

    def test_clear(self):
        # NOTE: this method used to be defined twice with an identical
        # body; the shadowing duplicate has been removed.
        self.cli.clear()

    def test_calcs(self):
        d = Datum({"skey1": "val1", "skey2": "val2", "nkey1": 1.0, "nkey2": 2.0})
        self.assertAlmostEqual(self.cli.calc_similarity(d, d), 1, 6)
        # Two binary-weighted string features contribute 1 each, plus the
        # two numeric values 1.0 and 2.0.
        self.assertAlmostEqual(self.cli.calc_l2norm(d), sqrt(1*1 + 1*1 + 1*1 + 2*2), 6)

    def test_save(self):
        self.assertEqual(len(self.cli.save("recommender.save_test.model")), 1)

    def test_load(self):
        model_name = "recommender.load_test.model"
        self.cli.save(model_name)
        self.assertEqual(self.cli.load(model_name), True)

    def test_get_status(self):
        self.cli.get_status()
if __name__ == '__main__':
    # Allow running this module directly, without an external test runner.
    test_suite = unittest.TestLoader().loadTestsFromTestCase(RecommenderTest)
    unittest.TextTestRunner().run(test_suite)
| {
"content_hash": "baa8ae6e03bc7ab4fd4c3a9ede2e8b22",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 113,
"avg_line_length": 35.94392523364486,
"alnum_prop": 0.5834633385335414,
"repo_name": "jubatus/jubatus-python-client",
"id": "afe0cc99b3a55086e035e7907848a984819b0a41",
"size": "3869",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/jubatus_test/recommender/test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "56266"
},
{
"name": "Shell",
"bytes": "1227"
}
],
"symlink_target": ""
} |
from paddle.trainer.PyDataProvider2 import *
# Define a py data provider
@provider(input_types={
    'pixel': dense_vector(28 * 28),
    'label': integer_value(10)
})
def process(settings, filename):  # settings is not used currently.
    """Yield MNIST samples from the raw IDX files next to *filename*.

    *filename* is the dataset prefix (e.g. ``.../train`` or ``.../t10k``);
    the provider reads ``<prefix>-images-idx3-ubyte`` and
    ``<prefix>-labels-idx1-ubyte`` and yields one dict per sample with the
    784 pixel intensities scaled to [0, 1] plus the integer label.
    """
    image_path = filename + "-images-idx3-ubyte"
    label_path = filename + "-labels-idx1-ubyte"
    # ``with`` guarantees both files are closed even if the generator is
    # abandoned before exhaustion; the original explicit close() calls were
    # only reached after a full pass, leaking handles otherwise.
    with open(image_path, "rb") as images, open(label_path, "rb") as labels:
        images.read(16)  # skip IDX image header (magic, count, rows, cols)
        labels.read(8)   # skip IDX label header (magic, count)
        # Define number of samples for train/test
        n = 60000 if "train" in filename else 10000
        for _ in range(n):
            label = ord(labels.read(1))
            # Read the whole 28x28 image in one call instead of 784
            # single-byte reads; bytearray yields ints on Python 2 and 3.
            pixels = [byte / 255.0 for byte in bytearray(images.read(28 * 28))]
            yield {"pixel": pixels, 'label': label}
| {
"content_hash": "ef2bb40f128d420149a20dccac4d661a",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 67,
"avg_line_length": 24.0625,
"alnum_prop": 0.574025974025974,
"repo_name": "zuowang/Paddle",
"id": "32af29730a7365df1a98fe54a2edf8850ee93e8d",
"size": "770",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "demo/mnist/mnist_provider.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "210539"
},
{
"name": "C++",
"bytes": "2694261"
},
{
"name": "CMake",
"bytes": "90483"
},
{
"name": "Cuda",
"bytes": "409839"
},
{
"name": "M4",
"bytes": "39963"
},
{
"name": "Perl",
"bytes": "11412"
},
{
"name": "Python",
"bytes": "826795"
},
{
"name": "Shell",
"bytes": "59063"
}
],
"symlink_target": ""
} |
import pytest
@pytest.mark.slow
def test_transactions(mambuapi):
    """If any loan exists, its transaction list can be fetched."""
    all_loans = mambuapi.get_loan(None)
    if all_loans:
        first_loan_id = all_loans[0]['id']
        assert mambuapi.get_transactions(first_loan_id) is not None
@pytest.mark.slow
def test_create_transaction(mambuapi, unapproved_loan):
    """Approving a pending loan moves it to the APPROVED state."""
    result = mambuapi.approve(unapproved_loan['id'])
    assert result['accountState'] == 'APPROVED'
    # Amounts come back as strings; '1200' presumably matches the
    # unapproved_loan fixture's amount -- confirm against the conftest.
    assert result['loanAmount'] == '1200'
@pytest.mark.slow
def test_standalone_types(mambuapi, loan_dict, unapproved_loan):
    """Walk a loan through approve/undo/approve/disburse/lock/unlock."""
    loan_id = unapproved_loan['id']
    amount = loan_dict['tranches'][0]['amount']
    result = mambuapi.approve(loan_id)
    assert result['accountState'] == 'APPROVED'
    result = mambuapi.undo_approval(loan_id)
    assert result['accountState'] == 'PENDING_APPROVAL'
    result = mambuapi.approve(loan_id)
    assert result['accountState'] == 'APPROVED'
    result = mambuapi.disburse(loan_id)
    # Disbursing the single tranche pays out the full principal.
    assert result['balance'] == result['amount'] == amount
    assert result['principalPaid'] == amount
    assert result['type'] == 'DISBURSMENT'
    result = mambuapi.lock(loan_id)
    assert result[0]['type'] == 'INTEREST_LOCKED'
    result = mambuapi.unlock(loan_id)
    assert result[0]['type'] == 'INTEREST_UNLOCKED'
@pytest.mark.slow
def test_disburse_apply_fee(mambuapi, approved_loan):
    """Disbursement updates principal balance; apply_fee updates fees due."""
    loan_id = approved_loan['id']
    _fee = 10
    result = mambuapi.disburse(loan_id)
    assert result['type'] == 'DISBURSMENT'
    loan_after = mambuapi.get_loan(loan_id)
    # Principal balance must grow by the disbursed balance.
    net = float(approved_loan['principalBalance']) + float(result['balance'])
    assert net == float(loan_after['principalBalance'])
    result = mambuapi.apply_fee(loan_id, _fee)
    assert result['amount'] == str(_fee)
    loan_after_fee = mambuapi.get_loan(loan_id)
    # Fees due must grow by exactly the applied fee.
    net = float(approved_loan['feesDue']) + _fee
    assert net == float(loan_after_fee['feesDue'])
@pytest.mark.slow
def test_disburse_with_fee(mambuapi, approved_loan):
    """disburse_with_fee updates both principal balance and fees due."""
    loan_id = approved_loan['id']
    datestr = approved_loan['tranches'][0]['expectedDisbursementDate']
    fee = 4.00
    result = mambuapi.disburse_with_fee(loan_id, fee, datestr)
    loan_after = mambuapi.get_loan(loan_id)
    # result[0] carries the disbursement transaction's balance.
    assert float(approved_loan['principalBalance']
                 ) + float(result[0]['balance']) == float(
        loan_after['principalBalance'])
    assert float(approved_loan['feesDue']
                 ) + fee == float(loan_after['feesDue'])
@pytest.mark.slow
def test_disburse_today(mambuapi, approved_loan_start_today):
    """Disbursing a loan that starts today updates its principal balance."""
    loan = approved_loan_start_today
    loan_id = loan['id']
    result = mambuapi.disburse(loan_id)
    loan_after = mambuapi.get_loan(loan_id)
    net = float(loan['principalBalance']) + float(result['balance'])
    assert net == float(loan_after['principalBalance'])
@pytest.mark.slow
def test_disburse_set_repayment(mambuapi, approved_loan):
    """Disbursing with a first repayment date schedules a repayment then."""
    loan_id = approved_loan['id']
    mambuapi.disburse(loan_id, first_repayment_date='2015-09-25')
    due_loans = mambuapi.get_repayments_due_on_date('2015-09-25')
    due_ids = [entry['id'] for entry in due_loans]
    assert loan_id in due_ids
| {
"content_hash": "b9e1d1f7417a5e0fde1d64db3beb87b0",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 77,
"avg_line_length": 36.55421686746988,
"alnum_prop": 0.6733684904416611,
"repo_name": "paze-me/mambu",
"id": "83b4bb0da8ae6195504f91b59a32c55dc8fbb7a3",
"size": "3034",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_loan_transaction.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "59018"
}
],
"symlink_target": ""
} |
"""
coop_edition template tags
used for magic form
"""
from six import string_types
from django import template
from django.forms.formsets import BaseFormSet
from django.template import Context
from django.template.base import TextNode, VariableNode, FilterExpression
from django.template.context_processors import csrf
from django.template.loader import get_template, TemplateDoesNotExist
from django.template.loader_tags import IncludeNode
from django.utils.translation import ugettext_lazy as _
from django.utils.safestring import mark_safe
from coop_html_editor.templatetags.html_editor_utils import InlineHtmlEditNode, InlineHtmlMultipleEditNode
from coop_cms.models import PieceOfHtml, BaseArticle, Fragment, FragmentType, FragmentFilter
from coop_cms.moves import make_context
from coop_cms.settings import get_article_class
from coop_cms.utils import get_text_from_template, slugify
register = template.Library()
class DummyEngine(object):
    """Used for monkey patching Context"""
    # Minimal engine attributes a django template Context is read with.
    debug = False
    string_if_invalid = ''
class DummyEditableForm(object):
    """Used for monkey patching Context"""
    # Stand-in 'form' flagged inline-editable, so templates rendered with it
    # behave as if in edition mode.
    is_inline_editable = True
class PieceOfHtmlEditNode(InlineHtmlEditNode):
    """Template node for editing a PieceOfHtml"""
    def render(self, context):
        """convert to html, enabling inline edit when the context form is editable"""
        form = context.get('form', None) or context.get('formset', None)
        if form:
            # Expose the edit flag at the root context dict so the parent
            # renderer can pick it up.
            context.dicts[0]['inline_html_edit'] = _is_inline_editable(form)
        return super(PieceOfHtmlEditNode, self).render(context)
@register.tag
def coop_piece_of_html(parser, token):
    """Template tag: render an editable PieceOfHtml identified by its div id.

    Usage: {% coop_piece_of_html <div_id> [read-only] [extra_id=<value>] %}
    """
    args = token.split_contents()
    div_id = args[1]
    extra_id = ""
    # ``read_only`` only ever depended on args[2]; compute it once instead
    # of re-assigning the same value on every loop iteration as before.
    # NOTE(review): the intent may have been to accept "read-only" at any
    # argument position -- confirm before changing that behavior.
    read_only = len(args) > 2 and args[2] == "read-only"
    for item in args[2:]:
        if 0 == item.find("extra_id="):
            extra_id = item.replace("extra_id=", '')
    lookup_args = {'div_id': div_id}
    if extra_id:
        lookup_args.update({'extra_id': extra_id})
    return PieceOfHtmlEditNode(PieceOfHtml, lookup_args, 'content', read_only)
class FragmentEditNode(InlineHtmlMultipleEditNode):
    """Template Node for Fragment edition"""
    def __init__(self, lookup, kwargs=None):
        # lookup: {'name': <fragment type>, 'extra_id': <optional filter>}
        super(FragmentEditNode, self).__init__(Fragment, lookup, 'content')
        self._edit_mode = False
        self.fragment_filter = None
        self.kwargs = kwargs or {}
        self.fragment_type = None
    def _get_objects(self, lookup):
        """get the fragments of the requested type (optionally filtered)"""
        self.fragment_type = FragmentType.objects.get_or_create(name=lookup['name'])[0]
        queryset = Fragment.objects.filter(type=self.fragment_type)
        if 'extra_id' in lookup:
            self.fragment_filter = FragmentFilter.objects.get_or_create(extra_id=lookup['extra_id'])[0]
            queryset = queryset.filter(filter=self.fragment_filter)
        return queryset
    def _get_object_lookup(self, obj):
        """get object lookup"""
        return {"id": obj.id}
    def _pre_object_render(self, obj):
        """call before rendering an object"""
        return '<div class="coop-fragment {0}" rel="{1}">'.format(obj.css_class, obj.id)
    def _post_object_render(self, obj):
        """call after rendering an object"""
        return '</div>'
    def _object_render(self, idx, obj, context):
        """convert object to html, using the custom template if one was given"""
        value = getattr(obj, self._field_name)
        template_name = self.kwargs.get('template_name', '')
        if template_name:
            # Render the fragment through the user-supplied template.
            template_name = self._resolve_arg(template_name, context)
            template_ = get_template(template_name)
            objects_count = self.get_objects_to_render_count()
            object_content = template_.render(
                make_context(
                    None,
                    {
                        'css_class': obj.css_class,
                        'name': obj.name,
                        'slug': slugify(obj.name),
                        'id': obj.id,
                        'index': idx,
                        'objects_count': objects_count,
                        'fragment': self._render_value(context, self._get_object_lookup(obj), value),
                        # A dummy editable form switches the inner template
                        # to edition mode when we are editing.
                        'form': DummyEditableForm() if self._edit_mode else None,
                    },
                )
            )
        else:
            # Default rendering: wrap the fragment content in a marker div.
            object_content = self._pre_object_render(obj)
            object_content += self._render_value(context, self._get_object_lookup(obj), value)
            object_content += self._post_object_render(obj)
        return object_content
    def render(self, context):
        """convert to html"""
        self._edit_mode = False
        form = context.get('form', None) or context.get('formset', None)
        if getattr(form, 'is_inline_editable', False):
            context.dicts[0]['inline_html_edit'] = True
            self._edit_mode = True
        html = super(FragmentEditNode, self).render(context)
        filter_id = self.fragment_filter.id if self.fragment_filter else ""
        if self._edit_mode:
            # Emit a hidden marker div so the editor JS knows the fragment
            # type (and optional filter) of this zone.
            html_layout = '<div style="display: none; visibility: hidden;" class="coop-fragment-type" '
            html_layout += 'rel="{0}" data-filter="{2}">{1}</div>'
            pre_html = html_layout.format(
                self.fragment_type.id, self.fragment_type.name, filter_id
            )
        else:
            pre_html = ''
        return pre_html + html
@register.tag
def coop_fragments(parser, token):
    """Template tag: render every fragment of a given type.

    Usage: {% coop_fragments <type_name> [extra_id] [key=value ...] %}
    """
    args = token.split_contents()
    lookup = {'name': args[1]}
    extra_id_found = False
    if len(args) > 2:
        args2 = args[2]
        if args2.find("=") < 0:
            # Positional form: the third argument is the filter extra_id.
            lookup["extra_id"] = args2
            extra_id_found = True
    kwargs = {}
    # NOTE(review): with a positional extra_id the scan starts at index 2
    # (the extra_id itself, skipped by the '=' test below); otherwise it
    # starts at 1 and also covers the tag name -- confirm this is intended.
    start_index = 2 if extra_id_found else 1
    for arg in args[start_index:]:
        if arg.find("=") > 0:
            # NOTE(review): values containing '=' would make this split
            # raise ValueError.
            key, value = arg.split('=')
            if key == "extra_id" and not extra_id_found:
                lookup["extra_id"] = value
                extra_id_found = True
            else:
                kwargs[key] = value
    return FragmentEditNode(lookup, kwargs)
class ArticleSummaryEditNode(InlineHtmlEditNode):
    """edit the article summary"""
    def render(self, context):
        """to html, enabling inline edit when the context form allows it"""
        form = context.get('form', None)
        if form and getattr(form, 'is_inline_editable', False):
            context.dicts[0]['inline_html_edit'] = True
        return super(ArticleSummaryEditNode, self).render(context)
@register.tag
def article_summary_edit(parser, token):
    """Template tag rendering an article's summary as an editable field."""
    target_id = token.split_contents()[1]
    model = get_article_class()
    return ArticleSummaryEditNode(model, {'id': target_id}, 'summary')
class ArticleTitleNode(template.Node):
    """Render the article title suffixed with its edition/publication state."""

    def render(self, context):
        """to html"""
        # A bound form in the context means the page is in edition mode.
        is_edition_mode = context.get('form', None) is not None
        article = context.get('article')
        return "{0}{1}{2}{3}".format(
            article.title,
            _(" [EDITION]") if is_edition_mode else "",
            # BUG FIX: the draft marker was previously shown when the
            # article *was* published (== PUBLISHED); it must mark drafts.
            _(" [DRAFT]") if article.publication == BaseArticle.DRAFT else "",
            _(" [ARCHIVED]") if article.publication == BaseArticle.ARCHIVED else "",
        )
@register.tag
def article_title(parser, token):
    """article title tag: renders the title with its status suffixes"""
    return ArticleTitleNode()
class CmsFormMediaNode(template.Node):
    """Render the form/formset media (CSS + JS) needed by the inline editor."""

    def render(self, context):
        form = context.get('form', None)
        formset = context.get('formset', None)
        if form or formset:
            template_ = template.Template("{{form.media}}")
            html = template_.render(template.Context({'form': form or formset}))
            # django 1.5 fix: " is escaped as &quot; and causes the script
            # tags required by the Aloha editor to be broken.
            # (This literal was previously corrupted by an HTML-entity
            # decode of the source file; restored to the original intent.)
            return html.replace("&quot;", '"')
        else:
            return ""
@register.tag
def cms_form_media(parser, token):
"""generate html for getting required js and css"""
return CmsFormMediaNode()
def _extract_if_node_args(parser, token):
"""utility for if else endif type of tags"""
nodelist_true = parser.parse(('else', 'endif'))
token = parser.next_token()
if token.contents == 'else':
nodelist_false = parser.parse(('endif',))
parser.delete_first_token()
else:
nodelist_false = template.NodeList()
return nodelist_true, nodelist_false
def _is_inline_editable(form):
"""
:param form: form or formset
:return: True if edit mode, False if not
"""
if isinstance(form, BaseFormSet):
for form_item in form:
if getattr(form_item, 'is_inline_editable', False):
return True
else:
if getattr(form, 'is_inline_editable', False):
return True
return False
class IfCmsEditionNode(template.Node):
"""Do something if edition mode"""
def __init__(self, nodelist_true, nodelist_false):
self.nodelist_true = nodelist_true
self.nodelist_false = nodelist_false
def __iter__(self):
for node in self.nodelist_true:
yield node
for node in self.nodelist_false:
yield node
def _check_condition(self, context):
"""check condition of the if"""
form = context.get('form', None) or context.get('formset', None)
if form:
return _is_inline_editable(form)
return False
def render(self, context):
"""to html"""
if self._check_condition(context):
return self.nodelist_true.render(context)
else:
return self.nodelist_false.render(context)
@register.tag
def if_cms_edition(parser, token):
"""Do something if edition mode"""
nodelist_true, nodelist_false = _extract_if_node_args(parser, token)
return IfCmsEditionNode(nodelist_true, nodelist_false)
class IfNotCmsEditionNode(IfCmsEditionNode):
"""Do something if not edition mode"""
def _check_condition(self, context):
return not super(IfNotCmsEditionNode, self)._check_condition(context)
@register.tag
def if_not_cms_edition(parser, token):
"""Do something if not edition mode"""
nodelist_true, nodelist_false = _extract_if_node_args(parser, token)
return IfNotCmsEditionNode(nodelist_true, nodelist_false)
CMS_FORM_TEMPLATE = """
<form id="cms_form" enctype="multipart/form-data" method="POST" action="{{post_url}}">{% csrf_token %}
{% include "coop_cms/_form_error.html" with errs=form.non_field_errors %}
{{inner}} <input type="submit" style="display: none"> </form>
"""
class SafeWrapper(object):
"""This manages display of object in edit or non-edit context"""
def __init__(self, wrapped, logo_size=None, logo_crop=None):
self._wrapped = wrapped
self._logo_size = logo_size
self._logo_crop = logo_crop
def __getattr__(self, field):
value = getattr(self._wrapped, field)
if field == 'logo':
src = getattr(self._wrapped, 'logo_thumbnail')(False, self._logo_size, self._logo_crop)
if src:
try:
template_ = get_template("coop_cms/widgets/_img_logo.html")
value = template_.render(
make_context(
None,
{
'url': src.url,
'extra_classes': get_text_from_template("coop_cms/widgets/_imageedit_cssclass.html")
}
)
)
except TemplateDoesNotExist:
value = '<img class="logo" src="{0}" />'.format(src.url)
else:
value = ''
return mark_safe(value)
elif callable(value):
try:
return value()
except KeyError:
pass
else:
if isinstance(value, string_types):
return mark_safe(value)
return value
class FormWrapper(object):
"""This manages display of object in edit or non-edit context"""
def __init__(self, form, obj, logo_size=None, logo_crop=None):
self._form = form
self._obj = obj
if logo_size:
self._form.set_logo_size(logo_size, logo_crop)
def __getitem__(self, field, logo_size=None):
"""get attribute"""
if field in self._form.fields.keys():
template_ = template.Template("""
{%% with form.%s.errors as errs %%}
{%% include "coop_cms/_form_error.html" %%}{%% endwith %%}{{form.%s}}
""" % (field, field))
return template_.render(template.Context({'form': self._form}))
else:
return getattr(self._obj, field)
class CmsEditNode(template.Node):
"""cms_edit -> manages edition of object"""
def __init__(self, nodelist_content, var_name, logo_size=None, logo_crop=None):
self.var_name = var_name
self.nodelist_content = nodelist_content
self._logo_size = logo_size.strip("'").strip('"') if logo_size else None
self._logo_crop = logo_crop.strip("'").strip('"') if logo_crop else None
self._render_logo_size = self._logo_size and (self._logo_size == logo_size)
self._render_logo_crop = self._logo_crop and (self._logo_crop == logo_crop)
self.post_url = ""
def __iter__(self):
for node in self.nodelist_content:
yield node
def _render_nodes(self, context, inner_context, safe_context):
"""Replace nested nodes with proper content"""
managed_node_types = [
TextNode,
template.defaulttags.IfNode,
IfCmsEditionNode,
IfNotCmsEditionNode,
template.defaulttags.ForNode,
]
nodes_content = ""
for node in self.nodelist_content:
if any([isinstance(node, node_type) for node_type in managed_node_types]):
local_context = Context(safe_context)
if hasattr(context, 'template'):
local_context.template = context.template
content = node.render(local_context)
elif node.__class__.__name__ == 'MediaListNode':
content = node.render(context)
safe_context[node.var_name] = context.get(node.var_name)
inner_context[node.var_name] = context.get(node.var_name)
elif node.__class__.__name__ == 'AssignmentNode':
content = node.render(context)
safe_context[node.target_var] = context.get(node.target_var)
inner_context[node.target_var] = context.get(node.target_var)
elif isinstance(node, IncludeNode):
# monkey patching for django 1.8
if isinstance(node.template, FilterExpression):
template_name = node.template.resolve(context)
node.template = get_template(template_name)
context_dict = inner_context.copy()
if node.extra_context:
for filter_expression in node.extra_context:
value = node.extra_context[filter_expression].resolve(context)
context_dict[filter_expression] = value
the_context = make_context(None, context_dict)
content = node.template.render(the_context)
elif isinstance(node, template.loader_tags.BlockNode):
safe_context_var = Context(safe_context)
safe_context_var.render_context['block_context'] = context.render_context.get('block_context', None)
safe_context_var.template = getattr(node, 'template', None) or template.Template("")
#safe_context_var.template.engine = DummyEngine()
content = node.render(safe_context_var)
elif isinstance(node, VariableNode):
if node.filter_expression.filters:
content = node.render(Context(context))
else:
the_context = Context(safe_context)
the_context.template = getattr(node, 'template', None) or template.Template("")
#the_context.template.engine = DummyEngine()
content = node.render(the_context)
else:
# monkey patching for django 1.8+
the_context = Context(inner_context)
the_context.template = getattr(node, 'template', None) or template.Template("")
#the_context.template.engine = DummyEngine()
content = node.render(the_context)
nodes_content += content
return nodes_content
def _get_obj(self, context):
"""return the edited object if exists"""
return context.get(self.var_name, None) if self.var_name else None
def _make_inner_context(self, context):
"""the context used for rendering the templatetag content"""
inner_context = {}
for ctx_value in context.dicts:
inner_context.update(ctx_value)
obj = self._get_obj(context)
if self.var_name:
inner_context[self.var_name] = obj
formset = context.get('formset', None)
objects = context.get('objects', None)
if formset:
inner_context['formset'] = formset
if objects is not None:
inner_context['objects'] = objects
return inner_context
def _make_outer_context(self, context):
"""the context used for rendering the whole page"""
obj = self._get_obj(context)
self.post_url = obj.get_edit_url() if obj else context.get('coop_cms_edit_url')
outer_context = {'post_url': self.post_url}
return outer_context
def render(self, context):
"""to html"""
request = context.get('request')
if self._render_logo_size:
self._logo_size = context.get(self._logo_size, None)
if self._render_logo_crop:
self._logo_crop = context.get(self._logo_crop, None)
# the context used for rendering the templatetag content
inner_context = self._make_inner_context(context)
# the context used for rendering the whole page
outer_context = self._make_outer_context(context)
# copy of the inner_context to be modified
safe_context = inner_context.copy()
form = context.get('form', None)
obj = self._get_obj(context)
formset = context.get('formset', None)
objects = context.get('objects', None)
is_inline_editable = False
if form:
is_inline_editable = _is_inline_editable(form)
elif formset:
is_inline_editable = _is_inline_editable(formset)
if is_inline_editable:
node_template = template.Template(CMS_FORM_TEMPLATE)
if form:
safe_context[self.var_name] = FormWrapper(
form, obj, logo_size=self._logo_size, logo_crop=self._logo_crop
)
else:
safe_context['objects'] = [
FormWrapper(form_, obj_, logo_size=self._logo_size, logo_crop=self._logo_crop)
for (form_, obj_) in zip(formset, objects)
]
outer_context.update(csrf(request))
else:
node_template = template.Template("{{inner|safe}}")
if obj:
safe_context[self.var_name] = SafeWrapper(
obj, logo_size=self._logo_size, logo_crop=self._logo_crop)
else:
safe_context['objects'] = [
SafeWrapper(obj_, logo_size=self._logo_size, logo_crop=self._logo_crop) for obj_ in objects
]
inner_value = self._render_nodes(context, inner_context, safe_context)
outer_context['inner'] = mark_safe(inner_value) if (form or formset) else inner_value
return node_template.render(Context(outer_context))
@register.tag
def cms_edit(parser, token):
"""template tag"""
args = token.split_contents()[1:]
data = {}
var_name = args[0] if len(args) else ''
for arg in args[1:]:
key, value = arg.split('=')
data[key] = value
nodelist = parser.parse(('end_cms_edit', ))
parser.next_token()
return CmsEditNode(nodelist, var_name, **data)
class CmsNoSpace(template.Node):
"""remove space"""
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
html = self.nodelist.render(context).strip()
return ' '.join(html.split())
@register.tag
def cms_nospace(parser, token):
"""remove spaces"""
nodelist = parser.parse(('end_cms_nospace', ))
parser.delete_first_token()
return CmsNoSpace(nodelist)
| {
"content_hash": "d65aa405fc609965404a8d69f9cfc288",
"timestamp": "",
"source": "github",
"line_count": 588,
"max_line_length": 116,
"avg_line_length": 35.81802721088435,
"alnum_prop": 0.5831631926309292,
"repo_name": "ljean/coop_cms",
"id": "f21fb1dabe40ce76872752a512f40d50c3c92a4b",
"size": "21085",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "coop_cms/templatetags/coop_edition.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "54725"
},
{
"name": "HTML",
"bytes": "98489"
},
{
"name": "JavaScript",
"bytes": "296202"
},
{
"name": "Python",
"bytes": "985749"
}
],
"symlink_target": ""
} |
"""
Tests for projectq.backends._printer.py.
"""
import io
import pytest
from projectq import MainEngine
from projectq.backends import _printer
from projectq.cengines import DummyEngine, InstructionFilter, NotYetMeasuredError
from projectq.meta import LogicalQubitIDTag
from projectq.ops import NOT, Allocate, Command, H, Measure, T
from projectq.types import WeakQubitRef
def test_command_printer_is_available():
inline_cmd_printer = _printer.CommandPrinter()
cmd_printer = _printer.CommandPrinter()
def available_cmd(self, cmd):
return cmd.gate == H
filter = InstructionFilter(available_cmd)
eng = MainEngine(backend=cmd_printer, engine_list=[inline_cmd_printer, filter])
qubit = eng.allocate_qubit()
cmd0 = Command(eng, H, (qubit,))
cmd1 = Command(eng, T, (qubit,))
assert inline_cmd_printer.is_available(cmd0)
assert not inline_cmd_printer.is_available(cmd1)
assert cmd_printer.is_available(cmd0)
assert cmd_printer.is_available(cmd1)
def test_command_printer_accept_input(monkeypatch):
cmd_printer = _printer.CommandPrinter()
eng = MainEngine(backend=cmd_printer, engine_list=[DummyEngine()])
number_input = io.StringIO('1\n')
monkeypatch.setattr('sys.stdin', number_input)
qubit = eng.allocate_qubit()
Measure | qubit
assert int(qubit) == 1
number_input = io.StringIO('0\n')
monkeypatch.setattr('sys.stdin', number_input)
qubit = eng.allocate_qubit()
NOT | qubit
Measure | qubit
assert int(qubit) == 0
def test_command_printer_measure_no_control():
qb1 = WeakQubitRef(engine=None, idx=1)
qb2 = WeakQubitRef(engine=None, idx=2)
printer = _printer.CommandPrinter()
printer.is_last_engine = True
with pytest.raises(ValueError):
printer._print_cmd(Command(engine=None, gate=Measure, qubits=([qb1],), controls=[qb2]))
def test_command_printer_no_input_default_measure():
cmd_printer = _printer.CommandPrinter(accept_input=False)
eng = MainEngine(backend=cmd_printer, engine_list=[DummyEngine()])
qubit = eng.allocate_qubit()
NOT | qubit
Measure | qubit
assert int(qubit) == 0
def test_command_printer_measure_mapped_qubit():
eng = MainEngine(_printer.CommandPrinter(accept_input=False), [])
qb1 = WeakQubitRef(engine=eng, idx=1)
qb2 = WeakQubitRef(engine=eng, idx=2)
cmd0 = Command(engine=eng, gate=Allocate, qubits=([qb1],))
cmd1 = Command(
engine=eng,
gate=Measure,
qubits=([qb1],),
controls=[],
tags=[LogicalQubitIDTag(2)],
)
with pytest.raises(NotYetMeasuredError):
int(qb1)
with pytest.raises(NotYetMeasuredError):
int(qb2)
eng.send([cmd0, cmd1])
eng.flush()
with pytest.raises(NotYetMeasuredError):
int(qb1)
assert int(qb2) == 0
| {
"content_hash": "5a5caf34219a40c352b4ae11e057dba2",
"timestamp": "",
"source": "github",
"line_count": 92,
"max_line_length": 95,
"avg_line_length": 30.717391304347824,
"alnum_prop": 0.6854210898796886,
"repo_name": "ProjectQ-Framework/ProjectQ",
"id": "658fb78c74724261c47c99b26ac54fd753b59c50",
"size": "3446",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "projectq/backends/_printer_test.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "158833"
},
{
"name": "Python",
"bytes": "1483141"
}
],
"symlink_target": ""
} |
def test_1(input_list):
new_list = []
for i in range(len(input_list)):
new_list[i] = input_list[i]
print("list is {}, new_list is {}".format(input_list, new_list))
return new_list
test_1([1, 2, 3, 4])
# situation 2: allocate enough space for the new_list, which is not significant or meaningful,
# but at least could be a solution in this case
def test_2(input_list):
new_list = [0, 0, 0, 0] # initializing by allocating enough memory
for i in range(len(input_list)):
new_list[i] = input_list[i]
print("list is {}, new_list is {}".format(input_list, new_list))
return new_list
test_2([2, 3, 4, 5])
# situation 3: better to use append(), which I think allocates memory space dynamically
def test_3(input_list):
new_list = []
for i in range(len(input_list)):
new_list.append(input_list[i])
print("list is {}, new_list is {}".format(input_list, new_list))
return new_list
test_3([3, 4, 5, 6])
| {
"content_hash": "6ffdc97b2a08aa90d65e4e610c1e4453",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 94,
"avg_line_length": 34.241379310344826,
"alnum_prop": 0.6163141993957704,
"repo_name": "rush2catch/algorithms-leetcode",
"id": "e30e5a4d7c9fe5b81cf38f1fe9395a2f0d7bce59",
"size": "1514",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Basic Data Structures/array/array_index_error.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "162421"
}
],
"symlink_target": ""
} |
"""
This should probably be renamed guifront.py This defines all of the visual
components to the GUI It is invoked from guiback, which handles the nonvisual
logic.
BUGS:
* Copying the ungrouped imageset raises an error. Should have the option
to copy or move it. Other special imageset should not have this option.
Should gray out an option if it is not available.
"""
from __future__ import absolute_import, division, print_function
from six.moves import zip, map, filter # NOQA
from os.path import isdir
import sys
from ibeis import constants as const
import functools
from guitool.__PYQT__ import QtCore
from guitool.__PYQT__ import QtWidgets
from guitool.__PYQT__.QtCore import Qt
from guitool import slot_, checks_qt_error, ChangeLayoutContext, BlockContext # NOQA
import guitool as gt
from ibeis.other import ibsfuncs
from ibeis.gui import guiheaders as gh
from ibeis.gui import guimenus
import six
from ibeis.viz.interact import interact_annotations2
from ibeis.gui.guiheaders import (IMAGE_TABLE, IMAGE_GRID, ANNOTATION_TABLE, NAME_TABLE, NAMES_TREE, IMAGESET_TABLE) # NOQA
from ibeis.gui.models_and_views import (IBEISStripeModel, IBEISTableView,
IBEISItemModel, IBEISTreeView,
ImagesetTableModel, ImagesetTableView,
IBEISTableWidget, IBEISTreeWidget,
ImagesetTableWidget)
from plottool import color_funcs
import utool as ut
import plottool as pt
print, rrr, profile = ut.inject2(__name__, '[newgui]')
VERBOSE_GUI = ut.VERBOSE or ut.get_argflag(('--verbose-gui', '--verbgui'))
WITH_GUILOG = ut.get_argflag('--guilog')
#############################
###### Tab Widgets #######
#############################
class APITabWidget(QtWidgets.QTabWidget):
"""
Holds the table-tabs
use setCurrentIndex to change the selection
"""
def __init__(tabwgt, parent=None, horizontalStretch=1):
#QtWidgets.QTabWidget.__init__(tabwgt, parent)
super(APITabWidget, tabwgt).__init__(parent)
tabwgt.ibswgt = parent
tabwgt._sizePolicy = gt.newSizePolicy(
tabwgt, horizontalStretch=horizontalStretch)
tabwgt.setSizePolicy(tabwgt._sizePolicy)
#tabwgt.currentChanged.connect(tabwgt.setCurrentIndex)
tabwgt.currentChanged.connect(tabwgt._on_tabletab_change)
tabwgt.current_tblname = None
@slot_(int)
def _on_tabletab_change(tabwgt, index):
""" Switch to the current imageset tab """
if VERBOSE_GUI:
print('[apitab] _onchange(index=%r)' % (index,))
tblname = tabwgt.ibswgt.tblname_list[index]
tabwgt.current_tblname = tblname
if VERBOSE_GUI:
print('[apitab] _onchange(tblname=%r)' % (tblname,))
tabwgt.ibswgt.back._clear_selection()
view = tabwgt.ibswgt.views[tblname]
selected = view.selectionModel().selection()
deselected = QtCore.QItemSelection()
tabwgt.ibswgt.update_selection(selected, deselected)
class ImageSetTabWidget(QtWidgets.QTabWidget):
"""
Handles tabs that contain individual image sets.
"""
def __init__(imageset_tabwgt, parent=None, horizontalStretch=1):
QtWidgets.QTabWidget.__init__(imageset_tabwgt, parent)
imageset_tabwgt.ibswgt = parent
imageset_tabwgt.setTabsClosable(True)
imageset_tabwgt.setMaximumSize(9999, gt.get_cplat_tab_height())
imageset_tabwgt.tabbar = imageset_tabwgt.tabBar()
imageset_tabwgt.tabbar.setMovable(False)
imageset_tabwgt.setStyleSheet('border: none;')
imageset_tabwgt.tabbar.setStyleSheet('border: none;')
sizePolicy = gt.newSizePolicy(imageset_tabwgt, horizontalStretch=horizontalStretch)
imageset_tabwgt.setSizePolicy(sizePolicy)
imageset_tabwgt.tabCloseRequested.connect(imageset_tabwgt._close_tab)
imageset_tabwgt.currentChanged.connect(imageset_tabwgt._on_imagesettab_change)
imageset_tabwgt.imgsetid_list = []
# TURNING ON / OFF ALL IMAGES
# imageset_tabwgt._add_imageset_tab(-1, const.ALL_IMAGE_IMAGESETTEXT)
@slot_(int)
def _on_imagesettab_change(imageset_tabwgt, index):
""" Switch to the current imageset tab """
if VERBOSE_GUI:
print('[imageset_tab_widget] _onchange(index=%r)' % (index,))
if 0 <= index and index < len(imageset_tabwgt.imgsetid_list):
imgsetid = imageset_tabwgt.imgsetid_list[index]
if VERBOSE_GUI:
print('[IMAGESETTAB.ONCHANGE] imgsetid = %r' % (imgsetid,))
imageset_tabwgt.ibswgt._change_imageset(imgsetid)
else:
imageset_tabwgt.ibswgt._change_imageset(-1)
@slot_(int)
def _close_tab(imageset_tabwgt, index):
print('[imageset_tab_widget] _close_tab(index=%r)' % (index,))
if imageset_tabwgt.imgsetid_list[index] is not None:
imageset_tabwgt.imgsetid_list.pop(index)
imageset_tabwgt.removeTab(index)
@slot_()
def _close_all_tabs(imageset_tabwgt):
if VERBOSE_GUI:
print('[imageset_tab_widget] _close_all_tabs()')
while len(imageset_tabwgt.imgsetid_list) > 0:
index = 0
imageset_tabwgt.imgsetid_list.pop(index)
imageset_tabwgt.removeTab(index)
@slot_(int)
def _close_tab_with_imgsetid(imageset_tabwgt, imgsetid):
print('[imageset_tab_widget] _close_tab_with_imgsetid(imgsetid=%r)' % (imgsetid))
try:
index = imageset_tabwgt.imgsetid_list.index(imgsetid)
imageset_tabwgt._close_tab(index)
except:
pass
def _add_imageset_tab(imageset_tabwgt, imgsetid, imagesettext):
if VERBOSE_GUI:
print('[_add_imageset_tab] imgsetid=%r, imagesettext=%r' % (
imgsetid, imagesettext))
if imgsetid not in imageset_tabwgt.imgsetid_list:
if VERBOSE_GUI:
print('[_add_imageset_tab] adding new image tab')
imageset_tabwgt.imgsetid_list.append(imgsetid)
index = len(imageset_tabwgt.imgsetid_list) - 1
tab_name = str(imagesettext)
# Only has a tab, doesn't actually contain anything
hack_newtab = QtWidgets.QWidget(parent=imageset_tabwgt)
imageset_tabwgt.addTab(hack_newtab, tab_name)
else:
if VERBOSE_GUI:
print('[_add_imageset_tab] using existing image tab')
index = imageset_tabwgt.imgsetid_list.index(imgsetid)
if VERBOSE_GUI:
print('[_add_imageset_tab] setCurrentIndex(index=%r)' % (index,))
imageset_tabwgt.setCurrentIndex(index)
# Dont call this, it is triggered twice
#imageset_tabwgt._on_imagesettab_change(index)
def _update_imageset_tab_name(imageset_tabwgt, imgsetid, imagesettext):
for index, _id in enumerate(imageset_tabwgt.imgsetid_list):
if imgsetid == _id:
imageset_tabwgt.setTabText(index, imagesettext)
#############################
######## Main Widget ########
#############################
class IBEISMainWindow(QtWidgets.QMainWindow):
quitSignal = QtCore.pyqtSignal()
dropSignal = QtCore.pyqtSignal(list)
def __init__(mainwin, back=None, ibs=None, parent=None):
super(IBEISMainWindow, mainwin).__init__(parent)
# Menus
try:
mainwin.setUnifiedTitleAndToolBarOnMac(False)
except AttributeError as ex:
ut.printex(ex, 'setUnifiedTitleAndToolBarOnMac is not working', iswarning=True)
guimenus.setup_menus(mainwin, back)
# Central Widget
mainwin.ibswgt = IBEISGuiWidget(back=back, ibs=ibs, parent=mainwin)
mainwin.setCentralWidget(mainwin.ibswgt)
mainwin.setAcceptDrops(True)
if back is not None:
mainwin.quitSignal.connect(back.quit)
else:
raise AssertionError('need backend')
mainwin.dropSignal.connect(mainwin.ibswgt.imagesDropped)
#
mainwin.resize(900, 600)
@slot_()
def closeEvent(mainwin, event):
event.accept()
mainwin.quitSignal.emit()
def dragEnterEvent(self, event):
if event.mimeData().hasUrls:
event.accept()
else:
event.ignore()
def dropEvent(self, event):
if event.mimeData().hasUrls:
event.setDropAction(QtCore.Qt.CopyAction)
event.accept()
links = []
for url in event.mimeData().urls():
links.append(str(url.toLocalFile()))
self.dropSignal.emit(links)
else:
event.ignore()
@slot_()
def expand_names_tree(mainwin):
view = mainwin.ibswgt.views[gh.NAMES_TREE]
view.expandAll()
#############################
##### IBEIS GUI Widget ######
#############################
IBEIS_WIDGET_BASE = QtWidgets.QWidget
class IBEISGuiWidget(IBEIS_WIDGET_BASE):
"""
CommandLine:
# Testing
python -m ibeis --db NNP_Master3 --onlyimgtbl
python -m ibeis --db PZ_Master1 --onlyimgtbl
"""
def __init__(ibswgt, back=None, ibs=None, parent=None):
super(IBEISGuiWidget, ibswgt).__init__(parent)
ibswgt.ibs = ibs
ibswgt.back = back
# Structures that will hold models and views
ibswgt.models = {}
ibswgt.views = {}
# FIXME: Duplicate models
# Create models and views
# Define the abstract item models and views for the tables
ibswgt.tblname_list = []
ibswgt.modelview_defs = []
# NEW DYNAMIC WAY OF USING TABS AND API VIEWS
# ADD IMAGE TABLE
if True:
ibswgt.tblname_list.append(IMAGE_TABLE)
ibswgt.modelview_defs.append((IMAGE_TABLE, IBEISTableWidget,
IBEISItemModel, IBEISTableView))
# ADD IMAGE GRID
USE_GRID = False
if USE_GRID and not ut.get_argflag('--onlyimgtbl'):
ibswgt.tblname_list.append(IMAGE_GRID)
ibswgt.modelview_defs.append((IMAGE_GRID, IBEISTableWidget,
IBEISStripeModel, IBEISTableView))
# ADD ANNOT GRID
if not (ut.get_argflag('--noannottbl') or ut.get_argflag('--onlyimgtbl')):
ibswgt.tblname_list.append(gh.ANNOTATION_TABLE)
ibswgt.modelview_defs.append((gh.ANNOTATION_TABLE,
IBEISTableWidget, IBEISItemModel,
IBEISTableView))
# ADD NAME TREE
if not (ut.get_argflag('--nonametree') or ut.get_argflag('--onlyimgtbl')):
ibswgt.tblname_list.append(NAMES_TREE)
ibswgt.modelview_defs.append((NAMES_TREE, IBEISTreeWidget,
IBEISItemModel, IBEISTreeView))
# ADD IMAGESET TABLE
ibswgt.super_tblname_list = ibswgt.tblname_list + [IMAGESET_TABLE]
ibswgt.modelview_defs.append((IMAGESET_TABLE, ImagesetTableWidget,
ImagesetTableModel, ImagesetTableView))
# DO INITALIZATION
# Create and layout components
ibswgt._init_components()
ibswgt._connect_signals_and_slots()
ibswgt.connect_ibeis_control(ibswgt.ibs)
# Lazy load data every so often
ibswgt.data_load_timer = QtCore.QTimer()
ibswgt.data_load_timer.timeout.connect(ibswgt.data_load_loop)
ibswgt.data_load_freq = 1000
#ibswgt.tt = None
ibswgt.data_load_timer.start(ibswgt.data_load_freq)
#@QtCore.pyqtSlot
def data_load_loop(ibswgt):
# Get the current view
view = ibswgt._tables_tab_widget.currentWidget()
model = view.model()
if model.canFetchMore(None):
model.fetchMore(None)
#if ibswgt.tt is None:
# ellapsed = ibswgt.data_load_freq
#else:
# ellapsed = ut.toc(ibswgt.tt)
#print('Load more data for %r' % (view,))
##freq = ibswgt.data_load_freq / (1000 * 2)
##frac = freq / ellapsed
##print('ibswgt.data_load_freq = %r' % (freq,))
##print('ellapsed = %r' % (ellapsed,))
##print('frac = %r' % (frac,))
##if model.batch_size is not None:
# #new_batch_size = model.batch_size
# #new_batch_size = int(new_batch_size * frac * 2)
# #new_batch_size = max(new_batch_size * 2, 2)
# #new_batch_size = min(model.batch_size * 2, new_batch_size)
# #new_batch_size = min(100, new_batch_size)
# #model.batch_size = new_batch_size
# #print('model.batch_size = %r' % (model.batch_size,))
##ibswgt.tt = ut.tic()
#else:
# ibswgt.tt = None
def _init_components(ibswgt):
""" Defines gui components and inits layout """
# Layout
ibswgt.vlayout = QtWidgets.QVBoxLayout(ibswgt)
ibswgt.setLayout(ibswgt.vlayout)
ibswgt.vsplitter = gt.newSplitter(ibswgt, orientation=Qt.Vertical)
ibswgt.hsplitter = gt.newSplitter(ibswgt, orientation=Qt.Horizontal,
verticalStretch=18)
#
# Tables Tab
ibswgt._tables_tab_widget = APITabWidget(parent=ibswgt, horizontalStretch=81)
for tblname, WidgetClass, ModelClass, ViewClass in ibswgt.modelview_defs:
# Make view first to pass as parent
ibswgt.views[tblname] = ViewClass(parent=ibswgt)
# FIXME: It is very bad to give the model a view. Only the view
# should have a model
ibswgt.models[tblname] = ModelClass(parent=ibswgt.views[tblname])
# Connect models and views
for tblname in ibswgt.super_tblname_list:
ibswgt.views[tblname].setModel(ibswgt.models[tblname])
# Add Image, ANNOTATION, and Names as tabs
for tblname in ibswgt.tblname_list:
ibswgt._tables_tab_widget.addTab(ibswgt.views[tblname], tblname)
# Force full loading
ibswgt.models[IMAGE_TABLE].batch_size = 1000
ibswgt.models[ANNOTATION_TABLE].batch_size = 1000
ibswgt.models[NAMES_TREE].batch_size = 2
# Custom ImageSet Tab Wiget
ibswgt.imageset_tabwgt = ImageSetTabWidget(parent=ibswgt)
ibswgt.vlayout.addWidget(ibswgt.vsplitter)
ibswgt.vsplitter.addWidget(ibswgt.hsplitter)
ibswgt.status_wgt = status_wgt = gt.newWidget(
ibswgt.vsplitter, orientation=Qt.Vertical, spacing=3, margin=0,
name='StatusWidget')
ibswgt.vsplitter.addWidget(status_wgt)
# On the LEFT add the the table of ImageSets
imgset_table_view = ibswgt.views[IMAGESET_TABLE]
imgset_table_view.setSizePolicy(gt.newSizePolicy(hSizePolicy='Expanding',
hStretch=2))
# On the RIGHT add the DataTables and tabs
right_hack_wgt = gt.newWidget()
right_hack_wgt.addWidget(ibswgt.imageset_tabwgt)
right_hack_wgt.addWidget(ibswgt._tables_tab_widget)
right_hack_wgt.setSizePolicy(gt.newSizePolicy(horizontalStretch=5))
# Hack because the tables aren't actually belonging to the tabs
# They are just controlled by the change
ibswgt.hsplitter.addWidget(imgset_table_view)
ibswgt.hsplitter.addWidget(right_hack_wgt)
_NEWLBL = functools.partial(gt.newLabel, ibswgt)
_NEWBUT = functools.partial(gt.newButton, ibswgt)
# _COMBO = functools.partial(gt.newComboBox, ibswgt)
_NEWTEXT = functools.partial(gt.newLineEdit, ibswgt, verticalStretch=1)
primary_fontkw = dict(bold=True, pointSize=11)
secondary_fontkw = dict(bold=False, pointSize=9)
# advanced_fontkw = dict(bold=False, pointSize=8, italic=True)
identify_color = (255, 150, 0)
ibswgt.tablename_to_status_widget_index = {
IMAGESET_TABLE: 1,
IMAGE_TABLE: 3,
IMAGE_GRID: 3,
gh.ANNOTATION_TABLE: 5,
NAMES_TREE: 7,
NAME_TABLE: 7,
}
ibswgt.key_to_objnice = {
IMAGESET_TABLE: 'ImageSet',
IMAGE_TABLE: 'Image',
ANNOTATION_TABLE: 'Annotation',
NAMES_TREE: 'Name',
}
ibswgt.status_widget_list = [
_NEWLBL('Selected ImageSet: ', fontkw=secondary_fontkw, align='right'),
_NEWTEXT(enabled=True, readOnly=True),
##
_NEWLBL('Selected Image: ', fontkw=secondary_fontkw, align='right'),
_NEWTEXT(enabled=True, readOnly=False,
editingFinishedSlot=ibswgt.select_image_text_editing_finished),
##
_NEWLBL('Selected Annotation: ', fontkw=secondary_fontkw, align='right'),
_NEWTEXT(enabled=True, readOnly=False,
editingFinishedSlot=ibswgt.select_annot_text_editing_finished),
##
_NEWLBL('Selected Name: ', fontkw=secondary_fontkw, align='right'),
_NEWTEXT(enabled=True, readOnly=False,
editingFinishedSlot=ibswgt.select_name_text_editing_finished),
]
back = ibswgt.back
ibswgt.batch_intra_occurrence_query_button = _NEWBUT(
'4) ID Encounters',
functools.partial(
back.compute_queries,
daids_mode=const.INTRA_OCCUR_KEY,
query_is_known=None,
use_prioritized_name_subset=False,
cfgdict={'can_match_samename': False, 'use_k_padding': False}
),
bgcolor=color_funcs.adjust_hsv_of_rgb255(identify_color,
-0.01, -0.7, 0.0),
fgcolor=(0, 0, 0),
fontkw=primary_fontkw
)
ibswgt.batch_vsexemplar_query_button = _NEWBUT(
'5) ID Exemplars',
functools.partial(
back.compute_queries,
daids_mode=const.VS_EXEMPLARS_KEY,
use_prioritized_name_subset=True,
query_is_known=None,
cfgdict={'can_match_samename': False, 'use_k_padding': False},
),
bgcolor=color_funcs.adjust_hsv_of_rgb255(identify_color,
-0.02, -0.7, 0.0),
fgcolor=(0, 0, 0),
fontkw=primary_fontkw
)
ibswgt.import_button = _NEWBUT(
'1) Import',
# back.import_images_from_dir,
back.import_button_click,
bgcolor=(235, 200, 200), fontkw=primary_fontkw)
ibswgt.imageset_button = _NEWBUT(
'2) Group',
ibswgt.back.do_group_occurrence_step,
bgcolor=(255, 255, 150), fontkw=primary_fontkw)
ibswgt.detect_button = _NEWBUT(
'3) Detect',
ibswgt.back.run_detection_step,
bgcolor=(150, 255, 150),
fontkw=primary_fontkw
)
ibswgt.reviewed_button = _NEWBUT(
'6) Complete',
ibswgt.back.commit_to_wb_step,
bgcolor=color_funcs.adjust_hsv_of_rgb255((0, 232, 211), 0., -.9, 0.),
fontkw=primary_fontkw,
enabled=True)
ibswgt.control_widget_lists = [
[
ibswgt.import_button,
ibswgt.imageset_button,
_NEWLBL(''),
ibswgt.detect_button,
ibswgt.batch_intra_occurrence_query_button,
ibswgt.batch_vsexemplar_query_button,
ibswgt.reviewed_button,
],
[
_NEWBUT(
'Advanced ID Interface',
back.show_advanced_id_interface,
bgcolor=color_funcs.adjust_hsv_of_rgb255(identify_color),
fgcolor=(0, 0, 0),
fontkw=primary_fontkw
)
]
]
# Other components
# New widget has black magic (for implicit layouts) in it
# Add control widgets (import, group, species selector, etc...)
for count, control_widgets in enumerate(ibswgt.control_widget_lists):
_container = status_wgt.addNewWidget(orientation=Qt.Horizontal,
margin=0,
name='ControlContainer%d' %
(count,))
for widget in control_widgets:
_container.addWidget(widget)
# Output log (turned off by default)
ibswgt.outputLog = gt.newOutputLog(
status_wgt, pointSize=8, visible=WITH_GUILOG,
verticalStretch=6)
status_wgt.addWidget(ibswgt.outputLog)
# Add selected ids status widget
ibswgt.selectionStatusWidget = status_wgt.addNewWidget(orientation=Qt.Horizontal,
margin=3,
name='SelectionStatus')
for widget in ibswgt.status_widget_list:
ibswgt.selectionStatusWidget.addWidget(widget)
# Progress bar is at the bottom
ibswgt.prog_bar = status_wgt.addNewProgressBar(visible=False,
verticalStretch=1,
name='prog_bar')
#ibswgt.vsplitter.print_widget_heirarchy(max_depth=4)
def _connect_signals_and_slots(ibswgt):
    """
    Wire every table view's Qt signals to this widget's slots.

    For each view in ``ibswgt.super_tblname_list`` connects double-click,
    context-menu, and row-count-update signals.  Selection-change signals
    are connected for every table except the imageset table, whose
    selection is driven by its tab widget instead.
    """
    if VERBOSE_GUI:
        print('[newgui] _connect_signals_and_slots')
    for tblname in ibswgt.super_tblname_list:
        tblview = ibswgt.views[tblname]
        tblview.doubleClicked.connect(ibswgt.on_doubleclick)
        tblview.contextMenuClicked.connect(ibswgt.on_contextMenuClicked)
        # Imageset rows are selected via their tabs, so no selection hook here
        if tblname != gh.IMAGESET_TABLE:
            tblview.selectionModel().selectionChanged.connect(ibswgt.update_selection)
        #front.printSignal.connect(back.backend_print)
        #front.raiseExceptionSignal.connect(back.backend_exception)
        # CONNECT HOOK TO GET NUM ROWS
        tblview.rows_updated.connect(ibswgt.on_rows_updated)
@slot_(QtCore.QItemSelection, QtCore.QItemSelection)
def update_selection(ibswgt, selected, deselected):
    """
    Slot run when a table view's selection changes.  Translates the Qt
    selection/deselection deltas into backend rowids: deselected ids are
    diffed out of the backend selection, newly selected ids are added,
    and the selection status texts are refreshed.

    Quirky behavior: if you select two columns in a row and then unselect
    only one, the whole row is unselected, because this function only deals
    with deltas.

    Example:
        >>> # DISABLE_DOCTEST
        >>> from ibeis.gui.newgui import *  # NOQA
        >>> ibs, back, ibswgt, testdata_main_loop = testdata_guifront()
        >>> ibswgt.set_table_tab(gh.NAMES_TREE)
        >>> view = ibswgt.views[gh.NAMES_TREE]
        >>> view.expandAll()
        >>> selmodel = view.selectionModel()
        >>> selected = selmodel.selection()
        >>> deselected = QtCore.QItemSelection()
        >>> ibswgt.update_selection(selected, deselected)
    """
    #print('[ibswgt] update selection')
    #print('selected = ' + str(selected.indexes()))
    #print('deselected = ' + str(deselected.indexes()))
    deselected_model_index_list_ = deselected.indexes()
    selected_model_index_list_ = selected.indexes()
    def get_selection_info(model_index_list_):
        # Map each valid QModelIndex to a (tablename, tree-level) key and
        # its backend rowid.
        model_index_list = [qtindex for qtindex in model_index_list_ if qtindex.isValid()]
        model_list = [qtindex.model() for qtindex in model_index_list]
        tablename_list = [model.name for model in model_list]
        level_list = [model._get_level(qtindex)
                      for model, qtindex in zip(model_list, model_index_list)]
        rowid_list = [model._get_row_id(qtindex)
                      for model, qtindex in zip(model_list, model_index_list)]
        table_key_list = list(zip(tablename_list, level_list))
        return table_key_list, rowid_list
    select_table_key_list, select_rowid_list = get_selection_info(
        selected_model_index_list_)
    deselect_table_key_list, deselect_rowid_list = get_selection_info(
        deselected_model_index_list_)
    table_key2_selected_rowids = dict(ut.group_items(select_rowid_list,
                                                     select_table_key_list))
    table_key2_deselected_rowids = dict(ut.group_items(deselect_rowid_list,
                                                       deselect_table_key_list))
    # Deduplicate rowids within each (tablename, level) group
    table_key2_selected_rowids = {
        key: list(set(val))
        for key, val in six.iteritems(table_key2_selected_rowids)}
    table_key2_deselected_rowids = {
        key: list(set(val))
        for key, val in six.iteritems(table_key2_deselected_rowids)}
    if ut.VERBOSE:
        print('table_key2_selected_rowids = ' + ut.repr2(table_key2_selected_rowids))
        print('table_key2_deselected_rowids = ' + ut.repr2(table_key2_deselected_rowids))
    # Map frontend (tablename, level) keys to backend table constants
    gh_const_tablename_map = {
        (IMAGE_TABLE, 0) : const.IMAGE_TABLE,
        (IMAGE_GRID, 0) : const.IMAGE_TABLE,
        (gh.ANNOTATION_TABLE, 0) : const.ANNOTATION_TABLE,
        (NAME_TABLE, 0) : const.NAME_TABLE,
        (NAMES_TREE, 0) : const.NAME_TABLE,
        (NAMES_TREE, 1) : const.ANNOTATION_TABLE,
    }
    # here tablename is a backend const tablename
    for table_key, id_list in six.iteritems(table_key2_deselected_rowids):
        tablename = gh_const_tablename_map[table_key]
        ibswgt.back._set_selection3(tablename, id_list, mode='diff')
    for table_key, id_list in six.iteritems(table_key2_selected_rowids):
        tablename = gh_const_tablename_map[table_key]
        ibswgt.back._set_selection3(tablename, id_list, mode='add')
    ibswgt.back.update_selection_texts()
    #tblview.selectionModel().selectedIndexes()
def changing_models_gen(ibswgt, tblnames=None):
    """
    Yield each requested table name while its model sits inside a
    ``ChangeLayoutContext``; ``layoutChanged`` fires for every model when
    the context exits after the caller's loop completes.
    """
    if tblnames is None:
        tblnames = ibswgt.super_tblname_list
    if VERBOSE_GUI:
        print('[newgui] changing_models_gen(tblnames=%r)' % (tblnames,))
    models = [ibswgt.models[name] for name in tblnames]
    with ChangeLayoutContext(models):
        for name in tblnames:
            yield name
def update_tables(ibswgt, tblnames=None, clear_view_selection=True):
    """Force the models of the given tables to refresh their rows.

    Selections (when *clear_view_selection* is True) are cleared only
    after the layout-change context from ``changing_models_gen`` closes.
    """
    print('[newgui] update_tables(%r)' % (tblnames,))
    deferred_clears = []
    for name in ibswgt.changing_models_gen(tblnames=tblnames):
        if clear_view_selection:
            deferred_clears.append(ibswgt.views[name].clearSelection)
        ibswgt.models[name]._update()
    # Run the deferred clears outside the changing-models generator.
    for clear in deferred_clears:
        clear()
def connect_ibeis_control(ibswgt, ibs):
    """ Connects a new ibscontroler to the models

    Args:
        ibs: an IBEIS controller, or None to disconnect (all imageset
            tabs are closed and the title reset).

    Side effects: closes imageset tabs, rebuilds table headers from the
    controller, updates the species selector, and (unless ``--fast``)
    selects an initial imageset tab.
    """
    if VERBOSE_GUI:
        print('[newgui] connect_ibeis_control(ibs=%r)' % (ibs,))
    ibswgt.imageset_tabwgt._close_all_tabs()
    if ibs is None:
        if VERBOSE_GUI:
            print('[newgui] invalid ibs')
        title = 'No Database Opened'
        ibswgt.setWindowTitle(title)
    else:
        if VERBOSE_GUI:
            print('[newgui] Connecting valid ibs=%r' % ibs.get_dbname())
        #with ut.Indenter('[CONNECTING]'):
        # Give the frontend the new control
        ibswgt.ibs = ibs
        if not ut.get_argflag('--fast'):
            with ut.Timer('update special', verbose=VERBOSE_GUI):
                # read-only databases cannot have their special
                # imagesets rebuilt
                if not ibs.readonly:
                    ibs.update_special_imagesets()
        else:
            if VERBOSE_GUI:
                print('Skipping special imagesets')
        # Update the api models to use the new control
        with ut.Timer('make headers', verbose=VERBOSE_GUI):
            header_dict, declare_tup = gh.make_ibeis_headers_dict(ibswgt.ibs)
        ibswgt.declare_tup = declare_tup
        title = ibsfuncs.get_title(ibswgt.ibs)
        ibswgt.setWindowTitle(title)
        if ut.VERBOSE:
            print('[newgui] Calling model _update_headers')
        #block_wgt_flag = ibswgt._tables_tab_widget.blockSignals(True)
        with ut.Timer('[newgui] update models', verbose=VERBOSE_GUI):
            #for tblname in ibswgt.changing_models_gen(ibswgt.super_tblname_list):
            for tblname in ibswgt.super_tblname_list:
                model = ibswgt.models[tblname]
                view = ibswgt.views[tblname]
                #if not view.isVisible():
                #    print(view)
                #ut.embed()
                header = header_dict[tblname]
                #widget = ibswgt.widgets[tblname]
                #widget.change_headers(header)
                # NOT SURE IF THESE BLOCKERS SHOULD BE COMMENTED
                #block_model_flag = model.blockSignals(True)
                model._update_headers(**header)
                view._update_headers(**header)  # should use model headers
                #model.blockSignals(block_model_flag)
                #
                #view.infer_delegates_from_model()
            for tblname in ibswgt.super_tblname_list:
                view = ibswgt.views[tblname]
                #if not view.isVisible():
                #    print(view)
                #    continue
                view.hide_cols()
        #ibswgt._tables_tab_widget.blockSignals(block_wgt_flag)
        # Update species with ones enabled in database
        if not ibs.readonly:
            ibswgt.update_species_available()
        # FIXME: bad code
        # TODO: load previously loaded imageset or nothing
        LOAD_IMAGESET_ON_START = not ut.get_argflag('--fast')
        if LOAD_IMAGESET_ON_START:
            imgsetid_list = ibs.get_valid_imgsetids(shipped=False)
            if len(imgsetid_list) > 0:
                DEFAULT_LARGEST_IMAGESET = False
                if DEFAULT_LARGEST_IMAGESET:
                    # pick the imageset with the most images
                    numImg_list = ibs.get_imageset_num_gids(imgsetid_list)
                    argx = ut.list_argsort(numImg_list, reverse=True)[0]
                    imgsetid = imgsetid_list[argx]
                else:  # Grab "first" imageset
                    imgsetid = imgsetid_list[0]
                #ibswgt._change_imageset(imgsetid)
                ibswgt.select_imageset_tab(imgsetid)
            else:
                ibswgt._change_imageset(-1)
def update_species_available(ibswgt, reselect=False,
                             reselect_new_name=None, deleting=False):
    """
    Recompute the species combo-box options from the connected database,
    trying to keep (or re-establish) the current species selection.

    NOTE(review): the final ``setOptions``/``updateOptions`` calls are
    commented out, so this currently only computes ``reselect_index``
    without applying it to the combo box — confirm this is intentional.
    """
    ibs = ibswgt.ibs
    # TODO: update these options depending on ibs.get_species_with_detectors
    # when a controller is attached to the gui
    detection_combo_box_options = [
        # Text # Value
        #('Select Species', 'none'),
        ('Select Species', const.UNKNOWN),
        ('Unknown', const.UNKNOWN),
        #'none'),
    ] + sorted(list(ibs.get_working_species()))
    species_text = ibswgt.back.get_selected_species()
    reselect_index = None
    # When nothing explicit was requested, try to keep the old selection
    if not deleting and reselect_new_name is None and species_text is not None:
        species_rowid = ibs.get_species_rowids_from_text(species_text)
        reselect_new_name = ibs.get_species_nice(species_rowid)
        if VERBOSE_GUI:
            print('[front] Reselecting old selection: %r' % (reselect_new_name, ))
    nice_name_list = [ str(_[0]) for _ in detection_combo_box_options ]
    if reselect_new_name in nice_name_list:
        reselect_index = nice_name_list.index(reselect_new_name)
        if VERBOSE_GUI:
            print('[front] Reselecting renamed selection: %r' % (reselect_new_name, ))
    if VERBOSE_GUI:
        print('[front] Reselecting index: %r' % (reselect_index, ))
    # ibswgt.species_combo.setOptions(detection_combo_box_options)
    # ibswgt.species_combo.updateOptions(reselect=reselect, reselect_index=reselect_index)
def setWindowTitle(ibswgt, title):
    """Set *title* on the parent widget when one exists; otherwise fall
    back to the base-class implementation on this widget."""
    owner = ibswgt.parent()
    if owner is None:
        IBEIS_WIDGET_BASE.setWindowTitle(ibswgt, title)
    else:
        owner.setWindowTitle(title)
def _change_imageset(ibswgt, imgsetid):
    """
    Point every table view at a new imageset.

    Order matters: selections are cleared first, each view is switched
    inside a layout-change context, then the backend selection is updated
    and the current tab is reset to the image table.
    """
    if VERBOSE_GUI:
        print('[newgui] _change_imageset(imgsetid=%r, uuid=%r)' %
              (imgsetid, ibswgt.back.ibs.get_imageset_uuid(imgsetid)))
    for tblname in ibswgt.tblname_list:
        view = ibswgt.views[tblname]
        view.clearSelection()
    for tblname in ibswgt.changing_models_gen(tblnames=ibswgt.tblname_list):
        view = ibswgt.views[tblname]
        view._change_imageset(imgsetid)
    try:
        # Best-effort: backend selection may fail (e.g. special ids);
        # warn and continue so the GUI stays usable.
        ibswgt.back.select_imgsetid(imgsetid)
    except Exception as ex:
        ut.printex(ex, iswarning=True)
    ibswgt.set_table_tab(IMAGE_TABLE)
def _update_imageset_tab_name(ibswgt, imgsetid, imagesettext):
    """Delegate renaming of an imageset's tab to the imageset tab widget."""
    ibswgt.imageset_tabwgt._update_imageset_tab_name(imgsetid, imagesettext)
#------------
# SLOT HELPERS
#------------
def get_table_tab_index(ibswgt, tblname):
    """Return the tab-widget index of the view registered for *tblname*."""
    return ibswgt._tables_tab_widget.indexOf(ibswgt.views[tblname])
def set_selection_status(ibswgt, key, ids):
    """Display the currently selected *ids* for *key* in the status bar.

    The label widget (one slot before the value widget) gets a
    count/description and the value widget shows the repr of the ids.
    """
    slot = ibswgt.tablename_to_status_widget_index[key]
    nice = ibswgt.key_to_objnice[key]
    num = len(ids)
    if num <= 1:
        label = 'Selected %s:' % (nice,)
    else:
        label = 'Selected %s %s:' % (num, ut.pluralize(nice, num),)
    ibswgt.status_widget_list[slot - 1].setText(label)
    ibswgt.status_widget_list[slot].setText(repr(ids))
def set_table_tab(ibswgt, tblname):
    """
    Programmatically switch the table tab to one of:
    Image, ImageGrid, Annotation, or Names table/tree.

    Example:
        >>> # GUI_DOCTEST
        >>> from ibeis.gui.newgui import *  # NOQA
        >>> ibs, back, ibswgt, testdata_main_loop = testdata_guifront()
        >>> ibswgt.set_table_tab(gh.ANNOTATION_TABLE)
    """
    if VERBOSE_GUI:
        print('[newgui] set_table_tab: %r ' % (tblname,))
    with ut.Timer('set table tab', verbose=VERBOSE_GUI):
        ibswgt._tables_tab_widget.setCurrentIndex(
            ibswgt.get_table_tab_index(tblname))
def select_imageset_tab(ibswgt, imgsetid):
    """
    Open (or focus) the tab for the given imageset.

    Args:
        imgsetid (int or str): imageset rowid, or imageset text — a
            string argument is resolved to its rowid first.
    """
    if False:
        # Debugging aid (disabled): prefix prints with an abbreviated
        # caller stack.
        prefix = ut.get_caller_name(range(0, 10))
        prefix = prefix.replace('[wrp_noexectb]', 'w')
        prefix = prefix.replace('[slot_wrapper]', 's')
        prefix = prefix.replace('[X]', 'x')
    else:
        prefix = ''
    if VERBOSE_GUI:
        print(prefix + '[newgui] select_imageset_tab imgsetid=%r' % (imgsetid,))
    if isinstance(imgsetid, six.string_types):
        # Hack
        imagesettext = imgsetid
        imgsetid = ibswgt.ibs.get_imageset_imgsetids_from_text(imagesettext)
    else:
        imagesettext = ibswgt.ibs.get_imageset_text(imgsetid)
    #ibswgt.back.select_imgsetid(imgsetid)
    ibswgt.imageset_tabwgt._add_imageset_tab(imgsetid, imagesettext)
def spawn_edit_image_annotation_interaction_from_aid(ibswgt, aid, imgsetid,
                                                     model=None,
                                                     qtindex=None):
    """
    hack for letting annots spawn image editing

    Resolves the image containing *aid* and opens the image-annotation
    editing interaction on it.  When *model*/*qtindex* are not given
    they are looked up through the image table view.

    CommandLine:
        python -m ibeis.gui.newgui spawn_edit_image_annotation_interaction_from_aid --show

    Example:
        >>> # DISABLE_DOCTEST
        >>> from ibeis.gui.newgui import *  # NOQA
        >>> import ibeis
        >>> main_locals = ibeis.main(defaultdb='testdb1')
        >>> ibs, back = ut.dict_take(main_locals, ['ibs', 'back'])
        >>> ibswgt = back.ibswgt  # NOQA
        >>> aid = 4
        >>> imgsetid = 1
        >>> ibswgt.spawn_edit_image_annotation_interaction_from_aid(aid, imgsetid)
        >>> if ut.show_was_requested():
        >>>     gt.qtapp_loop(qwin=ibswgt)
    """
    gid = ibswgt.back.ibs.get_annot_gids(aid)
    if model is None:
        # Fall back to the image table's model/index for this gid
        view = ibswgt.views[IMAGE_TABLE]
        model = view.model()
        qtindex, row = view.get_row_and_qtindex_from_id(gid)
    ibswgt.spawn_edit_image_annotation_interaction(model, qtindex, gid, imgsetid)
def spawn_edit_image_annotation_interaction(ibswgt, model, qtindex, gid, imgsetid):
    """
    Open the interactive annotation editor for image *gid*, wiring in
    next/prev callbacks so the interaction can walk adjacent rows of
    *model*.

    TODO: needs reimplement using more standard interaction methods
    """
    print('[newgui] Creating new annotation interaction: gid=%r' % (gid,))
    ibs = ibswgt.ibs
    # Select gid
    ibswgt.back.select_gid(gid, imgsetid, show=False)
    # Interact with gid
    nextcb, prevcb, current_gid = ibswgt._interactannot2_callbacks(model, qtindex)
    iannot2_kw = {
        'rows_updated_callback': ibswgt.update_tables,
        'next_callback': nextcb,
        'prev_callback': prevcb,
    }
    # sanity check: the row under qtindex must be the gid we were given
    assert current_gid == gid, 'problem in next/prev updater'
    ibswgt.annot_interact = interact_annotations2.ANNOTATION_Interaction2(
        ibs, gid, **iannot2_kw)
    # hacky GID_PROG: TODO: FIX WITH OTHER HACKS OF THIS TYPE
    # FIXME; this should depend on the model.
    #_, row = model.view.get_row_and_qtindex_from_id(gid)
    #pt.set_figtitle('%d/%d' % (row + 1, model.rowCount()))
    level_num_rows = model._get_level_row_count(qtindex)
    level_row = model._get_level_row_index(qtindex)
    pt.set_figtitle('%d/%d' % (level_row + 1, level_num_rows))
def make_adjacent_qtindex_callbacks(ibswgt, model, qtindex):
    r"""
    Build next/prev navigation callbacks for the row at *qtindex*.

    Returns:
        tuple: (current_rowid, next_callback, prev_callback) — each
        callback, when invoked, returns the same triple recomputed at the
        adjacent row, or ``None`` when there is no valid adjacent row.
        Both callbacks share a one-shot guard so only the first click on
        either of them is honored.

    CommandLine:
        python -m ibeis.gui.newgui --test-make_adjacent_qtindex_callbacks

    Example:
        >>> # DISABLE_DOCTEST
        >>> from ibeis.gui.newgui import *  # NOQA
        >>> ibs, back, ibswgt, testdata_main_loop = testdata_guifront()
        >>> gid = ibs.get_valid_gids()[0]
        >>> model = ibswgt.models[gh.IMAGE_TABLE]
        >>> qtindex, row = model.get_row_and_qtindex_from_id(gid)
        >>> tup = ibswgt.make_adjacent_qtindex_callbacks(model, qtindex)
        >>> (current_rowid, next_callback, prev_callback) = tup
        >>> assert prev_callback is None, 'should not be a previous image id'
        >>> current_rowid1, next_callback1, prev_callback1 = next_callback()
        >>> assert next_callback() is None, 'race condition not prevented'
        >>> current_rowid2, next_callback2, prev_callback2 = next_callback1()
        >>> testdata_main_loop(globals(), locals())
    """
    current_rowid = model._get_row_id(qtindex)
    numclicks = [0]  # shared semaphore; invalidates both callbacks after one call

    def _make_callback(adjacent_qtindex, type_):
        # Only produce a callback when the adjacent index exists and is valid.
        if adjacent_qtindex is None or not adjacent_qtindex.isValid():
            return None
        def _callback():
            if numclicks[0] != 0:
                print('race condition in %s_callback %d ' % (type_, numclicks[0]))
                return
            numclicks[0] += 1
            # Recurse at the adjacent row to get its triple
            return ibswgt.make_adjacent_qtindex_callbacks(model, adjacent_qtindex)
        return _callback

    # FIX: the previous implementation recursed with the *next* qtindex in
    # prev_callback, so clicking "prev" actually advanced forward (and its
    # race message claimed to be next_callback).
    next_callback = _make_callback(model._get_adjacent_qtindex(qtindex, 1), 'next')
    prev_callback = _make_callback(model._get_adjacent_qtindex(qtindex, -1), 'prev')
    return current_rowid, next_callback, prev_callback
def _interactannot2_callbacks(ibswgt, model, qtindex):
    """
    callbacks for the edit image annotation (from image table) interaction

    Resolves the image id for the row at *qtindex* and builds one-shot
    next/prev callbacks that move the open annotation interaction to the
    adjacent row of *model*.

    Returns:
        tuple: (next_callback, prev_callback, cur_gid) — callbacks are
        None when there is no valid adjacent row.

    python -m ibeis --db lynx --imgsetid 2

    TODO: needs reimplement
    """
    #if not qtindex.isValid():
    #    raise AssertionError('Bug: qtindex got invalidated')
    #    # BUG: somewhere qtindex gets invalidated
    #    #return None, None, -1
    # HACK FOR NEXT AND PREVIOUS CLICK CALLBACKS
    #print('model.name = %r' % (model.name,))
    if model.name == gh.IMAGE_TABLE:
        cur_gid = model._get_row_id(qtindex)
    # elif model.name == gh.IMAGE_GRID:
    #     cur_gid = model._get_row_id(qtindex)
    elif model.name == gh.NAMES_TREE:
        cur_level = model._get_level(qtindex)
        if cur_level == 1:
            # level 1 of the names tree holds annotations; map aid -> gid
            cur_aid = model._get_row_id(qtindex)
            cur_gid = ibswgt.ibs.get_annot_gids(cur_aid)
        else:
            raise NotImplementedError('Unknown model.name=%r, cur_level=%r' % (model.name, cur_level))
    else:
        print('gh.IMAGE_TABLE = %r' % (gh.IMAGE_TABLE,))
        raise NotImplementedError('Unknown model.name =%r' % (model.name,))
    next_qtindex = model._get_adjacent_qtindex(qtindex, 1)
    prev_qtindex = model._get_adjacent_qtindex(qtindex, -1)
    numclicks = [0]  # semephore: both callbacks die after the first click
    def make_qtindex_callback(qtindex_, type_='nextprev'):
        def _qtindex_callback():
            if numclicks[0] != 0:
                print('race condition in %s_callback %d ' % (type_, numclicks[0]))
                return
            numclicks[0] += 1
            # call this function again with next index
            nextcb, prevcb, new_gid1 = ibswgt._interactannot2_callbacks(model, qtindex_)
            print('[newgui] %s_callback: new_gid1=%r' % (type_, new_gid1))
            # push the new image + fresh callbacks into the live interaction
            ibswgt.annot_interact.update_image_and_callbacks(
                new_gid1, nextcb, prevcb, do_save=True)
            # hacky GID_PROG: TODO: FIX WITH OTHER HACKS OF THIS TYPE
            #_, row = model.view.get_row_and_qtindex_from_id(new_gid1)
            #pt.set_figtitle('%d/%d' % (row + 1, model.rowCount()))
            level_num_rows = model._get_level_row_count(qtindex_)
            level_row = model._get_level_row_index(qtindex_)
            pt.set_figtitle('%d/%d' % (level_row + 1, level_num_rows))
        return _qtindex_callback
    if next_qtindex is not None and next_qtindex.isValid():
        next_callback = make_qtindex_callback(next_qtindex, 'next')
    else:
        next_callback = None
    if prev_qtindex is not None and prev_qtindex.isValid():
        prev_callback = make_qtindex_callback(prev_qtindex, 'prev')
    else:
        prev_callback = None
    return next_callback, prev_callback, cur_gid
#------------
# SLOTS
#------------
@slot_(str)
def select_annot_text_editing_finished(ibswgt):
    """Slot: the annotation-id status line was edited; apply the selection."""
    key = gh.ANNOTATION_TABLE
    widget_index = ibswgt.tablename_to_status_widget_index[key]
    typed_text = ibswgt.status_widget_list[widget_index].text()
    ibswgt.select_table_indicies_from_text(key, typed_text)
@slot_(str)
def select_name_text_editing_finished(ibswgt):
    """Slot: the name-id status line was edited; apply the selection."""
    key = gh.NAMES_TREE
    widget_index = ibswgt.tablename_to_status_widget_index[key]
    typed_text = ibswgt.status_widget_list[widget_index].text()
    ibswgt.select_table_indicies_from_text(key, typed_text)
@slot_(str)
def select_image_text_editing_finished(ibswgt):
    """Slot: the image-id status line was edited; apply the selection."""
    key = gh.IMAGE_TABLE
    widget_index = ibswgt.tablename_to_status_widget_index[key]
    typed_text = ibswgt.status_widget_list[widget_index].text()
    ibswgt.select_table_indicies_from_text(key, typed_text)
def select_table_indicies_from_text(ibswgt, tblname, text, allow_table_change=False):
    """
    Parse user-typed id text and select the corresponding backend rowids.

    Args:
        tblname - tablename of the id to parse from text
        text (str): e.g. ``'[1, 2, 3]'``, a UUID string, or — for the
            names tree only — a plain name string.
        allow_table_change (bool): when True, a single parsed id may
            switch the current table tab to jump to its row.

    Ignore:
        text = '[1, 2, 3,]'
        text = '51e10019-968b-5f2e-2287-8432464d7547 '

    Example:
        >>> # GUI_DOCTEST
        >>> from ibeis.gui.newgui import *  # NOQA
        >>> ibs, back, ibswgt, testdata_main_loop = testdata_guifront()
        >>> ibswgt.set_table_tab(gh.ANNOTATION_TABLE)
        >>> tblname = gh.NAMES_TREE
        >>> text = 'lena'
        >>> ibswgt.select_table_indicies_from_text(tblname, text)
    """
    if not ut.QUIET:
        print('[newgui] select_table_indicies_from_text')
        print('[newgui] * gh.tblname = %r' % (tblname,))
        print('[newgui] * text = %r' % (text,))
    to_backend_tablename = {
        gh.ANNOTATION_TABLE : const.ANNOTATION_TABLE,
        gh.NAMES_TREE : const.NAME_TABLE,
        gh.IMAGE_TABLE : const.IMAGE_TABLE,
    }
    backend_tablename = to_backend_tablename[tblname]
    if not ut.QUIET:
        print('[newgui] * backend_tablename = %r' % (backend_tablename,))
    if text == '':
        text = '[]'
    try:
        #MODE1 = True
        #if MODE1:
        # Strip brackets and split on commas, dropping empty entries;
        # then try ints first, UUIDs second, raw name strings last.
        id_list_ = text.lstrip('[').rstrip(']').split(',')
        id_list = [id_.strip() for id_ in id_list_]
        id_list = [id_ for id_ in id_list if len(id_) > 0]
        try:
            id_list = list(map(int, id_list))
        except ValueError:
            import uuid
            try:
                # First check to see if the text is a UUID
                id_list = list(map(uuid.UUID, id_list))
            except ValueError:
                if tblname != gh.NAMES_TREE:
                    raise
                else:
                    # then maybe it was a name that was selected
                    id_list = list(map(str, id_list))
        #else:
        #    id_list_ = eval(text, globals(), locals())
        #    id_list = ut.ensure_iterable(id_list_)  # NOQA
    except Exception as ex:
        ut.printex(ex, iswarning=True, keys=['text'])
    else:
        if not ut.QUIET:
            print('[newgui] * id_list = %r' % (id_list,))
        #print(id_list)
        id_list = ibswgt.back._set_selection3(backend_tablename, id_list, mode='set')
        # Select the index if we are in the right table tab
        if len(id_list) == 1 and (
                allow_table_change or ibswgt._tables_tab_widget.current_tblname == tblname):
            if not ut.QUIET:
                print('[newgui] * attempting to select from rowid')
            #view = ibswgt.views[tblname]
            #view.select_row_from_id(id_list[0])
            ibswgt.goto_table_id(tblname, id_list[0])
        else:
            # TODO: convert the id into the ids corresponding with this tablename and move
            # to the first one
            pass
        ibswgt.back.update_selection_texts()
    #pass
@slot_(str, int)
def on_rows_updated(ibswgt, tblname, nRows):
    """
    Slot fired when a model's row count changes; relabels the table's
    tab to show its nice name together with the new row count.
    """
    if VERBOSE_GUI:
        print('[newgui] on_rows_updated: tblname=%12r nRows=%r ' % (tblname, nRows))
    # The imageset table lives in its own tab widget; nothing to relabel.
    if tblname == IMAGESET_TABLE:
        return
    key = str(tblname)
    nice_lut = ibswgt.declare_tup[1]  # hack: TABLE_NICE lives in declare_tup
    label = nice_lut[key] + ' ' + str(nRows)
    tab_index = ibswgt.get_table_tab_index(key)
    ibswgt._tables_tab_widget.setTabText(tab_index, label)
def goto_table_id(ibswgt, tablename, _id):
    """
    Switch to *tablename*'s tab and scroll its view to the row for *_id*.

    Args:
        tablename: gui table name key (e.g. image/annotation table)
        _id: backend rowid to select
    """
    # FIX: log message previously misspelled the keyword as 'tablenamd'
    print('[newgui] goto_table_id(tablename=%r, _id=%r)' % (tablename, _id))
    ibswgt.set_table_tab(tablename)
    view = ibswgt.views[tablename]
    view.select_row_from_id(_id, scroll=True)
@slot_(QtCore.QModelIndex, QtCore.QPoint)
def on_contextMenuClicked(ibswgt, qtindex, pos):
    """
    Right click anywhere in the GUI

    Context menus on right click of a table

    Builds the menu from the selected row ids of the clicked model and
    pops it up at *pos*.  Each table type (imageset, image, image grid,
    annotation, names tree) has its own option builder below.

    CommandLine:
        python -m ibeis --db WS_ALL --imgsetid 2 --select-name=A-003
    """
    if not qtindex.isValid():
        return
    model = qtindex.model()
    tblview = ibswgt.views[model.name]
    context_options = []
    qtindex_list = tblview.selectedIndexes()
    id_list = [model._get_row_id(_qtindex) for _qtindex in qtindex_list]
    level_list = [model._get_level(_qtindex) for _qtindex in qtindex_list]
    # Group selected ids by tree level (trees mix names and annotations)
    level2_ids_ = ut.group_items(id_list, level_list)
    level2_ids = {level: ut.unique_ordered(ids)
                  for level, ids in six.iteritems(level2_ids_)}
    ibs = ibswgt.back.ibs
    back = ibswgt.back
    def build_annot_context_options(ibswgt, ibs, aid_list, imgsetid, **kwargs):
        # Menu options for selected annotation rowids; kwargs toggle the
        # "Go to ..." and edit entries.
        context_options = []
        if len(aid_list) == 1:
            aid = aid_list[0]
            if kwargs.get('goto_image', True):
                context_options += [
                    ('Go to image',
                     lambda: ibswgt.goto_table_id(IMAGE_TABLE,
                                                  ibswgt.back.ibs.get_annot_gids(aid)),)
                ]
            if kwargs.get('goto_annot', True):
                context_options += [
                    ('Go to annot',
                     lambda: ibswgt.goto_table_id(gh.ANNOTATION_TABLE, aid))
                ]
            if kwargs.get('goto_name', True):
                context_options += [
                    ('Go to name',
                     lambda: ibswgt.goto_table_id(NAMES_TREE,
                                                  ibswgt.back.ibs.get_annot_nids(aid))),
                ]
            if kwargs.get('canedit', True):
                context_options += [
                    ('Edit Annotation in Image',
                     lambda: ibswgt.spawn_edit_image_annotation_interaction_from_aid(aid, imgsetid)),
                ]
            context_options += [
                ('----', lambda: None),
                ('View annotation in Web',
                 #lambda: ibswgt.back.select_aid(aid, imgsetid, show=True)),
                 lambda: ibswgt.back.show_annotation(aid, web=True)),
                ('View image in Web',
                 lambda: ibswgt.back.select_gid_from_aid(aid, imgsetid, show=True, web=True)),
                ('----', lambda: None),
                ('Remove annotation\'s name',
                 lambda: ibswgt.back.unset_names([aid])),
                ('Delete annotation',
                 lambda: ibswgt.back.delete_annot(aid_list)),
                ('----', lambda: None),
            ]
            from ibeis.viz.interact import interact_chip
            from ibeis import viz
            context_options += interact_chip.build_annot_context_options(
                ibswgt.back.ibs, aid, refresh_func=viz.draw,
                with_interact_image=False)
        else:
            context_options += [
                ('View annotations in Web',
                 lambda: ibswgt.back.show_aid_list_in_web(aid_list)),
                ('Unset annotations\' names', lambda: ibswgt.back.unset_names(aid_list)),
                ('Delete annotations', lambda: ibswgt.back.delete_annot(aid_list)),
            ]
        return context_options
    def name_context_options(ibswgt, ibs, nid_list, aid_list, imgsetid, **kwargs):
        # Menu options for the names tree (name and/or annot selections)
        context_options = []
        if len(aid_list) == 1:
            aid = aid_list[0]
            context_options += build_annot_context_options(ibswgt, ibs, [aid], imgsetid)
        if len(aid_list) > 0:
            def set_annot_names_to_same_new_name(ibswgt, aid_list):
                ibswgt.back.ibs.set_annot_names_to_same_new_name(aid_list)
                ibswgt.update_tables(tblnames=[gh.NAMES_TREE])
            context_options += [
                ('Rename annots (%s) to new name' % ut.list_str_summarized(
                    aid_list, 'aid_list'),
                 lambda: set_annot_names_to_same_new_name(ibswgt, aid_list)),
            ]
        if len(nid_list) > 0:
            def run_splits(ibs, nid_list):
                # gather every annot of the selected names and review splits
                print('Checking for splits')
                aids_list = ibs.get_name_aids(nid_list)
                aid_list = sorted(list(set(ut.flatten(aids_list))))
                back.run_annot_splits(aid_list)
            def export_nids(ibs, nid_list):
                from ibeis.dbio import export_subset
                if not back.are_you_sure('Confirm export of nid_list=%r' % (nid_list,)):
                    return
                export_subset.export_names(ibs, nid_list)
            def create_new_imageset_from_names_(ibs, nid_list):
                ibs.create_new_imageset_from_names(nid_list)
                ibswgt.update_tables([gh.IMAGESET_TABLE], clear_view_selection=False)
            context_options += [
                ('View name(s) in Web', lambda: ibswgt.back.show_nid_list_in_web(nid_list)),
                ('----', lambda: None),
                ('Check for splits', lambda: run_splits(ibs, nid_list)),
                ('Export names', lambda: export_nids(ibs, nid_list)),
                ('Create ImageSet From Name(s)',
                 lambda: create_new_imageset_from_names_(ibs, nid_list)),
            ]
            from ibeis.viz.interact import interact_name
            context_options += interact_name.build_name_context_options(
                ibswgt.back.ibs, nid_list)
        elif len(nid_list) == 0:
            #from ibeis.viz.interact import interact_name
            #context_options += interact_name.build_name_context_options(
            #    ibswgt.back.ibs, nid_list)
            #print('nutin')
            pass
        return context_options
    def build_image_context_options(ibswgt, ibs, gid_list, imgsetid, **kwargs):
        # Menu options for selected image rowids
        current_imagesettext = ibswgt.back.ibs.get_imageset_text(imgsetid)
        context_options = []
        # Conditional context menu
        # FIXME(review): `gid` is only bound below when len(gid_list) == 1;
        # invoking this entry with a multi-image selection raises NameError
        # — should this pass gid_list instead?
        context_options = [
            ('Edit image ' + ut.pluralize('time', len(gid_list)),
             lambda: ibswgt.edit_image_time([gid]))
        ]
        if len(gid_list) == 1:
            gid = gid_list[0]
            imgsetid = model.imgsetid
            aid_list = ibs.get_image_aids(gid)
            # per-annotation submenus for this image's annots
            annot_options = [
                ('Options aid=%r' % (aid,),
                 build_annot_context_options(ibswgt, ibs, [aid], imgsetid, goto_image=False))
                for aid in aid_list
            ]
            if len(aid_list) == 1:
                annot_option_item = (
                    'Annot Options (aid=%r)' % (aid_list[0],),
                    annot_options[0][1]
                )
            else:
                annot_option_item = ('Annot Options', annot_options)
            if kwargs.get('goto_image_in_imgtbl', False):
                context_options += [
                    ('Go to image in Images Table',
                     lambda: ibswgt.goto_table_id(IMAGE_TABLE, gid)),
                ]
            context_options += [
                ('View image in Matplotlib',
                 lambda: ibswgt.back.select_gid(gid, imgsetid, show=True, web=False)),
                ('View image in Web',
                 lambda: ibswgt.back.select_gid(gid, imgsetid, show=True, web=True)),
                ('View detection image (Hough) [dev]',
                 lambda: ibswgt.back.show_hough_image(gid)),
                annot_option_item,
                #('View annotation in Matplotlib:',
                # view_aid_options1),
                #('View annotation in Web:',
                # view_aid_options2),
                ('Add annotation from entire image',
                 lambda: ibswgt.back.add_annotation_from_image([gid])),
                ('Run detection on image (can cause duplicates)',
                 lambda: ibswgt.back.run_detection_on_images([gid])),
            ]
        else:
            context_options += [
                ('View images in Web',
                 lambda: ibswgt.back.show_gid_list_in_web(gid_list)),
                ('----', lambda: None),
                ('Add annotation from entire images',
                 lambda: ibswgt.back.add_annotation_from_image(gid_list)),
                ('Run detection on images (can cause duplicates)',
                 lambda: ibswgt.back.run_detection_on_images(gid_list)),
            ]
        # Special condition for imagesets
        if current_imagesettext != const.NEW_IMAGESET_IMAGESETTEXT:
            context_options += [
                ('----', lambda: None),
                ('Move to new imageset',
                 lambda: ibswgt.back.send_to_new_imageset(gid_list, mode='move')),
                ('Copy to new imageset',
                 lambda: ibswgt.back.send_to_new_imageset(gid_list, mode='copy')),
            ]
        if current_imagesettext != const.UNGROUPED_IMAGES_IMAGESETTEXT:
            context_options += [
                ('----', lambda: None),
                ('Remove from imageset',
                 lambda: ibswgt.back.remove_from_imageset(gid_list)),
            ]
        # Continue the conditional context menu
        if len(gid_list) == 1:
            # We get gid from above
            context_options += [
                ('----', lambda: None),
                ('Delete image\'s annotations',
                 lambda: ibswgt.back.delete_image_annotations([gid])),
                ('Delete image',
                 lambda: ibswgt.back.delete_image(gid)),
            ]
        else:
            context_options += [
                ('----', lambda: None),
                ('Delete images\' annotations',
                 lambda: ibswgt.back.delete_image_annotations(gid_list)),
                ('Delete images',
                 lambda: ibswgt.back.delete_image(gid_list)),
            ]
        return context_options
    # ---- IMAGESET CONTEXT ----
    if model.name == IMAGESET_TABLE:
        # This is for the benefit of merge imagesets
        merge_destination_id = model._get_row_id(qtindex)
        imagesettext = ibswgt.back.ibs.get_imageset_text(merge_destination_id)
        imgsetid_list = level2_ids[0]
        # Conditional context menu
        # TODO: remove duplicate code
        if len(imgsetid_list) == 1:
            context_options += [
                ('View imageset in Web', lambda: ibswgt.back.show_imgsetid_list_in_web(imgsetid_list)),
                ('Turk imageset\'s detections in Web', lambda: ibswgt.back.show_imgsetid_detection_turk_in_web(imgsetid_list)),
                ('Turk imageset\'s annotations in Web', lambda: ibswgt.back.show_imgsetid_annotation_turk_in_web(imgsetid_list)),
                ('----', lambda: None),
                ('Run detection on imageset (can cause duplicates)',
                 lambda: ibswgt.back.run_detection_on_imageset(imgsetid_list)),
                ('Merge %d imageset into %s' % (len(imgsetid_list), (imagesettext)),
                 lambda: ibswgt.back.merge_imagesets(imgsetid_list, merge_destination_id)),
                ('Copy imageset', lambda: ibswgt.back.copy_imageset(imgsetid_list)),
                ('Export imageset', lambda: ibswgt.back.export_imagesets(imgsetid_list)),
                ('----', lambda: None),
                ('Delete imageset', lambda: ibswgt.back.delete_imageset(imgsetid_list)),
                ('----', lambda: None),
                ('Delete imageset AND images',
                 lambda: ibswgt.back.delete_imageset_and_images(imgsetid_list)),
            ]
        else:
            context_options += [
                ('Run detection on imagesets (can cause duplicates)',
                 lambda: ibswgt.back.run_detection_on_imageset(imgsetid_list)),
                ('Copy imageset', lambda: ibswgt.back.copy_imageset(imgsetid_list)),
                ('Merge %d imagesets into %s' % (len(imgsetid_list), (imagesettext)),
                 lambda: ibswgt.back.merge_imagesets(imgsetid_list, merge_destination_id)),
                ('----', lambda: None),
                ('Delete imagesets', lambda: ibswgt.back.delete_imageset(imgsetid_list)),
                ('----', lambda: None),
                ('Delete imagesets AND images',
                 lambda: ibswgt.back.delete_imageset_and_images(imgsetid_list)),
                # ('export imagesets', lambda: ibswgt.back.export_imagesets(imgsetid_list)),
            ]
    # ---- IMAGE CONTEXT ----
    elif model.name == IMAGE_TABLE:
        gid_list = level2_ids[0]
        imgsetid = ibswgt.back.get_selected_imgsetid()
        context_options += build_image_context_options(ibswgt, ibs,
                                                       gid_list, imgsetid)
    # ---- IMAGE GRID CONTEXT ----
    elif model.name == IMAGE_GRID:
        gid_list = level2_ids[0]
        imgsetid = ibswgt.back.get_selected_imgsetid()
        context_options += build_image_context_options(ibswgt, ibs,
                                                       gid_list, imgsetid,
                                                       goto_image_in_imgtbl=True)
    # ---- ANNOTATION CONTEXT ----
    elif model.name == gh.ANNOTATION_TABLE:
        aid_list = level2_ids[0]
        # Conditional context menu
        # TODO: UNIFY COMMMON CONTEXT MENUS
        context_options += build_annot_context_options(
            ibswgt, ibs, aid_list, model.imgsetid, goto_annot=False)
    # ---- NAMES TREE CONTEXT ----
    elif model.name == NAMES_TREE:
        # TODO: map level list to tablename more reliably
        ut.print_dict(level2_ids)
        nid_list = level2_ids.get(0, [])
        aid_list = level2_ids.get(1, [])
        if len(aid_list) > 0 and len(nid_list) > 0:
            # two types of indices are selected, just return
            # fixme to do something useful
            print('multiple types of indicies selected')
            return
        else:
            imgsetid = model.imgsetid
            context_options += name_context_options(ibswgt, ibs, nid_list, aid_list, imgsetid)
    # Show the context menu
    #ut.print_list(context_options, nl=2)
    if len(context_options) > 0:
        gt.popup_menu(tblview, pos, context_options)
@slot_(QtCore.QModelIndex)
def on_doubleclick(ibswgt, qtindex):
    """
    Double clicking anywhere in the GUI

    Dispatches on the clicked model: imageset rows open their tab, image
    rows open the annotation editor, and annotation/name rows select the
    id in the backend.

    CommandLine:
        python -m ibeis --db lynx --imgsetid 2 --select-name=goku
    """
    print('\n+--- DOUBLE CLICK ---')
    if not qtindex.isValid():
        print('[doubleclick] invalid qtindex')
        return
    model = qtindex.model()
    id_ = model._get_row_id(qtindex)
    if model.name == IMAGESET_TABLE:
        imgsetid = id_
        ibswgt.select_imageset_tab(imgsetid)
    else:
        imgsetid = model.imgsetid
        if (model.name == IMAGE_TABLE) or (model.name == IMAGE_GRID):
            gid = id_
            ibswgt.spawn_edit_image_annotation_interaction(model, qtindex, gid, imgsetid)
        elif model.name == gh.ANNOTATION_TABLE:
            aid = id_
            ibswgt.back.select_aid(aid, imgsetid)
        elif model.name == NAME_TABLE:
            nid = id_
            ibswgt.back.select_nid(nid, imgsetid)
        elif model.name == NAMES_TREE:
            level = model._get_level(qtindex)
            if level == 0:
                nid = id_
                ibswgt.back.select_nid(nid, imgsetid, show=True)
            elif level == 1:
                # level 1 of the names tree is an annotation row
                aid = id_
                ibswgt.spawn_edit_image_annotation_interaction_from_aid(aid, imgsetid, model, qtindex)
                #ibswgt.back.select_aid(aid, imgsetid, show=True)
# @slot_(list)
def imagesDropped(ibswgt, url_list):
r"""
image drag and drop event
CommandLine:
python -m ibeis.gui.newgui imagesDropped --show
Example:
>>> # GUI_DOCTEST
>>> from ibeis.gui.newgui import * # NOQA
>>> ibs, back, ibswgt, testdata_main_loop = testdata_guifront('hstest')
>>> url_list = ['images.foo']
>>> url_list = [ut.truepath('~/Downloads/hs-images.zip')]
>>> url = url_list[0]
>>> ut.quit_if_noshow()
>>> ibswgt.imagesDropped(url_list)
>>> testdata_main_loop(globals(), locals())
"""
print('[drop_event] url_list=%r' % (url_list,))
has_zipext = ut.partial(ut.fpath_has_ext, exts=['.zip'])
gpath_list = list(filter(ut.fpath_has_imgext, url_list))
dir_list = list(filter(isdir, url_list))
zipfile_list = list(filter(has_zipext, url_list))
old = False
if old:
if len(dir_list) > 0:
options = ['No', 'Yes']
title = 'Non-Images dropped'
msg = 'Recursively import from directories?'
ans = gt.user_option(ibswgt, msg=msg, title=title,
options=options)
if ans == 'Yes':
unflat_gpaths = [ut.list_images(dir_, fullpath=True, recursive=True)
for dir_ in dir_list]
flat_gpaths = ut.flatten(unflat_gpaths)
flat_unix_gpaths = list(map(ut.unixpath, flat_gpaths))
gpath_list.extend(flat_unix_gpaths)
else:
return
print('[drop_event] gpath_list=%r' % (gpath_list,))
if len(gpath_list) > 0:
ibswgt.back.import_images_from_file(gpath_list=gpath_list)
else:
from ibeis.dbio import ingest_database
ibs = ibswgt.back.ibs
ingestable = ingest_database.Ingestable2(
ibs.get_dbdir(), gpath_list, dir_list, zipfile_list)
num_gpaths = len(ingestable.imgpath_list)
num_dpaths = len(ingestable.imgdir_list)
num_zips = len(ingestable.zipfile_list)
confirm_list = []
if num_gpaths > 0:
confirm_list += [ut.quantstr('image file', num_gpaths)]
if num_dpaths > 0:
confirm_list += ['recursively from ' + ut.quantstr('directory', num_dpaths, 's')]
if num_zips > 0:
confirm_list += [ut.quantstr('zip file', num_zips, 's')]
confirm_msg = 'Import from: ' + ut.conj_phrase(confirm_list, 'and') + '.'
# gt.rrrr()
config = ingestable.ingest_config
# cfg = config
dlg = gt.ConfigConfirmWidget.as_dialog(ibswgt,
title='Confirm Import Images',
msg=confirm_msg,
config=config)
dlg.resize(700, 500)
self = dlg.widget
dlg.exec_()
print('config = %r' % (config,))
updated_config = self.config # NOQA
print('updated_config = %r' % (updated_config,))
gid_list = ingestable.execute(ibs=ibs)
ibswgt.back._process_new_images(refresh=True, gid_list=gid_list, clock_offset=False)
def select_table_id(ibswgt, table_key, level, id_, imgsetid):
select_func_dict = {
(IMAGE_TABLE, 0) : ibswgt.back.select_gid,
(IMAGE_GRID, 0) : ibswgt.back.select_gid,
(gh.ANNOTATION_TABLE, 0) : ibswgt.back.select_aid,
(NAME_TABLE, 0) : ibswgt.back.select_nid,
(NAMES_TREE, 0) : ibswgt.back.select_nid,
(NAMES_TREE, 1) : ibswgt.back.select_aid,
}
select_func = select_func_dict[(table_key, level)]
select_func(id_, imgsetid, show=False)
def edit_image_time(ibswgt, gid_list):
"""
CommandLine:
python -m ibeis.gui.newgui --exec-edit_image_time --show
Example:
>>> # DISABLE_DOCTEST
>>> from ibeis.gui.newgui import * # NOQA
>>> ibs, back, ibswgt, testdata_main_loop = testdata_guifront('testdb3')
>>> #ibs, back, ibswgt, testdata_main_loop = testdata_guifront('lynx')
>>> ibswgt.edit_image_time([277, 630])
>>> testdata_main_loop(globals(), locals())
"""
from ibeis.gui import clock_offset_gui
ibswgt.co_wgt = clock_offset_gui.ClockOffsetWidget(ibswgt.ibs, gid_list, hack=True)
ibswgt.co_wgt.show()
    def filter_annotation_table(ibswgt):
        r"""
        Apply a hard-coded filter to the annotation table (work in progress).

        TODO: Finish implementation
        CommandLine:
            python -m ibeis.gui.newgui --test-filter_annotation_table --show --db lynx --imgsetid 2
        Example:
            >>> # DISABLE_DOCTEST
            >>> from ibeis.gui.newgui import * # NOQA
            >>> ibs, back, ibswgt, testdata_main_loop = testdata_guifront('testdb3')
            >>> #ibs, back, ibswgt, testdata_main_loop = testdata_guifront('PZ_Master1')
            >>> result = ibswgt.filter_annotation_table()
            >>> print(result)
            >>> testdata_main_loop(globals(), locals())
        """
        from functools import partial
        #ibs.filter_annots_general()
        ibs = ibswgt.back.ibs
        #ibs.filterannots_by_tags(aid_list)
        print('\n------FILTERING ANNOTS\n\n')
        #annotmatch_rowid_list = ibs._get_all_annotmatch_rowids()
        #isscenerymatch_list = ibs.get_annotmatch_is_scenerymatch(annotmatch_rowid_list)
        #ut.take(isscenerymatch_list, ut.list_where(isscenerymatch_list))
        # Applies annotation based filtering to the annotation table
        #filter_kw = dict(any_matches='.*error.*', been_adjusted=True)
        # Currently hard-coded: only show annotations that have been adjusted.
        filter_kw = dict(been_adjusted=True)
        #filter_kw = dict(require_timestamp=True)
        # Install the filter as an "ider" on the annotation table model and
        # rebuild the rows so the filter takes effect immediately.
        filter_fn = partial(ibs.filter_annots_general, filter_kw=filter_kw)
        model = ibswgt.models[gh.ANNOTATION_TABLE]  # NOQA
        model.set_ider_filters([filter_fn])
        with ChangeLayoutContext([model]):
            model._update_rows(rebuild_structure=True)
######################
###### Testing #######
######################
def testdata_guifront(defaultdb='testdb1'):
    """
    Boot an ibeis session for interactive GUI testing.

    Returns ``(ibs, back, ibswgt, testdata_main_loop)`` where
    ``testdata_main_loop`` starts the Qt event loop (and optionally an
    embedded IPython shell) depending on command line flags.
    """
    import ibeis
    main_locals = ibeis.main(defaultdb=defaultdb)
    ibs, back = ut.dict_take(main_locals, ['ibs', 'back'])
    ibswgt = back.ibswgt  # NOQA
    # Capture references to this function's namespaces; copies are made
    # inside the loop function so callers' dicts are never mutated.
    globals__ = globals()
    locals__ = locals()

    def testdata_main_loop(globals_=globals__, locals_=locals__):
        # Merge caller-provided namespaces with the ones captured above
        # before handing them to an interactive shell.
        locals_ = locals_.copy()
        globals_ = globals_.copy()
        locals_.update(locals__)
        globals_.update(globals__)
        if '--cmd' in sys.argv:
            # Run the Qt loop with IPython integration, then drop to a shell
            gt.qtapp_loop(qwin=ibswgt, ipy=True)
            six.exec_(ut.ipython_execstr(), globals_, locals_)
        elif ut.show_was_requested():
            gt.qtapp_loop(qwin=ibswgt)
    return ibs, back, ibswgt, testdata_main_loop
def testfunc():
    r"""
    Smoke test: boot the GUI, grab the image table view, and spin the
    main loop (only when --show / --cmd is given on the command line).

    CommandLine:
        python -m ibeis.gui.newgui --test-testfunc --show
        python -m ibeis.gui.newgui --test-testfunc --cmd
    Example:
        >>> # DISABLE_DOCTEST
        >>> from ibeis.gui.newgui import * # NOQA
        >>> result = testfunc()
        >>> print(result)
    """
    ibs, back, ibswgt, testdata_main_loop = testdata_guifront()
    # view is unused directly but exposed to the interactive shell via locals()
    view = ibswgt.views[gh.IMAGE_TABLE]
    testdata_main_loop(globals(), locals())
if __name__ == '__main__':
    """
    CommandLine:
        python -m ibeis.gui.newgui
        python -m ibeis.gui.newgui --allexamples
        python -m ibeis.gui.newgui --allexamples --noface --nosrc
    """
    import multiprocessing
    multiprocessing.freeze_support()  # for win32
    import utool as ut  # NOQA
    # Discover and run the doctests defined in this module
    ut.doctest_funcs()
| {
"content_hash": "7cde0340dfdc8d00dc2bc16cbb594099",
"timestamp": "",
"source": "github",
"line_count": 1681,
"max_line_length": 133,
"avg_line_length": 44.10113027959548,
"alnum_prop": 0.5489114306526021,
"repo_name": "SU-ECE-17-7/ibeis",
"id": "8479b498d5e35128eca93b2754d3d0ddc4ffe55e",
"size": "74183",
"binary": false,
"copies": "1",
"ref": "refs/heads/next",
"path": "ibeis/gui/newgui.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CMake",
"bytes": "331"
},
{
"name": "CSS",
"bytes": "26792"
},
{
"name": "HTML",
"bytes": "33762203"
},
{
"name": "Inno Setup",
"bytes": "1585"
},
{
"name": "JavaScript",
"bytes": "227454"
},
{
"name": "Jupyter Notebook",
"bytes": "66346367"
},
{
"name": "Python",
"bytes": "6112508"
},
{
"name": "Shell",
"bytes": "58211"
}
],
"symlink_target": ""
} |
"""
Define UVParameters: data and metadata objects for interferometric data sets.
UVParameters are objects to hold specific data and metadata associated with
interferometric data sets. They are used as attributes for classes based on
UVBase. This module also includes specialized subclasses for particular types
of metadata.
"""
import builtins
import numpy as np
import astropy.units as units
from . import utils
__all__ = ["UVParameter", "AngleParameter", "LocationParameter"]
def _get_generic_type(expected_type, strict_type_check=False):
"""Return tuple of more generic types.
Allows for more flexible type checking in the case when a Parameter's value
changes precison or to/from a numpy dtype but still is the desired generic type.
If a generic type cannot be found, the expected_type is returned
Parameters
----------
expected_type : Type or string
The expected type of a Parameter object or a string of the name of a type.
strict_type_check : bool
If True the input expected_type is return exactly
if strict_type_check:
return expected_type exactly
Returns
-------
Tuple of types based on input expected_type
"""
if isinstance(expected_type, str):
try:
expected_type = getattr(builtins, expected_type)
except AttributeError as err:
raise ValueError(
f"Input expected_type is a string with value: '{expected_type}'. "
"When the expected_type is a string, it must be a Python builtin type."
) from err
if strict_type_check:
return expected_type
for types in [
(bool, np.bool_),
(float, np.floating),
(np.unsignedinteger), # unexpected but just in case
(int, np.integer),
(complex, np.complexfloating),
]:
if issubclass(expected_type, types):
return types
return expected_type
class UVParameter(object):
    """
    Data and metadata objects for interferometric data sets.

    Attributes
    ----------
    name : str
        The name of the attribute. Used as the associated property name in
        classes based on UVBase.
    required : bool
        Flag indicating whether this is required metadata for
        the class with this UVParameter as an attribute. Default is True.
    value
        The value of the data or metadata.
    spoof_val
        A fake value that can be assigned to a non-required UVParameter if the
        metadata is required for a particular file-type.
        This is not an attribute of required UVParameters.
    form : 'str' or tuple
        Either 'str' or a tuple giving information about the expected
        shape of the value. Elements of the tuple may be the name of other
        UVParameters that indicate data shapes.

        Form examples:
            - 'str': a string value
            - ('Nblts', 3): the value should be an array of shape:
              Nblts (another UVParameter name), 3
    description : str
        Description of the data or metadata in the object.
    expected_type
        The type that the data or metadata should be. Default is int or str if
        form is 'str'.
    acceptable_vals : list, optional
        List giving allowed values for elements of value.
    acceptable_range: 2-tuple, optional
        Tuple giving a range of allowed magnitudes for elements of value.
    tols : float or 2-tuple of float
        Tolerances for testing the equality of UVParameters. Either a single
        absolute value or a tuple of relative and absolute values to be used by
        np.isclose()
    strict_type_check : bool
        When True, the input expected_type is used exactly, otherwise a more
        generic type is found to allow changes in precisions or to/from numpy
        dtypes to not break checks.
    """

    def __init__(
        self,
        name,
        required=True,
        value=None,
        spoof_val=None,
        form=(),
        description="",
        expected_type=int,
        acceptable_vals=None,
        acceptable_range=None,
        tols=(1e-05, 1e-08),
        strict_type_check=False,
    ):
        """Init UVParameter object."""
        self.name = name
        self.required = required
        # cannot set a spoof_val for required parameters
        if not self.required:
            self.spoof_val = spoof_val
        self.value = value
        self.description = description
        self.form = form
        if self.form == "str":
            # string-form parameters are always strictly typed as str
            self.expected_type = str
            self.strict_type = True
        else:
            self.expected_type = _get_generic_type(
                expected_type, strict_type_check=strict_type_check,
            )
            self.strict_type = strict_type_check
        self.acceptable_vals = acceptable_vals
        self.acceptable_range = acceptable_range
        if np.size(tols) == 1:
            # Only one tolerance given, assume absolute, set relative to zero
            self.tols = (0, tols)
        else:
            # relative and absolute tolerances to be used in np.isclose
            self.tols = tols

    def __eq__(self, other):
        """
        Equal if classes match and values are identical.

        Values are compared with np.allclose/units.quantity.allclose using
        ``self.tols`` where possible; strings are compared ignoring
        leading/trailing whitespace (and, as a fallback, all whitespace);
        dict values are compared key-by-key with case-insensitive keys.
        Prints a diagnostic describing the first difference found.
        """
        if isinstance(other, self.__class__):
            # handle None values symmetrically before any type dispatch
            if self.value is None:
                if other.value is not None:
                    print(f"{self.name} is None on left, but not right")
                    return False
                else:
                    return True
            if other.value is None:
                if self.value is not None:
                    print(f"{self.name} is None on right, but not left")
                    return False
            if isinstance(self.value, np.ndarray) and not isinstance(
                self.value.item(0), (str, np.str_)
            ):
                # numeric (non-string) array comparison
                if not isinstance(other.value, np.ndarray):
                    print(f"{self.name} parameter value is array, but other is not")
                    return False
                if self.value.shape != other.value.shape:
                    print(f"{self.name} parameter value is array, shapes are different")
                    return False
                # check to see if strict types are used
                # NOTE(review): the Quantity/allclose value comparisons below
                # are `elif` branches of these strict-type checks, so arrays
                # whose strict types match are never compared element-wise —
                # confirm this short-circuit is intended.
                if self.strict_type:
                    # types must match
                    if other.strict_type:
                        # both strict, expected_type must match
                        if self.expected_type != other.expected_type:
                            print(
                                f"{self.name} parameter has incompatible types. "
                                f"Left is {self.expected_type}, right is "
                                f"{other.expected_type}"
                            )
                            return False
                    # NOTE(review): this checks self's value against other's
                    # expected_type, while the message reports left's
                    # expectation vs right's dtype — verify direction.
                    elif not isinstance(self.value.item(0), other.expected_type):
                        print(
                            f"{self.name} parameter has incompatible dtypes. Left "
                            f"requires {self.expected_type}, right is "
                            f"{other.value.dtype}"
                        )
                        return False
                elif other.strict_type:
                    # types must match in the other direction
                    if not isinstance(other.value.item(0), self.expected_type):
                        print(
                            f"{self.name} parameter has incompatible dtypes. Left is "
                            f"{self.value.dtype}, right requires {other.expected_type}"
                        )
                        return False
                elif isinstance(self.value, units.Quantity):
                    # astropy Quantities: require equivalent units, then
                    # compare values with unit-aware allclose
                    if not self.value.unit.is_equivalent(other.value.unit):
                        print(
                            f"{self.name} parameter value is an astropy Quantity, "
                            "units are not equivalent"
                        )
                        return False
                    if not isinstance(self.tols[1], units.Quantity):
                        # attach this parameter's unit to a bare atol
                        atol_use = self.tols[1] * self.value.unit
                    else:
                        atol_use = self.tols[1]
                    if not units.quantity.allclose(
                        self.value,
                        other.value,
                        rtol=self.tols[0],
                        atol=atol_use,
                        equal_nan=True,
                    ):
                        print(
                            f"{self.name} parameter value is an astropy Quantity, "
                            "values are not close"
                        )
                        return False
                elif not np.allclose(
                    self.value,
                    other.value,
                    rtol=self.tols[0],
                    atol=self.tols[1],
                    equal_nan=True,
                ):
                    print(f"{self.name} parameter value is array, values are not close")
                    return False
            else:
                # non-array values (scalars, strings, dicts, string arrays)
                # check to see if strict types are used
                if self.strict_type:
                    # types must match
                    if not isinstance(self.value, other.expected_type):
                        print(
                            f"{self.name} parameter has incompatible types. Left "
                            f"requires {type(self.value)}, right is "
                            f"{other.expected_type}"
                        )
                        return False
                if other.strict_type:
                    # types must match in the other direction
                    if not isinstance(other.value, self.expected_type):
                        print(
                            f"{self.name} parameter has incompatible types. Left is "
                            f"{self.expected_type}, right requires {type(other.value)}"
                        )
                        return False
                # decide whether to use string comparison or numeric/dict
                str_type = False
                if isinstance(self.value, str):
                    str_type = True
                if isinstance(self.value, (list, np.ndarray, tuple)):
                    if isinstance(self.value[0], str):
                        str_type = True
                if not str_type:
                    if isinstance(other.value, np.ndarray):
                        # bugfix: message previously read "but other is not"
                        print(
                            f"{self.name} parameter value is not an array, "
                            "but other is"
                        )
                        return False
                    try:
                        if not np.allclose(
                            np.array(self.value),
                            np.array(other.value),
                            rtol=self.tols[0],
                            atol=self.tols[1],
                            equal_nan=True,
                        ):
                            print(
                                f"{self.name} parameter value can be cast to an array"
                                " and tested with np.allclose. The values are "
                                "not close"
                            )
                            return False
                    except (TypeError):
                        if isinstance(self.value, dict):
                            try:
                                # Try a naive comparison first
                                # this will fail if keys are the same
                                # but cases differ.
                                # so only look for exact equality
                                # then default to the long test below.
                                if self.value == other.value:
                                    return True
                            except ValueError:
                                pass
                            # this dict probably contains arrays
                            # we will need to check each item individually
                            # check to see if they are equal other than
                            # upper/lower case keys
                            self_lower = {k.lower(): v for k, v in self.value.items()}
                            other_lower = {k.lower(): v for k, v in other.value.items()}
                            message_str = f"{self.name} parameter is a dict"
                            if set(self_lower.keys()) != set(other_lower.keys()):
                                message_str += ", keys are not the same."
                                print(message_str)
                                return False
                            else:
                                # need to check if values are close,
                                # not just equal
                                values_close = True
                                for key in self_lower.keys():
                                    try:
                                        if not np.allclose(
                                            self_lower[key], other_lower[key]
                                        ):
                                            message_str += f", key {key} is not equal"
                                            values_close = False
                                    except (TypeError):
                                        # this isn't a type that can be
                                        # handled by np.isclose,
                                        # test for equality
                                        if self_lower[key] != other_lower[key]:
                                            message_str += f", key {key} is not equal"
                                            values_close = False
                                if values_close is False:
                                    print(message_str)
                                    return False
                                else:
                                    return True
                        else:
                            if self.value != other.value:
                                print(
                                    f"{self.name} parameter value is not a string "
                                    "or a dict and cannot be cast as a numpy "
                                    "array. The values are not equal."
                                )
                                return False
                else:
                    # string (or sequence-of-strings) comparison, whitespace
                    # tolerant: strip ends first, then ignore all whitespace
                    if isinstance(self.value, (list, np.ndarray, tuple)):
                        if [s.strip() for s in self.value] != [
                            s.strip() for s in other.value
                        ]:
                            print(
                                f"{self.name} parameter value is a list of strings, "
                                "values are different"
                            )
                            return False
                    else:
                        if self.value.strip() != other.value.strip():
                            if self.value.replace("\n", "").replace(
                                " ", ""
                            ) != other.value.replace("\n", "").replace(" ", ""):
                                print(
                                    f"{self.name} parameter value is a string, "
                                    "values are different"
                                )
                                return False
            return True
        else:
            print(f"{self.name} parameter classes are different")
            return False

    def __ne__(self, other):
        """Not equal."""
        return not self.__eq__(other)

    def apply_spoof(self):
        """Set value to spoof_val for non-required UVParameters."""
        self.value = self.spoof_val

    def expected_shape(self, uvbase):
        """
        Get the expected shape of the value based on the form.

        Parameters
        ----------
        uvbase : object
            Object with this UVParameter as an attribute. Needed
            because the form can refer to other UVParameters on this object.

        Returns
        -------
        tuple
            The expected shape of the value (or the string 'str' for
            string-form parameters).

        Raises
        ------
        ValueError
            If the form names a UVBase parameter whose value is None.
        """
        if self.form == "str":
            return self.form
        elif isinstance(self.form, (int, np.integer)):
            # Fixed shape, just return the form
            return (self.form,)
        else:
            # Given by other attributes, look up values
            eshape = ()
            for p in self.form:
                if isinstance(p, (int, np.integer)):
                    eshape = eshape + (p,)
                else:
                    val = getattr(uvbase, p)
                    if val is None:
                        raise ValueError(
                            f"Missing UVBase parameter {p} needed to "
                            "calculate expected shape of parameter"
                        )
                    eshape = eshape + (val,)
            return eshape

    def check_acceptability(self):
        """
        Check that values are acceptable.

        Returns
        -------
        bool
            True if the value passes the check (or no check is configured).
        str
            A message describing the check result.
        """
        if self.acceptable_vals is None and self.acceptable_range is None:
            return True, "No acceptability check"
        else:
            # either acceptable_vals or acceptable_range is set. Prefer acceptable_vals
            if self.acceptable_vals is not None:
                # acceptable_vals are a list of allowed values
                if self.expected_type is str:
                    # strings need to be converted to lower case
                    if isinstance(self.value, str):
                        value_set = {self.value.lower()}
                    else:
                        # this is a list or array of strings, make them all lower case
                        value_set = {x.lower() for x in self.value}
                    acceptable_vals = [x.lower() for x in self.acceptable_vals]
                else:
                    if isinstance(self.value, (list, np.ndarray)):
                        value_set = set(self.value)
                    else:
                        value_set = {self.value}
                    acceptable_vals = self.acceptable_vals
                for elem in value_set:
                    if elem not in acceptable_vals:
                        message = (
                            f"Value {elem}, is not in allowed values: {acceptable_vals}"
                        )
                        return False, message
                return True, "Value is acceptable"
            else:
                # acceptable_range is a tuple giving a range of allowed magnitudes
                testval = np.mean(np.abs(self.value))
                if (testval >= self.acceptable_range[0]) and (
                    testval <= self.acceptable_range[1]
                ):
                    return True, "Value is acceptable"
                else:
                    message = (
                        f"Mean of abs values, {testval}, is not in allowed range: "
                        f"{self.acceptable_range}"
                    )
                    return False, message
class AngleParameter(UVParameter):
    """
    Subclass of UVParameter for Angle type parameters.

    Adds extra methods for conversion to & from degrees (used by UVBase
    objects for _degrees properties associated with these parameters).
    """

    def degrees(self):
        """Return the value converted from radians to degrees (None if unset)."""
        if self.value is None:
            return None
        return self.value * 180.0 / np.pi

    def set_degrees(self, degree_val):
        """
        Set the value from an angle given in degrees.

        Parameters
        ----------
        degree_val : float or None
            Angle in degrees used to set the value attribute (stored in
            radians). None clears the value.
        """
        self.value = None if degree_val is None else degree_val * np.pi / 180.0
class LocationParameter(UVParameter):
    """
    Subclass of UVParameter for Earth location type parameters.

    Adds extra methods for conversion to & from lat/lon/alt in radians or
    degrees (used by UVBase objects for _lat_lon_alt and _lat_lon_alt_degrees
    properties associated with these parameters).
    """

    def __init__(
        self,
        name,
        required=True,
        value=None,
        spoof_val=None,
        description="",
        acceptable_range=(6.35e6, 6.39e6),
        tols=1e-3,
    ):
        # Earth locations are fixed-size float XYZ vectors, so form and
        # expected_type are hard-wired rather than exposed as arguments.
        super(LocationParameter, self).__init__(
            name,
            required=required,
            value=value,
            spoof_val=spoof_val,
            form=3,
            description=description,
            expected_type=float,
            acceptable_range=acceptable_range,
            tols=tols,
        )

    def lat_lon_alt(self):
        """Return value as a (latitude, longitude, altitude) tuple in radians."""
        if self.value is None:
            return None
        # check defaults to False b/c exposed check kwarg exists in UVData
        return utils.LatLonAlt_from_XYZ(self.value, check_acceptability=False)

    def set_lat_lon_alt(self, lat_lon_alt):
        """
        Set value from (latitude, longitude, altitude) tuple in radians.

        Parameters
        ----------
        lat_lon_alt : 3-tuple of float or None
            Tuple with the latitude (radians), longitude (radians)
            and altitude (meters) to use to set the value attribute.
            None clears the value.
        """
        if lat_lon_alt is None:
            self.value = None
            return
        self.value = utils.XYZ_from_LatLonAlt(
            lat_lon_alt[0], lat_lon_alt[1], lat_lon_alt[2]
        )

    def lat_lon_alt_degrees(self):
        """Return value as a (latitude, longitude, altitude) tuple, angles in degrees."""
        if self.value is None:
            return None
        lat_rad, lon_rad, altitude = self.lat_lon_alt()
        return lat_rad * 180.0 / np.pi, lon_rad * 180.0 / np.pi, altitude

    def set_lat_lon_alt_degrees(self, lat_lon_alt_degree):
        """
        Set value from (latitude, longitude, altitude) tuple in degrees.

        Parameters
        ----------
        lat_lon_alt_degree : 3-tuple of float or None
            Tuple with the latitude (degrees), longitude (degrees)
            and altitude (meters) to use to set the value attribute.
            None clears the value.
        """
        if lat_lon_alt_degree is None:
            self.value = None
            return
        latitude, longitude, altitude = lat_lon_alt_degree
        self.value = utils.XYZ_from_LatLonAlt(
            latitude * np.pi / 180.0, longitude * np.pi / 180.0, altitude
        )

    def check_acceptability(self):
        """Check that the vector magnitude of the value is within range."""
        if self.acceptable_range is None:
            return True, "No acceptability check"
        # acceptable_range bounds the norm of the XYZ vector here, rather
        # than per-element magnitudes as in the base class
        magnitude = np.sqrt(np.sum(np.abs(self.value) ** 2))
        if self.acceptable_range[0] <= magnitude <= self.acceptable_range[1]:
            return True, "Value is acceptable"
        message = (
            f"Value {magnitude}, is not in allowed range: {self.acceptable_range}"
        )
        return False, message
| {
"content_hash": "dee3728f9533c2716fff8b9a13c0af5e",
"timestamp": "",
"source": "github",
"line_count": 584,
"max_line_length": 88,
"avg_line_length": 40.26883561643836,
"alnum_prop": 0.476506357103372,
"repo_name": "HERA-Team/pyuvdata",
"id": "294b2940a3754b3e481b5a28028936c1f39a6f34",
"size": "23650",
"binary": false,
"copies": "1",
"ref": "refs/heads/fix_uvh5_phase_info",
"path": "pyuvdata/parameter.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "3599"
},
{
"name": "C",
"bytes": "305381"
},
{
"name": "C++",
"bytes": "33406"
},
{
"name": "IDL",
"bytes": "6021"
},
{
"name": "Objective-C",
"bytes": "1706"
},
{
"name": "PowerShell",
"bytes": "2972"
},
{
"name": "Python",
"bytes": "1140005"
},
{
"name": "Shell",
"bytes": "391"
},
{
"name": "TeX",
"bytes": "3991"
}
],
"symlink_target": ""
} |
import os
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand, CommandError
from pyxform.builder import create_survey_from_xls
from onadata.apps.logger.models.xform import XForm
from onadata.libs.utils.logger_tools import publish_xls_form
from onadata.libs.utils.viewer_tools import django_file
class Command(BaseCommand):
    """Management command to publish an XLS form, optionally replacing one."""

    # Legacy positional-args description (kept for older Django versions)
    args = 'xls_file username'
    # bugfix: the implicit string concatenation previously produced
    # "replacing anexisting one" (missing space)
    help = ("Publish an XLS file with the option of replacing an "
            "existing one")

    def add_arguments(self, parser):
        parser.add_argument('xls_filepath',
                            help="Path to the xls file")
        parser.add_argument('username',
                            help="Username to publish the form to")
        parser.add_argument('-r', '--replace',
                            action='store_true',
                            dest='replace',
                            help="Replace existing form if any")

    def handle(self, *args, **options):
        """
        Validate the xls path and user, then publish (or replace) the form.

        Raises CommandError on a missing file, unknown user, or an existing
        form when --replace was not given.
        """
        try:
            xls_filepath = options['xls_filepath']
        except KeyError:
            raise CommandError("You must provide the path to the xls file.")
        # make sure path exists
        if not xls_filepath or not os.path.exists(xls_filepath):
            raise CommandError(
                "The xls file '%s' does not exist." %
                xls_filepath)
        try:
            username = options['username']
        except KeyError:
            raise CommandError(
                "You must provide the username to publish the form to.")
        # make sure user exists
        try:
            user = User.objects.get(username=username)
        except User.DoesNotExist:
            raise CommandError("The user '%s' does not exist." % username)
        # wasteful but we need to get the id_string beforehand
        survey = create_survey_from_xls(xls_filepath)
        # check if a form with this id_string exists for this user
        form_already_exists = XForm.objects.filter(
            user=user, id_string=survey.id_string).count() > 0
        # id_string of form to replace, if any
        id_string = None
        if form_already_exists:
            if 'replace' in options and options['replace']:
                id_string = survey.id_string
                self.stdout.write("Form already exist, replacing ..\n")
            else:
                raise CommandError(
                    "The form with id_string '%s' already exists, use the -r "
                    "option to replace it." % survey.id_string)
        else:
            self.stdout.write("Form does NOT exist, publishing ..\n")
        # publish
        xls_file = django_file(
            xls_filepath, 'xls_file', 'application/vnd.ms-excel')
        publish_xls_form(xls_file, user, id_string)
        self.stdout.write("Done..\n")
| {
"content_hash": "5e43d973bf0915c681f6d7a470c51ef9",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 78,
"avg_line_length": 37.82666666666667,
"alnum_prop": 0.5826577370461755,
"repo_name": "kobotoolbox/kobocat",
"id": "890691b7247979dc0e9af46c18944fa3ccf19de2",
"size": "2853",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "onadata/apps/logger/management/commands/publish_xls.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "146326"
},
{
"name": "Dockerfile",
"bytes": "3965"
},
{
"name": "HTML",
"bytes": "136962"
},
{
"name": "JavaScript",
"bytes": "734122"
},
{
"name": "Less",
"bytes": "19821"
},
{
"name": "Makefile",
"bytes": "2286"
},
{
"name": "Python",
"bytes": "1264157"
},
{
"name": "Shell",
"bytes": "9858"
}
],
"symlink_target": ""
} |
"""
This module allows replacing stdlib's logging module with twiggy,
it implements the following interface:
logging's interface:
:getLogger: returns a logger that supports debug/info/error etc'.
:root: the root logger.
:basicConfig: raises an Exception.
hijack interface:
:hijack: for 'import logging' to import twiggy.
:restore: for restoring the original logging module.
logging bridge:
:LoggingBridgeOutput: an output that bridges log messages to stdlib's logging.
"""
import sys
import logging as orig_logging
from threading import Lock
from .lib.converter import ConversionTable, drop
from .formats import LineFormat
from .outputs import Output
from . import levels, log
from .levels import DEBUG, INFO, WARNING, ERROR, CRITICAL, NOTICE, DISABLED
__all__ = ["basicConfig", "hijack", "restore",
"getLogger", "root", "LoggingBridgeOutput"]
def basicConfig(**kwargs):
    """Unsupported stand-in for ``logging.basicConfig``; always raises."""
    raise RuntimeError("Twiggy doesn't support logging's basicConfig")
def hijack():
    """Install this compatibility module in place of stdlib ``logging``."""
    sys.modules["logging"] = sys.modules[__name__]
def restore():
    """Put the original stdlib ``logging`` module back into ``sys.modules``."""
    sys.modules["logging"] = orig_logging
def log_func_decorator(level):
    """Build a logger method that forwards all arguments to ``self.log`` at ``level``."""
    def level_method(self, *args, **kwargs):
        return self.log(level, *args, **kwargs)
    return level_method
class FakeLogger(object):
    """
    Emulate stdlib's ``logging.Logger`` on top of twiggy's log system.

    usage:
        getLogger("spam").error("eggs")
    translates to:
        log.name("spam").error("eggs")
    """

    __slots__ = ["_logger"]

    def __init__(self, logger):
        self._logger = logger

    # level-specific methods, including stdlib aliases warn/fatal
    debug = log_func_decorator(DEBUG)
    info = log_func_decorator(INFO)
    warn = warning = log_func_decorator(WARNING)
    error = log_func_decorator(ERROR)
    critical = fatal = log_func_decorator(CRITICAL)

    def exception(self, *args, **kwargs):
        """Log at ERROR level with exception info attached."""
        kwargs['exc_info'] = True
        self.error(*args, **kwargs)

    def setLevel(self, level):
        """Set the minimum level on the wrapped twiggy logger."""
        self._logger.min_level = level

    @property
    def level(self):
        """The wrapped twiggy logger's minimum level."""
        return self._logger.min_level

    def getEffectiveLevel(self):
        """Return the same value as ``level``."""
        return self.level

    def isEnabledFor(self, level):
        """Return True when a message at ``level`` would be emitted."""
        return level >= self.level

    def log(self, level, format_spec, *args, **kwargs):
        """
        Log with a given level; pass ``exc_info=True`` to include
        exception info.
        """
        if kwargs.pop("exc_info", False):
            emitter = self._logger.trace("error")
        else:
            emitter = self._logger
        if not isinstance(level, levels.LogLevel):
            raise ValueError("Unknown level: {0}".format(level))
        emitter._emit(level, format_spec, args, kwargs)
# Module-level root logger, mirroring ``logging.root``; style="percent"
# selects %-style message formatting as used by stdlib logging callers.
root = FakeLogger(log.options(style="percent"))
# Cache of FakeLogger instances keyed by name, guarded by a lock so that
# concurrent getLogger() calls create each named logger only once.
_logger_cache = {}  # name to logger
_logger_cache_lock = Lock()
def getLogger(name=None):
    """Return the cached FakeLogger for ``name``, or ``root`` when name is None."""
    if name is None:
        return root
    with _logger_cache_lock:
        if name not in _logger_cache:
            _logger_cache[name] = FakeLogger(log.name(name).options(style="percent"))
        return _logger_cache[name]
# Converter used when bridging messages into stdlib logging: drop the
# time, name and level fields (stdlib formatters typically render those
# themselves — see LoggingBridgeFormat); remaining fields are stringified
# as key=value pairs joined with ':'.
logging_bridge_converter = ConversionTable([('time', lambda x: x, drop),
                                            ('name', lambda x: x, drop),
                                            ('level', lambda x: x, drop)])
logging_bridge_converter.genericValue = str
logging_bridge_converter.genericItem = "{0}={1}".format
logging_bridge_converter.aggregate = ':'.join
class LoggingBridgeFormat(LineFormat):
    """
    Line format for the logging bridge.

    Uses a converter that doesn't display a level, time and name, because
    users of stdlib's logging usually set up formatters that display this
    info themselves.
    """

    def __init__(self, *args, **kwargs):
        super(LoggingBridgeFormat, self).__init__(
            conversion=logging_bridge_converter, *args, **kwargs)

    def __call__(self, msg):
        formatted = super(LoggingBridgeFormat, self).__call__(msg)
        # Hand back the level and name alongside the text so the output
        # can route the record to the matching stdlib logger.
        return (formatted, msg.level, msg.name)
class LoggingBridgeOutput(Output):
    """
    usage:
        twiggy.add_emitters(("spam", DEBUG, None, LoggingBridgeOutput()))

    This output forwards twiggy's:
        log.name("spam").info("eggs")
    into logging's:
        logging.getLogger("spam").info("eggs")

    A twiggy level is translated to a logging level by name, falling back
    to FALLBACK_MAP; the stdlib logger is looked up by the twiggy logger's
    name.
    """

    # for levels in twiggy that aren't in stdlib's logging
    FALLBACK_MAP = {NOTICE: orig_logging.WARNING,
                    DISABLED: orig_logging.NOTSET}

    def __init__(self, *args, **kwargs):
        super(LoggingBridgeOutput, self).__init__(
            format=LoggingBridgeFormat(), *args, **kwargs)

    def _open(self):
        """Nothing to open; stdlib logging manages its own handlers."""
        pass

    def _close(self):
        """Nothing to close."""
        pass

    def _write(self, args):
        text, level, name = args
        # Translate the twiggy level by name first, then via the fallback map
        stdlib_level = getattr(orig_logging, str(level), None)
        if stdlib_level is None:
            stdlib_level = self.FALLBACK_MAP[level]
        orig_logging.getLogger(name).log(stdlib_level, text)
| {
"content_hash": "3e540b5039b6afaceccd9ec08b60f3a2",
"timestamp": "",
"source": "github",
"line_count": 181,
"max_line_length": 93,
"avg_line_length": 29.138121546961326,
"alnum_prop": 0.6259006446719757,
"repo_name": "wearpants/twiggy",
"id": "b3d5e86c443091e16521d1d006ed7738ec25e46b",
"size": "5274",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "twiggy/logging_compat.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "416"
},
{
"name": "Python",
"bytes": "149080"
},
{
"name": "Shell",
"bytes": "317"
}
],
"symlink_target": ""
} |
import unittest
from is_increasing import is_increasing
class Test_IS_Increasing(unittest.TestCase):
    """Unit tests for the ``is_increasing`` helper."""

    def test_inc(self):
        # An increasing sequence should be reported as increasing.
        # NOTE(review): only the positive case is covered — consider adding
        # non-increasing, empty and single-element inputs.
        self.assertTrue(is_increasing([3, 5, 8, 9, 22]))

if __name__ == "__main__":
    # Allow running this test module directly.
    unittest.main()
| {
"content_hash": "cdd8a34b056a7088438040c285f2b659",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 56,
"avg_line_length": 19.5,
"alnum_prop": 0.6538461538461539,
"repo_name": "sevgo/Programming101",
"id": "70c1f44bd991a07be3683080c94697e127e48dac",
"size": "258",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "week1/warmups/is_increasing_test.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "81618"
}
],
"symlink_target": ""
} |
from optparse import make_option
from django.core.management.base import NoArgsCommand
from django.utils import translation
from django.utils.encoding import smart_text
from fluent_pages.models.db import UrlNode
from icekit.publishing.models import \
sync_mptt_tree_fields_from_draft_to_published
class Command(NoArgsCommand):
    """Management command that pushes MPTT tree fields (and optionally
    cached URLs) from draft pages onto their published copies."""

    help = "Resync MPTT tree data and cached URLs for published page copies"

    option_list = (
        make_option(
            '-n', '--dry-run', action='store_true', dest='dry-run',
            default=False,
            help="Only list what will change, don't make the actual changes."
        ),
        make_option(
            '-f', '--force-update-cached-urls', action='store_true',
            dest='force-update-cached-urls',
            default=False,
            help="Update cached URLs"
        ),
    ) + NoArgsCommand.option_list

    def log(self, msg, at_verbosity=1):
        """Write ``msg`` to stdout when the invocation verbosity allows it."""
        if self.verbosity >= at_verbosity:
            self.stdout.write(smart_text(msg))

    def sync_draft_copy_tree_attrs_to_published_copy(
            self, drafts_qs, is_dry_run=False, force_update_cached_urls=False):
        """Sync tree fields from each draft in ``drafts_qs`` to its published
        copy, logging every field whose value actually changed."""
        for draft in drafts_qs:
            # Drafts that were never published have nothing to sync.
            if not getattr(draft, 'publishing_linked', None):
                continue
            change_report = sync_mptt_tree_fields_from_draft_to_published(
                draft,
                dry_run=is_dry_run,
                force_update_cached_urls=force_update_cached_urls)
            if not change_report:
                continue
            for item, description, old_value, new_value in change_report:
                if old_value == new_value:
                    continue
                item_repr = smart_text(repr(item))
                self.log(u"%s %s => %s (was %s)" % (
                    item_repr, description, new_value, old_value))

    def handle_noargs(self, **options):
        is_dry_run = options.get('dry-run', False)
        force_update_cached_urls = options.get(
            'force-update-cached-urls', False)
        self.verbosity = options.get('verbosity', 1)
        # The fluent-pages URL machinery is translation-aware; pin a language.
        translation.activate('en')
        draft_pages = UrlNode.objects.filter(status=UrlNode.DRAFT)
        self.sync_draft_copy_tree_attrs_to_published_copy(
            draft_pages,
            is_dry_run=is_dry_run,
            force_update_cached_urls=force_update_cached_urls)
| {
"content_hash": "c3fca769a972de2a10e5fd4124ec4ee2",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 79,
"avg_line_length": 37.90625,
"alnum_prop": 0.5935696619950536,
"repo_name": "ic-labs/django-icekit",
"id": "17bb54738b298d313a5bdf42b0623634f582e270",
"size": "2426",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "icekit/management/commands/resync_published_page_tree.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "18019"
},
{
"name": "HTML",
"bytes": "92605"
},
{
"name": "JavaScript",
"bytes": "27803"
},
{
"name": "Python",
"bytes": "1476354"
},
{
"name": "Shell",
"bytes": "37850"
}
],
"symlink_target": ""
} |
from django.conf import settings
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.utils.translation import ugettext
from django.contrib import messages
from django.contrib.admin.views.decorators import staff_member_required
from account.utils import get_default_redirect, user_display
from models import SignupCode
from forms import SignupForm, InviteUserForm
def group_and_bridge(request):
    """
    Given the request we can depend on the GroupMiddleware to provide the
    group and bridge.
    """
    # Group-aware deployments attach ``group`` (and then ``bridge``) to the
    # request; plain deployments yield ``(None, None)``.
    group = getattr(request, "group", None)
    bridge = request.bridge if group else None
    return group, bridge
def group_context(group, bridge):
    """Build the base template context for an (optional) group."""
    # @@@ use bridge
    context = {"group": group}
    if group:
        # Group pages extend a base template supplied by the bridge.
        context["group_base"] = bridge.group_base_template()
    return context
def signup(request, **kwargs):
    """Sign-up view with invite-code support.

    Flow:

    * ``?code=...`` is validated with ``SignupCode.check``; a valid code
      pre-fills the form with the code and the e-mail it was sent to.
    * If ``ACCOUNT_OPEN_SIGNUP`` is false and no valid code was supplied,
      ``template_name_failure`` is rendered instead of the form.
    * On a valid POST the user is saved, the code (if any) is consumed via
      ``signup_code.use(user)``, the user is logged in and redirected to
      ``success_url``; an invalid POST falls through and re-renders the
      bound form with its errors.
    """
    form_class = kwargs.pop("form_class", SignupForm)
    template_name = kwargs.pop("template_name", "account/signup.html")
    template_name_failure = kwargs.pop("template_name_failure", "signup_codes/failure.html")
    success_url = kwargs.pop("success_url", None)
    group, bridge = group_and_bridge(request)
    ctx = group_context(group, bridge)
    if success_url is None:
        # Redirect target priority: SIGNUP_REDIRECT_URLNAME, then
        # LOGIN_REDIRECT_URLNAME, then LOGIN_REDIRECT_URL; finally let the
        # request's own redirect parameter override the fallback.
        if hasattr(settings, "SIGNUP_REDIRECT_URLNAME"):
            fallback_url = reverse(settings.SIGNUP_REDIRECT_URLNAME)
        else:
            if hasattr(settings, "LOGIN_REDIRECT_URLNAME"):
                fallback_url = reverse(settings.LOGIN_REDIRECT_URLNAME)
            else:
                fallback_url = settings.LOGIN_REDIRECT_URL
        success_url = get_default_redirect(request, fallback_url)
    code = request.GET.get("code")
    if request.method == "POST":
        form = form_class(request.POST, group=group)
        if form.is_valid():
            user = form.save(request=request)
            signup_code = form.cleaned_data["signup_code"]
            if signup_code:
                # Mark the invite code as consumed by this user.
                signup_code.use(user)
            form.login(request, user)
            messages.add_message(request, messages.SUCCESS,
                ugettext("Successfully logged in as %(username)s.") % {
                    "username": user_display(user),
                }
            )
            return HttpResponseRedirect(success_url)
    else:
        signup_code = SignupCode.check(code)
        if signup_code:
            # Valid invite code: pre-fill the form for the invitee.
            initial = {
                "signup_code": code,
                "email": signup_code.email,
            }
            form = form_class(initial=initial, group=group)
        else:
            if not settings.ACCOUNT_OPEN_SIGNUP:
                ctx.update({
                    "code": code,
                })
                ctx = RequestContext(request, ctx)
                # if account signup is not open we want to fail when there is
                # no sign up code or what was provided failed.
                return render_to_response(template_name_failure, ctx)
            else:
                form = form_class(group=group)
    ctx.update({
        "code": code,
        "form": form,
    })
    return render_to_response(template_name, RequestContext(request, ctx))
@staff_member_required
def admin_invite_user(request, **kwargs):
    """
    This view, by default, works inside the Django admin.
    """
    form_class = kwargs.pop("form_class", InviteUserForm)
    template_name = kwargs.pop("template_name", "signup_codes/admin_invite_user.html")
    group, bridge = group_and_bridge(request)
    if request.method == "POST":
        form = form_class(request.POST, group=group)
        if form.is_valid():
            invited_email = form.cleaned_data["email"]
            form.send_signup_code()
            messages.add_message(request, messages.INFO,
                ugettext("An email has been sent to %(email)s.") % {
                    "email": invited_email
                }
            )
            # NOTE(review): unlike the POST path above, the reset form is
            # built without group=group -- confirm this is intentional.
            form = form_class()  # reset
    else:
        form = form_class(group=group)
    ctx = group_context(group, bridge)
    ctx["title"] = ugettext("Invite user")
    ctx["form"] = form
    return render_to_response(template_name, RequestContext(request, ctx))
| {
"content_hash": "29d7e48e5239241bd97c36414f5062e5",
"timestamp": "",
"source": "github",
"line_count": 138,
"max_line_length": 92,
"avg_line_length": 32.15217391304348,
"alnum_prop": 0.5927428442641425,
"repo_name": "zhiwehu/IBookmark",
"id": "dd3fe2172d35ca2a3b5a1c9eb3448d3ea427dac1",
"size": "4437",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "webapp/apps/signup_codes/views.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "460708"
},
{
"name": "Python",
"bytes": "132225"
}
],
"symlink_target": ""
} |
"""
WSGI config for djcms_sbadmin2 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "djcms_sbadmin2.settings")
application = get_wsgi_application()
| {
"content_hash": "85062f9d5b62151d20a76f0a7bd6b40b",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 78,
"avg_line_length": 25.3125,
"alnum_prop": 0.7728395061728395,
"repo_name": "guhuajun/djcms-sbadmin2",
"id": "de1ef8bcf98e22a1ee4ff30c89d0c38925414f88",
"size": "405",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "djcms_sbadmin2/wsgi.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "298679"
},
{
"name": "HTML",
"bytes": "221221"
},
{
"name": "JavaScript",
"bytes": "1375590"
},
{
"name": "PHP",
"bytes": "2157"
},
{
"name": "Python",
"bytes": "24284"
}
],
"symlink_target": ""
} |
from django.contrib import admin
from main.models import File,Share,Setting,Peer
# Register your models here.
# Expose each model in the Django admin with the default ModelAdmin.
for _model in (File, Share, Setting, Peer):
    admin.site.register(_model)
| {
"content_hash": "b2cc63727406f542f3fc33e6c1b75d8b",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 47,
"avg_line_length": 24.444444444444443,
"alnum_prop": 0.8090909090909091,
"repo_name": "TheTacoScott/tacozmq2",
"id": "9e0a0c888bbe79b1419f4d85d54efa11a034a392",
"size": "220",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "taco/main/admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "2309"
},
{
"name": "Python",
"bytes": "5309"
}
],
"symlink_target": ""
} |
import struct
import json
import socket
from .. import const
from ..Remote import Remote
from ..Node import Node
class TCPRPC(object):
    """Binary wire codec for the Kademlia-style TCP service.

    Every message shares a common header::

        1 byte   command id (const.kad.command.*)
        20 bytes echo token, correlating requests with responses
        20 bytes sender node id
        3+ bytes sender address (length-prefixed IP + big-endian port,
                 see :meth:`pack_remote`)

    followed by a command-specific payload.  ``pack_*`` methods build the
    complete outgoing byte string; the ``read_*`` coroutines consume only
    the payload of their command from an ``asyncio.StreamReader`` (the
    header is consumed by :meth:`read_command`).
    """

    def __init__(self, service, loop):
        """
        Args:
            service: Owning TCP service (exposes ``server.remote``).
            loop: asyncio event loop the service runs on.
        """
        self.service = service
        self.loop = loop

    def pack_ping(self, local, remote, echo):
        """Pack a PING message (header only, no payload).

        Args:
            local: Self Node
            remote: Self Address
            echo: Random Echo Message

        Returns:
            Packed Data to Send
        """
        return b"".join([
            struct.pack('B', const.kad.command.PING),
            echo,
            local.id,
            self.pack_remote(remote)
        ])

    def pack_pong(self, local, remote, echo):
        """Pack a PONG message (reply to PING; header only).

        Args:
            local: Self Node
            remote: Self Address
            echo: Received Echo Message, copied back for correlation

        Returns:
            Packed Data to Send
        """
        return b"".join([
            struct.pack('B', const.kad.command.PONG),
            echo,
            local.id,
            self.pack_remote(remote)
        ])

    async def read_ping(self, reader):
        # PING carries no payload beyond the common header.
        return None

    async def read_pong(self, reader):
        # PONG carries no payload beyond the common header.
        return None

    def pack_store(self, local, remote, echo, key, value):
        """Pack a STORE message.

        Args:
            local: Self Node
            remote: Self Address
            echo: Random Echo Message
            key: 20-byte key to save
            value: value bytes, sent length-prefixed with a big-endian uint32

        Returns:
            Packed Data to Send
        """
        return b"".join([
            struct.pack('B', const.kad.command.STORE),
            echo,
            local.id,
            self.pack_remote(remote),
            key,
            struct.pack('>L', len(value)),
            value
        ])

    def pack_pong_store(self, local, remote, echo, key):
        """Pack a PONG_STORE message acknowledging a STORE.

        Args:
            local: Self Node
            remote: Self Address
            echo: Received Echo Message
            key: 20-byte key that was saved

        Returns:
            Packed Data to Send
        """
        return b"".join([
            struct.pack('B', const.kad.command.PONG_STORE),
            echo,
            local.id,
            self.pack_remote(remote),
            key
        ])

    async def read_store(self, reader):
        """Read a STORE payload: 20-byte key, uint32 length, value bytes."""
        key = await reader.readexactly(20)
        len_value = struct.unpack('>L', await reader.readexactly(4))[0]
        value = await reader.readexactly(len_value)
        return key, value

    async def read_pong_store(self, reader):
        """Read a PONG_STORE payload: the acknowledged 20-byte key."""
        key = await reader.readexactly(20)
        return key

    def pack_findNode(self, local, remote, echo, remoteId):
        """Pack a FIND_NODE message.

        Args:
            local: Self Node
            remote: Self Address
            echo: Random Echo Message
            remoteId: 20-byte hash of the node to look up

        Returns:
            Packed Data to Send
        """
        return b"".join([
            struct.pack('B', const.kad.command.FIND_NODE),
            echo,
            local.id,
            self.pack_remote(remote),
            remoteId
        ])

    def pack_pong_findNode(self, local, remote, echo, remoteId, remoteNodes):
        """Pack a PONG_FIND_NODE message listing the closest known nodes.

        Args:
            local: Self Node
            remote: Self Address
            echo: Received Echo Message
            remoteId: 20-byte hash that was looked up
            remoteNodes: iterable of nodes to return (count sent as uint8,
                so at most 255)

        Returns:
            Packed Data to Send
        """
        return b"".join([
            struct.pack('B', const.kad.command.PONG_FIND_NODE),
            echo,
            local.id,
            self.pack_remote(remote),
            remoteId,
            struct.pack('B', len(remoteNodes)),
            *[
                self.pack_node(remoteNode) for remoteNode in remoteNodes
            ]
        ])

    async def read_findNode(self, reader):
        """Read a FIND_NODE payload: the 20-byte target id."""
        return await reader.readexactly(20)

    async def read_pong_findNode(self, reader):
        """Read a PONG_FIND_NODE payload.

        Returns:
            Tuple ``(remoteId, remoteCount, remoteNodes)``.
        """
        remoteId = await reader.readexactly(20)
        remoteCount = struct.unpack('B', await reader.readexactly(1))[0]
        remoteNodes = []
        for _ in range(remoteCount):
            remoteNodes.append(await self.read_node(reader))
        return remoteId, remoteCount, remoteNodes

    def pack_findValue(self, local, remote, echo, key):
        """Pack a FIND_VALUE message.

        Args:
            local: Self Node
            remote: Self Address
            echo: Random Echo Message
            key: 20-byte key to find

        Returns:
            Packed Data to Send
        """
        # Consistency fix: honour the ``remote`` argument like every other
        # pack_* method.  Previously this packed self.service.server.remote
        # and ignored the parameter; callers are expected to pass the
        # server's own address, so the bytes on the wire should be
        # unchanged in practice -- verify callers.
        return b"".join([
            struct.pack('B', const.kad.command.FIND_VALUE),
            echo,
            local.id,
            self.pack_remote(remote),
            key
        ])

    def pack_pong_findValue(self, local, remote, echo, key, value):
        """Pack a PONG_FIND_VALUE message carrying the found value.

        Args:
            local: Self Node
            remote: Self Address
            echo: Received Echo Message
            key: 20-byte key that was found
            value: value bytes, length-prefixed with a big-endian uint32

        Returns:
            Packed Data to Send
        """
        # Same consistency fix as pack_findValue: use ``remote`` instead of
        # self.service.server.remote.
        return b"".join([
            struct.pack('B', const.kad.command.PONG_FIND_VALUE),
            echo,
            local.id,
            self.pack_remote(remote),
            key,
            struct.pack('>L', len(value)),
            value
        ])

    async def read_findValue(self, reader):
        """Read a FIND_VALUE payload: the 20-byte key."""
        key = await reader.readexactly(20)
        return key

    async def read_pong_findValue(self, reader):
        """Read a PONG_FIND_VALUE payload: key, uint32 length, value bytes."""
        key = await reader.readexactly(20)
        len_value = struct.unpack('>L', await reader.readexactly(4))[0]
        value = await reader.readexactly(len_value)
        return key, value

    def pack_reduce(self, local, remote, echo, keyStart, keyEnd):
        """Pack a REDUCE message for the key range [keyStart, keyEnd].

        Args:
            local: Self Node
            remote: Self Address
            echo: Random Echo Message
            keyStart, keyEnd: 20-byte keys bounding the range to reduce

        Returns:
            Packed Data to Send
        """
        # Consistency fix: use ``remote`` instead of
        # self.service.server.remote (see pack_findValue).
        return b"".join([
            struct.pack('B', const.kad.command.REDUCE),
            echo,
            local.id,
            self.pack_remote(remote),
            keyStart,
            keyEnd
        ])

    def pack_pong_reduce(self, local, remote, echo, keyStart, keyEnd, value):
        """Pack a PONG_REDUCE message carrying the reduced value.

        Args:
            local: Self Node
            remote: Self Address
            echo: Received Echo Message
            keyStart, keyEnd: 20-byte keys bounding the reduced range
            value: value bytes, length-prefixed with a big-endian uint32

        Returns:
            Packed Data to Send
        """
        # Consistency fix: use ``remote`` instead of
        # self.service.server.remote (see pack_findValue).
        return b"".join([
            struct.pack('B', const.kad.command.PONG_REDUCE),
            echo,
            local.id,
            self.pack_remote(remote),
            keyStart,
            keyEnd,
            struct.pack('>L', len(value)),
            value
        ])

    async def read_reduce(self, reader):
        """Read a REDUCE payload: two 20-byte range keys."""
        keyStart = await reader.readexactly(20)
        keyEnd = await reader.readexactly(20)
        return keyStart, keyEnd

    async def read_pong_reduce(self, reader):
        """Read a PONG_REDUCE payload: range keys, uint32 length, value."""
        keyStart = await reader.readexactly(20)
        keyEnd = await reader.readexactly(20)
        len_value = struct.unpack('>L', await reader.readexactly(4))[0]
        value = await reader.readexactly(len_value)
        return keyStart, keyEnd, value

    def get_command_string(self, id):
        """Return the human-readable name of a numeric command id."""
        return const.kad.command.COMMANDS[id]

    def pack_remote(self, remote):
        """Pack an address: uint8 IP length, uint16 BE port, raw IPv4 bytes."""
        remote_ip = socket.inet_aton(remote.host)
        return b"".join([
            struct.pack('>BH', len(remote_ip), remote.port),
            remote_ip
        ])

    async def read_remote(self, reader):
        """Inverse of :meth:`pack_remote`; returns a ``Remote``."""
        ip_size, port = struct.unpack('>BH', await reader.readexactly(3))
        host = socket.inet_ntoa(await reader.readexactly(ip_size))
        return Remote(
            host = host,
            port = port
        )

    def pack_node(self, node):
        """Pack a node as its 20-byte id followed by its packed address."""
        return b"".join([
            node.id,
            self.pack_remote(node.remote)
        ])

    async def read_node(self, reader):
        """Inverse of :meth:`pack_node`; returns a ``Node``."""
        node = Node(
            await reader.readexactly(20),
            remote = await self.read_remote(reader)
        )
        return node

    async def read_command(self, reader):
        """Read one complete message (header + payload) from ``reader``.

        Args:
            reader: an ``asyncio.StreamReader``

        Returns:
            Tuple ``(command, echo, remoteNode, payload)`` where ``payload``
            is whatever the command-specific ``read_*`` coroutine returns.
            An unknown command id falls through and yields ``None``
            (unchanged from the original behaviour).
        """
        command = struct.unpack('B', await reader.readexactly(1))[0]
        echo = await reader.readexactly(20)
        remoteNode = Node(
            id = await reader.readexactly(20),
            remote = await self.read_remote(reader)
        )
        data = (command, echo, remoteNode)
        # Bug fix: compare command ids with ``==`` rather than ``is``.  The
        # id is an int freshly unpacked from the wire, so identity
        # comparison only worked by accident of CPython's small-int cache.
        if command == const.kad.command.PING:
            return (*data, await self.read_ping(reader))
        elif command == const.kad.command.PONG:
            return (*data, await self.read_pong(reader))
        elif command == const.kad.command.STORE:
            return (*data, await self.read_store(reader))
        elif command == const.kad.command.PONG_STORE:
            return (*data, await self.read_pong_store(reader))
        elif command == const.kad.command.FIND_NODE:
            return (*data, await self.read_findNode(reader))
        elif command == const.kad.command.PONG_FIND_NODE:
            return (*data, await self.read_pong_findNode(reader))
        elif command == const.kad.command.FIND_VALUE:
            return (*data, await self.read_findValue(reader))
        elif command == const.kad.command.PONG_FIND_VALUE:
            return (*data, await self.read_pong_findValue(reader))
        elif command == const.kad.command.REDUCE:
            return (*data, await self.read_reduce(reader))
        elif command == const.kad.command.PONG_REDUCE:
            return (*data, await self.read_pong_reduce(reader))
| {
"content_hash": "0cd861f047984bef53646c11d2eefdae",
"timestamp": "",
"source": "github",
"line_count": 338,
"max_line_length": 77,
"avg_line_length": 29.340236686390533,
"alnum_prop": 0.5403851971362307,
"repo_name": "SkyZH/ddcm-protocol",
"id": "7047bb14535210c97d1d029f6fd92b25e359aa5f",
"size": "9917",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ddcm/TCPService/TCPRPC.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "80828"
},
{
"name": "Shell",
"bytes": "68"
}
],
"symlink_target": ""
} |
'''
Created on Jan 17, 2014
@author: oliwa
'''
import sys as sys
import numpy as np
from prody.dynamics.anm import calcANM, ANM
from prody.dynamics.editing import extendModel, sliceModel
from prody.dynamics.functions import saveModel, loadModel, writeArray
from prody.proteins.pdbfile import writePDB, parsePDB
from prody.dynamics.mode import Vector
from prody.measure.measure import calcCenter, calcDistance
from prody.dynamics.compare import calcOverlap, calcCumulOverlap,\
calcSubspaceOverlap, calcCovOverlap, printOverlapTable, getOverlapTable
from prody.apps.prody_apps.prody_contacts import prody_contacts
import traceback
from prody.dynamics.nmdfile import writeNMD
import scipy as sp
class ANMs(object):
"""
This class holds all the ANMs for an encounter.
"""
def __init__(self, utils):
    """
    Constructor

    Args:
        utils: shared helper/configuration object; stored for use by the
            ANM-building methods of this class (selection helpers, config
            flags, Hessian utilities).
    """
    self.utils = utils
def createSlcSelectionString(self, reference, isBoundComplex, ref_chain, referenceTitle):
    """ Under the assumption that is reflected in the Benchmark 4.0 that the receptor atoms are set before the
    ligand atoms (spacially in the PDB file), if the current protein under investigation is a ligand,
    an offset is added to the selection string to match the atoms of the ligand from the complex.

    Args:
        reference: structure the model was built on (the bound complex
            when isBoundComplex is true)
        isBoundComplex: True when slicing a model built on the whole
            bound complex
        ref_chain: matched chain whose selection string is wanted
        referenceTitle: title used by utils.isReceptor to decide
            receptor vs. ligand

    Returns:
        The (possibly offset) atom selection string.
    """
    if isBoundComplex and not self.utils.isReceptor(referenceTitle):
        print "adding offset"
        # Ligand atoms follow the receptor ("R.") atoms in the complex, so
        # shift the selection by the receptor's atom count.
        return self.utils.addOffset(ref_chain.getSelstr(), reference.select('segment "R."').numAtoms())
    else:
        print "using original selstr"
        return ref_chain.getSelstr()
def calcANMs(self, reference, ref_chain, numberOfModes, encounter, selstr='calpha', whatAtomsToMatch='calpha', modified="", forceRebuild=False, isBoundComplex=False):
    """Build (or load) the ANM for ``reference`` plus its all-atom
    extension and the slice back to the matched chain, storing the three
    (model, selection) tuples on self._anm, self._anm_extend and
    self._anm_slc (and self._anm_slc_counterpart when isBoundComplex).

    NOTE(review): the load-from-disc branch calls
    createSlcSelectionString with only 3 arguments while that method
    takes 4 (referenceTitle missing), so it would raise TypeError if
    ever reached -- which matches the commented-out warning about not
    using loaded models.
    """
    # if the base model does not exist, it needs to be created along with the
    # extended and slicedback models
    if forceRebuild or not self.doesANMExist(reference, numberOfModes, selstr, whatAtomsToMatch, modified):
        # Create the anm
        anm = calcANM(reference, n_modes=numberOfModes, selstr=selstr)
        # First extend the anm on all atoms
        anm_extend = extendModel(anm[0], anm[1], reference, norm=True)
        # Then slice it back to matched
        selectionAtoms = self.createSlcSelectionString(reference, isBoundComplex, ref_chain, encounter.getReference().getTitle())
        anm_slc = sliceModel(anm_extend[0], anm_extend[1], selectionAtoms)
        # If isBoundComplex, slice one anm back to its overall matched chains
        if isBoundComplex:
            selectionAtomsCounterpart = self.createSlcSelectionString(reference, isBoundComplex, encounter.getBoundCounterpartChain(), encounter.getUnboundCounterpart().getTitle())
            anm_slc_counterpart= sliceModel(anm_extend[0], anm_extend[1], selectionAtomsCounterpart)
        # Save the models
        #            saveModel(anm[0],
        #                      filename=self.getANMPath(reference, numberOfModes, selstr, whatAtomsToMatch),
        #                      matrices=True)
        #            saveModel(anm_extend[0],
        #                      filename=self.getANMPath(reference, numberOfModes, selstr, whatAtomsToMatch, modified="extended"),
        #                      matrices=True
        #                      )
        #            saveModel(anm_slc[0],
        #                      filename=self.getANMPath(reference, numberOfModes, selstr, whatAtomsToMatch, modified="slicedback"),
        #                      matrices=True
        #                      )
        print "created and saved models"
        # print "reference, it is the complex: ", reference.select('calpha and segment "R."').numAtoms()
        # print "to slice on, it is the mob_chain: ", ref_chain.numAtoms()
        print "anm hessian : " + str(anm[0].getHessian().shape)
        print "number of calpha : " + str(reference.select('calpha').numAtoms())
        print "anm size : " + str(anm[0].getArray().shape)
        print "anm_ext size : " + str(anm_extend[0].getArray().shape)
        print "anm_slice size : " + str(anm_slc[0].getArray().shape)
        print "selectionAtoms : " + selectionAtoms
        if isBoundComplex:
            print "anm slice counterpart size: " + str(anm_slc_counterpart[0].getArray().shape)
            print "selectionAtoms counterpart: " + selectionAtomsCounterpart
        # Save the models"
        self._anm = anm
        self._anm_extend = anm_extend
        self._anm_slc = anm_slc
        if isBoundComplex:
            self._anm_slc_counterpart = anm_slc_counterpart
    else:
        #raise Exception("Problem with capturing the selection of saved models, do not use load models from files now.")
        try:
            # load models
            anmModel = loadModel(self.getANMPath(reference, numberOfModes, selstr, whatAtomsToMatch)+".anm.npz")
            anm_extendModel = loadModel(self.getANMPath(reference, numberOfModes, selstr, whatAtomsToMatch, modified="extended")+".nma.npz")
            anm_slcModel = loadModel(self.getANMPath(reference, numberOfModes, selstr, whatAtomsToMatch, modified="slicedback")+".nma.npz")
            # store models selections
            anmModelSelection = reference.select(selstr)
            anm_extendModelSelection = reference
            # NOTE(review): missing the 4th argument (referenceTitle) --
            # this call would raise TypeError if this branch ran.
            selectionAtoms = self.createSlcSelectionString(reference, isBoundComplex, ref_chain)
            anm_slcModelSelection = reference.select(selectionAtoms)
            # recombine models and selections as tuples
            anm = (anmModel, anmModelSelection)
            anm_extend = (anm_extendModel, anm_extendModelSelection)
            anm_slc = (anm_slcModel, anm_slcModelSelection)
            print "loaded models"
            print "anm size : " + str(anm[0].getArray().shape)
            print "anm_ext size : " + str(anm_extend[0].getArray().shape)
            print "anm_slice size: " + str(anm_slc[0].getArray().shape)
            print "selectionAtoms: " + selectionAtoms
            self._anm = anm
            self._anm_extend = anm_extend
            self._anm_slc = anm_slc
        except IOError as e:
            print "Error loading ANM models from disc: "+str(e)
def calcANMsForPart2a2k(self, reference, counterpart, proteinComplex, ref_chain, counterpart_chain, chain_complex, numberOfModes, selstr='calpha', whatAtomsToMatch='calpha'):
    """Build sliced ANMs for reference, counterpart and the full complex
    (half the requested modes for each partner, all of them for the
    complex) and store them on self._anm_reference(_slc),
    self._anm_counterpart(_slc) and self._anm_complex(_slc).

    NOTE(review): ``numberOfModes/2`` is floor division only for int
    inputs under Python 2 -- confirm callers always pass an even int.
    """
    # Create the anm of reference, counterpart and proteinComplex)
    # print "reference, counterpart, proteinComplex, chain_complex (calphas, calphas*3-6) : ", (reference.select('calpha').numAtoms(), reference.select('calpha').numAtoms()*3 -6), (counterpart.select('calpha').numAtoms(), counterpart.select('calpha').numAtoms()*3-6), (proteinComplex.select('calpha').numAtoms(), proteinComplex.select('calpha').numAtoms()*3-6), (chain_complex.select('calpha').numAtoms(), chain_complex.select('calpha').numAtoms()*3 -6)
    # print "anm_reference, anm_counterpart, anm_complex hessian shapes : ", anm_reference[0].getHessian().shape, anm_counterpart[0].getHessian().shape, anm_complex[0].getHessian().shape
    # print "anm_reference, anm_counterpart, anm_complex, anm_complex_slc getArray() shapes : ", anm_reference[0].getArray().shape, anm_counterpart[0].getArray().shape, anm_complex[0].getArray().shape, anm_complex_slc[0].getArray().shape
    self._anm_reference, self._anm_reference_slc = self._calcANMsUnified(reference, ref_chain, numberOfModes/2, selstr, whatAtomsToMatch)
    self._anm_counterpart, self._anm_counterpart_slc = self._calcANMsUnified(counterpart, counterpart_chain, numberOfModes/2, selstr, whatAtomsToMatch)
    # print "15 ang contact before moving atoms:", proteinComplex.select('same residue as exwithin 15 of segment "L." ').numAtoms()
    # self._moveSegment(proteinComplex, "L", 30)
    # if proteinComplex.select('same residue as exwithin 15 of segment "L." ') != None:
    #     print "15 ang contact after moving atoms: ", proteinComplex.select('same residue as exwithin 15 of segment "L." ').numAtoms()
    # else:
    #     print "15 ang contact after moving atoms: 0"
    self._anm_complex, self._anm_complex_slc = self._calcANMsUnified(proteinComplex, chain_complex, numberOfModes, selstr, whatAtomsToMatch)
    #self.utils.testHessianSubMatrices(self._anm_reference, self._anm_counterpart, self._anm_complex)
    # check blockmatrix differences and pymol output
    # useRelError = True
    #significantDifferences = self.utils.testBlockMatrixMembership(self._anm_reference[0].getHessian(), self._anm_counterpart[0].getHessian(), self._anm_complex[0].getHessian(), useRelativeError=useRelError)
    #self.utils.whichPatternsAreAffectedbySignificantDifferences(significantDifferences)
    # assert reference.getResnums()[0] == proteinComplex.getResnums()[0]
    #print self.utils.significantDifferencesToPymolResiduesString(significantDifferences, reference.getResnums()[0])
    print "anm_reference_slc, anm_counterpart_slc, anm_complex_slc getArray() shapes : ", self._anm_reference_slc[0].getArray().shape, self._anm_counterpart_slc[0].getArray().shape, self._anm_complex_slc[0].getArray().shape
def calcANMsUnified(self, reference, counterpart, proteinComplex, numberOfModes, encounter, ref_chain = None, counterpart_chain = None, chain_complex = None, selstr='calpha', whatAtomsToMatch='calpha',):
""" Calculate the ANMs for the NMA. If examinations on the complex, it is assumed (for now) that the reference protein is the receptor. """
if (ref_chain == None) and (counterpart_chain == None) and (chain_complex == None):
self.bound_provided = False
else:
self.bound_provided = True
if self.utils.config.investigationsOn == "Individual" or self.utils.config.investigationsOn == "Complex" :
assert self.utils.config.whichCustomHIndividual == "HC_subvector" or self.utils.config.whichCustomHIndividual == "submatrix" or self.utils.config.whichCustomHIndividual == "canonical"
numberOfModesComplex = min((proteinComplex.select('calpha').numAtoms()*3 - 6), self.utils.config.maxModesToCalculate)
if ref_chain != None:
self._anm_reference, self._anm_reference_slc = self._calcANMsUnified(reference, numberOfModes, selstr, whatAtomsToMatch, ref_chain)
else:
self._anm_reference, self._anm_reference_slc = self._calcANMsUnified(reference, numberOfModes, selstr, whatAtomsToMatch)
self._anm_counterpart = calcANM(counterpart, n_modes = numberOfModes, selstr = selstr, zeros = True)
if chain_complex != None:
self._anm_complex, self._anm_complex_slc = self._calcANMsUnified(proteinComplex, numberOfModesComplex, selstr, whatAtomsToMatch, chain_complex)
else:
self._anm_complex, self._anm_complex_slc = self._calcANMsUnified(proteinComplex, numberOfModesComplex, selstr, whatAtomsToMatch)
# elif self.utils.config.investigationsOn == "Complex":
# numberOfModesComplex = numberOfModes*2
# self._anm_reference, self._anm_reference_slc = self._calcANMsUnified(reference, numberOfModes, selstr, whatAtomsToMatch, ref_chain)
# self._anm_counterpart, self._anm_counterpart_slc = self._calcANMsUnified(counterpart, numberOfModes, selstr, whatAtomsToMatch, counterpart_chain)
# self._anm_complex, self._anm_complex_slc = self._calcANMsUnified(proteinComplex, numberOfModesComplex, selstr, whatAtomsToMatch, chain_complex)
print "anm_reference anm_counterpart, anm_complex getArray() shapes : ", self._anm_reference[0].getArray().shape, self._anm_counterpart[0].getArray().shape, self._anm_complex[0].getArray().shape
print "anm_reference_slc, anm_complex_slc getArray() shapes : ", self._anm_reference_slc[0].getArray().shape, self._anm_complex_slc[0].getArray().shape
# create custom H via U1
if self.utils.config.customH:
HC = self._anm_complex[0].getHessian()
if self.utils.isReceptor(reference.getTitle()):
HR = self._anm_reference[0].getHessian()
HL = self._anm_counterpart[0].getHessian()
else:
HR = self._anm_counterpart[0].getHessian()
HL = self._anm_reference[0].getHessian()
HRtilde = HC[:HR.shape[0], :HR.shape[1]]
HLtilde = HC[HR.shape[0]:HR.shape[0]+HL.shape[0], HR.shape[1]:HR.shape[1]+HL.shape[1]]
assert HR.shape == HRtilde.shape
assert HL.shape == HLtilde.shape
# for now assert that reference is always the receptor
if self.utils.config.investigationsOn == "Complex":
assert self.utils.isReceptor(reference.getTitle())
HCcustomBuild = np.zeros((HC.shape[0], HC.shape[1]))
if self.utils.isReceptor(reference.getTitle()):
if self.utils.config.whichCustomHC == "HC_U1" or self.utils.config.whichCustomHC == "HC_U1_1k1k":
HRtildeH_ANew, interCalphaIndicesHR = self.calcCustomH_ANew(HR.copy(), encounter.getReference(), encounter.getUnboundCounterpart(), encounter, "C_u", "r_ij", True, selstr)
HLtildeH_ANew, interCalphaIndicesHL = self.calcCustomH_ANew(HL.copy(), encounter.getUnboundCounterpart(), encounter.getReference(), encounter, "C_u", "r_ij", False, selstr)
HRL_new = self.calcCustomH_ANew_IJ(encounter.getReference(), encounter.getUnboundCounterpart(), encounter, False, "r_ij", True, selstr)
elif self.utils.config.whichCustomHC == "HC_0" or self.utils.config.whichCustomHC == "HC_06":
HRtildeH_ANew = HR.copy()
HLtildeH_ANew = HL.copy()
HRL_new = np.zeros(((reference.select('calpha').numAtoms()*3), (counterpart.select('calpha').numAtoms()*3) ))
interCalphaIndicesHR = None
interCalphaIndicesHL = None
print "reference is receptor, shapes of HRtilde, HLtilde, HRL: ", HRtildeH_ANew.shape, HLtildeH_ANew.shape, HRL_new.shape
else:
if self.utils.config.whichCustomHC == "HC_U1":
HRtildeH_ANew, interCalphaIndicesHR = self.calcCustomH_ANew(HR.copy(), encounter.getUnboundCounterpart(), encounter.getReference(), encounter, "C_u", "r_ij", False, selstr)
HLtildeH_ANew, interCalphaIndicesHL = self.calcCustomH_ANew(HL.copy(), encounter.getReference(), encounter.getUnboundCounterpart(), encounter, "C_u", "r_ij", True, selstr)
HRL_new = self.calcCustomH_ANew_IJ(encounter.getUnboundCounterpart(), encounter.getReference(), encounter, False, "r_ij", False, selstr)
print "reference is ligand, shapes of HLtilde, HRtilde, HRL: ", HLtildeH_ANew.shape, HRtildeH_ANew.shape, HRL_new.shape
# put the new HRtilde and HLtilde inside HC
HCcustomBuild[:HR.shape[0], :HR.shape[1]] = HRtildeH_ANew
HCcustomBuild[HR.shape[0]:HR.shape[0]+HL.shape[0], HR.shape[1]:HR.shape[1]+HL.shape[1]] = HLtildeH_ANew
HCcustomBuild[0:HR.shape[0], HR.shape[1]:HR.shape[1]+HL.shape[1]] = HRL_new
HCcustomBuild[HR.shape[0]:HR.shape[0]+HL.shape[0], 0:HR.shape[1]] = HRL_new.T
# optional assertion to test if HCcustomBuild equals the original HC if k = 1 and d = 15 (default ProDy settings)
if (self.utils.config.whichCustomHC == "HC_U1" and self.utils.config.customHRdistance == 15 and self.utils.config.customForceConstant == 1.0):
# assert np.allclose(HC, HCcustomBuild) # assert this if k = 1, A = 15
print "not asserting HCcustomBuild equals original HC with k1 A15"
# Projection
# def projectHessian(self, hessian, reference, proteinComplex, referenceSegment, projectionStyle, projectOnlyReferencePartOfHC=False, interCalphaIndices=None):
if self.utils.config.projectHessian:
if self.utils.config.investigationsOn == "Individual" or self.utils.config.investigationsOn == "Complex":
if self.utils.isReceptor(reference.getTitle()):
if self.utils.config.whichCustomHC == "HC_U1":
if self.utils.config.projectionStyle == "full" or self.utils.config.projectionStyle == "intra":
if self.utils.config.whichCustomHIndividual == "HC_subvector":
HCcustomBuild = self.projectHessian(HCcustomBuild.copy(), reference, proteinComplex, "R", self.utils.config.projectionStyle, True, interCalphaIndicesHR)
#HCcustomBuild = self.projectHessian(HCcustomBuild.copy(), proteinComplex, proteinComplex, '', self.utils.config.projectionStyle, False, interCalphaIndicesHR)
elif self.utils.config.whichCustomHIndividual == "submatrix":
HRtildeH_ANew = self.projectHessian(HRtildeH_ANew.copy(), reference, proteinComplex, "R", self.utils.config.projectionStyle, False, interCalphaIndicesHR)
elif self.utils.config.projectionStyle == "fixedDomainFrame":
HCcustomBuild = self.transformHessianToFixedDomainFrame(HCcustomBuild.copy(), reference, proteinComplex, "R", self.utils.config.projectionStyle)
# else reference is the ligand
else:
if self.utils.config.whichCustomHC == "HC_U1":
if self.utils.config.projectionStyle == "full" or self.utils.config.projectionStyle == "intra":
if self.utils.config.whichCustomHIndividual == "HC_subvector":
HCcustomBuild = self.projectHessian(HCcustomBuild.copy(), reference, proteinComplex, "L", self.utils.config.projectionStyle, True, interCalphaIndicesHL)
#HCcustomBuild = self.projectHessian(HCcustomBuild.copy(), proteinComplex, proteinComplex, '', self.utils.config.projectionStyle, False, interCalphaIndicesHL)
elif self.utils.config.whichCustomHIndividual == "submatrix":
HLtildeH_ANew = self.projectHessian(HLtildeH_ANew.copy(), reference, proteinComplex, "L", self.utils.config.projectionStyle, False, interCalphaIndicesHL)
elif self.utils.config.projectionStyle == "fixedDomainFrame":
HCcustomBuild = self.transformHessianToFixedDomainFrame(HCcustomBuild.copy(), reference, proteinComplex, "L", self.utils.config.projectionStyle)
elif self.utils.config.investigationsOn == "Complex":
# project out the rigid body motions of the receptor. if the goal is to project the whole complex, do: HCcustomBuild = self.projectHessian(HCcustomBuild, proteinComplex, proteinComplex, '')
if self.utils.config.projectionStyle == "full" or self.utils.config.projectionStyle == "intra":
HCcustomBuild = self.projectHessian(HCcustomBuild.copy(), reference, proteinComplex, "R", self.utils.config.projectionStyle, True, interCalphaIndicesHR)
elif self.utils.config.projectionStyle == "fullComplex":
HCcustomBuild = self.projectHessian(HCcustomBuild.copy(), proteinComplex, proteinComplex, '', self.utils.config.projectionStyle)
elif self.utils.config.projectionStyle == "fixedDomainFrame":
HCcustomBuild = self.transformHessianToFixedDomainFrame(HCcustomBuild.copy(), reference, proteinComplex, "R", self.utils.config.projectionStyle)
else:
raise Exception('unknown projection style')
if self.utils.config.investigationsOn == "Complex" or self.utils.config.whichCustomHIndividual == "HC_subvector":
# Create the custom complex ANM
self._anm_complex_tilde = ANM(self._anm_complex[0].getTitle()+"_"+self.utils.config.whichCustomHC)
self._anm_complex_tilde.setHessian(HCcustomBuild)
if self.utils.config.calculateZeroEigvalModes:
if self.utils.config.whichCustomHC == "HC_0" or self.utils.config.whichCustomHC == "HC_06":
numberOfModesComplex += 6
self._anm_complex_tilde.calcModes(n_modes=numberOfModesComplex, zeros=True)
else:
self._anm_complex_tilde.calcModes(n_modes=numberOfModesComplex)
# Extend the self._anm_reference_tilde on all atoms
anm_complex_tilde_extend = extendModel(self._anm_complex_tilde, self._anm_complex[1], proteinComplex, norm=True)
# Then slice the anm_complex to the matched atoms
self._anm_complex_tilde_slc = sliceModel(anm_complex_tilde_extend[0], anm_complex_tilde_extend[1], selstr)
# Normalize the modes of the sliced anm
self._anm_complex_tilde_slc = self.getNormalizedANM(self._anm_complex_tilde_slc)
# Replace the complex anm and the complex_slc anm with the modified ANMs
print "Replacing ANM H with ANM Htilde for the complex"
self._anm_complex = (self._anm_complex_tilde, self._anm_complex[1])
self._anm_complex_slc = self._anm_complex_tilde_slc
# modify HR to have the sliced part of HC_tilde
if self.utils.config.investigationsOn == "Individual" or self.utils.config.investigationsOn == "Complex":
if self.utils.config.whichCustomHIndividual == "HC_subvector":
Marray = self.utils.sliceComplexModestoMatchProtein(self._anm_complex[0].getArray(), reference, encounter.getReferenceSegment())
self._anm_reference_tilde = ANM(self._anm_reference[0].getTitle()+"_"+self.utils.config.whichCustomHC)
self._anm_reference_tilde.setEigens(Marray, self._anm_complex[0].getEigvals())
self._anm_reference_tilde = (self._anm_reference_tilde, self._anm_reference[1])
self._anm_reference_tilde = self.getNormalizedANM(self._anm_reference_tilde)
# submatrix, take the new HRtilde/HLtilde, re-calculate its modes and replace the previous ANM
elif self.utils.config.whichCustomHIndividual == "submatrix":
if self.utils.isReceptor(reference.getTitle()):
submatrix = HRtildeH_ANew
else:
submatrix = HLtildeH_ANew
self._anm_reference_tilde = ANM(self._anm_reference[0].getTitle()+"_"+self.utils.config.whichCustomHC)
self._anm_reference_tilde.setHessian(submatrix)
if self.utils.config.calculateZeroEigvalModes:
self._anm_reference_tilde.calcModes(n_modes=numberOfModes, zeros=True)
else:
self._anm_reference_tilde.calcModes(n_modes=numberOfModes)
self._anm_reference_tilde = (self._anm_reference_tilde, self._anm_reference[1])
# Extend the self._anm_reference_tilde on all atoms
anm_reference_tilde_extend = extendModel(self._anm_reference_tilde[0], self._anm_reference[1], reference, norm=True)
# Then slice the anm_reference to the matched
self._anm_reference_tilde_slc = sliceModel(anm_reference_tilde_extend[0], anm_reference_tilde_extend[1], selstr)
self._anm_reference_tilde_slc = self.getNormalizedANM(self._anm_reference_tilde_slc)
# Replace reference and reference_slc with the modified ANMs
print "Replacing ANM H with ANM Htilde for the reference"
self._anm_reference = self._anm_reference_tilde
self._anm_reference_slc = self._anm_reference_tilde_slc
    def calcANMsForPart2b2k(self, reference, counterpart, proteinComplex, ref_chain, counterpart_chain, chain_complex, numberOfModes, encounter, selstr='calpha', whatAtomsToMatch='calpha'):
        """ Unbound complex to bound complex NMA, it is assumed that the reference is the receptor and is the first object in the complex pdb file
        This method creates self.* NMA objects
        Args:
            reference: the receptor protein
            counterpart: the ligand protein
            proteinComplex: the protein complex
            ref_chain: the matched part of the reference
            counterpart_chain: the matched part of the counterpart
            chain_complex: the matched part on the complex
            numberOfModes: the 2k number of modes (each single protein gets numberOfModes/2, the complex gets numberOfModes)
            encounter: object aggregating proteins
            selstr: the selection string for the NMA, course grained is calpha
            whatAtomsToMatch: atom selection used when matching models (default calpha)
        """
        # Create the anm of reference, counterpart and proteinComplex)
        # NOTE(review): _calcANMsUnified (defined below) has the signature
        # (reference, numberOfModes, selstr, whatAtomsToMatch, direct_call, ref_chain);
        # here the chain object is passed as the second positional argument where
        # numberOfModes is expected -- confirm which version of _calcANMsUnified
        # these calls were written against.
        self._anm_reference, self._anm_reference_slc = self._calcANMsUnified(reference, ref_chain, numberOfModes/2, selstr, whatAtomsToMatch)
        self._anm_counterpart, self._anm_counterpart_slc = self._calcANMsUnified(counterpart, counterpart_chain, numberOfModes/2, selstr, whatAtomsToMatch)
        self._anm_complex, self._anm_complex_slc = self._calcANMsUnified(proteinComplex, chain_complex, numberOfModes, selstr, whatAtomsToMatch)
        print "anm_reference anm_counterpart, anm_complex getArray() shapes : ", self._anm_reference[0].getArray().shape, self._anm_counterpart[0].getArray().shape, self._anm_complex[0].getArray().shape
        print "anm_reference_slc, anm_counterpart_slc, anm_complex_slc getArray() shapes : ", self._anm_reference_slc[0].getArray().shape, self._anm_counterpart_slc[0].getArray().shape, self._anm_complex_slc[0].getArray().shape
        # modify the hessians
        if self.utils.config.customH:
            # Full complex Hessian; the receptor block is assumed to come first
            # (asserted below via isReceptor).
            HC = self._anm_complex[0].getHessian()
            if self.utils.isReceptor(reference.getTitle()):
                HR = self._anm_reference[0].getHessian()
                HL = self._anm_counterpart[0].getHessian()
            else:
                HR = self._anm_counterpart[0].getHessian()
                HL = self._anm_reference[0].getHessian()
            # Diagonal sub-blocks of HC corresponding to receptor and ligand.
            HRtilde = HC[:HR.shape[0], :HR.shape[1]]
            HLtilde = HC[HR.shape[0]:HR.shape[0]+HL.shape[0], HR.shape[1]:HR.shape[1]+HL.shape[1]]
            assert HR.shape == HRtilde.shape
            assert HL.shape == HLtilde.shape
            # for now assert that reference is always the receptor, in case of complex investigation
            assert self.utils.isReceptor(reference.getTitle())
            HCcustomBuild = np.zeros((HC.shape[0], HC.shape[1]))
            # NOTE(review): if whichCustomHC is none of HC_U1/HC_0/HC_06, the
            # HRtildeH_ANew/HLtildeH_ANew/HRL_new names below stay unassigned and
            # the following print would raise NameError -- confirm the config
            # only ever takes these three values here.
            if self.utils.config.whichCustomHC == "HC_U1":
                # create the complex hessian with interactions on the off diagonal using U1
                print "HC_U1"
                HRtildeH_ANew = self.calcCustomH_ANew(HR.copy(), encounter.getReference(), encounter.getUnboundCounterpart(), encounter, "C_u", "r_ij", True, selstr)
                HLtildeH_ANew = self.calcCustomH_ANew(HL.copy(), encounter.getUnboundCounterpart(), encounter.getReference(), encounter, "C_u", "r_ij", False, selstr)
                HRL_new = self.calcCustomH_ANew_IJ(encounter.getReference(), encounter.getUnboundCounterpart(), encounter, False, "r_ij", True, selstr)
            elif self.utils.config.whichCustomHC == "HC_0" or self.utils.config.whichCustomHC == "HC_06":
                # create the hessian by just using canonical HR and HL and offmatrices zero
                print "HC_0 or HC_06"
                HRtildeH_ANew = HR.copy()
                HLtildeH_ANew = HL.copy()
                HRL_new = np.zeros(((reference.select('calpha').numAtoms()*3), (counterpart.select('calpha').numAtoms()*3) ))
            print "reference is receptor, shapes of HRtilde, HLtilde, HRL: ", HRtildeH_ANew.shape, HLtildeH_ANew.shape, HRL_new.shape
            print "finished projecting H, anm_reference_tilde calc modes"
            # put the new HRtilde and HLtilde inside HC
            HCcustomBuild[:HR.shape[0], :HR.shape[1]] = HRtildeH_ANew
            HCcustomBuild[HR.shape[0]:HR.shape[0]+HL.shape[0], HR.shape[1]:HR.shape[1]+HL.shape[1]] = HLtildeH_ANew
            HCcustomBuild[0:HR.shape[0], HR.shape[1]:HR.shape[1]+HL.shape[1]] = HRL_new
            HCcustomBuild[HR.shape[0]:HR.shape[0]+HL.shape[0], 0:HR.shape[1]] = HRL_new.T
            #if self.utils.config.whichCustomHC == "HC_U1":
            #    assert np.allclose(HC, HCcustomBuild) # assert this if k = 1, A = 15
            #    print "asserted HC with k1 A 15"
            if self.utils.config.projectHessian:
                # NOTE(review): this 4-argument call passes '' where other call
                # sites pass a segment name plus a projectionStyle -- confirm it
                # matches the current projectHessian signature.
                HCcustomBuild = self.projectHessian(HCcustomBuild, proteinComplex, proteinComplex, '')
            # make HC anm
            self._anm_complex_tilde = ANM(self._anm_complex[0].getTitle()+"_"+self.utils.config.whichCustomHC)
            self._anm_complex_tilde.setHessian(HCcustomBuild)
            self._anm_complex_tilde.calcModes(n_modes=numberOfModes)
            # Extend the self._anm_reference_tilde on all atoms
            anm_complex_tilde_extend = extendModel(self._anm_complex_tilde, self._anm_complex[1], proteinComplex, norm=True)
            # Then slice the anm_complex to the matched atoms
            self._anm_complex_tilde_slc = sliceModel(anm_complex_tilde_extend[0], anm_complex_tilde_extend[1], chain_complex.getSelstr())
            # Replace the complex anm and the complex_slc anm with the modified ANMs
            print "Replacing ANM H with ANM Htilde for the complex"
            self._anm_complex = (self._anm_complex_tilde, self._anm_complex[1])
            self._anm_complex_slc = self._anm_complex_tilde_slc
    def calcANMsForPart2b(self, reference, counterpart, proteinComplex, ref_chain, counterpart_chain, chain_complex, numberOfModes, encounter, selstr='calpha', whatAtomsToMatch='calpha'):
        """ Create the ANMs of the reference, counterpart and complex objects. If set in config, project the hessian matrix of the reference
        to ensure 6 zero eigenvalue modes, see formula 8.27 from the book "A practical introduction to the simulation of molecular dynamics", Field. """
        # NOTE(review): _calcANMsUnified (defined below) expects
        # (reference, numberOfModes, ...); here ref_chain is passed as the second
        # positional argument -- confirm against the intended signature.
        self._anm_reference, self._anm_reference_slc = self._calcANMsUnified(reference, ref_chain, numberOfModes, selstr, whatAtomsToMatch)
        self._anm_counterpart = calcANM(counterpart, selstr=selstr)
        # self._moveSegment(proteinComplex, "L", 50)
        # Cap the number of complex modes at 3N-6 or the configured maximum.
        numberOfModesComplex = min((proteinComplex.select('calpha').numAtoms()*3 - 6), self.utils.config.maxModesToCalculate)
        self._anm_complex, self._anm_complex_slc = self._calcANMsUnified(proteinComplex, chain_complex, numberOfModesComplex, selstr, whatAtomsToMatch)
        # project hessian matrix
        if self.utils.config.projectHessian:
            # Full complex Hessian; the receptor block is assumed to come first.
            HC = self._anm_complex[0].getHessian()
            if self.utils.isReceptor(reference.getTitle()):
                HR = self._anm_reference[0].getHessian()
                HL = self._anm_counterpart[0].getHessian()
            else:
                HR = self._anm_counterpart[0].getHessian()
                HL = self._anm_reference[0].getHessian()
            # Diagonal sub-blocks of HC corresponding to receptor and ligand.
            HRtilde = HC[:HR.shape[0], :HR.shape[1]]
            HLtilde = HC[HR.shape[0]:HR.shape[0]+HL.shape[0], HR.shape[1]:HR.shape[1]+HL.shape[1]]
            assert HR.shape == HRtilde.shape
            assert HL.shape == HLtilde.shape
            ##
            #writeArray("HRtildefromHC.txt", HRtilde, format='%f')
            #writeArray("HLtildefromHC.txt", HLtilde, format='%f')
            ##
            # Create the tilde ANM
            self._anm_reference_tilde = ANM(self._anm_reference[0].getTitle()+"_tilde")
            # Here the PH'P treatment for the hessian matrix from the normal modes book by Field
            if self.utils.isReceptor(reference.getTitle()):
                if self.utils.config.modifyHDelta:
                    print "modifying HR with deltaHR"
                    HRtilde = self.addscaledHdelta(HR, HRtilde, self.utils.config.deltamultiplicatorForH)
                # if using terms with true bound structure second derivation parts r_{ij}-r_{ij}^{2}
                if self.utils.config.customHR_A:
                    #writeArray("originalHR.txt", self._anm_reference[0].getHessian(), format='%f')
                    HRtilde = self.calcCustomH_A_NeighborsBound(self._anm_reference[0].getHessian(), encounter, selstr)
                    #writeArray("customHRtilde.txt", HRtilde, format='%f')
                print "reference is receptor, shape of HRtilde: ", HRtilde.shape
                # NOTE(review): 4-argument projectHessian call (no projectionStyle)
                # -- confirm it matches the current projectHessian signature.
                HRtilde = self.projectHessian(HRtilde, reference, proteinComplex, encounter.getReferenceSegment())
                self._anm_reference_tilde.setHessian(HRtilde)
            else:
                if self.utils.config.modifyHDelta:
                    print "modifying HL with deltaHL"
                    HLtilde = self.addscaledHdelta(HL, HLtilde, self.utils.config.deltamultiplicatorForH)
                # if using terms with true bound structure second derivation parts r_{ij}-r_{ij}^{2}
                if self.utils.config.customHR_A:
                    #writeArray("originalHL.txt", self._anm_reference[0].getHessian(), format='%f')
                    HLtilde = self.calcCustomH_A_NeighborsBound(self._anm_reference[0].getHessian(), encounter, selstr)
                    #writeArray("customHLtilde.txt", HLtilde, format='%f')
                print "reference is ligand, shape of HLtilde: ", HLtilde.shape
                HLtilde = self.projectHessian(HLtilde, reference, proteinComplex, encounter.getReferenceSegment())
                self._anm_reference_tilde.setHessian(HLtilde)
            print "finished projecting H, anm_reference_tilde calc modes"
            # testing of projected eigenvals
            self._anm_reference_tilde.calcModes(n_modes=numberOfModes)
            #print "HR eigenvals: ", self._anm_reference[0].getEigvals()[0:10]
            #print "HRtilde eigenvals: ", self._anm_reference_tilde.getEigvals()[0:10]
            # Extend the self._anm_reference_tilde on all atoms
            anm_reference_tilde_extend = extendModel(self._anm_reference_tilde, self._anm_reference[1], reference, norm=True)
            # Then slice the anm_reference to the matched
            self._anm_reference_tilde_slc = sliceModel(anm_reference_tilde_extend[0], anm_reference_tilde_extend[1], ref_chain.getSelstr())
            # Replace reference and reference_slc with the modified ANMs
            print "Replacing ANM H with ANM Htilde for the reference"
            self._anm_reference = (self._anm_reference_tilde, self._anm_reference[1])
            self._anm_reference_slc = self._anm_reference_tilde_slc
            if self.utils.config.HR1kHRtilde1k:
                # Keep an untouched copy of the original reference ANM for later
                # comparison against the tilde version.
                self._anm_reference_original, self._anm_reference_slc_original = self._calcANMsUnified(reference, ref_chain, numberOfModes, selstr, whatAtomsToMatch)
    def calcANMsForPart2bIndividualProtein_U1(self, reference, counterpart, proteinComplex, ref_chain, counterpart_chain, chain_complex, numberOfModes, encounter, selstr='calpha', whatAtomsToMatch='calpha'):
        """ Create the ANMs of the reference, counterpart and complex objects. If set in config, project the hessian matrix of the reference
        to ensure 6 zero eigenvalue modes, see formula 8.27 from the book "A practical introduction to the simulation of molecular dynamics", Field. """
        # NOTE(review): _calcANMsUnified (defined below) expects
        # (reference, numberOfModes, ...); here ref_chain is passed as the second
        # positional argument -- confirm against the intended signature.
        self._anm_reference, self._anm_reference_slc = self._calcANMsUnified(reference, ref_chain, numberOfModes, selstr, whatAtomsToMatch)
        self._anm_counterpart = calcANM(counterpart, selstr=selstr)
        # self._moveSegment(proteinComplex, "L", 50)
        # Cap the number of complex modes at 3N-6 or the configured maximum.
        numberOfModesComplex = min((proteinComplex.select('calpha').numAtoms()*3 - 6), self.utils.config.maxModesToCalculate)
        self._anm_complex, self._anm_complex_slc = self._calcANMsUnified(proteinComplex, chain_complex, numberOfModesComplex, selstr, whatAtomsToMatch)
        ###
        print "anm_reference anm_counterpart, anm_complex getArray() shapes : ", self._anm_reference[0].getArray().shape, self._anm_counterpart[0].getArray().shape, self._anm_complex[0].getArray().shape
        print "anm_reference_slc, anm_complex_slc getArray() shapes : ", self._anm_reference_slc[0].getArray().shape, self._anm_complex_slc[0].getArray().shape
        # create custom H via U1
        if self.utils.config.customH:
            # Full complex Hessian; the receptor block is assumed to come first.
            HC = self._anm_complex[0].getHessian()
            if self.utils.isReceptor(reference.getTitle()):
                HR = self._anm_reference[0].getHessian()
                HL = self._anm_counterpart[0].getHessian()
            else:
                HR = self._anm_counterpart[0].getHessian()
                HL = self._anm_reference[0].getHessian()
            # Diagonal sub-blocks of HC corresponding to receptor and ligand.
            HRtilde = HC[:HR.shape[0], :HR.shape[1]]
            HLtilde = HC[HR.shape[0]:HR.shape[0]+HL.shape[0], HR.shape[1]:HR.shape[1]+HL.shape[1]]
            assert HR.shape == HRtilde.shape
            assert HL.shape == HLtilde.shape
            # for now assert that reference is always the receptor
            HCcustomBuild = np.zeros((HC.shape[0], HC.shape[1]))
            # NOTE(review): if customHR_A is False, HRtildeH_ANew / HLtildeH_ANew /
            # HRL_new (and later HCcustomBuildprojected) are never assigned and the
            # code below would raise NameError -- confirm customHR_A is always set
            # together with customH on this code path.
            if self.utils.isReceptor(reference.getTitle()):
                if self.utils.config.customHR_A:
                    #HR, referenceStructure, neighborStructure, encounter, neighborhoodFrom, equilibriumAt, workOnReceptor=True, selstr='calpha'
                    HRtildeH_ANew = self.calcCustomH_ANew(HR.copy(), encounter.getReference(), encounter.getUnboundCounterpart(), encounter, "C_u", "r_ij", True, selstr)
                    HLtildeH_ANew = self.calcCustomH_ANew(HL.copy(), encounter.getUnboundCounterpart(), encounter.getReference(), encounter, "C_u", "r_ij", False, selstr)
                    HRL_new = self.calcCustomH_ANew_IJ(encounter.getReference(), encounter.getUnboundCounterpart(), encounter, False, "r_ij", True, selstr)
                    print "reference is receptor, shapes of HRtilde, HLtilde, HRL: ", HRtildeH_ANew.shape, HLtildeH_ANew.shape, HRL_new.shape
            else:
                if self.utils.config.customHR_A:
                    HRtildeH_ANew = self.calcCustomH_ANew(HR.copy(), encounter.getUnboundCounterpart(), encounter.getReference(), encounter, "C_u", "r_ij", False, selstr)
                    HLtildeH_ANew = self.calcCustomH_ANew(HL.copy(), encounter.getReference(), encounter.getUnboundCounterpart(), encounter, "C_u", "r_ij", True, selstr)
                    HRL_new = self.calcCustomH_ANew_IJ(encounter.getUnboundCounterpart(), encounter.getReference(), encounter, False, "r_ij", False, selstr)
                    print "reference is ligand, shapes of HLtilde, HRtilde, HRL: ", HLtildeH_ANew.shape, HRtildeH_ANew.shape, HRL_new.shape
            print "finished projecting H, anm_reference_tilde calc modes"
            # put the new HRtilde and HLtilde inside HC
            HCcustomBuild[:HR.shape[0], :HR.shape[1]] = HRtildeH_ANew
            HCcustomBuild[HR.shape[0]:HR.shape[0]+HL.shape[0], HR.shape[1]:HR.shape[1]+HL.shape[1]] = HLtildeH_ANew
            HCcustomBuild[0:HR.shape[0], HR.shape[1]:HR.shape[1]+HL.shape[1]] = HRL_new
            HCcustomBuild[HR.shape[0]:HR.shape[0]+HL.shape[0], 0:HR.shape[1]] = HRL_new.T
            #assert np.allclose(HC, HCcustomBuild)
            #sys.exit()
            # Project the reference part in the HCcustomBuild matrix
            if self.utils.isReceptor(reference.getTitle()):
                if self.utils.config.customHR_A:
                    HCcustomBuildprojected = self.projectHessian(HCcustomBuild.copy(), reference, proteinComplex, "R", True)
            else:
                if self.utils.config.customHR_A:
                    HCcustomBuildprojected = self.projectHessian(HCcustomBuild.copy(), reference, proteinComplex, "L", True)
            # Create the custom complex ANM
            self._anm_complex_tilde = ANM(self._anm_complex[0].getTitle()+"_tilde")
            self._anm_complex_tilde.setHessian(HCcustomBuildprojected)
            if self.utils.config.enforceAllModesAfterProjection:
                # keep the zero-eigenvalue (rigid body) modes as well
                self._anm_complex_tilde.calcModes(n_modes=numberOfModes, zeros=True)
            else:
                self._anm_complex_tilde.calcModes(n_modes=numberOfModes)
            # Extend the self._anm_reference_tilde on all atoms
            anm_complex_tilde_extend = extendModel(self._anm_complex_tilde, self._anm_complex[1], proteinComplex, norm=True)
            # Then slice the anm_complex to the matched atoms
            self._anm_complex_tilde_slc = sliceModel(anm_complex_tilde_extend[0], anm_complex_tilde_extend[1], chain_complex.getSelstr())
            # Replace the complex anm and the complex_slc anm with the modified ANMs
            print "Replacing ANM H with ANM Htilde for the complex"
            self._anm_complex = (self._anm_complex_tilde, self._anm_complex[1])
            self._anm_complex_slc = self._anm_complex_tilde_slc
            # Create custom anm for reference
            if self.utils.config.enforceAllModesAfterProjection:
                # drop the first six (rigid body) modes before slicing down to the reference
                Marray = self.utils.sliceComplexModestoMatchProtein(self._anm_complex[0].getArray()[:,6:], reference, encounter.getReferenceSegment())
                self._anm_reference_tilde = ANM(self._anm_reference[0].getTitle()+"_tilde")
                self._anm_reference_tilde.setEigens(Marray, self._anm_complex[0].getEigvals()[6:])
            else:
                Marray = self.utils.sliceComplexModestoMatchProtein(self._anm_complex[0].getArray(), reference, encounter.getReferenceSegment())
                self._anm_reference_tilde = ANM(self._anm_reference[0].getTitle()+"_tilde")
                self._anm_reference_tilde.setEigens(Marray, self._anm_complex[0].getEigvals())
            # Extend the self._anm_reference_tilde on all atoms
            anm_reference_tilde_extend = extendModel(self._anm_reference_tilde, self._anm_reference[1], reference, norm=True)
            # Then slice the anm_reference to the matched
            self._anm_reference_tilde_slc = sliceModel(anm_reference_tilde_extend[0], anm_reference_tilde_extend[1], ref_chain.getSelstr())
            #
            # try modes comparison
            # ranges = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 20, 30, 40, 50, 60, 70]
            #
            # try:
            #     subspaceOverlaps = []
            #     for val in ranges:
            #         subspaceOverlaps.append(calcSubspaceOverlap(self._anm_reference[0][0:val], self._anm_reference_tilde[0:val]))
            #     encounter.storeSubSpaceOverlaps(subspaceOverlaps, ranges)
            # except Exception:
            #     sys.exc_clear()
            #
            # try:
            #     MarrayNormed = self.utils.normalized(Marray.copy(), axis=0)
            #     anm_reference_tilde_normed = ANM(self._anm_reference[0].getTitle()+"_tildenormed")
            #     anm_reference_tilde_normed.setEigens(MarrayNormed, self._anm_complex[0].getEigvals())
            #     covarianceOverlaps = []
            #     for val in ranges:
            #         covarianceOverlaps.append(calcCovOverlap(self._anm_reference[0][0:val], anm_reference_tilde_normed[0:val]))
            #     encounter.storeCovarianceOverlap(covarianceOverlaps, ranges)
            # except Exception, err:
            #     #sys.exc_clear()
            #     print "Exception covarianceoverlap occurred: ", err
            #     print traceback.format_exc()
            #
            # try:
            #     overlapTable = getOverlapTable(self._anm_reference[0], self._anm_reference_tilde)
            #     encounter.storeOverlapTable(overlapTable)
            # except Exception:
            #     sys.exc_clear()
            #
            # Replace reference and reference_slc with the modified ANMs
            print "Replacing ANM H with ANM Htilde for the reference"
            self._anm_reference = (self._anm_reference_tilde, self._anm_reference[1])
            self._anm_reference_slc = self._anm_reference_tilde_slc
def _calcANMsUnified(self, reference, numberOfModes, selstr='calpha', whatAtomsToMatch='calpha', direct_call = None, ref_chain = None):
# Create the anm of the reference
#writePDB(reference.getTitle()+"forANMmoved.pdb", reference)
if self.utils.config.calculateZeroEigvalModes == True:
anm_reference = calcANM(reference, n_modes=numberOfModes, selstr=selstr, zeros=True)
else:
anm_reference = calcANM(reference, n_modes=numberOfModes, selstr=selstr)
# Extend the anm_reference on all atoms
anm_reference_extend = extendModel(anm_reference[0], anm_reference[1], reference, norm=True)
# Then slice the anm_reference to the matched
if direct_call == None:
if self.bound_provided == True:
anm_reference_slc = sliceModel(anm_reference_extend[0], anm_reference_extend[1], ref_chain.getSelstr())
else:
anm_reference_slc = sliceModel(anm_reference_extend[0], anm_reference_extend[1], selstr)
else:
anm_reference_slc = sliceModel(anm_reference_extend[0], anm_reference_extend[1], selstr)
# Normalize the slices anm
anm_reference_slc = self.getNormalizedANM(anm_reference_slc)
if direct_call == True:
self._anm_reference = anm_reference
self._anm_reference_slc = anm_reference_slc
else:
return anm_reference, anm_reference_slc
def getNormalizedANM(self, anm):
""" Normalize the modes of the anm and return this anm object
Args:
anm: the anm with modes calculated
Returns: anm with normalized modes
"""
M = self.normalizeM(anm[0].getArray())
eigenvals = anm[0].getEigvals()
anm[0].setEigens(M, eigenvals)
return anm
def _moveSegment(self, reference, segment, angstrom):
""" Move all atoms x,y,z, belonging to the segment the number in angstrom """
print "15 ang contact before moving atoms:", reference.select('same residue as exwithin 15 of segment "L." ').numAtoms()
ref_select = reference.select('segment \"'+segment+'.\"')
ref_select.setCoords(ref_select.getCoords()+angstrom)
if reference.select('same residue as exwithin 15 of segment "L." ') != None:
print "15 ang contact after moving atoms: ", reference.select('same residue as exwithin 15 of segment "L." ').numAtoms()
else:
print "15 ang contact after moving atoms: 0"
def replaceReferenceANMs(self, anm_new, reference, ref_chain = None):
""" Replace the anm of reference with anm_new and normalize along the way.
Args:
anm_new: the new ANM
reference: the protein the ANM was created on
ref_chain: the matched chains of reference
Result:
replaced self._anm_reference and self._anm_reference_slc based on normalized anm_new
"""
self._anm_reference = anm_new
self._anm_reference = self.getNormalizedANM(self._anm_reference)
# Extend the self._anm_reference_tilde on all atoms
anm_reference_extend = extendModel(self._anm_reference[0], self._anm_reference[1], reference, norm=True)
# Then slice the anm_reference to the matched
if ref_chain != None:
self._anm_reference_slc = sliceModel(anm_reference_extend[0], anm_reference_extend[1], ref_chain.getSelstr())
else:
self._anm_reference_slc = sliceModel(anm_reference_extend[0], anm_reference_extend[1], 'calpha')
self._anm_reference_slc = self.getNormalizedANM(self._anm_reference_slc)
def replaceComplexANMs(self, anm_new, proteinComplex, complex_chain = None):
""" Replace the anm of the complex with anm_new and normalize along the way.
Args:
anm_new: the new ANM
proteinComplex: the complex that the ANM was created on
complex_chain: the matched chains of the complex
Result:
replaced self._anm_complex and self._anm_complex_slc based on normalized anm_new
"""
self._anm_complex = anm_new
self._anm_complex = self.getNormalizedANM(self._anm_complex)
# Extend the self.self._anm_complex_tilde on all atoms
anm_complex_extend = extendModel(self._anm_complex[0], self._anm_complex[1], proteinComplex, norm=True)
# Then slice the anm_reference to the matched
if complex_chain != None:
self._anm_complex_slc = sliceModel(anm_complex_extend[0], anm_complex_extend[1], complex_chain.getSelstr())
else:
self._anm_complex_slc = sliceModel(anm_complex_extend[0], anm_complex_extend[1], complex_chain.getSelstr())
self._anm_complex_slc = self.getNormalizedANM(self._anm_complex_slc)
def calcANMSlcInterface(self, ref_chain_interface, reference, titleOfReferenceSingleProtein, isBoundComplex=False):
self._anm_slc_interface = self.getSlicedInterfaceANM(self.getANMExtend(), ref_chain_interface, reference, titleOfReferenceSingleProtein, isBoundComplex)
def getSlicedInterfaceANM(self, anm_ext, ref_chain_interface, reference, titleOfReferenceSingleProtein, isBoundComplex=False):
selectionAtoms = self.createSlcSelectionString(reference, isBoundComplex, ref_chain_interface, titleOfReferenceSingleProtein)
anm_slc_interface = sliceModel(anm_ext[0], anm_ext[1], selectionAtoms)
return anm_slc_interface
    def calcInterfaceANMsforPart2a2k(self, encounter):
        """ Slice the reference, counterpart and bound-complex ANMs down to their
        interface atoms and sanity-check that the two single-protein interfaces
        together make up the bound-complex interface atom-for-atom.
        Args:
            encounter: object aggregating the proteins and their interface chains
        """
        self._anm_reference_slc_interface = self._slicedInterfaceANMs(self._anm_reference, encounter.getMobile(), encounter.getMobChainInterface())
        self._anm_counterpart_slc_interface = self._slicedInterfaceANMs(self._anm_counterpart, encounter.getBoundCounterpart(), encounter.getBoundCounterpartChainInterface())
        self._anm_boundcomplex_slc_interface = self._slicedInterfaceANMs(self._anm_complex, encounter.boundComplex.complex , encounter.getBoundComplexChainInterface())
        # The single-protein interfaces must add up exactly to the complex interface.
        assert (self._anm_reference_slc_interface[1].numAtoms()
                + self._anm_counterpart_slc_interface[1].numAtoms()
                == self._anm_boundcomplex_slc_interface[1].numAtoms())
        # Reference interface atoms are expected to lead the complex interface;
        # verify residue name, coordinates and atom name match 1:1.
        for i in range(0, self._anm_reference_slc_interface[1].numAtoms()):
            assert self._anm_reference_slc_interface[1][i].getResname() == self._anm_boundcomplex_slc_interface[1][i].getResname()
            assert np.alltrue(self._anm_reference_slc_interface[1][i].getCoords() == self._anm_boundcomplex_slc_interface[1][i].getCoords())
            assert self._anm_reference_slc_interface[1][i].getName() == self._anm_boundcomplex_slc_interface[1][i].getName()
        # Counterpart interface atoms follow directly after the reference atoms.
        offsetAtoms = self._anm_reference_slc_interface[1].numAtoms()
        for i in range(0, self._anm_counterpart_slc_interface[1].numAtoms()):
            j = i + offsetAtoms
            assert self._anm_counterpart_slc_interface[1][i].getResname() == self._anm_boundcomplex_slc_interface[1][j].getResname()
            assert np.alltrue(self._anm_counterpart_slc_interface[1][i].getCoords() == self._anm_boundcomplex_slc_interface[1][j].getCoords())
            assert self._anm_counterpart_slc_interface[1][i].getName() == self._anm_boundcomplex_slc_interface[1][j].getName()
    def calcInterfaceANMsUnified(self, reference, counterpart, proteinComplex, ref_chain_interface, counterpart_chain_interface, complex_chain_interface):
        """ Calculate (slice) the ANMs according to the interfaces on prot1, prot2 and their complex representation.
        Args:
            reference: prot1
            counterpart: prot2
            proteinComplex: prot1 and prot2 as one parsed object
            ref_chain_interface: interface of prot1
            counterpart_chain_interface: interface of prot2
            complex_chain_interface: interface of the proteinComplex
        """
        self._anm_reference_slc_interface = self._slicedInterfaceANMs(self._anm_reference, reference, ref_chain_interface)
        self._anm_counterpart_slc_interface = self._slicedInterfaceANMs(self._anm_counterpart, counterpart, counterpart_chain_interface)
        self._anm_boundcomplex_slc_interface = self._slicedInterfaceANMs(self._anm_complex, proteinComplex, complex_chain_interface)
        # normalize modes
        self._anm_reference_slc_interface = self.getNormalizedANM(self._anm_reference_slc_interface)
        self._anm_counterpart_slc_interface = self.getNormalizedANM(self._anm_counterpart_slc_interface)
        self._anm_boundcomplex_slc_interface = self.getNormalizedANM(self._anm_boundcomplex_slc_interface)
        # The two single-protein interfaces must add up to the complex interface.
        assert (self._anm_reference_slc_interface[1].numAtoms()
                + self._anm_counterpart_slc_interface[1].numAtoms()
                == self._anm_boundcomplex_slc_interface[1].numAtoms())
        # NOTE(review): the detailed per-atom equality checks below are disabled
        # by this hardcoded flag; flip to True to re-enable them when debugging.
        assertANMAtomEquality = False
        if assertANMAtomEquality:
            # Atom ordering in the complex depends on which protein is the
            # receptor: the receptor's atoms come first, the other protein's
            # atoms follow at offsetAtoms.
            if self.utils.isReceptor(reference.getTitle()):
                for i in range(0, self._anm_reference_slc_interface[1].numAtoms()):
                    # print i, self._anm_reference_slc_interface[1][i].getCoords(), self._anm_boundcomplex_slc_interface[1][i].getCoords()
                    assert self._anm_reference_slc_interface[1][i].getResname() == self._anm_boundcomplex_slc_interface[1][i].getResname()
                    assert np.alltrue(self._anm_reference_slc_interface[1][i].getCoords() == self._anm_boundcomplex_slc_interface[1][i].getCoords())
                    # item1roundedChoords = [round(x, 3) for x in self._anm_reference_slc_interface[1][i].getCoords().tolist()]
                    # item2roundedChoords = [round(x, 3) for x in self._anm_boundcomplex_slc_interface[1][i].getCoords().tolist()]
                    # assert np.alltrue(item1roundedChoords == item2roundedChoords)
                    assert self._anm_reference_slc_interface[1][i].getName() == self._anm_boundcomplex_slc_interface[1][i].getName()
                offsetAtoms = self._anm_reference_slc_interface[1].numAtoms()
                for i in range(0, self._anm_counterpart_slc_interface[1].numAtoms()):
                    j = i + offsetAtoms
                    assert self._anm_counterpart_slc_interface[1][i].getResname() == self._anm_boundcomplex_slc_interface[1][j].getResname()
                    assert np.alltrue(self._anm_counterpart_slc_interface[1][i].getCoords() == self._anm_boundcomplex_slc_interface[1][j].getCoords())
                    # item1roundedChoords = [round(x, 3) for x in self._anm_counterpart_slc_interface[1][i].getCoords().tolist()]
                    # item2roundedChoords = [round(x, 3) for x in self._anm_boundcomplex_slc_interface[1][j].getCoords().tolist()]
                    # assert np.alltrue(item1roundedChoords == item2roundedChoords)
                    assert self._anm_counterpart_slc_interface[1][i].getName() == self._anm_boundcomplex_slc_interface[1][j].getName()
            else:
                offsetAtoms = self._anm_counterpart_slc_interface[1].numAtoms()
                for i in range(0, self._anm_reference_slc_interface[1].numAtoms()):
                    j = i + offsetAtoms
                    # print i, self._anm_reference_slc_interface[1][i].getCoords(), self._anm_boundcomplex_slc_interface[1][i].getCoords()
                    assert self._anm_reference_slc_interface[1][i].getResname() == self._anm_boundcomplex_slc_interface[1][j].getResname()
                    assert np.alltrue(self._anm_reference_slc_interface[1][i].getCoords() == self._anm_boundcomplex_slc_interface[1][j].getCoords())
                    # item1roundedChoords = [round(x, 3) for x in self._anm_reference_slc_interface[1][i].getCoords().tolist()]
                    # item2roundedChoords = [round(x, 3) for x in self._anm_boundcomplex_slc_interface[1][j].getCoords().tolist()]
                    # assert np.alltrue(item1roundedChoords == item2roundedChoords)
                    assert self._anm_reference_slc_interface[1][i].getName() == self._anm_boundcomplex_slc_interface[1][j].getName()
                for i in range(0, self._anm_counterpart_slc_interface[1].numAtoms()):
                    assert self._anm_counterpart_slc_interface[1][i].getResname() == self._anm_boundcomplex_slc_interface[1][i].getResname()
                    assert np.alltrue(self._anm_counterpart_slc_interface[1][i].getCoords() == self._anm_boundcomplex_slc_interface[1][i].getCoords())
                    # item1roundedChoords = [round(x, 3) for x in self._anm_counterpart_slc_interface[1][i].getCoords().tolist()]
                    # item2roundedChoords = [round(x, 3) for x in self._anm_boundcomplex_slc_interface[1][i].getCoords().tolist()]
                    # assert np.alltrue(item1roundedChoords == item2roundedChoords)
                    assert self._anm_counterpart_slc_interface[1][i].getName() == self._anm_boundcomplex_slc_interface[1][i].getName()
def _slicedInterfaceANMs(self, anm, reference, interface):
""" Slice an anm to match the provided interface.
Args:
anm: the anm to be sliced
reference: the protein that the anm is based upon, necessary for extention of the model first
interface: the interface of the protein
"""
anm_ext = extendModel(anm[0], anm[1], reference, norm=True)
anm_slc = sliceModel(anm_ext[0], anm_ext[1], interface.getSelstr())
anm_slc = self.getNormalizedANM(anm_slc)
return anm_slc
def getANM(self):
""" Get the ANM calculated on the reference (default) calpha atoms. """
if self._anm == None:
raise Exception('self._anm == None')
return self._anm
def getANMExtend(self):
""" Get the ANM extended to the whole reference (all atoms). """
if self._anm_extend == None:
raise Exception('self._anm == None')
return self._anm_extend
def getANMSlc(self):
""" Get the sliced back ANM to match all atoms in the ref_chain."""
if self._anm_slc == None:
raise Exception('self._anm_slc == None')
return self._anm_slc
def getANMSlcCounterpart(self):
""" Get the sliced back ANM to match all atoms in the counterpart chain(s) """
if self._anm_slc_counterpart == None:
raise Exception('self._anm_slc == None')
return self._anm_slc_counterpart
def getANMSlcInterface(self):
""" Get the sliced back ANM to match all atoms in the ref_chain_interface. """
if self._anm_slc_interface == None:
raise Exception('self._anm_slc_interface == None')
return self._anm_slc_interface
def getANMComplexSlc(self):
""" Get the sliced back ANM to match all atoms in the chain_complex. """
if self._anm_complex_slc == None:
raise Exception('self._anm_complex_slc == None')
return self._anm_complex_slc
def getANMReference2a2kSlc(self):
""" Get the sliced back self._anm_reference_slc ANM to match all atoms in the reference variable. """
if self._anm_reference_slc == None:
raise Exception('self._anm_reference_slc == None')
return self._anm_reference_slc
def getANMCounterpart2a2kSlc(self):
""" Get the sliced back self._anm_counterpart_slc ANM to match all atoms in the counterpart variable. """
if self._anm_counterpart_slc == None:
raise Exception('self._anm_counterpart_slc == None')
return self._anm_counterpart_slc
def getANMReference(self):
if self._anm_reference == None:
raise Exception('self._anm_reference == None')
return self._anm_reference
def getANMReferenceSlc(self):
if self._anm_reference_slc == None:
raise Exception('self._anm_reference_slc == None')
return self._anm_reference_slc
def getANMCounterpart(self):
if self._anm_counterpart == None:
raise Exception('self._anm_counterpart == None')
return self._anm_counterpart
def getANMComplex(self):
if self._anm_complex == None:
raise Exception('self._anm_complex == None')
return self._anm_complex
def getANMReferenceSlcInterface(self):
if self._anm_reference_slc_interface == None:
raise Exception('self._anm_reference_slc_interface == None')
return self._anm_reference_slc_interface
def getANMCounterpartSlcInterface(self):
if self._anm_counterpart_slc_interface == None:
raise Exception('self._anm_counterpart_slc_interface == None')
return self._anm_counterpart_slc_interface
def getANMComplexSlcInterface(self):
if self._anm_boundcomplex_slc_interface == None:
raise Exception('self._anm_boundcomplex_slc_interface == None')
return self._anm_boundcomplex_slc_interface
def getANMPath(self, reference, numberOfModes, selstr, whatAtomsToMatch, modified=""):
path = self.utils.config.anmPath
prefix = reference.getTitle()
prefix = prefix.replace(" ", "_")
if modified == "":
return path+prefix+"_modes"+str(numberOfModes)+"_buildOn"+selstr+"_matchedOn"+whatAtomsToMatch
elif modified == "extended":
return path+"extended/"+prefix+"_modes"+str(numberOfModes)+"_buildOn"+selstr+"_matchedOn"+whatAtomsToMatch+"_extended"
elif modified == "slicedback":
return path+"slicedback/"+prefix+"_modes"+str(numberOfModes)+"_buildOn"+selstr+"_matchedOn"+whatAtomsToMatch+"_slicedback"
else:
raise Exception("the variable modified is not the empty string, extended or slicedback.")
def doesANMExist(self, reference, numberOfModes, selstr, whatAtomsToMatch, modified=""):
path = self.utils.config.anmPath
try:
with open(self.getANMPath(reference, numberOfModes, selstr, whatAtomsToMatch, modified)+".anm.npz"):
return True
except IOError:
return False
def projectHessian(self, hessian, reference, proteinComplex, referenceSegment, projectionStyle, projectOnlyReferencePartOfHC=False, interCalphaIndices=None):
""" Return the PH'P hessian which has 6 zero eigenvalues according to the formula 8.27
from the book "A practical introduction to the simulation of molecular dynamics", Field.
However, here it is made sure that the assumed basis is orthonormal via np.linalg.qr applied
on the six vectors discussed in this book.
Args:
hessian: the hessian to be projected
reference: the protein the hessian or HRtilde/HLtilde of the hessian was created on
proteinComplex: the whole protein that reference is part of
referenceSegment: if reference is receptor, provide "R", else it needs to be ligand, provide "L"
projectionStyle: project away from "full" (intra+inter) or "intra" (intra) or "fullComplex"
pojectOnlyReferencePartOfHC: if true, the hessian was created on reference, if false, HRtilde or HLtilde
of the hessian were created on the reference
interCalphaIndices: list of calphas indices that have intermolecular interactions
Returns: projected hessian with 6 external degrees of freedom (rotation and translation) removed
"""
assert projectionStyle == "full" or projectionStyle == "intra" or projectionStyle == "fullComplex"
normalize = True
numAtoms = reference.select('calpha').numAtoms()
numCoords = numAtoms*3
centerOfCoords = calcCenter(reference.select('calpha'))
assert np.alltrue(centerOfCoords == calcCenter(proteinComplex.select('segment \"'+referenceSegment+'.\"').select('calpha')))
print "before projection symmetry ==, allclose: ", np.all(hessian-hessian.T==0), np.allclose(hessian, hessian.T)
if projectOnlyReferencePartOfHC:
numComplexAtoms = proteinComplex.select('calpha').numAtoms()
numComplexCoords = numComplexAtoms*3
numCounterpartCoords = numComplexCoords - numCoords
if referenceSegment == "R":
assert numCounterpartCoords == proteinComplex.select('segment \"L.\"').select('calpha').numAtoms() * 3
else:
assert numCounterpartCoords == proteinComplex.select('segment \"R.\"').select('calpha').numAtoms() * 3
# Create null vector with length of the counterpart calphas
counterPartNullVector = np.zeros(numCounterpartCoords)
# Create I
I = np.identity(numCoords)
# Create the three translation vectors Tx, Ty, Tz
Tx = np.zeros(numCoords)
Tx = self.utils.fill3DArrayWithValue(Tx, 1.0, 0)
Ty = np.zeros(numCoords)
Ty = self.utils.fill3DArrayWithValue(Ty, 1.0, 1)
Tz = np.zeros(numCoords)
Tz = self.utils.fill3DArrayWithValue(Tz, 1.0, 2)
# Create the three rotation vectors Rx, Ry, Rz
coordsCopy = reference.select('calpha').getCoords().copy()
Rx = self.utils.createRx(coordsCopy)
coordsCopy2 = reference.select('calpha').getCoords().copy()
Ry = self.utils.createRy(coordsCopy2)
coordsCopy3 = reference.select('calpha').getCoords().copy()
Rz = self.utils.createRz(coordsCopy3)
# remove inter atoms from projection
if projectionStyle == "intra":
Tx = self.removeInterAtoms(Tx, interCalphaIndices)
Ty = self.removeInterAtoms(Ty, interCalphaIndices)
Tz = self.removeInterAtoms(Tz, interCalphaIndices)
Rx = self.removeInterAtoms(Rx, interCalphaIndices)
Ry = self.removeInterAtoms(Ry, interCalphaIndices)
Rz = self.removeInterAtoms(Rz, interCalphaIndices)
if projectOnlyReferencePartOfHC:
# overwrite previous I
I = np.identity(numComplexCoords)
# extend (with the nullvector) the rotational and translational vectors to the dimension of the complex
if referenceSegment == "R":
Tx = np.concatenate((Tx, counterPartNullVector))
Ty = np.concatenate((Ty, counterPartNullVector))
Tz = np.concatenate((Tz, counterPartNullVector))
Rx = np.concatenate((Rx, counterPartNullVector))
Ry = np.concatenate((Ry, counterPartNullVector))
Rz = np.concatenate((Rz, counterPartNullVector))
else:
Tx = np.concatenate((counterPartNullVector, Tx))
Ty = np.concatenate((counterPartNullVector, Tz))
Tz = np.concatenate((counterPartNullVector, Tz))
Rx = np.concatenate((counterPartNullVector, Rx))
Ry = np.concatenate((counterPartNullVector, Ry))
Rz = np.concatenate((counterPartNullVector, Rz))
# Normalize translation vectors and apply rotational fix
if normalize:
Tx = Vector(Tx)
#Tx = self.subtractCenterOfCoords(Tx, centerOfCoords[0], 0.0, 0.0)
Tx = Tx.getNormed().getArray()
Ty = Vector(Ty)
#Ty = self.subtractCenterOfCoords(Ty, 0.0, centerOfCoords[1], 0.0)
Ty = Ty.getNormed().getArray()
Tz = Vector(Tz)
#Tz = self.subtractCenterOfCoords(Tz, 0.0, 0.0, centerOfCoords[2])
Tz = Tz.getNormed().getArray()
Rx = Vector(Rx)
#Rx = self.subtractCenterOfCoords(Rx, 0.0, centerOfCoords[2], centerOfCoords[1])
Rx = Rx.getNormed().getArray()
Ry = Vector(Ry)
#Ry = self.subtractCenterOfCoords(Ry, centerOfCoords[2], 0.0, centerOfCoords[0])
Ry = Ry.getNormed().getArray()
Rz = Vector(Rz)
#Rz = self.subtractCenterOfCoords(Rz, centerOfCoords[1], centerOfCoords[0], 0.0)
Rz = Rz.getNormed().getArray()
# Create P
#P = I - np.outer(Rx, Rx) - np.outer(Ry, Ry) - np.outer(Rz, Rz) - np.outer(Tx, Tx) - np.outer(Ty, Ty) - np.outer(Tz, Tz)
### corres P = I - P
#print "independent columns P: ", self.utils.independent_columns(P).shape
#print "matrix rank P: ", self.utils.matrixrank(P)
#print "independent columns I-P: ", self.utils.independent_columns(I-P).shape
#print "matrix rank I-P: ", self.utils.matrixrank(I-P)
#print "np matrix rank I-P : ", np.linalg.matrix_rank(I-P)
#print "np matrix as matrix rank I-P : ", np.linalg.matrix_rank(np.matrix(I-P))
assumedBasis = np.array([Tx, Ty, Tz, Rx, Ry, Rz]).T
MyQ, MyR = np.linalg.qr(assumedBasis)
#print "MyQ.shape: ", MyQ.shape
Rx = MyQ.T[0]
Ry = MyQ.T[1]
Rz = MyQ.T[2]
Tx = MyQ.T[3]
Ty = MyQ.T[4]
Tz = MyQ.T[5]
###
print "before full projection"
###
P = I - np.outer(Rx, Rx) - np.outer(Ry, Ry) - np.outer(Rz, Rz) - np.outer(Tx, Tx) - np.outer(Ty, Ty) - np.outer(Tz, Tz)
#print "assumedBasis : \n", assumedBasis.round(4)
#print "basis after QR: \n", np.array([Tx, Ty, Tz, Rx, Ry, Rz]).T.round(4)
#writeArray("assumedBasis.txt", assumedBasis.round(4), format="%f")
#writeArray("basis_after_QR.txt", np.array([Tx, Ty, Tz, Rx, Ry, Rz]).T.round(4), format="%f")
###
#print "P", P
# print "P.shape", P.shape
# print "symmetric P: ", np.allclose(P, P.T)
# print "complex calphas * 3: ", proteinComplex.select('calpha').numAtoms() * 3
# print "rank of P projection", projectionStyle, ": ", np.linalg.matrix_rank(np.matrix(P))
# P_eigenvals, P_eigenvecs = np.linalg.eigh(P)
# print "number of P_eigenvals: ", len(P_eigenvals)
# #print "P_eigenvals: ", P_eigenvals
# print "number of P_eigenvecs: ", len(P_eigenvecs)
# #print "P_eigenvecs: ", P_eigenvecs
# #writeArray("helperScripts/"+proteinComplex.getTitle()+"_P_"+projectionStyle+".txt", P, format='%10.7f')
# #writeArray("P_eigenvals"+projectionStyle+".txt", P_eigenvals, format='%10.7f')
# #writeArray("P_eigenvecs"+projectionStyle+".txt", P_eigenvecs, format='%10.7f')
#
# P_times_Peigenvecs = P.dot(P_eigenvecs)
# P_times_Peigenvecs_T = P.dot(P_eigenvecs).T
# P_orthonormalityTest = P_times_Peigenvecs_T.dot(P_times_Peigenvecs)
# #writeArray("P_orthonormalityTest"+projectionStyle+".txt", P_orthonormalityTest, format='%10.7f')
# # does this P_orthonormalityTest equal the identity matrix or part of it?
# print "P_orthonormalityTest: ", np.allclose(P_orthonormalityTest, np.identity(len(P_eigenvecs)))
# print "P_orthonormalityTest w/o upper 6x6: ", np.allclose(P_orthonormalityTest[6:,6:], np.identity(len(P_eigenvecs)-6))
# zeroM = np.zeros((len(P_eigenvecs), len(P_eigenvecs)))
# zeroM[6:,6:] = P_orthonormalityTest[6:,6:]
# print "P_orthonormalityTest except lower n-6,n-6 zero: ", np.allclose(P_orthonormalityTest, zeroM)
# proteinComplex_ca = proteinComplex.select('calpha')
# writePDB("complex_allatoms.pdb", proteinComplex)
# writePDB("complex_before_Ptimes.pdb", proteinComplex_ca)
# coord_shape = proteinComplex_ca.getCoords().shape
# coords_P = P.dot(proteinComplex_ca.getCoords().flatten())
# coords_P = coords_P.reshape(coord_shape)
# proteinComplex_ca.setCoords(coords_P)
# writePDB("complex_after_Ptimes"+projectionStyle+".pdb", proteinComplex_ca)
#raw_input()
###
# Q, R = np.linalg.qr(P, mode="complete")
# print "independent columns Q: ", self.utils.independent_columns(Q).shape
# print "matrix rank Q: ", self.utils.matrixrank(Q)
# print "matrix np rank Q: ", np.linalg.matrix_rank(Q)," ", np.linalg.matrix_rank(np.matrix(Q))
# print "log of determinant of Q: ", np.linalg.slogdet(Q)
### corres Q = I - Q
#P = I-Q
# Apply PH'H, np.dot is matrix multiplication for 2D arrays
#print "count orthogonal columns: ", self.utils.countOrthogonalColumns(I-P)
Hprime = np.dot(P.T, hessian)
Hprime = np.dot(Hprime, P)
# Return the projected hessian
#print "after projection symmetry ==, allclose: ", np.all(Hprime-Hprime.T==0), np.allclose(Hprime, Hprime.T)
#print "H: ", hessian
#print "Hprime: ", Hprime
return Hprime
    def projectHessian_test2timesQR(self, hessian, reference, proteinComplex, referenceSegment, projectionStyle, projectOnlyReferencePartOfHC=False, interCalphaIndices=None):
        """ Return the PH'P hessian which has 6 zero eigenvalues according to the formula 8.27
        from the book "A practical introduction to the simulation of molecular dynamics", Field.
        However, here it is made sure that the assumed basis is orthonormal via np.linalg.qr applied
        on the six vectors discussed in this book.

        NOTE(review): this appears to be a debugging/experimental variant of
        projectHessian (note the raw_input() pauses and diagnostic prints):
        the rigid-body basis is built on the whole complex, orthonormalized,
        truncated to the reference part, orthonormalized a second time, and
        then zero-padded back to the complex dimension before projecting.

        Args:
            hessian: the hessian to be projected
            reference: the protein the hessian or HRtilde/HLtilde of the hessian was created on
            proteinComplex: the whole protein that reference is part of
            referenceSegment: if reference is receptor, provide "R", else it needs to be ligand, provide "L"
            projectionStyle: only "full" is accepted by this variant
            projectOnlyReferencePartOfHC: if true, the hessian was created on reference, if false, HRtilde or HLtilde
                of the hessian were created on the reference
            interCalphaIndices: list of calphas indices that have intermolecular interactions
        Returns: projected hessian with 6 external degrees of freedom (rotation and translation) removed
        """
        assert projectionStyle == "full"
        normalize = True
        numAtoms = reference.select('calpha').numAtoms()
        numCoords = numAtoms*3
        centerOfCoords = calcCenter(reference.select('calpha'))
        # sanity check: the reference segment inside the complex has the same center
        assert np.alltrue(centerOfCoords == calcCenter(proteinComplex.select('segment \"'+referenceSegment+'.\"').select('calpha')))
        print "before projection symmetry ==, allclose: ", np.all(hessian-hessian.T==0), np.allclose(hessian, hessian.T)
        numComplexAtoms = proteinComplex.select('calpha').numAtoms()
        numComplexCoords = numComplexAtoms*3
        numCounterpartCoords = numComplexCoords - numCoords
        if referenceSegment == "R":
            assert numCounterpartCoords == proteinComplex.select('segment \"L.\"').select('calpha').numAtoms() * 3
        else:
            assert numCounterpartCoords == proteinComplex.select('segment \"R.\"').select('calpha').numAtoms() * 3
        # Create null vector with length of the counterpart calphas
        counterPartNullVector = np.zeros(numCounterpartCoords)
        # Create I
        I = np.identity(numComplexCoords)
        # Create the three translation vectors Tx, Ty, Tz (complex-sized here)
        Tx = np.zeros(numComplexCoords)
        Tx = self.utils.fill3DArrayWithValue(Tx, 1.0, 0)
        Ty = np.zeros(numComplexCoords)
        Ty = self.utils.fill3DArrayWithValue(Ty, 1.0, 1)
        Tz = np.zeros(numComplexCoords)
        Tz = self.utils.fill3DArrayWithValue(Tz, 1.0, 2)
        # Create the three rotation vectors Rx, Ry, Rz (built on complex coords)
        coordsCopy = proteinComplex.select('calpha').getCoords().copy()
        Rx = self.utils.createRx(coordsCopy)
        coordsCopy2 = proteinComplex.select('calpha').getCoords().copy()
        Ry = self.utils.createRy(coordsCopy2)
        coordsCopy3 = proteinComplex.select('calpha').getCoords().copy()
        Rz = self.utils.createRz(coordsCopy3)
        # NOTE(review): the dead code below (kept from the non-test variant)
        # contains the Ty line concatenating Tz instead of Ty — the same slip
        # as in projectHessian's "else" branch.
        # if projectOnlyReferencePartOfHC:
        #     # overwrite previous I
        #     I = np.identity(numComplexCoords)
        #     # extend (with the nullvector) the rotational and translational vectors to the dimension of the complex
        #     if referenceSegment == "R":
        #         Tx = np.concatenate((Tx, counterPartNullVector))
        #         Ty = np.concatenate((Ty, counterPartNullVector))
        #         Tz = np.concatenate((Tz, counterPartNullVector))
        #         Rx = np.concatenate((Rx, counterPartNullVector))
        #         Ry = np.concatenate((Ry, counterPartNullVector))
        #         Rz = np.concatenate((Rz, counterPartNullVector))
        #     else:
        #         Tx = np.concatenate((counterPartNullVector, Tx))
        #         Ty = np.concatenate((counterPartNullVector, Tz))
        #         Tz = np.concatenate((counterPartNullVector, Tz))
        #         Rx = np.concatenate((counterPartNullVector, Rx))
        #         Ry = np.concatenate((counterPartNullVector, Ry))
        #         Rz = np.concatenate((counterPartNullVector, Rz))
        # Normalize translation vectors and apply rotational fix
        if normalize:
            Tx = Vector(Tx)
            #Tx = self.subtractCenterOfCoords(Tx, centerOfCoords[0], 0.0, 0.0)
            Tx = Tx.getNormed().getArray()
            Ty = Vector(Ty)
            #Ty = self.subtractCenterOfCoords(Ty, 0.0, centerOfCoords[1], 0.0)
            Ty = Ty.getNormed().getArray()
            Tz = Vector(Tz)
            #Tz = self.subtractCenterOfCoords(Tz, 0.0, 0.0, centerOfCoords[2])
            Tz = Tz.getNormed().getArray()
            Rx = Vector(Rx)
            #Rx = self.subtractCenterOfCoords(Rx, 0.0, centerOfCoords[2], centerOfCoords[1])
            Rx = Rx.getNormed().getArray()
            Ry = Vector(Ry)
            #Ry = self.subtractCenterOfCoords(Ry, centerOfCoords[2], 0.0, centerOfCoords[0])
            Ry = Ry.getNormed().getArray()
            Rz = Vector(Rz)
            #Rz = self.subtractCenterOfCoords(Rz, centerOfCoords[1], centerOfCoords[0], 0.0)
            Rz = Rz.getNormed().getArray()
        # first orthonormalization on the complex-sized basis
        # NOTE(review): mode='full' is a deprecated alias of 'reduced' in newer
        # NumPy releases — confirm against the targeted NumPy version
        assumedBasis = np.array([Tx, Ty, Tz, Rx, Ry, Rz]).T
        MyQ, MyR = np.linalg.qr(assumedBasis, mode='full')
        Rx = MyQ.T[0]
        Ry = MyQ.T[1]
        Rz = MyQ.T[2]
        Tx = MyQ.T[3]
        Ty = MyQ.T[4]
        Tz = MyQ.T[5]
        # truncate the orthonormalized vectors to the reference part
        # (assumes the reference occupies the leading numCoords entries)
        Rx = Rx[:numCoords]
        Ry = Ry[:numCoords]
        Rz = Rz[:numCoords]
        Tx = Tx[:numCoords]
        Ty = Ty[:numCoords]
        Tz = Tz[:numCoords]
        # second orthonormalization after truncation
        assumedBasis = np.array([Tx, Ty, Tz, Rx, Ry, Rz]).T
        MyQ, MyR = np.linalg.qr(assumedBasis, mode='full')
        Rx = MyQ.T[0]
        Ry = MyQ.T[1]
        Rz = MyQ.T[2]
        Tx = MyQ.T[3]
        Ty = MyQ.T[4]
        Tz = MyQ.T[5]
        print "len(Rx): ", len(Rx)
        # zero-pad back up to the complex dimension
        Tx = np.concatenate((Tx, counterPartNullVector))
        Ty = np.concatenate((Ty, counterPartNullVector))
        Tz = np.concatenate((Tz, counterPartNullVector))
        Rx = np.concatenate((Rx, counterPartNullVector))
        Ry = np.concatenate((Ry, counterPartNullVector))
        Rz = np.concatenate((Rz, counterPartNullVector))
        print "Pr test"
        # debugging pause: blocks until the user presses enter
        raw_input()
        # P projects onto the orthogonal complement of the (truncated) rigid-body subspace
        P = I - np.outer(Rx, Rx) - np.outer(Ry, Ry) - np.outer(Rz, Rz) - np.outer(Tx, Tx) - np.outer(Ty, Ty) - np.outer(Tz, Tz)
        #print "assumedBasis : \n", assumedBasis.round(4)
        #print "basis after QR: \n", np.array([Tx, Ty, Tz, Rx, Ry, Rz]).T.round(4)
        #writeArray("assumedBasis.txt", assumedBasis.round(4), format="%f")
        #writeArray("basis_after_QR.txt", np.array([Tx, Ty, Tz, Rx, Ry, Rz]).T.round(4), format="%f")
        ###
        # diagnostic output: symmetry, rank and eigen-structure of P
        print "P", P
        print "P.shape", P.shape
        print "symmetric P: ", np.allclose(P, P.T)
        print "complex calphas * 3: ", proteinComplex.select('calpha').numAtoms() * 3
        print "rank of P projection", projectionStyle, ": ", np.linalg.matrix_rank(np.matrix(P))
        P_eigenvals, P_eigenvecs = np.linalg.eigh(P)
        print "number of P_eigenvals: ", len(P_eigenvals)
        #print "P_eigenvals: ", P_eigenvals
        print "number of P_eigenvecs: ", len(P_eigenvecs)
        #print "P_eigenvecs: ", P_eigenvecs
        writeArray("helperScripts/"+proteinComplex.getTitle()+"_P_"+projectionStyle+".txt", P, format='%10.7f')
        #writeArray("P_eigenvals"+projectionStyle+".txt", P_eigenvals, format='%10.7f')
        #writeArray("P_eigenvecs"+projectionStyle+".txt", P_eigenvecs, format='%10.7f')
        # orthonormality check of P applied to its own eigenvectors
        P_times_Peigenvecs = P.dot(P_eigenvecs)
        P_times_Peigenvecs_T = P.dot(P_eigenvecs).T
        P_orthonormalityTest = P_times_Peigenvecs_T.dot(P_times_Peigenvecs)
        #writeArray("P_orthonormalityTest"+projectionStyle+".txt", P_orthonormalityTest, format='%10.7f')
        # does this P_orthonormalityTest equal the identity matrix or part of it?
        print "P_orthonormalityTest: ", np.allclose(P_orthonormalityTest, np.identity(len(P_eigenvecs)))
        print "P_orthonormalityTest w/o upper 6x6: ", np.allclose(P_orthonormalityTest[6:,6:], np.identity(len(P_eigenvecs)-6))
        zeroM = np.zeros((len(P_eigenvecs), len(P_eigenvecs)))
        zeroM[6:,6:] = P_orthonormalityTest[6:,6:]
        print "P_orthonormalityTest except lower n-6,n-6 zero: ", np.allclose(P_orthonormalityTest, zeroM)
        # proteinComplex_ca = proteinComplex.select('calpha')
        # writePDB("complex_allatoms.pdb", proteinComplex)
        # writePDB("complex_before_Ptimes.pdb", proteinComplex_ca)
        # coord_shape = proteinComplex_ca.getCoords().shape
        # coords_P = P.dot(proteinComplex_ca.getCoords().flatten())
        # coords_P = coords_P.reshape(coord_shape)
        # proteinComplex_ca.setCoords(coords_P)
        # writePDB("complex_after_Ptimes"+projectionStyle+".pdb", proteinComplex_ca)
        # debugging pause: blocks until the user presses enter
        raw_input()
        ###
        # Q, R = np.linalg.qr(P, mode="complete")
        # print "independent columns Q: ", self.utils.independent_columns(Q).shape
        # print "matrix rank Q: ", self.utils.matrixrank(Q)
        # print "matrix np rank Q: ", np.linalg.matrix_rank(Q)," ", np.linalg.matrix_rank(np.matrix(Q))
        # print "log of determinant of Q: ", np.linalg.slogdet(Q)
        ### corres Q = I - Q
        #P = I-Q
        # Apply PH'H, np.dot is matrix multiplication for 2D arrays
        #print "count orthogonal columns: ", self.utils.countOrthogonalColumns(I-P)
        Hprime = np.dot(P.T, hessian)
        Hprime = np.dot(Hprime, P)
        # Return the projected hessian
        #print "after projection symmetry ==, allclose: ", np.all(Hprime-Hprime.T==0), np.allclose(Hprime, Hprime.T)
        #print "H: ", hessian
        #print "Hprime: ", Hprime
        return Hprime
    def transformHessianToFixedDomainFrame(self, hessian, reference, proteinComplex, referenceSegment, projectionStyle):
        """ Application of formula 20 from:
        Fuchigami, Sotaro, Satoshi Omori, Mitsunori Ikeguchi, and Akinori Kidera.
        "Normal Mode Analysis of Protein Dynamics in a Non-Eckart Frame."
        The Journal of Chemical Physics 132, no. 10 (March 11, 2010): 104109. doi:10.1063/1.3352566.

        Transforms the complex hessian into a frame in which the reference
        segment acts as the fixed domain, via U = I - V (V^T P V)^-1 V^T P
        where V holds the 6 lowest-eigenvalue modes of the hessian and P
        selects the fixed-domain coordinates.

        Args:
            hessian: complex hessian (3N x 3N over all calphas) to transform
            reference: the protein acting as the fixed domain
            proteinComplex: the whole complex containing reference
            referenceSegment: "R" if reference is the receptor, else ligand
            projectionStyle: NOTE(review) — this parameter is not used in the
                body of this method; confirm whether it can be dropped.
        Returns: hessian transformed into the fixed domain frame (U H U^T)
        """
        numAtoms = reference.select('calpha').numAtoms()
        numCoords = numAtoms*3
        centerOfCoords = calcCenter(reference.select('calpha'))
        #assert np.alltrue(centerOfCoords == calcCenter(proteinComplex.select('segment \"'+referenceSegment+'.\"').select('calpha')))
        numComplexAtoms = proteinComplex.select('calpha').numAtoms()
        numComplexCoords = numComplexAtoms*3
        numCounterpartCoords = numComplexCoords - numCoords
        if referenceSegment == "R":
            # create the P matrix, receptor is fixed domain
            # (identity on the leading receptor block, zero elsewhere)
            P = np.zeros((numComplexCoords, numComplexCoords))
            P[:numCoords, :numCoords] = np.identity(numCoords)
            assert numCounterpartCoords == proteinComplex.select('segment \"L.\"').select('calpha').numAtoms() * 3
        else:
            # create the P matrix, ligand is fixed domain
            # (identity on the trailing ligand block, zero elsewhere)
            P = np.zeros((numComplexCoords, numComplexCoords))
            numCoords_receptor = proteinComplex.select('segment \"R.\"').select('calpha').numAtoms() * 3
            P[numCoords_receptor:, numCoords_receptor:] = np.identity(proteinComplex.select('segment \"L.\"').select('calpha').numAtoms() * 3)
            assert numCounterpartCoords == proteinComplex.select('segment \"R.\"').select('calpha').numAtoms() * 3
        # create rigid body motion eigenvectors out_values
        out_vals, out_vectors = sp.linalg.eigh(hessian)
        # sort the eigenvalues and eigenvectors ascendingly, this is not asserted by the eigh return, see
        # http://stackoverflow.com/questions/8092920/sort-eigenvalues-and-associated-eigenvectors-after-using-numpy-linalg-eig-in-pyt
        idx = out_vals.argsort()
        out_vals = out_vals[idx]
        out_vectors = out_vectors[:,idx]
        # take the first six eigenvalues and eigenvectors
        # (presumably the rigid-body modes of the complex — TODO confirm the
        # hessian has 6 near-zero eigenvalues at this point)
        out_vals = out_vals[:6]
        out_vectors = out_vectors.T[:6].T
        #print "P.shape: ", P.shape
        #print "out_vectors.shape: ", out_vectors.shape
        # create the transformation matrix
        # inv = (V^T P V)^-1 with V = out_vectors
        inv = (out_vectors.T.dot(P)).dot(out_vectors)
        inv = np.linalg.inv(inv)
        # secondTerm = V (V^T P V)^-1 V^T P
        secondTerm = ((out_vectors.dot(inv)).dot(out_vectors.T)).dot(P)
        U = np.identity(numComplexCoords) - secondTerm
        print "calculated transformation matrix U"
        #writeArray("hessianbeforeU.txt", hessian, format='%10.7f')
        # apply the similarity-like transform H' = U H U^T
        Hprime = np.dot(U, hessian)
        Hprime = np.dot(Hprime, U.T)
        #writeArray(proteinComplex.getTitle()+"U.txt", U, format='%10.7f')
        #writeArray("hessianafterU.txt", Hprime, format='%10.7f')
        print "obtained Hprime with a fixed domain frame"
        return Hprime
def subtractCenterOfCoords(self, vector, xElement, yElement, zElement):
""" Subtract from a vector having a [i][3] dim array elementwise the center of coords and return the result. """
coordsNx3 = vector.getArrayNx3()
subtractArray = np.array([xElement, yElement, zElement])
coordsNx3 = coordsNx3 - subtractArray
resultVector = Vector(coordsNx3.flatten())
return resultVector
def addscaledHdelta(self, HR, HRtilde, deltaHRmultiplicator):
assert HR.shape == HRtilde.shape
deltaHR = HRtilde - HR
deltaHR = deltaHR * deltaHRmultiplicator
return (HR + deltaHR)
    def calcCustomH_ANew(self, HR, referenceStructure, neighborStructure, encounter, neighborhoodFrom, equilibriumAt, workOnReceptor=True, selstr='calpha'):
        """ Modifies the hessian HR or HL by adding additonal terms for intramolecular contacts.
        Args:
            HR: The original HR as calculated by prody
            referenceStructure: structure to take calphas from, the hessian HR belongs to it or to its superset if I is a chain
            neighborStructure: structure to apply the neighborhood calculations on
            encounter: object with all encounter information
            neighborhoodFrom: is the neighborhood calculated from the unbound complex C_u or the bound complex C_b
            equilibriumAt: is the equilibrium set to r_ij or r_ij_b
            workOnReceptor: is the Hessian and the referenceStructure receptor or ligand
            selstr: atomType of the course grained ANM (by default calpha)
        Returns: (modified hessian, list of calpha indices with intermolecular contacts)
        """
        assert equilibriumAt == "r_ij" or equilibriumAt == "r_ij_b"
        assert neighborhoodFrom == "C_u" or neighborhoodFrom == "C_b"
        # NOTE(review): when self.bound_provided is False, refchain/mobChain/
        # boundCounterpartChain/unboundCounterpartChain stay unbound here, yet
        # the contact loop below references them (e.g. the refchain membership
        # test) — confirm this path is only taken with bound structures.
        if workOnReceptor:
            reference = encounter.getReference()
            if self.bound_provided == True:
                refchain = encounter.getRefChain()
                mobile = encounter.getMobile()
                mobChain = encounter.getMobChain()
                boundCounterpart = encounter.getBoundCounterpart()
                boundCounterpartChain = encounter.getBoundCounterpartChain()
                unboundCounterpartChain = encounter.getUnboundCounterpartChain()
        else:
            # ligand view: swap the roles of reference and counterpart
            reference = encounter.getUnboundCounterpart()
            if self.bound_provided == True:
                refchain = encounter.getUnboundCounterpartChain()
                mobile = encounter.getBoundCounterpart()
                mobChain = encounter.getBoundCounterpartChain()
                boundCounterpart = encounter.getMobile()
                boundCounterpartChain = encounter.getMobChain()
                unboundCounterpartChain = encounter.getRefChain()
        neighborStructureCalpha = neighborStructure.select('calpha')
        contactsCounter = 0
        interCalphaIndices = []
        # walk every calpha of the reference; each one with intermolecular
        # contacts contributes a 3x3 update on the hessian diagonal block
        for idx, element in enumerate(referenceStructure.select('calpha')):
            contactsOfI = encounter.getIntermolecularNeighborsOfAtom(element, neighborStructure, selstr, str(self.utils.config.customHRdistance))
            # if element has contacts in the neighborStructure, the hessian needs an update in the 3*3 matrix on the diagonal of this element atom
            if contactsOfI:
                contactsCounter += contactsOfI.numAtoms()
                interCalphaIndices.append(idx)
                print "intermolecular contacts: ", contactsOfI.numAtoms()
                contacts_counterpartChainIndices = self.utils.getMatchingStructureSelections(neighborStructureCalpha, contactsOfI, neighborStructureCalpha)
                assert len(contactsOfI) == len(contacts_counterpartChainIndices)
                # access each element contact to create the deltaTerm
                overallTerm = np.zeros((3,3))
                for elementcontact, contacts_counterpartChainIndex in zip(contactsOfI, contacts_counterpartChainIndices):
                    if neighborhoodFrom == "C_u":
                        # neighborhood from the unbound complex: r_ij measured directly
                        r_ij = calcDistance(element, elementcontact)
                        if equilibriumAt == "r_ij":
                            r_ij_b = r_ij
                        #if element is not in matched reference or contact is not in matched counterpart: r_ij_b = r_ij
                        elif not(element in refchain.select('calpha')) or not(elementcontact in unboundCounterpartChain.select('calpha')):
                            r_ij_b = r_ij
                        else:
                            # both atoms are matched: read the bound-state distance from the bound chains
                            elementPositionInChain = encounter.accessANMs().getCalphaPosition(element, refchain.select('calpha'))
                            contactPositionInChain = encounter.accessANMs().getCalphaPosition(elementcontact, unboundCounterpartChain.select('calpha'))
                            r_ij_b = calcDistance(mobChain.select('calpha')[elementPositionInChain], boundCounterpartChain.select('calpha')[contactPositionInChain])
                            # consistency checks: same residue at the matched positions in all four chains
                            self.utils.assertTwoAtomsAreEqual(mobChain.select('calpha')[elementPositionInChain], element, useCoords=False, useResname=True)
                            self.utils.assertTwoAtomsAreEqual(refchain.select('calpha')[elementPositionInChain], element, useCoords=False, useResname=True)
                            self.utils.assertTwoAtomsAreEqual(boundCounterpartChain.select('calpha')[contactPositionInChain], elementcontact, useCoords=False, useResname=True)
                            self.utils.assertTwoAtomsAreEqual(unboundCounterpartChain.select('calpha')[contactPositionInChain], elementcontact, useCoords=False, useResname=True)
                        # make the 3*3 hessian term for this contact (excluding gamma, gamma is multiplied at the end to the sum)
                        deltaTerm = self.make3By3HessianTerm(element, elementcontact, r_ij, r_ij_b)
                        #print element, elementcontact, " r_ij, rij_b: ", r_ij, r_ij_b
                        overallTerm += deltaTerm
                    else:
                        # neighborhood from the bound complex: element/contact live in the
                        # bound structures, so r_ij is read back from the unbound chains
                        if equilibriumAt == "r_ij_b":
                            r_ij_b = calcDistance(element, elementcontact)
                            elementPositionInChain = encounter.accessANMs().getCalphaPosition(element, mobChain.select('calpha'))
                            contactPositionInChain = encounter.accessANMs().getCalphaPosition(elementcontact, boundCounterpartChain.select('calpha'))
                            r_ij = calcDistance(refchain.select('calpha')[elementPositionInChain], unboundCounterpartChain.select('calpha')[contactPositionInChain])
                            self.utils.assertTwoAtomsAreEqual(mobChain.select('calpha')[elementPositionInChain], element, useCoords=False, useResname=True)
                            self.utils.assertTwoAtomsAreEqual(refchain.select('calpha')[elementPositionInChain], element, useCoords=False, useResname=True)
                            self.utils.assertTwoAtomsAreEqual(boundCounterpartChain.select('calpha')[contactPositionInChain], elementcontact, useCoords=False, useResname=True)
                            self.utils.assertTwoAtomsAreEqual(unboundCounterpartChain.select('calpha')[contactPositionInChain], elementcontact, useCoords=False, useResname=True)
                        else:
                            # equilibrium at r_ij: the unbound distance is both rest length and equilibrium
                            elementPositionInChain = encounter.accessANMs().getCalphaPosition(element, mobChain.select('calpha'))
                            contactPositionInChain = encounter.accessANMs().getCalphaPosition(elementcontact, boundCounterpartChain.select('calpha'))
                            r_ij = calcDistance(refchain.select('calpha')[elementPositionInChain], unboundCounterpartChain.select('calpha')[contactPositionInChain])
                            r_ij_b = r_ij
                            self.utils.assertTwoAtomsAreEqual(mobChain.select('calpha')[elementPositionInChain], element, useCoords=False, useResname=True)
                            self.utils.assertTwoAtomsAreEqual(refchain.select('calpha')[elementPositionInChain], element, useCoords=False, useResname=True)
                            self.utils.assertTwoAtomsAreEqual(boundCounterpartChain.select('calpha')[contactPositionInChain], elementcontact, useCoords=False, useResname=True)
                            self.utils.assertTwoAtomsAreEqual(unboundCounterpartChain.select('calpha')[contactPositionInChain], elementcontact, useCoords=False, useResname=True)
                        # make the 3*3 hessian term for this contact (excluding gamma, gamma is multiplied at the end to the sum)
                        deltaTerm = self.make3By3HessianTerm(refchain.select('calpha')[elementPositionInChain], unboundCounterpartChain.select('calpha')[contactPositionInChain], r_ij, r_ij_b)
                        #print refchain.select('calpha')[elementPositionInChain], unboundCounterpartChain.select('calpha')[contactPositionInChain], " r_ij, rij_b: ", r_ij, r_ij_b
                        overallTerm += deltaTerm
                # multiply the overallTerm with the spring constant gamma
                overallTerm = overallTerm * self.utils.config.customForceConstant
                # add the overallterm to the hessian matrix
                # (for C_b, elementPositionInChain from the last contact iteration is
                # reused — all contacts share the same element, so it is the same value)
                if neighborhoodFrom == "C_b":
                    elementPosition = encounter.accessANMs().getCalphaPosition(refchain.select('calpha')[elementPositionInChain], reference.select('calpha'))
                else:
                    elementPosition = encounter.accessANMs().getCalphaPosition(element, reference.select('calpha'))
                HR = self.add3By3MatrixtoHessian(overallTerm, HR, elementPosition*3)
        print "added custom terms to hessian"
        print "total intermolecular contacts: ", contactsCounter
        return HR, interCalphaIndices
def calcCustomH_ANew_IJ(self, referenceStructure, neighborStructure, encounter, areStructuresChains, equilibriumAt, workOnReceptor=True, selstr='calpha'):
    """ Creates the off-diagonal (I,J) Hessian block built from intermolecular contacts.

    Args:
        referenceStructure: structure to take calphas from; its calphas index the rows of the returned block
        neighborStructure: structure to apply the neighborhood calculations on (indexes the columns)
        encounter: object with all encounter information
        areStructuresChains: boolean to describe if the structures are chains (subsets)
        equilibriumAt: is the equilibrium set to r_ij or r_ij_b
        workOnReceptor: is the Hessian and the referenceStructure receptor or ligand
        selstr: atomType of the coarse grained ANM (by default calpha)

    Returns:
        numpy array of shape (3*N_reference, 3*N_counterpart) with the per-contact
        3x3 terms (scaled by customForceConstant) accumulated into it.
    """
    assert equilibriumAt == "r_ij" or equilibriumAt == "r_ij_b"
    # Resolve the bound/unbound structure handles depending on which side
    # (receptor or ligand) the rows of the block belong to.
    if workOnReceptor:
        if areStructuresChains:
            if self.bound_provided == True:
                mobile = encounter.getMobChain()
                boundCounterpart = encounter.getBoundCounterpartChain()
            else:
                pass
        else:
            reference = encounter.getReference()
            unboundCounterpart = encounter.getUnboundCounterpart()
            if self.bound_provided == True:
                refchain = encounter.getRefChain()
                mobile = encounter.getMobile()
                mobChain = encounter.getMobChain()
                boundCounterpart = encounter.getBoundCounterpart()
                boundCounterpartChain = encounter.getBoundCounterpartChain()
                unboundCounterpartChain = encounter.getUnboundCounterpartChain()
    else:
        # Mirror of the branch above with receptor/ligand roles swapped.
        if areStructuresChains:
            if self.bound_provided == True:
                mobile = encounter.getBoundCounterpartChain()
                boundCounterpart = encounter.getMobChain()
            else:
                pass
        else:
            reference = encounter.getUnboundCounterpart()
            unboundCounterpart = encounter.getReference()
            if self.bound_provided == True:
                refchain = encounter.getUnboundCounterpartChain()
                mobile = encounter.getBoundCounterpart()
                mobChain = encounter.getBoundCounterpartChain()
                boundCounterpart = encounter.getMobile()
                boundCounterpartChain = encounter.getMobChain()
                unboundCounterpartChain = encounter.getRefChain()
    # NOTE(review): when areStructuresChains is True, `reference` and
    # `unboundCounterpart` are never assigned but are used just below —
    # confirm this method is only called with full structures.
    neighborStructureCalpha = neighborStructure.select('calpha')
    offDiagonalHessianMatrix = np.zeros(((reference.select('calpha').numAtoms()*3), (unboundCounterpart.select('calpha').numAtoms()*3) ))
    contactsCounter = 0
    for idx, element in enumerate(referenceStructure.select('calpha')):
        contactsOfI = encounter.getIntermolecularNeighborsOfAtom(element, neighborStructure, selstr, str(self.utils.config.customHRdistance))
        # if element has contacts in the neighborStructure, the hessian needs an update in the 3*3 matrix on the diagonal of this element atom
        if contactsOfI:
            print "intermolecular contacts: ", contactsOfI.numAtoms()
            contactsCounter += contactsOfI.numAtoms()
            # print "contact at i, refChainCalphas[i]: ", i, refChainCalphas[i]
            contacts_counterpartChainIndices = self.utils.getMatchingStructureSelections(neighborStructureCalpha, contactsOfI, neighborStructureCalpha)
            assert len(contactsOfI) == len(contacts_counterpartChainIndices)
            # access each element contact to create the deltaTerm
            for elementcontact, contacts_counterpartChainIndex in zip(contactsOfI, contacts_counterpartChainIndices):
                overallTerm = np.zeros((3,3))
                #self.utils.assertTwoAtomsAreEqual(refChainCalphas[i], mobChainCalphas[i], useCoords=False, useResname=True)
                #self.utils.assertTwoAtomsAreEqual(elementcontact, boundCounterpartChainCalphas[contacts_counterpartChainIndex], useCoords=False, useResname=True)
                r_ij = calcDistance(element, elementcontact)
                if equilibriumAt == "r_ij":
                    r_ij_b = r_ij
                #if element is not in matched reference or contact is not in matched counterpart: r_ij_b = r_ij
                elif not(element in refchain.select('calpha')) or not(elementcontact in unboundCounterpartChain.select('calpha')):
                    # no matched bound pair exists: fall back to the unbound distance
                    r_ij_b = r_ij
                else:
                    # look up the matched bound pair to obtain the bound equilibrium distance
                    elementPositionInChain = encounter.accessANMs().getCalphaPosition(element, refchain.select('calpha'))
                    contactPositionInChain = encounter.accessANMs().getCalphaPosition(elementcontact, unboundCounterpartChain.select('calpha'))
                    r_ij_b = calcDistance(mobChain.select('calpha')[elementPositionInChain], boundCounterpartChain.select('calpha')[contactPositionInChain])
                    # sanity checks: matched atoms must agree by residue name across bound/unbound
                    self.utils.assertTwoAtomsAreEqual(mobChain.select('calpha')[elementPositionInChain], element, useCoords=False, useResname=True)
                    self.utils.assertTwoAtomsAreEqual(refchain.select('calpha')[elementPositionInChain], element, useCoords=False, useResname=True)
                    self.utils.assertTwoAtomsAreEqual(boundCounterpartChain.select('calpha')[contactPositionInChain], elementcontact, useCoords=False, useResname=True)
                    self.utils.assertTwoAtomsAreEqual(unboundCounterpartChain.select('calpha')[contactPositionInChain], elementcontact, useCoords=False, useResname=True)
                #
                # make the 3*3 hessian term for this contact (excluding gamma, gamma is multiplied at the end to the sum)
                deltaTerm = self.make3By3OffDiagonalHessianTermIJ(element, elementcontact, r_ij, r_ij_b)
                overallTerm += deltaTerm
                #print "r_ij, r_ij_b: ", r_ij, r_ij_b
                # multiply the overallTerm with the spring constant gamma
                overallTerm = overallTerm * self.utils.config.customForceConstant
                # print overallTerm
                offDiagonalHessianMatrix = self.add3By3MatrixtoOffDiagonalHessianMatrixIJ(overallTerm, offDiagonalHessianMatrix, idx*3, contacts_counterpartChainIndex*3)
                #print contactsOfI.numAtoms(), "neighbors, modifying at hessian (loopcounter*3)+1: ", str((loopCounter*3)+1)
                #print str(i)+"'th refchain calpha, hessian line number ", (loopCounter*3)+1, "contacts with ", unboundCounterpartChainCalphas[contacts_counterpartChainIndex], " unboundcounterpartchainindex: ", contacts_counterpartChainIndices
                #print ""
    # add the overallterm to the hessian matrix
    ###elementPosition = encounter.accessANMs().getCalphaPosition(element, encounter.getReference().select('calpha'))
    print "added custom terms to offDiagonalHessianMatrix"
    print "total intermolecular contacts: ", contactsCounter
    return offDiagonalHessianMatrix
def calcCustomH_ANew_U1(self, HR, referenceStructure, neighborStructure, encounter, areStructuresChains, equilibriumAt, workOnReceptor=True, selstr='calpha'):
    """ Modifies the hessian HR or HL by adding diagonal terms for intermolecular contacts.

    Args:
        HR: The original HR as calculated by prody
        referenceStructure: structure to take calphas from, the hessian HR belongs to it or to its superset if it is a chain
        neighborStructure: structure to apply the neighborhood calculations on
        encounter: object with all encounter information
        areStructuresChains: boolean to describe if the structures are chains (subsets)
        equilibriumAt: is the equilibrium set to r_ij or r_ij_b
        workOnReceptor: is the Hessian and the referenceStructure receptor or ligand
        selstr: atomType of the coarse grained ANM (by default calpha)

    Returns:
        HR with a summed 3x3 contact term (scaled by customForceConstant)
        added on the diagonal super-element of every contacting calpha.
    """
    assert equilibriumAt == "r_ij" or equilibriumAt == "r_ij_b"
    # Resolve the bound/unbound structure handles for the side being worked on.
    if workOnReceptor:
        refchain = encounter.getRefChain()
        mobile = encounter.getMobile()
        mobChain = encounter.getMobChain()
        boundCounterpart = encounter.getBoundCounterpart()
        boundCounterpartChain = encounter.getBoundCounterpartChain()
        unboundCounterpartChain = encounter.getUnboundCounterpartChain()
    else:
        # receptor/ligand roles swapped
        refchain = encounter.getUnboundCounterpartChain()
        mobile = encounter.getBoundCounterpart()
        mobChain = encounter.getBoundCounterpartChain()
        boundCounterpart = encounter.getMobile()
        boundCounterpartChain = encounter.getMobChain()
        unboundCounterpartChain = encounter.getRefChain()
    neighborStructureCalpha = neighborStructure.select('calpha')
    for idx, element in enumerate(referenceStructure.select('calpha')):
        contactsOfI = encounter.getIntermolecularNeighborsOfAtom(element, neighborStructure, selstr, str(self.utils.config.customHRdistance))
        # if element has contacts in the neighborStructure, the hessian needs an update in the 3*3 matrix on the diagonal of this element atom
        if contactsOfI:
            # print "contact at i, refChainCalphas[i]: ", i, refChainCalphas[i]
            contacts_counterpartChainIndices = self.utils.getMatchingStructureSelections(neighborStructureCalpha, contactsOfI, neighborStructureCalpha)
            assert len(contactsOfI) == len(contacts_counterpartChainIndices)
            # access each element contact to create the deltaTerm
            overallTerm = np.zeros((3,3))
            for elementcontact, contacts_counterpartChainIndex in zip(contactsOfI, contacts_counterpartChainIndices):
                #self.utils.assertTwoAtomsAreEqual(refChainCalphas[i], mobChainCalphas[i], useCoords=False, useResname=True)
                #self.utils.assertTwoAtomsAreEqual(elementcontact, boundCounterpartChainCalphas[contacts_counterpartChainIndex], useCoords=False, useResname=True)
                if equilibriumAt == "r_ij_b":
                    # equilibrium at the distance in the structure the neighbors came from
                    r_ij_b = calcDistance(element, elementcontact)
                    elementPositionInChain = encounter.accessANMs().getCalphaPosition(element, mobChain.select('calpha'))
                    contactPositionInChain = encounter.accessANMs().getCalphaPosition(elementcontact, boundCounterpartChain.select('calpha'))
                    r_ij = calcDistance(refchain.select('calpha')[elementPositionInChain], unboundCounterpartChain.select('calpha')[contactPositionInChain])
                    # sanity checks: matched atoms must agree by residue name across bound/unbound
                    self.utils.assertTwoAtomsAreEqual(mobChain.select('calpha')[elementPositionInChain], element, useCoords=False, useResname=True)
                    self.utils.assertTwoAtomsAreEqual(refchain.select('calpha')[elementPositionInChain], element, useCoords=False, useResname=True)
                    self.utils.assertTwoAtomsAreEqual(boundCounterpartChain.select('calpha')[contactPositionInChain], elementcontact, useCoords=False, useResname=True)
                    self.utils.assertTwoAtomsAreEqual(unboundCounterpartChain.select('calpha')[contactPositionInChain], elementcontact, useCoords=False, useResname=True)
                else:
                    # equilibrium at r_ij: both distances collapse to the unbound one
                    elementPositionInChain = encounter.accessANMs().getCalphaPosition(element, mobChain.select('calpha'))
                    contactPositionInChain = encounter.accessANMs().getCalphaPosition(elementcontact, boundCounterpartChain.select('calpha'))
                    r_ij = calcDistance(refchain.select('calpha')[elementPositionInChain], unboundCounterpartChain.select('calpha')[contactPositionInChain])
                    r_ij_b = r_ij
                    self.utils.assertTwoAtomsAreEqual(mobChain.select('calpha')[elementPositionInChain], element, useCoords=False, useResname=True)
                    self.utils.assertTwoAtomsAreEqual(refchain.select('calpha')[elementPositionInChain], element, useCoords=False, useResname=True)
                    self.utils.assertTwoAtomsAreEqual(boundCounterpartChain.select('calpha')[contactPositionInChain], elementcontact, useCoords=False, useResname=True)
                    self.utils.assertTwoAtomsAreEqual(unboundCounterpartChain.select('calpha')[contactPositionInChain], elementcontact, useCoords=False, useResname=True)
                #r_ij_b = calcDistance(zip(mobile.select('calpha'))[idx][0], zip(boundCounterpart.select('calpha'))[contacts_counterpartChainIndex][0])
                # make the 3*3 hessian term for this contact (excluding gamma, gamma is multiplied at the end to the sum)
                deltaTerm = self.make3By3HessianTerm(refchain.select('calpha')[elementPositionInChain], unboundCounterpartChain.select('calpha')[contactPositionInChain], r_ij, r_ij_b)
                print refchain.select('calpha')[elementPositionInChain], unboundCounterpartChain.select('calpha')[contactPositionInChain], " r_ij, rij_b: ", r_ij, r_ij_b
                overallTerm += deltaTerm
                #print "r_ij, r_ij_b: ", r_ij, r_ij_b
            # multiply the overallTerm with the spring constant gamma
            overallTerm = overallTerm * self.utils.config.customForceConstant
            # print overallTerm
            #print contactsOfI.numAtoms(), "neighbors, modifying at hessian (loopcounter*3)+1: ", str((loopCounter*3)+1)
            #print str(i)+"'th refchain calpha, hessian line number ", (loopCounter*3)+1, "contacts with ", unboundCounterpartChainCalphas[contacts_counterpartChainIndex], " unboundcounterpartchainindex: ", contacts_counterpartChainIndices
            #print ""
            # add the overallterm to the hessian matrix
            # NOTE(review): this maps via getReference() even when workOnReceptor is
            # False, and relies on elementPositionInChain from the last loop pass
            # (constant for a fixed element) — confirm both are intended.
            elementPosition = encounter.accessANMs().getCalphaPosition(refchain.select('calpha')[elementPositionInChain], encounter.getReference().select('calpha'))
            HR = self.add3By3MatrixtoHessian(overallTerm, HR, elementPosition*3)
            print "adding to hessian at: ", (elementPosition*3+1)
    print "added custom terms to hessian"
    return HR
def calcCustomH_A(self, HR, encounter, workOnReceptor=True, selstr='calpha'):
    """ Adds intermolecular contact terms to the diagonal super-elements of HR and returns it.

    For every matched calpha of the reference that has unbound intermolecular
    neighbors within customHRdistance, a summed 3x3 term (scaled by
    customForceConstant) is added at that calpha's diagonal position.
    """
    # Resolve matched chain/structure handles for the side being worked on.
    if workOnReceptor:
        refChainCalphas = encounter.getRefChain().select('calpha')
        mobChainCalphas = encounter.getMobChain().select('calpha')
        mobChain = encounter.getMobChain()
        refChain = encounter.getRefChain()
        boundCounterpartChainCalphas = encounter.getBoundCounterpartChain().select('calpha')
        boundCounterpartChain = encounter.getBoundCounterpartChain()
        unboundCounterpartChain = encounter.getUnboundCounterpartChain()
        unboundCounterpartChainCalphas = encounter.getUnboundCounterpartChain().select('calpha')
        referenceCalphas = encounter.getReference().select('calpha')
    else:
        # receptor/ligand roles swapped
        refChainCalphas = encounter.getUnboundCounterpartChain().select('calpha')
        mobChainCalphas = encounter.getBoundCounterpartChain().select('calpha')
        mobChain = encounter.getBoundCounterpartChain()
        refChain = encounter.getUnboundCounterpartChain()
        boundCounterpartChainCalphas = encounter.getMobChain().select('calpha')
        boundCounterpartChain = encounter.getMobChain()
        unboundCounterpartChain = encounter.getRefChain()
        unboundCounterpartChainCalphas = encounter.getRefChain().select('calpha')
        referenceCalphas = encounter.getUnboundCounterpart().select('calpha')
    #encounter.printIntermolecularNeighbors(encounter.getReference(), encounter.getUnboundCounterpart(), selstr, str(self.utils.config.customHRdistance))
    # Loop over all calphas in the reference structure (using matched chains).
    # loopCounter indexes the full reference; i indexes the matched chain only.
    counterUnmatchedCalphas = 0
    loopCounter = 0
    for element in referenceCalphas:
        i = loopCounter - counterUnmatchedCalphas
        if self.utils.doesAtomExistInY(element, refChainCalphas) is None:
            # calpha has no match in the chain: skip it but keep the full-structure index moving
            counterUnmatchedCalphas += 1
            loopCounter += 1
            continue
        else:
            contactsOfI = encounter.getIntermolecularNeighbors(refChain, unboundCounterpartChain, i, selstr, str(self.utils.config.customHRdistance))
            # if there are contacts in the unbound counterpart, the hessian needs an update in the 3*3 matrix of the diagonal of this atom
            if contactsOfI:
                # print "contact at i, refChainCalphas[i]: ", i, refChainCalphas[i]
                contacts_counterpartChainIndices = self.utils.getMatchingStructure(unboundCounterpartChainCalphas, contactsOfI, boundCounterpartChainCalphas)
                assert len(contactsOfI) == len(contacts_counterpartChainIndices)
                # access each element contact to create the deltaTerm
                overallTerm = np.zeros((3,3))
                for elementcontact, contacts_counterpartChainIndex in zip(contactsOfI, contacts_counterpartChainIndices):
                    self.utils.assertTwoAtomsAreEqual(refChainCalphas[i], mobChainCalphas[i], useCoords=False, useResname=True)
                    self.utils.assertTwoAtomsAreEqual(elementcontact, boundCounterpartChainCalphas[contacts_counterpartChainIndex], useCoords=False, useResname=True)
                    r_ij = calcDistance(refChainCalphas[i], elementcontact)
                    r_ij_b = calcDistance(mobChainCalphas[i], boundCounterpartChainCalphas[contacts_counterpartChainIndex])
                    # make the 3*3 hessian term for this contact (excluding gamma, gamma is multiplied at the end to the sum)
                    deltaTerm = self.make3By3HessianTerm(refChainCalphas[i], elementcontact, r_ij, r_ij_b)
                    overallTerm += deltaTerm
                    #print "r_ij, r_ij_b: ", r_ij, r_ij_b
                # multiply the overallTerm with the spring constant gamma
                overallTerm = overallTerm * self.utils.config.customForceConstant
                # print overallTerm
                #print contactsOfI.numAtoms(), "neighbors, modifying at hessian (loopcounter*3)+1: ", str((loopCounter*3)+1)
                print str(i)+"'th refchain calpha, hessian line number ", (loopCounter*3)+1, "contacts with ", unboundCounterpartChainCalphas[contacts_counterpartChainIndex], " unboundcounterpartchainindex: ", contacts_counterpartChainIndices
                print ""
                # add the overallterm to the hessian matrix
                HR = self.add3By3MatrixtoHessian(overallTerm, HR, loopCounter*3)
            loopCounter += 1
    assert(loopCounter-counterUnmatchedCalphas) == refChainCalphas.numAtoms()
    print "added custom terms to hessian"
    return HR
def calcCustomH_A_IJ(self, encounter, workOnReceptor=True, selstr='calpha'):
    """ Builds and returns the off-diagonal (I,J) block of the Hessian from
    intermolecular contacts between the reference and its unbound counterpart.

    Returns a numpy array of shape (3*N_reference, 3*N_counterpart).
    """
    # Resolve matched chain/structure handles for the side being worked on.
    if workOnReceptor:
        refChainCalphas = encounter.getRefChain().select('calpha')
        mobChainCalphas = encounter.getMobChain().select('calpha')
        mobChain = encounter.getMobChain()
        refChain = encounter.getRefChain()
        boundCounterpartChainCalphas = encounter.getBoundCounterpartChain().select('calpha')
        boundCounterpartChain = encounter.getBoundCounterpartChain()
        unboundCounterpartChain = encounter.getUnboundCounterpartChain()
        unboundCounterpartChainCalphas = encounter.getUnboundCounterpartChain().select('calpha')
        referenceCalphas = encounter.getReference().select('calpha')
        mobileCalphas = encounter.getMobile().select('calpha')
        unboundCounterpart = encounter.getUnboundCounterpart()
        unboundCounterpartCalphas = encounter.getUnboundCounterpart().select('calpha')
    else:
        # receptor/ligand roles swapped
        refChainCalphas = encounter.getUnboundCounterpartChain().select('calpha')
        mobChainCalphas = encounter.getBoundCounterpartChain().select('calpha')
        mobChain = encounter.getBoundCounterpartChain()
        refChain = encounter.getUnboundCounterpartChain()
        boundCounterpartChainCalphas = encounter.getMobChain().select('calpha')
        boundCounterpartChain = encounter.getMobChain()
        unboundCounterpartChain = encounter.getRefChain()
        unboundCounterpartChainCalphas = encounter.getRefChain().select('calpha')
        referenceCalphas = encounter.getUnboundCounterpart().select('calpha')
        mobileCalphas = encounter.getBoundCounterpart().select('calpha')
        unboundCounterpart = encounter.getReference()
        unboundCounterpartCalphas = encounter.getReference().select('calpha')
    offDiagonalHessianMatrix = np.zeros(((referenceCalphas.numAtoms()*3), (unboundCounterpartCalphas.numAtoms()*3) ))
    #encounter.printIntermolecularNeighbors(encounter.getReference(), encounter.getUnboundCounterpart(), selstr, str(self.utils.config.customHRdistance))
    # Loop over all calphas in the reference structure (using matched chains).
    # loopCounter indexes the full reference; i indexes the matched chain only.
    counterUnmatchedCalphas = 0
    loopCounter = 0
    for element in referenceCalphas:
        i = loopCounter - counterUnmatchedCalphas
        if self.utils.doesAtomExistInY(element, refChainCalphas) is None:
            counterUnmatchedCalphas += 1
            loopCounter += 1
            continue
        else:
            contactsOfI = encounter.getIntermolecularNeighbors(refChain, unboundCounterpartChain, i, selstr, str(self.utils.config.customHRdistance))
            # if there are contacts in the unbound counterpart, the hessian needs an update in the 3*3 matrix of the diagonal of this atom
            if contactsOfI:
                # print "contact at i, refChainCalphas[i]: ", i, refChainCalphas[i]
                contacts_counterpartChainIndices = self.utils.getMatchingStructure(unboundCounterpartChainCalphas, contactsOfI, boundCounterpartChainCalphas)
                assert len(contactsOfI) == len(contacts_counterpartChainIndices)
                # access each element contact to create the deltaTerm
                for elementcontact, contacts_counterpartChainIndex in zip(contactsOfI, contacts_counterpartChainIndices):
                    overallTerm = np.zeros((3,3))
                    self.utils.assertTwoAtomsAreEqual(refChainCalphas[i], mobChainCalphas[i], useCoords=False, useResname=True)
                    self.utils.assertTwoAtomsAreEqual(elementcontact, boundCounterpartChainCalphas[contacts_counterpartChainIndex], useCoords=False, useResname=True)
                    r_ij = calcDistance(refChainCalphas[i], elementcontact)
                    r_ij_b = calcDistance(mobChainCalphas[i], boundCounterpartChainCalphas[contacts_counterpartChainIndex])
                    # make the 3*3 hessian term for this contact (excluding gamma, gamma is multiplied at the end to the sum)
                    # NOTE(review): r_ij_b is computed above but the call below passes
                    # r_ij for both distance arguments — confirm whether the bound
                    # equilibrium distance was meant to be used here.
                    deltaTerm = self.make3By3OffDiagonalHessianTermIJ(refChainCalphas[i], unboundCounterpartChainCalphas[contacts_counterpartChainIndex], r_ij, r_ij)
                    overallTerm += deltaTerm
                    #print "r_ij, r_ij_b: ", r_ij, r_ij_b
                    # multiply the overallTerm with the spring constant gamma
                    overallTerm = overallTerm * self.utils.config.customForceConstant
                    counterPartCalphaPosition = encounter.accessANMs().getCalphaPosition(unboundCounterpartChainCalphas[contacts_counterpartChainIndex], unboundCounterpart)
                    print "off diagonal i,j "+str(loopCounter*3)+" "+str(counterPartCalphaPosition*3)+ " term: ", overallTerm
                    offDiagonalHessianMatrix = self.add3By3MatrixtoOffDiagonalHessianMatrixIJ(overallTerm, offDiagonalHessianMatrix, loopCounter*3, counterPartCalphaPosition*3)
                #print contactsOfI.numAtoms(), "neighbors, modifying at hessian (loopcounter*3)+1: ", str((loopCounter*3)+1)
                print str(i)+"'th refchain calpha, hessian line number ", (loopCounter*3)+1, "contacts with ", unboundCounterpartChainCalphas[contacts_counterpartChainIndex], " unboundcounterpartchainindex: ", contacts_counterpartChainIndices
                print ""
            loopCounter += 1
    assert(loopCounter-counterUnmatchedCalphas) == refChainCalphas.numAtoms()
    print "added custom terms to hessian"
    return offDiagonalHessianMatrix
def calcCustomH_A_NeighborsBound(self, HR, encounter, selstr='calpha'):
    """ Adds intermolecular contact terms to the diagonal of HR and returns it.

    Unlike calcCustomH_A, the contact detection runs on the BOUND pair
    (mobChain vs boundCounterpartChain); distances are then taken between the
    matched unbound atoms (r_ij) and the bound atoms (r_ij_b).
    """
    refChainCalphas = encounter.getRefChain().select('calpha')
    mobChainCalphas = encounter.getMobChain().select('calpha')
    boundCounterpartChainCalphas = encounter.getBoundCounterpartChain().select('calpha')
    unboundCounterpartChainCalphas = encounter.getUnboundCounterpartChain().select('calpha')
    referenceCalphas = encounter.getReference().select('calpha')
    mobileCalphas = encounter.getMobile().select('calpha')
    #encounter.printIntermolecularNeighbors(encounter.getMobile(), encounter.getBoundCounterpart(), selstr, str(self.utils.config.customHRdistance))
    # Loop over all calphas in the reference structure (using matched chains).
    # loopCounter indexes the full reference; i indexes the matched chain only.
    counterUnmatchedCalphas = 0
    loopCounter = 0
    for element in referenceCalphas:
        i = loopCounter - counterUnmatchedCalphas
        if self.utils.doesAtomExistInY(element, refChainCalphas) is None:
            counterUnmatchedCalphas += 1
            loopCounter += 1
            continue
        else:
            # neighborhood search is performed on the bound chains here
            contactsOfI = encounter.getIntermolecularNeighbors(encounter.getMobChain(), encounter.getBoundCounterpartChain(), i, selstr, str(self.utils.config.customHRdistance))
            # if there are contacts in the unbound counterpart, the hessian needs an update in the 3*3 matrix of the diagonal of this atom
            if contactsOfI:
                # print "contact at i, refChainCalphas[i]: ", i, refChainCalphas[i]
                contacts_counterpartChainIndices = self.utils.getMatchingStructure(boundCounterpartChainCalphas, contactsOfI, unboundCounterpartChainCalphas)
                assert len(contactsOfI) == len(contacts_counterpartChainIndices)
                # access each element contact to create the deltaTerm
                overallTerm = np.zeros((3,3))
                for elementcontact, contacts_counterpartChainIndex in zip(contactsOfI, contacts_counterpartChainIndices):
                    self.utils.assertTwoAtomsAreEqual(refChainCalphas[i], mobChainCalphas[i], useCoords=False, useResname=False)
                    self.utils.assertTwoAtomsAreEqual(elementcontact, unboundCounterpartChainCalphas[contacts_counterpartChainIndex], useCoords=False, useResname=False)
                    self.utils.assertTwoAtomsAreEqual(boundCounterpartChainCalphas[contacts_counterpartChainIndex], elementcontact, useCoords=False, useResname=False)
                    self.utils.assertTwoAtomsAreEqual(boundCounterpartChainCalphas[contacts_counterpartChainIndex], unboundCounterpartChainCalphas[contacts_counterpartChainIndex], useCoords=False, useResname=False)
                    r_ij = calcDistance(refChainCalphas[i], unboundCounterpartChainCalphas[contacts_counterpartChainIndex])
                    r_ij_b = calcDistance(mobChainCalphas[i], elementcontact)
                    # make the 3*3 hessian term for this contact (excluding gamma, gamma is multiplied at the end to the sum)
                    # if customHR_B, just use the distance d_0, else use the true distance in the bound pairs for the second derivatives
                    if self.utils.config.customHR_B:
                        if r_ij >= self.utils.config.customHRdistance:
                            deltaTerm = self.make3By3HessianTerm(refChainCalphas[i], unboundCounterpartChainCalphas[contacts_counterpartChainIndex], r_ij, self.utils.config.customHRdistance)
                            overallTerm += deltaTerm
                    else:
                        deltaTerm = self.make3By3HessianTerm(refChainCalphas[i], unboundCounterpartChainCalphas[contacts_counterpartChainIndex], r_ij, r_ij_b)
                        overallTerm += deltaTerm
                    #print "r_ij, r_ij_b: ", r_ij, r_ij_b
                # multiply the overallTerm with the spring constant gamma
                overallTerm = overallTerm * self.utils.config.customForceConstant
                #print overallTerm
                print contactsOfI.numAtoms(), "neighbors, modifying at hessian (loopcounter*3)+1: ", str((loopCounter*3)+1)
                #print contactsOfI.getSelstr()
                print str(i)+"'th refchain calpha, hessian line number ", (loopCounter*3)+1, "contacts with ", unboundCounterpartChainCalphas[contacts_counterpartChainIndex], " unboundcounterpartchainindex: ", contacts_counterpartChainIndices
                print ""
                # add the overallterm to the hessian matrix
                HR = self.add3By3MatrixtoHessian(overallTerm, HR, loopCounter*3)
            loopCounter += 1
    assert(loopCounter-counterUnmatchedCalphas) == refChainCalphas.numAtoms()
    print "added custom terms to hessian"
    return HR
def calcCustomH_A_NeighborsBoundGeneral(self, HR, encounter, workOnReceptor=True, selstr='calpha'):
    """ Adds intermolecular contact terms to the diagonal of HR and returns it.

    Generalized variant of calcCustomH_A_NeighborsBound: the workOnReceptor
    flag selects whether the receptor or the ligand side supplies the rows of
    HR. Contact detection runs on the BOUND chains; r_ij is taken between the
    matched unbound atoms and r_ij_b between the bound atoms.
    """
    # Resolve matched chain/structure handles for the side being worked on.
    if workOnReceptor:
        refChainCalphas = encounter.getRefChain().select('calpha')
        mobChainCalphas = encounter.getMobChain().select('calpha')
        mobChain = encounter.getMobChain()
        boundCounterpartChainCalphas = encounter.getBoundCounterpartChain().select('calpha')
        boundCounterpartChain = encounter.getBoundCounterpartChain()
        unboundCounterpartChainCalphas = encounter.getUnboundCounterpartChain().select('calpha')
        referenceCalphas = encounter.getReference().select('calpha')
    else:
        # receptor/ligand roles swapped
        refChainCalphas = encounter.getUnboundCounterpartChain().select('calpha')
        mobChainCalphas = encounter.getBoundCounterpartChain().select('calpha')
        mobChain = encounter.getBoundCounterpartChain()
        boundCounterpartChainCalphas = encounter.getMobChain().select('calpha')
        boundCounterpartChain = encounter.getMobChain()
        unboundCounterpartChainCalphas = encounter.getRefChain().select('calpha')
        referenceCalphas = encounter.getUnboundCounterpart().select('calpha')
    #encounter.printIntermolecularNeighbors(encounter.getMobile(), encounter.getBoundCounterpart(), selstr, str(self.utils.config.customHRdistance))
    # Loop over all calphas in the reference structure (using matched chains).
    # loopCounter indexes the full reference; i indexes the matched chain only.
    counterUnmatchedCalphas = 0
    loopCounter = 0
    for element in referenceCalphas:
        i = loopCounter - counterUnmatchedCalphas
        if self.utils.doesAtomExistInY(element, refChainCalphas) is None:
            counterUnmatchedCalphas += 1
            loopCounter += 1
            continue
        else:
            # neighborhood search is performed on the bound chains here
            contactsOfI = encounter.getIntermolecularNeighbors(mobChain, boundCounterpartChain, i, selstr, str(self.utils.config.customHRdistance))
            # if there are contacts in the unbound counterpart, the hessian needs an update in the 3*3 matrix of the diagonal of this atom
            if contactsOfI:
                # print "contact at i, refChainCalphas[i]: ", i, refChainCalphas[i]
                contacts_counterpartChainIndices = self.utils.getMatchingStructure(boundCounterpartChainCalphas, contactsOfI, unboundCounterpartChainCalphas)
                assert len(contactsOfI) == len(contacts_counterpartChainIndices)
                # access each element contact to create the deltaTerm
                overallTerm = np.zeros((3,3))
                for elementcontact, contacts_counterpartChainIndex in zip(contactsOfI, contacts_counterpartChainIndices):
                    self.utils.assertTwoAtomsAreEqual(refChainCalphas[i], mobChainCalphas[i], useCoords=False, useResname=False)
                    self.utils.assertTwoAtomsAreEqual(elementcontact, unboundCounterpartChainCalphas[contacts_counterpartChainIndex], useCoords=False, useResname=False)
                    self.utils.assertTwoAtomsAreEqual(boundCounterpartChainCalphas[contacts_counterpartChainIndex], elementcontact, useCoords=False, useResname=False)
                    self.utils.assertTwoAtomsAreEqual(boundCounterpartChainCalphas[contacts_counterpartChainIndex], unboundCounterpartChainCalphas[contacts_counterpartChainIndex], useCoords=False, useResname=False)
                    r_ij = calcDistance(refChainCalphas[i], unboundCounterpartChainCalphas[contacts_counterpartChainIndex])
                    r_ij_b = calcDistance(mobChainCalphas[i], elementcontact)
                    # make the 3*3 hessian term for this contact (excluding gamma, gamma is multiplied at the end to the sum)
                    # if customHR_B, just use the distance d_0, else use the true distance in the bound pairs for the second derivatives
                    if self.utils.config.customHR_B:
                        if r_ij >= self.utils.config.customHRdistance:
                            deltaTerm = self.make3By3HessianTerm(refChainCalphas[i], unboundCounterpartChainCalphas[contacts_counterpartChainIndex], r_ij, self.utils.config.customHRdistance)
                            overallTerm += deltaTerm
                    else:
                        deltaTerm = self.make3By3HessianTerm(refChainCalphas[i], unboundCounterpartChainCalphas[contacts_counterpartChainIndex], r_ij, r_ij_b)
                        overallTerm += deltaTerm
                    #print "r_ij, r_ij_b: ", r_ij, r_ij_b
                # multiply the overallTerm with the spring constant gamma
                overallTerm = overallTerm * self.utils.config.customForceConstant
                #print overallTerm
                print contactsOfI.numAtoms(), "neighbors, modifying at hessian (loopcounter*3)+1: ", str((loopCounter*3)+1)
                #print contactsOfI.getSelstr()
                print str(i)+"'th refchain calpha, hessian line number ", (loopCounter*3)+1, "contacts with ", unboundCounterpartChainCalphas[contacts_counterpartChainIndex], " unboundcounterpartchainindex: ", contacts_counterpartChainIndices
                print ""
                # add the overallterm to the hessian matrix
                HR = self.add3By3MatrixtoHessian(overallTerm, HR, loopCounter*3)
            loopCounter += 1
    assert(loopCounter-counterUnmatchedCalphas) == refChainCalphas.numAtoms()
    print "added custom terms to hessian"
    return HR
def calcOffDiagonalHessianBlockMatrixGeneral_IJ(self, encounter, workOnReceptor=True, selstr='calpha'):
    """ Creates the off diagonal hessian block matrix and returns it.

    Contact detection runs on the BOUND chains; each contact contributes a 3x3
    term (scaled by customForceConstant) at (row of the reference calpha,
    column of the matched counterpart calpha). Returns a numpy array of shape
    (3*N_reference, 3*N_counterpart).
    """
    # Resolve matched chain/structure handles for the side being worked on.
    if workOnReceptor:
        refChainCalphas = encounter.getRefChain().select('calpha')
        mobChainCalphas = encounter.getMobChain().select('calpha')
        mobChain = encounter.getMobChain()
        boundCounterpartChainCalphas = encounter.getBoundCounterpartChain().select('calpha')
        boundCounterpartChain = encounter.getBoundCounterpartChain()
        unboundCounterpartChainCalphas = encounter.getUnboundCounterpartChain().select('calpha')
        referenceCalphas = encounter.getReference().select('calpha')
        mobileCalphas = encounter.getMobile().select('calpha')
        unboundCounterpart = encounter.getUnboundCounterpart()
        unboundCounterpartCalphas = encounter.getUnboundCounterpart().select('calpha')
    else:
        # receptor/ligand roles swapped
        refChainCalphas = encounter.getUnboundCounterpartChain().select('calpha')
        mobChainCalphas = encounter.getBoundCounterpartChain().select('calpha')
        mobChain = encounter.getBoundCounterpartChain()
        boundCounterpartChainCalphas = encounter.getMobChain().select('calpha')
        boundCounterpartChain = encounter.getMobChain()
        unboundCounterpartChainCalphas = encounter.getRefChain().select('calpha')
        referenceCalphas = encounter.getUnboundCounterpart().select('calpha')
        mobileCalphas = encounter.getBoundCounterpart().select('calpha')
        unboundCounterpart = encounter.getReference()
        unboundCounterpartCalphas = encounter.getReference().select('calpha')
    offDiagonalHessianMatrix = np.zeros(((referenceCalphas.numAtoms()*3), (unboundCounterpartCalphas.numAtoms()*3) ))
    # Loop over all calphas in the reference structure (using matched chains).
    # loopCounter indexes the full reference; i indexes the matched chain only.
    counterUnmatchedCalphas = 0
    loopCounter = 0
    for element in referenceCalphas:
        i = loopCounter - counterUnmatchedCalphas
        if self.utils.doesAtomExistInY(element, refChainCalphas) is None:
            counterUnmatchedCalphas += 1
            loopCounter += 1
            continue
        else:
            # neighborhood search is performed on the bound chains here
            contactsOfI = encounter.getIntermolecularNeighbors(mobChain, boundCounterpartChain, i, selstr, str(self.utils.config.customHRdistance))
            # if there are contacts in the bound counterpart, the off diagonal part of the hessian needs an update in the 3*3 matrix of this atom and its neighbor
            if contactsOfI:
                # print "contact at i, refChainCalphas[i]: ", i, refChainCalphas[i]
                contacts_counterpartChainIndices = self.utils.getMatchingStructure(boundCounterpartChainCalphas, contactsOfI, unboundCounterpartChainCalphas)
                assert len(contactsOfI) == len(contacts_counterpartChainIndices)
                # access each element contact to create the deltaTerm
                for elementcontact, contacts_counterpartChainIndex in zip(contactsOfI, contacts_counterpartChainIndices):
                    overallTerm = np.zeros((3,3))
                    self.utils.assertTwoAtomsAreEqual(refChainCalphas[i], mobChainCalphas[i], useCoords=False, useResname=False)
                    self.utils.assertTwoAtomsAreEqual(elementcontact, unboundCounterpartChainCalphas[contacts_counterpartChainIndex], useCoords=False, useResname=False)
                    self.utils.assertTwoAtomsAreEqual(boundCounterpartChainCalphas[contacts_counterpartChainIndex], elementcontact, useCoords=False, useResname=False)
                    self.utils.assertTwoAtomsAreEqual(boundCounterpartChainCalphas[contacts_counterpartChainIndex], unboundCounterpartChainCalphas[contacts_counterpartChainIndex], useCoords=False, useResname=False)
                    r_ij = calcDistance(refChainCalphas[i], unboundCounterpartChainCalphas[contacts_counterpartChainIndex])
                    r_ij_b = calcDistance(mobChainCalphas[i], elementcontact)
                    # make the 3*3 hessian term for this contact
                    # if customHR_B, just use the distance d_0, else use the true distance in the bound pairs for the second derivatives
                    if self.utils.config.customHR_B:
                        if r_ij >= self.utils.config.customHRdistance:
                            deltaTerm = self.make3By3OffDiagonalHessianTermIJ(refChainCalphas[i], unboundCounterpartChainCalphas[contacts_counterpartChainIndex], r_ij, self.utils.config.customHRdistance)
                            overallTerm += deltaTerm
                    else:
                        deltaTerm = self.make3By3OffDiagonalHessianTermIJ(refChainCalphas[i], unboundCounterpartChainCalphas[contacts_counterpartChainIndex], r_ij, r_ij_b)
                        overallTerm += deltaTerm
                    # multiply the overallTerm with the spring constant gamma
                    overallTerm = overallTerm * self.utils.config.customForceConstant
                    # add the overall Term to the correct off diagonal super element in the hessian
                    counterPartCalphaPosition = encounter.accessANMs().getCalphaPosition(unboundCounterpartChainCalphas[contacts_counterpartChainIndex], unboundCounterpart)
                    offDiagonalHessianMatrix = self.add3By3MatrixtoOffDiagonalHessianMatrixIJ(overallTerm, offDiagonalHessianMatrix, loopCounter*3, counterPartCalphaPosition*3)
                    #print "r_ij, r_ij_b: ", r_ij, r_ij_b
                    #print overallTerm
                print contactsOfI.numAtoms(), "neighbors, modifying at hessian (loopcounter*3)+1: ", str((loopCounter*3)+1)
                #print contactsOfI.getSelstr()
                #print str(i)+"'th refchain calpha, hessian line number ", (loopCounter*3)+1, "contacts with ", unboundCounterpartChainCalphas[contacts_counterpartChainIndex], " unboundcounterpartchainindex: ", contacts_counterpartChainIndices
                print ""
            loopCounter += 1
    assert(loopCounter-counterUnmatchedCalphas) == refChainCalphas.numAtoms()
    print "added custom terms to hessian"
    return offDiagonalHessianMatrix
# origs
def secondDerivativeTermOnDiagonal(self, x_i, x_j, r_ij, r_ij_b):
""" @V / @x_i@x_i (excluding gamma)"""
result = 1 + (r_ij_b * np.power(x_j - x_i, 2) ) / np.power(r_ij, 3) - r_ij_b/r_ij
return result
def secondDerivateTermOffDiagonal(self, x_i, x_j, y_i, y_j, r_ij, r_ij_b):
""" @V / @x_i@y_j (excluding gamma) """
result = r_ij_b * (x_j - x_i) * ((y_j - y_i)/np.power(r_ij, 3))
return result
def secondDerivateTermOffDiagonalAtomsIJ(self, x_i, x_j, y_i, y_j, r_ij, r_ij_b):
""" Equation 21 before reducing, Atilgan paper, @V / @x_i@y_j (excluding gamma) """
result = -1.0 * r_ij_b * (x_j - x_i) * ((y_j - y_i)/np.power(r_ij, 3))
return result
#
# using r_ij_b
# def secondDerivativeTermOnDiagonal(self, x_i, x_j, r_ij, r_ij_b):
# """ @V / @x_i@x_i (excluding gamma) from paper, assume r_ij is at equilibrium r_ij_b. """
# result = np.power(x_j - x_i, 2) / np.power(r_ij_b, 2)
# return result
#
# def secondDerivateTermOffDiagonal(self, x_i, x_j, y_i, y_j, r_ij, r_ij_b):
# """ @V / @x_i@y_j (excluding gamma) from paper, assume r_ij is at equilibrium r_ij_b. """
# result = ((x_j - x_i)*(y_j - y_i))/ np.power(r_ij_b, 2)
# return result
# using r_ij
# def secondDerivativeTermOnDiagonal(self, x_i, x_j, r_ij, r_ij_b):
# """ @V / @x_i@x_i (excluding gamma) from paper, assume r_ij is at equilibrium r_ij_b. """
# result = np.power(x_j - x_i, 2) / np.power(r_ij, 2)
# return result
#
# def secondDerivateTermOffDiagonal(self, x_i, x_j, y_i, y_j, r_ij, r_ij_b):
# """ @V / @x_i@y_j (excluding gamma) from paper, assume r_ij is at equilibrium r_ij_b. """
# result = ((x_j - x_i)*(y_j - y_i))/ np.power(r_ij, 2)
# return result
def make3By3HessianTerm(self, refChainCalpha, elementcontact, r_ij, r_ij_b):
""" Create a 3 by 3 matrix with the added terms for the hessian diagnonal (excluding multiplication with gamma)"""
x_i = refChainCalpha.getCoords()[0]
y_i = refChainCalpha.getCoords()[1]
z_i = refChainCalpha.getCoords()[2]
x_j = elementcontact.getCoords()[0]
y_j = elementcontact.getCoords()[1]
z_j = elementcontact.getCoords()[2]
deltaTerm = np.zeros((3,3))
deltaTerm[0][0] = self.secondDerivativeTermOnDiagonal(x_i, x_j, r_ij, r_ij_b)
deltaTerm[0][1] = self.secondDerivateTermOffDiagonal(x_i, x_j, y_i, y_j, r_ij, r_ij_b)
deltaTerm[0][2] = self.secondDerivateTermOffDiagonal(x_i, x_j, z_i, z_j, r_ij, r_ij_b)
deltaTerm[1][0] = deltaTerm[0][1]
deltaTerm[1][1] = self.secondDerivativeTermOnDiagonal(y_i, y_j, r_ij, r_ij_b)
deltaTerm[1][2] = self.secondDerivateTermOffDiagonal(y_i, y_j, z_i, z_j, r_ij, r_ij_b)
deltaTerm[2][0] = deltaTerm[0][2]
deltaTerm[2][1] = deltaTerm[1][2]
deltaTerm[2][2] = self.secondDerivativeTermOnDiagonal(z_i, z_j, r_ij, r_ij_b)
return deltaTerm
def add3By3MatrixtoHessian(self, delta3by3, HR, topleftIndex):
""" Add the delta3by3 matrix to its corresponding position of HR, located by
the topleftIndex. """
HR[topleftIndex][topleftIndex] += delta3by3[0][0]
HR[topleftIndex][topleftIndex+1] += delta3by3[0][1]
HR[topleftIndex][topleftIndex+2] += delta3by3[0][2]
HR[topleftIndex+1][topleftIndex] += delta3by3[1][0]
HR[topleftIndex+1][topleftIndex+1] += delta3by3[1][1]
HR[topleftIndex+1][topleftIndex+2] += delta3by3[1][2]
HR[topleftIndex+2][topleftIndex] += delta3by3[2][0]
HR[topleftIndex+2][topleftIndex+1] += delta3by3[2][1]
HR[topleftIndex+2][topleftIndex+2] += delta3by3[2][2]
return HR
def add3By3MatrixtoOffDiagonalHessianMatrixIJ(self, delta3by3, offDiagonalHessianMatrix, topleftIndex, counterpartTopleftIndex):
""" Add the delta3by3 matrix to its corresponding position of HR, located by
the topleftIndex. """
offDiagonalHessianMatrix[topleftIndex][counterpartTopleftIndex] += delta3by3[0][0]
offDiagonalHessianMatrix[topleftIndex][counterpartTopleftIndex+1] += delta3by3[0][1]
offDiagonalHessianMatrix[topleftIndex][counterpartTopleftIndex+2] += delta3by3[0][2]
offDiagonalHessianMatrix[topleftIndex+1][counterpartTopleftIndex] += delta3by3[1][0]
offDiagonalHessianMatrix[topleftIndex+1][counterpartTopleftIndex+1] += delta3by3[1][1]
offDiagonalHessianMatrix[topleftIndex+1][counterpartTopleftIndex+2] += delta3by3[1][2]
offDiagonalHessianMatrix[topleftIndex+2][counterpartTopleftIndex] += delta3by3[2][0]
offDiagonalHessianMatrix[topleftIndex+2][counterpartTopleftIndex+1] += delta3by3[2][1]
offDiagonalHessianMatrix[topleftIndex+2][counterpartTopleftIndex+2] += delta3by3[2][2]
return offDiagonalHessianMatrix
def make3By3OffDiagonalHessianTermIJ(self, refChainCalpha, elementcontact, r_ij, r_ij_b):
""" Create a 3 by 3 matrix with the added terms for the hessian super element off the diagnonal (excluding multiplication with gamma). """
x_i = refChainCalpha.getCoords()[0]
y_i = refChainCalpha.getCoords()[1]
z_i = refChainCalpha.getCoords()[2]
x_j = elementcontact.getCoords()[0]
y_j = elementcontact.getCoords()[1]
z_j = elementcontact.getCoords()[2]
deltaTerm = np.zeros((3,3))
deltaTerm[0][0] = self.secondDerivateTermOffDiagonalAtomsIJ(x_i, x_j, x_i, x_j, r_ij, r_ij_b)
deltaTerm[0][1] = self.secondDerivateTermOffDiagonalAtomsIJ(x_i, x_j, y_i, y_j, r_ij, r_ij_b)
deltaTerm[0][2] = self.secondDerivateTermOffDiagonalAtomsIJ(x_i, x_j, z_i, z_j, r_ij, r_ij_b)
deltaTerm[1][0] = deltaTerm[0][1]
deltaTerm[1][1] = self.secondDerivateTermOffDiagonalAtomsIJ(y_i, y_j, y_i, y_j, r_ij, r_ij_b)
deltaTerm[1][2] = self.secondDerivateTermOffDiagonalAtomsIJ(y_i, y_j, z_i, z_j, r_ij, r_ij_b)
deltaTerm[2][0] = deltaTerm[0][2]
deltaTerm[2][1] = deltaTerm[1][2]
deltaTerm[2][2] = self.secondDerivateTermOffDiagonalAtomsIJ(z_i, z_j, z_i, z_j, r_ij, r_ij_b)
return deltaTerm
    def getCalphaPosition(self, atom1, reference):
        """ Returns the position of atom1 among the calphas of reference. Useful if one
        desires to know the index of an calpha atom in the ANM hessian made from reference calphas.
        Args:
            atom1: the calpha atom that the position is desired to know
            reference: the reference structure where the calpha position is obtained from
        Returns: Positive integer denoting the calpha position
        """
        # Only C-alpha atoms have a slot in the calpha-based hessian.
        assert atom1.getName() == 'CA'
        referenceCalphas = reference.select('calpha')
#         try:
#             idx = zip(referenceCalphas).index((atom1, ))
#             return idx
#         except ValueError:
#             print "Exception in getCalphaPosition. This calpha cannot be located in the structure provided. "
        # Linear scan: return the index of the first calpha that compares equal.
        for idx, referenceCalpha in enumerate(referenceCalphas):
            if atom1 == referenceCalpha:
                return idx
        # NOTE(review): raising StopIteration for "not found" is unusual; if this is
        # ever called from inside a generator, Python 3.7+ (PEP 479) would turn it
        # into a RuntimeError. Callers may catch StopIteration, so left unchanged.
        raise StopIteration("Exception in getCalphaPosition. This calpha cannot be located in the structure provided. ")
def normalizeM(self, M):
""" Normalize a set of modes, which are the columnvectors in M.
Args:
M: set of modes as columnvectors
Returns: normalized (magnitude of each mode is 1) set of modes as columnvectors in M
"""
Mnormed = None
if M.ndim == 1:
modeVector = Vector(M)
return modeVector.getNormed().getArray()
else:
for element in M.T:
modeVector = Vector(element)
modeNormalized = modeVector.getNormed()
if Mnormed is None:
Mnormed = modeNormalized.getArray()
else:
Mnormed = np.column_stack((Mnormed, modeNormalized.getArray()))
return Mnormed
def getNoOfZeroEigvals(self, anm):
""" Return the number of zero eigenvalues, the treshold is defined in the constant ZERO.
Args:
anm: the anm
Returns: number of zero eigenvalues
"""
ZERO = 1e-10
return sum(anm.getEigvals() < ZERO)
def removeInterAtoms(self, arr, interCalphaIndices):
""" Set x,y,z coordinations of atoms indicated by calphasInterIndices to 0,0,0 in arr.
Args:
arr: the array with x,y,z coordinates
interCalphaIndices: calphas with intermolecular contacts
Returns: arr with x,y,z positions of atoms from interCalphaIndices set to 0,0,0
"""
for calphaIndex in interCalphaIndices:
arr[(calphaIndex*3)] = 0.0
arr[(calphaIndex*3+1)] = 0.0
arr[(calphaIndex*3+2)] = 0.0
return arr
| {
"content_hash": "40a75da62c6aac523955957538523e11",
"timestamp": "",
"source": "github",
"line_count": 2169,
"max_line_length": 470,
"avg_line_length": 60.751498386353155,
"alnum_prop": 0.7214919936252562,
"repo_name": "Shen-Lab/cNMA",
"id": "e8f41406cd05cfd60f730812638a362bc9a38230",
"size": "131770",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Software/ANMs.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "779570"
},
{
"name": "Shell",
"bytes": "45785"
}
],
"symlink_target": ""
} |
"""Tests for tfx.orchestration.portable.input_resolution.channel_resolver."""
from absl.testing.absltest import mock
import tensorflow as tf
from tfx.orchestration import mlmd_connection_manager as mlmd_cm
from tfx.orchestration.portable.input_resolution import channel_resolver
from tfx.proto.orchestration import pipeline_pb2
from tfx.utils import test_case_utils
from google.protobuf import text_format
class ChannelResolverTest(test_case_utils.TfxTest, test_case_utils.MlmdMixins):
  """Tests for channel_resolver running against a fake in-memory MLMD store."""
  def setUp(self):
    super().setUp()
    self.init_mlmd()
    # We have to __enter__ Metadata which activates the MetadataStore so that
    # we can use the same fake in-memory MetadataStore instance during the
    # single test.
    self.enter_context(self.mlmd_handle)
  # Test helper: build an InputSpec.Channel proto from its textproto form.
  def make_channel_spec(self, channel_spec_str: str):
    return text_format.Parse(channel_spec_str, pipeline_pb2.InputSpec.Channel())
  # An artifact query without any context queries resolves to nothing.
  def testResolveSingleChannel_NoContextQueries_Empty(self):
    ch = self.make_channel_spec("""
      artifact_query {
        type {
          name: "Examples"
        }
      }
      output_key: "examples"
    """)
    resolved = channel_resolver.resolve_single_channel(
        self.mlmd_handle, ch)
    self.assertEmpty(resolved)
  # Malformed context queries (missing type or name) raise ValueError;
  # a well-formed query for a non-existent context resolves to nothing.
  def testResolveSingleChannel_BadContextQuery(self):
    with self.subTest('No type'):
      ch = self.make_channel_spec("""
        context_queries {
          name {
            field_value {
              string_value: "my-pipeline"
            }
          }
        }
      """)
      with self.assertRaises(ValueError):
        channel_resolver.resolve_single_channel(
            self.mlmd_handle, ch)
    with self.subTest('No type.name'):
      ch = self.make_channel_spec("""
        context_queries {
          type {
            id: 123
          }
          name {
            field_value {
              string_value: "my-pipeline"
            }
          }
        }
      """)
      with self.assertRaises(ValueError):
        channel_resolver.resolve_single_channel(
            self.mlmd_handle, ch)
    with self.subTest('No name'):
      ch = self.make_channel_spec("""
        context_queries {
          type {
            name: "pipeline"
          }
        }
      """)
      with self.assertRaises(ValueError):
        channel_resolver.resolve_single_channel(
            self.mlmd_handle, ch)
    with self.subTest('Non-existential'):
      ch = self.make_channel_spec("""
        context_queries {
          type {
            name: "i dont exist"
          }
          name {
            field_value {
              string_value: "i dont exist"
            }
          }
        }
      """)
      resolved = channel_resolver.resolve_single_channel(
          self.mlmd_handle, ch)
      self.assertEmpty(resolved)
  # Artifacts must be associated with *all* queried contexts (AND semantics):
  # pipeline alone matches both runs, pipeline+run narrows to one, and adding
  # an unrelated context eliminates everything.
  def testResolveSingleChannel_AllContexts(self):
    p = self.put_context('pipeline', 'my-pipeline')
    r1 = self.put_context('pipeline_run', 'run-001')
    r2 = self.put_context('pipeline_run', 'run-002')
    self.put_context('hahaha', 'i-am-a-troll')
    e1 = self.put_artifact('Examples')
    e2 = self.put_artifact('Examples')
    self.put_execution(
        'ExampleGen',
        inputs={},
        outputs={'examples': [e1]},
        contexts=[p, r1])
    self.put_execution(
        'ExampleGen',
        inputs={},
        outputs={'examples': [e2]},
        contexts=[p, r2])
    with self.subTest('Pipeline'):
      ch = self.make_channel_spec("""
        context_queries {
          type {
            name: "pipeline"
          }
          name {
            field_value {
              string_value: "my-pipeline"
            }
          }
        }
        artifact_query {
          type {
            name: "Examples"
          }
        }
      """)
      resolved = channel_resolver.resolve_single_channel(
          self.mlmd_handle, ch)
      self.assertLen(resolved, 2)
      self.assertEqual({a.id for a in resolved}, {e1.id, e2.id})
    with self.subTest('Pipeline + PipelineRun'):
      ch = self.make_channel_spec("""
        context_queries {
          type {
            name: "pipeline"
          }
          name {
            field_value {
              string_value: "my-pipeline"
            }
          }
        }
        context_queries {
          type {
            name: "pipeline_run"
          }
          name {
            field_value {
              string_value: "run-001"
            }
          }
        }
        artifact_query {
          type {
            name: "Examples"
          }
        }
      """)
      resolved = channel_resolver.resolve_single_channel(
          self.mlmd_handle, ch)
      self.assertLen(resolved, 1)
      self.assertEqual(resolved[0].id, e1.id)
    with self.subTest('Pipeline + PipelineRun + Else'):
      ch = self.make_channel_spec("""
        context_queries {
          type {
            name: "pipeline"
          }
          name {
            field_value {
              string_value: "my-pipeline"
            }
          }
        }
        context_queries {
          type {
            name: "pipeline_run"
          }
          name {
            field_value {
              string_value: "run-001"
            }
          }
        }
        context_queries {
          type {
            name: "hahaha"
          }
          name {
            field_value {
              string_value: "i-am-a-troll"
            }
          }
        }
        artifact_query {
          type {
            name: "Examples"
          }
        }
      """)
      resolved = channel_resolver.resolve_single_channel(
          self.mlmd_handle, ch)
      self.assertEmpty(resolved)
  # output_key selects one entry of the execution's output dict; a wrong key
  # matches nothing and omitting the key merges all output artifacts.
  def testResolveSingleChannel_OutputKey(self):
    p = self.put_context('pipeline', 'my-pipeline')
    e1 = self.put_artifact('Examples')
    e2 = self.put_artifact('Examples')
    self.put_execution(
        'CustomExampleGen',
        inputs={},
        outputs={'first': [e1], 'second': [e2]},
        contexts=[p])
    with self.subTest('Correct output_key'):
      ch = self.make_channel_spec("""
        context_queries {
          type {
            name: "pipeline"
          }
          name {
            field_value {
              string_value: "my-pipeline"
            }
          }
        }
        artifact_query {
          type {
            name: "Examples"
          }
        }
        output_key: "first"
      """)
      resolved = channel_resolver.resolve_single_channel(
          self.mlmd_handle, ch)
      self.assertLen(resolved, 1)
      self.assertEqual(resolved[0].id, e1.id)
    with self.subTest('Wrong output_key'):
      ch = self.make_channel_spec("""
        context_queries {
          type {
            name: "pipeline"
          }
          name {
            field_value {
              string_value: "my-pipeline"
            }
          }
        }
        artifact_query {
          type {
            name: "Examples"
          }
        }
        output_key: "third"
      """)
      resolved = channel_resolver.resolve_single_channel(
          self.mlmd_handle, ch)
      self.assertEmpty(resolved)
    with self.subTest('No output_key -> merged'):
      ch = self.make_channel_spec("""
        context_queries {
          type {
            name: "pipeline"
          }
          name {
            field_value {
              string_value: "my-pipeline"
            }
          }
        }
        artifact_query {
          type {
            name: "Examples"
          }
        }
      """)
      resolved = channel_resolver.resolve_single_channel(
          self.mlmd_handle, ch)
      self.assertEqual({a.id for a in resolved}, {e1.id, e2.id})
  # Malformed artifact queries (missing type or type.name) raise ValueError;
  # a query for an unknown artifact type resolves to nothing.
  def testResolveSingleChannel_BadArtifactQuery(self):
    p = self.put_context('pipeline', 'my-pipeline')
    self.put_execution(
        'ExampleGen',
        inputs={},
        outputs={'examples': [self.put_artifact('Examples')]},
        contexts=[p])
    with self.subTest('No type'):
      ch = self.make_channel_spec("""
        context_queries {
          type {
            name: "pipeline"
          }
          name {
            field_value {
              string_value: "my-pipeline"
            }
          }
        }
        artifact_query {}
      """)
      with self.assertRaises(ValueError):
        channel_resolver.resolve_single_channel(
            self.mlmd_handle, ch)
    with self.subTest('No type.name'):
      ch = self.make_channel_spec("""
        context_queries {
          type {
            name: "pipeline"
          }
          name {
            field_value {
              string_value: "my-pipeline"
            }
          }
        }
        artifact_query {
          type {
            id: 123
          }
        }
      """)
      with self.assertRaises(ValueError):
        channel_resolver.resolve_single_channel(
            self.mlmd_handle, ch)
    with self.subTest('Non-existential'):
      ch = self.make_channel_spec("""
        context_queries {
          type {
            name: "pipeline"
          }
          name {
            field_value {
              string_value: "my-pipeline"
            }
          }
        }
        artifact_query {
          type {
            name: "i dont exist"
          }
        }
      """)
      resolved = channel_resolver.resolve_single_channel(
          self.mlmd_handle, ch)
      self.assertEmpty(resolved)
  # A matching context with no executions yields no artifacts.
  def testResolveSingleChannel_NoExecutions(self):
    self.put_context('pipeline', 'my-pipeline')
    ch = self.make_channel_spec("""
      context_queries {
        type {
          name: "pipeline"
        }
        name {
          field_value {
            string_value: "my-pipeline"
          }
        }
      }
      artifact_query {
        type {
          name: "Examples"
        }
      }
    """)
    resolved = channel_resolver.resolve_single_channel(
        self.mlmd_handle, ch)
    self.assertEmpty(resolved)
  # Executions without output artifacts yield no artifacts.
  def testResolveSingleChannel_NoArtifacts(self):
    p = self.put_context('pipeline', 'my-pipeline')
    self.put_execution('Dummy', inputs={}, outputs={}, contexts=[p])
    ch = self.make_channel_spec("""
      context_queries {
        type {
          name: "pipeline"
        }
        name {
          field_value {
            string_value: "my-pipeline"
          }
        }
      }
      artifact_query {
        type {
          name: "Examples"
        }
      }
    """)
    resolved = channel_resolver.resolve_single_channel(
        self.mlmd_handle, ch)
    self.assertEmpty(resolved)
  # With an MLMDConnectionManager, resolution goes through get_primary_handle;
  # the MLMD service handle must never be requested.
  @mock.patch.object(mlmd_cm, 'get_primary_handle')
  def testResolveSingleChannel_ExternalDB(self, mock_get_mlmd_handle):
    p = self.put_context('pipeline', 'pipeline')
    artifact = self.put_artifact('Examples')
    self.put_execution(
        'ExampleGen', inputs={}, outputs={'examples': [artifact]}, contexts=[p])
    ch = self.make_channel_spec("""
      context_queries {
        type {
          name: "pipeline"
        }
        name {
          field_value {
            string_value: "pipeline"
          }
        }
      }
      artifact_query {
        type {
          name: "Examples"
        }
      }
      output_key: "examples"
    """)
    mock_mlmd_connection_manager = mock.create_autospec(
        mlmd_cm.MLMDConnectionManager, instance=True)
    mock_get_mlmd_handle.return_value = self.mlmd_handle
    resolved = channel_resolver.resolve_single_channel(
        mock_mlmd_connection_manager, ch)
    mock_mlmd_connection_manager.get_mlmd_service_handle.assert_not_called()
    self.assertLen(resolved, 1)
  # The same channel listed twice must resolve each artifact only once.
  def testResolveUnionChannels_Deduplication(self):
    p = self.put_context('pipeline', 'my-pipeline')
    e1 = self.put_artifact('Examples')
    self.put_execution(
        'ExampleGen',
        inputs={},
        outputs={'examples': [e1]},
        contexts=[p])
    ch = self.make_channel_spec("""
      context_queries {
        type {
          name: "pipeline"
        }
        name {
          field_value {
            string_value: "my-pipeline"
          }
        }
      }
      artifact_query {
        type {
          name: "Examples"
        }
      }
      output_key: "examples"
    """)
    resolved = channel_resolver.resolve_union_channels(
        self.mlmd_handle, [ch, ch])
    self.assertLen(resolved, 1)
    self.assertEqual(resolved[0].id, e1.id)
# Standard TensorFlow test entry point so this module can be run directly.
if __name__ == '__main__':
  tf.test.main()
| {
"content_hash": "410780c03a6cad092682b6d2d458b382",
"timestamp": "",
"source": "github",
"line_count": 482,
"max_line_length": 80,
"avg_line_length": 25.672199170124482,
"alnum_prop": 0.5113140455794407,
"repo_name": "tensorflow/tfx",
"id": "3197431b8b528ba406893a4aa737a440264c2994",
"size": "12970",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tfx/orchestration/portable/input_resolution/channel_resolver_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "7405"
},
{
"name": "Jupyter Notebook",
"bytes": "38579"
},
{
"name": "Python",
"bytes": "6009050"
},
{
"name": "Shell",
"bytes": "34056"
},
{
"name": "Starlark",
"bytes": "20324"
}
],
"symlink_target": ""
} |
import dis
from ..java import (
Code as JavaCode,
opcodes as JavaOpcodes,
ExceptionInfo as JavaExceptionInfo,
LineNumberTable
)
from .utils import extract_command, find_blocks
from .opcodes import Ref, resolve_jump, jump, Opcode, ALOAD_name, ICONST_val
class IgnoreBlock(Exception):
    """Escape hatch raised to flag a block as ignorable by the transpiler."""
class Block:
    """Abstract container for a sequence of commands extracted from Python
    bytecode that can be transpiled into a Java Code structure.

    Subclasses must implement the name store/load/delete hooks below
    (presumably module- and function-level blocks -- see subclasses elsewhere).
    """
    def __init__(self, parent=None, commands=None, verbosity=0):
        self.parent = parent
        self.commands = commands if commands else []
        self.verbosity = verbosity
        # Local variable bookkeeping used to size the Java locals array.
        self.local_vars = {}
        self.deleted_vars = set()
        self.generator = None
        self.yield_points = []
        # Java opcodes accumulated by add_opcodes() during transpilation.
        self.opcodes = []
        self.try_catches = []
        self.blocks = []
        self.jumps = []
        self.loops = []
        self.jump_targets = {}
        # Jump targets not yet seen; resolved at the end of transpile_commands().
        self.unknown_jump_targets = {}
        self.returns = {
            'annotation': None
        }
        # (obj, attr) pairs to be pointed at the next opcode added.
        self.next_resolve_list = []
        self.next_opcode_starts_line = None
    @property
    def module(self):
        return self.parent
    def store_name(self, name, use_locals):
        raise NotImplementedError('Abstract class `block` cannot be used directly.')
    def store_dynamic(self):
        raise NotImplementedError('Abstract class `block` cannot be used directly.')
    def load_name(self, name, use_locals):
        raise NotImplementedError('Abstract class `block` cannot be used directly.')
    def delete_name(self, name, use_locals):
        raise NotImplementedError('Abstract class `block` cannot be used directly.')
    def extract(self, code):
        """Break a code object into the parts it defines, populating the
        provided block.
        """
        self.code = code
        instructions = list(dis.Bytecode(code))
        if self.verbosity > 1:
            print ('=' * len(str(code)))
            print (code)
            print ('-' * len(str(code)))
            # for i, inst in enumerate(instructions):
            #     print (i, inst.offset, inst.opname, inst.argval)
        blocks = find_blocks(instructions)
        # Commands are extracted back-to-front, then reversed into source order.
        i = len(instructions)
        commands = []
        while i > 0:
            i, command = extract_command(instructions, blocks, i)
            commands.append(command)
        commands.reverse()
        if self.verbosity > 1:
            for command in commands:
                command.dump()
        # Append the extracted commands to any pre-existing ones.
        self.commands.extend(commands)
    @property
    def can_ignore_empty(self):
        # Subclasses may override to allow empty blocks to raise IgnoreBlock.
        return False
    def add_opcodes(self, *opcodes):
        # Add the opcodes to the code list and process them.
        for opcode in opcodes:
            # print("ADD OPCODE", id(opcode), opcode)
            if opcode.process(self):
                # self.opcodes.extend([
                #     JavaOpcodes.LDC_W(str(opcode)),
                #     JavaOpcodes.INVOKESTATIC('org/Python', 'debug', '(Ljava/lang/String;)V')
                # ])
                self.opcodes.append(opcode)
                # If we've flagged a code line change, attach that to the opcode
                if self.next_opcode_starts_line:
                    opcode.starts_line = self.next_opcode_starts_line
                    self.next_opcode_starts_line = None
                # Resolve any references to the "next" opcode.
                for (obj, attr) in self.next_resolve_list:
                    # print("    resolve %s reference on %s %s with %s %s" % (attr, obj, id(obj), opcode, id(opcode)))
                    setattr(obj, attr, opcode)
                self.next_resolve_list = []
    def stack_depth(self):
        "Evaluate the maximum stack depth required by a sequence of Java opcodes"
        depth = 0
        max_depth = 0
        for opcode in self.opcodes:
            depth = depth + opcode.stack_effect
            # print("   ", opcode, depth)
            if depth > max_depth:
                max_depth = depth
        return max_depth
    def materialize(self):
        # Give each extracted command a chance to materialize against this block.
        for cmd in self.commands:
            cmd.materialize(self)
    def transpile_setup(self):
        """Tweak the bytecode generated for this block."""
        pass
    def transpile_teardown(self):
        """Tweak the bytecode generated for this block."""
        pass
    def transpile_commands(self):
        """Create a JavaCode object representing the commands stored in the block
        May raise ``IgnoreBlock`` if the block should be ignored.
        """
        argument_vars = len(self.local_vars)
        # Insert content that needs to occur before the main block commands
        self.transpile_setup()
        # Convert the sequence of commands into instructions.
        # Most of the instructions will be opcodes. However, some will
        # be instructions to add exception blocks, line number references, etc
        for cmd in self.commands:
            cmd.transpile(self)
        # Insert content that needs to occur after the main block commands
        self.transpile_teardown()
        # Install the shortcut jump points for yield statements.
        yield_jumps = []
        for i, yield_point in enumerate(self.yield_points):
            yield_jumps.extend([
                ALOAD_name(self, '<generator>'),
                JavaOpcodes.GETFIELD('org/python/types/Generator', 'yield_point', 'I'),
                ICONST_val(i + 1),
                jump(JavaOpcodes.IF_ICMPEQ(0), self, Ref(self, yield_point), Opcode.YIELD)
            ])
        self.opcodes = yield_jumps + self.opcodes
        # Make sure every local variable slot has been initialized
        # as an object. This is needed because Python allows a variable
        # to be instantiated in a sub-block, and used outside that block.
        # The JVM doesn't, and raises a verify error if you try. By
        # initializing all variables, we can trick the verifier.
        # TODO: Ideally, we'd only initialize the variables that are ambiguous.
        init_vars = []
        for i in range(argument_vars, len(self.local_vars) + len(self.deleted_vars)):
            if i == 0:
                opcode = JavaOpcodes.ASTORE_0()
            elif i == 1:
                opcode = JavaOpcodes.ASTORE_1()
            elif i == 2:
                opcode = JavaOpcodes.ASTORE_2()
            elif i == 3:
                opcode = JavaOpcodes.ASTORE_3()
            else:
                opcode = JavaOpcodes.ASTORE(i)
            init_vars.extend([
                JavaOpcodes.ACONST_NULL(),
                opcode
            ])
        self.opcodes = init_vars + self.opcodes
        # Since we've processed all the Python opcodes, we can now resolve
        # all the unknown jump targets.
        # print('>>>>> Resolve references')
        for target, references in self.unknown_jump_targets.items():
            # print("    resolving %s references to %s" % (len(references), target))
            for opcode, position in references:
                resolve_jump(opcode, self, target, position)
        # If the block has no content in it, and the block allows,
        # ignore this block.
        if self.can_ignore_empty:
            if len(self.opcodes) == 1 and isinstance(self.opcodes[0], JavaOpcodes.RETURN):
                raise IgnoreBlock()
            elif len(self.opcodes) == 2 and isinstance(self.opcodes[1], JavaOpcodes.ARETURN):
                raise IgnoreBlock()
        # Now that we have a complete opcode list, postprocess the list
        # with the known offsets.
        offset = 0
        # print('>>>>> set offsets', self)
        for index, instruction in enumerate(self.opcodes):
            # print("%4d:%4d (0x%x) %s" % (index, offset, id(instruction), instruction))
            instruction.java_index = index
            instruction.java_offset = offset
            offset += len(instruction)
        # print('>>>>> end set offsets')
        # Construct the exception table, updating any
        # end-of-exception GOTO operations with the right opcode.
        # Record a frame range for each one.
        exceptions = []
        for try_catch in self.try_catches:
            # print("TRY CATCH START", id(try_catch), try_catch.start_op, try_catch.start_op.java_offset)
            # print("        TRY END", try_catch.try_end_op, try_catch.try_end_op.java_offset)
            # print("            END", try_catch.end_op, try_catch.end_op.java_offset)
            for handler in try_catch.handlers:
                # print("   HANDLER", handler.start_op, handler.end_op, handler.descriptors)
                if handler.descriptors:
                    for descriptor in handler.descriptors:
                        exceptions.append(JavaExceptionInfo(
                            try_catch.start_op.java_offset,
                            try_catch.try_end_op.java_offset,
                            handler.start_op.java_offset,
                            descriptor
                        ))
                else:
                    exceptions.append(JavaExceptionInfo(
                        try_catch.start_op.java_offset,
                        try_catch.try_end_op.java_offset,
                        handler.start_op.java_offset,
                        'org/python/exceptions/BaseException'
                    ))
            # Add definitions for the finally block
            if try_catch.finally_handler:
                # print("   FINALLY", try_catch.finally_handler.start_op.java_offset, try_catch.finally_handler.end_op.java_offset)
                exceptions.append(JavaExceptionInfo(
                    try_catch.start_op.java_offset,
                    try_catch.try_end_op.java_offset,
                    try_catch.finally_handler.start_op.java_offset,
                    None
                ))
                for handler in try_catch.handlers:
                    # print("     h", handler.descriptors)
                    exceptions.append(JavaExceptionInfo(
                        handler.start_op.java_offset,
                        handler.catch_end_op.java_offset,
                        try_catch.finally_handler.start_op.java_offset,
                        None
                    ))
        # Update any jump instructions
        # print ("There are %s jumps" % len(self.jumps))
        for jmp in self.jumps:
            # print ("JUMP", hex(id(jmp)), jmp, jmp.java_offset, jmp.jump_op, hex(id(jmp.jump_op)))
            try:
                jmp.offset = jmp.jump_op.java_offset - jmp.java_offset
            except AttributeError:
                jmp.offset = jmp.jump_op.start_op.java_offset - jmp.java_offset
        # Construct a line number table from
        # the source code reference data on opcodes.
        line_numbers = []
        for opcode in self.opcodes:
            if opcode.starts_line is not None:
                line_numbers.append((opcode.java_offset, opcode.starts_line))
        line_number_table = LineNumberTable(line_numbers)
        return JavaCode(
            max_stack=self.stack_depth() + len(exceptions),
            max_locals=len(self.local_vars) + len(self.deleted_vars),
            code=self.opcodes,
            exceptions=exceptions,
            attributes=[
                line_number_table
            ]
        )
    def transpile(self):
        """Transpile the block; convenience alias for transpile_commands()."""
        return self.transpile_commands()
| {
"content_hash": "6d7119488533243654b41b1d22278c4c",
"timestamp": "",
"source": "github",
"line_count": 303,
"max_line_length": 130,
"avg_line_length": 37.62376237623762,
"alnum_prop": 0.5628947368421052,
"repo_name": "glasnt/voc",
"id": "6ed2103fa6abef6c99f173de0b5153a4052d49e3",
"size": "11400",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "voc/python/blocks.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Java",
"bytes": "498452"
},
{
"name": "Python",
"bytes": "826200"
}
],
"symlink_target": ""
} |
"""
AMF metadata (inside Flash video, FLV file) parser.
Documentation:
- flashticle: Python project to read Flash (formats SWF, FLV and AMF)
http://undefined.org/python/#flashticle
Author: Victor Stinner
Creation date: 4 November 2006
"""
from lib.hachoir_core.field import (FieldSet, ParserError,
UInt8, UInt16, UInt32, PascalString16, Float64)
from lib.hachoir_core.tools import timestampUNIX
# Each parse* helper below is a generator yielding the hachoir fields that make
# up one AMF value; AMFObject dispatches to them through its tag_info table.
def parseUTF8(parent):
    # AMF string: 16-bit length-prefixed UTF-8 payload.
    yield PascalString16(parent, "value", charset="UTF-8")
def parseDouble(parent):
    # AMF number: one IEEE-754 double.
    yield Float64(parent, "value")
def parseBool(parent):
    # AMF boolean: a single byte.
    yield UInt8(parent, "value")
def parseArray(parent):
    # Strict array: 32-bit element count followed by that many AMF values.
    yield UInt32(parent, "count")
    for index in xrange(parent["count"].value):
        yield AMFObject(parent, "item[]")
def parseObjectAttributes(parent):
    # Object body: key/value attributes until an attribute with an empty key.
    while True:
        item = Attribute(parent, "attr[]")
        yield item
        if item["key"].value == "":
            break
def parseMixedArray(parent):
    # Mixed (ECMA) array: count plus key/value items, terminated by an empty key.
    yield UInt32(parent, "count")
    for index in xrange(parent["count"].value + 1):
        item = Attribute(parent, "item[]")
        yield item
        if not item['key'].value:
            break
def parseDate(parent):
    yield Float64(parent, "timestamp_microsec")
    yield UInt16(parent, "timestamp_sec")
def parseNothing(parent):
    # End-of-object marker: contributes no fields. Raising StopIteration here
    # terminates field generation under Python 2; NOTE(review): on Python 3.7+
    # (PEP 479) this would surface as a RuntimeError in the consuming generator.
    raise StopIteration()
class AMFObject(FieldSet):
    """One AMF-encoded value: a one-byte type code followed by a
    type-specific payload parsed by the matching parse* helper."""
    # Type code for the AMF Date value; singled out because it also installs
    # a custom createValue (see __init__).
    CODE_DATE = 11
    tag_info = {
        # http://osflash.org/amf/astypes
        0: (parseDouble, "Double"),
        1: (parseBool, "Boolean"),
        2: (parseUTF8, "UTF-8 string"),
        3: (parseObjectAttributes, "Object attributes"),
        #MOVIECLIP = '\x04',
        #NULL = '\x05',
        #UNDEFINED = '\x06',
        #REFERENCE = '\x07',
        8: (parseMixedArray, "Mixed array"),
        9: (parseNothing, "End of object"),
        10: (parseArray, "Array"),
        CODE_DATE: (parseDate, "Date"),
        #LONGUTF8 = '\x0c',
        #UNSUPPORTED = '\x0d',
        ## Server-to-client only
        #RECORDSET = '\x0e',
        #XML = '\x0f',
        #TYPEDOBJECT = '\x10',
    }
    def __init__(self, *args, **kw):
        FieldSet.__init__(self, *args, **kw)
        # Dispatch on the already-parsed "type" byte; unknown codes abort parsing.
        code = self["type"].value
        try:
            self.parser, desc = self.tag_info[code]
            if code == self.CODE_DATE:
                self.createValue = self.createValueDate
        except KeyError:
            raise ParserError("AMF: Unable to parse type %s" % code)
    def createFields(self):
        # One type byte, then whatever fields the type-specific parser yields.
        yield UInt8(self, "type")
        for field in self.parser(self):
            yield field
    def createValueDate(self):
        # NOTE(review): timestamp_microsec appears to hold milliseconds since the
        # epoch (scaled by 0.001 to seconds) and timestamp_sec a timezone offset
        # applied in minutes -- confirm against the AMF Date specification.
        value = (self["timestamp_microsec"].value * 0.001) \
            - (self["timestamp_sec"].value * 60)
        return timestampUNIX(value)
class Attribute(AMFObject):
    """An AMF key/value pair: a UTF-8 key followed by a typed AMF value."""
    def __init__(self, *args):
        AMFObject.__init__(self, *args)
        # presumably resets any description computed by the base class so that
        # createDescription() below takes effect -- TODO confirm against hachoir
        self._description = None
    def createFields(self):
        # Unlike AMFObject, the key precedes the type byte and its payload.
        yield PascalString16(self, "key", charset="UTF-8")
        yield UInt8(self, "type")
        for field in self.parser(self):
            yield field
    def createDescription(self):
        return 'Attribute "%s"' % self["key"].value
| {
"content_hash": "4243f8c90efc61fa9f8e500327d350f1",
"timestamp": "",
"source": "github",
"line_count": 110,
"max_line_length": 70,
"avg_line_length": 28.536363636363635,
"alnum_prop": 0.5931825422108952,
"repo_name": "Branlala/docker-sickbeardfr",
"id": "963f3207be40182e39c6f1eae0743539797b70c1",
"size": "3139",
"binary": false,
"copies": "90",
"ref": "refs/heads/master",
"path": "sickbeard/lib/hachoir_parser/video/amf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "83278"
},
{
"name": "CSS",
"bytes": "155616"
},
{
"name": "JavaScript",
"bytes": "248414"
},
{
"name": "Python",
"bytes": "8146521"
},
{
"name": "Ruby",
"bytes": "2461"
},
{
"name": "Shell",
"bytes": "8791"
}
],
"symlink_target": ""
} |
from tiddlyweb.config import config
from tiddlywebwiki import init as twinit
from tiddlywebplugins.utils import get_store
from tiddlyweb.model.bag import Bag
from tiddlyweb.model.tiddler import Tiddler
from tiddlyweb.serializer import Serializer
def setup_module(module):
    """Initialize tiddlywebwiki and seed the store shared by the tests.

    Creates bag 'test' containing tiddler 'monkey' (whose text carries the
    unique marker string the tests grep for), then binds the store and the
    lazy/full serializers onto the test module as globals.
    """
    twinit(config)
    store = get_store(config)
    bag = Bag('test')
    store.put(bag)
    environ = {'tiddlyweb.config': config}
    tiddler = Tiddler('monkey', 'test')
    tiddler.text = 'I am uniquely999'
    store.put(tiddler)
    module.store = store
    # lserializer uses the lazy serialization; fserializer the full wiki one.
    module.lserializer = Serializer('tiddlywebplugins.lazy.serialization',
            environ)
    module.fserializer = Serializer('tiddlywebwiki.serialization',
            environ)
def test_lazy():
    """The lazy serializer omits tiddler text; the full one includes it."""
    def bag_tiddlers():
        # Fresh generator of fully loaded tiddlers from the 'test' bag.
        bag = store.get(Bag('test'))
        return (store.get(t) for t in store.list_bag_tiddlers(bag))

    lazy_output = ''.join(lserializer.list_tiddlers(bag_tiddlers()))
    assert 'I am uniquely999' not in lazy_output

    full_output = ''.join(fserializer.list_tiddlers(bag_tiddlers()))
    assert 'I am uniquely999' in full_output, full_output
| {
"content_hash": "c2fa879814cdaeda6368644a582b0330",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 98,
"avg_line_length": 31.027027027027028,
"alnum_prop": 0.7116724738675958,
"repo_name": "cdent/lazytiddlywebwiki",
"id": "451dc6b7f4e10d2e4219175da4f597cb574c9361",
"size": "1149",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/test_serialize.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "6674"
}
],
"symlink_target": ""
} |
from uuid import uuid4
from itertools import cycle
# Neutron Agent superclass
from rpcdaemon.lib.neutronagent import NeutronAgent, NeutronAgentException
# RPC superclass
from rpcdaemon.lib.rpc import RPC
# Logger wrapper
from rpcdaemon.lib.logger import Logger
# Config parser
from rpcdaemon.lib.config import Config
# Specific L3 agent handler
class L3Agent(NeutronAgent, RPC):
    """Monitors Neutron L3 agents over RPC and rebalances routers.

    When an agent is reported down, its routers are unscheduled; any
    routers left unbound are then distributed round-robin across the
    remaining live agents.
    """

    def __init__(self, connection, config, handler=None):
        """Set up config, logging, the Neutron agent base, and RPC wiring.

        connection: messaging connection handed to the RPC base class.
        config:     global Config object; only its [L3Agent] section is read.
        handler:    optional log handler for this plugin's logger.
        """
        # Grab a copy of our config section
        self.config = config.section('L3Agent')

        # grab relevant settings
        queue_expire = int(self.config.get('queue_expire', 60))

        # Initialize logger
        self.logger = Logger(
            name='l3agent',
            level=self.config['loglevel'],
            handler=handler
        )

        # Parse agent config
        self.qconfig = Config(self.config['conffile'], 'AGENT')

        # Initialize super
        NeutronAgent.__init__(self, self.qconfig, self.config, 'L3 agent')

        # Initialize RPC bits
        RPC.__init__(
            self,
            connection,
            exopts={
                'name': self.event_queue(),
                'durable': False,
                'type': 'topic'
            },
            qopts={
                # Unique queue name per daemon instance.
                'name': 'rpcdaemon-l3_%s' % uuid4(),
                'auto_delete': True,
                'durable': False,
                'routing_key': 'q-plugin',
                'queue_arguments': {
                    # Broker-side expiry is in milliseconds.
                    'x-expires': queue_expire * 1000,
                }
            }
        )

    # L3 specific handler
    def handle(self, agent, state):
        """React to an agent state change and rebalance routers.

        agent: dict describing the L3 agent (id, host, agent_type, ...).
        state: truthy if the agent is up, falsy if it went down.
        """
        # All alive agents, keyed by agent id.
        targets = dict([(target['id'], target)
                        for target in self.agents.values()
                        if target['alive']])

        # If agent is down, remove routers first
        if not state:
            routerlist = self.retryable(
                lambda: self.client.list_routers_on_l3_agent(agent['id']))['routers']
            for router in routerlist:
                self.logger.info(
                    'Removing router %s from %s/%s [%s]' % (
                        router['id'],
                        agent['host'],
                        agent['agent_type'],
                        str(agent['id'])
                    )
                )
                # The lambda is invoked immediately by retryable() within
                # this iteration, so capturing `router` here is safe.
                self.retryable(
                    lambda: self.client.remove_router_from_l3_agent(agent['id'],
                                                                    router['id']))

        self.logger.debug('Targets: %s' % targets.keys())

        # get all routers
        routerlist = self.retryable(
            lambda: self.client.list_routers())['routers']

        # Get routers on agents (iterating `targets` yields agent ids).
        binds = dict([(router['id'], router)
                      for target in targets
                      for router in
                      self.retryable(lambda: self.client.list_routers_on_l3_agent(target))['routers']])
        self.logger.debug('Bound Routers: %s' % binds.keys())

        # And routers not on agents
        routers = dict([(router['id'], router)
                        for router in routerlist
                        if not router['id'] in binds])
        self.logger.debug('Free Routers: %s' % routers.keys())

        # Map free routers to agents, round-robin over live agent ids.
        mapping = zip(routers, cycle(targets))
        self.logger.debug('Mapping: %s' % mapping)

        # Any agents alive?
        if targets:
            # Schedule routers to them
            for router, target in mapping:
                self.logger.info(
                    'Scheduling %s [%s] -> %s/%s [%s].' % (
                        routers[router]['name'],
                        str(router),
                        targets[target]['host'],
                        targets[target]['agent_type'],
                        str(target)
                    )
                )
                # this can cause errors if multiple rpcdaemons are running
                msg = 'Router %s already added to agent %s' % (router, target)
                # Single attempt, no delay: a failure here is expected when
                # another daemon already scheduled the router, so just warn.
                self.retryable(
                    lambda: self.client.add_router_to_l3_agent(
                        target,
                        {'router_id': router}),
                    retries=1, delay=0,
                    on_fail=lambda x:self.logger.warn(msg))

        # No agents, any routers?
        elif routers:
            self.logger.warn('No agents found to schedule routers to.')
| {
"content_hash": "39d562c3fed2fb0964b78fd730900994",
"timestamp": "",
"source": "github",
"line_count": 136,
"max_line_length": 103,
"avg_line_length": 32.94117647058823,
"alnum_prop": 0.4870535714285714,
"repo_name": "rcbops/rpcdaemon-buildpackage",
"id": "1c40c7795441294a3bd652eabafe254e598f4060",
"size": "4490",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rpcdaemon/plugins/l3agent.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "30875"
}
],
"symlink_target": ""
} |
"""Tests for fft operations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import itertools
from absl.testing import parameterized
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.eager import context
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_spectral_ops
from tensorflow.python.ops import gradient_checker_v2
from tensorflow.python.ops import math_ops
from tensorflow.python.ops.signal import fft_ops
from tensorflow.python.platform import test
VALID_FFT_RANKS = (1, 2, 3)
# TODO(rjryan): Investigate precision issues. We should be able to achieve
# better tolerances, at least for the complex128 tests.
class BaseFFTOpsTest(test.TestCase):
  """Shared NumPy-comparison and gradient-check helpers for FFT op tests.

  Subclasses supply the op hooks `_tf_fft`/`_tf_ifft` and the NumPy
  references `_np_fft`/`_np_ifft`.
  """

  def _compare(self, x, rank, fft_length=None, use_placeholder=False,
               rtol=1e-4, atol=1e-4):
    """Compare both the forward and inverse transform of `x` with NumPy."""
    self._compare_forward(x, rank, fft_length, use_placeholder, rtol, atol)
    self._compare_backward(x, rank, fft_length, use_placeholder, rtol, atol)

  def _compare_forward(self, x, rank, fft_length=None, use_placeholder=False,
                       rtol=1e-4, atol=1e-4):
    """Check the TF forward transform of `x` against the NumPy reference."""
    if test.is_built_with_rocm() and x.dtype in (np.complex64, np.complex128):
      # skipTest raises unittest.SkipTest; the `return` that used to follow
      # it was unreachable and has been removed.
      self.skipTest("Complex datatype not yet supported in ROCm.")
    x_np = self._np_fft(x, rank, fft_length)
    if use_placeholder:
      x_ph = array_ops.placeholder(dtype=dtypes.as_dtype(x.dtype))
      x_tf = self._tf_fft(x_ph, rank, fft_length, feed_dict={x_ph: x})
    else:
      x_tf = self._tf_fft(x, rank, fft_length)
    self.assertAllClose(x_np, x_tf, rtol=rtol, atol=atol)

  def _compare_backward(self, x, rank, fft_length=None, use_placeholder=False,
                        rtol=1e-4, atol=1e-4):
    """Check the TF inverse transform of `x` against the NumPy reference."""
    if test.is_built_with_rocm() and x.dtype in (np.complex64, np.complex128):
      self.skipTest("Complex datatype not yet supported in ROCm.")
    x_np = self._np_ifft(x, rank, fft_length)
    if use_placeholder:
      x_ph = array_ops.placeholder(dtype=dtypes.as_dtype(x.dtype))
      x_tf = self._tf_ifft(x_ph, rank, fft_length, feed_dict={x_ph: x})
    else:
      x_tf = self._tf_ifft(x, rank, fft_length)
    self.assertAllClose(x_np, x_tf, rtol=rtol, atol=atol)

  def _check_memory_fail(self, x, rank):
    """Run the forward op with a tiny GPU memory fraction to force failure."""
    config = config_pb2.ConfigProto()
    config.gpu_options.per_process_gpu_memory_fraction = 1e-2
    with self.cached_session(config=config, force_gpu=True):
      self._tf_fft(x, rank, fft_length=None)

  @test.disable_with_predicate(
      pred=test.is_built_with_rocm,
      skip_message="Complex datatype not yet supported in ROCm.")
  def _check_grad_complex(self, func, x, y, result_is_complex=True,
                          rtol=1e-2, atol=1e-2):
    """Numerically verify gradients of `func` at complex(x, y).

    `result_is_complex` is unused; it is kept for call-site compatibility.
    """
    if test.is_built_with_rocm():
      # Redundant with the decorator above; kept as a safety net. The
      # unreachable `return` after skipTest has been removed.
      self.skipTest("Complex datatype not yet supported in ROCm.")
    with self.cached_session():

      def f(inx, iny):
        inx.set_shape(x.shape)
        iny.set_shape(y.shape)
        # func is a forward or inverse, real or complex, batched or unbatched
        # FFT function with a complex input.
        z = func(math_ops.complex(inx, iny))
        # loss = sum(|z|^2)
        loss = math_ops.reduce_sum(math_ops.real(z * math_ops.conj(z)))
        return loss

      ((x_jacob_t, y_jacob_t), (x_jacob_n, y_jacob_n)) = (
          gradient_checker_v2.compute_gradient(f, [x, y], delta=1e-2))
    self.assertAllClose(x_jacob_t, x_jacob_n, rtol=rtol, atol=atol)
    self.assertAllClose(y_jacob_t, y_jacob_n, rtol=rtol, atol=atol)

  def _check_grad_real(self, func, x, rtol=1e-2, atol=1e-2):
    """Numerically verify gradients of `func` at the real input `x`."""

    def f(inx):
      inx.set_shape(x.shape)
      # func is a forward RFFT function (batched or unbatched).
      z = func(inx)
      # loss = sum(|z|^2)
      loss = math_ops.reduce_sum(math_ops.real(z * math_ops.conj(z)))
      return loss

    (x_jacob_t,), (x_jacob_n,) = gradient_checker_v2.compute_gradient(
        f, [x], delta=1e-2)
    self.assertAllClose(x_jacob_t, x_jacob_n, rtol=rtol, atol=atol)
@test_util.run_all_in_graph_and_eager_modes
class FFTOpsTest(BaseFFTOpsTest, parameterized.TestCase):
  """Tests for the complex ops fft/fft2d/fft3d and their inverses."""

  def _tf_fft(self, x, rank, fft_length=None, feed_dict=None):
    # fft_length unused for complex FFTs.
    with self.cached_session() as sess:
      return sess.run(self._tf_fft_for_rank(rank)(x), feed_dict=feed_dict)

  def _tf_ifft(self, x, rank, fft_length=None, feed_dict=None):
    # fft_length unused for complex FFTs.
    with self.cached_session() as sess:
      return sess.run(self._tf_ifft_for_rank(rank)(x), feed_dict=feed_dict)

  def _np_fft(self, x, rank, fft_length=None):
    """NumPy reference: forward FFT over the innermost `rank` axes.

    Uses np.fft.fftn, the documented N-D entry point; the previous code
    called np.fft.fft2 with 1 or 3 axes, which only worked because fft2
    forwards its `axes` argument to fftn unchecked.
    """
    if rank == 1:
      return np.fft.fftn(x, s=fft_length, axes=(-1,))
    elif rank == 2:
      return np.fft.fftn(x, s=fft_length, axes=(-2, -1))
    elif rank == 3:
      return np.fft.fftn(x, s=fft_length, axes=(-3, -2, -1))
    else:
      raise ValueError("invalid rank")

  def _np_ifft(self, x, rank, fft_length=None):
    """NumPy reference: inverse FFT over the innermost `rank` axes."""
    if rank == 1:
      return np.fft.ifftn(x, s=fft_length, axes=(-1,))
    elif rank == 2:
      return np.fft.ifftn(x, s=fft_length, axes=(-2, -1))
    elif rank == 3:
      return np.fft.ifftn(x, s=fft_length, axes=(-3, -2, -1))
    else:
      raise ValueError("invalid rank")

  def _tf_fft_for_rank(self, rank):
    """Return the TF forward op for `rank` dimensions."""
    if rank == 1:
      return fft_ops.fft
    elif rank == 2:
      return fft_ops.fft2d
    elif rank == 3:
      return fft_ops.fft3d
    else:
      raise ValueError("invalid rank")

  def _tf_ifft_for_rank(self, rank):
    """Return the TF inverse op for `rank` dimensions."""
    if rank == 1:
      return fft_ops.ifft
    elif rank == 2:
      return fft_ops.ifft2d
    elif rank == 3:
      return fft_ops.ifft3d
    else:
      raise ValueError("invalid rank")

  @parameterized.parameters(itertools.product(
      VALID_FFT_RANKS, range(3), (np.complex64, np.complex128)))
  def test_empty(self, rank, extra_dims, np_type):
    """Zero-size inputs keep their shape through fft and ifft."""
    dims = rank + extra_dims
    x = np.zeros((0,) * dims).astype(np_type)
    self.assertEqual(x.shape, self._tf_fft(x, rank).shape)
    self.assertEqual(x.shape, self._tf_ifft(x, rank).shape)

  @parameterized.parameters(
      itertools.product(VALID_FFT_RANKS, range(3),
                        (np.complex64, np.complex128)))
  @test.disable_with_predicate(
      pred=test.is_built_with_rocm,
      skip_message="Complex datatype not yet supported in ROCm.")
  def test_basic(self, rank, extra_dims, np_type):
    """Small deterministic inputs match NumPy within dtype tolerance."""
    dims = rank + extra_dims
    tol = 1e-4 if np_type == np.complex64 else 1e-8
    self._compare(
        np.mod(np.arange(np.power(4, dims)), 10).reshape(
            (4,) * dims).astype(np_type), rank, rtol=tol, atol=tol)

  @parameterized.parameters(itertools.product(
      (1,), range(3), (np.complex64, np.complex128)))
  def test_large_batch(self, rank, extra_dims, np_type):
    """Rank-1 FFT over a large (128^dims) batch matches NumPy."""
    dims = rank + extra_dims
    tol = 1e-4 if np_type == np.complex64 else 5e-5
    self._compare(
        np.mod(np.arange(np.power(128, dims)), 10).reshape(
            (128,) * dims).astype(np_type), rank, rtol=tol, atol=tol)

  # TODO(yangzihao): Disable before we can figure out a way to
  # properly test memory fail for large batch fft.
  # def test_large_batch_memory_fail(self):
  #   if test.is_gpu_available(cuda_only=True):
  #     rank = 1
  #     for dims in xrange(rank, rank + 3):
  #       self._check_memory_fail(
  #           np.mod(np.arange(np.power(128, dims)), 64).reshape(
  #               (128,) * dims).astype(np.complex64), rank)

  @parameterized.parameters(itertools.product(
      VALID_FFT_RANKS, range(3), (np.complex64, np.complex128)))
  def test_placeholder(self, rank, extra_dims, np_type):
    """Graph-mode FFT through an unshaped placeholder matches NumPy."""
    if context.executing_eagerly():
      return
    tol = 1e-4 if np_type == np.complex64 else 1e-8
    dims = rank + extra_dims
    self._compare(
        np.mod(np.arange(np.power(4, dims)), 10).reshape(
            (4,) * dims).astype(np_type),
        rank, use_placeholder=True, rtol=tol, atol=tol)

  @parameterized.parameters(itertools.product(
      VALID_FFT_RANKS, range(3), (np.complex64, np.complex128)))
  def test_random(self, rank, extra_dims, np_type):
    """Random complex inputs match NumPy within dtype tolerance."""
    tol = 1e-4 if np_type == np.complex64 else 5e-6
    dims = rank + extra_dims

    def gen(shape):
      n = np.prod(shape)
      re = np.random.uniform(size=n)
      im = np.random.uniform(size=n)
      return (re + im * 1j).reshape(shape)

    self._compare(gen((4,) * dims).astype(np_type), rank,
                  rtol=tol, atol=tol)

  @parameterized.parameters(itertools.product(
      VALID_FFT_RANKS,
      # Check a variety of sizes (power-of-2, odd, etc.)
      [128, 256, 512, 1024, 127, 255, 511, 1023],
      (np.complex64, np.complex128)))
  def test_random_1d(self, rank, dim, np_type):
    """1-D transforms of assorted lengths; looser tolerance without a GPU."""
    has_gpu = test.is_gpu_available(cuda_only=True)
    tol = {(np.complex64, True): 1e-4,
           (np.complex64, False): 1e-2,
           (np.complex128, True): 1e-4,
           (np.complex128, False): 1e-2}[(np_type, has_gpu)]

    def gen(shape):
      n = np.prod(shape)
      re = np.random.uniform(size=n)
      im = np.random.uniform(size=n)
      return (re + im * 1j).reshape(shape)

    self._compare(gen((dim,)).astype(np_type), 1, rtol=tol, atol=tol)

  def test_error(self):
    """Inputs of insufficient rank are rejected with a shape error."""
    # TODO(rjryan): Fix this test under Eager.
    if context.executing_eagerly():
      return
    for rank in VALID_FFT_RANKS:
      for dims in xrange(0, rank):
        x = np.zeros((1,) * dims).astype(np.complex64)
        with self.assertRaisesWithPredicateMatch(
            ValueError, "Shape must be .*rank {}.*".format(rank)):
          self._tf_fft(x, rank)
        with self.assertRaisesWithPredicateMatch(
            ValueError, "Shape must be .*rank {}.*".format(rank)):
          self._tf_ifft(x, rank)

  @parameterized.parameters(itertools.product(
      VALID_FFT_RANKS, range(2), (np.float32, np.float64)))
  def test_grad_simple(self, rank, extra_dims, np_type):
    """Gradient check at a constant input."""
    tol = 1e-4 if np_type == np.float32 else 1e-10
    dims = rank + extra_dims
    re = np.ones(shape=(4,) * dims, dtype=np_type) / 10.0
    im = np.zeros(shape=(4,) * dims, dtype=np_type)
    self._check_grad_complex(self._tf_fft_for_rank(rank), re, im,
                             rtol=tol, atol=tol)
    self._check_grad_complex(self._tf_ifft_for_rank(rank), re, im,
                             rtol=tol, atol=tol)

  @parameterized.parameters(itertools.product(
      VALID_FFT_RANKS, range(2), (np.float32, np.float64)))
  def test_grad_random(self, rank, extra_dims, np_type):
    """Gradient check at random inputs in [-1, 1)."""
    dims = rank + extra_dims
    tol = 1e-2 if np_type == np.float32 else 1e-10
    re = np.random.rand(*((3,) * dims)).astype(np_type) * 2 - 1
    im = np.random.rand(*((3,) * dims)).astype(np_type) * 2 - 1
    self._check_grad_complex(self._tf_fft_for_rank(rank), re, im,
                             rtol=tol, atol=tol)
    self._check_grad_complex(self._tf_ifft_for_rank(rank), re, im,
                             rtol=tol, atol=tol)
@test_util.run_all_in_graph_and_eager_modes
class RFFTOpsTest(BaseFFTOpsTest, parameterized.TestCase):
  """Tests for the real ops rfft/rfft2d/rfft3d and their inverses.

  NOTE(review): the `test_fft_lenth_*` method names carry a historical
  "lenth" typo; they are kept unchanged so existing test filters match.
  """

  def _tf_fft(self, x, rank, fft_length=None, feed_dict=None):
    with self.cached_session() as sess:
      return sess.run(
          self._tf_fft_for_rank(rank)(x, fft_length), feed_dict=feed_dict)

  def _tf_ifft(self, x, rank, fft_length=None, feed_dict=None):
    with self.cached_session() as sess:
      return sess.run(
          self._tf_ifft_for_rank(rank)(x, fft_length), feed_dict=feed_dict)

  def _np_fft(self, x, rank, fft_length=None):
    """NumPy reference: real-input FFT over the innermost `rank` axes.

    Uses np.fft.rfftn, the documented N-D entry point; the previous code
    called np.fft.rfft2 with 1 or 3 axes, which only worked because rfft2
    forwards its `axes` argument to rfftn unchecked.
    """
    if rank == 1:
      return np.fft.rfftn(x, s=fft_length, axes=(-1,))
    elif rank == 2:
      return np.fft.rfftn(x, s=fft_length, axes=(-2, -1))
    elif rank == 3:
      return np.fft.rfftn(x, s=fft_length, axes=(-3, -2, -1))
    else:
      raise ValueError("invalid rank")

  def _np_ifft(self, x, rank, fft_length=None):
    """NumPy reference: complex-to-real inverse over the innermost axes."""
    if rank == 1:
      return np.fft.irfftn(x, s=fft_length, axes=(-1,))
    elif rank == 2:
      return np.fft.irfftn(x, s=fft_length, axes=(-2, -1))
    elif rank == 3:
      return np.fft.irfftn(x, s=fft_length, axes=(-3, -2, -1))
    else:
      raise ValueError("invalid rank")

  def _tf_fft_for_rank(self, rank):
    """Return the TF forward real-input op for `rank` dimensions."""
    if rank == 1:
      return fft_ops.rfft
    elif rank == 2:
      return fft_ops.rfft2d
    elif rank == 3:
      return fft_ops.rfft3d
    else:
      raise ValueError("invalid rank")

  def _tf_ifft_for_rank(self, rank):
    """Return the TF inverse real-output op for `rank` dimensions."""
    if rank == 1:
      return fft_ops.irfft
    elif rank == 2:
      return fft_ops.irfft2d
    elif rank == 3:
      return fft_ops.irfft3d
    else:
      raise ValueError("invalid rank")

  @parameterized.parameters(itertools.product(
      VALID_FFT_RANKS, range(3), (np.float32, np.float64)))
  def test_empty(self, rank, extra_dims, np_rtype):
    """Zero-size inputs keep their shape through rfft and irfft."""
    np_ctype = np.complex64 if np_rtype == np.float32 else np.complex128
    dims = rank + extra_dims
    x = np.zeros((0,) * dims).astype(np_rtype)
    self.assertEqual(x.shape, self._tf_fft(x, rank).shape)
    x = np.zeros((0,) * dims).astype(np_ctype)
    self.assertEqual(x.shape, self._tf_ifft(x, rank).shape)

  @parameterized.parameters(itertools.product(
      VALID_FFT_RANKS, range(3), (5, 6), (np.float32, np.float64)))
  def test_basic(self, rank, extra_dims, size, np_rtype):
    """Deterministic inputs match NumPy, for odd and even sizes."""
    np_ctype = np.complex64 if np_rtype == np.float32 else np.complex128
    tol = 1e-4 if np_rtype == np.float32 else 5e-5
    dims = rank + extra_dims
    # The innermost axis of an RFFT output has size // 2 + 1 elements.
    inner_dim = size // 2 + 1
    r2c = np.mod(np.arange(np.power(size, dims)), 10).reshape(
        (size,) * dims)
    self._compare_forward(r2c.astype(np_rtype), rank, (size,) * rank,
                          rtol=tol, atol=tol)
    c2r = np.mod(np.arange(np.power(size, dims - 1) * inner_dim),
                 10).reshape((size,) * (dims - 1) + (inner_dim,))
    self._compare_backward(
        c2r.astype(np_ctype), rank, (size,) * rank,
        rtol=tol, atol=tol)

  @parameterized.parameters(itertools.product(
      (1,), range(3), (64, 128), (np.float32, np.float64)))
  def test_large_batch(self, rank, extra_dims, size, np_rtype):
    """Rank-1 RFFT/IRFFT over large batches match NumPy."""
    np_ctype = np.complex64 if np_rtype == np.float32 else np.complex128
    tol = 1e-4 if np_rtype == np.float32 else 1e-5
    dims = rank + extra_dims
    inner_dim = size // 2 + 1
    r2c = np.mod(np.arange(np.power(size, dims)), 10).reshape(
        (size,) * dims)
    self._compare_forward(r2c.astype(np_rtype), rank, (size,) * rank,
                          rtol=tol, atol=tol)
    c2r = np.mod(np.arange(np.power(size, dims - 1) * inner_dim),
                 10).reshape((size,) * (dims - 1) + (inner_dim,))
    self._compare_backward(c2r.astype(np_ctype), rank, (size,) * rank,
                           rtol=tol, atol=tol)

  @parameterized.parameters(itertools.product(
      VALID_FFT_RANKS, range(3), (5, 6), (np.float32, np.float64)))
  def test_placeholder(self, rank, extra_dims, size, np_rtype):
    """Graph-mode RFFT/IRFFT through unshaped placeholders match NumPy."""
    if context.executing_eagerly():
      return
    np_ctype = np.complex64 if np_rtype == np.float32 else np.complex128
    tol = 1e-4 if np_rtype == np.float32 else 1e-8
    dims = rank + extra_dims
    inner_dim = size // 2 + 1
    r2c = np.mod(np.arange(np.power(size, dims)), 10).reshape(
        (size,) * dims)
    self._compare_forward(
        r2c.astype(np_rtype),
        rank, (size,) * rank,
        use_placeholder=True,
        rtol=tol, atol=tol)
    c2r = np.mod(np.arange(np.power(size, dims - 1) * inner_dim),
                 10).reshape((size,) * (dims - 1) + (inner_dim,))
    self._compare_backward(
        c2r.astype(np_ctype),
        rank, (size,) * rank,
        use_placeholder=True,
        rtol=tol, atol=tol)

  @parameterized.parameters(itertools.product(
      VALID_FFT_RANKS, range(3), (5, 6), (np.float32, np.float64)))
  def test_fft_lenth_truncate(self, rank, extra_dims, size, np_rtype):
    """Test truncation (FFT size < dimensions)."""
    np_ctype = np.complex64 if np_rtype == np.float32 else np.complex128
    tol = 1e-4 if np_rtype == np.float32 else 8e-5
    dims = rank + extra_dims
    inner_dim = size // 2 + 1
    r2c = np.mod(np.arange(np.power(size, dims)), 10).reshape(
        (size,) * dims)
    c2r = np.mod(np.arange(np.power(size, dims - 1) * inner_dim),
                 10).reshape((size,) * (dims - 1) + (inner_dim,))
    fft_length = (size - 2,) * rank
    self._compare_forward(r2c.astype(np_rtype), rank, fft_length,
                          rtol=tol, atol=tol)
    self._compare_backward(c2r.astype(np_ctype), rank, fft_length,
                           rtol=tol, atol=tol)
    # Confirm it works with unknown shapes as well.
    if not context.executing_eagerly():
      self._compare_forward(
          r2c.astype(np_rtype),
          rank,
          fft_length,
          use_placeholder=True,
          rtol=tol, atol=tol)
      self._compare_backward(
          c2r.astype(np_ctype),
          rank,
          fft_length,
          use_placeholder=True,
          rtol=tol, atol=tol)

  @parameterized.parameters(itertools.product(
      VALID_FFT_RANKS, range(3), (5, 6), (np.float32, np.float64)))
  def test_fft_lenth_pad(self, rank, extra_dims, size, np_rtype):
    """Test padding (FFT size > dimensions)."""
    np_ctype = np.complex64 if np_rtype == np.float32 else np.complex128
    tol = 1e-4 if np_rtype == np.float32 else 8e-5
    dims = rank + extra_dims
    inner_dim = size // 2 + 1
    r2c = np.mod(np.arange(np.power(size, dims)), 10).reshape(
        (size,) * dims)
    c2r = np.mod(np.arange(np.power(size, dims - 1) * inner_dim),
                 10).reshape((size,) * (dims - 1) + (inner_dim,))
    fft_length = (size + 2,) * rank
    self._compare_forward(r2c.astype(np_rtype), rank, fft_length,
                          rtol=tol, atol=tol)
    self._compare_backward(c2r.astype(np_ctype), rank, fft_length,
                           rtol=tol, atol=tol)
    # Confirm it works with unknown shapes as well.
    if not context.executing_eagerly():
      self._compare_forward(
          r2c.astype(np_rtype),
          rank,
          fft_length,
          use_placeholder=True,
          rtol=tol, atol=tol)
      self._compare_backward(
          c2r.astype(np_ctype),
          rank,
          fft_length,
          use_placeholder=True,
          rtol=tol, atol=tol)

  @parameterized.parameters(itertools.product(
      VALID_FFT_RANKS, range(3), (5, 6), (np.float32, np.float64)))
  def test_random(self, rank, extra_dims, size, np_rtype):
    """Random real/complex inputs match NumPy within dtype tolerance."""

    def gen_real(shape):
      n = np.prod(shape)
      re = np.random.uniform(size=n)
      ret = re.reshape(shape)
      return ret

    def gen_complex(shape):
      n = np.prod(shape)
      re = np.random.uniform(size=n)
      im = np.random.uniform(size=n)
      ret = (re + im * 1j).reshape(shape)
      return ret

    np_ctype = np.complex64 if np_rtype == np.float32 else np.complex128
    tol = 1e-4 if np_rtype == np.float32 else 1e-5
    dims = rank + extra_dims
    inner_dim = size // 2 + 1
    self._compare_forward(gen_real((size,) * dims).astype(np_rtype),
                          rank, (size,) * rank,
                          rtol=tol, atol=tol)
    complex_dims = (size,) * (dims - 1) + (inner_dim,)
    self._compare_backward(
        gen_complex(complex_dims).astype(np_ctype),
        rank, (size,) * rank,
        rtol=tol, atol=tol)

  def test_error(self):
    """Rank/fft_length validation and kernel-level length checks."""
    # TODO(rjryan): Fix this test under Eager.
    if context.executing_eagerly():
      return
    for rank in VALID_FFT_RANKS:
      for dims in xrange(0, rank):
        x = np.zeros((1,) * dims).astype(np.complex64)
        with self.assertRaisesWithPredicateMatch(
            ValueError, "Shape .* must have rank at least {}".format(rank)):
          self._tf_fft(x, rank)
        with self.assertRaisesWithPredicateMatch(
            ValueError, "Shape .* must have rank at least {}".format(rank)):
          self._tf_ifft(x, rank)
      for dims in xrange(rank, rank + 2):
        x = np.zeros((1,) * rank)

        # Test non-rank-1 fft_length produces an error.
        fft_length = np.zeros((1, 1)).astype(np.int32)
        with self.assertRaisesWithPredicateMatch(ValueError,
                                                 "Shape .* must have rank 1"):
          self._tf_fft(x, rank, fft_length)
        with self.assertRaisesWithPredicateMatch(ValueError,
                                                 "Shape .* must have rank 1"):
          self._tf_ifft(x, rank, fft_length)

        # Test wrong fft_length length.
        fft_length = np.zeros((rank + 1,)).astype(np.int32)
        with self.assertRaisesWithPredicateMatch(
            ValueError, "Dimension must be .*but is {}.*".format(rank + 1)):
          self._tf_fft(x, rank, fft_length)
        with self.assertRaisesWithPredicateMatch(
            ValueError, "Dimension must be .*but is {}.*".format(rank + 1)):
          self._tf_ifft(x, rank, fft_length)

      # Test that calling the kernel directly without padding to fft_length
      # produces an error.
      rffts_for_rank = {
          1: [gen_spectral_ops.rfft, gen_spectral_ops.irfft],
          2: [gen_spectral_ops.rfft2d, gen_spectral_ops.irfft2d],
          3: [gen_spectral_ops.rfft3d, gen_spectral_ops.irfft3d]
      }
      rfft_fn, irfft_fn = rffts_for_rank[rank]
      with self.assertRaisesWithPredicateMatch(
          errors.InvalidArgumentError,
          "Input dimension .* must have length of at least 6 but got: 5"):
        x = np.zeros((5,) * rank).astype(np.float32)
        fft_length = [6] * rank
        with self.cached_session():
          self.evaluate(rfft_fn(x, fft_length))
      with self.assertRaisesWithPredicateMatch(
          errors.InvalidArgumentError,
          "Input dimension .* must have length of at least .* but got: 3"):
        x = np.zeros((3,) * rank).astype(np.complex64)
        fft_length = [6] * rank
        with self.cached_session():
          self.evaluate(irfft_fn(x, fft_length))

  @parameterized.parameters(itertools.product(
      VALID_FFT_RANKS, range(2), (5, 6), (np.float32, np.float64)))
  def test_grad_simple(self, rank, extra_dims, size, np_rtype):
    """Gradient check at constant inputs."""
    # rfft3d/irfft3d do not have gradients yet.
    if rank == 3:
      return
    dims = rank + extra_dims
    tol = 1e-3 if np_rtype == np.float32 else 1e-10
    re = np.ones(shape=(size,) * dims, dtype=np_rtype)
    im = -np.ones(shape=(size,) * dims, dtype=np_rtype)
    self._check_grad_real(self._tf_fft_for_rank(rank), re,
                          rtol=tol, atol=tol)
    self._check_grad_complex(
        self._tf_ifft_for_rank(rank), re, im, result_is_complex=False,
        rtol=tol, atol=tol)

  @parameterized.parameters(itertools.product(
      VALID_FFT_RANKS, range(2), (5, 6), (np.float32, np.float64)))
  def test_grad_random(self, rank, extra_dims, size, np_rtype):
    """Gradient check at random inputs in [-1, 1)."""
    # rfft3d/irfft3d do not have gradients yet.
    if rank == 3:
      return
    dims = rank + extra_dims
    tol = 1e-2 if np_rtype == np.float32 else 1e-10
    re = np.random.rand(*((size,) * dims)).astype(np_rtype) * 2 - 1
    im = np.random.rand(*((size,) * dims)).astype(np_rtype) * 2 - 1
    self._check_grad_real(self._tf_fft_for_rank(rank), re,
                          rtol=tol, atol=tol)
    self._check_grad_complex(
        self._tf_ifft_for_rank(rank), re, im, result_is_complex=False,
        rtol=tol, atol=tol)
@test_util.run_all_in_graph_and_eager_modes
class FFTShiftTest(test.TestCase, parameterized.TestCase):
  """Tests for fft_ops.fftshift/ifftshift against hand-written and NumPy
  expectations."""

  def test_definition(self):
    """fftshift centers the zero frequency; ifftshift undoes it."""
    with self.session():
      odd = [0, 1, 2, 3, 4, -4, -3, -2, -1]
      odd_shifted = [-4, -3, -2, -1, 0, 1, 2, 3, 4]
      self.assertAllEqual(fft_ops.fftshift(odd), odd_shifted)
      self.assertAllEqual(fft_ops.ifftshift(odd_shifted), odd)
      even = [0, 1, 2, 3, 4, -5, -4, -3, -2, -1]
      even_shifted = [-5, -4, -3, -2, -1, 0, 1, 2, 3, 4]
      self.assertAllEqual(fft_ops.fftshift(even), even_shifted)
      self.assertAllEqual(fft_ops.ifftshift(even_shifted), even)

  def test_axes_keyword(self):
    """The axes argument selects which dimensions get shifted."""
    with self.session():
      grid = [[0, 1, 2], [3, 4, -4], [-3, -2, -1]]
      grid_shifted = [[-1, -3, -2], [2, 0, 1], [-4, 3, 4]]
      self.assertAllEqual(fft_ops.fftshift(grid, axes=(0, 1)), grid_shifted)
      self.assertAllEqual(
          fft_ops.fftshift(grid, axes=0),
          fft_ops.fftshift(grid, axes=(0,)))
      self.assertAllEqual(fft_ops.ifftshift(grid_shifted, axes=(0, 1)), grid)
      self.assertAllEqual(
          fft_ops.ifftshift(grid_shifted, axes=0),
          fft_ops.ifftshift(grid_shifted, axes=(0,)))
      self.assertAllEqual(fft_ops.fftshift(grid), grid_shifted)
      self.assertAllEqual(fft_ops.ifftshift(grid_shifted), grid)

  def test_numpy_compatibility(self):
    """Results agree with np.fft.fftshift/ifftshift on the same inputs."""
    with self.session():
      odd = [0, 1, 2, 3, 4, -4, -3, -2, -1]
      odd_shifted = [-4, -3, -2, -1, 0, 1, 2, 3, 4]
      self.assertAllEqual(fft_ops.fftshift(odd), np.fft.fftshift(odd))
      self.assertAllEqual(fft_ops.ifftshift(odd_shifted),
                          np.fft.ifftshift(odd_shifted))
      even = [0, 1, 2, 3, 4, -5, -4, -3, -2, -1]
      even_shifted = [-5, -4, -3, -2, -1, 0, 1, 2, 3, 4]
      self.assertAllEqual(fft_ops.fftshift(even), np.fft.fftshift(even))
      self.assertAllEqual(fft_ops.ifftshift(even_shifted),
                          np.fft.ifftshift(even_shifted))
      grid = [[0, 1, 2], [3, 4, -4], [-3, -2, -1]]
      grid_shifted = [[-1, -3, -2], [2, 0, 1], [-4, 3, 4]]
      self.assertAllEqual(
          fft_ops.fftshift(grid, axes=(0, 1)),
          np.fft.fftshift(grid, axes=(0, 1)))
      self.assertAllEqual(
          fft_ops.ifftshift(grid_shifted, axes=(0, 1)),
          np.fft.ifftshift(grid_shifted, axes=(0, 1)))

  @parameterized.parameters(None, 1, ([1, 2],))
  def test_placeholder(self, axes):
    """Graph-mode shifts with a fully unknown input shape match NumPy."""
    if context.executing_eagerly():
      return
    ph = array_ops.placeholder(shape=[None, None, None], dtype="float32")
    shift_op = fft_ops.fftshift(ph, axes=axes)
    unshift_op = fft_ops.ifftshift(ph, axes=axes)
    data = np.random.rand(16, 256, 256)
    with self.session() as sess:
      shift_res, unshift_res = sess.run(
          [shift_op, unshift_op],
          feed_dict={ph: data})
    self.assertAllClose(shift_res, np.fft.fftshift(data, axes=axes))
    self.assertAllClose(unshift_res, np.fft.ifftshift(data, axes=axes))

  def test_negative_axes(self):
    """Negative axis indices behave like their positive counterparts."""
    with self.session():
      grid = [[0, 1, 2], [3, 4, -4], [-3, -2, -1]]
      grid_shifted = [[-1, -3, -2], [2, 0, 1], [-4, 3, 4]]
      self.assertAllEqual(fft_ops.fftshift(grid, axes=(0, -1)), grid_shifted)
      self.assertAllEqual(fft_ops.ifftshift(grid_shifted, axes=(0, -1)), grid)
      self.assertAllEqual(
          fft_ops.fftshift(grid, axes=-1), fft_ops.fftshift(grid, axes=(1,)))
      self.assertAllEqual(
          fft_ops.ifftshift(grid_shifted, axes=-1),
          fft_ops.ifftshift(grid_shifted, axes=(1,)))
if __name__ == "__main__":
  # Run all test cases in this module under the TensorFlow test runner.
  test.main()
| {
"content_hash": "dd0a662f4fd7bbf913229647206081e1",
"timestamp": "",
"source": "github",
"line_count": 670,
"max_line_length": 79,
"avg_line_length": 39.82089552238806,
"alnum_prop": 0.6026986506746627,
"repo_name": "petewarden/tensorflow",
"id": "1730287bd9bced66aa80ff7352ae1b4e6aceba34",
"size": "27369",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tensorflow/python/kernel_tests/signal/fft_ops_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "31796"
},
{
"name": "Batchfile",
"bytes": "55269"
},
{
"name": "C",
"bytes": "895451"
},
{
"name": "C#",
"bytes": "8562"
},
{
"name": "C++",
"bytes": "82100676"
},
{
"name": "CMake",
"bytes": "6500"
},
{
"name": "Dockerfile",
"bytes": "112853"
},
{
"name": "Go",
"bytes": "1867248"
},
{
"name": "HTML",
"bytes": "4686483"
},
{
"name": "Java",
"bytes": "984477"
},
{
"name": "Jupyter Notebook",
"bytes": "550862"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "MLIR",
"bytes": "1982867"
},
{
"name": "Makefile",
"bytes": "66496"
},
{
"name": "Objective-C",
"bytes": "116558"
},
{
"name": "Objective-C++",
"bytes": "317461"
},
{
"name": "PHP",
"bytes": "4236"
},
{
"name": "Pascal",
"bytes": "318"
},
{
"name": "Pawn",
"bytes": "20422"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "Python",
"bytes": "37425809"
},
{
"name": "RobotFramework",
"bytes": "1779"
},
{
"name": "Roff",
"bytes": "2705"
},
{
"name": "Ruby",
"bytes": "7464"
},
{
"name": "SWIG",
"bytes": "8992"
},
{
"name": "Shell",
"bytes": "700106"
},
{
"name": "Smarty",
"bytes": "35725"
},
{
"name": "Starlark",
"bytes": "3613406"
},
{
"name": "Swift",
"bytes": "62814"
},
{
"name": "Vim Snippet",
"bytes": "58"
}
],
"symlink_target": ""
} |
from typing import Iterator, TYPE_CHECKING
from cirq import ops
from cirq.interop.quirk.cells.cell import CellMaker
if TYPE_CHECKING:
import cirq
def generate_all_scalar_cell_makers() -> Iterator[CellMaker]:
    """Yield a CellMaker for each of Quirk's fixed global-phase gates."""
    # (Quirk identifier, global phase factor) in the established order.
    phase_table = (
        ("NeGate", -1),
        ("i", 1j),
        ("-i", -1j),
        ("√i", 1j**0.5),
        ("√-i", (-1j) ** 0.5),
    )
    for identifier, phase in phase_table:
        yield _scalar(identifier, ops.global_phase_operation(phase))
def _scalar(identifier: str, operation: 'cirq.Operation') -> CellMaker:
    """Build a size-1 CellMaker whose maker always yields `operation`."""

    def build(_):
        # The cell arguments are irrelevant for a constant scalar gate.
        return operation

    return CellMaker(identifier, size=1, maker=build)
| {
"content_hash": "40f54a8c20c91c7e529f4c5a4eab102c",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 71,
"avg_line_length": 34.63157894736842,
"alnum_prop": 0.7051671732522796,
"repo_name": "quantumlib/Cirq",
"id": "0236b354d78f4de698bd23eea8ffddf550306231",
"size": "1246",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cirq-core/cirq/interop/quirk/cells/scalar_cells.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "4616"
},
{
"name": "HTML",
"bytes": "262"
},
{
"name": "JavaScript",
"bytes": "660"
},
{
"name": "Jupyter Notebook",
"bytes": "672675"
},
{
"name": "Makefile",
"bytes": "634"
},
{
"name": "Python",
"bytes": "8643017"
},
{
"name": "Scilab",
"bytes": "735"
},
{
"name": "Shell",
"bytes": "64230"
},
{
"name": "TypeScript",
"bytes": "91766"
}
],
"symlink_target": ""
} |
import os
def find_database(dbname, start=None):
    """Search upward from *start* for a file named *dbname*.

    Walks from *start* (default: the current working directory) through
    each parent directory until the filesystem root, returning the full
    path of the first match, or None if no ancestor contains the file.

    The *start* parameter is a backward-compatible generalization:
    omitting it preserves the original cwd-based behaviour.
    """
    cwd = os.path.abspath(start) if start is not None else os.getcwd()
    while True:
        dbpath = os.path.join(cwd, dbname)
        if os.path.isfile(dbpath):
            return dbpath
        nwd = os.path.dirname(cwd)
        # dirname() is a fixed point at the filesystem root, which
        # terminates the walk.
        if nwd == cwd or not os.path.isdir(nwd):
            return None
        cwd = nwd
# Locate the wiki database by searching upward from the current directory.
database = find_database('taqi.db')
# The wiki root is the directory that contains the database file.
# NOTE(review): if no database is found, find_database returns None and
# os.path.dirname(None) raises — confirm whether that crash is intended.
wiki_root = os.path.dirname(database)
print wiki_root
| {
"content_hash": "628297d40d8400f1f8a2a0d36831ed45",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 50,
"avg_line_length": 19.130434782608695,
"alnum_prop": 0.5363636363636364,
"repo_name": "lionicsheriff/tagi",
"id": "f6ef2ca2bbe53351fc97f5b8729c7065d979b726",
"size": "440",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tagi/tagi.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Emacs Lisp",
"bytes": "1773"
},
{
"name": "Python",
"bytes": "14703"
}
],
"symlink_target": ""
} |
import threading
from time import sleep
class SMPPKeepAlive(threading.Thread):
    """Daemon thread that periodically pings an SMPP bind to keep it open.

    Calls ``obj._keepalive()`` every ``interval`` seconds forever; the
    daemon flag lets the interpreter exit without joining this thread.
    """

    def __init__(self, obj, interval=30):
        """obj: session exposing _keepalive(); interval: seconds between
        pings (previously hard-coded to 30)."""
        self.obj = obj
        self.interval = interval
        threading.Thread.__init__(self)
        self.daemon = True

    def run(self):
        # Loop forever; thread dies with the process because daemon=True.
        while True:
            self.obj._keepalive()
            sleep(self.interval)
| {
"content_hash": "469f5d3a4fb5609e935c4b4bd76dc2fe",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 47,
"avg_line_length": 26.307692307692307,
"alnum_prop": 0.48830409356725146,
"repo_name": "henrythor/SMPP.py",
"id": "8a46abc308fa09e406d578de03f44239142dcb3b",
"size": "342",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "smpp/keepalive.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "39684"
}
],
"symlink_target": ""
} |
import base64
import os
import re
import shutil
import subprocess
import tempfile
import urllib
from contextlib import contextmanager
from datetime import timedelta
from typing import (
Any,
Callable,
Collection,
Dict,
Iterable,
Iterator,
List,
Mapping,
Optional,
Sequence,
Set,
Tuple,
Union,
cast,
)
from unittest import TestResult, mock
import lxml.html
import orjson
from django.apps import apps
from django.conf import settings
from django.core.mail import EmailMessage
from django.db import connection
from django.db.migrations.executor import MigrationExecutor
from django.db.migrations.state import StateApps
from django.db.utils import IntegrityError
from django.http import HttpRequest, HttpResponse
from django.http.response import StreamingHttpResponse
from django.test import TestCase
from django.test.client import BOUNDARY, MULTIPART_CONTENT, encode_multipart
from django.test.testcases import SerializeMixin
from django.urls import resolve
from django.utils import translation
from django.utils.module_loading import import_string
from django.utils.timezone import now as timezone_now
from fakeldap import MockLDAP
from two_factor.models import PhoneDevice
from corporate.models import Customer, CustomerPlan, LicenseLedger
from zerver.decorator import do_two_factor_login
from zerver.lib.actions import (
bulk_add_subscriptions,
bulk_remove_subscriptions,
check_send_message,
check_send_stream_message,
do_set_realm_property,
gather_subscriptions,
)
from zerver.lib.cache import bounce_key_prefix_for_testing
from zerver.lib.initial_password import initial_password
from zerver.lib.notification_data import UserMessageNotificationsData
from zerver.lib.rate_limiter import bounce_redis_key_prefix_for_testing
from zerver.lib.sessions import get_session_dict_user
from zerver.lib.stream_subscription import get_stream_subscriptions_for_user
from zerver.lib.streams import (
create_stream_if_needed,
get_default_value_for_history_public_to_subscribers,
)
from zerver.lib.test_console_output import (
ExtraConsoleOutputFinder,
ExtraConsoleOutputInTestException,
tee_stderr_and_find_extra_console_output,
tee_stdout_and_find_extra_console_output,
)
from zerver.lib.test_helpers import find_key_by_email, instrument_url
from zerver.lib.user_groups import get_system_user_group_for_user
from zerver.lib.users import get_api_key
from zerver.lib.validator import check_string
from zerver.lib.webhooks.common import (
check_send_webhook_message,
get_fixture_http_headers,
standardize_headers,
)
from zerver.models import (
Client,
Message,
Reaction,
Realm,
RealmEmoji,
Recipient,
Stream,
Subscription,
UserGroupMembership,
UserMessage,
UserProfile,
UserStatus,
clear_supported_auth_backends_cache,
flush_per_request_caches,
get_display_recipient,
get_realm,
get_realm_stream,
get_stream,
get_system_bot,
get_user,
get_user_by_delivery_email,
)
from zerver.openapi.openapi import validate_against_openapi_schema, validate_request
from zerver.tornado.event_queue import clear_client_event_queues_for_testing
if settings.ZILENCER_ENABLED:
from zilencer.models import get_remote_server_by_uuid
class EmptyResponseError(Exception):
    """Error for an HTTP response that unexpectedly has no content
    (raised by callers outside this chunk -- confirm at the call sites)."""

    pass


class UploadSerializeMixin(SerializeMixin):
    """
    We cannot use override_settings to change the upload directory
    because settings.LOCAL_UPLOADS_DIR is used in URL patterns and URLs
    are compiled only once. Otherwise using a different upload directory
    for conflicting test cases would have provided better performance
    while providing the required isolation.
    """

    # Every test class using this mixin serializes on one shared lock file.
    lockfile = "var/upload_lock"

    @classmethod
    def setUpClass(cls: Any, *args: Any, **kwargs: Any) -> None:
        # SerializeMixin locks on this file; create it the first time through.
        if not os.path.exists(cls.lockfile):
            with open(cls.lockfile, "w"):  # nocoverage - rare locking case
                pass
        super().setUpClass(*args, **kwargs)
# We could be more specific about which arguments are bool (Django's
# follow and secure, our intentionally_undocumented) and which are str
# (everything else), but explaining that to mypy is tedious.
ClientArg = Union[str, bool]
class ZulipTestCase(TestCase):
# Ensure that the test system just shows us diffs
maxDiff: Optional[int] = None
    def setUp(self) -> None:
        super().setUp()
        # Cache of API keys (keyed by identifier); filled lazily by encode_uuid().
        self.API_KEYS: Dict[str, str] = {}
        # Prefix cache and redis keys with the test id so cached state cannot
        # leak between tests.
        test_name = self.id()
        bounce_key_prefix_for_testing(test_name)
        bounce_redis_key_prefix_for_testing(test_name)

    def tearDown(self) -> None:
        super().tearDown()
        # Important: we need to clear event queues to avoid leaking data to future tests.
        clear_client_event_queues_for_testing()
        clear_supported_auth_backends_cache()
        flush_per_request_caches()
        # Tests may have switched the active translation; restore the default.
        translation.activate(settings.LANGUAGE_CODE)
        # Clean up after using fakeldap in LDAP tests:
        if hasattr(self, "mock_ldap") and hasattr(self, "mock_initialize"):
            if self.mock_ldap is not None:
                self.mock_ldap.reset()
            self.mock_initialize.stop()
    def run(self, result: Optional[TestResult] = None) -> Optional[TestResult]:  # nocoverage
        """Run the test, optionally banning any console output it produces."""
        if not settings.BAN_CONSOLE_OUTPUT:
            return super().run(result)
        extra_output_finder = ExtraConsoleOutputFinder()
        # Tee both stderr and stdout through the finder so any unexpected
        # console output during the test is captured for the error below.
        with tee_stderr_and_find_extra_console_output(
            extra_output_finder
        ), tee_stdout_and_find_extra_console_output(extra_output_finder):
            test_result = super().run(result)
        if extra_output_finder.full_extra_output:
            exception_message = f"""
---- UNEXPECTED CONSOLE OUTPUT DETECTED ----
To ensure that we never miss important error output/warnings,
we require test-backend to have clean console output.
This message usually is triggered by forgotten debugging print()
statements or new logging statements. For the latter, you can
use `with self.assertLogs()` to capture and verify the log output;
use `git grep assertLogs` to see dozens of correct examples.
You should be able to quickly reproduce this failure with:
test-backend --ban-console-output {self.id()}
Output:
{extra_output_finder.full_extra_output.decode(errors="replace")}
--------------------------------------------
"""
            raise ExtraConsoleOutputInTestException(exception_message)
        return test_result
"""
WRAPPER_COMMENT:
We wrap calls to self.client.{patch,put,get,post,delete} for various
reasons. Some of this has to do with fixing encodings before calling
into the Django code. Some of this has to do with providing a future
path for instrumentation. Some of it's just consistency.
The linter will prevent direct calls to self.client.foo, so the wrapper
functions have to fake out the linter by using a local variable called
django_client to fool the regex.
"""
DEFAULT_SUBDOMAIN = "zulip"
TOKENIZED_NOREPLY_REGEX = settings.TOKENIZED_NOREPLY_EMAIL_ADDRESS.format(token="[a-z0-9_]{24}")
def set_http_headers(self, kwargs: Dict[str, ClientArg]) -> None:
if "subdomain" in kwargs:
assert isinstance(kwargs["subdomain"], str)
kwargs["HTTP_HOST"] = Realm.host_for_subdomain(kwargs["subdomain"])
del kwargs["subdomain"]
elif "HTTP_HOST" not in kwargs:
kwargs["HTTP_HOST"] = Realm.host_for_subdomain(self.DEFAULT_SUBDOMAIN)
# set User-Agent
if "HTTP_AUTHORIZATION" in kwargs:
# An API request; use mobile as the default user agent
default_user_agent = "ZulipMobile/26.22.145 (iOS 10.3.1)"
else:
# A web app request; use a browser User-Agent string.
default_user_agent = (
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
+ "AppleWebKit/537.36 (KHTML, like Gecko) "
+ "Chrome/79.0.3945.130 Safari/537.36"
)
if kwargs.get("skip_user_agent"):
# Provide a way to disable setting User-Agent if desired.
assert "HTTP_USER_AGENT" not in kwargs
del kwargs["skip_user_agent"]
elif "HTTP_USER_AGENT" not in kwargs:
kwargs["HTTP_USER_AGENT"] = default_user_agent
def extract_api_suffix_url(self, url: str) -> Tuple[str, Dict[str, List[str]]]:
"""
Function that extracts the URL after `/api/v1` or `/json` and also
returns the query data in the URL, if there is any.
"""
url_split = url.split("?")
data = {}
if len(url_split) == 2:
data = urllib.parse.parse_qs(url_split[1])
url = url_split[0]
url = url.replace("/json/", "/").replace("/api/v1/", "/")
return (url, data)
    def validate_api_response_openapi(
        self,
        url: str,
        method: str,
        result: HttpResponse,
        data: Union[str, bytes, Dict[str, Any]],
        kwargs: Dict[str, ClientArg],
        intentionally_undocumented: bool = False,
    ) -> None:
        """
        Validates all API responses received by this test against Zulip's API documentation,
        declared in zerver/openapi/zulip.yaml. This powerful test lets us use Zulip's
        extensive test coverage of corner cases in the API to ensure that we've properly
        documented those corner cases.
        """
        # Only API-style URLs are covered by the OpenAPI spec.
        if not (url.startswith("/json") or url.startswith("/api/v1")):
            return
        try:
            content = orjson.loads(result.content)
        except orjson.JSONDecodeError:
            # Non-JSON responses (e.g. HTML pages) cannot be validated here.
            return
        json_url = False
        if url.startswith("/json"):
            json_url = True
        url, query_data = self.extract_api_suffix_url(url)
        if len(query_data) != 0:
            # In some cases the query parameters are defined in the URL itself. In such cases
            # The `data` argument of our function is not used. Hence get `data` argument
            # from url.
            data = query_data
        response_validated = validate_against_openapi_schema(
            content, url, method, str(result.status_code)
        )
        if response_validated:
            # Only string-valued kwargs are real HTTP headers; boolean ones
            # are our own pseudo-kwargs.
            http_headers = {k: v for k, v in kwargs.items() if isinstance(v, str)}
            validate_request(
                url,
                method,
                data,
                http_headers,
                json_url,
                str(result.status_code),
                intentionally_undocumented=intentionally_undocumented,
            )
    @instrument_url
    def client_patch(
        self,
        url: str,
        info: Dict[str, Any] = {},
        intentionally_undocumented: bool = False,
        **kwargs: ClientArg,
    ) -> HttpResponse:
        """
        We need to urlencode, since Django's function won't do it for us.
        """
        encoded = urllib.parse.urlencode(info)
        django_client = self.client  # see WRAPPER_COMMENT
        self.set_http_headers(kwargs)
        result = django_client.patch(url, encoded, **kwargs)
        self.validate_api_response_openapi(
            url,
            "patch",
            result,
            info,
            kwargs,
            intentionally_undocumented=intentionally_undocumented,
        )
        return result

    @instrument_url
    def client_patch_multipart(
        self, url: str, info: Dict[str, Any] = {}, **kwargs: ClientArg
    ) -> HttpResponse:
        """
        Use this for patch requests that have file uploads or
        that need some sort of multi-part content. In the future
        Django's test client may become a bit more flexible,
        so we can hopefully eliminate this. (When you post
        with the Django test client, it deals with MULTIPART_CONTENT
        automatically, but not patch.)
        """
        encoded = encode_multipart(BOUNDARY, info)
        django_client = self.client  # see WRAPPER_COMMENT
        self.set_http_headers(kwargs)
        result = django_client.patch(url, encoded, content_type=MULTIPART_CONTENT, **kwargs)
        self.validate_api_response_openapi(url, "patch", result, info, kwargs)
        return result

    def json_patch(
        self, url: str, payload: Dict[str, Any] = {}, **kwargs: ClientArg
    ) -> HttpResponse:
        """PATCH with a JSON-encoded body (no OpenAPI validation)."""
        data = orjson.dumps(payload)
        django_client = self.client  # see WRAPPER_COMMENT
        self.set_http_headers(kwargs)
        return django_client.patch(url, data=data, content_type="application/json", **kwargs)

    @instrument_url
    def client_put(self, url: str, info: Dict[str, Any] = {}, **kwargs: ClientArg) -> HttpResponse:
        """PUT with a form-encoded body (no OpenAPI validation)."""
        encoded = urllib.parse.urlencode(info)
        django_client = self.client  # see WRAPPER_COMMENT
        self.set_http_headers(kwargs)
        return django_client.put(url, encoded, **kwargs)

    def json_put(self, url: str, payload: Dict[str, Any] = {}, **kwargs: ClientArg) -> HttpResponse:
        """PUT with a JSON-encoded body (no OpenAPI validation)."""
        data = orjson.dumps(payload)
        django_client = self.client  # see WRAPPER_COMMENT
        self.set_http_headers(kwargs)
        return django_client.put(url, data=data, content_type="application/json", **kwargs)

    @instrument_url
    def client_delete(
        self, url: str, info: Dict[str, Any] = {}, **kwargs: ClientArg
    ) -> HttpResponse:
        """DELETE with a form-encoded body, validated against OpenAPI."""
        encoded = urllib.parse.urlencode(info)
        django_client = self.client  # see WRAPPER_COMMENT
        self.set_http_headers(kwargs)
        result = django_client.delete(url, encoded, **kwargs)
        self.validate_api_response_openapi(url, "delete", result, info, kwargs)
        return result

    @instrument_url
    def client_options(
        self, url: str, info: Dict[str, Any] = {}, **kwargs: ClientArg
    ) -> HttpResponse:
        """OPTIONS request via the Django test client."""
        encoded = urllib.parse.urlencode(info)
        django_client = self.client  # see WRAPPER_COMMENT
        self.set_http_headers(kwargs)
        return django_client.options(url, encoded, **kwargs)

    @instrument_url
    def client_head(self, url: str, info: Dict[str, Any] = {}, **kwargs: ClientArg) -> HttpResponse:
        """HEAD request via the Django test client."""
        encoded = urllib.parse.urlencode(info)
        django_client = self.client  # see WRAPPER_COMMENT
        self.set_http_headers(kwargs)
        return django_client.head(url, encoded, **kwargs)
    @instrument_url
    def client_post(
        self,
        url: str,
        info: Union[str, bytes, Dict[str, Any]] = {},
        **kwargs: ClientArg,
    ) -> HttpResponse:
        """POST via the Django test client, validated against OpenAPI."""
        intentionally_undocumented = kwargs.pop("intentionally_undocumented", False)
        assert isinstance(intentionally_undocumented, bool)
        django_client = self.client  # see WRAPPER_COMMENT
        self.set_http_headers(kwargs)
        result = django_client.post(url, info, **kwargs)
        self.validate_api_response_openapi(
            url, "post", result, info, kwargs, intentionally_undocumented=intentionally_undocumented
        )
        return result

    @instrument_url
    def client_post_request(self, url: str, req: Any) -> HttpResponse:
        """
        We simulate hitting an endpoint here, although we
        actually resolve the URL manually and hit the view
        directly. We have this helper method to allow our
        instrumentation to work for /notify_tornado and
        future similar methods that require doing funny
        things to a request object.
        """
        match = resolve(url)
        return match.func(req)

    @instrument_url
    def client_get(self, url: str, info: Dict[str, Any] = {}, **kwargs: ClientArg) -> HttpResponse:
        """GET via the Django test client, validated against OpenAPI."""
        intentionally_undocumented = kwargs.pop("intentionally_undocumented", False)
        assert isinstance(intentionally_undocumented, bool)
        django_client = self.client  # see WRAPPER_COMMENT
        self.set_http_headers(kwargs)
        result = django_client.get(url, info, **kwargs)
        self.validate_api_response_openapi(
            url, "get", result, info, kwargs, intentionally_undocumented=intentionally_undocumented
        )
        return result
example_user_map = dict(
hamlet="hamlet@zulip.com",
cordelia="cordelia@zulip.com",
iago="iago@zulip.com",
prospero="prospero@zulip.com",
othello="othello@zulip.com",
AARON="AARON@zulip.com",
aaron="aaron@zulip.com",
ZOE="ZOE@zulip.com",
polonius="polonius@zulip.com",
desdemona="desdemona@zulip.com",
shiva="shiva@zulip.com",
webhook_bot="webhook-bot@zulip.com",
welcome_bot="welcome-bot@zulip.com",
outgoing_webhook_bot="outgoing-webhook@zulip.com",
default_bot="default-bot@zulip.com",
)
mit_user_map = dict(
sipbtest="sipbtest@mit.edu",
starnine="starnine@mit.edu",
espuser="espuser@mit.edu",
)
lear_user_map = dict(
cordelia="cordelia@zulip.com",
king="king@lear.org",
)
# Non-registered test users
nonreg_user_map = dict(
test="test@zulip.com",
test1="test1@zulip.com",
alice="alice@zulip.com",
newuser="newuser@zulip.com",
bob="bob@zulip.com",
cordelia="cordelia@zulip.com",
newguy="newguy@zulip.com",
me="me@zulip.com",
)
example_user_ldap_username_map = dict(
hamlet="hamlet",
cordelia="cordelia",
# aaron's uid in our test directory is "letham".
aaron="letham",
)
    def nonreg_user(self, name: str) -> UserProfile:
        """Look up the UserProfile for a nonreg_user_map entry ("zulip" realm)."""
        email = self.nonreg_user_map[name]
        return get_user_by_delivery_email(email, get_realm("zulip"))

    def example_user(self, name: str) -> UserProfile:
        """Look up the UserProfile for an example_user_map entry ("zulip" realm)."""
        email = self.example_user_map[name]
        return get_user_by_delivery_email(email, get_realm("zulip"))

    def mit_user(self, name: str) -> UserProfile:
        """Look up the UserProfile for a mit_user_map entry ("zephyr" realm)."""
        email = self.mit_user_map[name]
        return get_user(email, get_realm("zephyr"))

    def lear_user(self, name: str) -> UserProfile:
        """Look up the UserProfile for a lear_user_map entry ("lear" realm)."""
        email = self.lear_user_map[name]
        return get_user(email, get_realm("lear"))

    def nonreg_email(self, name: str) -> str:
        """Email address (only) for a nonreg_user_map entry."""
        return self.nonreg_user_map[name]

    def example_email(self, name: str) -> str:
        """Email address (only) for an example_user_map entry."""
        return self.example_user_map[name]

    def mit_email(self, name: str) -> str:
        """Email address (only) for a mit_user_map entry."""
        return self.mit_user_map[name]

    def notification_bot(self, realm: Realm) -> UserProfile:
        """The system notification bot for the given realm."""
        return get_system_bot(settings.NOTIFICATION_BOT, realm.id)
def create_test_bot(
self, short_name: str, user_profile: UserProfile, full_name: str = "Foo Bot", **extras: Any
) -> UserProfile:
self.login_user(user_profile)
bot_info = {
"short_name": short_name,
"full_name": full_name,
}
bot_info.update(extras)
result = self.client_post("/json/bots", bot_info)
self.assert_json_success(result)
bot_email = f"{short_name}-bot@zulip.testserver"
bot_profile = get_user(bot_email, user_profile.realm)
return bot_profile
def fail_to_create_test_bot(
self,
short_name: str,
user_profile: UserProfile,
full_name: str = "Foo Bot",
*,
assert_json_error_msg: str,
**extras: Any,
) -> None:
self.login_user(user_profile)
bot_info = {
"short_name": short_name,
"full_name": full_name,
}
bot_info.update(extras)
result = self.client_post("/json/bots", bot_info)
self.assert_json_error(result, assert_json_error_msg)
    def _get_page_params(self, result: HttpResponse) -> Dict[str, Any]:
        """Helper for parsing page_params after fetching the web app's home view."""
        doc = lxml.html.document_fromstring(result.content)
        # page_params is embedded as JSON in the data-params attribute of
        # the #page-params element.
        div = cast(lxml.html.HtmlMixin, doc).get_element_by_id("page-params")
        assert div is not None
        page_params_json = div.get("data-params")
        assert page_params_json is not None
        page_params = orjson.loads(page_params_json)
        return page_params

    def check_rendered_logged_in_app(self, result: HttpResponse) -> None:
        """Verifies that a visit of / was a 200 that rendered page_params
        and not for a (logged-out) spectator."""
        self.assertEqual(result.status_code, 200)
        page_params = self._get_page_params(result)
        # It is important to check `is_spectator` to verify
        # that we treated this request as a normal logged-in session,
        # not as a spectator.
        self.assertEqual(page_params["is_spectator"], False)
    def login_with_return(
        self, email: str, password: Optional[str] = None, **kwargs: ClientArg
    ) -> HttpResponse:
        """POST to the login form and return the response, for tests that
        inspect the login flow itself; defaults to the initial password."""
        if password is None:
            password = initial_password(email)
        result = self.client_post(
            "/accounts/login/", {"username": email, "password": password}, **kwargs
        )
        self.assertNotEqual(result.status_code, 500)
        return result

    def login(self, name: str) -> None:
        """
        Use this for really simple tests where you just need
        to be logged in as some user, but don't need the actual
        user object for anything else. Try to use 'hamlet' for
        non-admins and 'iago' for admins:

            self.login('hamlet')

        Try to use 'cordelia' or 'othello' as "other" users.
        """
        assert "@" not in name, "use login_by_email for email logins"
        user = self.example_user(name)
        self.login_user(user)

    def login_by_email(self, email: str, password: str) -> None:
        """Log the test client in via email and password; asserts success."""
        realm = get_realm("zulip")
        request = HttpRequest()
        request.session = self.client.session
        self.assertTrue(
            self.client.login(
                request=request,
                username=email,
                password=password,
                realm=realm,
            ),
        )

    def assert_login_failure(self, email: str, password: str) -> None:
        """Assert that email/password login fails in the "zulip" realm."""
        realm = get_realm("zulip")
        request = HttpRequest()
        request.session = self.client.session
        self.assertFalse(
            self.client.login(
                request=request,
                username=email,
                password=password,
                realm=realm,
            ),
        )

    def login_user(self, user_profile: UserProfile) -> None:
        """Log in as user_profile using its default initial password."""
        email = user_profile.delivery_email
        realm = user_profile.realm
        password = initial_password(email)
        request = HttpRequest()
        request.session = self.client.session
        self.assertTrue(
            self.client.login(request=request, username=email, password=password, realm=realm)
        )

    def login_2fa(self, user_profile: UserProfile) -> None:
        """
        We need this function to call request.session.save().
        do_two_factor_login doesn't save session; in normal request-response
        cycle this doesn't matter because middleware will save the session
        when it finds it dirty; however, in tests we will have to do that
        explicitly.
        """
        request = HttpRequest()
        request.session = self.client.session
        request.user = user_profile
        do_two_factor_login(request, user_profile)
        request.session.save()

    def logout(self) -> None:
        """Log the test client out."""
        self.client.logout()

    def register(self, email: str, password: str, **kwargs: Any) -> HttpResponse:
        """Run the full two-step registration flow for a new account."""
        self.client_post("/accounts/home/", {"email": email}, **kwargs)
        return self.submit_reg_form_for_user(email, password, **kwargs)
    def submit_reg_form_for_user(
        self,
        email: str,
        password: Optional[str],
        realm_name: str = "Zulip Test",
        realm_subdomain: str = "zuliptest",
        from_confirmation: str = "",
        full_name: Optional[str] = None,
        timezone: str = "",
        realm_in_root_domain: Optional[str] = None,
        default_stream_groups: Sequence[str] = [],
        source_realm_id: str = "",
        key: Optional[str] = None,
        realm_type: int = Realm.ORG_TYPES["business"]["id"],
        enable_marketing_emails: Optional[bool] = None,
        is_demo_organization: bool = False,
        **kwargs: ClientArg,
    ) -> HttpResponse:
        """
        Stage two of the two-step registration process.

        If things are working correctly the account should be fully
        registered after this call.

        You can pass the HTTP_HOST variable for subdomains via kwargs.
        """
        if full_name is None:
            full_name = email.replace("@", "_")
        payload = {
            "full_name": full_name,
            "realm_name": realm_name,
            "realm_subdomain": realm_subdomain,
            "realm_type": realm_type,
            # Without an explicit key, dig the confirmation key out of the
            # outgoing email.
            "key": key if key is not None else find_key_by_email(email),
            "timezone": timezone,
            "terms": True,
            "from_confirmation": from_confirmation,
            "default_stream_group": default_stream_groups,
            "source_realm_id": source_realm_id,
            "is_demo_organization": is_demo_organization,
        }
        if enable_marketing_emails is not None:
            payload["enable_marketing_emails"] = enable_marketing_emails
        if password is not None:
            payload["password"] = password
        if realm_in_root_domain is not None:
            payload["realm_in_root_domain"] = realm_in_root_domain
        return self.client_post("/accounts/register/", payload, **kwargs)
    def get_confirmation_url_from_outbox(
        self,
        email_address: str,
        *,
        url_pattern: Optional[str] = None,
        email_subject_contains: Optional[str] = None,
        email_body_contains: Optional[str] = None,
    ) -> str:
        """Find the most recent outgoing email to email_address and extract
        the confirmation URL from its body; fails if no email matches."""
        from django.core.mail import outbox

        if url_pattern is None:
            # This is a bit of a crude heuristic, but good enough for most tests.
            url_pattern = settings.EXTERNAL_HOST + r"(\S+)>"
        # Scan newest-first so we find the latest email to this address.
        for message in reversed(outbox):
            if any(
                addr == email_address or addr.endswith(f" <{email_address}>") for addr in message.to
            ):
                match = re.search(url_pattern, message.body)
                assert match is not None
                if email_subject_contains:
                    self.assertIn(email_subject_contains, message.subject)
                if email_body_contains:
                    self.assertIn(email_body_contains, message.body)
                [confirmation_url] = match.groups()
                return confirmation_url
        else:
            # for-else: only reached when no message in the outbox matched.
            raise AssertionError("Couldn't find a confirmation email.")
    def encode_uuid(self, uuid: str) -> str:
        """
        Return a Basic auth header value for a remote server, identified by
        uuid; the server's API key is cached in self.API_KEYS after first use.
        """
        if uuid in self.API_KEYS:
            api_key = self.API_KEYS[uuid]
        else:
            api_key = get_remote_server_by_uuid(uuid).api_key
            self.API_KEYS[uuid] = api_key
        return self.encode_credentials(uuid, api_key)

    def encode_user(self, user: UserProfile) -> str:
        """Return a Basic auth header value for the given user."""
        email = user.delivery_email
        api_key = user.api_key
        return self.encode_credentials(email, api_key)

    def encode_email(self, email: str, realm: str = "zulip") -> str:
        # TODO: use encode_user where possible
        """Return a Basic auth header value for the user with this email."""
        assert "@" in email
        user = get_user_by_delivery_email(email, get_realm(realm))
        api_key = get_api_key(user)
        return self.encode_credentials(email, api_key)
def encode_credentials(self, identifier: str, api_key: str) -> str:
"""
identifier: Can be an email or a remote server uuid.
"""
credentials = f"{identifier}:{api_key}"
return "Basic " + base64.b64encode(credentials.encode()).decode()
    def uuid_get(
        self, identifier: str, url: str, info: Dict[str, Any] = {}, **kwargs: ClientArg
    ) -> HttpResponse:
        """GET authenticated as a remote server (by uuid)."""
        kwargs["HTTP_AUTHORIZATION"] = self.encode_uuid(identifier)
        return self.client_get(url, info, **kwargs)

    def uuid_post(
        self,
        identifier: str,
        url: str,
        info: Union[str, bytes, Dict[str, Any]] = {},
        **kwargs: ClientArg,
    ) -> HttpResponse:
        """POST authenticated as a remote server (by uuid)."""
        kwargs["HTTP_AUTHORIZATION"] = self.encode_uuid(identifier)
        return self.client_post(url, info, **kwargs)

    def api_get(
        self, user: UserProfile, url: str, info: Dict[str, Any] = {}, **kwargs: ClientArg
    ) -> HttpResponse:
        """GET authenticated as the given user via Basic auth."""
        kwargs["HTTP_AUTHORIZATION"] = self.encode_user(user)
        return self.client_get(url, info, **kwargs)

    def api_post(
        self,
        user: UserProfile,
        url: str,
        info: Union[str, bytes, Dict[str, Any]] = {},
        **kwargs: ClientArg,
    ) -> HttpResponse:
        """POST authenticated as the given user via Basic auth."""
        kwargs["HTTP_AUTHORIZATION"] = self.encode_user(user)
        return self.client_post(url, info, **kwargs)

    def api_patch(
        self,
        user: UserProfile,
        url: str,
        info: Dict[str, Any] = {},
        intentionally_undocumented: bool = False,
        **kwargs: ClientArg,
    ) -> HttpResponse:
        """PATCH authenticated as the given user via Basic auth."""
        kwargs["HTTP_AUTHORIZATION"] = self.encode_user(user)
        return self.client_patch(
            url, info, intentionally_undocumented=intentionally_undocumented, **kwargs
        )

    def api_delete(
        self, user: UserProfile, url: str, info: Dict[str, Any] = {}, **kwargs: ClientArg
    ) -> HttpResponse:
        """DELETE authenticated as the given user via Basic auth."""
        kwargs["HTTP_AUTHORIZATION"] = self.encode_user(user)
        return self.client_delete(url, info, **kwargs)

    def get_streams(self, user_profile: UserProfile) -> List[str]:
        """
        Helper function to get the stream names for a user
        """
        subs = get_stream_subscriptions_for_user(user_profile).filter(
            active=True,
        )
        return [check_string("recipient", get_display_recipient(sub.recipient)) for sub in subs]

    def send_personal_message(
        self,
        from_user: UserProfile,
        to_user: UserProfile,
        content: str = "test content",
        sending_client_name: str = "test suite",
    ) -> int:
        """Send a 1:1 private message and return the new message's id."""
        recipient_list = [to_user.id]
        (sending_client, _) = Client.objects.get_or_create(name=sending_client_name)
        return check_send_message(
            from_user,
            sending_client,
            "private",
            recipient_list,
            None,
            content,
        )

    def send_huddle_message(
        self,
        from_user: UserProfile,
        to_users: List[UserProfile],
        content: str = "test content",
        sending_client_name: str = "test suite",
    ) -> int:
        """Send a group private message (2+ recipients); returns message id."""
        to_user_ids = [u.id for u in to_users]
        assert len(to_user_ids) >= 2
        (sending_client, _) = Client.objects.get_or_create(name=sending_client_name)
        return check_send_message(
            from_user,
            sending_client,
            "private",
            to_user_ids,
            None,
            content,
        )

    def send_stream_message(
        self,
        sender: UserProfile,
        stream_name: str,
        content: str = "test content",
        topic_name: str = "test",
        recipient_realm: Optional[Realm] = None,
        sending_client_name: str = "test suite",
        allow_unsubscribed_sender: bool = False,
    ) -> int:
        """Send a stream message and return its id; errors loudly if the
        (human) sender is not subscribed, unless explicitly allowed."""
        (sending_client, _) = Client.objects.get_or_create(name=sending_client_name)
        message_id = check_send_stream_message(
            sender=sender,
            client=sending_client,
            stream_name=stream_name,
            topic=topic_name,
            body=content,
        realm=recipient_realm,
        )
        # A missing UserMessage row means the sender was not subscribed.
        if not UserMessage.objects.filter(user_profile=sender, message_id=message_id).exists():
            if not sender.is_bot and not allow_unsubscribed_sender:
                raise AssertionError(
                    f"""
It appears that the sender did not get a UserMessage row, which is
almost certainly an artificial symptom that in your test setup you
have decided to send a message to a stream without the sender being
subscribed.
Please do self.subscribe(<user for {sender.full_name}>, {repr(stream_name)}) first.
Or choose a stream that the user is already subscribed to:
{self.subscribed_stream_name_list(sender)}
"""
                )
        return message_id

    def get_messages_response(
        self,
        anchor: Union[int, str] = 1,
        num_before: int = 100,
        num_after: int = 100,
        use_first_unread_anchor: bool = False,
    ) -> Dict[str, List[Dict[str, Any]]]:
        """Fetch /json/messages and return the full JSON payload."""
        post_params = {
            "anchor": anchor,
            "num_before": num_before,
            "num_after": num_after,
            "use_first_unread_anchor": orjson.dumps(use_first_unread_anchor).decode(),
        }
        result = self.client_get("/json/messages", dict(post_params))
        data = result.json()
        return data

    def get_messages(
        self,
        anchor: Union[str, int] = 1,
        num_before: int = 100,
        num_after: int = 100,
        use_first_unread_anchor: bool = False,
    ) -> List[Dict[str, Any]]:
        """Fetch /json/messages and return just the "messages" list."""
        data = self.get_messages_response(anchor, num_before, num_after, use_first_unread_anchor)
        return data["messages"]

    def users_subscribed_to_stream(self, stream_name: str, realm: Realm) -> List[UserProfile]:
        """All users with an active subscription to the named stream."""
        stream = Stream.objects.get(name=stream_name, realm=realm)
        recipient = Recipient.objects.get(type_id=stream.id, type=Recipient.STREAM)
        subscriptions = Subscription.objects.filter(recipient=recipient, active=True)
        return [subscription.user_profile for subscription in subscriptions]

    def assert_streaming_content(self, response: HttpResponse, result: bytes) -> None:
        """Assert the full body of a StreamingHttpResponse equals result."""
        assert isinstance(response, StreamingHttpResponse)
        data = b"".join(response.streaming_content)
        self.assertEqual(result, data)
    def assert_json_success(self, result: HttpResponse) -> Dict[str, Any]:
        """
        Successful POSTs return a 200 and JSON of the form {"result": "success",
        "msg": ""}.
        """
        try:
            json = orjson.loads(result.content)
        except orjson.JSONDecodeError:  # nocoverage
            json = {"msg": "Error parsing JSON in response!"}
        self.assertEqual(result.status_code, 200, json["msg"])
        self.assertEqual(json.get("result"), "success")
        # We have a msg key for consistency with errors, but it typically has an
        # empty value.
        self.assertIn("msg", json)
        self.assertNotEqual(json["msg"], "Error parsing JSON in response!")
        return json

    def get_json_error(self, result: HttpResponse, status_code: int = 400) -> str:
        """Assert that result is a JSON error with the given status code,
        returning its "msg" string."""
        try:
            json = orjson.loads(result.content)
        except orjson.JSONDecodeError:  # nocoverage
            json = {"msg": "Error parsing JSON in response!"}
        self.assertEqual(result.status_code, status_code, msg=json.get("msg"))
        self.assertEqual(json.get("result"), "error")
        return json["msg"]
    def assert_json_error(self, result: HttpResponse, msg: str, status_code: int = 400) -> None:
        """
        Invalid POSTs return an error status code and JSON of the form
        {"result": "error", "msg": "reason"}.
        """
        self.assertEqual(self.get_json_error(result, status_code=status_code), msg)
def assert_length(self, items: Collection[Any], count: int) -> None:
actual_count = len(items)
if actual_count != count: # nocoverage
print("\nITEMS:\n")
for item in items:
print(item)
print(f"\nexpected length: {count}\nactual length: {actual_count}")
raise AssertionError(f"{str(type(items))} is of unexpected size!")
    def assert_json_error_contains(
        self, result: HttpResponse, msg_substring: str, status_code: int = 400
    ) -> None:
        """Like assert_json_error, but matches only a substring of the msg."""
        self.assertIn(msg_substring, self.get_json_error(result, status_code=status_code))

    def assert_in_response(self, substring: str, response: HttpResponse) -> None:
        """Assert that substring occurs in the decoded response body."""
        self.assertIn(substring, response.content.decode())

    def assert_in_success_response(self, substrings: List[str], response: HttpResponse) -> None:
        """Assert a 200 response whose body contains every given substring."""
        self.assertEqual(response.status_code, 200)
        decoded = response.content.decode()
        for substring in substrings:
            self.assertIn(substring, decoded)

    def assert_not_in_success_response(self, substrings: List[str], response: HttpResponse) -> None:
        """Assert a 200 response whose body contains none of the substrings."""
        self.assertEqual(response.status_code, 200)
        decoded = response.content.decode()
        for substring in substrings:
            self.assertNotIn(substring, decoded)

    def assert_logged_in_user_id(self, user_id: Optional[int]) -> None:
        """
        Verifies the user currently logged in for the test client has the provided user_id.
        Pass None to verify no user is logged in.
        """
        self.assertEqual(get_session_dict_user(self.client.session), user_id)

    def webhook_fixture_data(self, type: str, action: str, file_type: str = "json") -> str:
        """Read a webhook fixture file, located relative to this module."""
        fn = os.path.join(
            os.path.dirname(__file__),
            f"../webhooks/{type}/fixtures/{action}.{file_type}",
        )
        with open(fn) as f:
            return f.read()

    def fixture_file_name(self, file_name: str, type: str = "") -> str:
        """Path of a test fixture file, located relative to this module."""
        return os.path.join(
            os.path.dirname(__file__),
            f"../tests/fixtures/{type}/{file_name}",
        )

    def fixture_data(self, file_name: str, type: str = "") -> str:
        """Read the contents of a test fixture file."""
        fn = self.fixture_file_name(file_name, type)
        with open(fn) as f:
            return f.read()
    def make_stream(
        self,
        stream_name: str,
        realm: Optional[Realm] = None,
        invite_only: bool = False,
        is_web_public: bool = False,
        history_public_to_subscribers: Optional[bool] = None,
    ) -> Stream:
        """Create a Stream (and its Recipient row) directly in the database.

        Defaults to the "zulip" realm.  Raises if a stream with this name
        already exists in the realm.
        """
        if realm is None:
            realm = get_realm("zulip")
        # Resolve history_public_to_subscribers=None to the realm's default
        # for this privacy configuration.
        history_public_to_subscribers = get_default_value_for_history_public_to_subscribers(
            realm, invite_only, history_public_to_subscribers
        )
        try:
            stream = Stream.objects.create(
                realm=realm,
                name=stream_name,
                invite_only=invite_only,
                is_web_public=is_web_public,
                history_public_to_subscribers=history_public_to_subscribers,
            )
        except IntegrityError: # nocoverage -- this is for bugs in the tests
            raise Exception(
                f"""
            {stream_name} already exists
            Please call make_stream with a stream name
            that is not already in use."""
            )
        # A Stream needs its own Recipient row for message addressing.
        recipient = Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM)
        stream.recipient = recipient
        stream.save(update_fields=["recipient"])
        return stream
    # Sentinel stream ID returned by get_stream_id when no stream with the
    # given name exists in the realm.
    INVALID_STREAM_ID = 999999
    def get_stream_id(self, name: str, realm: Optional[Realm] = None) -> int:
        """Return the ID of the named stream, or INVALID_STREAM_ID if absent.

        Defaults to the "zulip" realm.
        """
        if not realm:
            realm = get_realm("zulip")
        try:
            stream = get_realm_stream(name, realm.id)
        except Stream.DoesNotExist:
            return self.INVALID_STREAM_ID
        return stream.id
# Subscribe to a stream directly
def subscribe(self, user_profile: UserProfile, stream_name: str) -> Stream:
realm = user_profile.realm
try:
stream = get_stream(stream_name, user_profile.realm)
except Stream.DoesNotExist:
stream, from_stream_creation = create_stream_if_needed(realm, stream_name)
bulk_add_subscriptions(realm, [stream], [user_profile], acting_user=None)
return stream
def unsubscribe(self, user_profile: UserProfile, stream_name: str) -> None:
realm = user_profile.realm
stream = get_stream(stream_name, user_profile.realm)
bulk_remove_subscriptions(realm, [user_profile], [stream], acting_user=None)
# Subscribe to a stream by making an API request
def common_subscribe_to_streams(
self,
user: UserProfile,
streams: Iterable[str],
extra_post_data: Dict[str, Any] = {},
invite_only: bool = False,
is_web_public: bool = False,
allow_fail: bool = False,
**kwargs: ClientArg,
) -> HttpResponse:
post_data = {
"subscriptions": orjson.dumps([{"name": stream} for stream in streams]).decode(),
"is_web_public": orjson.dumps(is_web_public).decode(),
"invite_only": orjson.dumps(invite_only).decode(),
}
post_data.update(extra_post_data)
result = self.api_post(user, "/api/v1/users/me/subscriptions", post_data, **kwargs)
if not allow_fail:
self.assert_json_success(result)
return result
def subscribed_stream_name_list(self, user: UserProfile) -> str:
# This is currently only used for producing error messages.
subscribed_streams = gather_subscriptions(user)[0]
return "".join(sorted(f" * {stream['name']}\n" for stream in subscribed_streams))
def check_user_subscribed_only_to_streams(self, user_name: str, streams: List[Stream]) -> None:
streams = sorted(streams, key=lambda x: x.name)
subscribed_streams = gather_subscriptions(self.nonreg_user(user_name))[0]
self.assert_length(subscribed_streams, len(streams))
for x, y in zip(subscribed_streams, streams):
self.assertEqual(x["name"], y.name)
    def send_webhook_payload(
        self,
        user_profile: UserProfile,
        url: str,
        payload: Union[str, Dict[str, Any]],
        **post_params: ClientArg,
    ) -> Message:
        """
        Send a webhook payload to the server, and verify that the
        post is successful.
        This is a pretty low-level function. For most use cases
        see the helpers that call this function, which do additional
        checks.
        Occasionally tests will call this directly, for unique
        situations like having multiple messages go to a stream,
        where the other helper functions are a bit too rigid,
        and you'll want the test itself do various assertions.
        Even in those cases, you're often better to simply
        call client_post and assert_json_success.
        If the caller expects a message to be sent to a stream,
        the caller should make sure the user is subscribed.
        """
        # Remember the latest message so we can detect whether the
        # webhook actually produced a new one.
        prior_msg = self.get_last_message()
        result = self.client_post(url, payload, **post_params)
        self.assert_json_success(result)
        # Check the correct message was sent
        msg = self.get_last_message()
        if msg.id == prior_msg.id:
            raise EmptyResponseError(
                """
            Your test code called an endpoint that did
            not write any new messages. It is probably
            broken (but still returns 200 due to exception
            handling).
            One possible gotcha is that you forgot to
            subscribe the test user to the stream that
            the webhook sends to.
            """
            ) # nocoverage
        # Verify the message was sent by the expected (bot) user.
        self.assertEqual(msg.sender.email, user_profile.email)
        return msg
    def get_last_message(self) -> Message:
        """Return the most recently sent Message (highest ID)."""
        return Message.objects.latest("id")
    def get_second_to_last_message(self) -> Message:
        """Return the second-most-recently sent Message (second-highest ID)."""
        return Message.objects.all().order_by("-id")[1]
    @contextmanager
    def simulated_markdown_failure(self) -> Iterator[None]:
        """
        This raises a failure inside of the try/except block of
        markdown.__init__.do_convert.
        """
        # ERROR_BOT=None presumably suppresses sending the failure notice
        # to a bot -- confirm against the error-handling code path.  The
        # patched timeout makes rendering raise CalledProcessError.
        with self.settings(ERROR_BOT=None), mock.patch(
            "zerver.lib.markdown.timeout", side_effect=subprocess.CalledProcessError(1, [])
        ), self.assertLogs(
            level="ERROR"
        ): # For markdown_logger.exception
            yield
def create_default_device(
self, user_profile: UserProfile, number: str = "+12125550100"
) -> None:
phone_device = PhoneDevice(
user=user_profile,
name="default",
confirmed=True,
number=number,
key="abcd",
method="sms",
)
phone_device.save()
def rm_tree(self, path: str) -> None:
if os.path.exists(path):
shutil.rmtree(path)
def make_import_output_dir(self, exported_from: str) -> str:
output_dir = tempfile.mkdtemp(
dir=settings.TEST_WORKER_DIR, prefix="test-" + exported_from + "-import-"
)
os.makedirs(output_dir, exist_ok=True)
return output_dir
def get_set(self, data: List[Dict[str, Any]], field: str) -> Set[str]:
values = {r[field] for r in data}
return values
def find_by_id(self, data: List[Dict[str, Any]], db_id: int) -> Dict[str, Any]:
return [r for r in data if r["id"] == db_id][0]
    def init_default_ldap_database(self) -> None:
        """
        Takes care of the mock_ldap setup, loads
        a directory from zerver/tests/fixtures/ldap/directory.json with various entries
        to be used by tests.
        If a test wants to specify its own directory, it can just replace
        self.mock_ldap.directory with its own content, but in most cases it should be
        enough to use change_user_attr to make simple modifications to the pre-loaded
        directory. If new user entries are needed to test for some additional unusual
        scenario, it's most likely best to add that to directory.json.
        """
        directory = orjson.loads(self.fixture_data("directory.json", type="ldap"))
        for dn, attrs in directory.items():
            if "uid" in attrs:
                # Generate a password for the LDAP account:
                attrs["userPassword"] = [self.ldap_password(attrs["uid"][0])]
            # Load binary attributes. If in "directory", an attribute as its value
            # has a string starting with "file:", the rest of the string is assumed
            # to be a path to the file from which binary data should be loaded,
            # as the actual value of the attribute in LDAP.
            for attr, value in attrs.items():
                if isinstance(value, str) and value.startswith("file:"):
                    with open(value[5:], "rb") as f:
                        attrs[attr] = [f.read()]
        # Patch ldap.initialize so django_auth_ldap talks to the in-memory
        # MockLDAP directory instead of a real server.
        ldap_patcher = mock.patch("django_auth_ldap.config.ldap.initialize")
        self.mock_initialize = ldap_patcher.start()
        self.mock_ldap = MockLDAP(directory)
        self.mock_initialize.return_value = self.mock_ldap
def change_ldap_user_attr(
self, username: str, attr_name: str, attr_value: Union[str, bytes], binary: bool = False
) -> None:
"""
Method for changing the value of an attribute of a user entry in the mock
directory. Use option binary=True if you want binary data to be loaded
into the attribute from a file specified at attr_value. This changes
the attribute only for the specific test function that calls this method,
and is isolated from other tests.
"""
dn = f"uid={username},ou=users,dc=zulip,dc=com"
if binary:
with open(attr_value, "rb") as f:
# attr_value should be a path to the file with the binary data
data: Union[str, bytes] = f.read()
else:
data = attr_value
self.mock_ldap.directory[dn][attr_name] = [data]
def remove_ldap_user_attr(self, username: str, attr_name: str) -> None:
"""
Method for removing the value of an attribute of a user entry in the mock
directory. This changes the attribute only for the specific test function
that calls this method, and is isolated from other tests.
"""
dn = f"uid={username},ou=users,dc=zulip,dc=com"
self.mock_ldap.directory[dn].pop(attr_name, None)
def ldap_username(self, username: str) -> str:
"""
Maps Zulip username to the name of the corresponding LDAP user
in our test directory at zerver/tests/fixtures/ldap/directory.json,
if the LDAP user exists.
"""
return self.example_user_ldap_username_map[username]
def ldap_password(self, uid: str) -> str:
return f"{uid}_ldap_password"
def email_display_from(self, email_message: EmailMessage) -> str:
"""
Returns the email address that will show in email clients as the
"From" field.
"""
# The extra_headers field may contain a "From" which is used
# for display in email clients, and appears in the RFC822
# header as `From`. The `.from_email` accessor is the
# "envelope from" address, used by mail transfer agents if
# the email bounces.
return email_message.extra_headers.get("From", email_message.from_email)
def email_envelope_from(self, email_message: EmailMessage) -> str:
"""
Returns the email address that will be used if the email bounces.
"""
# See email_display_from, above.
return email_message.from_email
def check_has_permission_policies(
self, policy: str, validation_func: Callable[[UserProfile], bool]
) -> None:
realm = get_realm("zulip")
owner_user = self.example_user("desdemona")
admin_user = self.example_user("iago")
moderator_user = self.example_user("shiva")
member_user = self.example_user("hamlet")
new_member_user = self.example_user("othello")
guest_user = self.example_user("polonius")
do_set_realm_property(realm, "waiting_period_threshold", 1000, acting_user=None)
new_member_user.date_joined = timezone_now() - timedelta(
days=(realm.waiting_period_threshold - 1)
)
new_member_user.save()
member_user.date_joined = timezone_now() - timedelta(
days=(realm.waiting_period_threshold + 1)
)
member_user.save()
do_set_realm_property(realm, policy, Realm.POLICY_NOBODY, acting_user=None)
self.assertFalse(validation_func(owner_user))
self.assertFalse(validation_func(admin_user))
self.assertFalse(validation_func(moderator_user))
self.assertFalse(validation_func(member_user))
self.assertFalse(validation_func(new_member_user))
self.assertFalse(validation_func(guest_user))
do_set_realm_property(realm, policy, Realm.POLICY_OWNERS_ONLY, acting_user=None)
self.assertTrue(validation_func(owner_user))
self.assertFalse(validation_func(admin_user))
self.assertFalse(validation_func(moderator_user))
self.assertFalse(validation_func(member_user))
self.assertFalse(validation_func(new_member_user))
self.assertFalse(validation_func(guest_user))
do_set_realm_property(realm, policy, Realm.POLICY_ADMINS_ONLY, acting_user=None)
self.assertTrue(validation_func(owner_user))
self.assertTrue(validation_func(admin_user))
self.assertFalse(validation_func(moderator_user))
self.assertFalse(validation_func(member_user))
self.assertFalse(validation_func(new_member_user))
self.assertFalse(validation_func(guest_user))
do_set_realm_property(realm, policy, Realm.POLICY_MODERATORS_ONLY, acting_user=None)
self.assertTrue(validation_func(owner_user))
self.assertTrue(validation_func(admin_user))
self.assertTrue(validation_func(moderator_user))
self.assertFalse(validation_func(member_user))
self.assertFalse(validation_func(new_member_user))
self.assertFalse(validation_func(guest_user))
do_set_realm_property(realm, policy, Realm.POLICY_FULL_MEMBERS_ONLY, acting_user=None)
self.assertTrue(validation_func(owner_user))
self.assertTrue(validation_func(admin_user))
self.assertTrue(validation_func(moderator_user))
self.assertTrue(validation_func(member_user))
self.assertFalse(validation_func(new_member_user))
self.assertFalse(validation_func(guest_user))
do_set_realm_property(realm, policy, Realm.POLICY_MEMBERS_ONLY, acting_user=None)
self.assertTrue(validation_func(owner_user))
self.assertTrue(validation_func(admin_user))
self.assertTrue(validation_func(moderator_user))
self.assertTrue(validation_func(member_user))
self.assertTrue(validation_func(new_member_user))
self.assertFalse(validation_func(guest_user))
do_set_realm_property(realm, policy, Realm.POLICY_EVERYONE, acting_user=None)
self.assertTrue(validation_func(owner_user))
self.assertTrue(validation_func(admin_user))
self.assertTrue(validation_func(moderator_user))
self.assertTrue(validation_func(member_user))
self.assertTrue(validation_func(new_member_user))
self.assertTrue(validation_func(guest_user))
    def subscribe_realm_to_manual_license_management_plan(
        self, realm: Realm, licenses: int, licenses_at_next_renewal: int, billing_schedule: int
    ) -> Tuple[CustomerPlan, LicenseLedger]:
        """Put *realm* on a manually-managed STANDARD billing plan.

        Creates the Customer (if needed), a CustomerPlan with
        automanage_licenses=False, and the initial renewal LicenseLedger
        entry, then marks the realm's plan_type as STANDARD.
        Returns the created (plan, ledger) pair.
        """
        customer, _ = Customer.objects.get_or_create(realm=realm)
        plan = CustomerPlan.objects.create(
            customer=customer,
            automanage_licenses=False,
            billing_cycle_anchor=timezone_now(),
            billing_schedule=billing_schedule,
            tier=CustomerPlan.STANDARD,
        )
        ledger = LicenseLedger.objects.create(
            plan=plan,
            is_renewal=True,
            event_time=timezone_now(),
            licenses=licenses,
            licenses_at_next_renewal=licenses_at_next_renewal,
        )
        realm.plan_type = Realm.PLAN_TYPE_STANDARD
        realm.save(update_fields=["plan_type"])
        return plan, ledger
    def subscribe_realm_to_monthly_plan_on_manual_license_management(
        self, realm: Realm, licenses: int, licenses_at_next_renewal: int
    ) -> Tuple[CustomerPlan, LicenseLedger]:
        """Convenience wrapper: manual license management on a MONTHLY schedule."""
        return self.subscribe_realm_to_manual_license_management_plan(
            realm, licenses, licenses_at_next_renewal, CustomerPlan.MONTHLY
        )
    @contextmanager
    def tornado_redirected_to_list(
        self, lst: List[Mapping[str, Any]], expected_num_events: int
    ) -> Iterator[None]:
        """Capture events sent through Tornado into *lst*.

        Clears *lst*, patches process_notification to append each event,
        and on exit asserts exactly *expected_num_events* were captured.
        """
        lst.clear()
        # process_notification takes a single parameter called 'notice'.
        # lst.append takes a single argument called 'object'.
        # Some code might call process_notification using keyword arguments,
        # so mypy doesn't allow assigning lst.append to process_notification
        # So explicitly change parameter name to 'notice' to work around this problem
        with mock.patch(
            "zerver.tornado.event_queue.process_notification", lambda notice: lst.append(notice)
        ):
            # Some `send_event` calls need to be executed only after the current transaction
            # commits (using `on_commit` hooks). Because the transaction in Django tests never
            # commits (rather, gets rolled back after the test completes), such events would
            # never be sent in tests, and we would be unable to verify them. Hence, we use
            # this helper to make sure the `send_event` calls actually run.
            with self.captureOnCommitCallbacks(execute=True):
                yield
        self.assert_length(lst, expected_num_events)
def create_user_notifications_data_object(
self, *, user_id: int, **kwargs: Any
) -> UserMessageNotificationsData:
return UserMessageNotificationsData(
user_id=user_id,
online_push_enabled=kwargs.get("online_push_enabled", False),
pm_email_notify=kwargs.get("pm_email_notify", False),
pm_push_notify=kwargs.get("pm_push_notify", False),
mention_email_notify=kwargs.get("mention_email_notify", False),
mention_push_notify=kwargs.get("mention_push_notify", False),
wildcard_mention_email_notify=kwargs.get("wildcard_mention_email_notify", False),
wildcard_mention_push_notify=kwargs.get("wildcard_mention_push_notify", False),
stream_email_notify=kwargs.get("stream_email_notify", False),
stream_push_notify=kwargs.get("stream_push_notify", False),
sender_is_muted=kwargs.get("sender_is_muted", False),
)
def get_maybe_enqueue_notifications_parameters(
self, *, message_id: int, user_id: int, acting_user_id: int, **kwargs: Any
) -> Dict[str, Any]:
"""
Returns a dictionary with the passed parameters, after filling up the
missing data with default values, for testing what was passed to the
`maybe_enqueue_notifications` method.
"""
user_notifications_data = self.create_user_notifications_data_object(
user_id=user_id, **kwargs
)
return dict(
user_notifications_data=user_notifications_data,
message_id=message_id,
acting_user_id=acting_user_id,
mentioned_user_group_id=kwargs.get("mentioned_user_group_id", None),
idle=kwargs.get("idle", True),
already_notified=kwargs.get(
"already_notified", {"email_notified": False, "push_notified": False}
),
)
def verify_emoji_code_foreign_keys(self) -> None:
"""
DB tables that refer to RealmEmoji use int(emoji_code) as the
foreign key. Those tables tend to de-normalize emoji_name due
to our inheritance-based setup. This helper makes sure those
invariants are intact, which is particularly tricky during
the import/export process (or during conversions from things
like Slack/RocketChat/MatterMost/etc.).
"""
dct = {}
for row in RealmEmoji.objects.all():
dct[row.id] = row
if not dct:
raise AssertionError("test needs RealmEmoji rows")
count = 0
for row in Reaction.objects.filter(reaction_type=Reaction.REALM_EMOJI):
realm_emoji_id = int(row.emoji_code)
assert realm_emoji_id in dct
self.assertEqual(dct[realm_emoji_id].name, row.emoji_name)
self.assertEqual(dct[realm_emoji_id].realm_id, row.user_profile.realm_id)
count += 1
for row in UserStatus.objects.filter(reaction_type=UserStatus.REALM_EMOJI):
realm_emoji_id = int(row.emoji_code)
assert realm_emoji_id in dct
self.assertEqual(dct[realm_emoji_id].name, row.emoji_name)
self.assertEqual(dct[realm_emoji_id].realm_id, row.user_profile.realm_id)
count += 1
if count == 0:
raise AssertionError("test is meaningless without any pertinent rows")
def check_user_added_in_system_group(self, user: UserProfile) -> None:
user_group = get_system_user_group_for_user(user)
self.assertTrue(
UserGroupMembership.objects.filter(user_profile=user, user_group=user_group).exists()
)
class WebhookTestCase(ZulipTestCase):
    """Shared test class for all incoming webhooks tests.
    Used by configuring the below class attributes, and calling
    send_and_test_message in individual tests.
    * Tests can override build_webhook_url if the webhook requires a
    different URL format.
    * Tests can override get_body for cases where there is no
    available fixture file.
    * Tests should specify WEBHOOK_DIR_NAME to enforce that all event
    types are declared in the @webhook_view decorator. This is
    important for ensuring we document all fully supported event types.
    """
    # Stream the webhook bot sends to; subclasses must set this before
    # calling check_webhook.
    STREAM_NAME: Optional[str] = None
    TEST_USER_EMAIL = "webhook-bot@zulip.com"
    # Format string for the webhook endpoint URL under test; may contain
    # {api_key} and {stream} placeholders.
    URL_TEMPLATE: str
    WEBHOOK_DIR_NAME: Optional[str] = None
    # This last parameter is a workaround to handle webhooks that do not
    # name the main function api_{WEBHOOK_DIR_NAME}_webhook.
    VIEW_FUNCTION_NAME: Optional[str] = None
    @property
    def test_user(self) -> UserProfile:
        # The bot user that receives the webhook payloads.
        return get_user(self.TEST_USER_EMAIL, get_realm("zulip"))
    def setUp(self) -> None:
        """Build the webhook URL and, when WEBHOOK_DIR_NAME is set, patch
        check_send_webhook_message to reject undeclared event types."""
        super().setUp()
        self.url = self.build_webhook_url()
        if self.WEBHOOK_DIR_NAME is not None:
            # If VIEW_FUNCTION_NAME is explicitly specified and
            # WEBHOOK_DIR_NAME is not None, an exception will be
            # raised when a test triggers events that are not
            # explicitly specified via the event_types parameter to
            # the @webhook_view decorator.
            if self.VIEW_FUNCTION_NAME is None:
                function = import_string(
                    f"zerver.webhooks.{self.WEBHOOK_DIR_NAME}.view.api_{self.WEBHOOK_DIR_NAME}_webhook"
                )
            else:
                function = import_string(
                    f"zerver.webhooks.{self.WEBHOOK_DIR_NAME}.view.{self.VIEW_FUNCTION_NAME}"
                )
            all_event_types = None
            if hasattr(function, "_all_event_types"):
                all_event_types = function._all_event_types
            if all_event_types is None:
                return # nocoverage
            def side_effect(*args: Any, **kwargs: Any) -> None:
                # Wrapper around check_send_webhook_message that raises if
                # the triggered event type was not registered via
                # @webhook_view(..., event_types=[...]).
                complete_event_type = (
                    kwargs.get("complete_event_type")
                    if len(args) < 5
                    else args[4] # complete_event_type is the argument at index 4
                )
                if (
                    complete_event_type is not None
                    and all_event_types is not None
                    and complete_event_type not in all_event_types
                ):
                    raise Exception(
                        f"""
Error: This test triggered a message using the event "{complete_event_type}", which was not properly
registered via the @webhook_view(..., event_types=[...]). These registrations are important for Zulip
self-documenting the supported event types for this integration.
You can fix this by adding "{complete_event_type}" to ALL_EVENT_TYPES for this webhook.
""".strip()
                    )
                check_send_webhook_message(*args, **kwargs)
            self.patch = mock.patch(
                f"zerver.webhooks.{self.WEBHOOK_DIR_NAME}.view.check_send_webhook_message",
                side_effect=side_effect,
            )
            self.patch.start()
            self.addCleanup(self.patch.stop)
    def api_stream_message(
        self,
        user: UserProfile,
        fixture_name: str,
        expected_topic: Optional[str] = None,
        expected_message: Optional[str] = None,
        content_type: Optional[str] = "application/json",
        expect_noop: bool = False,
        **kwargs: ClientArg,
    ) -> HttpResponse:
        # Like check_webhook, but authenticates the request via an HTTP
        # Authorization header for *user* rather than URL parameters.
        # NOTE(review): check_webhook is annotated -> None, so this
        # effectively returns None despite the HttpResponse annotation.
        kwargs["HTTP_AUTHORIZATION"] = self.encode_user(user)
        return self.check_webhook(
            fixture_name, expected_topic, expected_message, content_type, expect_noop, **kwargs
        )
    def check_webhook(
        self,
        fixture_name: str,
        expected_topic: Optional[str] = None,
        expected_message: Optional[str] = None,
        content_type: Optional[str] = "application/json",
        expect_noop: bool = False,
        **kwargs: ClientArg,
    ) -> None:
        """
        check_webhook is the main way to test "normal" webhooks that
        work by receiving a payload from a third party and then writing
        some message to a Zulip stream.
        We use `fixture_name` to find the payload data in of our test
        fixtures. Then we verify that a message gets sent to a stream:
        self.STREAM_NAME: stream name
        expected_topic: topic
        expected_message: content
        We simulate the delivery of the payload with `content_type`,
        and you can pass other headers via `kwargs`.
        For the rare cases of webhooks actually sending private messages,
        see send_and_test_private_message.
        When no message is expected to be sent, set `expect_noop` to True.
        """
        assert self.STREAM_NAME is not None
        self.subscribe(self.test_user, self.STREAM_NAME)
        payload = self.get_payload(fixture_name)
        if content_type is not None:
            kwargs["content_type"] = content_type
        if self.WEBHOOK_DIR_NAME is not None:
            # Integrations can declare fixture-specific HTTP headers.
            headers = get_fixture_http_headers(self.WEBHOOK_DIR_NAME, fixture_name)
            headers = standardize_headers(headers)
            kwargs.update(headers)
        try:
            msg = self.send_webhook_payload(
                self.test_user,
                self.url,
                payload,
                **kwargs,
            )
        except EmptyResponseError:
            if expect_noop:
                return
            else:
                raise AssertionError(
                    "No message was sent. Pass expect_noop=True if this is intentional."
                )
        if expect_noop:
            raise Exception(
                """
While no message is expected given expect_noop=True,
your test code triggered an endpoint that did write
one or more new messages.
""".strip()
            )
        assert expected_message is not None and expected_topic is not None
        self.assert_stream_message(
            message=msg,
            stream_name=self.STREAM_NAME,
            topic_name=expected_topic,
            content=expected_message,
        )
    def assert_stream_message(
        self,
        message: Message,
        stream_name: str,
        topic_name: str,
        content: str,
    ) -> None:
        """Assert *message* went to the given stream with this topic/content."""
        self.assertEqual(get_display_recipient(message.recipient), stream_name)
        self.assertEqual(message.topic_name(), topic_name)
        self.assertEqual(message.content, content)
    def send_and_test_private_message(
        self,
        fixture_name: str,
        expected_message: str,
        content_type: str = "application/json",
        *,
        sender: Optional[UserProfile] = None,
        **kwargs: ClientArg,
    ) -> Message:
        """
        For the rare cases that you are testing a webhook that sends
        private messages, use this function.
        Most webhooks send to streams, and you will want to look at
        check_webhook.
        """
        payload = self.get_payload(fixture_name)
        kwargs["content_type"] = content_type
        if self.WEBHOOK_DIR_NAME is not None:
            headers = get_fixture_http_headers(self.WEBHOOK_DIR_NAME, fixture_name)
            headers = standardize_headers(headers)
            kwargs.update(headers)
        if sender is None:
            sender = self.test_user
        msg = self.send_webhook_payload(
            sender,
            self.url,
            payload,
            **kwargs,
        )
        self.assertEqual(msg.content, expected_message)
        return msg
    def build_webhook_url(self, *args: str, **kwargs: str) -> str:
        """Render URL_TEMPLATE, filling api_key/stream and appending any
        extra query parameters given as *args*/**kwargs."""
        url = self.URL_TEMPLATE
        if url.find("api_key") >= 0:
            api_key = get_api_key(self.test_user)
            url = self.URL_TEMPLATE.format(api_key=api_key, stream=self.STREAM_NAME)
        else:
            url = self.URL_TEMPLATE.format(stream=self.STREAM_NAME)
        has_arguments = kwargs or args
        if has_arguments and url.find("?") == -1:
            url = f"{url}?" # nocoverage
        else:
            url = f"{url}&"
        for key, value in kwargs.items():
            url = f"{url}{key}={value}&"
        for arg in args:
            url = f"{url}{arg}&"
        # NOTE(review): when no extra arguments are passed, the "&" appended
        # above is kept (only the has_arguments branch strips it), so the
        # returned URL ends with a trailing "&".
        return url[:-1] if has_arguments else url
    def get_payload(self, fixture_name: str) -> Union[str, Dict[str, str]]:
        """
        Generally webhooks that override this should return dicts."""
        return self.get_body(fixture_name)
    def get_body(self, fixture_name: str) -> str:
        """Return the raw fixture payload for *fixture_name* as a string."""
        assert self.WEBHOOK_DIR_NAME is not None
        body = self.webhook_fixture_data(self.WEBHOOK_DIR_NAME, fixture_name)
        # fail fast if we don't have valid json
        orjson.loads(body)
        return body
class MigrationsTestCase(ZulipTestCase): # nocoverage
    """
    Test class for database migrations inspired by this blog post:
       https://www.caktusgroup.com/blog/2016/02/02/writing-unit-tests-django-migrations/
    Documented at https://zulip.readthedocs.io/en/latest/subsystems/schema-migrations.html
    """
    @property
    def app(self) -> str:
        # Name of the Django app that contains the concrete test module.
        app_config = apps.get_containing_app_config(type(self).__module__)
        assert app_config is not None
        return app_config.name
    # Migration names to migrate from/to; subclasses must set both.
    migrate_from: Optional[str] = None
    migrate_to: Optional[str] = None
    def setUp(self) -> None:
        """Roll the database back to migrate_from, let the subclass set up
        pre-migration state, then apply migrate_to."""
        assert (
            self.migrate_from and self.migrate_to
        ), f"TestCase '{type(self).__name__}' must define migrate_from and migrate_to properties"
        migrate_from: List[Tuple[str, str]] = [(self.app, self.migrate_from)]
        migrate_to: List[Tuple[str, str]] = [(self.app, self.migrate_to)]
        executor = MigrationExecutor(connection)
        old_apps = executor.loader.project_state(migrate_from).apps
        # Reverse to the original migration
        executor.migrate(migrate_from)
        self.setUpBeforeMigration(old_apps)
        # Run the migration to test
        executor = MigrationExecutor(connection)
        executor.loader.build_graph() # reload.
        executor.migrate(migrate_to)
        # Post-migration model registry for the test's assertions.
        self.apps = executor.loader.project_state(migrate_to).apps
    def setUpBeforeMigration(self, apps: StateApps) -> None:
        # Hook for subclasses: create pre-migration state using the
        # old-state `apps` registry passed in.
        pass # nocoverage
| {
"content_hash": "a5fb0970736e8d35ba1954bee90e0435",
"timestamp": "",
"source": "github",
"line_count": 1822,
"max_line_length": 103,
"avg_line_length": 38.37980241492865,
"alnum_prop": 0.6168916599931358,
"repo_name": "kou/zulip",
"id": "6803902000ce003ddafe007dc71cf9624df70812",
"size": "69928",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "zerver/lib/test_classes.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "433376"
},
{
"name": "Dockerfile",
"bytes": "2941"
},
{
"name": "Emacs Lisp",
"bytes": "157"
},
{
"name": "HTML",
"bytes": "635452"
},
{
"name": "Handlebars",
"bytes": "235334"
},
{
"name": "JavaScript",
"bytes": "3361648"
},
{
"name": "Perl",
"bytes": "8594"
},
{
"name": "Puppet",
"bytes": "79932"
},
{
"name": "Python",
"bytes": "8142846"
},
{
"name": "Ruby",
"bytes": "8480"
},
{
"name": "Shell",
"bytes": "134587"
},
{
"name": "TypeScript",
"bytes": "20233"
}
],
"symlink_target": ""
} |
import web
import requests
import uuid
from config import *
# Pushbullet API credentials (from config.py).
accesstoken = PB_ACCESS_TOKEN
clientid = PB_CLIENT_ID
clientsecret = PB_CLIENT_SECRET
# web.py routing table: (url-pattern, handler-class-name) pairs.
# Bug fix: a missing comma after 'index' caused implicit string
# concatenation ('index' '/auth_complete' -> 'index/auth_complete'),
# which broke routing for both pages.
urls = (
    '/', 'index',
    '/auth_complete', 'auth_complete'
)
app = web.application(urls, globals())
render = web.template.render('templates/')
class index(object):
    """Landing page: collect Shacknews credentials, register a rich
    notification client with the WinChatty API, then offer the
    Pushbullet account-linking step."""
    def GET(self):
        # Render the Shacknews login form.
        return render.shack_form()
    def POST(self):
        # Login to shack, if auth is good, register a new rich notification
        # client with winchatty api using those same credentials
        form = web.input()
        shackusername = form.username
        payload = {'username': form.username, 'password': form.password}
        r = requests.post('https://www.winchatty.com/v2/verifyCredentials', data=payload)
        # NOTE(review): the verifyCredentials response is never checked, so
        # registration proceeds even on bad credentials -- confirm intent.
        namespace = shackusername + '.winchatty.com'
        namespace = namespace.encode('utf-8')
        # Deterministic per-user client id derived from the username.
        winchattyuuid = uuid.uuid5(uuid.NAMESPACE_DNS, namespace)
        # Bug fix: this previously sent the `uuid` *module* object as 'id'
        # instead of the generated winchattyuuid.
        payload = {'id': winchattyuuid, 'name': 'Shackbullet', 'username': form.username, 'password': form.password}
        r = requests.post("https://winchatty.com/v2/notifications/registerRichClient", data=payload)
        # If notification client was successful, display a button(?) to link
        # pushbullet account to shackbullet
        pbclient_id = PB_CLIENT_ID
        pbredirect_uri = 'http://localhost:8080/auth_complete?uuid=' + str(winchattyuuid.hex)
        return render.pushbullet_form( pbclient_id = pbclient_id, pbredirect_uri = pbredirect_uri )
class auth_complete(object):
    """Pushbullet OAuth redirect target: exchanges the authorization
    `code` for a client access token."""
    def GET(self):
        user_data = web.input()
        # Renamed from `uuid` to avoid shadowing the stdlib uuid module.
        winchatty_uuid = user_data.uuid
        pbcode = user_data.code
        # get client access token from code
        headers = {'Access-Token': accesstoken}
        # Bug fix: the dict literal previously used `=` instead of `:` for
        # client_secret/code/grant_type, which is a SyntaxError.
        payload = {
            'client_id': clientid,
            'client_secret': clientsecret,
            'code': pbcode,
            'grant_type': 'authorization_code',
        }
        r = requests.post('https://api.pushbullet.com/oauth2/token', headers=headers, data=payload)
        # Bug fix: use requests' built-in JSON decoding; `json` was never
        # imported in this module, so json.loads raised NameError.
        codedata = r.json()
        clientpbkey = codedata['access_token']
        # TODO: save clientpbkey and winchatty_uuid somewhere for use with
        # checker.py.
# Run the web.py development server when executed directly.
if __name__ == "__main__":
    app.run()
"content_hash": "532211b8583e39c314c66611bf5133c7",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 124,
"avg_line_length": 39.43396226415094,
"alnum_prop": 0.6813397129186602,
"repo_name": "Haxim/ShackBullet",
"id": "88c73d1b343caa5408a040477ecc0b1b147fa6a0",
"size": "2090",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "shackbullet/webpage.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "313"
},
{
"name": "Python",
"bytes": "4644"
}
],
"symlink_target": ""
} |
import time
import math
from whirlybird.server.devices.i2c import I2c
from enum import Enum
AdafruitPwmAddress = 0x40
class AdafruitPwmRegisters(Enum):
    """Register addresses of the PWM controller."""
    # Mode/configuration registers.
    Mode1 = 0x00
    Mode2 = 0x01
    Subaddr1 = 0x02
    Subaddr2 = 0x03
    Subaddr3 = 0x04
    # Output frequency prescaler.
    Prescale = 0xFE
    # Channel-0 on/off tick registers; set_pwm addresses later channels
    # at +4-byte offsets from these.
    Led0OnL = 0x06
    Led0OnH = 0x07
    Led0OffL = 0x08
    Led0OffH = 0x09
    # Broadcast on/off tick registers affecting all channels at once.
    AllLedOnL = 0xFA
    AllLedOnH = 0xFB
    AllLedOffL = 0xFC
    AllLedOffH = 0xFD
class AdafruitPwmBits(Enum):
    """Bit masks used within the MODE registers.

    NOTE: Sleep and Invert share the value 0x10, so Enum makes Invert an
    alias of Sleep (AdafruitPwmBits.Invert is AdafruitPwmBits.Sleep);
    `.value` access behaves the same either way.
    """
    Restart = 0x80
    Sleep = 0x10
    AllCall = 0x01
    Invert = 0x10
    OutDrv = 0x04
class AdafruitPwm(I2c):
    """I2C driver for the Adafruit PWM controller.

    Provides per-channel and broadcast PWM tick-count control plus
    output-frequency configuration.
    """
    def __init__(self, address=AdafruitPwmAddress):
        I2c.__init__(self, address)
        # Start with every channel off, configure output drive mode,
        # enable the all-call address, then wake the chip from sleep.
        self.set_all_pwm(0, 0)
        self.write_8(AdafruitPwmRegisters.Mode2.value, AdafruitPwmBits.OutDrv.value)
        self.write_8(AdafruitPwmRegisters.Mode1.value, AdafruitPwmBits.AllCall.value)
        # Wait for oscillator
        time.sleep(0.005)
        mode1 = self.read_byte(AdafruitPwmRegisters.Mode1.value)
        # wake up (reset sleep)
        mode1 &= ~AdafruitPwmBits.Sleep.value
        self.write_8(AdafruitPwmRegisters.Mode1.value, mode1)
        # Wait for oscillator
        time.sleep(0.005)
    def _check_connected_device(self):
        # No probe implemented; unconditionally report the device present.
        return True, ''
    def set_pwm_frequency(self, frequency):
        """Set the PWM output frequency (Hz) for all channels.

        The chip must be put to sleep while the prescaler is written,
        then restarted.
        """
        prescaleval = 25000000.0 # 25MHz
        prescaleval /= 4096.0 # 12-bit
        prescaleval /= float(frequency)
        prescaleval -= 1.0
        # Round to the nearest integer prescale value.
        prescale = math.floor(prescaleval + 0.5)
        old_mode = self.read_byte(AdafruitPwmRegisters.Mode1.value)
        # go to sleep
        new_mode = (old_mode & 0x7F) | AdafruitPwmBits.Sleep.value
        self.write_8(AdafruitPwmRegisters.Mode1.value, new_mode)
        self.write_8(AdafruitPwmRegisters.Prescale.value, int(math.floor(prescale)))
        self.write_8(AdafruitPwmRegisters.Mode1.value, old_mode)
        time.sleep(0.005)
        self.write_8(AdafruitPwmRegisters.Mode1.value, old_mode | AdafruitPwmBits.Restart.value)
    def set_pwm(self, channel, on, off):
        """Set the 12-bit on/off tick counts for a single channel.

        Each channel's four registers sit at a +4-byte offset from the
        channel-0 registers; values are written low byte then high byte.
        """
        self.write_8(AdafruitPwmRegisters.Led0OnL.value + 4 * channel, on & 0xFF)
        self.write_8(AdafruitPwmRegisters.Led0OnH.value + 4 * channel, on >> 8)
        self.write_8(AdafruitPwmRegisters.Led0OffL.value + 4 * channel, off & 0xFF)
        self.write_8(AdafruitPwmRegisters.Led0OffH.value + 4 * channel, off >> 8)
    def set_all_pwm(self, on, off):
        """Set the on/off tick counts for every channel via the broadcast registers."""
        self.write_8(AdafruitPwmRegisters.AllLedOnL.value, on & 0xFF)
        self.write_8(AdafruitPwmRegisters.AllLedOnH.value, on >> 8)
        self.write_8(AdafruitPwmRegisters.AllLedOffL.value, off & 0xFF)
        self.write_8(AdafruitPwmRegisters.AllLedOffH.value, off >> 8)
if __name__ == '__main__':
pwm = AdafruitPwm()
servo_min = 150 # Min pulse length out of 4096
servo_max = 600 # Max pulse length out of 4096
pwm.set_pwm_frequency(60)
while 1:
pwm.set_pwm(0, 0, servo_min)
time.sleep(1)
pwm.set_pwm(0, 0, servo_max)
time.sleep(1)
| {
"content_hash": "d48063233bd21c20c7fd439da1cd5023",
"timestamp": "",
"source": "github",
"line_count": 99,
"max_line_length": 96,
"avg_line_length": 30.424242424242426,
"alnum_prop": 0.649734395750332,
"repo_name": "levisaya/whirlybird",
"id": "e17743918f0f2c8db0e88d31204c732eebd4f6e1",
"size": "3031",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "whirlybird/server/devices/adafruit_pwm.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Protocol Buffer",
"bytes": "231"
},
{
"name": "Python",
"bytes": "28171"
}
],
"symlink_target": ""
} |
class DfsNode:
White = 0
Grey = 1
Black = 2
def __init__( self ):
self._preVisit = -1
self._postVisit = -1
self._color = DfsNode.White
def setColor( self, color ):
assert color in ( DfsNode.White, DfsNode.Grey, DfsNode.Black ), "invalid color value"
self._color = color
def getColor( self ):
return self._color
def setColorRecursively( self, color ):
if self.getColor() == color:
return
self.setColor( color )
for child in self.getChildren():
child.setColorRecursively( color )
def setPreVisit( self, time ):
self._preVisit = time
def getPreVisit( self ):
return self._preVisit
def setPostVisit( self, time ):
if time <= self.getPreVisit():
raise Exception( "wrong post visit time: ", self.getPreVisit(), ", ", self.getPostVisit() )
self._postVisit = time
def getPostVisit( self ):
return self._postVisit
#
# DagNode
#
class DagNode( DfsNode ):
def __init__( self, data ):
DfsNode.__init__( self )
self._data = data
self._parents = set()
self._children = set()
def getData( self ):
return self._data
def setData( self, data ):
self._data = data
def addChild( self, node ):
self._children.add( node )
def getChildren( self ):
return self._children
def addParent( self, node ):
self._parents.add( node )
def getParents( self ):
return self._parents
def isRoot( self ):
return len( self.getParents() ) == 0
def isLeaf( self ):
return len( self.getChildren() ) == 0
def deepPrint( self, indent = 0 ):
print( indent * ' ', self._data )
for child in self.getChildren():
child.deepPrint( indent + 1 )
def __eq__( self, other ):
return self.getData() == other.getData()
def __hash__( self ):
return self._data.__hash__()
from stack import Stack
#
# Dag
#
class Dag:
def __init__( self, type = DagNode ):
self._type = type
self._nodes = {}
self._stack = Stack()
self._root = self._type( "root" )
self._stack.push( self._root )
def add( self, depth, object ):
assert depth > 0, 'depth cant be less equal zero'
if depth > self.__getDepth() + 1:
raise Exception( "Wrong depth, stack: ", self.__getDepth(), ", depth: ", depth )
depthDifference = self.__getDepth() - depth + 1
for i in range( 0, depthDifference ):
self._stack.pop()
assert self._stack.empty() == False, 'stack cant be empty'
header = self.__getOrCreate( object )
if self.__areConnected( self._stack.top(), header ) == False:
if self.__checkForCycle( self._stack.top(), header ) == False:
self.__connect( self._stack.top(), header )
self._stack.push( header )
return header
def get( self, object ):
if object not in self._nodes:
raise Exception( "object does not exist" )
return self._nodes[ object ]
def getNodes( self ):
return self._nodes.values()
def getRoot( self ):
return self._root
def deepPrint( self ):
self._root.deepPrint()
def __areConnected( self, node1, node2 ):
return node2 in node1.getChildren()
def __connect( self, node1, node2 ):
node1.addChild( node2 )
node2.addParent( node1 )
def __getDepth( self ):
return self._stack.size() - 1
def __getOrCreate( self, object ):
if object not in self._nodes:
self._nodes[ object ] = self._type( object )
return self._nodes[ object ]
def __checkForCycle( self, parent, node ):
result = self.__checkForCycleImpl( parent, node )
node.setColorRecursively( DfsNode.White )
return result
def __checkForCycleImpl( self, parent, node ):
if node.getColor() == DfsNode.Black:
return False
node.setColor( DfsNode.Black )
if parent == node:
return True
for child in node.getChildren():
if self.__checkForCycleImpl( parent, child ) == True:
return True
return False
| {
"content_hash": "c6c8a921f35a6269916f1bd061edbfeb",
"timestamp": "",
"source": "github",
"line_count": 178,
"max_line_length": 103,
"avg_line_length": 24.3876404494382,
"alnum_prop": 0.5549412577747063,
"repo_name": "wo3kie/pchGenerator",
"id": "b6bce09097370310ceb8be00f34f5345e6a27bdc",
"size": "4355",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dag.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "44587"
}
],
"symlink_target": ""
} |
from handlers.base import BaseHandler
import logging
logger = logging.getLogger('tutu.' + __name__)
class IndexHandler(BaseHandler):
def get(self):
self.render_jinja2("index.html")
| {
"content_hash": "ab7e9d95997b7b4f0a472f8239453eb0",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 46,
"avg_line_length": 21.77777777777778,
"alnum_prop": 0.7091836734693877,
"repo_name": "shizhz/tutu",
"id": "df6d7863dd8c9774096bf1446d6ed8a44b4b15e6",
"size": "221",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "handlers/index.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "592491"
},
{
"name": "HTML",
"bytes": "5763"
},
{
"name": "JavaScript",
"bytes": "10442"
},
{
"name": "Python",
"bytes": "94027"
},
{
"name": "Shell",
"bytes": "548"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from pychron.git.hosts.gitlab import GitLabService
from pychron.git.tasks.base_git_plugin import BaseGitPlugin
from pychron.git.tasks.githost_preferences import GitLabPreferencesPane
class GitLabPlugin(BaseGitPlugin):
name = 'GitLab'
service_klass = GitLabService
def _preferences_panes_default(self):
return [GitLabPreferencesPane]
# ============= EOF =============================================
| {
"content_hash": "aaa60923b2a4d11bcceebcd844ebe583",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 71,
"avg_line_length": 33,
"alnum_prop": 0.6796536796536796,
"repo_name": "UManPychron/pychron",
"id": "817976880254e0ea9b4fd43cfce45dff2ee7803f",
"size": "1394",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "pychron/git/tasks/gitlab_plugin.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "131"
},
{
"name": "C++",
"bytes": "3706"
},
{
"name": "CSS",
"bytes": "279"
},
{
"name": "Fortran",
"bytes": "455875"
},
{
"name": "HTML",
"bytes": "40346"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Processing",
"bytes": "11421"
},
{
"name": "Python",
"bytes": "10234954"
},
{
"name": "Shell",
"bytes": "10753"
}
],
"symlink_target": ""
} |
from teafacto.blocks.seq.enc import SimpleSeq2Bool, SimpleSeq2Vec, SimpleSeq2Idx
from teafacto.blocks.lang.sentenc import WordCharSentEnc
from teafacto.blocks.seq.rnn import EncLastDim
from teafacto.blocks.basic import VectorEmbed, SMOWrap
from teafacto.core.base import Val
from teafacto.util import argprun, ticktock, tokenize
from teafacto.procutil import wordmat2chartensor
import csv, numpy as np, sys
def readdata(trainp, testp, mode=None, masksym=-1, maxlen=100):
assert(mode is not None)
if mode is "char":
return readdata_char(trainp, testp, maxlen=maxlen, masksym=masksym)
elif mode is "word":
return readdata_word(trainp, testp, maxlen=maxlen, masksym=masksym)
elif mode is "wordchar":
(traindata, traingold), (testdata, testgold), dic = readdata_word(trainp, testp, maxlen=maxlen, masksym=masksym)
traindata = wordmat2chartensor(traindata, dic)
testdata = wordmat2chartensor(testdata, dic)
allchars = set(list(np.unique(traindata))).union(set(list(np.unique(testdata))))
allchars.remove(masksym)
chardic = dict(zip(allchars, range(len(allchars))))
chardic[masksym] = masksym
chartrans = np.vectorize(lambda x: chardic[x])
traindata = chartrans(traindata)
testdata = chartrans(testdata)
del chardic[masksym]
chardic = {chr(k): v for k, v in chardic.items()}
return (traindata, traingold), (testdata, testgold), chardic
def readdata_word(trainp, testp, maxlen=100, masksym=-1):
tt = ticktock("data reader")
def readdataset(p, wdic, maxlen=100):
dataret = []
goldret = []
toolong = 0
realmaxlen = 0
with open(p) as f:
data = csv.reader(f, delimiter=",")
for row in data:
rowelems = tokenize(row[2])
realmaxlen = max(realmaxlen, len(rowelems))
if len(rowelems) > maxlen:
toolong += 1
for rowelem in set(rowelems):
if rowelem not in wdic:
wdic[rowelem] = len(wdic)
dataret.append([wdic[x] for x in rowelems])
goldret.append(row[0])
print "{} comments were too long".format(toolong)
maxlen = min(maxlen, realmaxlen)
datamat = np.ones((len(dataret) - 1, maxlen)).astype("int32") * masksym
for i in range(1, len(dataret)):
datamat[i - 1, :min(len(dataret[i]), maxlen)] = dataret[i][:min(len(dataret[i]), maxlen)]
return datamat, np.asarray(goldret[1:], dtype="int32"), wdic
tt.tick("reading data")
traindata, traingold, wdic = readdataset(trainp, {}, maxlen=maxlen)
testdata, testgold, wdic = readdataset(testp, wdic=wdic, maxlen=maxlen)
tt.tock("data read")
return (traindata, traingold), (testdata, testgold), wdic
def readdata_char(trainp, testp, maxlen=1000, masksym=-1):
tt = ticktock("data reader")
def readdataset(p):
dataret = []
goldret = []
toolong = 0
with open(p) as f:
data = csv.reader(f, delimiter=",")
for row in data:
if len(row[2]) > maxlen:
toolong += 1
dataret.append([ord(x) for x in row[2]])
goldret.append(row[0])
print "{} comments were too long".format(toolong)
datamat = np.ones((len(dataret)-1, maxlen)).astype("int32") * masksym
for i in range(1, len(dataret)):
datamat[i-1, :min(len(dataret[i]), maxlen)] = dataret[i][:min(len(dataret[i]), maxlen)]
return datamat, np.asarray(goldret[1:], dtype="int32")
tt.tick("reading data")
traindata, traingold = readdataset(trainp)
testdata, testgold = readdataset(testp)
allchars = set(list(np.unique(traindata))).union(set(list(np.unique(testdata))))
chardic = dict(zip(list(allchars), range(len(allchars))))
chardic[masksym] = masksym
traindata = np.vectorize(lambda x: chardic[x])(traindata)
testdata = np.vectorize(lambda x: chardic[x])(testdata)
chardic = {chr(k): v for k, v in chardic.items() if k != masksym}
tt.tock("data read")
return (traindata, traingold), (testdata, testgold), chardic
def run(epochs=50,
numbats=25,
lr=0.1,
layers=1,
embdim=100,
encdim=200,
bidir=False,
mode="wordchar", # "char" or "word" or "wordchar"
maxlen=75,
maxwordlen=15,
):
maskid = -1
(traindata, traingold), (testdata, testgold), dic = \
readdata("../../../data/hatespeech/train.csv",
"../../../data/hatespeech/test.csv",
masksym=maskid, mode=mode, maxlen=maxlen)
# data stats
print "class distribution in train: {}% positive".format(np.sum(traingold)*1. / np.sum(np.ones_like(traingold)))
print "class distribution in test: {}% positive".format(np.sum(testgold)*1. / np.sum(np.ones_like(testgold)))
inpemb = VectorEmbed(indim=len(dic), dim=embdim)
encdim = [encdim] * layers
if mode == "wordchar":
enc = WordCharSentEnc(charemb=inpemb, charinnerdim=embdim,
wordemb=False, wordinnerdim=encdim,
maskid=maskid, bidir=bidir)
else:
enc = SimpleSeq2Vec(inpemb=inpemb, innerdim=encdim, maskid=maskid, bidir=bidir)
m = SMOWrap(enc, outdim=2, nobias=True)
#print enc.predict(traindata[:5, :])
m = m.train([traindata], traingold)\
.adadelta(lr=lr).grad_total_norm(1.0)\
.cross_entropy().split_validate(6, random=True).cross_entropy().accuracy()\
.train(numbats=numbats, epochs=epochs)
m.save("hatemodel.{}.Emb{}D.Enc{}D.{}L.model".format(mode, embdim, encdim, layers))
if __name__ == "__main__":
argprun(run) | {
"content_hash": "45605b7a0b9b1368eb8c02bff810ae88",
"timestamp": "",
"source": "github",
"line_count": 145,
"max_line_length": 120,
"avg_line_length": 40.213793103448275,
"alnum_prop": 0.6100154347453267,
"repo_name": "lukovnikov/teafacto",
"id": "b44cab4055a428c6e8419c4b167ae553cb9e4d4d",
"size": "5831",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "teafacto/scripts/hatespeech/classify.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "56665"
},
{
"name": "Python",
"bytes": "819448"
},
{
"name": "Shell",
"bytes": "102"
}
],
"symlink_target": ""
} |
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
cwd = os.getcwd()
parent = os.path.dirname(cwd)
sys.path.insert(0, parent)
import jinja2_stl
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Jinja2 Filestorage Template Loader'
copyright = u'2014, Jindřich Smitka'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = jinja2_stl.__version__
# The full version, including alpha/beta/rc tags.
release = jinja2_stl.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'jinja2_stldoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'jinja2_stl.tex', u'Jinja2 Filestorage Template Loader Documentation',
u'Jindřich Smitka', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'jinja2_stl', u'Jinja2 Filestorage Template Loader Documentation',
[u'Jindřich Smitka'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'jinja2_stl', u'Jinja2 Filestorage Template Loader Documentation',
u'Jindřich Smitka', 'jinja2_stl', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False | {
"content_hash": "972ae75b08a0ce180750ef55c7571113",
"timestamp": "",
"source": "github",
"line_count": 241,
"max_line_length": 82,
"avg_line_length": 32.41908713692946,
"alnum_prop": 0.7060028158197875,
"repo_name": "s-m-i-t-a/jinja2-stl",
"id": "1f88c122a8b2fe0a50f6195f461a290bf34b64bc",
"size": "8238",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/conf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "241452"
},
{
"name": "Shell",
"bytes": "6466"
}
],
"symlink_target": ""
} |
def run(whatweb, pluginname):
whatweb.recog_from_content(pluginname, "cnzz.com")
| {
"content_hash": "40cb3e86a137e4a2533018e3805daced",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 54,
"avg_line_length": 28.666666666666668,
"alnum_prop": 0.7325581395348837,
"repo_name": "cflq3/getcms",
"id": "fe9dcc9cc99062d645a6bf6963b567e361894ccb",
"size": "128",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "plugins/cnzz_app.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "38646"
}
],
"symlink_target": ""
} |
from sklearn.utils import check_random_state
from .base import base_minimize
from ..utils import cook_estimator
def gbrt_minimize(func, dimensions, base_estimator=None,
n_calls=100, n_random_starts=None,
n_initial_points=10,
initial_point_generator="random",
acq_func="EI", acq_optimizer="auto",
x0=None, y0=None, random_state=None, verbose=False,
callback=None, n_points=10000, xi=0.01, kappa=1.96,
n_jobs=1, model_queue_size=None):
"""Sequential optimization using gradient boosted trees.
Gradient boosted regression trees are used to model the (very)
expensive to evaluate function `func`. The model is improved
by sequentially evaluating the expensive function at the next
best point. Thereby finding the minimum of `func` with as
few evaluations as possible.
The total number of evaluations, `n_calls`, are performed like the
following. If `x0` is provided but not `y0`, then the elements of `x0`
are first evaluated, followed by `n_initial_points` evaluations.
Finally, `n_calls - len(x0) - n_initial_points` evaluations are
made guided by the surrogate model. If `x0` and `y0` are both
provided then `n_initial_points` evaluations are first made then
`n_calls - n_initial_points` subsequent evaluations are made
guided by the surrogate model.
The first `n_initial_points` are generated by the
`initial_point_generator`.
Parameters
----------
func : callable
Function to minimize. Should take a single list of parameters
and return the objective value.
If you have a search-space where all dimensions have names,
then you can use `skopt.utils.use_named_args` as a decorator
on your objective function, in order to call it directly
with the named arguments. See `use_named_args` for an example.
dimensions : list, shape (n_dims,)
List of search space dimensions.
Each search dimension can be defined either as
- a `(lower_bound, upper_bound)` tuple (for `Real` or `Integer`
dimensions),
- a `(lower_bound, upper_bound, "prior")` tuple (for `Real`
dimensions),
- as a list of categories (for `Categorical` dimensions), or
- an instance of a `Dimension` object (`Real`, `Integer` or
`Categorical`).
base_estimator : `GradientBoostingQuantileRegressor`
The regressor to use as surrogate model
n_calls : int, default: 100
Number of calls to `func`.
n_random_starts : int, default: None
Number of evaluations of `func` with random points before
approximating it with `base_estimator`.
.. deprecated:: 0.8
use `n_initial_points` instead.
n_initial_points : int, default: 10
Number of evaluations of `func` with initialization points
before approximating it with `base_estimator`. Initial point
generator can be changed by setting `initial_point_generator`.
initial_point_generator : str, InitialPointGenerator instance, \
default: `"random"`
Sets a initial points generator. Can be either
- `"random"` for uniform random numbers,
- `"sobol"` for a Sobol' sequence,
- `"halton"` for a Halton sequence,
- `"hammersly"` for a Hammersly sequence,
- `"lhs"` for a latin hypercube sequence,
- `"grid"` for a uniform grid sequence
acq_func : string, default: `"LCB"`
Function to minimize over the forest posterior. Can be either
- `"LCB"` for lower confidence bound.
- `"EI"` for negative expected improvement.
- `"PI"` for negative probability of improvement.
- ``"EIps"`` for negated expected improvement per second to take into
account the function compute time. Then, the objective function is
assumed to return two values, the first being the objective value and
the second being the time taken.
- `"PIps"` for negated probability of improvement per second.
x0 : list, list of lists or `None`
Initial input points.
- If it is a list of lists, use it as a list of input points.
- If it is a list, use it as a single initial input point.
- If it is `None`, no initial input points are used.
y0 : list, scalar or `None`
Evaluation of initial input points.
- If it is a list, then it corresponds to evaluations of the function
at each element of `x0` : the i-th element of `y0` corresponds
to the function evaluated at the i-th element of `x0`.
- If it is a scalar, then it corresponds to the evaluation of the
function at `x0`.
- If it is None and `x0` is provided, then the function is evaluated
at each element of `x0`.
random_state : int, RandomState instance, or None (default)
Set random state to something other than None for reproducible
results.
verbose : boolean, default: False
Control the verbosity. It is advised to set the verbosity to True
for long optimization runs.
callback : callable, optional
If provided, then `callback(res)` is called after call to func.
n_points : int, default: 10000
Number of points to sample when minimizing the acquisition function.
xi : float, default: 0.01
Controls how much improvement one wants over the previous best
values. Used when the acquisition is either `"EI"` or `"PI"`.
kappa : float, default: 1.96
Controls how much of the variance in the predicted values should be
taken into account. If set to be very high, then we are favouring
exploration over exploitation and vice versa.
Used when the acquisition is `"LCB"`.
n_jobs : int, default: 1
The number of jobs to run in parallel for `fit` and `predict`.
If -1, then the number of jobs is set to the number of cores.
model_queue_size : int or None, default: None
Keeps list of models only as long as the argument given. In the
case of None, the list has no capped length.
Returns
-------
res : `OptimizeResult`, scipy object
The optimization result returned as a OptimizeResult object.
Important attributes are:
- `x` [list]: location of the minimum.
- `fun` [float]: function value at the minimum.
- `models`: surrogate models used for each iteration.
- `x_iters` [list of lists]: location of function evaluation for each
iteration.
- `func_vals` [array]: function value for each iteration.
- `space` [Space]: the optimization space.
- `specs` [dict]`: the call specifications.
- `rng` [RandomState instance]: State of the random state
at the end of minimization.
For more details related to the OptimizeResult object, refer
http://docs.scipy.org/doc/scipy/reference/generated/scipy.optimize.OptimizeResult.html
.. seealso:: functions :class:`skopt.gp_minimize`,
:class:`skopt.dummy_minimize`, :class:`skopt.forest_minimize`
"""
# Check params
rng = check_random_state(random_state)
if base_estimator is None:
base_estimator = cook_estimator("GBRT", random_state=rng,
n_jobs=n_jobs)
return base_minimize(func, dimensions, base_estimator,
n_calls=n_calls, n_points=n_points,
n_random_starts=n_random_starts,
n_initial_points=n_initial_points,
initial_point_generator=initial_point_generator,
x0=x0, y0=y0, random_state=random_state, xi=xi,
kappa=kappa, acq_func=acq_func, verbose=verbose,
callback=callback, acq_optimizer="sampling",
model_queue_size=model_queue_size)
| {
"content_hash": "5a7ff8a12ba8487f23ec3444f59052d2",
"timestamp": "",
"source": "github",
"line_count": 186,
"max_line_length": 94,
"avg_line_length": 43.376344086021504,
"alnum_prop": 0.6378284581060981,
"repo_name": "scikit-optimize/scikit-optimize",
"id": "de43d6fccf3a1b232ed23383278671f3b358e7ae",
"size": "8069",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "skopt/optimizer/gbrt.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "1667"
},
{
"name": "Python",
"bytes": "512627"
},
{
"name": "Shell",
"bytes": "1645"
}
],
"symlink_target": ""
} |
import argparse
import itertools
import json
import collections
import datetime
import sys
import mercantile
import shapely.geometry
MIN_DATE = '0000-00-00'
MAX_DATE = '9999-99-99'
MIN_ZOOM = 0
MAX_ZOOM = 19
cache_down = {}
cache_up = {}
cache_center = {}
cache_date = {}
cache_in_bound = {}
def get_down_tiles(x, y, z, target_zoom):
assert z <= target_zoom, 'target zoom less than zoom %s <= %s' % (z, target_zoom)
k = (x, y, z, target_zoom)
if k not in cache_down:
if z == target_zoom:
result = [(x, y, z)]
else:
result = []
for t in mercantile.children(x, y, z):
result += get_down_tiles(t.x, t.y, t.z, target_zoom)
cache_down[k] = tuple(result)
return result
return cache_down[k]
def get_up_tile(x, y, z, target_zoom):
assert z >= target_zoom, 'target zoom more than zoom %s >= %s' % (z, target_zoom)
k = (x, y, z, target_zoom)
if k not in cache_up:
if z == target_zoom:
result = (x, y, z)
else:
t = mercantile.parent(x, y, z)
result = get_up_tile(t.x, t.y, t.z, target_zoom)
cache_up[k] = result
return result
return cache_up[k]
def get_date_precision(date, date_prec, date_prec_measure):
if date not in cache_date:
old_date = date
if date_prec_measure == 'd':
old_part = int(date[8:])
new_part = old_part // date_prec * date_prec + (1 if old_part % date_prec else 0)
date = '%s-%02d' % (date[:7], new_part)
elif date_prec_measure == 'm':
old_part = int(date[5:7])
new_part = old_part // date_prec * date_prec + (1 if old_part % date_prec else 0)
date = '%s-%02d-01' % (date[:4], new_part)
elif date_prec_measure == 'y':
old_part = int(date[:4])
new_part = old_part // date_prec * date_prec + (1 if old_part % date_prec else 0)
date = '%04d-01-01' % (new_part)
else:
raise TypeError('unknown date precision measure %s' % date_prec_measure)
cache_date[old_date] = date
return date
return cache_date[date]
def calculate_center(x, y, z):
k = (x, y, z)
if k not in cache_center:
bounds = mercantile.bounds(x, y, z)
height = bounds.north - bounds.south
width = bounds.east - bounds.west
center = (bounds.north + height / 2, bounds.west + width / 2)
cache_center[k] = center
return center
return cache_center[k]
def in_boundaries(k, lat, lon, boundary, west, south, east, north):
if k not in cache_in_bound:
in_bounds = lat < north and lat > south and lon > west and lon < east
if in_bounds:
in_bounds = boundary.contains(shapely.geometry.Point(lon, lat))
cache_in_bound[k] = in_bounds
return in_bounds
return cache_in_bound[k]
FIELD_VALUES = (
('data', lambda k, date, count, *args, **kwargs: date, []),
('count', lambda k, date, count, *args, **kwargs: count, []),
('z', lambda k, date, count, z, x, y, *args, **kwargs: z, ['no_xyz']),
('x', lambda k, date, count, z, x, y, *args, **kwargs: x, ['no_xyz']),
('y', lambda k, date, count, z, x, y, *args, **kwargs: y, ['no_xyz']),
('lat', lambda k, date, count, z, x, y, lat, lon, *args, **kwargs: lat, ['no_latlon']),
('lon', lambda k, date, count, z, x, y, lat, lon, *args, **kwargs: lon, ['no_latlon']),
('per_day', lambda k, date, count, *args, **kwargs: count / kwargs['days'], ['no_per_day']),
('countries', lambda k, date, count, z, x, y, lat, lon, countries, *args, **kwargs: countries, ['no_countries']),
)
def flush_fields(stdout, date, count, z, x, y, lat, lon, countries, extra, headers=False, **kwargs):
    """Write one CSV row (or, when *headers* is true, the header row) to *stdout*."""
    tile_key = '%s/%s/%s' % (z, x, y)
    row = []
    for name, extract, suppressors in FIELD_VALUES:
        # Honour the --no_* options: skip any suppressed column entirely.
        if any(kwargs.get(flag) for flag in suppressors):
            continue
        if headers:
            row.append(name)
        else:
            row.append(extract(tile_key, date, count, z, x, y, lat, lon, countries, extra, **kwargs))
    if extra is not None:
        row.append(extra)
    line = ','.join(str(cell) for cell in row)
    stdout.write(('%s\n' % line).encode())
def flush(stdout, tiles, min_count, max_count, boundaries, **kwargs):
    """Emit accumulated *tiles* as CSV rows and return a fresh accumulator.

    Tiles whose count falls outside [min_count, max_count] are dropped.  When
    *boundaries* is given, a tile is written once per boundary polygon that
    contains its center, tagged with that boundary's extra column.
    """
    for key, count in tiles.items():
        if (min_count and count < min_count) or (max_count and count > max_count):
            continue
        date, z, x, y, countries = key
        lat, lon = calculate_center(x, y, z)
        if boundaries is None:
            flush_fields(stdout, date, count, z, x, y, lat, lon, countries, None, **kwargs)
        else:
            for boundary, bbox, extra, boundary_id in boundaries:
                memo_key = '%s/%s/%s' % (lat, lon, boundary_id)
                if in_boundaries(memo_key, lat, lon, boundary, *bbox):
                    flush_fields(stdout, date, count, z, x, y, lat, lon, countries, extra, **kwargs)
    return collections.defaultdict(int)
def split(stdin, stdout, date_precision=None, per_day=False,
          boundaries=tuple(), boundary_buffer=None,
          date_from=None, date_to=None,
          min_count=None, max_count=None,
          min_zoom=None, max_zoom=None,
          min_subz=None, max_subz=None,
          extras=tuple(), extra_header=None, **kwargs):
    """Aggregate per-tile counts from *stdin* CSV and write them to *stdout*.

    Input rows are ``date,z,x,y,count,lat,lon,countries`` (bytes).  Counts are
    bucketed by (date, tile, countries), re-projected into the
    [min_subz, max_subz] zoom range, filtered by date/zoom/count/boundaries,
    and flushed whenever the (possibly precision-reduced) date changes.

    per_day and extra_header are accepted for CLI compatibility; behavior is
    keyed off kwargs ('no_per_day') and the extras list instead.
    """
    if not kwargs.get('no_per_day'):
        # The per_day column divides by the span of the requested date range,
        # so both bounds must be present and ordered.
        date_from_parsed = datetime.datetime.strptime(date_from, '%Y-%m-%d')
        date_to_parsed = datetime.datetime.strptime(date_to, '%Y-%m-%d')
        assert date_from_parsed
        assert date_to_parsed
        assert date_from_parsed < date_to_parsed
        kwargs['days'] = (date_to_parsed - date_from_parsed).days
    if not kwargs.get('no_header'):
        flush_fields(stdout, 'date', 'count', 'z', 'x', 'y', 'lat', 'lon', 'countries',
                     ','.join(extras) or None, headers=True, **kwargs)
    # Pair each boundary with its extra column value; id() is a stable memo key.
    boundaries_geom = []
    for boundary, extra in itertools.zip_longest(boundaries, extras):
        if isinstance(boundary, str):
            # Bugfix: close the GeoJSON file handle instead of leaking it.
            with open(boundary) as geojson_file:
                boundary = shapely.geometry.shape(json.load(geojson_file))
        if boundary_buffer is not None:
            boundary = boundary.buffer(boundary_buffer)
        boundaries_geom.append((boundary, boundary.bounds, extra, id(boundary)))
    boundaries_geom = boundaries_geom or None
    if date_precision:
        date_prec = float(date_precision[:-1])
        date_prec_measure = date_precision[-1:]
    date_from = date_from or MIN_DATE
    date_to = date_to or MAX_DATE
    min_zoom = min_zoom or MIN_ZOOM
    max_zoom = max_zoom or MAX_ZOOM
    min_subz = min_subz or min_zoom
    max_subz = max_subz or max_zoom
    assert date_from <= date_to
    assert min_zoom <= max_zoom
    assert min_subz <= max_subz
    tiles = collections.defaultdict(int)
    start = datetime.datetime.now()
    flush_date = None
    for line in stdin:
        date, z, x, y, count, lat, lon, countries = line.decode().strip().split(',')
        if not date_from <= date <= date_to:
            continue
        count = int(count)
        x = int(x)
        y = int(y)
        z = int(z)
        if not min_zoom <= z <= max_zoom:
            continue
        # Bugfix: guard matches the truthiness check used when parsing the
        # precision above ('' previously slipped past `is not None` and hit
        # an undefined date_prec).
        if date_precision:
            date = get_date_precision(date, date_prec, date_prec_measure)
        if flush_date is None:
            start = datetime.datetime.now()
            flush_date = date
        if date != flush_date:
            sys.stderr.write('%s - %s\n' % (flush_date, datetime.datetime.now() - start))
            # Bugfix: keep the fresh accumulator returned by flush().  The
            # return value was previously discarded, so counts accumulated
            # across dates and earlier tiles were re-emitted on every flush.
            tiles = flush(stdout, tiles, min_count, max_count, boundaries_geom, **kwargs)
            flush_date = date
            start = datetime.datetime.now()
        # Bugfix: the three redistribution cases are mutually exclusive.  The
        # original chained plain `if`s fell through after rebinding (x, y, z),
        # counting the last sub-tile (or the up-tile) a second time.
        if z < min_subz:
            for x, y, z in get_down_tiles(x, y, z, min_subz):
                tiles[(date, z, x, y, countries)] += count
        elif z > max_subz:
            x, y, z = get_up_tile(x, y, z, max_subz)
            tiles[(date, z, x, y, countries)] += count
        else:
            tiles[(date, z, x, y, countries)] += count
    sys.stderr.write('%s - %s\n' % (flush_date, datetime.datetime.now() - start))
    flush(stdout, tiles, min_count, max_count, boundaries_geom, **kwargs)
if __name__ == '__main__':
    # CLI front end: every flag name maps 1:1 onto a split() keyword argument.
    parser = argparse.ArgumentParser(description='Agregate OSM access logs.')
    parser.add_argument('--date_from', default=None)
    parser.add_argument('--date_to', default=None)
    parser.add_argument('--date_precision', default=None)
    parser.add_argument('--boundaries', action='append', default=[])
    parser.add_argument('--boundary_buffer', type=float, default=None)
    for int_option in ('min_zoom', 'max_zoom', 'min_subz', 'max_subz',
                       'min_count', 'max_count'):
        parser.add_argument('--%s' % int_option, type=int, default=None)
    for bool_option in ('no_header', 'no_xyz', 'no_latlon', 'no_per_day',
                        'no_countries'):
        parser.add_argument('--%s' % bool_option, action='store_true')
    # Binary std streams: Python 3 exposes them on .buffer, Python 2 directly.
    stdin = sys.stdin if sys.version_info.major == 2 else sys.stdin.buffer
    stdout = sys.stdout if sys.version_info.major == 2 else sys.stdout.buffer
    split(stdin, stdout, **vars(parser.parse_args()))
| {
"content_hash": "e7296ccea7a9441e6a76eb4a3188ab8a",
"timestamp": "",
"source": "github",
"line_count": 255,
"max_line_length": 117,
"avg_line_length": 37.792156862745095,
"alnum_prop": 0.580886167894573,
"repo_name": "tbicr/map-trends",
"id": "b5e2a5a7b1f6893f2124fe6d38a67986357a1b82",
"size": "9637",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bubble.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "37626"
},
{
"name": "Shell",
"bytes": "1746"
}
],
"symlink_target": ""
} |
from sample_module.example import redis_cache
def test_redislite_can_set():
    """A redislite-backed cache accepts a write and reads it back as bytes."""
    cache = redis_cache()
    assert cache.set('foo', 'bar') is True
    assert cache.get('foo') == b'bar'
| {
"content_hash": "cefa0b5f9d1df1d875708fa6f065515a",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 45,
"avg_line_length": 24.714285714285715,
"alnum_prop": 0.6416184971098265,
"repo_name": "bionikspoon/isolate_redislite_tox_psutil",
"id": "a7e0691da6728f5adfbc20d231ddefb3926301cc",
"size": "210",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_example.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1890"
},
{
"name": "Python",
"bytes": "2449"
}
],
"symlink_target": ""
} |
"""Classes supporting configuration property editor and REST operations."""
__author__ = 'Pavel Simakov (psimakov@google.com)'
import cgi
import urllib
from controllers import sites
from controllers.utils import BaseRESTHandler
from controllers.utils import XsrfTokenManager
from models import config
from models import courses
from models import models
from models import roles
from models import transforms
from modules.oeditor import oeditor
from google.appengine.api import users
from google.appengine.ext import db
# This is a template because the value type is not yet known.
SCHEMA_JSON_TEMPLATE = """
{
"id": "Configuration Property",
"type": "object",
"description": "Configuration Property Override",
"properties": {
"name" : {"type": "string"},
"value": {"optional": true, "type": "%s"},
"is_draft": {"type": "boolean"}
}
}
"""
# This is a template because the doc_string is not yet known.
SCHEMA_ANNOTATIONS_TEMPLATE = [
(['title'], 'Configuration Property Override'),
(['properties', 'name', '_inputex'], {
'label': 'Name', '_type': 'uneditable'}),
oeditor.create_bool_select_annotation(
['properties', 'is_draft'], 'Status', 'Pending', 'Active',
description='<strong>Active</strong>: This value is active and '
'overrides all other defaults.<br/><strong>Pending</strong>: This '
'value is not active yet, and the default settings still apply.')]
class ConfigPropertyRights(object):
    """Manages view/edit rights for configuration properties."""

    @classmethod
    def can_edit(cls):
        """Only site super-admins may change configuration overrides."""
        return roles.Roles.is_super_admin()

    @classmethod
    def can_view(cls):
        # Viewing is as restricted as editing.
        return cls.can_edit()

    @classmethod
    def can_delete(cls):
        return cls.can_edit()

    @classmethod
    def can_add(cls):
        return cls.can_edit()
class ConfigPropertyEditor(object):
    """An editor for any configuration property."""

    # Map of configuration property type into inputex type.
    type_map = {str: 'string', int: 'integer', bool: 'boolean'}

    @classmethod
    def get_schema_annotations(cls, config_property):
        """Gets editor specific schema annotations for one property."""
        doc_string = '%s Default: \'%s\'.' % (
            config_property.doc_string, config_property.default_value)
        # Copy the template so the shared module-level list is not mutated.
        item_dict = [] + SCHEMA_ANNOTATIONS_TEMPLATE
        item_dict.append((
            ['properties', 'value', '_inputex'], {
                'label': 'Value', '_type': '%s' % cls.get_value_type(
                    config_property),
                'description': doc_string}))
        return item_dict

    @classmethod
    def get_value_type(cls, config_property):
        """Gets an editor specific type for the property.

        Raises:
            Exception: if the property's value_type is not in type_map.
        """
        # Bugfix: use .get() so the unknown-type guard below is reachable;
        # a direct [] lookup raised KeyError before the check could run.
        value_type = cls.type_map.get(config_property.value_type)
        if not value_type:
            # Bugfix: '%' interpolation; a comma produced a two-arg Exception.
            raise Exception('Unknown type: %s' % config_property.value_type)
        if config_property.value_type == str and config_property.multiline:
            return 'text'
        return value_type

    @classmethod
    def get_schema_json(cls, config_property):
        """Gets JSON schema for configuration property."""
        return SCHEMA_JSON_TEMPLATE % cls.get_value_type(config_property)

    def get_add_course(self):
        """Handles 'add_course' action and renders new course entry editor."""
        exit_url = '/admin?action=courses'
        rest_url = CoursesItemRESTHandler.URI
        template_values = {}
        template_values[
            'page_title'] = 'U-MOOC - Add Course'
        template_values['main_content'] = oeditor.ObjectEditor.get_html_for(
            self, CoursesItemRESTHandler.SCHEMA_JSON,
            CoursesItemRESTHandler.SCHEMA_ANNOTATIONS_DICT,
            None, rest_url, exit_url,
            auto_return=True,
            save_button_caption='Add New Course')
        self.render_page(template_values)

    def get_config_edit(self):
        """Handles 'edit' property action."""
        key = self.request.get('name')
        if not key:
            # Bugfix: redirect() does not abort the handler; stop here
            # instead of falling through to the registry lookup below.
            self.redirect('/admin?action=settings')
            return
        # Bugfix: .get() instead of [] so an unregistered name redirects
        # rather than raising KeyError.
        item = config.Registry.registered.get(key)
        if not item:
            self.redirect('/admin?action=settings')
            return
        template_values = {}
        template_values[
            'page_title'] = 'U-MOOC - Edit Settings'
        exit_url = '/admin?action=settings#%s' % cgi.escape(key)
        rest_url = '/rest/config/item'
        delete_url = '/admin?%s' % urllib.urlencode({
            'action': 'config_reset',
            'name': key,
            'xsrf_token': cgi.escape(self.create_xsrf_token('config_reset'))})
        template_values['main_content'] = oeditor.ObjectEditor.get_html_for(
            self, ConfigPropertyEditor.get_schema_json(item),
            ConfigPropertyEditor.get_schema_annotations(item),
            key, rest_url, exit_url, delete_url=delete_url)
        self.render_page(template_values)

    def post_config_override(self):
        """Handles 'override' property action."""
        name = self.request.get('name')
        # Find item in registry.
        item = None
        if name and name in config.Registry.registered.keys():
            item = config.Registry.registered[name]
        if not item:
            # Bugfix: stop after redirect instead of creating an override
            # entity for an unknown property name.
            self.redirect('/admin?action=settings')
            return
        # Add new entity if does not exist.
        try:
            entity = config.ConfigPropertyEntity.get_by_key_name(name)
        except db.BadKeyError:
            entity = None
        if not entity:
            # New overrides start as drafts seeded with the current value.
            entity = config.ConfigPropertyEntity(key_name=name)
            entity.value = str(item.value)
            entity.is_draft = True
            entity.put()
        models.EventEntity.record(
            'override-property', users.get_current_user(), transforms.dumps({
                'name': name, 'value': str(entity.value)}))
        self.redirect('/admin?%s' % urllib.urlencode(
            {'action': 'config_edit', 'name': name}))

    def post_config_reset(self):
        """Handles 'reset' property action."""
        name = self.request.get('name')
        # Find item in registry.
        item = None
        if name and name in config.Registry.registered.keys():
            item = config.Registry.registered[name]
        if not item:
            # Bugfix: stop after redirect for unknown property names.
            self.redirect('/admin?action=settings')
            return
        # Delete if exists.
        try:
            entity = config.ConfigPropertyEntity.get_by_key_name(name)
            if entity:
                old_value = entity.value
                entity.delete()
                models.EventEntity.record(
                    'delete-property', users.get_current_user(),
                    transforms.dumps({
                        'name': name, 'value': str(old_value)}))
        except db.BadKeyError:
            pass
        self.redirect('/admin?action=settings')
class CoursesItemRESTHandler(BaseRESTHandler):
    """Provides REST API for course entries."""

    URI = '/rest/courses/item'

    SCHEMA_JSON = """
    {
        "id": "Course Entry",
        "type": "object",
        "description": "Course Entry",
        "properties": {
            "name": {"type": "string"},
            "title": {"type": "string"},
            "admin_email": {"type": "string"}
            }
    }
    """

    SCHEMA_DICT = transforms.loads(SCHEMA_JSON)

    SCHEMA_ANNOTATIONS_DICT = [
        (['title'], 'New Course Entry'),
        (['properties', 'name', '_inputex'], {'label': 'Unique Name'}),
        (['properties', 'title', '_inputex'], {'label': 'Course Title'}),
        (['properties', 'admin_email', '_inputex'], {
            'label': 'Course Admin Email'})]

    def get(self):
        """Handles HTTP GET verb."""
        if not ConfigPropertyRights.can_view():
            transforms.send_json_response(self, 401, 'Access denied.')
            return
        # Seed the editor with placeholder values for a brand-new course.
        defaults = {
            'name': 'new_course',
            'title': 'My New Course',
            'admin_email': self.get_user().email()}
        transforms.send_json_response(
            self, 200, 'Success.',
            payload_dict=defaults,
            xsrf_token=XsrfTokenManager.create_xsrf_token('add-course-put'))

    def put(self):
        """Handles HTTP PUT verb."""
        request = transforms.loads(self.request.get('request'))
        if not self.assert_xsrf_token_or_fail(request, 'add-course-put', {}):
            return
        if not ConfigPropertyRights.can_edit():
            transforms.send_json_response(self, 401, 'Access denied.')
            return

        course_spec = transforms.loads(request.get('payload'))
        name = course_spec.get('name')
        title = course_spec.get('title')
        admin_email = course_spec.get('admin_email')

        # Add the new course entry.
        errors = []
        entry = sites.add_new_course_entry(name, title, admin_email, errors)
        if not entry:
            errors.append('Error adding a new course entry.')
        if errors:
            transforms.send_json_response(self, 412, '\n'.join(errors))
            return

        # We can't expect our new configuration being immediately available due
        # to datastore queries consistency limitations. So we will instantiate
        # our new course here and not use the normal sites.get_all_courses().
        app_context = sites.get_all_courses(entry)[0]

        # Update course with a new title and admin email.
        new_course = courses.Course(None, app_context=app_context)
        if not new_course.init_new_course_settings(title, admin_email):
            transforms.send_json_response(
                self, 412,
                'Added new course entry, but failed to update title and/or '
                'admin email. The course.yaml file already exists and must be '
                'updated manually.')
            return

        transforms.send_json_response(self, 200, 'Added.', {'entry': entry})
class ConfigPropertyItemRESTHandler(BaseRESTHandler):
    """Provides REST API for a configuration property."""

    def get(self):
        """Handles REST GET verb and returns an object as JSON payload."""
        key = self.request.get('key')
        if not ConfigPropertyRights.can_view():
            transforms.send_json_response(
                self, 401, 'Access denied.', {'key': key})
            return
        item = None
        if key and key in config.Registry.registered.keys():
            item = config.Registry.registered[key]
        if not item:
            # Bugfix: redirect() does not abort the handler; return so we do
            # not also emit a JSON response below.
            self.redirect('/admin?action=settings')
            return
        try:
            entity = config.ConfigPropertyEntity.get_by_key_name(key)
        except db.BadKeyError:
            entity = None
        if not entity:
            transforms.send_json_response(
                self, 404, 'Object not found.', {'key': key})
        else:
            entity_dict = {'name': key, 'is_draft': entity.is_draft}
            # Stored values are strings; convert back to the declared type.
            entity_dict['value'] = transforms.string_to_value(
                entity.value, item.value_type)
            json_payload = transforms.dict_to_json(
                entity_dict,
                transforms.loads(
                    ConfigPropertyEditor.get_schema_json(item)))
            transforms.send_json_response(
                self, 200, 'Success.',
                payload_dict=json_payload,
                xsrf_token=XsrfTokenManager.create_xsrf_token(
                    'config-property-put'))

    def put(self):
        """Handles REST PUT verb with JSON payload."""
        request = transforms.loads(self.request.get('request'))
        key = request.get('key')
        if not self.assert_xsrf_token_or_fail(
                request, 'config-property-put', {'key': key}):
            return
        if not ConfigPropertyRights.can_edit():
            transforms.send_json_response(
                self, 401, 'Access denied.', {'key': key})
            return
        item = None
        if key and key in config.Registry.registered.keys():
            item = config.Registry.registered[key]
        if not item:
            # Bugfix: return after redirect; execution previously fell
            # through and dereferenced the missing item below.
            self.redirect('/admin?action=settings')
            return
        try:
            entity = config.ConfigPropertyEntity.get_by_key_name(key)
        except db.BadKeyError:
            transforms.send_json_response(
                self, 404, 'Object not found.', {'key': key})
            return
        if not entity:
            # Robustness: get_by_key_name returns None for a missing entity
            # without raising; answer 404 instead of crashing on None below.
            transforms.send_json_response(
                self, 404, 'Object not found.', {'key': key})
            return

        payload = request.get('payload')
        json_object = transforms.loads(payload)
        new_value = item.value_type(json_object['value'])

        # Validate the value.
        errors = []
        if item.validator:
            item.validator(new_value, errors)
        if errors:
            transforms.send_json_response(self, 412, '\n'.join(errors))
            return

        # Update entity.
        old_value = entity.value
        entity.value = str(new_value)
        entity.is_draft = json_object['is_draft']
        entity.put()
        models.EventEntity.record(
            'put-property', users.get_current_user(), transforms.dumps({
                'name': key,
                'before': str(old_value), 'after': str(entity.value)}))
        transforms.send_json_response(self, 200, 'Saved.')
| {
"content_hash": "ced08e69fc05d53ea3f532a0631bc447",
"timestamp": "",
"source": "github",
"line_count": 383,
"max_line_length": 79,
"avg_line_length": 34.775456919060055,
"alnum_prop": 0.5760192206622119,
"repo_name": "esacosta/u-mooc",
"id": "5d6451b4eb7375ba20441c4776b99358e6664f7a",
"size": "13917",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "modules/admin/config.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "134"
},
{
"name": "CSS",
"bytes": "281621"
},
{
"name": "HTML",
"bytes": "6790134"
},
{
"name": "JavaScript",
"bytes": "1211720"
},
{
"name": "PHP",
"bytes": "25856"
},
{
"name": "Python",
"bytes": "1045317"
}
],
"symlink_target": ""
} |
from office365.runtime.paths.entity import EntityPath
class NonePath(EntityPath):
    """Null-object entity path: a node that contributes no segment of its own."""
    def __init__(self, parent=None):
        # NOTE(review): passes None as the path segment and *parent* for the
        # remaining two EntityPath.__init__ arguments — confirm their meaning
        # against EntityPath.__init__ before relying on it.
        super(NonePath, self).__init__(None, parent, parent)
| {
"content_hash": "ca2e6af0b5e302f29a6d82d90e8336a0",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 60,
"avg_line_length": 26.142857142857142,
"alnum_prop": 0.6994535519125683,
"repo_name": "vgrem/Office365-REST-Python-Client",
"id": "b07bff9603ad9c227fbc429c064949286b8fe62c",
"size": "183",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "office365/runtime/paths/none.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1659292"
}
],
"symlink_target": ""
} |
from matplotlib import rc
import matplotlib
# Clear any LaTeX preamble customizations.
# NOTE(review): newer matplotlib expects a string here, not a list — confirm
# against the pinned matplotlib version before upgrading.
matplotlib.rcParams['text.latex.preamble']=[""]
# Global plot typography; usetex/unicode options kept for reference.
params = {#'text.usetex' : False,
          'font.size' : 15,
          'font.family' : 'sans-serif',
          #'text.latex.unicode': True,
          }
matplotlib.rcParams.update(params)
matplotlib.rc('xtick', labelsize=15)
matplotlib.rc('ytick', labelsize=15)
# Show five sample markers in line legends.
matplotlib.rcParams['legend.numpoints'] = 5
#matplotlib.rcParams['legend.scatterpoints'] = 5
# magic legend handler thing that allows us to show a line with background in
# legends
from matplotlib.legend_handler import HandlerLine2D
class HandlerLine2DBG(HandlerLine2D):
    """Legend handler drawing a Line2D entry over a colored background box."""

    def __init__(self, marker_pad=0.3, numpoints=None, bgcolor=None, bgalpha=1.0, **kw):
        super(HandlerLine2DBG, self).__init__(
            marker_pad=marker_pad, numpoints=numpoints, **kw)
        self.bgcolor = bgcolor
        self.bgalpha = bgalpha

    def create_artists(self, legend, orig_handle, xdescent, ydescent, width,
                       height, fontsize, trans):
        from matplotlib.patches import Rectangle
        artists = super(HandlerLine2DBG, self).create_artists(
            legend, orig_handle, xdescent, ydescent, width, height,
            fontsize, trans)
        # Background patch spanning the whole legend entry; the line stays
        # visible because Line2D's default zorder is above a patch's.
        backdrop = Rectangle((0, 0), width, height,
                             facecolor=self.bgcolor, alpha=self.bgalpha,
                             linewidth=0)
        artists.append(backdrop)
        return artists
| {
"content_hash": "31802a45a64237da76f182d07aace0b6",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 88,
"avg_line_length": 37.5,
"alnum_prop": 0.6708771929824562,
"repo_name": "libsmelt/libsmelt",
"id": "9f28ca2cbecfe3bd508a2032ffa5cfa678696e9a",
"size": "1425",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/plotsetup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "623653"
},
{
"name": "C++",
"bytes": "254189"
},
{
"name": "Makefile",
"bytes": "9926"
},
{
"name": "Python",
"bytes": "73054"
},
{
"name": "Shell",
"bytes": "23489"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Initial blog migration: creates the BlogEntry table."""
    # Auto-generated by Django's makemigrations; field definitions should not
    # be edited by hand once applied.
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='BlogEntry',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(max_length=200)),
                ('body', models.TextField()),
                ('slug', models.SlugField(unique=True, max_length=200)),
                ('publish', models.BooleanField(default=True)),
                ('created_on', models.DateTimeField(auto_now_add=True)),
                ('modified_on', models.DateTimeField(auto_now=True)),
            ],
            options={
                'ordering': ['-created_on'],
                'verbose_name': 'Blog Entry',
                'verbose_name_plural': 'Blog Entries',
            },
        ),
    ]
| {
"content_hash": "180de58b9ea04bd4b90cc5bc586af31e",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 114,
"avg_line_length": 33.48275862068966,
"alnum_prop": 0.5303810504634398,
"repo_name": "salmanwahed/pypanda_project",
"id": "441683f29f301e2ac12019c53fe3fed7f45ba0b1",
"size": "995",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pypanda/blog/migrations/0001_initial.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "7749"
},
{
"name": "Python",
"bytes": "10485"
}
],
"symlink_target": ""
} |
import unittest2
from pyoauth.error import InvalidOAuthParametersError, \
InvalidAuthorizationHeaderError, \
InvalidSignatureMethodError, \
InvalidHttpResponseError, HttpError, InvalidContentTypeError, IllegalArgumentError
from pyoauth.http import RequestAdapter, ResponseAdapter
from pyoauth.protocol import parse_authorization_header
from pyoauth.oauth1 import Credentials
from pyoauth.oauth1.client import Client
class TestClient_OAuth_1_0_Example(unittest2.TestCase):
    """Exercises Client using the worked example values from RFC 5849 (OAuth 1.0)."""
    def setUp(self):
        # Fixture credentials/URLs taken verbatim from the RFC 5849 example.
        self.client_credentials = Credentials(identifier="dpf43f3p2l4k3l03", shared_secret="kd94hf93k423kf44")
        self.client = Client(self.client_credentials,
                             temporary_credentials_request_uri="https://photos.example.net/initiate",
                             resource_owner_authorization_uri="https://photos.example.net/authorize",
                             token_credentials_request_uri="https://photos.example.net/token",
                             use_authorization_header=True)
        self.temporary_credentials = Credentials(identifier="hh5s93j4hdidpola", shared_secret="hdhd0244k9j7ao03")
        self.token_credentials = Credentials(identifier="nnch734d00sl2jdk", shared_secret="pfkkdhi9sl3r4s00")
    def test___init__(self):
        # Constructor must store all endpoint URIs and credentials unchanged.
        c = self.client
        self.assertEqual(c._temporary_credentials_request_uri, "https://photos.example.net/initiate")
        self.assertEqual(c._resource_owner_authorization_uri, "https://photos.example.net/authorize")
        self.assertEqual(c._token_credentials_request_uri, "https://photos.example.net/token")
        self.assertEqual(c._use_authorization_header, True)
        self.assertEqual(c._client_credentials.identifier, "dpf43f3p2l4k3l03")
        self.assertEqual(c._client_credentials.shared_secret, "kd94hf93k423kf44")
    def test_oauth_version(self):
        # OAuth version MUST be set to "1.0". Anything else is the responsibility of the API user.
        self.assertEqual(self.client.oauth_version, "1.0")
    def test_get_authorization_url(self):
        # oauth_* query arguments other than oauth_token must be dropped.
        url = self.client.get_authorization_url(self.temporary_credentials, a="something here", b=["another thing", 5], oauth_ignored="ignored")
        self.assertEqual(url, "https://photos.example.net/authorize?a=something%20here&b=5&b=another%20thing&oauth_token=" + self.temporary_credentials.identifier)
    def test_parse_temporary_credentials_response(self):
        headers = {
            "Content-Type": "application/x-www-form-urlencoded",
        }
        # oauth_callback_confirmed must be exactly "true"; empty or "false" is rejected.
        self.assertRaises(ValueError, self.client.parse_temporary_credentials_response, ResponseAdapter(200, "OK", "oauth_token=hh5s93j4hdidpola&oauth_token_secret=hdhd0244k9j7ao03&oauth_callback_confirmed=", headers))
        self.assertRaises(ValueError, self.client.parse_temporary_credentials_response, ResponseAdapter(200, "OK", "oauth_token=hh5s93j4hdidpola&oauth_token_secret=hdhd0244k9j7ao03&oauth_callback_confirmed=false", headers))
        credentials, params = self.client.parse_temporary_credentials_response(ResponseAdapter(200, "OK", "oauth_token=hh5s93j4hdidpola&oauth_token_secret=hdhd0244k9j7ao03&oauth_callback_confirmed=true", headers=headers))
        self.assertDictEqual(params, {
            "oauth_token": ["hh5s93j4hdidpola"],
            "oauth_token_secret": ["hdhd0244k9j7ao03"],
            "oauth_callback_confirmed": ["true"],
        })
        self.assertEqual(credentials, self.temporary_credentials)
    def test_parse_token_credentials_response(self):
        headers = {
            "Content-Type": "application/x-www-form-urlencoded",
        }
        credentials, params = self.client.parse_token_credentials_response(ResponseAdapter(200, "OK", "oauth_token=nnch734d00sl2jdk&oauth_token_secret=pfkkdhi9sl3r4s00", headers=headers))
        self.assertDictEqual(params, {
            "oauth_token": ["nnch734d00sl2jdk"],
            "oauth_token_secret": ["pfkkdhi9sl3r4s00"],
        })
        self.assertEqual(credentials, self.token_credentials)
    def test__parse_credentials_response(self):
        # The shared parser handles both temporary- and token-credential bodies.
        headers = {
            "Content-Type": "application/x-www-form-urlencoded",
        }
        credentials, params = self.client._parse_credentials_response(ResponseAdapter(200, "OK", "oauth_token=hh5s93j4hdidpola&oauth_token_secret=hdhd0244k9j7ao03&oauth_callback_confirmed=true", headers=headers))
        self.assertDictEqual(params, {
            "oauth_token": ["hh5s93j4hdidpola"],
            "oauth_token_secret": ["hdhd0244k9j7ao03"],
            "oauth_callback_confirmed": ["true"],
        })
        self.assertEqual(credentials, self.temporary_credentials)
        credentials, params = self.client._parse_credentials_response(ResponseAdapter(200, "OK", "oauth_token=nnch734d00sl2jdk&oauth_token_secret=pfkkdhi9sl3r4s00", headers=headers))
        self.assertDictEqual(params, {
            "oauth_token": ["nnch734d00sl2jdk"],
            "oauth_token_secret": ["pfkkdhi9sl3r4s00"],
        })
        self.assertEqual(credentials, self.token_credentials)
    def test_parse_credentials_response_validation(self):
        # Missing status/body/headers, non-2xx codes, empty bodies and wrong
        # content types must each be rejected with a specific error.
        status_code = 200
        status = "OK"
        body = "oauth_token=nnch734d00sl2jdk&oauth_token_secret=pfkkdhi9sl3r4s00"
        headers = {
            "Content-Type": "application/x-www-form-urlencoded",
        }
        self.assertRaises(InvalidHttpResponseError, self.client._parse_credentials_response, ResponseAdapter(status_code, None, body, headers))
        self.assertRaises(InvalidHttpResponseError, self.client._parse_credentials_response, ResponseAdapter(None, status, body, headers))
        self.assertRaises(InvalidHttpResponseError, self.client._parse_credentials_response, ResponseAdapter(status_code, status, None, headers))
        self.assertRaises(InvalidHttpResponseError, self.client._parse_credentials_response, ResponseAdapter(status_code, status, body, None))
        self.assertRaises(HttpError, self.client._parse_credentials_response, ResponseAdapter(300, "Multiple choices", body, headers))
        self.assertRaises(HttpError, self.client._parse_credentials_response, ResponseAdapter(199, "continue", body, headers))
        self.assertRaises(InvalidHttpResponseError, self.client._parse_credentials_response, ResponseAdapter(200, "OK" , "", headers))
        self.assertRaises(InvalidContentTypeError, self.client._parse_credentials_response, ResponseAdapter(200, "OK", body, {"Content-Type": "invalid"}))
class Test_Client_build_temporary_credentials_request(unittest2.TestCase):
    """Input validation for Client.build_temporary_credentials_request."""

    def setUp(self):
        # RFC 5849 example fixtures.
        self.client_credentials = Credentials(
            identifier="dpf43f3p2l4k3l03", shared_secret="kd94hf93k423kf44")
        self.client = Client(
            self.client_credentials,
            temporary_credentials_request_uri="https://photos.example.net/initiate",
            resource_owner_authorization_uri="https://photos.example.net/authorize",
            token_credentials_request_uri="https://photos.example.net/token",
            use_authorization_header=True)
        self.temporary_credentials = Credentials(
            identifier="hh5s93j4hdidpola", shared_secret="hdhd0244k9j7ao03")
        self.token_credentials = Credentials(
            identifier="nnch734d00sl2jdk", shared_secret="pfkkdhi9sl3r4s00")

    def test_raises_ValueError_when_oauth_callback_is_invalid(self):
        # "foobar" is neither "oob" nor an absolute URL.
        self.assertRaises(ValueError,
                          self.client.build_temporary_credentials_request,
                          oauth_callback="foobar")
class Test_Client_build_token_credentials_request(unittest2.TestCase):
    """Input validation for Client.build_token_credentials_request."""

    def setUp(self):
        # RFC 5849 example fixtures.
        self.client_credentials = Credentials(
            identifier="dpf43f3p2l4k3l03", shared_secret="kd94hf93k423kf44")
        self.client = Client(
            self.client_credentials,
            temporary_credentials_request_uri="https://photos.example.net/initiate",
            resource_owner_authorization_uri="https://photos.example.net/authorize",
            token_credentials_request_uri="https://photos.example.net/token",
            use_authorization_header=True)
        self.temporary_credentials = Credentials(
            identifier="hh5s93j4hdidpola", shared_secret="hdhd0244k9j7ao03")
        self.token_credentials = Credentials(
            identifier="nnch734d00sl2jdk", shared_secret="pfkkdhi9sl3r4s00")

    def test_raises_IllegalArgumentError_when_oauth_callback_specified(self):
        # oauth_callback belongs to the temporary-credentials step only.
        self.assertRaises(IllegalArgumentError,
                          self.client.build_token_credentials_request,
                          temporary_credentials=self.temporary_credentials,
                          oauth_verifier="something",
                          oauth_callback="oob")
class Test_Client_build_resource_request(unittest2.TestCase):
    """Input validation for Client.build_resource_request."""

    def setUp(self):
        # RFC 5849 example fixtures.
        self.client_credentials = Credentials(
            identifier="dpf43f3p2l4k3l03", shared_secret="kd94hf93k423kf44")
        self.client = Client(
            self.client_credentials,
            temporary_credentials_request_uri="https://photos.example.net/initiate",
            resource_owner_authorization_uri="https://photos.example.net/authorize",
            token_credentials_request_uri="https://photos.example.net/token",
            use_authorization_header=True)
        self.temporary_credentials = Credentials(
            identifier="hh5s93j4hdidpola", shared_secret="hdhd0244k9j7ao03")
        self.token_credentials = Credentials(
            identifier="nnch734d00sl2jdk", shared_secret="pfkkdhi9sl3r4s00")

    def test_raises_IllegalArgumentError_when_oauth_callback_specified(self):
        # oauth_callback is not a valid parameter for resource requests.
        self.assertRaises(IllegalArgumentError,
                          self.client.build_resource_request,
                          token_credentials=self.token_credentials,
                          method="POST",
                          url="http://photos.example.net/request",
                          oauth_callback="oob")
class Test_Client_build_request(unittest2.TestCase):
    def setUp(self):
        # RFC 5849 example fixtures shared by every _build_request test.
        self.client_credentials = Credentials(identifier="dpf43f3p2l4k3l03", shared_secret="kd94hf93k423kf44")
        self.client = Client(self.client_credentials,
                             temporary_credentials_request_uri="https://photos.example.net/initiate",
                             resource_owner_authorization_uri="https://photos.example.net/authorize",
                             token_credentials_request_uri="https://photos.example.net/token",
                             use_authorization_header=True)
        self.temporary_credentials = Credentials(identifier="hh5s93j4hdidpola", shared_secret="hdhd0244k9j7ao03")
        self.token_credentials = Credentials(identifier="nnch734d00sl2jdk", shared_secret="pfkkdhi9sl3r4s00")
    def test_raises_InvalidSignatureMethodError_when_signature_method_invalid(self):
        # Only the supported OAuth 1.0 signature method names are accepted.
        self.assertRaises(InvalidSignatureMethodError,
                          self.client._build_request,
                          "POST",
                          self.client._temporary_credentials_request_uri,
                          oauth_signature_method="BLAH")
    def test_raises_ValueError_when_multiple_oauth_param_values(self):
        # oauth_* protocol parameters must be single-valued.
        self.assertRaises(InvalidOAuthParametersError,
                          self.client._build_request,
                          "POST",
                          self.client._temporary_credentials_request_uri,
                          oauth_something=[1, 2, 3])
    def test_raises_IllegalArgumentError_when_overriding_reserved_oauth_params(self):
        # oauth_signature is computed by the client and may not be supplied.
        self.assertRaises(IllegalArgumentError,
                          self.client._build_request,
                          "POST",
                          self.client._temporary_credentials_request_uri,
                          oauth_signature="dummy-signature")
    def tests_raises_InvalidAuthorizationHeaderError_when_Authorization_header_is_already_present(self):
        # The client owns the Authorization header; a caller-supplied one is an error.
        self.assertRaises(InvalidAuthorizationHeaderError,
                          self.client._build_request,
                          "POST",
                          self.client._temporary_credentials_request_uri,
                          headers={"Authorization": "blah blah."})
def test_valid_request_generated(self):
valid_request = RequestAdapter("GET",
"http://photos.example.net/photos?file=vacation.jpg&size=original",
body="",
headers={
"Authorization": '''\
OAuth realm="Photos",\
oauth_consumer_key="dpf43f3p2l4k3l03",\
oauth_nonce="chapoH",\
oauth_signature="MdpQcU8iPSUjWoN%2FUDMsK2sui9I%3D",\
oauth_signature_method="HMAC-SHA1",\
oauth_timestamp="137131202",\
oauth_token="nnch734d00sl2jdk"'''})
method = "GET"
url = "http://photos.example.net/photos"
params = dict(file="vacation.jpg", size="original")
request = self.client._build_request(method,
url,
params,
auth_credentials=self.token_credentials,
realm="Photos",
oauth_signature_method="HMAC-SHA1",
oauth_timestamp="137131202",
oauth_consumer_key="dpf43f3p2l4k3l03",
oauth_token="nnch734d00sl2jdk",
oauth_nonce="chapoH",
_test_force_override_reserved_oauth_params=True,
_test_force_exclude_oauth_version=True)
self.assertEqual(request.method, valid_request.method)
self.assertEqual(request.payload, valid_request.payload)
self.assertEqual(request.url, valid_request.url)
expected_authorization_header, expected_realm = parse_authorization_header(valid_request.headers["Authorization"])
got_authorization_header, got_realm = parse_authorization_header(request.headers["Authorization"])
self.assertEqual(got_realm, expected_realm)
self.assertDictEqual(got_authorization_header, expected_authorization_header)
def test_example_post_request(self):
valid_request = RequestAdapter("POST",
"https://photos.example.net/initiate",
body="",
headers={
"Authorization": '''\
OAuth realm="Photos",\
oauth_callback="http://printer.example.com/ready",\
oauth_consumer_key="dpf43f3p2l4k3l03",\
oauth_nonce="wIjqoS",\
oauth_signature="74KNZJeDHnMBp0EMJ9ZHt/XKycU=",\
oauth_signature_method="HMAC-SHA1",\
oauth_timestamp="137131200"'''})
method = "POST"
url = "https://photos.example.net/initiate"
params = None
request = self.client._build_request(method,
url,
params,
auth_credentials=None,
realm="Photos",
oauth_signature_method="HMAC-SHA1",
oauth_timestamp="137131200",
oauth_consumer_key="dpf43f3p2l4k3l03",
oauth_nonce="wIjqoS",
oauth_callback="http://printer.example.com/ready",
_test_force_override_reserved_oauth_params=True,
_test_force_exclude_oauth_version=True)
self.assertEqual(request.method, valid_request.method)
self.assertEqual(request.payload, valid_request.payload)
self.assertEqual(request.url, valid_request.url)
expected_authorization_header, expected_realm = parse_authorization_header(valid_request.headers["Authorization"])
got_authorization_header, got_realm = parse_authorization_header(request.headers["Authorization"])
self.assertEqual(got_realm, expected_realm)
self.assertDictEqual(got_authorization_header, expected_authorization_header)
if __name__ == "__main__":
    # Allow running this test module directly.
    unittest2.main()
| {
"content_hash": "14add8e0c70113278a9a88036a4255b2",
"timestamp": "",
"source": "github",
"line_count": 274,
"max_line_length": 223,
"avg_line_length": 60.0985401459854,
"alnum_prop": 0.6314447075970122,
"repo_name": "gorakhargosh/pyoauth",
"id": "4fb4b8bcaa482d045dda45063876c544754c9131",
"size": "17153",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyoauth/tests/old_pyoauth_oauth1_client.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "306721"
},
{
"name": "Shell",
"bytes": "5036"
}
],
"symlink_target": ""
} |
from flask import Flask
from werkzeug.contrib.cache import SimpleCache
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.mail import Mail
# Celery
from celery import Celery
###########
# GLOBALS #
###########

# The Flask application, configured from the top-level ``config`` module.
app = Flask(__name__)
app.config.from_object('config')

# SQLAlchemy database bound to the application.
db = SQLAlchemy(app)

# Celery task queue; it shares the Flask configuration.
celery = Celery('app.tasks')
celery.conf.update(app.config)

# Outgoing e-mail support.
mail = Mail(app)

# In-process cache: up to 30000 entries, 5 minute default timeout.
cache = SimpleCache(default_timeout=300, threshold=30000)

# Global dictionary for convenience data shared across the app.
helper = {}

# Imported last so that ``server`` and ``models`` can use the objects
# created above.
from app import server, models
| {
"content_hash": "6c1c32b2791d11e98ee67a1ed423f410",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 57,
"avg_line_length": 17.515151515151516,
"alnum_prop": 0.7249134948096886,
"repo_name": "nlohmann/lqfb_viewer",
"id": "665170abfac3921ce30afd1636e6149297ac916f",
"size": "1899",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "73728"
}
],
"symlink_target": ""
} |
import numpy as np
from scipy.spatial import distance
def DB_index2D(centroids, clusters, X, metric='euclidean'):
    """
    Return the Davies-Bouldin index for cluster evaluation for 2D data.

    The index is calculated using the formula:
        DB = (1/k) * sum(max((sigma_i + sigma_j)/distance(c_i, c_j)))
    where:
        k is the number of clusters
        sigma_i is the average distance of all elements in cluster i to
            centroid c_i
        c_i is the centroid of cluster i
        distance(c_i, c_j) is the distance between cluster centroids i and
            j, using the same metric as in the algorithm

    The smaller the value of the DB index, the better the clustering.

    Parameters
    ----------
    centroids : ndarray of float
        The centroids of the clusters.
    clusters : dict
        Cluster id mapped to a list of the cluster's elements (row
        indices into `X`).  Ids are assumed to be the integers 0..k-1
        because they are used directly as row indices into `centroids`.
    X : ndarray (2D)
        The data array.
    metric : str or callable, optional
        The distance metric to use; any metric accepted by
        `scipy.spatial.distance.cdist`.

    Returns
    -------
    DB_ind : float
        The Davies-Bouldin index.
    """
    K = len(clusters)
    # sigma_i: mean distance of each cluster's elements to its centroid.
    # Built as an explicit list: np.vstack over a generator is rejected by
    # modern NumPy.
    mean_distances = np.array([
        distance.cdist(centroids[c, np.newaxis],
                       X.take(clusters[c], axis=0),
                       metric=metric).mean()
        for c in sorted(clusters)])
    # R_ij = (sigma_i + sigma_j) / d(c_i, c_j), symmetric in i and j.
    D = np.zeros((K, K))
    for i in range(K):
        for j in range(i + 1, K):
            centroid_dist = distance.cdist(centroids[i, np.newaxis],
                                           centroids[j, np.newaxis],
                                           metric=metric)[0, 0]
            ratio = (mean_distances[i] + mean_distances[j]) / centroid_dist
            # Bug fix: fill BOTH triangles.  The original filled only the
            # upper triangle, so the last cluster's row-wise max was always
            # zero and the index was underestimated relative to the formula
            # documented above.
            D[i, j] = ratio
            D[j, i] = ratio
    return np.sum(D.max(axis=1)) / K
def DB_index3D(centroids, clusters, X, metric='euclidean', mode=1):
    """
    Return the Davies-Bouldin index for cluster evaluation for 3D data.

    The index is calculated using the formula:
        DB = (1/k) * sum(max((sigma_i + sigma_j)/distance(c_i, c_j)))
    where:
        k is the number of clusters
        sigma_i is the average distance of all elements in cluster i to
            centroid c_i
        c_i is the centroid of cluster i
        distance(c_i, c_j) is the distance between cluster centroids i and
            j, using the same metric as in the algorithm

    The smaller the value of the DB index, the better the clustering.

    Parameters
    ----------
    centroids : ndarray of float
        The centroids of the clusters.
    clusters : dict
        Cluster id mapped to a list of the cluster's elements.  Ids are
        assumed to be the integers 0..k-1 because they are used directly
        as row indices into `centroids`.
    X : ndarray (3D)
        The data array.
    metric : str or callable, optional
        The distance metric to use; any metric accepted by
        `scipy.spatial.distance.cdist`.
    mode : {0, 1, 2}, default 1
        Determines the axis along which to do the clustering.
        mode=i means that the clustering will be done along axis i.

    Returns
    -------
    DB_ind : float
        The Davies-Bouldin index.  Empty clusters are ignored.
    """
    # Replace NaNs so the distance computations below cannot yield NaN.
    centroids = np.nan_to_num(centroids)
    K = len(clusters)
    mean_distances = []
    non_empty_clusters = []
    for i in sorted(clusters.keys()):
        # Empty clusters contribute nothing; append a placeholder so list
        # indices keep lining up with cluster ids.
        if len(clusters[i]) == 0:
            mean_distances.append(0)
            continue
        non_empty_clusters.append(i)
        # Select only the elements of cluster i along the clustering axis.
        i_data = np.nan_to_num(X.take(clusters[i], axis=mode))
        if mode in (0, 1):
            # Flatten the first (chromosomes) dimension to get a 2-D
            # (observations, features) array.
            i_data = np.reshape(i_data, (i_data.shape[0] * i_data.shape[1],
                                         i_data.shape[2]))
            # Drop all-zero rows (these were all-NaN before nan_to_num).
            i_data = i_data.take(np.unique(np.nonzero(i_data)[0]), axis=0)
        else:
            # Take the mean of each sample over all chromosomes.
            i_data = np.nan_to_num(np.nanmean(i_data, axis=0))
            i_data = np.swapaxes(i_data, 0, 1)
        # sigma_i: mean distance of the cluster's elements to its centroid.
        # Bug fix: forward `metric` (the original silently used euclidean
        # here despite documenting the `metric` parameter).
        mean_distances.append(distance.cdist(centroids[i, np.newaxis],
                                             i_data, metric=metric).mean())
    # R_ij between non-empty clusters, symmetric in i and j.
    D = np.zeros((K, K))
    for i in range(K):
        if i not in non_empty_clusters:
            continue
        for j in range(i + 1, K):
            if j not in non_empty_clusters:
                continue
            centroid_dist = distance.cdist(centroids[i, np.newaxis],
                                           centroids[j, np.newaxis],
                                           metric=metric)[0, 0]
            ratio = (mean_distances[i] + mean_distances[j]) / centroid_dist
            # Bug fix: fill BOTH triangles, matching DB_index2D.  The
            # original filled only the upper triangle, zeroing the last
            # cluster's row-wise max and underestimating the index.
            D[i, j] = ratio
            D[j, i] = ratio
    return np.sum(D.max(axis=1)) / len(non_empty_clusters)
def modified_Gamma_index2D(centroids, clusters, labels, X, metric='euclidean'):
    """
    Return the Modified Hubert Gamma statistic.

    This statistic is calculated using the formula:
        Gamma = (1/M) * sum(sum(P(i,j)*Q(i,j)))
    where:
        M is the number of all possible pairs of X datapoints
        P is the similarity (distance) matrix of X datapoints
        Q is the matrix whose element Q(i,j) holds the distances between the
            centroids of the clusters that xi and xj belong to

    High values of the Modified Hubert Gamma statistic indicate the existence
    of compact clusters.

    Parameters
    ----------
    centroids : ndarray of float
        The centroids of the clusters.
    clusters : dict
        Cluster id mapped to a list of the cluster's elements.  Unused by
        this function; kept for interface compatibility with the other
        evaluation methods in this module.
    labels : ndarray of int
        The label of each datapoint, i.e. the cluster id it belongs to.
    X : ndarray (2D)
        The data array.
    metric : str or callable, optional
        The distance metric to use; any metric accepted by
        `scipy.spatial.distance.cdist` / `pdist`.

    Returns
    -------
    Gamma : float
        The Modified Hubert Gamma index.
    """
    # Q[i, j]: distance between the centroids of the clusters that
    # datapoints i and j were assigned to.
    representatives = centroids.take(labels, axis=0)
    Q = distance.cdist(representatives, representatives, metric=metric)
    # P: pairwise similarity (distance) matrix of the datapoints.
    P = distance.squareform(distance.pdist(X, metric=metric))
    n = X.shape[0]
    # Sum P[i, j] * Q[i, j] over all unordered pairs i < j.  Summing the
    # strict upper triangle of the elementwise product replaces the
    # original O(n^2) Python double loop with a vectorized computation.
    Gamma = np.triu(P * Q, k=1).sum()
    # Total number of possible datapoint pairs.
    M = (n * (n - 1)) / 2
    return (1 / M) * Gamma
def modified_Gamma_index3D(centroids, clusters, labels, X, metric='euclidean',
                           mode=1):
    """
    Return the Modified Hubert Gamma statistic for the 3D structure.

    This statistic is calculated using the formula:
        Gamma = (1/M) * sum(sum(P(i,j)*Q(i,j)))
    where:
        M is the number of all possible pairs of X datapoints
        P is the similarity (distance) matrix of X datapoints
        Q is the matrix whose element Q(i,j) holds the distances between the
            centroids of the clusters that xi and xj belong to

    High values of the Modified Hubert Gamma statistic indicate the existence
    of compact clusters.

    Parameters
    ----------
    centroids : ndarray of float
        The centroids of the clusters.
    clusters : dict
        Cluster id mapped to a list of the cluster's elements.  Unused by
        this function; kept for interface compatibility with the other
        evaluation methods in this module.
    labels : ndarray of int
        The label of each datapoint, i.e. the cluster id it belongs to.
    X : ndarray (3D)
        The data array.
    metric : str or callable, optional
        The distance metric to use; any metric accepted by
        `scipy.spatial.distance.cdist` / `pdist`.
    mode : {0, 1, 2}, default 1
        Determines the axis along which to do the clustering.
        mode=i means that the clustering will be done along axis i.

    Returns
    -------
    Gamma : float
        The Modified Hubert Gamma index.
    """
    # Q[i, j]: distance between the centroids of the clusters that
    # datapoints i and j were assigned to.
    representatives = centroids.take(labels, axis=0)
    Q = distance.cdist(representatives, representatives, metric=metric)
    # Collapse the 3-D data to a 2-D (datapoints, features) array
    # according to the clustering axis, ignoring NaNs.
    if mode == 0:
        X_mean = np.nanmean(X, axis=1)
    elif mode == 1:
        X_mean = np.nanmean(X, axis=0)
    else:
        X_mean = np.swapaxes(np.nanmean(X, axis=0), 0, 1)
    # P: pairwise distance matrix of the collapsed datapoints.
    P = distance.squareform(distance.pdist(X_mean, metric=metric))
    n = X_mean.shape[0]
    # Sum P[i, j] * Q[i, j] over unordered pairs i < j; the strict upper
    # triangle of the elementwise product replaces the original O(n^2)
    # Python double loop with a vectorized computation.
    Gamma = np.triu(P * Q, k=1).sum()
    # Total number of possible datapoint pairs.
    M = (n * (n - 1)) / 2
    return (1 / M) * Gamma
| {
"content_hash": "0013d970ee5a7f9663d8b1a6bca8a676",
"timestamp": "",
"source": "github",
"line_count": 264,
"max_line_length": 145,
"avg_line_length": 38.13636363636363,
"alnum_prop": 0.605979340484704,
"repo_name": "paren8esis/thesis",
"id": "248b2cf573293dd7e816f97614419fe7e43b5f23",
"size": "10093",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cluster_evaluation_methods.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "161949"
}
],
"symlink_target": ""
} |
from crypto_library import ctr_aes
# Matasano cryptopals challenge 18: decrypt a ciphertext with AES in CTR
# mode, using the project's ctr_aes helper.  (Python 2 script: note the
# print statement and str.decode('base64').)
blocksize = 16
# AES key given in the challenge description.
key = 'YELLOW SUBMARINE'
# 64-bit nonce, fixed at zero for this challenge.
nonce = 0
# struct format strings for the 16-byte keystream block input:
# little-endian signed nonce ('<q') followed by an unsigned counter ('<Q').
nonce_format = '<q'
counter_format = '<Q'
ciphertext = 'L77na/nrFsKvynd6HzOoG7GHTLXsTVu9qvY/2syLXzhPweyyMTJULu/6/kXX0KSvoOLSFQ=='.decode('base64')
print repr(ctr_aes(ciphertext, key, nonce, nonce_format, counter_format, blocksize))
| {
"content_hash": "049def6ef515b596be1210d68fdac7c6",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 104,
"avg_line_length": 35.333333333333336,
"alnum_prop": 0.7672955974842768,
"repo_name": "dimkarakostas/matasano-cryptochallenges",
"id": "752649c865cdf119ad28232dd3f5131724152a6a",
"size": "318",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "problem_18.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "29245"
}
],
"symlink_target": ""
} |
"""Routing directing."""
from __future__ import absolute_import
from channels.routing import route
from rotest.api.consumers import ws_connect, ws_disconnect, ws_receive
# Map each Channels websocket event to its consumer callback.
channel_routing = [
    route(event, consumer)
    for event, consumer in (
        ('websocket.connect', ws_connect),
        ('websocket.receive', ws_receive),
        ('websocket.disconnect', ws_disconnect),
    )
]
| {
"content_hash": "fae342580821f2ed33715ff86263cc97",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 70,
"avg_line_length": 27.75,
"alnum_prop": 0.7327327327327328,
"repo_name": "gregoil/rotest",
"id": "bd2d86304f01bab461d73a2444290d2d414d578b",
"size": "333",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/rotest/common/django_utils/routing.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3023"
},
{
"name": "Python",
"bytes": "919636"
}
],
"symlink_target": ""
} |
from urllib.parse import urljoin
from os.path import join, dirname, exists
from os import makedirs
import yaml, argparse
# HTML stub that immediately redirects the browser to {href}; the
# <link rel="canonical"> tag points search engines at the new location.
template = '''<!DOCTYPE html>
<meta charset="utf-8">
<title>Redirecting...</title>
<link rel="canonical" href="{href}">
<meta http-equiv="refresh" content="0; url={href}">
<h1>Redirecting...</h1>
<a href="{href}">Click here if you are not redirected.</a>
<script>location="{href}"</script>
'''

def setup_redirect(old_path, new_href):
    """Write a redirect stub page at `old_path` pointing to `new_href`.

    Existing files are never overwritten; parent directories are created
    as needed.
    """
    if exists(old_path):
        print('Will not overwrite', old_path)
        return
    # exist_ok avoids the check-then-create race of the original
    # ``if not exists(...): makedirs(...)`` guard.
    makedirs(dirname(old_path), exist_ok=True)
    with open(old_path, 'w') as output:
        output.write(template.format(href=new_href))
    print('Redirecting from', old_path, 'to', new_href)
# Command-line interface: a configuration file path plus the absolute
# base URL that redirect targets are resolved against.
parser = argparse.ArgumentParser(description='Prepare some redirects.')
parser.add_argument('config', help='Configuration file path')
parser.add_argument('base', help='Output documentation base path')
if __name__ == '__main__':
    args = parser.parse_args()

    # Load the site configuration; redirect pairs live under the
    # 'mz:redirects' key as {old name: new name} mappings.
    with open(args.config) as config_file:
        config = yaml.safe_load(config_file)

    site_dir = config.get('site_dir')
    for old_name, new_name in config.get('mz:redirects', {}).items():
        setup_redirect(join(site_dir, old_name, 'index.html'),
                       urljoin(args.base, new_name))
| {
"content_hash": "43f1130d9077410cfd22cd3d1565f64f",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 75,
"avg_line_length": 32.72093023255814,
"alnum_prop": 0.644633972992182,
"repo_name": "mapzen/mapzen-docs-generator",
"id": "e8071379894038925f36098dd70c811092722479",
"size": "1429",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "setup-redirects.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "11500"
},
{
"name": "HTML",
"bytes": "9816"
},
{
"name": "JavaScript",
"bytes": "2388"
},
{
"name": "Makefile",
"bytes": "5964"
},
{
"name": "Python",
"bytes": "11187"
},
{
"name": "Shell",
"bytes": "131"
}
],
"symlink_target": ""
} |
"""
Copyright (c) 2011-2016 Eric R. Jeschke
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the
distribution.
Neither the name of the Eric R. Jeschke nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
'AS IS' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
from __future__ import print_function
# stdlib imports
import sys
import os
import logging, logging.handlers
import threading
import traceback
# Local application imports
from ginga.misc.Bunch import Bunch
from ginga.misc import Task, ModuleManager, Settings, log
import ginga.version as version
import ginga.toolkit as ginga_toolkit
from ginga import AstroImage
from ginga.util import paths
# Catch warnings
logging.captureWarnings(True)
# Default widget layout for the reference viewer, in the nested list/dict
# form consumed by ``GingaShell.set_layout()``: a menu row on top, a
# horizontal panel holding the left workspace (Info pane), the central
# channel area, and the right workspace (Dialogs), then a toolbar row and
# a status row at the bottom.
default_layout = ['seq', {},
                  ['vbox', dict(name='top', width=1520, height=900),
                   dict(row=['hbox', dict(name='menu')],
                        stretch=0),
                   dict(row=['hpanel', dict(name='hpnl'),
                             ['ws', dict(name='left', wstype='tabs',
                                         width=300, height=-1, group=2),
                              # (tabname, layout), ...
                              [("Info", ['vpanel', {},
                                         ['ws', dict(name='uleft', wstype='stack',
                                                     height=300, group=3)],
                                         ['ws', dict(name='lleft', wstype='tabs',
                                                     height=430, group=3)],
                                         ]
                                )]],
                             ['vbox', dict(name='main', width=700),
                              dict(row=['ws', dict(name='channels', wstype='tabs',
                                                   group=1)], stretch=1),
                              dict(row=['ws', dict(name='cbar', wstype='stack',
                                                   group=99)], stretch=0),
                              dict(row=['ws', dict(name='readout', wstype='stack',
                                                   group=99)], stretch=0),
                              dict(row=['ws', dict(name='operations', wstype='stack',
                                                   group=99)], stretch=0),
                              ],
                             ['ws', dict(name='right', wstype='tabs',
                                         width=400, height=-1, group=2),
                              # (tabname, layout), ...
                              [("Dialogs", ['ws', dict(name='dialogs', wstype='tabs',
                                                       group=2)
                                            ]
                                )]
                              ],
                             ], stretch=1),
                   dict(row=['ws', dict(name='toolbar', wstype='stack',
                                        height=40, group=2)],
                        stretch=0),
                   dict(row=['hbox', dict(name='status')], stretch=0),
                   ]]
# Default global plugins.  ``ws`` names a workspace from default_layout,
# ``tab`` is the displayed tab title, ``start`` controls whether the
# plugin is launched at startup (default True, per add_default_plugins),
# and ``raisekey`` is an optional key binding (presumably raising the
# plugin's tab -- confirm in the Control module).
global_plugins = [
    Bunch(module='Toolbar', tab='Toolbar', ws='toolbar'),
    Bunch(module='Pan', tab='_pan', ws='uleft', raisekey=None),
    Bunch(module='Info', tab='Synopsis', ws='lleft', raisekey=None),
    Bunch(module='Header', tab='Header', ws='left', raisekey='H'),
    Bunch(module='Zoom', tab='Zoom', ws='left', raisekey='Z'),
    Bunch(module='Thumbs', tab='Thumbs', ws='right', raisekey='T'),
    Bunch(module='Contents', tab='Contents', ws='right', raisekey='c'),
    Bunch(module='Colorbar', tab='_cbar', ws='cbar', start=True),
    Bunch(module='Cursor', tab='_readout', ws='readout', start=True),
    Bunch(module='Operations', tab='_opns', ws='operations', start=True),
    Bunch(module='WBrowser', tab='Help', ws='channels', raisekey='?', start=False),
    Bunch(module='FBrowser', tab='Open File', ws='right', start=False),
    Bunch(module='Errors', tab='Errors', ws='right', start=True),
    Bunch(module='RC', tab='RC', ws='right', start=False),
    Bunch(module='WCSMatch', tab='WCSMatch', ws='right', start=False),
    Bunch(module='ChangeHistory', tab='History', ws='right', start=False),
    Bunch(module='SAMP', tab='SAMP', ws='right', start=False),
    Bunch(module='IRAF', tab='IRAF', ws='right', start=False),
    Bunch(module='Log', tab='Log', ws='right', start=False),
    Bunch(module='Debug', tab='Debug', ws='right', start=False),
]
# Default per-channel ("local") plugins.  ``shortkey`` is an optional
# function-key binding for invoking the plugin.
local_plugins = [
    Bunch(module='Pick', ws='dialogs', shortkey='f1'),
    Bunch(module='Ruler', ws='dialogs', shortkey='f2'),
    Bunch(module='MultiDim', ws='lleft', shortkey='f4'),
    Bunch(module='Cuts', ws='dialogs', shortkey='f5'),
    Bunch(module='Histogram', ws='dialogs', shortkey='f6'),
    Bunch(module='Crosshair', ws='dialogs'),
    Bunch(module='Overlays', ws='dialogs'),
    Bunch(module='Blink', ws='dialogs'),
    Bunch(module='LineProfile', ws='dialogs'),
    Bunch(module='PixTable', ws='dialogs', shortkey='f7'),
    Bunch(module='Preferences', ws='dialogs', shortkey='f9'),
    Bunch(module='Catalogs', ws='dialogs', shortkey='f10'),
    Bunch(module='Mosaic', ws='dialogs'),
    Bunch(module='Drawing', ws='dialogs', shortkey='f11'),
    Bunch(module='FBrowser', ws='dialogs', shortkey='f12'),
    Bunch(module='Compose', ws='dialogs'),
    # Not ready for prime time
    #Bunch(module='Pipeline', ws='dialogs'),
]
class ReferenceViewer(object):
    """
    This class exists solely to be able to customize the reference
    viewer startup.

    Typical use: construct with a layout, call add_default_plugins()
    (or add_*_plugin() for a custom set), populate an option parser
    with add_default_options(), then call main() with the parsed
    options and remaining arguments.
    """

    def __init__(self, layout=default_layout):
        # Plugin specs accumulated before main() is run.
        self.local_plugins = []
        self.global_plugins = []
        self.layout = layout

    def add_local_plugin(self, module_name, ws_name, pfx=None):
        """Register a per-channel plugin module in workspace `ws_name`."""
        self.local_plugins.append(
            Bunch(module=module_name, ws=ws_name, pfx=pfx))

    def add_global_plugin(self, module_name, ws_name,
                          tab_name=None, start_plugin=True, pfx=None):
        """Register a global plugin module in workspace `ws_name`.

        The tab title defaults to the module name; `start_plugin`
        controls whether it is launched at startup.
        """
        if tab_name is None:
            tab_name = module_name
        self.global_plugins.append(
            Bunch(module=module_name, ws=ws_name, tab=tab_name,
                  start=start_plugin, pfx=pfx))

    def clear_default_plugins(self):
        """Drop all registered plugins (for a fully custom set)."""
        self.local_plugins = []
        self.global_plugins = []

    def add_default_plugins(self):
        """
        Add the ginga-distributed default set of plugins to the
        reference viewer.
        """
        # add default global plugins
        for bnch in global_plugins:
            start = bnch.get('start', True)
            pfx = bnch.get('pfx', None)
            self.add_global_plugin(bnch.module, bnch.ws,
                                   tab_name=bnch.tab, start_plugin=start, pfx=pfx)
        # add default local plugins
        for bnch in local_plugins:
            pfx = bnch.get('pfx', None)
            self.add_local_plugin(bnch.module, bnch.ws, pfx=pfx)

    def add_default_options(self, optprs):
        """
        Adds the default reference viewer startup options to an
        OptionParser instance `optprs`.
        """
        optprs.add_option("--bufsize", dest="bufsize", metavar="NUM",
                          type="int", default=10,
                          help="Buffer length to NUM")
        optprs.add_option("--channels", dest="channels", default="Image",
                          help="Specify list of channels to create")
        optprs.add_option("--debug", dest="debug", default=False, action="store_true",
                          help="Enter the pdb debugger on main()")
        optprs.add_option("--disable-plugins", dest="disable_plugins",
                          metavar="NAMES",
                          help="Specify plugins that should be disabled")
        optprs.add_option("--display", dest="display", metavar="HOST:N",
                          help="Use X display on HOST:N")
        optprs.add_option("--fitspkg", dest="fitspkg", metavar="NAME",
                          default=None,
                          help="Prefer FITS I/O module NAME")
        optprs.add_option("-g", "--geometry", dest="geometry",
                          default=None, metavar="GEOM",
                          help="X geometry for initial size and placement")
        optprs.add_option("--modules", dest="modules", metavar="NAMES",
                          help="Specify additional modules to load")
        optprs.add_option("--nosplash", dest="nosplash", default=False,
                          action="store_true",
                          help="Don't display the splash screen")
        optprs.add_option("--numthreads", dest="numthreads", type="int",
                          default=30, metavar="NUM",
                          help="Start NUM threads in thread pool")
        optprs.add_option("--opencv", dest="opencv", default=False,
                          action="store_true",
                          help="Use OpenCv acceleration")
        optprs.add_option("--plugins", dest="plugins", metavar="NAMES",
                          help="Specify additional plugins to load")
        optprs.add_option("--profile", dest="profile", action="store_true",
                          default=False,
                          help="Run the profiler on main()")
        optprs.add_option("-t", "--toolkit", dest="toolkit", metavar="NAME",
                          default=None,
                          help="Prefer GUI toolkit (gtk|qt)")
        optprs.add_option("--wcspkg", dest="wcspkg", metavar="NAME",
                          default=None,
                          help="Prefer WCS module NAME")
        # logging-related options (--loglevel etc.)
        log.addlogopts(optprs)

    def main(self, options, args):
        """
        Main routine for running the reference viewer.

        `options` is a OptionParser object that has been populated with
        values from parsing the command line.  It should at least include
        the options from add_default_options()

        `args` is a list of arguments to the viewer after parsing out
        options.  It should contain a list of files or URLs to load.
        """
        # Create a logger
        logger = log.get_logger(name='ginga', options=options)

        # Get settings (preferences)
        basedir = paths.ginga_home
        if not os.path.exists(basedir):
            try:
                os.mkdir(basedir)
            except OSError as e:
                logger.warn("Couldn't create ginga settings area (%s): %s" % (
                    basedir, str(e)))
                logger.warn("Preferences will not be able to be saved")

        # Set up preferences
        prefs = Settings.Preferences(basefolder=basedir, logger=logger)
        settings = prefs.createCategory('general')
        settings.load(onError='silent')
        settings.setDefaults(useMatplotlibColormaps=False,
                             widgetSet='choose',
                             WCSpkg='choose', FITSpkg='choose',
                             recursion_limit=2000)

        # default of 1000 is a little too small
        sys.setrecursionlimit(settings.get('recursion_limit'))

        # So we can find our plugins
        sys.path.insert(0, basedir)
        moduleHome = os.path.split(sys.modules['ginga.version'].__file__)[0]
        childDir = os.path.join(moduleHome, 'misc', 'plugins')
        sys.path.insert(0, childDir)
        pluginDir = os.path.join(basedir, 'plugins')
        sys.path.insert(0, pluginDir)

        # Choose a toolkit: command line beats saved preference.
        if options.toolkit:
            toolkit = options.toolkit
        else:
            toolkit = settings.get('widgetSet', 'choose')

        if toolkit == 'choose':
            # Auto-detect: try Qt first, then Gtk.
            try:
                from ginga.qtw import QtHelp
            except ImportError:
                try:
                    from ginga.gtkw import GtkHelp
                except ImportError:
                    print("You need python-gtk or python-qt to run Ginga!")
                    sys.exit(1)
        else:
            ginga_toolkit.use(toolkit)

        tkname = ginga_toolkit.get_family()
        logger.info("Chosen toolkit (%s) family is '%s'" % (
            ginga_toolkit.toolkit, tkname))

        # these imports have to be here, otherwise they force the choice
        # of toolkit too early
        from ginga.gw.GingaGw import GingaView
        from ginga.Control import GingaControl, GuiLogHandler

        # Define class dynamically based on toolkit choice
        class GingaShell(GingaControl, GingaView):

            def __init__(self, logger, thread_pool, module_manager, prefs,
                         ev_quit=None):
                GingaView.__init__(self, logger, ev_quit, thread_pool)
                GingaControl.__init__(self, logger, thread_pool, module_manager,
                                      prefs, ev_quit=ev_quit)

        if settings.get('useMatplotlibColormaps', False):
            # Add matplotlib color maps if matplotlib is installed
            try:
                from ginga import cmap
                cmap.add_matplotlib_cmaps()
            except Exception as e:
                logger.warn("failed to load matplotlib colormaps: %s" % (str(e)))

        # User wants to customize the WCS package?
        if options.wcspkg:
            wcspkg = options.wcspkg
        else:
            wcspkg = settings.get('WCSpkg', 'choose')

        try:
            from ginga.util import wcsmod
            assert wcsmod.use(wcspkg) == True
        except Exception as e:
            logger.warn("failed to set WCS package preference: %s" % (str(e)))

        # User wants to customize the FITS package?
        if options.fitspkg:
            fitspkg = options.fitspkg
        else:
            fitspkg = settings.get('FITSpkg', 'choose')

        try:
            from ginga.util import io_fits
            assert io_fits.use(fitspkg) == True
        except Exception as e:
            logger.warn("failed to set FITS package preference: %s" % (str(e)))

        # Check whether user wants to use OpenCv
        use_opencv = settings.get('use_opencv', False)
        if use_opencv or options.opencv:
            from ginga import trcalc
            try:
                trcalc.use('opencv')
            except Exception as e:
                logger.warn("failed to set OpenCv preference: %s" % (str(e)))

        # Create the dynamic module manager
        mm = ModuleManager.ModuleManager(logger)

        # Create and start thread pool
        ev_quit = threading.Event()
        thread_pool = Task.ThreadPool(options.numthreads, logger,
                                      ev_quit=ev_quit)
        thread_pool.startall()

        # Create the Ginga main object
        ginga_shell = GingaShell(logger, thread_pool, mm, prefs,
                                 ev_quit=ev_quit)
        ginga_shell.set_layout(self.layout)

        gc = os.path.join(basedir, "ginga_config.py")
        have_ginga_config = os.path.exists(gc)

        # User configuration (custom star catalogs, etc.)
        if have_ginga_config:
            try:
                import ginga_config
                ginga_config.pre_gui_config(ginga_shell)
            except Exception as e:
                try:
                    (type, value, tb) = sys.exc_info()
                    tb_str = "\n".join(traceback.format_tb(tb))
                except Exception:
                    tb_str = "Traceback information unavailable."
                logger.error("Error importing Ginga config file: %s" % (
                    str(e)))
                logger.error("Traceback:\n%s" % (tb_str))

        # Build desired layout
        ginga_shell.build_toplevel()

        # Did user specify a particular geometry?
        if options.geometry:
            ginga_shell.set_geometry(options.geometry)

        # make the list of disabled plugins (compared case-insensitively)
        disabled_plugins = []
        if not (options.disable_plugins is None):
            disabled_plugins = options.disable_plugins.lower().split(',')

        # Add desired global plugins
        for spec in self.global_plugins:
            if not spec.module.lower() in disabled_plugins:
                ginga_shell.add_global_plugin(spec)

        # Add GUI log handler (for "Log" global plugin)
        guiHdlr = GuiLogHandler(ginga_shell)
        guiHdlr.setLevel(options.loglevel)
        fmt = logging.Formatter(log.LOG_FORMAT)
        guiHdlr.setFormatter(fmt)
        logger.addHandler(guiHdlr)

        # Load any custom modules.  A "pfx.Module" name loads Module
        # under the "pfx" package prefix.
        if options.modules:
            modules = options.modules.split(',')
            for longPluginName in modules:
                if '.' in longPluginName:
                    tmpstr = longPluginName.split('.')
                    pluginName = tmpstr[-1]
                    pfx = '.'.join(tmpstr[:-1])
                else:
                    pluginName = longPluginName
                    pfx = None
                spec = Bunch(name=pluginName, module=pluginName,
                             tab=pluginName, ws='right', pfx=pfx)
                ginga_shell.add_global_plugin(spec)

        # Load modules for "local" (per-channel) plug ins
        for spec in self.local_plugins:
            if not spec.module.lower() in disabled_plugins:
                ginga_shell.add_local_plugin(spec)

        # Load any custom plugins (same "pfx.Module" convention as above)
        if options.plugins:
            plugins = options.plugins.split(',')
            for longPluginName in plugins:
                if '.' in longPluginName:
                    tmpstr = longPluginName.split('.')
                    pluginName = tmpstr[-1]
                    pfx = '.'.join(tmpstr[:-1])
                else:
                    pluginName = longPluginName
                    pfx = None
                spec = Bunch(module=pluginName, ws='dialogs',
                             hidden=False, pfx=pfx)
                ginga_shell.add_local_plugin(spec)

        ginga_shell.update_pending()

        # TEMP?
        tab_names = list(map(lambda name: name.lower(),
                             ginga_shell.ds.get_tabnames(group=None)))
        if 'info' in tab_names:
            ginga_shell.ds.raise_tab('Info')
        if 'thumbs' in tab_names:
            ginga_shell.ds.raise_tab('Thumbs')

        # Add custom channels; the first one becomes current.
        channels = options.channels.split(',')
        for chname in channels:
            ginga_shell.add_channel(chname)
        ginga_shell.change_channel(channels[0])

        # User configuration (custom star catalogs, etc.)
        if have_ginga_config:
            try:
                ginga_config.post_gui_config(ginga_shell)
            except Exception as e:
                try:
                    (type, value, tb) = sys.exc_info()
                    tb_str = "\n".join(traceback.format_tb(tb))
                except Exception:
                    tb_str = "Traceback information unavailable."
                logger.error("Error processing Ginga config file: %s" % (
                    str(e)))
                logger.error("Traceback:\n%s" % (tb_str))

        # Redirect warnings to logger
        for hdlr in logger.handlers:
            logging.getLogger('py.warnings').addHandler(hdlr)

        # Display banner the first time run, unless suppressed
        showBanner = True
        try:
            showBanner = settings.get('showBanner')
        except KeyError:
            # disable for subsequent runs
            settings.set(showBanner=False)
            settings.save()

        if (not options.nosplash) and (len(args) == 0) and showBanner:
            ginga_shell.banner(raiseTab=True)

        # Assume remaining arguments are fits files and load them.
        for imgfile in args:
            ginga_shell.nongui_do(ginga_shell.load_file, imgfile)

        try:
            try:
                # if there is a network component, start it
                if hasattr(ginga_shell, 'start'):
                    task = Task.FuncTask2(ginga_shell.start)
                    thread_pool.addTask(task)

                # Main loop to handle GUI events
                logger.info("Entering mainloop...")
                ginga_shell.mainloop(timeout=0.001)

            except KeyboardInterrupt:
                logger.error("Received keyboard interrupt!")

        finally:
            logger.info("Shutting down...")
            ev_quit.set()

        sys.exit(0)
def reference_viewer(sys_argv):
    """Build a ReferenceViewer with the default layout and plugins and run it.

    sys_argv: the full argv list (argv[0] is the program name); options and
    positional arguments are parsed with optparse from argv[1:].
    """
    viewer = ReferenceViewer(layout=default_layout)
    viewer.add_default_plugins()
    # Parse command line options with optparse module
    from optparse import OptionParser
    usage = "usage: %prog [options] cmd [args]"
    optprs = OptionParser(usage=usage,
                          version=('%%prog %s' % version.version))
    viewer.add_default_options(optprs)
    (options, args) = optprs.parse_args(sys_argv[1:])
    # Allow overriding the X display from the command line.
    if options.display:
        os.environ['DISPLAY'] = options.display
    # Are we debugging this?
    if options.debug:
        import pdb
        # NOTE(review): pdb.run evaluates its command string in __main__'s
        # namespace, where the local 'viewer' may not be visible -- confirm.
        pdb.run('viewer.main(options, args)')
    # Are we profiling this?
    elif options.profile:
        import profile
        print(("%s profile:" % sys_argv[0]))
        # NOTE(review): same namespace caveat as pdb.run above.
        profile.run('viewer.main(options, args)')
    else:
        viewer.main(options, args)
def _main():
    """Entry point used when this module is executed as a script."""
    argv = sys.argv
    reference_viewer(argv)
# END
| {
"content_hash": "65a401999d8be9614fe700a5633c88fd",
"timestamp": "",
"source": "github",
"line_count": 557,
"max_line_length": 86,
"avg_line_length": 40.077199281867145,
"alnum_prop": 0.5538682076781795,
"repo_name": "Cadair/ginga",
"id": "6e8631473bac60fa4ec7033a7ea581599ae43f1b",
"size": "22439",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ginga/main.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "1939"
},
{
"name": "JavaScript",
"bytes": "8724"
},
{
"name": "Python",
"bytes": "2458171"
}
],
"symlink_target": ""
} |
from rockit.core import serializers
from rockit.core.holders import Holder
class MixesHolder(Holder):
    """
    Holder that collects the items which can be assigned to a mix:
    trigger items ('when'), action items ('then') and final items
    ('finish'), each published as its own group.
    """

    CONTAINER_WHEN = 'when'
    CONTAINER_THEN = 'then'
    CONTAINER_FINISH = 'finish'

    def __init__(self, association):
        super(MixesHolder, self).__init__()
        # Serialized association shared by every container dict.
        self.association = serializers.AssociationSerializer(association).data
        self.when = self._create_container(self.CONTAINER_WHEN)
        self.then = self._create_container(self.CONTAINER_THEN)
        self.finish = self._create_container(self.CONTAINER_FINISH)
        self.dirty = False
        # Was 'False;' -- trailing semicolons removed throughout.
        self.resolve_names = False

    def add_finish(self, **kwargs):
        """Add a final item."""
        self._add(self.finish, **kwargs)

    def add_then(self, **kwargs):
        """Add a then item."""
        self._add(self.then, **kwargs)

    def add_when(self, **kwargs):
        """Add a when item."""
        self._add(self.when, **kwargs)

    def get_content(self):
        """
        Rebuild the when/then/finish groups from the containers before
        delegating to the base implementation.
        """
        # NOTE(review): rebuild assumed to happen only while dirty -- confirm
        # against the original indentation, which this dump does not preserve.
        if self.dirty:
            self.reset_group(self.CONTAINER_WHEN)
            self.reset_group(self.CONTAINER_THEN)
            self.reset_group(self.CONTAINER_FINISH)
            # Only publish containers that actually hold items.
            if self.when['items']:
                self.append(self.when, self.CONTAINER_WHEN)
            if self.then['items']:
                self.append(self.then, self.CONTAINER_THEN)
            if self.finish['items']:
                self.append(self.finish, self.CONTAINER_FINISH)
        return super(MixesHolder, self).get_content()

    def mark_resolve_names(self):
        self.resolve_names = True

    def should_resolve_names(self):
        return self.resolve_names

    def _add(self, container, **kwargs):
        # Missing identifier/name fall back to the sentinel 'NOT_SET'.
        container['items'].append({
            'identifier': kwargs.get('identifier', 'NOT_SET'),
            'name': kwargs.get('name', 'NOT_SET')
        })
        self.dirty = True

    def _create_container(self, container):
        # Register the group on the holder, then return a fresh container.
        self.create_group(container)
        return {
            'association': self.association,
            'items': list()
        }
class MixesDetailsHolder(Holder):
    """
    Holder for the details of a mix: collects named field descriptors and
    publishes them in the 'actions' group under the HTTP verb
    ('POST' for create, 'PUT' for update).
    """

    def __init__(self):
        super(MixesDetailsHolder, self).__init__()
        self.container = dict()
        self.dirty = False
        self.name = 'POST'

    def add_post(self, **kwargs):
        """Add post (create) field data."""
        self._create_data('POST', **kwargs)

    def add_update(self, **kwargs):
        """Add update field data."""
        self._create_data('PUT', **kwargs)

    def get_content(self):
        """Publish the collected fields before calling the super class."""
        if self.dirty:
            self.reset_group('actions')
            # self.name is always a plain str ('POST'/'PUT'); the original
            # "'%s' % self.name" was a redundant identity format.
            self.append({self.name: self.container}, 'actions', True)
        # Nothing collected: clear the group entirely.
        # (Was "len(self.container) is 0" -- identity comparison on an int,
        # which only works by CPython small-int caching.)
        if not self.container:
            self.reset_group('actions')
        return super(MixesDetailsHolder, self).get_content()

    def _create_data(self, name, **kwargs):
        self.name = name
        data = {
            'id': kwargs.get('identifier', 'UNKNOWN_IDENTIFIER'),
            'type': kwargs.get('type', ''),
            'required': kwargs.get('required', False),
            'label': kwargs.get('label', ''),
            'value': kwargs.get('value', '')
        }
        # max_length is optional and only forwarded when supplied.
        if 'max_length' in kwargs:
            data['max_length'] = kwargs.get('max_length', 0)
        self.container[kwargs.get('identifier', 'UNKNOWN_IDENTIFIER')] = data
        self.dirty = True
| {
"content_hash": "36e81f8a11aea79b28a2b2d2a2bc7ec3",
"timestamp": "",
"source": "github",
"line_count": 137,
"max_line_length": 79,
"avg_line_length": 26.956204379562045,
"alnum_prop": 0.5548334687246141,
"repo_name": "acreations/rockit-server",
"id": "365e710257398374bd966df3e74093d53f3e5b3b",
"size": "3693",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rockit/core/holders/mixes.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "1920"
},
{
"name": "CSS",
"bytes": "4090"
},
{
"name": "HTML",
"bytes": "24419"
},
{
"name": "JavaScript",
"bytes": "35318"
},
{
"name": "Python",
"bytes": "134803"
},
{
"name": "Shell",
"bytes": "553"
}
],
"symlink_target": ""
} |
import base_filters
# Google Doc key -- presumably the doc holding this graphic's copy; confirm
# against the dailygraphics rig documentation.
COPY_GOOGLE_DOC_KEY = '18Zus5ARv6KLPX49AsIzoOU2_nlfl0NMmhdTEIu58T68'

USE_ASSETS = False

# Use these variables to override the default cache timeouts for this graphic
# DEFAULT_MAX_AGE = 20
# ASSETS_MAX_AGE = 300

# Jinja filters made available to this graphic's templates.
JINJA_FILTER_FUNCTIONS = base_filters.FILTERS
| {
"content_hash": "f68f717bec73c8b8c842d7c2b89ecff4",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 77,
"avg_line_length": 25.636363636363637,
"alnum_prop": 0.7872340425531915,
"repo_name": "nprapps/dailygraphics",
"id": "0605fc9a267131a27650eaf61ba53b51b570dc53",
"size": "305",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "graphic_templates/diverging_bar_chart/graphic_config.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "22532"
},
{
"name": "HTML",
"bytes": "62674"
},
{
"name": "JavaScript",
"bytes": "474385"
},
{
"name": "Python",
"bytes": "82078"
},
{
"name": "Shell",
"bytes": "810"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
import cobra
import json
import pandas as pd
import cobra.test
from cobra.core import Reaction
from medusa.core.ensemble import Ensemble
from cobra.io import load_json_model
from os.path import abspath, dirname, join
from pickle import load
# Package root: one directory above this test package.
medusa_directory = abspath(join(dirname(abspath(__file__)), ".."))
# Location of the bundled test data files (trailing separator kept).
data_dir = join(medusa_directory,"test","data","")
def create_test_ensemble(ensemble_name="Staphylococcus aureus"):
    """Return a previously generated ensemble for testing.

    ensemble_name : str
        Currently only "Staphylococcus aureus" is available.
    """
    if ensemble_name != "Staphylococcus aureus":
        raise ValueError('ensemble_name does not match one of the test ensembles available')
    with open(join(data_dir, "Staphylococcus_aureus_ensemble.pickle"), 'rb') as infile:
        return load(infile)
def create_test_model(model_name="textbook"):
    """Return a cobra.Model for testing.

    model_name : str
        "Saureus_seed" for the bundled S. aureus JSON model, or any name
        accepted by cobra.test.create_test_model (e.g. "textbook").

    Raises
    ------
    ValueError
        If model_name matches none of the available test models.
    """
    if model_name == "Saureus_seed":
        return cobra.io.load_json_model(join(data_dir, 'Staphylococcus aureus.json'))
    try:
        return cobra.test.create_test_model(model_name)
    except Exception:
        # Was a bare 'except:', which also swallowed SystemExit and
        # KeyboardInterrupt; only real errors should become ValueError.
        raise ValueError('model_name does not match one of the test models available')
def load_biolog_plata():
    """Load the Plata biolog test data.

    Returns a 3-tuple:
    - DataFrame of the biolog base medium composition,
    - dict mapping every component ID to a value of 1000,
    - DataFrame of thresholded growth calls (first column as index).
    """
    biolog_base_composition = pd.read_csv(
        join(data_dir, 'biolog_base_composition.csv'), sep=',')
    # One flat value of 1000 per component; dict.fromkeys replaces the
    # original zip over a hand-built list of repeated 1000s.
    biolog_base_dict = dict.fromkeys(biolog_base_composition['ID'], 1000)
    biolog_thresholded = pd.read_csv(
        join(data_dir, 'plata_thresholded.csv'), sep='\t', index_col=0)
    return biolog_base_composition, biolog_base_dict, biolog_thresholded
def load_universal_modelseed():
    """Load the universal ModelSEED model, keeping only balanced reactions.

    Reactions whose ModelSEED status is neither 'OK' nor 'HB' are removed
    (guards against mass- and charge-imbalanced reactions), and metabolites
    that no longer appear in any remaining reaction are dropped.
    """
    seed_rxn_table = pd.read_csv(join(data_dir, 'reactions_seed_20180809.tsv'), sep='\t')
    seed_rxn_table['id'] = seed_rxn_table['id'] + '_c'
    universal = load_json_model(join(data_dir, 'universal_mundy.json'))
    # Set membership is O(1); the original list made the reaction scan
    # quadratic in the number of reactions.
    ok_ids = set(seed_rxn_table.loc[(seed_rxn_table['status'] == 'OK') |
                                    (seed_rxn_table['status'] == 'HB')]['id'])
    remove_rxns = [reaction for reaction in universal.reactions
                   if reaction.id not in ok_ids]
    universal.remove_reactions(remove_rxns)
    # Drop metabolites that are no longer referenced by any reaction.
    mets_in_reactions = {met.id for reaction in universal.reactions
                         for met in reaction.metabolites}
    mets_missing_reactions = [metabolite for metabolite in universal.metabolites
                              if metabolite.id not in mets_in_reactions]
    universal.remove_metabolites(mets_missing_reactions)
    universal.repair()
    return universal
"content_hash": "d238cbca1f63d9007499e3fbe61003c2",
"timestamp": "",
"source": "github",
"line_count": 86,
"max_line_length": 116,
"avg_line_length": 37.25581395348837,
"alnum_prop": 0.6779026217228464,
"repo_name": "gregmedlock/Medusa",
"id": "24e0a7c0a3451be17d3cbfa67ad9545708e664bb",
"size": "3204",
"binary": false,
"copies": "1",
"ref": "refs/heads/development",
"path": "medusa/test/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "95195"
}
],
"symlink_target": ""
} |
import sys
import re
import os
from parser import parse_file
from style import Style
from content_error import ContentError
from song import Song
from songbook import Songbook
'''
If your text is one line string then you can use
from reportlab.pdfbase.pdfmetrics import stringWidth
textWidth = stringWidth(text, fontName, fontSize)
If your text is multi-line and you are working
in a rectangular area with a defined width, then do
from reportlab.lib.utils import simpleSplit
lines = simpleSplit(text, fontName, fontSize, maxWidth)
lines is a list of all the lines of your paragraph, if you know the line spacing value then the
height of the paragraph can be calculated as lineSpacing*len(lines)
'''
def main():
    """Command-line entry point: render a songbook file to a PDF.

    Usage: <program> <songbook file> <pdf file>. Always returns 0.
    """
    if len(sys.argv) != 3:
        print("Calling format:\n " + sys.argv[0] + " <songbook file> <pdf file>\n", file=sys.stderr)
    else:
        # 'with' guarantees the input file is closed; previously it was
        # opened and never closed.
        with open(sys.argv[1], encoding='utf-8') as f:
            s = Songbook(f)
            s.draw(sys.argv[2])
    return 0
# Run only when executed as a script, not on import.
if __name__ == "__main__":
    main()
| {
"content_hash": "8f70e3b493d3b2ac4da0614aa07fbdbd",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 97,
"avg_line_length": 23.714285714285715,
"alnum_prop": 0.7259036144578314,
"repo_name": "wojtex/cantionale",
"id": "413e23ab053c3af303816c1c2ce0f1ac5a4b7ae3",
"size": "1040",
"binary": false,
"copies": "1",
"ref": "refs/heads/devel",
"path": "cantionale.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "61148"
}
],
"symlink_target": ""
} |
from PDFWriter import PDFWriter
# Render this script's own source code into a PDF via PDFWriter.
with PDFWriter("test_pdfwriter.pdf") as writer:
    writer.setFont("Courier", 12)
    writer.setHeader("Input: test_pdfwriter.py Output: test_pdfwriter.pdf")
    writer.setFooter("Generated by xtopdf: http://bit.ly/xtopdf")
    with open("test_pdfwriter.py") as source:
        for line in source:
            writer.writeLine(line)
| {
"content_hash": "d635643e16a12a823b07a5b33dc37510",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 71,
"avg_line_length": 31.454545454545453,
"alnum_prop": 0.6705202312138728,
"repo_name": "ActiveState/code",
"id": "5100eb0b8404ce14532659acf12bb520da55693d",
"size": "367",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "recipes/Python/578790_Use_PDFWriter_context_manager/recipe-578790.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "35894"
},
{
"name": "C",
"bytes": "56048"
},
{
"name": "C++",
"bytes": "90880"
},
{
"name": "HTML",
"bytes": "11656"
},
{
"name": "Java",
"bytes": "57468"
},
{
"name": "JavaScript",
"bytes": "181218"
},
{
"name": "PHP",
"bytes": "250144"
},
{
"name": "Perl",
"bytes": "37296"
},
{
"name": "Perl 6",
"bytes": "9914"
},
{
"name": "Python",
"bytes": "17387779"
},
{
"name": "Ruby",
"bytes": "40233"
},
{
"name": "Shell",
"bytes": "190732"
},
{
"name": "Tcl",
"bytes": "674650"
}
],
"symlink_target": ""
} |
import clang_util
import cpp
import file_parser
import re
import util
import parser_addition
from clang.cindex import TypeKind
def sequence_from_text(text):
    """Turn a space-separated string into a list of cpp.SimpleToken."""
    return list(map(cpp.SimpleToken, text.split(' ')))
def get_alias_from_text(name, text):
    """Build a cpp.Alias whose tokens come from the space-separated text."""
    return cpp.Alias(name, list(map(cpp.SimpleToken, text.split(' '))))
def get_function_from_text(classname, functionname, return_str, text, function_type='function'):
    """Build a cpp.Function whose tokens come from the space-separated text."""
    tokens = list(map(cpp.SimpleToken, text.split(' ')))
    return cpp.Function(classname, functionname, return_str, tokens, function_type)
def returns_this(function):
    """True when the tokens after the declaration contain 'return * this ;'."""
    start = cpp.get_declaration_end_index(function.name, function.tokens)
    tail = function.tokens[start:]
    return cpp.contains_sequence(tail, sequence_from_text('return * this ;'))
def get_table_return_type(function):
    """Return-type string to use for the type-erasure table entry.

    Functions returning '(const) T &' of their own class, or whose body is
    'return *this', map to 'void ' (chaining is re-established elsewhere);
    a by-value 'T' self-return yields None; anything else is returned
    verbatim as the concatenated tokens before the function name.
    """
    index, offset = cpp.find_function_name(function.name, function.tokens)
    # Everything before the function name is the textual return type.
    return_type = util.concat(function.tokens[:index], ' ')
    if return_type in ['const ' + function.classname + ' & ',
                       function.classname + ' & ']:
        return 'void '
    if return_type == function.classname + ' ':
        # By-value self-return: caller gets None.
        return
    if returns_this(function):
        return 'void '
    return util.concat(function.tokens[:index], ' ')
def replace_in_tokens(old_spelling, new_spelling, tokens):
    """Mutate tokens in place, rewriting every spelling equal to old_spelling."""
    for tok in (t for t in tokens if t.spelling == old_spelling):
        tok.spelling = new_spelling
def const_specifier(function):
    """Return 'const ' for const member functions, otherwise the empty string."""
    if cpp.is_const(function):
        return 'const '
    return ''
def same_tokens(tokens, other_tokens):
    """True when both token sequences have identical spellings, pairwise.

    Replaces the original manual index loop with len check + all/zip.
    """
    if len(tokens) != len(other_tokens):
        return False
    return all(a.spelling == b.spelling for a, b in zip(tokens, other_tokens))
def contains(name, tokens):
    """True when any token's spelling equals name (idiomatic any() form)."""
    return any(token.spelling == name for token in tokens)
def get_function_name_for_tokens(name):
    """Insert a space after 'operator' so operator names tokenize cleanly."""
    if 'operator' not in name:
        return name
    # Keep everything after the 8-char 'operator' prefix.
    return 'operator ' + name[8:]
def get_function_name_for_type_erasure(function):
    """Mangle a function name into an identifier-safe, overload-unique name.

    Operators map to word names ('operator==' -> 'compare', ...) and the
    argument types are appended as a normalized suffix.
    """
    suffix = ''.join('_' + arg.type() for arg in cpp.get_function_arguments(function))
    # Normalize the type text into a valid identifier fragment.
    suffix = suffix.replace('&', '_ref').replace('*', '_ptr')
    suffix = re.sub(' |::|\(|\)', '_', suffix)
    suffix = re.sub(r'<|>|\[|\]\(|\)\{\}', '', suffix)
    suffix = re.sub('_+', '_', suffix)
    if suffix.endswith('_'):
        suffix = suffix[:-1]
    # Word names for operators; anything else keeps its own name.
    operator_names = {
        'operator()': 'call',
        'operator=': 'assignment',
        'operator+=': 'add',
        'operator*=': 'multiply',
        'operator-=': 'subtract',
        'operator-': 'negate',
        'operator/=': 'divide',
        'operator==': 'compare',
    }
    return operator_names.get(function.name, function.name) + suffix
class CppFileParser(file_parser.FileProcessor):
    """
    file_parser.FileProcessor implementation that builds a cpp scope tree
    (rooted at the 'global' namespace) from the clang cursors handed to the
    process_* hooks.
    """

    def __init__(self):
        self.scope = cpp.Namespace('global')

    def _get_classname(self, data):
        # Name of the struct/class currently being parsed, '' at file scope.
        return data.current_struct.spelling or ''

    def _get_definition_tokens(self, data, cursor):
        # Tokens of the definition up to the end of its body; when the last
        # token is not a ';' and the open scope carries tokens, merge both
        # token streams (shared by process_function/process_constructor).
        current_scope = self.scope.get_open_scope()
        tokens = clang_util.get_tokens(data.tu, cursor)
        tokens = tokens[:cpp.get_body_end_index(cursor.spelling, tokens)]
        if current_scope.get_tokens() and not tokens[-1].spelling == clang_util.semicolon:
            tokens = clang_util.get_all_tokens(tokens, current_scope.get_tokens())
        return tokens

    def _get_return_str(self, cursor):
        # 'return ' for non-void results, '' otherwise.
        return 'return ' if cursor.result_type.kind != TypeKind.VOID else ''

    def process_inclusion_directive(self, data, cursor):
        self.scope.add(cpp.InclusionDirective(
            clang_util.parse_inclusion_directive(data, cursor).replace('#include ', '').replace('\n', '')))

    def process_open_include_guard(self, filename):
        self.scope.add(cpp.ScopeEntry('include_guard', parser_addition.extract_include_guard(filename)))

    def process_headers(self, headers):
        self.scope.add(cpp.ScopeEntry('headers', headers))

    def process_open_namespace(self, data, cursor):
        self.scope.add(cpp.Namespace(cursor.spelling, clang_util.get_tokens(data.tu, cursor)))

    def process_close_namespace(self):
        self.scope.close()

    def process_open_class(self, data, cursor):
        # A ';' as third token means 'class X ;' -- a forward declaration,
        # stored with only its first three tokens.
        if clang_util.get_tokens(data.tu, cursor)[2].spelling == clang_util.semicolon:
            self.scope.add(cpp.Class(data.current_struct.spelling, clang_util.get_tokens(data.tu, cursor)[:3]))
        else:
            self.scope.add(cpp.Class(data.current_struct.spelling, clang_util.get_tokens(data.tu, cursor)))

    def process_open_struct(self, data, cursor):
        # Same forward-declaration special case as process_open_class.
        if clang_util.get_tokens(data.tu, cursor)[2].spelling == clang_util.semicolon:
            self.scope.add(cpp.Struct(data.current_struct.spelling, clang_util.get_tokens(data.tu, cursor)[:3]))
        else:
            self.scope.add(cpp.Struct(data.current_struct.spelling, clang_util.get_tokens(data.tu, cursor)))

    def process_close_class(self):
        # A namespace on top of the open-scope stack means there is no class
        # scope left to close here.
        if self.scope.get_open_scope().get_type() == cpp.NAMESPACE:
            return
        self.scope.close()

    def process_function(self, data, cursor):
        function_type = cpp.FUNCTION_TEMPLATE if clang_util.is_function_template(cursor.kind) else cpp.FUNCTION
        self.scope.add(cpp.Function(self._get_classname(data), cursor.spelling,
                                    self._get_return_str(cursor),
                                    self._get_definition_tokens(data, cursor),
                                    function_type))

    def process_function_template(self, data, cursor):
        self.process_function(data, cursor)

    def process_constructor(self, data, cursor):
        # Same gathering as process_function, but always typed CONSTRUCTOR.
        self.scope.add(cpp.Function(self._get_classname(data), cursor.spelling,
                                    self._get_return_str(cursor),
                                    self._get_definition_tokens(data, cursor),
                                    cpp.CONSTRUCTOR))

    def process_destructor(self, data, cursor):
        self.process_function(data, cursor)

    def process_type_alias(self, data, cursor):
        self.scope.add(cpp.Alias(cursor.spelling, clang_util.get_tokens(data.tu, cursor)))

    def process_variable_declaration(self, data, cursor):
        tokens = clang_util.get_tokens(data.tu, cursor)
        if tokens[-1].spelling != clang_util.semicolon and self.scope.get_open_scope().get_tokens():
            tokens = clang_util.get_all_variable_tokens(tokens, self.scope.get_open_scope().get_tokens())
        variable_declaration = util.concat(tokens, ' ')
        # When an underlying type is specified, clang reports enums as
        # variables; skip these cases entirely.
        if 'enum ' in variable_declaration:
            # TODO try to find a workaround for this
            return
        if clang_util.get_tokens(data.tu, cursor)[0].spelling == 'static':
            self.scope.add(cpp.StaticVariable(variable_declaration))
        else:
            self.scope.add(cpp.Variable(variable_declaration))

    def process_member_variable_declaration(self, data, cursor):
        self.process_variable_declaration(data, cursor)

    def process_forward_declaration(self, data, cursor):
        pass

    def process_enum(self, data, cursor):
        self.scope.add(cpp.ScopeEntry('enum', clang_util.get_enum_definition(data.tu, cursor)))

    def process_access_specifier(self, data, cursor):
        self.scope.add(cpp.AccessSpecifier(clang_util.get_tokens(data.tu, cursor)[0].spelling))
class Visitor(object):
    """Base visitor for scope entries.

    Every specific visit_* hook funnels into visit(); subclasses override
    either the generic visit() or individual hooks. The delegation chain
    (e.g. template function -> function -> visit) lets subclasses intercept
    at the most specific level they care about.
    """
    def visit(self,visited):
        # Default: do nothing (returns None).
        pass
    def visit_function(self,function):
        return self.visit(function)
    def visit_template_function(self,function):
        # Template functions default to plain-function handling.
        return self.visit_function(function)
    def visit_constructor(self,constructor):
        return self.visit_function(constructor)
    def visit_destructor(self,destructor):
        return self.visit_function(destructor)
    def visit_operator(self,operator):
        return self.visit_function(operator)
    def visit_class(self,class_):
        return self.visit(class_)
    def visit_forward_declaration(self,forward_declaration):
        return self.visit(forward_declaration)
    def visit_template_class(self,template_class):
        return self.visit(template_class)
    def visit_namespace(self,namespace):
        return self.visit(namespace)
    def visit_inclusion_directive(self,inclusion_directive):
        return self.visit(inclusion_directive)
    def visit_access_specifier(self,access_specifier):
        return self.visit(access_specifier)
    def visit_variable(self,variable):
        return self.visit(variable)
    def visit_static_variable(self,variable):
        return self.visit(variable)
    def visit_alias(self,alias):
        return self.visit(alias)
    def visit_comment(self,comment):
        return self.visit(comment)
class RecursionVisitor(Visitor):
    """Visitor that recurses into the contents of scope-like entries."""

    def _visit_children(self, scope):
        # Dispatch each contained entry back through the visitor protocol
        # (shared by the three hooks below, which were identical loops).
        for child in scope.content:
            child.visit(self)

    def visit_class(self, class_):
        self._visit_children(class_)

    def visit_template_class(self, template_class):
        self._visit_children(template_class)

    def visit_namespace(self, namespace):
        self._visit_children(namespace)
class ExtractPublicProtectedPrivateSections(RecursionVisitor):
    """Split a class's entries into public/protected/private buckets."""

    def __init__(self):
        self.private_section = []
        self.protected_section = []
        self.public_section = []
        # Entries before any access specifier go to the private bucket.
        self.access_specifier = cpp.PRIVATE

    def visit_access_specifier(self, access_specifier):
        # Remember the most recent specifier for subsequent entries.
        self.access_specifier = access_specifier.value

    def visit(self, entry):
        if self.access_specifier == cpp.PRIVATE:
            bucket = self.private_section
        elif self.access_specifier == cpp.PROTECTED:
            bucket = self.protected_section
        else:
            bucket = self.public_section
        bucket.append(entry)
def append_comment(comment, group):
    """Append comment to group when present; always return None so the
    caller can clear its pending-comment slot in one assignment."""
    if comment:
        group.append(comment)
    return None
class ExtractTypes(Visitor):
    """
    Visitor that buckets scope entries by kind (aliases, constructors,
    operators, ...) so they can later be regrouped in a canonical order.

    A visited comment is held back in self.comment and flushed into the
    same bucket as the next entry it precedes (via append_comment).
    """
    def __init__(self):
        self.aliases = []
        self.static_variables = []
        self.constructors = []
        self.destructor = []
        self.operators = []
        self.functions = []
        self.forward_declarations = []
        self.variables = []
        # Pending comment waiting to be attached to the next entry.
        self.comment = None
    def visit_comment(self,comment):
        self.comment = comment
    def visit_function(self,function):
        # NOTE: the first two 'if's are deliberately independent statements
        # (not elif): an assignment operator only matches the first one.
        if function.type in [cpp.ASSIGNMENT_OPERATOR]:
            self.comment = append_comment(self.comment, self.operators)
            self.operators.append(function)
        if function.type in [cpp.FUNCTION, cpp.FUNCTION_TEMPLATE]:
            # 'operator...' names are functions but belong with operators.
            if function.name.startswith('operator'):
                self.comment = append_comment(self.comment, self.operators)
                self.operators.append(function)
            else:
                self.comment = append_comment(self.comment, self.functions)
                self.functions.append(function)
        elif function.type in [cpp.CONSTRUCTOR, cpp.CONSTRUCTOR_TEMPLATE]:
            self.comment = append_comment(self.comment, self.constructors)
            self.constructors.append(function)
        elif function.type == cpp.DESTRUCTOR:
            self.comment = append_comment(self.comment, self.destructor)
            self.destructor.append(function)
    def visit_variable(self,variable):
        self.comment = append_comment(self.comment,self.variables)
        self.variables.append(variable)
    def visit_static_variable(self,variable):
        self.comment = append_comment(self.comment,self.static_variables)
        self.static_variables.append(variable)
    def visit_alias(self,alias):
        self.comment = append_comment(self.comment,self.aliases)
        self.aliases.append(alias)
    def visit_forward_declaration(self,forward_declaration):
        # Forward declarations never take a pending comment.
        self.forward_declarations.append(forward_declaration)
def extend_section(new_section, section_part, with_separator=True):
    """Extend new_section with section_part, inserting a cpp.Separator
    between two non-empty parts when with_separator is set."""
    needs_separator = with_separator and bool(new_section) and bool(section_part)
    if needs_separator:
        new_section.append(cpp.Separator())
    new_section.extend(section_part)
def sort_section(section):
    """Return the section's entries regrouped in canonical order:
    aliases, static variables, constructors, destructor, operators,
    functions, forward declarations, then variables."""
    extractor = ExtractTypes()
    for entry in section:
        entry.visit(extractor)
    ordered = []
    ordered.extend(extractor.aliases)
    for group in (extractor.static_variables,
                  extractor.constructors,
                  extractor.destructor,
                  extractor.operators,
                  extractor.functions,
                  extractor.forward_declarations):
        extend_section(ordered, group)
    # Variables close the section without a leading separator.
    extend_section(ordered, extractor.variables, with_separator=False)
    return ordered
class SortClass(RecursionVisitor):
    """Rewrite a class's content as sorted public/protected/private sections."""

    def visit_class(self, class_):
        sections = ExtractPublicProtectedPrivateSections()
        class_.visit(sections)
        rebuilt = []
        # Emit each non-empty section behind its access specifier.
        for specifier, entries in (
                (cpp.public_access, sort_section(sections.public_section)),
                (cpp.protected_access, sort_section(sections.protected_section)),
                (cpp.private_access, sort_section(sections.private_section))):
            if entries:
                rebuilt.append(specifier)
                rebuilt.extend(entries)
        class_.content = rebuilt
def remove_inclusion_directives(main_scope):
    """Drop every inclusion directive from the scope's content (in place)."""
    main_scope.content = [entry for entry in main_scope.content
                          if not cpp.is_inclusion_directive(entry)]
def remove_duplicate_inclusion_directives(main_scope):
    """Keep only the first inclusion directive per value; other entries pass
    through untouched (in place)."""
    deduped = []
    for entry in main_scope.content:
        if cpp.is_inclusion_directive(entry) and any(
                cpp.is_inclusion_directive(kept) and kept.value == entry.value
                for kept in deduped):
            continue
        deduped.append(entry)
    main_scope.content = deduped
def prepend_inclusion_directives(main_scope, inclusion_directives):
    """Insert the directives, in order, at the front of the scope's content.

    Slice assignment is O(n); the previous reversed loop of insert(0, ...)
    shifted the whole list once per directive.
    """
    main_scope.content[:0] = list(inclusion_directives)
def append_inclusion_directive(main_scope, inclusion_directive):
    """Insert the directive at the end of the scope's leading include block,
    i.e. just before the first entry that is not an inclusion directive."""
    for index, entry in enumerate(main_scope.content):
        if not cpp.is_inclusion_directive(entry):
            main_scope.content.insert(index, inclusion_directive)
            return
    # Previously the directive was silently dropped when the scope was empty
    # or contained only inclusion directives; append it instead.
    main_scope.content.append(inclusion_directive)
def append_inclusion_directives(main_scope, inclusion_directives):
    """Insert each directive into the scope's include block, in order."""
    for directive in inclusion_directives:
        append_inclusion_directive(main_scope, directive)
def add_comment(new_content, entry, comments):
    """Append a cpp.Comment for entry when comments holds one for it."""
    found = util.get_comment(comments, entry)
    if found:
        new_content.append(cpp.Comment(found))
def add_comments(scope, comments):
    """Recursively weave comments into the scope tree (in place).

    For each entry that has a lookup key -- namespaces ('namespace <name>'),
    classes/structs ('<type> <name>'), functions (their declaration) and
    aliases (their token text) -- a matching comment from 'comments' is
    inserted immediately before the entry.
    """
    new_content = []
    for entry in scope.content:
        if cpp.is_namespace(entry):
            add_comment(new_content, 'namespace ' + entry.name, comments)
            # Recurse into the namespace before appending it.
            add_comments(entry, comments)
        elif cpp.is_class(entry) or cpp.is_struct(entry):
            add_comment(new_content, entry.type + ' ' + entry.name, comments)
            add_comments(entry, comments)
        elif entry.type in [cpp.FUNCTION, cpp.CONSTRUCTOR, cpp.DESTRUCTOR, cpp.FUNCTION_TEMPLATE, cpp.ASSIGNMENT_OPERATOR]:
            add_comment(new_content, entry.get_declaration(), comments)
        elif entry.type == cpp.ALIAS:
            add_comment(new_content, util.concat(entry.tokens, ' '), comments)
        new_content.append(entry)
    scope.content = new_content
| {
"content_hash": "27a2a939c29be7b3420cb7d0c52be4d9",
"timestamp": "",
"source": "github",
"line_count": 449,
"max_line_length": 151,
"avg_line_length": 37.76391982182628,
"alnum_prop": 0.6643076197216324,
"repo_name": "lubkoll/friendly-type-erasure",
"id": "01ac14c403a10c45529fc4d6702dd7d776cb7376",
"size": "16956",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "type_erasure/cpp_file_parser.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "224252"
},
{
"name": "CMake",
"bytes": "2192"
},
{
"name": "Python",
"bytes": "184937"
},
{
"name": "Shell",
"bytes": "5563"
}
],
"symlink_target": ""
} |
'''OpenGL extension SUN.vertex
This module customises the behaviour of the
OpenGL.raw.GL.SUN.vertex to provide a more
Python-friendly API
Overview (from the spec)
This extension provides new GL commands to specify vertex data such as
color and normal along with the vertex in one single GL command in order to
minimize the overhead in making GL commands for each set of vertex data.
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/SUN/vertex.txt
'''
from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.GL import _types, _glgets
from OpenGL.raw.GL.SUN.vertex import *
from OpenGL.raw.GL.SUN.vertex import _EXTENSION_NAME
def glInitVertexSUN():
    '''Return boolean indicating whether this extension is available'''
    # Local import mirrors the other auto-generated extension modules.
    from OpenGL import extensions
    return extensions.hasGLExtension( _EXTENSION_NAME )
# Auto-generated wrapper re-bindings: each gl...SUN entry point is wrapped
# with the fixed array sizes of its pointer arguments so PyOpenGL can
# validate/convert them. Apparent argument keys: 'c' color, 'v' vertex,
# 'n' normal, 'tc' texture coordinate, 'rc' replacement code.
glColor4ubVertex2fvSUN=wrapper.wrapper(glColor4ubVertex2fvSUN).setInputArraySize(
    'c', 4
).setInputArraySize(
    'v', 2
)
glColor4ubVertex3fvSUN=wrapper.wrapper(glColor4ubVertex3fvSUN).setInputArraySize(
    'c', 4
).setInputArraySize(
    'v', 3
)
glColor3fVertex3fvSUN=wrapper.wrapper(glColor3fVertex3fvSUN).setInputArraySize(
    'c', 3
).setInputArraySize(
    'v', 3
)
glNormal3fVertex3fvSUN=wrapper.wrapper(glNormal3fVertex3fvSUN).setInputArraySize(
    'v', 3
).setInputArraySize(
    'n', 3
)
glColor4fNormal3fVertex3fvSUN=wrapper.wrapper(glColor4fNormal3fVertex3fvSUN).setInputArraySize(
    'c', 4
).setInputArraySize(
    'v', 3
).setInputArraySize(
    'n', 3
)
glTexCoord2fVertex3fvSUN=wrapper.wrapper(glTexCoord2fVertex3fvSUN).setInputArraySize(
    'tc', 2
).setInputArraySize(
    'v', 3
)
glTexCoord4fVertex4fvSUN=wrapper.wrapper(glTexCoord4fVertex4fvSUN).setInputArraySize(
    'tc', 4
).setInputArraySize(
    'v', 4
)
glTexCoord2fColor4ubVertex3fvSUN=wrapper.wrapper(glTexCoord2fColor4ubVertex3fvSUN).setInputArraySize(
    'c', 4
).setInputArraySize(
    'tc', 2
).setInputArraySize(
    'v', 3
)
glTexCoord2fColor3fVertex3fvSUN=wrapper.wrapper(glTexCoord2fColor3fVertex3fvSUN).setInputArraySize(
    'c', 3
).setInputArraySize(
    'tc', 2
).setInputArraySize(
    'v', 3
)
glTexCoord2fNormal3fVertex3fvSUN=wrapper.wrapper(glTexCoord2fNormal3fVertex3fvSUN).setInputArraySize(
    'v', 3
).setInputArraySize(
    'tc', 2
).setInputArraySize(
    'n', 3
)
glTexCoord2fColor4fNormal3fVertex3fvSUN=wrapper.wrapper(glTexCoord2fColor4fNormal3fVertex3fvSUN).setInputArraySize(
    'c', 4
).setInputArraySize(
    'v', 3
).setInputArraySize(
    'tc', 2
).setInputArraySize(
    'n', 3
)
glTexCoord4fColor4fNormal3fVertex4fvSUN=wrapper.wrapper(glTexCoord4fColor4fNormal3fVertex4fvSUN).setInputArraySize(
    'c', 4
).setInputArraySize(
    'v', 4
).setInputArraySize(
    'tc', 4
).setInputArraySize(
    'n', 3
)
glReplacementCodeuiVertex3fvSUN=wrapper.wrapper(glReplacementCodeuiVertex3fvSUN).setInputArraySize(
    'v', 3
).setInputArraySize(
    'rc', 1
)
glReplacementCodeuiColor4ubVertex3fvSUN=wrapper.wrapper(glReplacementCodeuiColor4ubVertex3fvSUN).setInputArraySize(
    'c', 4
).setInputArraySize(
    'v', 3
).setInputArraySize(
    'rc', 1
)
glReplacementCodeuiColor3fVertex3fvSUN=wrapper.wrapper(glReplacementCodeuiColor3fVertex3fvSUN).setInputArraySize(
    'c', 3
).setInputArraySize(
    'v', 3
).setInputArraySize(
    'rc', 1
)
glReplacementCodeuiNormal3fVertex3fvSUN=wrapper.wrapper(glReplacementCodeuiNormal3fVertex3fvSUN).setInputArraySize(
    'n', 3
).setInputArraySize(
    'rc', 1
).setInputArraySize(
    'v', 3
)
glReplacementCodeuiColor4fNormal3fVertex3fvSUN=wrapper.wrapper(glReplacementCodeuiColor4fNormal3fVertex3fvSUN).setInputArraySize(
    'n', 3
).setInputArraySize(
    'c', 4
).setInputArraySize(
    'rc', 1
).setInputArraySize(
    'v', 3
)
glReplacementCodeuiTexCoord2fVertex3fvSUN=wrapper.wrapper(glReplacementCodeuiTexCoord2fVertex3fvSUN).setInputArraySize(
    'v', 3
).setInputArraySize(
    'tc', 2
).setInputArraySize(
    'rc', 1
)
glReplacementCodeuiTexCoord2fNormal3fVertex3fvSUN=wrapper.wrapper(glReplacementCodeuiTexCoord2fNormal3fVertex3fvSUN).setInputArraySize(
    'n', 3
).setInputArraySize(
    'v', 3
).setInputArraySize(
    'tc', 2
).setInputArraySize(
    'rc', 1
)
glReplacementCodeuiTexCoord2fColor4fNormal3fVertex3fvSUN=wrapper.wrapper(glReplacementCodeuiTexCoord2fColor4fNormal3fVertex3fvSUN).setInputArraySize(
    'n', 3
).setInputArraySize(
    'c', 4
).setInputArraySize(
    'v', 3
).setInputArraySize(
    'tc', 2
).setInputArraySize(
    'rc', 1
)
### END AUTOGENERATED SECTION | {
"content_hash": "252a7a6a1630c5652ac1dbdf1f8be7f6",
"timestamp": "",
"source": "github",
"line_count": 166,
"max_line_length": 149,
"avg_line_length": 27.698795180722893,
"alnum_prop": 0.7666376685515441,
"repo_name": "alexus37/AugmentedRealityChess",
"id": "21d46cb1e443de6c36a65a3cb677fba9028c74c8",
"size": "4598",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "pythonAnimations/pyOpenGLChess/engineDirectory/oglc-env/lib/python2.7/site-packages/OpenGL/GL/SUN/vertex.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "158062"
},
{
"name": "C++",
"bytes": "267993"
},
{
"name": "CMake",
"bytes": "11319"
},
{
"name": "Fortran",
"bytes": "3707"
},
{
"name": "Makefile",
"bytes": "14618"
},
{
"name": "Python",
"bytes": "12813086"
},
{
"name": "Roff",
"bytes": "3310"
},
{
"name": "Shell",
"bytes": "3855"
}
],
"symlink_target": ""
} |
# Regression check: the (Python 2) `thread` module must resolve to an actual
# module file, i.e. expose `__file__` — presumably verifying that a local
# override shadows the interpreter builtin (see file path "override1_test.py").
import thread
assert hasattr(thread, '__file__')
| {
"content_hash": "e668d5c7d979e20cae3e15393763fe12",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 34,
"avg_line_length": 16.666666666666668,
"alnum_prop": 0.7,
"repo_name": "babble/babble",
"id": "c12155c9d1c5da88b68b70caea542bbc8e3334e6",
"size": "50",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/test/ed/lang/python/override1_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "3378"
},
{
"name": "Groovy",
"bytes": "16151"
},
{
"name": "Java",
"bytes": "7316421"
},
{
"name": "JavaScript",
"bytes": "644844"
},
{
"name": "Python",
"bytes": "10107943"
},
{
"name": "Ruby",
"bytes": "4961765"
},
{
"name": "Shell",
"bytes": "2575"
},
{
"name": "Visual Basic",
"bytes": "481"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, division, print_function, unicode_literals
from yaml import safe_load, safe_dump, SafeLoader
# Teach the safe loader to decode the Python-2 unicode tag as a plain string,
# so documents dumped under Python 2 still load under the safe loader.
SafeLoader.add_constructor('tag:yaml.org,2002:python/unicode', SafeLoader.construct_yaml_str)
def yaml_load(stream):
    """Loads a dictionary from a stream"""
    parsed = safe_load(stream)
    return parsed
def yaml_dump(data, stream=None):
    """Dumps an object to a YAML string"""
    dumped = safe_dump(data, stream=stream, default_flow_style=False)
    return dumped
| {
"content_hash": "bfb64b3b34cd971145707b403589a5f8",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 93,
"avg_line_length": 29.625,
"alnum_prop": 0.7362869198312236,
"repo_name": "Anaconda-Platform/anaconda-client",
"id": "bb2987800e900aac38d6e6be1ee1260b17a34edd",
"size": "498",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "binstar_client/utils/yaml.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "101"
},
{
"name": "Jupyter Notebook",
"bytes": "2976"
},
{
"name": "Python",
"bytes": "318160"
},
{
"name": "Ruby",
"bytes": "8"
},
{
"name": "Shell",
"bytes": "10280"
}
],
"symlink_target": ""
} |
import uuid
class Task(object):
    """
    Task for worker.

    :param id: id for this task. identical tasks should share the same id,
        (e.g. requests for the same meta tile)
    :param doc: the task as JSON
    :param resp_queue: queue the response should be delivered to
        (presumably consumed by the scheduler — confirm with callers)
    :param priority: scheduling priority for this task
    """
    def __init__(self, id, doc, resp_queue=None, priority=None):
        self.id = id
        self.doc = doc
        self.priority = priority
        self.resp_queue = resp_queue
        # Unique per-request token: distinguishes identical tasks (same id).
        self.request_id = uuid.uuid4().hex
        # Filled in once a worker picks this task up.
        self.worker_id = None

    def __repr__(self):
        return '<Task id=%s, priority=%s>' % (self.id, self.priority)
| {
"content_hash": "8c59172adc45bde436a7db36a52bbc3e",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 74,
"avg_line_length": 30.285714285714285,
"alnum_prop": 0.6022012578616353,
"repo_name": "mapproxy/mapproxy-renderd",
"id": "7155b249ed5c6883977771a4ae5b575825819c48",
"size": "1295",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mp_renderd/task.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "136114"
}
],
"symlink_target": ""
} |
import pytest
import salt.states.mac_assistive as assistive
from tests.support.mock import MagicMock, patch
@pytest.fixture
def configure_loader_modules():
    # No loader dunders need pre-population; each test patches
    # assistive.__salt__ itself via patch.dict.
    return {assistive: {}}
def test_installed():
    """
    Test installing a bundle ID as being allowed to run with assistive access
    """
    mock_installed = MagicMock(return_value=False)
    mock_install = MagicMock()
    patched_funcs = {
        "assistive.installed": mock_installed,
        "assistive.install": mock_install,
    }
    with patch.dict(assistive.__salt__, patched_funcs):
        ret = assistive.installed("com.apple.Chess")
    mock_installed.assert_called_once_with("com.apple.Chess")
    mock_install.assert_called_once_with("com.apple.Chess", True)
    assert ret == {
        "changes": {},
        "comment": "Installed com.apple.Chess into the assistive access panel",
        "name": "com.apple.Chess",
        "result": True,
    }
def test_installed_not_enabled():
    """
    Test installing a bundle ID as being allowed to run with disabled assistive access
    """
    mock_installed = MagicMock(return_value=True)
    mock_install = MagicMock()
    mock_enabled = MagicMock(return_value=False)
    mock_enable = MagicMock()
    patched_funcs = {
        "assistive.installed": mock_installed,
        "assistive.install": mock_install,
        "assistive.enabled": mock_enabled,
        "assistive.enable": mock_enable,
    }
    with patch.dict(assistive.__salt__, patched_funcs):
        ret = assistive.installed("com.apple.Chess")
    mock_enabled.assert_called_once_with("com.apple.Chess")
    mock_enable.assert_called_once_with("com.apple.Chess", True)
    assert not mock_install.called
    assert ret == {
        "changes": {},
        "comment": "Updated enable to True",
        "name": "com.apple.Chess",
        "result": True,
    }
def test_installed_enabled():
    """
    Test enabling an already enabled bundle ID
    """
    mock_installed = MagicMock(return_value=True)
    mock_install = MagicMock()
    mock_enabled = MagicMock(return_value=True)
    mock_enable = MagicMock()
    patched_funcs = {
        "assistive.installed": mock_installed,
        "assistive.install": mock_install,
        "assistive.enabled": mock_enabled,
        "assistive.enable": mock_enable,
    }
    with patch.dict(assistive.__salt__, patched_funcs):
        ret = assistive.installed("com.apple.Chess")
    mock_enabled.assert_called_once_with("com.apple.Chess")
    assert not mock_enable.called
    assert not mock_install.called
    assert ret == {
        "changes": {},
        "comment": "Already in the correct state",
        "name": "com.apple.Chess",
        "result": True,
    }
def test_installed_not_disabled():
    """
    Test disabling an enabled and installed bundle ID
    """
    mock_installed = MagicMock(return_value=True)
    mock_install = MagicMock()
    mock_enabled = MagicMock(return_value=True)
    mock_enable = MagicMock()
    patched_funcs = {
        "assistive.installed": mock_installed,
        "assistive.install": mock_install,
        "assistive.enabled": mock_enabled,
        "assistive.enable": mock_enable,
    }
    with patch.dict(assistive.__salt__, patched_funcs):
        ret = assistive.installed("com.apple.Chess", False)
    mock_enabled.assert_called_once_with("com.apple.Chess")
    mock_enable.assert_called_once_with("com.apple.Chess", False)
    assert not mock_install.called
    assert ret == {
        "changes": {},
        "comment": "Updated enable to False",
        "name": "com.apple.Chess",
        "result": True,
    }
| {
"content_hash": "50b99fdd55ebc4d7121d5c931b44ac5b",
"timestamp": "",
"source": "github",
"line_count": 129,
"max_line_length": 86,
"avg_line_length": 29.155038759689923,
"alnum_prop": 0.6032969954799255,
"repo_name": "saltstack/salt",
"id": "e83d422f59166e6f10296608eda20d853f673b09",
"size": "3761",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/pytests/unit/states/test_mac_assistive.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "14911"
},
{
"name": "C",
"bytes": "1571"
},
{
"name": "Cython",
"bytes": "1458"
},
{
"name": "Dockerfile",
"bytes": "184"
},
{
"name": "Groovy",
"bytes": "12318"
},
{
"name": "HCL",
"bytes": "257"
},
{
"name": "HTML",
"bytes": "8031"
},
{
"name": "Jinja",
"bytes": "45598"
},
{
"name": "Makefile",
"bytes": "713"
},
{
"name": "NSIS",
"bytes": "76572"
},
{
"name": "PowerShell",
"bytes": "75891"
},
{
"name": "Python",
"bytes": "41444811"
},
{
"name": "Rich Text Format",
"bytes": "6242"
},
{
"name": "Roff",
"bytes": "191"
},
{
"name": "Ruby",
"bytes": "961"
},
{
"name": "SaltStack",
"bytes": "35856"
},
{
"name": "Scheme",
"bytes": "895"
},
{
"name": "Scilab",
"bytes": "1147"
},
{
"name": "Shell",
"bytes": "524917"
}
],
"symlink_target": ""
} |
from cuckoo.common.config import config
from cuckoo.common.exceptions import CuckooOperationalError
class Elastic(object):
    """Holds the ElasticSearch reporting configuration and client handle.

    Configuration is read lazily through init() (so importing this module has
    no side effects); connect() then creates the client, if enabled.
    """

    def __init__(self):
        self.client = None
        self.enabled = None
        self.hosts = None
        # Fix: `timeout` was previously only created in init(), so reading it
        # (e.g. in connect()) before init() raised AttributeError.
        self.timeout = None
        self.calls = None
        self.index = None
        self.index_time_pattern = None
        self.cuckoo_node = None

    def init(self):
        """Load the reporting.elasticsearch config; returns the enabled flag."""
        self.enabled = config("reporting:elasticsearch:enabled")
        self.hosts = config("reporting:elasticsearch:hosts")
        self.timeout = config("reporting:elasticsearch:timeout")
        self.calls = config("reporting:elasticsearch:calls")
        self.index = config("reporting:elasticsearch:index")
        self.index_time_pattern = config(
            "reporting:elasticsearch:index_time_pattern"
        )
        self.cuckoo_node = config("reporting:elasticsearch:cuckoo_node")
        return self.enabled

    def connect(self):
        """Create the Elasticsearch client; no-op when reporting is disabled.

        Raises CuckooOperationalError on invalid host configuration or a
        connection failure.
        """
        # TODO Option to throw an exception?
        if not self.enabled:
            return

        # Imported lazily so the dependency is only needed when enabled.
        import elasticsearch

        try:
            self.client = elasticsearch.Elasticsearch(
                self.hosts, timeout=self.timeout
            )
        except TypeError as e:
            raise CuckooOperationalError(
                "Unable to connect to ElasticSearch due to an invalid ip:port "
                "pair: %s" % e
            )
        except elasticsearch.ConnectionError as e:
            raise CuckooOperationalError(
                "Unable to connect to ElasticSearch: %s" % e
            )

elastic = Elastic()
| {
"content_hash": "2f4e745cef0913d046cb35de47d17946",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 79,
"avg_line_length": 32.5625,
"alnum_prop": 0.6071657069737684,
"repo_name": "cuckoobox/cuckoo",
"id": "d4a866845574d0269374fea5a47c074f173cba4b",
"size": "1732",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cuckoo/common/elastic.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "9652"
},
{
"name": "CSS",
"bytes": "6810"
},
{
"name": "DTrace",
"bytes": "8609"
},
{
"name": "HTML",
"bytes": "233053"
},
{
"name": "JavaScript",
"bytes": "21397"
},
{
"name": "Makefile",
"bytes": "58"
},
{
"name": "Mako",
"bytes": "1078"
},
{
"name": "Python",
"bytes": "1101334"
},
{
"name": "Shell",
"bytes": "59602"
},
{
"name": "Visual Basic",
"bytes": "1101"
}
],
"symlink_target": ""
} |
import argparse
import json
import logging
import numpy as np
import os
import threading
import time
from ray._private.test_utils import monitor_memory_usage
from ray.data._internal.progress_bar import ProgressBar
from collections import namedtuple
from queue import Queue
import ray
# Module-level logger for this stress test.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# One pi estimate: cumulative `samples` drawn so far and the latest `pi` value.
PiResult = namedtuple("PiResult", ["samples", "pi"])
@ray.remote(num_cpus=0)
class PiCalculator:
    """Actor that continuously estimates pi via Monte Carlo sampling.

    Created with max_concurrency > 1 (see start_actors), so run_compute()
    executes in one thread while get_pi()/stop() are served concurrently.
    """

    def __init__(self, metadata):
        # -- Read only variables --
        self.metadata = metadata
        self.sample_batch = 1000000
        # -- Variables that are accessed by multiple threads --
        self.lock = threading.Lock()
        self.result_queue = Queue()
        self.is_running = False

    def ready(self):
        # No-op; callers ray.get() this to block until the actor is scheduled.
        pass

    def run_compute(self):
        # Sampling loop; runs until stop() clears is_running.
        self.is_running = True
        sample_cnt = 0
        while self.is_running:
            # Compute pi
            xs = np.random.uniform(low=-1.0, high=1.0, size=self.sample_batch)
            ys = np.random.uniform(low=-1.0, high=1.0, size=self.sample_batch)
            xys = np.stack((xs, ys), axis=-1)
            inside = xs * xs + ys * ys <= 1.0
            xys_inside = xys[inside]
            in_circle = xys_inside.shape[0]
            approx_pi = 4.0 * in_circle / self.sample_batch
            # Put the result to the queue.
            sample_cnt += self.sample_batch
            with self.lock:
                self.result_queue.put(PiResult(samples=sample_cnt, pi=approx_pi))

    def stop(self):
        # Signals run_compute's loop to exit after the current batch.
        self.is_running = False

    def get_metadata(self):
        return self.metadata

    def get_pi(self):
        # Polls the queue until a result is available, then returns it.
        result = None
        while not result:
            with self.lock:
                if not self.result_queue.empty():
                    result = self.result_queue.get(block=False)
            # NOTE(review): this sleeps once more even after a result was
            # obtained, adding ~1s latency to every call — likely unintended.
            time.sleep(1)
        return result
def start_actors(total_num_actors, num_nodes):
    """Create actors and run the computation loop."""
    total_num_actors = int(total_num_actors)
    actors_per_node = int(total_num_actors / num_nodes)
    start = time.time()
    node_resources = []
    # Place an actor per node in round-robin.
    # It is added here to simulate the real user workload.
    while len(node_resources) < num_nodes:
        found = []
        for node in ray.nodes():
            if not node["Alive"]:
                continue
            resource = next((r for r in node["Resources"] if "node" in r), None)
            if resource is not None:
                found.append(resource)
        node_resources = found
    pi_actors = []
    for resource in node_resources:
        for _ in range(actors_per_node):
            pi_actors.append(
                PiCalculator.options(
                    resources={resource: 0.01}, max_concurrency=10
                ).remote({"meta": 1})
            )
    # Block until every actor is scheduled before starting the loops.
    ray.get([actor.ready.remote() for actor in pi_actors])
    print(f"Took {time.time() - start} to create {total_num_actors} actors")
    # Start the computation loop.
    for actor in pi_actors:
        actor.run_compute.remote()
    return pi_actors
def parse_script_args():
    """Parse the known test flags; unrecognized argv entries are returned
    separately by parse_known_args."""
    parser = argparse.ArgumentParser()
    for flag, default in (("--kill-interval_s", 60), ("--test-runtime", 3000)):
        parser.add_argument(flag, type=float, default=default)
    return parser.parse_known_args()
def main():
    """The test simulates the workload with many threaded actors.

    Test is doing 4 things for 1 hour.

    - It first creates actors as many as num_cpus with max_concurrency=10
    - Each actor computes pi and put the result to the queue.
    - Driver keeps getting result & metadata from the actor.
    - Every X seconds, it kills all actors and restarts them.
    """
    ray.init(address="auto")
    args, unknown = parse_script_args()
    # One actor per CPU in the cluster (see start_actors).
    num_cpus = ray.cluster_resources()["CPU"]
    num_nodes = sum(1 for n in ray.nodes() if n["Alive"])
    print(f"Total number of actors: {num_cpus}, nodes: {num_nodes}")
    monitor_actor = monitor_memory_usage()

    start = time.time()
    while time.time() - start < args.test_runtime:
        # Step 1: Create actors and start computation loop.
        print("Create actors.")
        actors = start_actors(num_cpus, num_nodes)

        # Step 2: Get the pi result from actors.
        compute_start = time.time()
        print("Start computation.")
        while time.time() - compute_start < args.kill_interval_s:
            # Get the metadata.
            ray.get([actor.get_metadata.remote() for actor in actors])
            # Get the result.
            pb = ProgressBar("Computing Pi", num_cpus)
            results = [actor.get_pi.remote() for actor in actors]
            pb.fetch_until_complete(results)
            pb.close()

        # Step 3: Kill actors.
        print("Kill all actors.")
        for actor in actors:
            ray.kill(actor)

    # Report the result.
    print("PASSED.")
    used_gb, usage = ray.get(monitor_actor.get_peak_memory_info.remote())
    print("Memory usage with failures.")
    print(f"Peak memory usage: {round(used_gb, 2)}GB")
    print(f"Peak memory usage per processes:\n {usage}")
    # Report the result.
    ray.get(monitor_actor.stop_run.remote())

    # NOTE(review): writes success=0 (falsy) even on the pass path — confirm
    # that the consumer of TEST_OUTPUT_JSON keys off presence, not value.
    result = {"success": 0}
    with open(os.environ["TEST_OUTPUT_JSON"], "w") as f:
        f.write(json.dumps(result))


if __name__ == "__main__":
    main()
| {
"content_hash": "d77cbd388b47b7ff10ae631e77aec2d5",
"timestamp": "",
"source": "github",
"line_count": 163,
"max_line_length": 81,
"avg_line_length": 32.04294478527607,
"alnum_prop": 0.6053991958644457,
"repo_name": "ray-project/ray",
"id": "8b0fef482cb16ff5366c37a429e079c1261019f9",
"size": "5223",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "release/nightly_tests/stress_tests/test_threaded_actors.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "37490"
},
{
"name": "C++",
"bytes": "5972422"
},
{
"name": "CSS",
"bytes": "10912"
},
{
"name": "Cython",
"bytes": "227477"
},
{
"name": "Dockerfile",
"bytes": "20210"
},
{
"name": "HTML",
"bytes": "30382"
},
{
"name": "Java",
"bytes": "1160849"
},
{
"name": "JavaScript",
"bytes": "1128"
},
{
"name": "Jinja",
"bytes": "6371"
},
{
"name": "Jupyter Notebook",
"bytes": "1615"
},
{
"name": "Makefile",
"bytes": "234"
},
{
"name": "PowerShell",
"bytes": "1114"
},
{
"name": "Python",
"bytes": "19539109"
},
{
"name": "Shell",
"bytes": "134583"
},
{
"name": "Starlark",
"bytes": "334862"
},
{
"name": "TypeScript",
"bytes": "190599"
}
],
"symlink_target": ""
} |
import os
import re
import toml.decoder
from setuptools import setup, find_packages
# from packaging.version import parse as version_parse
from sys import version_info as python_version_info
from semantic_version import SimpleSpec, Version
# Absolute path of the directory containing this setup script.
ROOT_DIR = os.path.abspath(os.path.dirname(__file__))
# Parsed pyproject.toml; [tool.poetry] carries the metadata mirrored below.
PYPROJECT_TOML = toml.decoder.load(os.path.join(ROOT_DIR, 'pyproject.toml'))
POETRY_DATA = PYPROJECT_TOML['tool']['poetry']
_CARET_MATCH = re.compile(r"[\^]([0-9]+)([.].*)?$")
_TILDE_MATCH = re.compile(r"[~]([0-9]+[.])([0-9]+)([.].*)?$")
def fix_requirement(requirement):
if isinstance(requirement, str):
return _fix_requirement_string(requirement)
elif isinstance(requirement, list):
return fix_requirement(_select_requirement(requirement))
else:
raise ValueError(f"Unrecognized requirement: {requirement!r}")
def _select_requirement(requirement):
if not isinstance(requirement, list):
raise ValueError(f"{requirement!r} is not a list.")
python_version = Version(f"{python_version_info.major}.{python_version_info.minor}.{python_version_info.micro}")
for clause in requirement:
if set(clause.keys()) != {'python', 'version'}:
raise ValueError(f"Unanticipated requirement clause: {clause!r}")
if SimpleSpec(clause['python']).match(python_version):
return clause['version']
else:
pass
raise ValueError(f"No clauses matched: {requirement!r}")
def _fix_requirement_string(requirement):
m = _CARET_MATCH.match(requirement)
if m:
return ">=%s%s,<%s" % (m.group(1), m.group(2), int(m.group(1)) + 1)
m = _TILDE_MATCH.match(requirement)
if m:
return ">=%s%s%s,<%s%s" % (m.group(1), m.group(2), m.group(3), m.group(1), int(m.group(2)) + 1)
if requirement[0].isdigit():
return "==" + requirement
else:
return requirement
# Splits "Human Name <email@host>" into its two components.
_EMAIL_MATCH = re.compile(r"^([^<]*)[<]([^>]*)[>]$")


def author_and_email(authorship_spec):
    """Return (name, email) parsed from a poetry authorship string."""
    parsed = _EMAIL_MATCH.match(authorship_spec)
    if parsed is None:
        raise ValueError("Expect authorship in format 'human_name <email_name@email_host>': %s" % authorship_spec)
    return parsed.group(1), parsed.group(2)
def get_requirements(kind='dependencies'):
    """Build pip-style requirement strings from a poetry dependency section."""
    requirements = []
    for pkg, requirement in POETRY_DATA[kind].items():
        # The interpreter constraint itself is not an installable package.
        if pkg == "python":
            continue
        requirements.append(pkg + fix_requirement(requirement))
    return requirements
def flatten_config_data(key, dictionary):
    """Render a section header plus "k = v" lines, ini-style."""
    lines = [k + " = " + v for k, v in dictionary.items()]
    return "%s\n%s\n\n" % (key, "\n".join(lines))
def entry_points():
    """Collect console_scripts and paste entry points from pyproject data."""
    chunks = [flatten_config_data("[console_scripts]", POETRY_DATA['scripts'])]
    paste_dict = PYPROJECT_TOML['paste']
    for subkey in paste_dict:
        chunks.append(flatten_config_data('[paste.%s]' % subkey, paste_dict[subkey]))
    return "".join(chunks)
ENTRY_POINTS = entry_points()
PACKAGE_NAME = POETRY_DATA['name']
# Read the long description, closing the handle promptly (the original left
# the file object to be closed by the GC).
with open(os.path.join(ROOT_DIR, 'README.rst')) as _readme_file:
    README = _readme_file.read()
# CHANGES = open(os.path.join(ROOT_DIR, 'CHANGES.rst')).read())
DESCRIPTION = POETRY_DATA['description']
LONG_DESCRIPTION = README
AUTHOR, AUTHOR_EMAIL = author_and_email(POETRY_DATA['authors'][0])
URL = 'http://data.4dnucleome.org'
LICENSE = 'MIT'
INSTALL_REQUIRES = get_requirements()
TESTS_REQUIRE = get_requirements('dev-dependencies')
VERSION = POETRY_DATA['version']
if __name__ == '__main__':
    # Mirror the poetry metadata into a classic setuptools invocation
    # (presumably so non-poetry tooling can install the package — the file
    # name suggests Elastic Beanstalk; confirm with deployment docs).
    setup(
        name=PACKAGE_NAME,
        version=VERSION,
        description=DESCRIPTION,
        long_description=LONG_DESCRIPTION,
        packages=find_packages('src'),
        package_dir={'': 'src'},
        include_package_data=True,
        zip_safe=False,
        author=AUTHOR,
        author_email=AUTHOR_EMAIL,
        url=URL,
        license=LICENSE,
        install_requires=INSTALL_REQUIRES,
        tests_require=TESTS_REQUIRE,
        extras_require={'test': TESTS_REQUIRE},
        entry_points=ENTRY_POINTS,
    )
| {
"content_hash": "99b3f9a033d12e38155aac023dcff418",
"timestamp": "",
"source": "github",
"line_count": 123,
"max_line_length": 116,
"avg_line_length": 31.70731707317073,
"alnum_prop": 0.6382051282051282,
"repo_name": "4dn-dcic/fourfront",
"id": "ea77d0ef411c2c99a718342ad1d2fc7f93ad64c3",
"size": "3900",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup_eb.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Common Workflow Language",
"bytes": "15818"
},
{
"name": "Dockerfile",
"bytes": "6312"
},
{
"name": "HTML",
"bytes": "11048"
},
{
"name": "JavaScript",
"bytes": "2106661"
},
{
"name": "Makefile",
"bytes": "9079"
},
{
"name": "PLpgSQL",
"bytes": "12067"
},
{
"name": "Python",
"bytes": "1758496"
},
{
"name": "SCSS",
"bytes": "224522"
},
{
"name": "Shell",
"bytes": "19014"
}
],
"symlink_target": ""
} |
"""Generic TFX example_validator executor."""
import os
from typing import Any, Dict, List
from absl import logging
import tensorflow_data_validation as tfdv
from tfx import types
from tfx.components.example_validator import labels
from tfx.components.statistics_gen import stats_artifact_utils
from tfx.components.util import value_utils
from tfx.dsl.components.base import base_executor
from tfx.types import artifact_utils
from tfx.types import standard_component_specs
from tfx.utils import io_utils
from tfx.utils import json_utils
# Default file name for anomalies output.
# Each split's serialized Anomalies proto is written under this name.
DEFAULT_FILE_NAME = 'SchemaDiff.pb'
class Executor(base_executor.BaseExecutor):
  """TensorFlow ExampleValidator component executor."""

  def Do(self, input_dict: Dict[str, List[types.Artifact]],
         output_dict: Dict[str, List[types.Artifact]],
         exec_properties: Dict[str, Any]) -> None:
    """TensorFlow ExampleValidator executor entrypoint.

    This validates statistics against the schema.

    Args:
      input_dict: Input dict from input key to a list of artifacts, including:
        - statistics: A list of type `standard_artifacts.ExampleStatistics`
          generated by StatisticsGen.
        - schema: A list of type `standard_artifacts.Schema` which should
          contain a single schema artifact.
      output_dict: Output dict from key to a list of artifacts, including:
        - output: A list of 'standard_artifacts.ExampleAnomalies' of size one.
          It will include a single binary proto file which contains all
          anomalies found.
      exec_properties: A dict of execution properties.
        - exclude_splits: JSON-serialized list of names of splits that the
          example validator should not validate.

    Returns:
      None
    """
    self._log_startup(input_dict, output_dict, exec_properties)

    # Load and deserialize exclude splits from execution properties.
    # Missing property defaults to an empty list (JSON 'null' -> None -> []).
    exclude_splits = json_utils.loads(
        exec_properties.get(standard_component_specs.EXCLUDE_SPLITS_KEY,
                            'null')) or []
    if not isinstance(exclude_splits, list):
      raise ValueError('exclude_splits in execution properties needs to be a '
                       'list. Got %s instead.' % type(exclude_splits))
    # Setup output splits: the anomalies artifact advertises only the splits
    # that will actually be validated.
    stats_artifact = artifact_utils.get_single_instance(
        input_dict[standard_component_specs.STATISTICS_KEY])
    stats_split_names = artifact_utils.decode_split_names(
        stats_artifact.split_names)
    split_names = [
        split for split in stats_split_names if split not in exclude_splits
    ]
    anomalies_artifact = artifact_utils.get_single_instance(
        output_dict[standard_component_specs.ANOMALIES_KEY])
    anomalies_artifact.split_names = artifact_utils.encode_split_names(
        split_names)

    # One schema is shared across all splits.
    schema = io_utils.SchemaReader().read(
        io_utils.get_only_uri_in_dir(
            artifact_utils.get_single_uri(
                input_dict[standard_component_specs.SCHEMA_KEY])))
    for split in artifact_utils.decode_split_names(stats_artifact.split_names):
      if split in exclude_splits:
        continue
      logging.info(
          'Validating schema against the computed statistics for '
          'split %s.', split)
      stats = stats_artifact_utils.load_statistics(stats_artifact,
                                                   split).proto()
      label_inputs = {
          standard_component_specs.STATISTICS_KEY: stats,
          standard_component_specs.SCHEMA_KEY: schema
      }
      output_uri = artifact_utils.get_split_uri(
          output_dict[standard_component_specs.ANOMALIES_KEY], split)
      label_outputs = {labels.SCHEMA_DIFF_PATH: output_uri}
      self._Validate(label_inputs, label_outputs)
      logging.info(
          'Validation complete for split %s. Anomalies written to '
          '%s.', split, output_uri)

  def _Validate(self, inputs: Dict[str, Any], outputs: Dict[str, Any]) -> None:
    """Validate the inputs and put validate result into outputs.

    This is the implementation part of example validator executor. This is
    intended for using or extending the executor without artifact dependecy.

    Args:
      inputs: A dictionary of labeled input values, including:
        - STATISTICS_KEY: the feature statistics to validate
        - SCHEMA_KEY: the schema to respect
        - (Optional) labels.ENVIRONMENT: if an environment is specified, only
          validate the feature statistics of the fields in that environment.
          Otherwise, validate all fields.
        - (Optional) labels.PREV_SPAN_FEATURE_STATISTICS: the feature
          statistics of a previous span.
        - (Optional) labels.PREV_VERSION_FEATURE_STATISTICS: the feature
          statistics of a previous version.
        - (Optional) labels.FEATURES_NEEDED: the feature needed to be
          validated on.
        - (Optional) labels.VALIDATION_CONFIG: the configuration of this
          validation.
        - (Optional) labels.EXTERNAL_CONFIG_VERSION: the version number of
          external config file.
      outputs: A dictionary of labeled output values, including:
        - labels.SCHEMA_DIFF_PATH: the path to write the schema diff to
    """
    schema = value_utils.GetSoleValue(inputs,
                                      standard_component_specs.SCHEMA_KEY)
    stats = value_utils.GetSoleValue(inputs,
                                     standard_component_specs.STATISTICS_KEY)
    schema_diff_path = value_utils.GetSoleValue(
        outputs, labels.SCHEMA_DIFF_PATH)
    anomalies = tfdv.validate_statistics(stats, schema)
    # Serialized Anomalies proto is written under the fixed default file name.
    io_utils.write_bytes_file(
        os.path.join(schema_diff_path, DEFAULT_FILE_NAME),
        anomalies.SerializeToString())
| {
"content_hash": "d97e1ec7408de9d31d8f52288e44016f",
"timestamp": "",
"source": "github",
"line_count": 133,
"max_line_length": 79,
"avg_line_length": 43.338345864661655,
"alnum_prop": 0.6776544066620402,
"repo_name": "tensorflow/tfx",
"id": "f1db660131424d7b0a375cf064d14085d142c979",
"size": "6360",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tfx/components/example_validator/executor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "7405"
},
{
"name": "Jupyter Notebook",
"bytes": "38579"
},
{
"name": "Python",
"bytes": "6009050"
},
{
"name": "Shell",
"bytes": "34056"
},
{
"name": "Starlark",
"bytes": "20324"
}
],
"symlink_target": ""
} |
import os
import re
import sys
import ast
import json
import codecs
import argparse
import shutil
# The root of our pypy source checkout, if it exists.
PYPY_ROOT = os.path.join(
    os.path.dirname(__file__),
    "../deps/pypy",
)

# Modules that are builtin, so we shouldn't expect them in the bundle.
# These are interpreter-level modules; is_builtin() matches them (and their
# submodules) by dotted-name prefix.
BUILTIN_MODULES = [
    "__builtin__",
    "__pypy__",
    "_ast",
    "_codecs",
    "_collections",
    "_csv",
    "_file",
    "_hashlib",
    "_io",
    "_locale",
    "_md5",
    "_minimal_curses",
    "_multibytecodec",
    "_pickle_support",
    "_pypyjson",
    "_random",
    "_sha",
    "_socket",
    "_sre",
    "_struct",
    "_testing",
    "_warnings",
    "_weakref",
    "array",
    "binascii",
    "cStringIO",
    "cmath",
    "errno",
    "exceptions",
    "gc",
    "imp",
    "itertools",
    "js",
    "marshal",
    "math",
    "operator",
    "parser",
    "posix",
    "pypyjit",
    "symbol",
    "sys",
    "time",
    "token",
    "unicodedata",
]
# Modules that are not going to work, so don't bother including them.
# Bug fix: "_osx_support" was missing a trailing comma, so Python's implicit
# string concatenation fused it with the next entry into "_osx_supportsmtpd",
# meaning neither _osx_support nor smtpd was actually excluded.
EXCLUDE_MODULES = [
    "readline",
    "ntpath",
    "macpath",
    "os2emxpath",
    "ctypes",
    "ctypes_support",
    "ctypes_configure",
    "ctypes_configure_cache",
    "_ctypes",
    "cffi",
    "_ffi",
    "_rawffi",
    "subprocess",
    "_subprocess",
    "threading",
    "thread",
    "multiprocessing",
    "_multiprocessing",
    "audiodev",
    "audioop",
    "Carbon",
    "MacOS",
    "_osx_support",
    "smtpd",
    "idlelib",
    "Tkinter",
    "Tkconstants",
    "_tkinter",
    "ttk",
    "__main__",
    "bsddb",
    "ssl",
    "_ssl",
    "_winreg",
    "cpyext",
    "symtable",
    "java",
    "msilib",
    "dos",
    "nt",
    "os2",
    "org.python",
    "riscos",
    "riscosenviron",
    "vmslib",
    "win32api",
    "win32con",
    "win32pipe",
    "win32wnet",
    "win32evtlog",
    "msvcrt",
    "hotshot",
    "sunau",
    "sunaudio",
    "wave",
    "sqlite3",
    "curses",
]
# Modules that are pretty much always needed, and so should be loaded eagerly.
# cmd_init() preloads all of these into the bundle's "preload" index section.
PRELOAD_MODULES = [
    "os",
    "code",
    # Python has some magic to auto-load encodings when they're needed,
    # which doesn't work right if they're not preloaded.
    "encodings.ascii",
    "encodings.hex_codec",
    "encodings.base64_codec",
    "encodings.latin_1",
    "encodings.string_escape",
    "encodings.utf_8",
    "encodings.utf_16",
    "encodings.unicode_internal",
    "encodings.unicode_escape",
    "encodings.raw_unicode_escape",
]
def main(argv):
    """Command-line entry point; returns the process exit status."""
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(dest="subcommand")

    init_parser = subparsers.add_parser("init")
    init_parser.add_argument("bundle_dir")
    init_parser.add_argument("--exclude", action="append",
                             help="exclude these modules from the bundle")
    init_parser.add_argument("--include", action="append",
                             help="include these modules in the bundle, overrides exclude")
    init_parser.add_argument("--preload", action="append",
                             help="preload these modules in the bundle")
    init_parser.add_argument("--pypy-root", action="store",
                             help="root directory of pypy source checkout")

    add_parser = subparsers.add_parser("add")
    add_parser.add_argument("bundle_dir")
    add_parser.add_argument("modules", nargs="+", metavar="module")
    add_parser.add_argument("--exclude", action="append",
                            help="exclude these modules from the bundle")
    add_parser.add_argument("--preload", action="append",
                            help="preload these modules in the bundle")
    add_parser.add_argument("--include", action="append",
                            help="include these modules in the bundle, overrides exclude")

    preload_parser = subparsers.add_parser("preload")
    preload_parser.add_argument("bundle_dir")
    preload_parser.add_argument("modules", nargs="+", metavar="module")

    opts = parser.parse_args(argv[1:])
    bundler = ModuleBundle(opts.bundle_dir)
    handlers = {"init": cmd_init, "add": cmd_add, "preload": cmd_preload}
    handler = handlers.get(opts.subcommand)
    assert handler is not None, "unknown subcommand {}".format(opts.subcommand)
    handler(bundler, opts)
    return 0
def cmd_init(bundler, opts):
    """Initialize a fresh bundle from the pypy stdlib directories."""
    # Update the bundler's exclusion list; --include overrides --exclude.
    for name in (opts.exclude or []):
        if not bundler.is_excluded(name):
            bundler.exclude.append(name)
    for name in (opts.include or []):
        if bundler.is_excluded(name):
            bundler.exclude.remove(name)
    # Walk the pypy stdlib dirs to find all available module files and
    # copy them into the bundle.
    pypy_root = opts.pypy_root if opts.pypy_root else PYPY_ROOT
    for modroot in ("lib-python/2.7", "lib_pypy"):
        bundler.bundle_directory(os.path.join(pypy_root, modroot))
    # Preload the default set of preloaded modules, plus any explicitly
    # requested ones.
    for name in PRELOAD_MODULES:
        bundler.preload_module(name)
    for name in (opts.preload or []):
        bundler.preload_module(name)
    bundler.flush_index()
def cmd_add(bundler, opts):
    """Add the given modules/packages to an existing bundle."""
    # Update the exclude list if necessary; --include overrides --exclude.
    for name in (opts.exclude or []):
        if not bundler.is_excluded(name):
            bundler.exclude.append(name)
    for name in (opts.include or []):
        if bundler.is_excluded(name):
            bundler.exclude.remove(name)
    # Find and bundle each module/package.
    for name in opts.modules:
        if not os.path.exists(name):
            # XXX TODO: try to find it by importing it?
            raise ValueError("non-existent module: {}".format(name))
        bundler.bundle_path(name)
    # Preload any additional modules that were specified.
    for name in (opts.preload or []):
        bundler.preload_module(name)
    bundler.flush_index()
def cmd_preload(bundler, opts):
    """Mark the given modules for eager loading, then persist the index."""
    for module_name in opts.modules:
        bundler.preload_module(module_name)
    bundler.flush_index()
class ModuleBundle(object):
    """Class managing a directory of bundled modules.

    This class builds up a directory containing python module files along
    with an "index.json" file giving metadata about their contents and
    dependencies.  Loading the index gives enough information to determine
    what files should be loaded in order to handle importing of any available
    module.

    The structure of index.json is as follows:

        {
          "modules": {            # maps dotted module name to metadata
            "a.b": {
              "file": "<a.py>"    # for modules, relative path to .py file
              "dir": "<A>"        # for packages, relative path to package dir
              "imports": []       # list of module names imported by this module
            }
          },
          "preload": {            # maps dotted module name to raw file contents
            "x.y": "<code>",
          }
        }

    There is also an ancillary file "meta.json" which tracks information
    useful when building up the bundle, but not necessary when loading modules
    from it.  This helps avoid paying the overhead of loading the extra
    information when using the bundle.

    The structure of meta.json is as follows:

        {
          "exclude": [            # list of modules excluded from the bundle
            "some.module"
          ]
          "missing": {            # maps dotted module names that are not found in the
            "a.b.c.d": []         # bundle to the modules that would import them.
          }
        }

    """
    def __init__(self, bundle_dir):
        """Open (creating if necessary) the bundle rooted at *bundle_dir*."""
        self.bundle_dir = os.path.abspath(bundle_dir)
        self.index_file = os.path.join(self.bundle_dir, "index.json")
        self.meta_file = os.path.join(self.bundle_dir, "meta.json")
        # In-memory mirrors of index.json (modules/preload) and meta.json
        # (exclude/missing); see the class docstring for their layout.
        self.modules = {}
        self.preload = {}
        self.exclude = list(EXCLUDE_MODULES)
        self.missing = {}
        self._modules_pending_import_analysis = []
        # Ensure the directory and an (initially empty) index exist on disk,
        # then load whatever state is already persisted.
        if not os.path.isdir(self.bundle_dir):
            os.makedirs(self.bundle_dir)
        if not os.path.exists(self.index_file):
            self.flush_index()
        self.load_index()
def flush_index(self):
"""Write out the index file based on in-memory state."""
# Atomically update the index file.
with open(self.index_file + ".new", "w") as f:
json.dump({
"modules": self.modules,
"preload": self.preload,
}, f, indent=2, sort_keys=True)
if sys.platform.startswith("win32"):
shutil.copy(self.index_file + ".new", self.index_file)
os.remove(self.index_file + ".new")
else:
os.rename(self.index_file + ".new", self.index_file)
# Atomically update the meta file.
with open(self.meta_file + ".new", "w") as f:
json.dump({
"exclude": self.exclude,
"missing": self.missing,
}, f, indent=2, sort_keys=True)
if sys.platform.startswith("win32"):
shutil.copy(self.meta_file + ".new", self.meta_file)
os.remove(self.meta_file + ".new")
else:
os.rename(self.meta_file + ".new", self.meta_file)
# Remove preloaded module files from disk, now that their contents
# are safely flushed to the index file.
for name in self.preload:
moddata = self.modules[name]
if "file" in moddata:
filepath = os.path.join(self.bundle_dir, moddata["file"])
if os.path.exists(filepath):
os.unlink(filepath)
def load_index(self):
"""Load in-memory state from the index file."""
with open(self.index_file) as f:
index = json.load(f)
self.modules = index["modules"]
self.preload = index["preload"]
with open(self.meta_file) as f:
meta = json.load(f)
self.exclude = meta["exclude"]
self.missing = meta["missing"]
def is_dotted_prefix(self, prefix, name):
"""Check whether a dotted name is a prefix of another."""
if name == prefix:
return True
if name.startswith(prefix):
if name[len(prefix)] == ".":
return True
return False
def is_builtin(self, name):
"""Check whether the named module is a builtin."""
for builtin in BUILTIN_MODULES:
if self.is_dotted_prefix(builtin, name):
return True
return False
def is_excluded(self, name):
"""Check whether the named module should be excluded."""
for excl in self.exclude:
if self.is_dotted_prefix(excl, name):
return True
return False
def bundle_module(self, filepath):
"""Bundle the given file as a python module."""
filepath = os.path.abspath(filepath)
rootdir, relpath = os.path.split(filepath)
self._gather_module("", rootdir, relpath)
self._perform_pending_import_analysis()
def bundle_package(self, dirpath):
"""Bundle the given directory as a python package."""
dirpath = os.path.abspath(dirpath)
rootdir, relpath = os.path.split(dirpath)
self._gather_package("", rootdir, relpath)
self._perform_pending_import_analysis()
def bundle_directory(self, dirpath):
"""Bundle all modules/packages in the given directory."""
dirpath = os.path.abspath(dirpath)
for nm in os.listdir(dirpath):
if nm.startswith("."):
continue
itempath = os.path.join(dirpath, nm)
if os.path.isdir(itempath):
if os.path.exists(os.path.join(itempath, "__init__.py")):
self.bundle_package(itempath)
elif nm.endswith(".py"):
self.bundle_module(itempath)
def bundle_path(self, path):
"""Bundle whatever exists at the given path.
The path could specify a module, a package, or a directory of modules
and packages. Its type is intuited based on the contents of the path.
"""
if os.path.isfile(path):
self.bundle_module(path)
elif os.path.isfile(os.path.join(path, "__init__.py")):
self.bundle_package(path)
else:
self.bundle_directory(path)
def _gather_module(self, package, rootdir, relpath):
"""Gather a python module file into the bundle.
Given the name of a python module, the root import directory under
which it was found, and the relative path from that root to the
module file, this method copies the file into the bundle and adds it
to the list of all available modules.
"""
modname = os.path.basename(relpath)[:-3]
if package:
modname = package + "." + modname
if not self.is_excluded(modname):
# Add it to the list of available modules.
moddata = {"file": relpath.replace("\\", "/")}
self.modules[modname] = moddata
# Copy its source file across.
self._copy_py_file(os.path.join(rootdir, relpath),
os.path.join(self.bundle_dir, relpath))
# We'll need to analyse its imports once all siblings are gathered.
self._modules_pending_import_analysis.append(modname)
def _gather_package(self, package, rootdir, relpath):
"""Recursively gather a python package directory into the bundle.
Given the name of the python package, the root import directory under
which it was found, and the relative path from that root to the
package directory, this method copies the package and all its contents
into the bundle and adds them to the list of available modules.
"""
abspath = os.path.join(rootdir, relpath)
subpackage = os.path.basename(abspath)
if package:
subpackage = package + "." + subpackage
if not self.is_excluded(subpackage):
# Note it as an available package.
self.modules[subpackage] = {"dir": relpath.replace("\\", "/")}
if not os.path.isdir(os.path.join(self.bundle_dir, relpath)):
os.makedirs(os.path.join(self.bundle_dir, relpath))
# Include it in post-gathering analysis.
self._modules_pending_import_analysis.append(subpackage)
# Recursively gather all its contents.
for nm in os.listdir(abspath):
if nm.startswith("."):
continue
subrelpath = os.path.join(relpath, nm)
subabspath = os.path.join(abspath, nm)
if os.path.isdir(subabspath):
if os.path.exists(os.path.join(subabspath, "__init__.py")):
self._gather_package(subpackage, rootdir, subrelpath)
elif nm.endswith(".py"):
self._gather_module(subpackage, rootdir, subrelpath)
def _copy_py_file(self, srcpath, dstpath):
"""Copy a python source file into the bundle.
This method copes the contents of a python source file into the bundle.
Since browsers usually expect strings in utf-8 format, it will try to
detect source files in other encodings and transparently convert them
to utf-8.
"""
# XXX TODO: copy in chunks, like shutil would do?
with open(srcpath, "rb") as f_src:
data = f_src.read()
# Look for the encoding marker in the first two lines of the file.
lines = data.split("\n", 2)
encoding = None
for i in xrange(2):
if i >= len(lines):
break
if lines[i].startswith("#"):
match = re.search(r"coding[:=]\s*([-\w.]+)", lines[i])
if match is not None:
encoding = match.group(1)
try:
codecs.lookup(encoding)
except LookupError:
encoding = None
break
# Write normalized data to output file.
with open(dstpath, "wb") as f_dst:
if encoding is None:
f_dst.write(data)
else:
for j in xrange(i):
f_dst.write(lines[j])
f_dst.write("\n")
f_dst.write(lines[i].replace(encoding, "utf-8"))
f_dst.write("\n")
for j in xrange(i + 1, len(lines)):
f_dst.write(lines[j].decode(encoding).encode("utf8"))
if j < len(lines) - 1:
f_dst.write("\n")
def _perform_pending_import_analysis(self):
"""Perform import analysis on any pending modules.
To make it easier to resolve intra-package relative imports, we
delay doing any import analsyis until all the contents of a package
have been gathered into the bundle. This method is called after
the gathering in order to perform the pending analyses.
"""
while self._modules_pending_import_analysis:
modname = self._modules_pending_import_analysis.pop()
# Check if this new module resolves previously-missing imports.
# XXX TODO: this is pretty ugly and inefficient...
for depname in self.missing.keys():
if self.is_dotted_prefix(modname, depname):
revdeps = self.missing.pop(depname)
for revdepname in revdeps:
revdepdata = self.modules[revdepname]
revdepdata["imports"].remove(depname)
if modname not in revdepdata["imports"]:
revdepdata["imports"].append(modname)
# Find all the names that it imports.
moddata = self.modules[modname]
if "file" not in moddata:
continue
modpath = os.path.join(self.bundle_dir, moddata["file"])
impf = ImportFinder(modname, modpath, self.modules)
moddata["imports"] = impf.find_imported_modules()
# Check for any imports that are missing from the bundle.
for depname in moddata["imports"]:
if depname not in self.modules:
if not self.is_excluded(depname):
if not self.is_builtin(depname):
if depname not in self.missing:
self.missing[depname] = []
self.missing[depname].append(modname)
def preload_module(self, name):
"""Preload a module's file data into the index itself.
This is a little trick to speed up loading of commonly-used modules.
Rather than having the module's file data as a separate file on disk,
we store it as a string directly in the index file, and avoid doing
a separate network access to load it at VM startup time.
"""
for depname in self._find_transitive_dependencies(name):
if depname in self.preload:
continue
moddata = self.modules[depname]
if "file" in moddata:
filepath = os.path.join(self.bundle_dir, moddata["file"])
with open(filepath, "r") as f:
self.preload[depname] = f.read()
def _find_transitive_dependencies(self, name, seen=None):
"""Transitively find all dependencies of a module."""
if seen is None:
seen = set((name,))
moddata = self.modules.get(name)
if moddata is not None:
deps = set()
imports = moddata.get("imports")
if imports is not None:
deps.update(imports)
if "dir" in moddata:
deps.add(name + ".__init__")
if "." in name:
deps.add(name.rsplit(".", 1)[0])
seen.add(name)
for dep in deps:
if dep not in seen:
self._find_transitive_dependencies(dep, seen)
return seen
class ImportFinder(ast.NodeVisitor):
    """An AST NodeVisitor for finding all names imported in a python file.

    Resolves explicit (leading-dot) and implicit (pre-absolute_import)
    relative imports against `known_modules`, and records each imported
    name in self.imported_names.
    """

    def __init__(self, module, filepath, known_modules):
        super(ImportFinder, self).__init__()
        self.module = module
        # The dotted name of the package containing this module ("" for a
        # top-level module).
        if "." in module:
            self.package = module.rsplit(".", 1)[0]
        else:
            self.package = ""
        self.filepath = filepath
        self.known_modules = known_modules
        self.imported_names = set()
        self.uses_absolute_import = False

    def find_imported_modules(self):
        """Parse the source file and return a sorted list of imported names.

        Files that fail to parse yield an empty list.
        """
        with open(self.filepath, "r") as f:
            source = f.read()
        try:
            tree = ast.parse(source)
        except SyntaxError:
            return []
        self.visit(tree)
        return sorted(self.imported_names)

    def visit_Import(self, node):
        """Record every name from a plain 'import a, b.c' statement."""
        for alias in node.names:
            self.record_imported_name(alias.name)

    def visit_ImportFrom(self, node):
        """Record the names from a 'from X import a, b' statement."""
        if node.module == "__future__":
            # Remember absolute_import: it changes how bare names resolve.
            if any(alias.name == "absolute_import" for alias in node.names):
                self.uses_absolute_import = True
        # Encode the relative-import level as leading dots, as in source.
        prefix = "." * node.level
        if node.module is not None:
            prefix += node.module + "."
        for alias in node.names:
            self.record_imported_name(prefix + alias.name)

    def record_imported_name(self, name):
        """Resolve `name` relative to the current module and record it."""
        if name[0] == ".":
            # Explicit relative import: each leading dot beyond the first
            # strips one trailing component from the containing package.
            name = name[1:]
            package_bits = self.package.split(".")
            while name[0] == ".":
                name = name[1:]
                package_bits = package_bits[:-1]
            name = ".".join(package_bits) + "." + name
        elif not self.uses_absolute_import and self.package:
            # Implicit relative import (old Python 2 semantics): search the
            # containing package and its ancestors for a matching module.
            # This depends on self.known_modules having all sibling modules.
            containing = self.package
            relative_head = name.rsplit(".", 1)[0]
            while True:
                if containing + "." + relative_head in self.known_modules:
                    name = containing + "." + name
                    break
                if "." not in containing:
                    break
                containing = containing.rsplit(".", 1)[0]
        # Strip trailing components to try to find a known module name.
        full_name = name
        while name not in self.known_modules and "." in name:
            name = name.rsplit(".", 1)[0]
        if name in self.known_modules:
            self.imported_names.add(name)
        else:
            self.imported_names.add(full_name)
if __name__ == "__main__":
    # Script entry point: propagate main()'s return value as the exit status.
    sys.exit(main(sys.argv))
| {
"content_hash": "4a92ed0b320f0bc21bc4c939e6000ce4",
"timestamp": "",
"source": "github",
"line_count": 651,
"max_line_length": 90,
"avg_line_length": 35.68817204301075,
"alnum_prop": 0.5659191667025352,
"repo_name": "albertjan/pypyjs",
"id": "90f50d1ac1672f0d1b65949c0acbe62555ea627b",
"size": "24104",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/module_bundler.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "4486"
},
{
"name": "HTML",
"bytes": "5137"
},
{
"name": "JavaScript",
"bytes": "40140"
},
{
"name": "Makefile",
"bytes": "5929"
},
{
"name": "Python",
"bytes": "14932329"
}
],
"symlink_target": ""
} |
import sys
import time
class display:
    """A toy monochrome screen: a height x width grid of on/off pixels.

    Implements the screen operations used by the driver script below:
    fill a top-left rectangle, and rotate (shift with wrap-around) a
    single row or column.
    """

    def __init__(self, width, height):
        # Pixels are stored row-major: self.screen has `height` rows,
        # each a list of `width` booleans (True == lit).
        self.width = width
        self.height = height
        self.screen = [[False] * width for _ in range(height)]

    def fillRect(self, width, height):
        """Turn on every pixel in the width x height top-left rectangle."""
        for row in range(height):
            for col in range(width):
                self.screen[row][col] = True

    def at(self, row, column):
        """Return the pixel at (row, column); indices wrap around the screen."""
        return self.screen[row % self.height][column % self.width]

    def shiftRow(self, row, amount):
        """Rotate a row right by `amount` pixels, wrapping around."""
        # Build the new row from the (unmodified) old one, then swap it in.
        self.screen[row] = [self.at(row, i - amount) for i in range(self.width)]

    def shiftColumn(self, column, amount):
        """Rotate a column down by `amount` pixels, wrapping around."""
        old = [self.screen[i][column] for i in range(self.height)]
        for i in range(self.height):
            self.screen[i][column] = old[(i - amount) % self.height]

    def render(self):
        """Print the screen, one row per line ('#' = lit, ' ' = off)."""
        for row in self.screen:
            # print(...) with a single argument behaves identically under
            # Python 2 (parenthesized expression statement) and Python 3.
            print("".join("#" if pix else " " for pix in row))

    def countLights(self):
        """Return the number of lit pixels."""
        return sum(1 for row in self.screen for pix in row if pix)

    def clear(self):
        """Move the cursor back up and erase the previously rendered screen."""
        for row in self.screen:
            sys.stdout.write("\033[F")  # cursor up one line
            sys.stdout.write("\033[K")  # erase line
# Driver script (Python 2): replay the instructions from input.txt on a
# 50x6 screen, animating each step in the terminal.
d = display(50,6)
d.render()
with open("input.txt", "r") as f:
    for line in f:
        # Each line is either "rect WxH" or
        # "rotate row|column <axis>=<index> by <amount>".
        cmd, args = line.split(" ", 1)
        if cmd == "rect":
            width, height = args.split("x")
            d.fillRect(int(width),int(height))
        elif cmd == "rotate":
            axis, index, by, amount = args.split(" ")
            # index arrives as e.g. "y=2"; keep only the number.
            index = int(index.split("=")[1])
            amount = int(amount)
            if axis == "row":
                d.shiftRow(index,amount)
            elif axis == "column":
                d.shiftColumn(index, amount)
        # Redraw in place to animate the display.
        d.clear()
        d.render()
        time.sleep(.14)
# Python 2 print statement: final answer is the number of lit pixels.
print d.countLights()
| {
"content_hash": "cadcde41daedb552010845fc15a7d2ef",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 75,
"avg_line_length": 23.80722891566265,
"alnum_prop": 0.5981781376518218,
"repo_name": "tbjoern/adventofcode",
"id": "27f659ca1e45461f30c69af59cc45d3eff3b493b",
"size": "1976",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Eight/script.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "50139"
}
],
"symlink_target": ""
} |
import socket, time
# Python 2.3 does not have 'set' in normal namespace.
# But it can be imported from 'sets'
try:
    set()
except NameError:
    from sets import Set as set

# NOTE: because this string appears after the imports above, it is not the
# real module docstring -- it is a no-op expression kept as documentation.
"""MK Livestatus Python API

This module allows easy access to Nagios via MK Livestatus.
It supports persistent connections via the connection class.
If you want single-shot connections, just initialize a
connection object on-the-fly, e.g.:

r = connection("/var/lib/nagios/rw/live").query_table_assoc("GET hosts")

For persistent connections create and keep an object:

conn = connection("/var/lib/nagios/rw/live")
r1 = conn.query_table_assoc("GET hosts")
r2 = conn.query_row("GET status")
"""

# Keep a global array of persistent connections
# Maps socketurl -> connected socket object, shared by all
# BaseConnection instances created with persist=True.
persistent_connections = {}
# DEBUGGING PERSISTENT CONNECTIONS
# import os
# hirn_debug = file("/tmp/live.log", "a")
# def hirn(x):
# pid = os.getpid()
# hirn_debug.write("[\033[1;3%d;4%dm%d\033[0m] %s\n" % (pid%7+1, (pid/7)%7+1, pid, x))
# hirn_debug.flush()
class MKLivestatusException(Exception):
    """Base class for all errors raised by this livestatus API."""
    def __init__(self, value):
        # Keep the raw value so callers can inspect it programmatically.
        self.parameter = value
    def __str__(self):
        return str(self.parameter)
class MKLivestatusSocketError(MKLivestatusException):
    """Raised when communication over the livestatus socket fails."""
    def __init__(self, reason):
        super(MKLivestatusSocketError, self).__init__(reason)
class MKLivestatusSocketClosed(MKLivestatusSocketError):
    """Raised when zero bytes are read, i.e. the peer closed the socket."""
    def __init__(self, reason):
        super(MKLivestatusSocketClosed, self).__init__(reason)
class MKLivestatusConfigError(MKLivestatusException):
    """Raised for invalid configuration, e.g. a malformed socket url."""
    def __init__(self, reason):
        super(MKLivestatusConfigError, self).__init__(reason)
class MKLivestatusQueryError(MKLivestatusException):
    """Raised when the livestatus server answers with an error status code."""
    def __init__(self, code, reason):
        super(MKLivestatusQueryError, self).__init__("%s: %s" % (code, reason))
        # Keep the numeric status code separately for programmatic checks.
        self.code = code
class MKLivestatusNotFoundError(MKLivestatusException):
    """Raised when a query yields no result where one was required."""
    def __init__(self, query):
        super(MKLivestatusNotFoundError, self).__init__(query)
        # Keep the offending query text for diagnostics.
        self.query = query
# We need some unique value here: a sentinel that distinguishes "no default
# supplied" from every real default value (including None).
NO_DEFAULT = lambda: None

class Helpers:
    """Mixin with convenience wrappers around a .query() method.

    Subclasses must provide query(query, add_headers) returning a list of
    rows, each row being a list of column values.
    """

    def query_value(self, query, deflt = NO_DEFAULT):
        """Issues a query that returns exactly one line and one column and
        returns the response as a single value.

        Raises MKLivestatusNotFoundError on an empty result unless a
        default value was supplied."""
        result = self.query(query, "ColumnHeaders: off\n")
        try:
            return result[0][0]
        except IndexError:
            # Empty response: fall back to the default, if one was given.
            # (Sentinel compared by identity; any real value passes through.)
            if deflt is NO_DEFAULT:
                raise MKLivestatusNotFoundError(query)
            else:
                return deflt

    def query_row(self, query):
        """Issues a query that returns one line of data and returns the
        elements of that line as a list."""
        return self.query(query, "ColumnHeaders: off\n")[0]

    def query_row_assoc(self, query):
        """Issues a query that returns one line of data and returns the
        elements of that line as a dictionary from column names to values."""
        # Row 0 is the header line, row 1 the data line.
        r = self.query(query, "ColumnHeaders: on\n")[0:2]
        return dict(zip(r[0], r[1]))

    def query_column(self, query):
        """Issues a query that returns exactly one column and returns the
        values of all lines in that column as a single list."""
        return [ l[0] for l in self.query(query, "ColumnHeaders: off\n") ]

    def query_column_unique(self, query):
        """Issues a query that returns exactly one column and returns the
        values of all lines with duplicates removed (first occurrence wins,
        order preserved)."""
        result = []
        for line in self.query(query, "ColumnHeaders: off\n"):
            if line[0] not in result:
                result.append(line[0])
        return result

    def query_table(self, query):
        """Issues a query that may return multiple lines and columns and
        returns a list of lists."""
        return self.query(query, "ColumnHeaders: off\n")

    def query_table_assoc(self, query):
        """Issues a query that may return multiple lines and columns and
        returns a dictionary from column names to values for each line.
        This can be very ineffective for large response sets."""
        response = self.query(query, "ColumnHeaders: on\n")
        headers = response[0]
        return [ dict(zip(headers, line)) for line in response[1:] ]

    def query_summed_stats(self, query, add_headers = ""):
        """Convenience function for adding up numbers from Stats queries.

        Adds up results column-wise. This is useful for multisite queries."""
        data = self.query(query, add_headers)
        if len(data) == 1:
            return data[0]
        elif len(data) == 0:
            raise MKLivestatusNotFoundError("Empty result to Stats-Query")
        # Sum each column across all rows.
        return [ sum([ row[x] for row in data ])
                 for x in range(0, len(data[0])) ]
class BaseConnection:
    """Low-level connection to a livestatus socket (unix or tcp).

    Handles connecting (optionally reusing sockets from the module-global
    persistent_connections pool), sending queries/commands, and parsing
    fixed16-framed responses.  NOTE: Python 2 code ('except X, e' syntax).
    """

    def __init__(self, socketurl, persist = False):
        """Create a new connection to a MK Livestatus socket"""
        self.add_headers = ""      # extra header lines appended to every query
        self.persist = persist     # share sockets via persistent_connections
        self.socketurl = socketurl # "unix:/path" or "tcp:host:port"
        self.socket = None         # created lazily on first query
        self.timeout = None        # socket timeout in seconds, if set
        self.successful_persistence = False

    def successfully_persisted(self):
        """Return True if a pooled (persistent) socket was reused."""
        return self.successful_persistence

    def add_header(self, header):
        """Append a raw header line to all subsequent queries."""
        self.add_headers += header + "\n"

    def set_timeout(self, timeout):
        """Set the socket timeout in seconds (applies to the open socket too)."""
        self.timeout = timeout
        if self.socket:
            self.socket.settimeout(float(timeout))

    def connect(self):
        """Open the socket, reusing a pooled one when persistence is enabled."""
        if self.persist and self.socketurl in persistent_connections:
            self.socket = persistent_connections[self.socketurl]
            self.successful_persistence = True
            return

        self.successful_persistence = False

        # Create new socket
        self.socket = None
        url = self.socketurl
        parts = url.split(":")
        if parts[0] == "unix":
            if len(parts) != 2:
                raise MKLivestatusConfigError("Invalid livestatus unix url: %s. "
                        "Correct example is 'unix:/var/run/nagios/rw/live'" % url)
            self.socket = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
            target = parts[1]

        elif parts[0] == "tcp":
            try:
                host = parts[1]
                port = int(parts[2])
            except:
                raise MKLivestatusConfigError("Invalid livestatus tcp url '%s'. "
                        "Correct example is 'tcp:somehost:6557'" % url)
            self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            target = (host, port)

        else:
            raise MKLivestatusConfigError("Invalid livestatus url '%s'. "
                    "Must begin with 'tcp:' or 'unix:'" % url)

        try:
            if self.timeout:
                self.socket.settimeout(float(self.timeout))
            self.socket.connect(target)
        except Exception, e:
            self.socket = None
            raise MKLivestatusSocketError("Cannot connect to '%s': %s" % (self.socketurl, e))

        # Register the fresh socket in the shared pool for later reuse.
        if self.persist:
            persistent_connections[self.socketurl] = self.socket

    def disconnect(self):
        """Forget the socket (and drop it from the persistence pool)."""
        self.socket = None
        if self.persist:
            del persistent_connections[self.socketurl]

    def receive_data(self, size):
        """Read exactly `size` bytes from the socket, looping over recv()."""
        result = ""
        while size > 0:
            packet = self.socket.recv(size)
            if len(packet) == 0:
                raise MKLivestatusSocketClosed("Read zero data from socket, nagios server closed connection")
            size -= len(packet)
            result += packet
        return result

    def do_query(self, query, add_headers = ""):
        """Send one query and return the parsed response."""
        self.send_query(query, add_headers)
        return self.recv_response(query, add_headers)

    def send_query(self, query, add_headers = ""):
        """Send a query, lazily connecting first if necessary."""
        if self.socket == None:
            self.connect()
        if not query.endswith("\n"):
            query += "\n"
        # self.auth_header is expected to be provided by subclasses
        # (e.g. SingleSiteConnection sets it in __init__).
        query += self.auth_header + self.add_headers
        # Ask for python-literal output with a fixed16 response header so
        # recv_response() can parse the answer; keep the socket alive for
        # further queries on the same connection.
        query += "Localtime: %d\nOutputFormat: python\nKeepAlive: on\nResponseHeader: fixed16\n" % int(time.time())
        query += add_headers
        if not query.endswith("\n"):
            query += "\n"
        query += "\n"
        try:
            self.socket.send(query)
        except IOError, e:
            # The socket is broken: purge it from the persistence pool.
            if self.persist:
                del persistent_connections[self.socketurl]
                self.successful_persistence = False
            self.socket = None
            raise MKLivestatusSocketError(str(e))

    # Reads a response from the livestatus socket. If the socket is closed
    # by the livestatus server, we automatically make a reconnect and send
    # the query again (once). This is due to timeouts during keepalive.
    def recv_response(self, query = None, add_headers = ""):
        try:
            # fixed16 header: 3-digit status code + space-padded body length.
            resp = self.receive_data(16)
            code = resp[0:3]
            try:
                length = int(resp[4:15].lstrip())
            except:
                raise MKLivestatusSocketError("Malformed output. Livestatus TCP socket might be unreachable.")
            data = self.receive_data(length)
            if code == "200":
                try:
                    # OutputFormat: python -- the payload is a python literal.
                    return eval(data)
                except:
                    raise MKLivestatusSocketError("Malformed output")
            else:
                raise MKLivestatusQueryError(code, data.strip())
        except MKLivestatusSocketClosed:
            # Server closed the keepalive socket: reconnect and retry once.
            self.disconnect()
            if query:
                self.connect()
                self.send_query(query, add_headers)
                return self.recv_response() # do not send query again -> danger of infinite loop
            else:
                raise
        except IOError, e:
            self.socket = None
            if self.persist:
                del persistent_connections[self.socketurl]
            raise MKLivestatusSocketError(str(e))

    def do_command(self, command):
        """Send an external command (no response is read for commands)."""
        if self.socket == None:
            self.connect()
        if not command.endswith("\n"):
            command += "\n"
        try:
            self.socket.send("COMMAND " + command + "\n")
        except IOError, e:
            self.socket = None
            if self.persist:
                del persistent_connections[self.socketurl]
            raise MKLivestatusSocketError(str(e))
class SingleSiteConnection(BaseConnection, Helpers):
    """Connection to a single livestatus site, with the Helpers query API."""

    def __init__(self, socketurl, persist = False):
        BaseConnection.__init__(self, socketurl, persist)
        self.prepend_site = False
        self.auth_users = {}
        self.deadsites = {}  # never filled, just for compatibility
        self.auth_header = ""
        self.limit = None

    def set_prepend_site(self, p):
        """When enabled, every result row gets an (empty) site column."""
        self.prepend_site = p

    def set_only_sites(self, os = None):
        """No-op; exists for interface compatibility with MultiSiteConnection."""
        pass

    def set_limit(self, limit = None):
        """Limit the number of datasets returned by subsequent queries."""
        self.limit = limit

    def query(self, query, add_headers = ""):
        """Run a query, honoring the configured limit and site-prefix mode."""
        if self.limit is not None:
            query += "Limit: %d\n" % self.limit
        data = self.do_query(query, add_headers)
        if not self.prepend_site:
            return data
        # Single-site connections have no site name; prefix an empty column.
        return [ [''] + row for row in data ]

    def command(self, command, site = None):
        """Send an external command (the site argument is ignored here)."""
        self.do_command(command)

    # Set user to be used in certain authorization domain
    def set_auth_user(self, domain, user):
        if user:
            self.auth_users[domain] = user
        else:
            del self.auth_users[domain]

    # Switch future request to new authorization domain
    def set_auth_domain(self, domain):
        auth_user = self.auth_users.get(domain)
        if auth_user:
            self.auth_header = "AuthUser: %s\n" % auth_user
        else:
            self.auth_header = ""
# sites is a dictionary from site name to a dict.
# Keys in the dictionary:
# socket: socketurl (obligatory)
# timeout: timeout for tcp/unix in seconds
class MultiSiteConnection(Helpers):
def __init__(self, sites, disabled_sites = []):
self.sites = sites
self.connections = []
self.deadsites = {}
self.prepend_site = False
self.only_sites = None
self.limit = None
self.parallelize = True
# Helper function for connecting to a site
def connect_to_site(sitename, site, temporary=False):
try:
url = site["socket"]
persist = not temporary and site.get("persist", False)
connection = SingleSiteConnection(url, persist)
if "timeout" in site:
connection.set_timeout(int(site["timeout"]))
connection.connect()
self.connections.append((sitename, site, connection))
except Exception, e:
self.deadsites[sitename] = {
"exception" : e,
"site" : site,
}
# Needed for temporary connection for status_hosts in disabled sites
def disconnect_site(sitename):
i = 0
for name, site, connection in self.connections:
if name == sitename:
del self.connections[i]
return
i += 1
# Status host: A status host helps to prevent trying to connect
# to a remote site which is unreachable. This is done by looking
# at the current state of a certain host on a local site that is
# representing the connection to the remote site. The status host
# is specified as an optional pair of (site, host) in the entry
# "status_host". We first connect to all sites without a status_host
# entry, then retrieve the host states of the status hosts and then
# connect to the remote site which are reachable
# Tackle very special problem: If the user disables a site which
# provides status_host information for other sites, the dead-detection
# would not work. For that cases we make a temporary connection just
# to fetch the status information
extra_status_sites = {}
if len(disabled_sites) > 0:
status_sitenames = set([])
for sitename, site in sites.items():
try:
s, h = site.get("status_host")
status_sitenames.add(s)
except:
continue
for sitename in status_sitenames:
site = disabled_sites.get(sitename)
if site:
extra_status_sites[sitename] = site
# First connect to sites without status host. Collect status
# hosts at the same time.
status_hosts = {} # dict from site to list of status_hosts
for sitename, site in sites.items() + extra_status_sites.items():
status_host = site.get("status_host")
if status_host:
if type(status_host) != tuple or len(status_host) != 2:
raise MKLivestatusConfigError("Status host of site %s is %r, but must be pair of site and host" %
(sitename, status_host))
s, h = status_host
status_hosts[s] = status_hosts.get(s, []) + [h]
else:
connect_to_site(sitename, site)
# Now learn current states of status hosts and store it in a dictionary
# from (local_site, host) => state
status_host_states = {}
for sitename, hosts in status_hosts.items():
# Fetch all the states of status hosts of this local site in one query
query = "GET hosts\nColumns: name state has_been_checked last_time_up\n"
for host in hosts:
query += "Filter: name = %s\n" % host
query += "Or: %d\n" % len(hosts)
self.set_only_sites([sitename]) # only connect one site
try:
result = self.query_table(query)
# raise MKLivestatusConfigError("TRESulT: %s" % (result,))
for host, state, has_been_checked, lastup in result:
if has_been_checked == 0:
state = 3
status_host_states[(sitename, host)] = (state, lastup)
except Exception, e:
raise MKLivestatusConfigError(e)
status_host_states[(sitename, host)] = (str(e), None)
self.set_only_sites() # clear site filter
# Disconnect from disabled sites that we connected to only to
# get status information from
for sitename, site in extra_status_sites.items():
disconnect_site(sitename)
# Now loop over all sites having a status_host and take that state
# of that into consideration
for sitename, site in sites.items():
status_host = site.get("status_host")
if status_host:
now = time.time()
shs, lastup = status_host_states.get(status_host, (4, now)) # None => Status host not existing
deltatime = now - lastup
if shs == 0 or shs == None:
connect_to_site(sitename, site)
else:
if shs == 1:
ex = "The remote monitoring host is down"
elif shs == 2:
ex = "The remote monitoring host is unreachable"
elif shs == 3:
ex = "The remote monitoring host's state it not yet determined"
elif shs == 4:
ex = "Invalid status host: site %s has no host %s" % (status_host[0], status_host[1])
else:
ex = "Error determining state of remote monitoring host: %s" % shs
self.deadsites[sitename] = {
"site" : site,
"status_host_state" : shs,
"exception" : ex,
}
def add_header(self, header):
for sitename, site, connection in self.connections:
connection.add_header(header)
def set_prepend_site(self, p):
self.prepend_site = p
def set_only_sites(self, os = None):
self.only_sites = os
# Impose Limit on number of returned datasets (distributed amoung sites)
def set_limit(self, limit = None):
self.limit = limit
def dead_sites(self):
return self.deadsites
def alive_sites(self):
return self.connections.keys()
def successfully_persisted(self):
for sitename, site, connection in self.connections:
if connection.successfully_persisted():
return True
return False
def set_auth_user(self, domain, user):
for sitename, site, connection in self.connections:
connection.set_auth_user(domain, user)
def set_auth_domain(self, domain):
for sitename, site, connection in self.connections:
connection.set_auth_domain(domain)
def query(self, query, add_headers = ""):
if self.parallelize:
return self.query_parallel(query, add_headers)
else:
return self.query_non_parallel(query, add_headers)
def query_non_parallel(self, query, add_headers = ""):
result = []
stillalive = []
limit = self.limit
for sitename, site, connection in self.connections:
if self.only_sites != None and sitename not in self.only_sites:
stillalive.append( (sitename, site, connection) ) # state unknown, assume still alive
continue
try:
if limit != None:
limit_header = "Limit: %d\n" % limit
else:
limit_header = ""
r = connection.query(query, add_headers + limit_header)
if self.prepend_site:
r = [ [sitename] + l for l in r ]
if limit != None:
limit -= len(r) # Account for portion of limit used by this site
result += r
stillalive.append( (sitename, site, connection) )
except Exception, e:
self.deadsites[sitename] = {
"exception" : e,
"site" : site,
}
self.connections = stillalive
return result
# New parallelized version of query(). The semantics differs in the handling
# of Limit: since all sites are queried in parallel, the Limit: is simply
# applied to all sites - resulting in possibly more results then Limit requests.
def query_parallel(self, query, add_headers = ""):
if self.only_sites != None:
active_sites = [ c for c in self.connections if c[0] in self.only_sites ]
else:
active_sites = self.connections
start_time = time.time()
stillalive = []
limit = self.limit
if limit != None:
limit_header = "Limit: %d\n" % limit
else:
limit_header = ""
# First send all queries
for sitename, site, connection in active_sites:
try:
connection.send_query(query, add_headers + limit_header)
except Exception, e:
self.deadsites[sitename] = {
"exception" : e,
"site" : site,
}
# Then retrieve all answers. We will be as slow as the slowest of all
# connections.
result = []
for sitename, site, connection in self.connections:
if self.only_sites != None and sitename not in self.only_sites:
stillalive.append( (sitename, site, connection) ) # state unknown, assume still alive
continue
try:
r = connection.recv_response(query, add_headers + limit_header)
stillalive.append( (sitename, site, connection) )
if self.prepend_site:
r = [ [sitename] + l for l in r ]
result += r
except Exception, e:
self.deadsites[sitename] = {
"exception" : e,
"site" : site,
}
self.connections = stillalive
return result
def command(self, command, sitename = "local"):
if sitename in self.deadsites:
raise MKLivestatusSocketError("Connection to site %s is dead: %s" % \
(sitename, self.deadsites[sitename]["exception"]))
conn = [t[2] for t in self.connections if t[0] == sitename]
if len(conn) == 0:
raise MKLivestatusConfigError("Cannot send command to unconfigured site '%s'" % sitename)
conn[0].do_command(command)
# Return connection to localhost (UNIX), if available
def local_connection(self):
for sitename, site, connection in self.connections:
if site["socket"].startswith("unix:"):
return connection
raise MKLivestatusConfigError("No livestatus connection to local host")
# Examle for forcing local connection:
# live.local_connection().query_single_value(...)
| {
"content_hash": "86f1dd87a2930dec87bdd3972a00bb26",
"timestamp": "",
"source": "github",
"line_count": 610,
"max_line_length": 117,
"avg_line_length": 37.967213114754095,
"alnum_prop": 0.5701640759930915,
"repo_name": "daniellawrence/simplenagios",
"id": "9e130d1ab58cd49dbfeecf5875527af2535f4767",
"size": "24636",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "contrib/livestatus.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "4392"
},
{
"name": "Python",
"bytes": "61212"
},
{
"name": "Shell",
"bytes": "337"
}
],
"symlink_target": ""
} |
import os
import unittest
from GoogleScraper import scrape_with_config
from GoogleScraper.parsing import get_parser_by_search_engine
from GoogleScraper.config import get_config
from collections import Counter
# Scraper configuration shared by all test cases in this module.
config = get_config()
# Directory containing this test module; used to resolve fixture file paths.
base = os.path.dirname(os.path.realpath(__file__))
# Every search engine the GoogleScraper configuration declares support for.
all_search_engines = config.get('supported_search_engines')
class GoogleScraperIntegrationTestCase(unittest.TestCase):
    """Integration tests for GoogleScraper.

    Parsing tests run against static SERP html fixtures stored under
    'data/'; output tests invoke scrape_with_config() with caching enabled,
    so results are served from the cache directories checked in under
    'data/'.
    """

    def setUp(self):
        pass

    def tearDown(self):
        pass

    ### Test (very) static parsing for all search engines. The html files are saved in 'data/uncompressed_serp_pages/'
    # The sample files may become old and the SERP format may change over time. But this is the only
    # way to assert that a certain url or piece must be in the results.
    # If the SERP format changes, update accordingly (after all, this shouldn't happen that often).

    def get_parser_for_file(self, se, file, **kwargs):
        """Parse the fixture *file* with the parser registered for engine *se*."""
        file = os.path.join(base, file)
        with open(file, 'r') as f:
            html = f.read()
        parser = get_parser_by_search_engine(se)
        parser = parser(config, html, **kwargs)
        return parser

    def assert_around_10_results_with_snippets(self, parser, delta=4):
        """Assert that roughly ten parsed results carry a non-None snippet."""
        self.assertAlmostEqual(
            len([v['snippet'] for v in parser.search_results['results'] if v['snippet'] is not None]), 10, delta=delta)

    def assert_atleast90percent_of_items_are_not_None(self, parser, exclude_keys=frozenset({'snippet'})):
        """Fail if more than ~10% of the values for any result key are None.

        Keys in *exclude_keys* (immutable default, so it is safe as a
        default argument) are exempt from the check.
        """
        for result_type, res in parser.search_results.items():
            c = Counter()
            for item in res:
                for key, value in item.items():
                    if value is None:
                        c[key] += 1
            for key, value in c.items():
                if key not in exclude_keys:
                    assert (len(res) / int(value)) >= 9, key + ' has too many times a None value: ' + '{}/{}'.format(
                        int(value), len(res))

    def test_parse_google(self):
        """Static parse of a German google SERP fixture."""
        parser = self.get_parser_for_file('google', 'data/uncompressed_serp_pages/abrakadabra_google_de_ip.html')
        assert '232.000.000 Ergebnisse' in parser.num_results_for_query
        assert len(parser.search_results['results']) == 12, len(parser.search_results)
        assert all([v['visible_link'] for v in parser.search_results['results']])
        assert all([v['link'] for v in parser.search_results['results']])
        self.assert_around_10_results_with_snippets(parser)
        assert any(['www.extremnews.com' in v['visible_link'] for v in parser.search_results[
            'results']]), 'Theres a link in this serp page with visible url "www.extremnews.com"'
        assert any(
            ['er Noise-Rock-Band Sonic Youth und wurde' in v['snippet'] for v in parser.search_results['results'] if
             v['snippet']]), 'Specific string not found in snippet.'
        self.assert_atleast90percent_of_items_are_not_None(parser)

    def test_parse_bing(self):
        """Static parse of a German bing SERP fixture."""
        parser = self.get_parser_for_file('bing', 'data/uncompressed_serp_pages/hello_bing_de_ip.html')
        assert '16.900.000 results' == parser.num_results_for_query
        assert len(parser.search_results['results']) == 12, len(parser.search_results['results'])
        assert all([v['visible_link'] for v in parser.search_results['results']])
        assert all([v['link'] for v in parser.search_results['results']])
        self.assert_around_10_results_with_snippets(parser)
        assert any(['Hello Kitty Online Shop - Hello' in v['title'] for v in
                    parser.search_results['results']]), 'Specific title not found in snippet.'
        self.assert_atleast90percent_of_items_are_not_None(parser)

    def test_parse_yahoo(self):
        """Static parse of a German yahoo SERP fixture."""
        parser = self.get_parser_for_file('yahoo', 'data/uncompressed_serp_pages/snow_yahoo_de_ip.html')
        assert '19,400,000 Ergebnisse' == parser.num_results_for_query
        assert len(parser.search_results['results']) >= 10, len(parser.search_results['results'])
        assert len([v['visible_link'] for v in parser.search_results['results'] if
                    v['visible_link']]) == 10, 'Not 10 elements with a visible link in yahoo serp page'
        assert all([v['link'] for v in parser.search_results['results']])
        self.assert_around_10_results_with_snippets(parser)
        assert any(
            [' crystalline water ice that falls from clouds. Since snow is composed of small ic' in v['snippet'] for v
             in parser.search_results['results'] if v['snippet']]), 'Specific string not found in snippet.'
        self.assert_atleast90percent_of_items_are_not_None(parser)

    def test_parse_yandex(self):
        """Static parse of a German yandex SERP fixture."""
        parser = self.get_parser_for_file('yandex', 'data/uncompressed_serp_pages/game_yandex_de_ip.html')
        assert '2 029 580' in parser.num_results_for_query
        assert len(parser.search_results['results']) == 10, len(parser.search_results['results'])
        assert len([v['visible_link'] for v in parser.search_results['results'] if
                    v['visible_link']]) == 10, 'Not 10 elements with a visible link in yandex serp page'
        assert all([v['link'] for v in parser.search_results['results']])
        self.assert_around_10_results_with_snippets(parser)
        assert any(['n play games to compile games statist' in v['snippet'] for v in parser.search_results['results'] if
                    v['snippet']]), 'Specific string not found in snippet.'
        self.assert_atleast90percent_of_items_are_not_None(parser)

    def test_parse_baidu(self):
        """Static parse of a baidu SERP fixture."""
        parser = self.get_parser_for_file('baidu', 'data/uncompressed_serp_pages/number_baidu_de_ip.html')
        assert '100,000,000' in parser.num_results_for_query
        assert len(parser.search_results['results']) >= 6, len(parser.search_results['results'])
        assert all([v['link'] for v in parser.search_results['results']])
        self.assert_around_10_results_with_snippets(parser, delta=5)
        self.assert_atleast90percent_of_items_are_not_None(parser)

    def test_parse_duckduckgo(self):
        """Static parse of a duckduckgo SERP fixture (smoke test only)."""
        parser = self.get_parser_for_file('duckduckgo', 'data/uncompressed_serp_pages/mountain_duckduckgo_de_ip.html')
        # duckduckgo markup is too unstable to assert on specific contents here.

    def test_parse_ask(self):
        """Static parse of an ask.com SERP fixture."""
        parser = self.get_parser_for_file('ask', 'data/uncompressed_serp_pages/fellow_ask_de_ip.html')
        assert len(parser.search_results['results']) >= 10, len(parser.search_results['results'])
        assert len([v['visible_link'] for v in parser.search_results['results'] if
                    v['visible_link']]) == 10, 'Not 10 elements with a visible link in ask serp page'
        assert all([v['link'] for v in parser.search_results['results']])
        self.assert_around_10_results_with_snippets(parser)
        self.assert_atleast90percent_of_items_are_not_None(parser)

    ### test csv output
    def test_csv_output_static(self):
        """Test csv output.

        Test parsing 4 html pages with two queries and two pages per query and
        transforming the results to csv format.

        The cached file should be saved in 'data/csv_tests/', there should
        be as many files as search_engine * pages_for_keyword

        The keyword used in the static SERP pages MUST be 'some words'

        The filenames must be in the GoogleScraper cache format.
        """
        import csv
        from GoogleScraper.output_converter import csv_fieldnames
        number_search_engines = len(all_search_engines)
        csv_outfile = os.path.join(base, 'data/tmp/csv_test.csv')
        config = {
            'keyword': 'some words',
            'search_engines': all_search_engines,
            'num_pages_for_keyword': 2,
            'scrape_method': 'selenium',
            'cachedir': os.path.join(base, 'data/csv_tests/'),
            'do_caching': True,
            'verbosity': 0,
            'output_filename': csv_outfile,
        }
        search = scrape_with_config(config)
        assert os.path.exists(csv_outfile), '{} does not exist'.format(csv_outfile)
        # the items that should always have a value:
        notnull = (
            'link', 'query', 'rank', 'domain', 'title', 'link_type', 'scrape_method', 'page_number',
            'search_engine_name',
            'snippet')
        # Use a context manager so the csv file handle is always closed.
        with open(csv_outfile, 'rt') as outfp:
            reader = csv.reader(outfp)
            for rownum, row in enumerate(reader):
                if rownum == 0:
                    header = row
                    header_keys = set(row)
                    assert header_keys.issubset(set(csv_fieldnames)), 'Invalid CSV header: {}'.format(header)
                for item in notnull:
                    assert row[header.index(item)], '{} has a item that has no value: {}'.format(item, row)
        self.assertAlmostEqual(number_search_engines * 2 * 10, rownum, delta=30)

    ### test json output
    def test_json_output_static(self):
        """Test json output.
        """
        import json
        number_search_engines = len(all_search_engines)
        json_outfile = os.path.join(base, 'data/tmp/json_test.json')
        config = {
            'keyword': 'some words',
            'search_engines': all_search_engines,
            'num_pages_for_keyword': 2,
            'scrape_method': 'selenium',
            'cachedir': os.path.join(base, 'data/json_tests/'),
            'do_caching': True,
            'verbosity': 0,
            'output_filename': json_outfile
        }
        search = scrape_with_config(config)
        assert os.path.exists(json_outfile), '{} does not exist'.format(json_outfile)
        # Use a context manager so the json file handle is always closed,
        # even when parsing fails.
        with open(json_outfile, 'r') as outfp:
            try:
                results = json.load(outfp)
            except ValueError as e:
                print('Cannot parse output json file {}. Reason: {}'.format(json_outfile, e))
                raise
        # the items that should always have a value:
        notnull = ('link', 'rank', 'domain', 'title', 'link_type')
        num_results = 0
        for item in results:
            for k, v in item.items():
                if k == 'results':
                    for res in v:
                        num_results += 1
                        for item in notnull:
                            assert res[item], '{} has a item that has no value: {}'.format(item, res)
        self.assertAlmostEqual(number_search_engines * 2 * 10, num_results, delta=30)

    ### test correct handling of SERP page that has no results for search query.
    def test_no_results_for_query_google(self):
        parser = self.get_parser_for_file('google', 'data/uncompressed_no_results_serp_pages/google.html')
        assert parser.effective_query == '"be dealt and be evaluated"', 'No effective query.'

    def test_no_results_for_query_yandex(self):
        parser = self.get_parser_for_file('yandex', 'data/uncompressed_no_results_serp_pages/yandex.html')
        assert parser.effective_query == 'food', 'Wrong effective query. {}'.format(parser.effective_query)

    def test_no_results_for_query_bing(self):
        parser = self.get_parser_for_file('bing', 'data/uncompressed_no_results_serp_pages/bing.html')
        assert parser.effective_query == 'food', 'Wrong effective query. {}'.format(parser.effective_query)

    def test_no_results_for_query_ask(self):
        parser = self.get_parser_for_file('ask', 'data/uncompressed_no_results_serp_pages/ask.html')
        assert parser.effective_query == 'food', 'Wrong effective query. {}'.format(parser.effective_query)

    ### test correct parsing of the current page number.
    def test_page_number_selector_yandex(self):
        parser = self.get_parser_for_file('yandex', 'data/page_number_selector/yandex_5.html')
        assert parser.page_number == 5, 'Wrong page number. Got {}'.format(parser.page_number)

    def test_page_number_selector_google(self):
        """Google is a bitch in testing this. While saving the html file, the selected
        page is set back to 1. So page_number is always one."""
        parser = self.get_parser_for_file('google', 'data/page_number_selector/google_8.html')
        assert parser.page_number == 1, 'Wrong page number. Got {}'.format(parser.page_number)

    def test_page_number_selector_bing(self):
        parser = self.get_parser_for_file('bing', 'data/page_number_selector/bing_5.html')
        assert parser.page_number == 5, 'Wrong page number. Got {}'.format(parser.page_number)

    def test_page_number_selector_yahoo(self):
        parser = self.get_parser_for_file('yahoo', 'data/page_number_selector/yahoo_3.html')
        assert parser.page_number == 3, 'Wrong page number. Got {}'.format(parser.page_number)

    def test_page_number_selector_baidu(self):
        parser = self.get_parser_for_file('baidu', 'data/page_number_selector/baidu_9.html')
        assert parser.page_number == 9, 'Wrong page number. Got {}'.format(parser.page_number)

    def test_page_number_selector_ask(self):
        parser = self.get_parser_for_file('ask', 'data/page_number_selector/ask_7.html')
        assert parser.page_number == 7, 'Wrong page number. Got {}'.format(parser.page_number)

    ### test all SERP object indicate no results for all search engines.
    def test_no_results_serp_object(self):
        config = {
            'keyword': 'asdfasdfa7654567654345654343sdfasd',
            'search_engines': all_search_engines,
            'num_pages_for_keyword': 1,
            'scrape_method': 'selenium',
            'cachedir': os.path.join(base, 'data/no_results/'),
            'do_caching': True,
            'verbosity': 1,
        }
        search = scrape_with_config(config)
        assert search.number_search_engines_used == len(all_search_engines)
        # BUGFIX: this previously compared the split against itself, which is
        # always true. Compare the number of engines actually used against
        # the configured engine set instead (assumes used_search_engines is a
        # comma separated string - TODO confirm against the search model).
        assert len(search.used_search_engines.split(',')) == len(all_search_engines)
        assert search.number_proxies_used == 1
        assert search.number_search_queries == 1
        assert search.started_searching < search.stopped_searching
        assert len(all_search_engines) == len(search.serps), 'Not enough results. Expected: {}, got {}'.format(
            len(all_search_engines), len(search.serps))
        for serp in search.serps:
            assert serp.has_no_results_for_query(), 'num_results must be 0 but is {}. {}'.format(serp.num_results,
                                                                                                 serp.links)
            # some search engine do alternative searches instead of yielding
            # nothing at all.
            if serp.search_engine_name in ('google', 'bing'):
                assert serp.effective_query, '{} must have an effective query when a keyword has no results.'.format(
                    serp.search_engine_name)

    def test_no_results2_static(self):
        query = '"Find ich besser als einfach nur den Propheten zu zeichnen, denn das ist nur reine Provokation. Was Titanic macht ist Satire."'
        for search_engine in ('google', 'duckduckgo', 'bing', 'yahoo'):
            parser = self.get_parser_for_file(search_engine, 'data/no_results_literal/{}.html'.format(search_engine),
                                              query=query)
            assert parser.num_results == 0 or parser.effective_query, 'No results must be true for search engine {}! But got {} serp entries and effective query: {}.'.format(
                search_engine, parser.num_results, parser.effective_query)

    ### test correct parsing of the number of results for the query..
    def test_csv_file_header_always_the_same(self):
        """
        Check that csv files have always the same order in their header.
        """
        csv_outfile_1 = os.path.join(base, 'data/tmp/csvout1.csv')
        csv_outfile_2 = os.path.join(base, 'data/tmp/csvout2.csv')
        config = {
            'keyword': 'some words',
            'search_engines': all_search_engines,
            'num_pages_for_keyword': 2,
            'scrape_method': 'selenium',
            'cachedir': os.path.join(base, 'data/csv_tests/'),
            'do_caching': True,
            'verbosity': 0,
            'output_filename': csv_outfile_1,
        }
        search = scrape_with_config(config)
        search = scrape_with_config(config)
        config.update({'output_filename': csv_outfile_2})
        search = scrape_with_config(config)
        assert os.path.isfile(csv_outfile_1) and os.path.isfile(csv_outfile_2)
        import csv
        # Context managers guarantee both file handles are closed.
        with open(csv_outfile_1, 'rt') as file1, open(csv_outfile_2, 'rt') as file2:
            reader1, reader2 = csv.DictReader(file1), csv.DictReader(file2)
            header1, header2 = reader1.fieldnames, reader2.fieldnames
        from GoogleScraper.output_converter import csv_fieldnames
        assert header1 == header2 == csv_fieldnames

    def test_duckduckgo_http_mode_works(self):
        """
        duckduckgo has a non javascript version that should
        be queried when using http mode
        """
        parser = self.get_parser_for_file('duckduckgo', 'data/various/duckduckgo_http_mode_december_2015.html',
                                          query='what happened')
        assert parser.num_results > 8
        for result_type, data in parser.search_results.items():
            if result_type == 'normal':
                assert len(data) > 8
                for serp in data:
                    assert isinstance(serp['rank'], int)
                    assert len(serp['link']) > 8
                    assert serp['title']
                    assert len(serp['snippet']) > 5
if __name__ == '__main__':
    # warnings='ignore' suppresses ResourceWarnings etc. emitted while the
    # scraping machinery runs under the test runner.
    unittest.main(warnings='ignore')
| {
"content_hash": "3911e5fdb8386d67419f9399d72268bf",
"timestamp": "",
"source": "github",
"line_count": 397,
"max_line_length": 174,
"avg_line_length": 44.30226700251889,
"alnum_prop": 0.6168410279736184,
"repo_name": "ARDivekar/SearchDistribute",
"id": "1329b50f99e227615596b2a433d801ab6769ae13",
"size": "17632",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "other/Legacy/Misc files/GoogleScraper-master/tests/integration_tests.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "3791384"
},
{
"name": "Python",
"bytes": "608509"
}
],
"symlink_target": ""
} |
import logging
import os
import os.path
import codecs
from edge.opensearch.datasetisoresponse import DatasetIsoResponse
from edge.opensearch.datasetwriter import DatasetWriter
class IsoWriter(DatasetWriter):
    """Dataset writer that renders OpenSearch responses from an ISO template.

    The template file is named by the 'service'/'template' configuration
    option and resolved relative to the directory of the configuration file.
    """

    def __init__(self, configFilePath):
        """Load the configuration and read the ISO response template.

        configFilePath -- path to the writer configuration file; its
        directory is the base for the relative template path.
        """
        super(IsoWriter, self).__init__(configFilePath)
        # os.path.join replaces manual os.sep string concatenation.
        templatePath = os.path.join(
            os.path.dirname(configFilePath),
            self._configuration.get('service', 'template'))
        self.template = self._readTemplate(templatePath)

    def _generateOpenSearchResponse(self, solrResponse, searchText, searchUrl, searchParams, pretty):
        """Build a DatasetIsoResponse for *solrResponse* using the template."""
        response = DatasetIsoResponse()
        response.setTemplate(self.template)
        return response.generate(solrResponse, pretty=pretty)

    def _readTemplate(self, path):
        """Return the UTF-8 decoded contents of the template file at *path*."""
        # Context manager guarantees the handle is closed even on a read
        # error (the original left it open in that case); also avoids
        # shadowing the 'file' builtin.
        with codecs.open(path, encoding='utf-8') as template_file:
            return template_file.read()
| {
"content_hash": "8fd8a3ea984cdcf60fd07ffb1f3ca27e",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 101,
"avg_line_length": 32.607142857142854,
"alnum_prop": 0.7042716319824753,
"repo_name": "dataplumber/edge",
"id": "a93bd7694535cd6966e6f421d697ae13eae74cf3",
"size": "913",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/main/python/plugins/dataset/iso/IsoWriter.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "279147"
},
{
"name": "SQLPL",
"bytes": "66101"
},
{
"name": "Shell",
"bytes": "279"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.