Dataset schema (ranges are observed min to max; ⌀ marks nullable columns):

hexsha: stringlengths 40 to 40 | size: int64 3 to 1.03M | ext: stringclasses 10 values | lang: stringclasses 1 value
max_stars_repo_path: stringlengths 3 to 972 | max_stars_repo_name: stringlengths 6 to 130 | max_stars_repo_head_hexsha: stringlengths 40 to 78 | max_stars_repo_licenses: listlengths 1 to 10 | max_stars_count: int64 1 to 191k ⌀ | max_stars_repo_stars_event_min_datetime: stringlengths 24 to 24 ⌀ | max_stars_repo_stars_event_max_datetime: stringlengths 24 to 24 ⌀
max_issues_repo_path: stringlengths 3 to 972 | max_issues_repo_name: stringlengths 6 to 130 | max_issues_repo_head_hexsha: stringlengths 40 to 78 | max_issues_repo_licenses: listlengths 1 to 10 | max_issues_count: int64 1 to 116k ⌀ | max_issues_repo_issues_event_min_datetime: stringlengths 24 to 24 ⌀ | max_issues_repo_issues_event_max_datetime: stringlengths 24 to 24 ⌀
max_forks_repo_path: stringlengths 3 to 972 | max_forks_repo_name: stringlengths 6 to 130 | max_forks_repo_head_hexsha: stringlengths 40 to 78 | max_forks_repo_licenses: listlengths 1 to 10 | max_forks_count: int64 1 to 105k ⌀ | max_forks_repo_forks_event_min_datetime: stringlengths 24 to 24 ⌀ | max_forks_repo_forks_event_max_datetime: stringlengths 24 to 24 ⌀
content: stringlengths 3 to 1.03M | avg_line_length: float64 1.13 to 941k | max_line_length: int64 2 to 941k | alphanum_fraction: float64 0 to 1
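A minimal sketch of how rows with this schema might be inspected, assuming the dump was exported as a Parquet shard readable with pandas; the file name data.parquet is an assumption, not something stated by the dump.

# Hypothetical: load one shard and compare a row's metadata with its content column.
import pandas as pd

df = pd.read_parquet("data.parquet")  # assumed shard name
row = df.iloc[0]
print(row["max_stars_repo_name"], row["max_stars_repo_path"], row["max_stars_count"])
print(row["content"][:200])  # first 200 characters of the stored source file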
hexsha: f384fd02141c82a931f276ad65c77605c1db9f8e | size: 9,149 | ext: py | lang: Python
repo_path (stars/issues/forks): DeepLearningExamples/TensorFlow2/Segmentation/UNet_Medical/data_loading/data_loader.py
repo_name: puririshi98/benchmark | head_hexsha: 79f554f1e1cf36f62994c78e0e6e5b360f554022 | licenses: ["BSD-3-Clause"]
max_stars_count: null | max_issues_count: null | max_forks_count: null | event datetimes: null
content:
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Dataset class encapsulates the data loading"""
import multiprocessing
import os
from collections import deque
import numpy as np
import tensorflow as tf
from PIL import Image, ImageSequence
class Dataset:
"""Load, separate and prepare the data for training and prediction"""
def __init__(self, data_dir, batch_size, fold, augment=False, gpu_id=0, num_gpus=1, seed=0):
if not os.path.exists(data_dir):
raise FileNotFoundError('Cannot find data dir: {}'.format(data_dir))
self._data_dir = data_dir
self._batch_size = batch_size
self._augment = augment
self._seed = seed
images = self._load_multipage_tiff(os.path.join(self._data_dir, 'train-volume.tif'))
masks = self._load_multipage_tiff(os.path.join(self._data_dir, 'train-labels.tif'))
self._test_images = \
self._load_multipage_tiff(os.path.join(self._data_dir, 'test-volume.tif'))
train_indices, val_indices = self._get_val_train_indices(len(images), fold)
self._train_images = images[train_indices]
self._train_masks = masks[train_indices]
self._val_images = images[val_indices]
self._val_masks = masks[val_indices]
self._num_gpus = num_gpus
self._gpu_id = gpu_id
@property
def train_size(self):
return len(self._train_images)
@property
def eval_size(self):
return len(self._val_images)
@property
def test_size(self):
return len(self._test_images)
def _load_multipage_tiff(self, path):
"""Load tiff images containing many images in the channel dimension"""
return np.array([np.array(p) for p in ImageSequence.Iterator(Image.open(path))])
def _get_val_train_indices(self, length, fold, ratio=0.8):
assert 0 < ratio <= 1, "Train/total data ratio must be in range (0.0, 1.0]"
np.random.seed(self._seed)
        indices = np.arange(0, length, 1, dtype=int)  # np.int is removed in recent NumPy; built-in int is equivalent
np.random.shuffle(indices)
if fold is not None:
indices = deque(indices)
indices.rotate(fold * int((1.0 - ratio) * length))
indices = np.array(indices)
train_indices = indices[:int(ratio * len(indices))]
val_indices = indices[int(ratio * len(indices)):]
else:
train_indices = indices
val_indices = []
return train_indices, val_indices
def _normalize_inputs(self, inputs):
"""Normalize inputs"""
inputs = tf.expand_dims(tf.cast(inputs, tf.float32), -1)
# Center around zero
inputs = tf.divide(inputs, 127.5) - 1
# Resize to match output size
inputs = tf.image.resize(inputs, (388, 388))
return tf.image.resize_with_crop_or_pad(inputs, 572, 572)
def _normalize_labels(self, labels):
"""Normalize labels"""
labels = tf.expand_dims(tf.cast(labels, tf.float32), -1)
labels = tf.divide(labels, 255)
# Resize to match output size
labels = tf.image.resize(labels, (388, 388))
labels = tf.image.resize_with_crop_or_pad(labels, 572, 572)
cond = tf.less(labels, 0.5 * tf.ones(tf.shape(input=labels)))
labels = tf.where(cond, tf.zeros(tf.shape(input=labels)), tf.ones(tf.shape(input=labels)))
return tf.one_hot(tf.squeeze(tf.cast(labels, tf.int32)), 2)
@tf.function
def _preproc_samples(self, inputs, labels, augment=True):
"""Preprocess samples and perform random augmentations"""
inputs = self._normalize_inputs(inputs)
labels = self._normalize_labels(labels)
if self._augment and augment:
# Horizontal flip
h_flip = tf.random.uniform([]) > 0.5
inputs = tf.cond(pred=h_flip, true_fn=lambda: tf.image.flip_left_right(inputs), false_fn=lambda: inputs)
labels = tf.cond(pred=h_flip, true_fn=lambda: tf.image.flip_left_right(labels), false_fn=lambda: labels)
# Vertical flip
v_flip = tf.random.uniform([]) > 0.5
inputs = tf.cond(pred=v_flip, true_fn=lambda: tf.image.flip_up_down(inputs), false_fn=lambda: inputs)
labels = tf.cond(pred=v_flip, true_fn=lambda: tf.image.flip_up_down(labels), false_fn=lambda: labels)
# Prepare for batched transforms
inputs = tf.expand_dims(inputs, 0)
labels = tf.expand_dims(labels, 0)
# Random crop and resize
left = tf.random.uniform([]) * 0.3
right = 1 - tf.random.uniform([]) * 0.3
top = tf.random.uniform([]) * 0.3
bottom = 1 - tf.random.uniform([]) * 0.3
inputs = tf.image.crop_and_resize(inputs, [[top, left, bottom, right]], [0], (572, 572))
labels = tf.image.crop_and_resize(labels, [[top, left, bottom, right]], [0], (572, 572))
# Gray value variations
# Adjust brightness and keep values in range
inputs = tf.image.random_brightness(inputs, max_delta=0.2)
inputs = tf.clip_by_value(inputs, clip_value_min=-1, clip_value_max=1)
inputs = tf.squeeze(inputs, 0)
labels = tf.squeeze(labels, 0)
# Bring back labels to network's output size and remove interpolation artifacts
labels = tf.image.resize_with_crop_or_pad(labels, target_width=388, target_height=388)
cond = tf.less(labels, 0.5 * tf.ones(tf.shape(input=labels)))
labels = tf.where(cond, tf.zeros(tf.shape(input=labels)), tf.ones(tf.shape(input=labels)))
return inputs, labels
@tf.function
def _preproc_eval_samples(self, inputs, labels):
"""Preprocess samples and perform random augmentations"""
inputs = self._normalize_inputs(inputs)
labels = self._normalize_labels(labels)
# Bring back labels to network's output size and remove interpolation artifacts
labels = tf.image.resize_with_crop_or_pad(labels, target_width=388, target_height=388)
cond = tf.less(labels, 0.5 * tf.ones(tf.shape(input=labels)))
labels = tf.where(cond, tf.zeros(tf.shape(input=labels)), tf.ones(tf.shape(input=labels)))
return (inputs, labels)
def train_fn(self, drop_remainder=False):
"""Input function for training"""
dataset = tf.data.Dataset.from_tensor_slices(
(self._train_images, self._train_masks))
dataset = dataset.shard(self._num_gpus, self._gpu_id)
dataset = dataset.repeat()
dataset = dataset.shuffle(self._batch_size * 3)
dataset = dataset.map(self._preproc_samples,
num_parallel_calls=multiprocessing.cpu_count()//self._num_gpus)
dataset = dataset.batch(self._batch_size, drop_remainder=drop_remainder)
dataset = dataset.prefetch(self._batch_size)
return dataset
def eval_fn(self, count, drop_remainder=False):
"""Input function for validation"""
dataset = tf.data.Dataset.from_tensor_slices(
(self._val_images, self._val_masks))
dataset = dataset.repeat(count=count)
dataset = dataset.map(self._preproc_eval_samples,
num_parallel_calls=multiprocessing.cpu_count())
dataset = dataset.batch(self._batch_size, drop_remainder=drop_remainder)
dataset = dataset.prefetch(self._batch_size)
return dataset
def test_fn(self, count, drop_remainder=False):
"""Input function for testing"""
dataset = tf.data.Dataset.from_tensor_slices(
self._test_images)
dataset = dataset.repeat(count=count)
dataset = dataset.map(self._normalize_inputs)
dataset = dataset.batch(self._batch_size, drop_remainder=drop_remainder)
dataset = dataset.prefetch(self._batch_size)
return dataset
def synth_fn(self):
"""Synthetic data function for testing"""
inputs = tf.random.truncated_normal((572, 572, 1), dtype=tf.float32, mean=127.5, stddev=1, seed=self._seed,
name='synth_inputs')
masks = tf.random.truncated_normal((388, 388, 2), dtype=tf.float32, mean=0.01, stddev=0.1, seed=self._seed,
name='synth_masks')
dataset = tf.data.Dataset.from_tensors((inputs, masks))
dataset = dataset.cache()
dataset = dataset.repeat()
dataset = dataset.batch(self._batch_size)
dataset = dataset.prefetch(buffer_size=tf.data.experimental.AUTOTUNE)
return dataset
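A minimal usage sketch for the Dataset class above, assuming the TIFF volumes the constructor loads (train-volume.tif, train-labels.tif, test-volume.tif) are present under ./data; the directory, batch size, and fold value are illustrative only.

# Hypothetical usage of the Dataset defined above; paths and sizes are assumptions.
dataset = Dataset(data_dir='./data', batch_size=8, fold=0, augment=True, gpu_id=0, num_gpus=1, seed=0)
train_ds = dataset.train_fn(drop_remainder=True)  # tf.data.Dataset yielding (image, label) batches
for images, labels in train_ds.take(1):
    print(images.shape)  # expected (8, 572, 572, 1): padded network input
    print(labels.shape)  # expected (8, 388, 388, 2): one-hot masks at the network output size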
avg_line_length: 41.586364 | max_line_length: 116 | alphanum_fraction: 0.648923

hexsha: 7944e35d8abf604e7f09702fc50c7e62cc4d009c | size: 49,372 | ext: py | lang: Python
repo_path (stars/issues/forks): django/db/models/sql/compiler.py
repo_name: doismellburning/django | head_hexsha: 039465a6a7a18f48ea77ceadb6949990c0ec92e1 | licenses: ["BSD-3-Clause"]
max_stars_count: null | max_issues_count: null | max_forks_count: null | event datetimes: null
content:
from itertools import chain
import re
import warnings
from django.core.exceptions import FieldError
from django.db.models.constants import LOOKUP_SEP
from django.db.models.expressions import OrderBy, Random, RawSQL, Ref
from django.db.models.query_utils import select_related_descend, QueryWrapper
from django.db.models.sql.constants import (CURSOR, SINGLE, MULTI, NO_RESULTS,
ORDER_DIR, GET_ITERATOR_CHUNK_SIZE)
from django.db.models.sql.datastructures import EmptyResultSet
from django.db.models.sql.query import get_order_dir, Query
from django.db.transaction import TransactionManagementError
from django.db.utils import DatabaseError
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.six.moves import zip
class SQLCompiler(object):
def __init__(self, query, connection, using):
self.query = query
self.connection = connection
self.using = using
self.quote_cache = {'*': '*'}
# The select, klass_info, and annotations are needed by QuerySet.iterator()
# these are set as a side-effect of executing the query. Note that we calculate
# separately a list of extra select columns needed for grammatical correctness
# of the query, but these columns are not included in self.select.
self.select = None
self.annotation_col_map = None
self.klass_info = None
self.ordering_parts = re.compile(r'(.*)\s(ASC|DESC)(.*)')
def setup_query(self):
if all(self.query.alias_refcount[a] == 0 for a in self.query.tables):
self.query.get_initial_alias()
self.select, self.klass_info, self.annotation_col_map = self.get_select()
self.col_count = len(self.select)
def pre_sql_setup(self):
"""
Does any necessary class setup immediately prior to producing SQL. This
is for things that can't necessarily be done in __init__ because we
might not have all the pieces in place at that time.
"""
self.setup_query()
order_by = self.get_order_by()
extra_select = self.get_extra_select(order_by, self.select)
group_by = self.get_group_by(self.select + extra_select, order_by)
return extra_select, order_by, group_by
def get_group_by(self, select, order_by):
"""
Returns a list of 2-tuples of form (sql, params).
The logic of what exactly the GROUP BY clause contains is hard
to describe in other words than "if it passes the test suite,
then it is correct".
"""
# Some examples:
# SomeModel.objects.annotate(Count('somecol'))
# GROUP BY: all fields of the model
#
# SomeModel.objects.values('name').annotate(Count('somecol'))
# GROUP BY: name
#
# SomeModel.objects.annotate(Count('somecol')).values('name')
# GROUP BY: all cols of the model
#
# SomeModel.objects.values('name', 'pk').annotate(Count('somecol')).values('pk')
# GROUP BY: name, pk
#
# SomeModel.objects.values('name').annotate(Count('somecol')).values('pk')
# GROUP BY: name, pk
#
# In fact, the self.query.group_by is the minimal set to GROUP BY. It
# can't be ever restricted to a smaller set, but additional columns in
# HAVING, ORDER BY, and SELECT clauses are added to it. Unfortunately
# the end result is that it is impossible to force the query to have
# a chosen GROUP BY clause - you can almost do this by using the form:
# .values(*wanted_cols).annotate(AnAggregate())
# but any later annotations, extra selects, values calls that
# refer some column outside of the wanted_cols, order_by, or even
# filter calls can alter the GROUP BY clause.
# The query.group_by is either None (no GROUP BY at all), True
# (group by select fields), or a list of expressions to be added
# to the group by.
if self.query.group_by is None:
return []
expressions = []
if self.query.group_by is not True:
# If the group by is set to a list (by .values() call most likely),
# then we need to add everything in it to the GROUP BY clause.
# Backwards compatibility hack for setting query.group_by. Remove
# when we have public API way of forcing the GROUP BY clause.
# Converts string references to expressions.
for expr in self.query.group_by:
if not hasattr(expr, 'as_sql'):
expressions.append(self.query.resolve_ref(expr))
else:
expressions.append(expr)
# Note that even if the group_by is set, it is only the minimal
# set to group by. So, we need to add cols in select, order_by, and
# having into the select in any case.
for expr, _, _ in select:
cols = expr.get_group_by_cols()
for col in cols:
expressions.append(col)
for expr, (sql, params, is_ref) in order_by:
if expr.contains_aggregate:
continue
# We can skip References to select clause, as all expressions in
# the select clause are already part of the group by.
if is_ref:
continue
expressions.extend(expr.get_source_expressions())
having = self.query.having.get_group_by_cols()
for expr in having:
expressions.append(expr)
result = []
seen = set()
expressions = self.collapse_group_by(expressions, having)
for expr in expressions:
sql, params = self.compile(expr)
if (sql, tuple(params)) not in seen:
result.append((sql, params))
seen.add((sql, tuple(params)))
return result
def collapse_group_by(self, expressions, having):
# If the DB can group by primary key, then group by the primary key of
# query's main model. Note that for PostgreSQL the GROUP BY clause must
# include the primary key of every table, but for MySQL it is enough to
# have the main table's primary key. Currently only the MySQL form is
# implemented.
# MySQLism: however, columns in HAVING clause must be added to the
# GROUP BY.
if self.connection.features.allows_group_by_pk:
# The logic here is: if the main model's primary key is in the
# query, then set new_expressions to that field. If that happens,
# then also add having expressions to group by.
pk = None
for expr in expressions:
if (expr.output_field.primary_key and
getattr(expr.output_field, 'model') == self.query.model):
pk = expr
if pk:
expressions = [pk] + [expr for expr in expressions if expr in having]
return expressions
def get_select(self):
"""
Returns three values:
- a list of 3-tuples of (expression, (sql, params), alias)
- a klass_info structure,
- a dictionary of annotations
The (sql, params) is what the expression will produce, and alias is the
"AS alias" for the column (possibly None).
The klass_info structure contains the following information:
- Which model to instantiate
- Which columns for that model are present in the query (by
position of the select clause).
          - related_klass_infos: [f, klass_info] to descend into
The annotations is a dictionary of {'attname': column position} values.
"""
select = []
klass_info = None
annotations = {}
select_idx = 0
for alias, (sql, params) in self.query.extra_select.items():
annotations[alias] = select_idx
select.append((RawSQL(sql, params), alias))
select_idx += 1
assert not (self.query.select and self.query.default_cols)
if self.query.default_cols:
select_list = []
for c in self.get_default_columns():
select_list.append(select_idx)
select.append((c, None))
select_idx += 1
klass_info = {
'model': self.query.model,
'select_fields': select_list,
}
# self.query.select is a special case. These columns never go to
# any model.
for col in self.query.select:
select.append((col, None))
select_idx += 1
for alias, annotation in self.query.annotation_select.items():
annotations[alias] = select_idx
select.append((annotation, alias))
select_idx += 1
if self.query.select_related:
related_klass_infos = self.get_related_selections(select)
klass_info['related_klass_infos'] = related_klass_infos
def get_select_from_parent(klass_info):
for ki in klass_info['related_klass_infos']:
if ki['from_parent']:
ki['select_fields'] = (klass_info['select_fields'] +
ki['select_fields'])
get_select_from_parent(ki)
get_select_from_parent(klass_info)
ret = []
for col, alias in select:
ret.append((col, self.compile(col, select_format=True), alias))
return ret, klass_info, annotations
def get_order_by(self):
"""
Returns a list of 2-tuples of form (expr, (sql, params)) for the
ORDER BY clause.
The order_by clause can alter the select clause (for example it
can add aliases to clauses that do not yet have one, or it can
add totally new select clauses).
"""
if self.query.extra_order_by:
ordering = self.query.extra_order_by
elif not self.query.default_ordering:
ordering = self.query.order_by
else:
ordering = (self.query.order_by or self.query.get_meta().ordering or [])
if self.query.standard_ordering:
asc, desc = ORDER_DIR['ASC']
else:
asc, desc = ORDER_DIR['DESC']
order_by = []
for pos, field in enumerate(ordering):
if hasattr(field, 'resolve_expression'):
if not isinstance(field, OrderBy):
field = field.asc()
if not self.query.standard_ordering:
field.reverse_ordering()
order_by.append((field, False))
continue
if field == '?': # random
order_by.append((OrderBy(Random()), False))
continue
col, order = get_order_dir(field, asc)
descending = True if order == 'DESC' else False
if col in self.query.annotation_select:
order_by.append((
OrderBy(Ref(col, self.query.annotation_select[col]), descending=descending),
True))
continue
if '.' in field:
# This came in through an extra(order_by=...) addition. Pass it
# on verbatim.
table, col = col.split('.', 1)
order_by.append((
OrderBy(
RawSQL('%s.%s' % (self.quote_name_unless_alias(table), col), []),
descending=descending
), False))
continue
if not self.query._extra or col not in self.query._extra:
# 'col' is of the form 'field' or 'field1__field2' or
# '-field1__field2__field', etc.
order_by.extend(self.find_ordering_name(
field, self.query.get_meta(), default_order=asc))
else:
if col not in self.query.extra_select:
order_by.append((
OrderBy(RawSQL(*self.query.extra[col]), descending=descending),
False))
else:
order_by.append((
OrderBy(Ref(col, RawSQL(*self.query.extra[col])), descending=descending),
True))
result = []
seen = set()
for expr, is_ref in order_by:
resolved = expr.resolve_expression(
self.query, allow_joins=True, reuse=None)
sql, params = self.compile(resolved)
# Don't add the same column twice, but the order direction is
# not taken into account so we strip it. When this entire method
# is refactored into expressions, then we can check each part as we
# generate it.
without_ordering = self.ordering_parts.search(sql).group(1)
if (without_ordering, tuple(params)) in seen:
continue
seen.add((without_ordering, tuple(params)))
result.append((resolved, (sql, params, is_ref)))
return result
def get_extra_select(self, order_by, select):
extra_select = []
select_sql = [t[1] for t in select]
if self.query.distinct and not self.query.distinct_fields:
for expr, (sql, params, is_ref) in order_by:
without_ordering = self.ordering_parts.search(sql).group(1)
if not is_ref and (without_ordering, params) not in select_sql:
extra_select.append((expr, (without_ordering, params), None))
return extra_select
def __call__(self, name):
"""
Backwards-compatibility shim so that calling a SQLCompiler is equivalent to
calling its quote_name_unless_alias method.
"""
warnings.warn(
"Calling a SQLCompiler directly is deprecated. "
"Call compiler.quote_name_unless_alias instead.",
RemovedInDjango20Warning, stacklevel=2)
return self.quote_name_unless_alias(name)
def quote_name_unless_alias(self, name):
"""
A wrapper around connection.ops.quote_name that doesn't quote aliases
for table names. This avoids problems with some SQL dialects that treat
quoted strings specially (e.g. PostgreSQL).
"""
if name in self.quote_cache:
return self.quote_cache[name]
if ((name in self.query.alias_map and name not in self.query.table_map) or
name in self.query.extra_select or name in self.query.external_aliases):
self.quote_cache[name] = name
return name
r = self.connection.ops.quote_name(name)
self.quote_cache[name] = r
return r
def compile(self, node, select_format=False):
vendor_impl = getattr(node, 'as_' + self.connection.vendor, None)
if vendor_impl:
sql, params = vendor_impl(self, self.connection)
else:
sql, params = node.as_sql(self, self.connection)
if select_format:
return node.output_field.select_format(self, sql, params)
return sql, params
def as_sql(self, with_limits=True, with_col_aliases=False):
"""
Creates the SQL for this query. Returns the SQL string and list of
parameters.
If 'with_limits' is False, any limit/offset information is not included
in the query.
"""
# After executing the query, we must get rid of any joins the query
# setup created. So, take note of alias counts before the query ran.
# However we do not want to get rid of stuff done in pre_sql_setup(),
# as the pre_sql_setup will modify query state in a way that forbids
# another run of it.
refcounts_before = self.query.alias_refcount.copy()
try:
extra_select, order_by, group_by = self.pre_sql_setup()
if with_limits and self.query.low_mark == self.query.high_mark:
return '', ()
distinct_fields = self.get_distinct()
# This must come after 'select', 'ordering', and 'distinct' -- see
# docstring of get_from_clause() for details.
from_, f_params = self.get_from_clause()
where, w_params = self.compile(self.query.where)
having, h_params = self.compile(self.query.having)
params = []
result = ['SELECT']
if self.query.distinct:
result.append(self.connection.ops.distinct_sql(distinct_fields))
out_cols = []
col_idx = 1
for _, (s_sql, s_params), alias in self.select + extra_select:
if alias:
s_sql = '%s AS %s' % (s_sql, self.connection.ops.quote_name(alias))
elif with_col_aliases:
s_sql = '%s AS %s' % (s_sql, 'Col%d' % col_idx)
col_idx += 1
params.extend(s_params)
out_cols.append(s_sql)
result.append(', '.join(out_cols))
result.append('FROM')
result.extend(from_)
params.extend(f_params)
if where:
result.append('WHERE %s' % where)
params.extend(w_params)
grouping = []
for g_sql, g_params in group_by:
grouping.append(g_sql)
params.extend(g_params)
if grouping:
if distinct_fields:
raise NotImplementedError(
"annotate() + distinct(fields) is not implemented.")
if not order_by:
order_by = self.connection.ops.force_no_ordering()
result.append('GROUP BY %s' % ', '.join(grouping))
if having:
result.append('HAVING %s' % having)
params.extend(h_params)
if order_by:
ordering = []
for _, (o_sql, o_params, _) in order_by:
ordering.append(o_sql)
params.extend(o_params)
result.append('ORDER BY %s' % ', '.join(ordering))
if with_limits:
if self.query.high_mark is not None:
result.append('LIMIT %d' % (self.query.high_mark - self.query.low_mark))
if self.query.low_mark:
if self.query.high_mark is None:
val = self.connection.ops.no_limit_value()
if val:
result.append('LIMIT %d' % val)
result.append('OFFSET %d' % self.query.low_mark)
if self.query.select_for_update and self.connection.features.has_select_for_update:
if self.connection.get_autocommit():
raise TransactionManagementError(
"select_for_update cannot be used outside of a transaction."
)
# If we've been asked for a NOWAIT query but the backend does
# not support it, raise a DatabaseError otherwise we could get
# an unexpected deadlock.
nowait = self.query.select_for_update_nowait
if nowait and not self.connection.features.has_select_for_update_nowait:
raise DatabaseError('NOWAIT is not supported on this database backend.')
result.append(self.connection.ops.for_update_sql(nowait=nowait))
return ' '.join(result), tuple(params)
finally:
# Finally do cleanup - get rid of the joins we created above.
self.query.reset_refcounts(refcounts_before)
def as_nested_sql(self):
"""
Perform the same functionality as the as_sql() method, returning an
SQL string and parameters. However, the alias prefixes are bumped
beforehand (in a copy -- the current query isn't changed), and any
ordering is removed if the query is unsliced.
Used when nesting this query inside another.
"""
obj = self.query.clone()
if obj.low_mark == 0 and obj.high_mark is None and not self.query.distinct_fields:
# If there is no slicing in use, then we can safely drop all ordering
obj.clear_ordering(True)
return obj.get_compiler(connection=self.connection).as_sql()
def get_default_columns(self, start_alias=None, opts=None, from_parent=None):
"""
Computes the default columns for selecting every field in the base
model. Will sometimes be called to pull in related models (e.g. via
select_related), in which case "opts" and "start_alias" will be given
to provide a starting point for the traversal.
Returns a list of strings, quoted appropriately for use in SQL
directly, as well as a set of aliases used in the select statement (if
'as_pairs' is True, returns a list of (alias, col_name) pairs instead
of strings as the first component and None as the second component).
"""
result = []
if opts is None:
opts = self.query.get_meta()
only_load = self.deferred_to_columns()
if not start_alias:
start_alias = self.query.get_initial_alias()
# The 'seen_models' is used to optimize checking the needed parent
# alias for a given field. This also includes None -> start_alias to
# be used by local fields.
seen_models = {None: start_alias}
for field in opts.concrete_fields:
model = field.model._meta.concrete_model
# A proxy model will have a different model and concrete_model. We
# will assign None if the field belongs to this model.
if model == opts.model:
model = None
if from_parent and model is not None and issubclass(
from_parent._meta.concrete_model, model._meta.concrete_model):
# Avoid loading data for already loaded parents.
# We end up here in the case select_related() resolution
# proceeds from parent model to child model. In that case the
# parent model data is already present in the SELECT clause,
# and we want to avoid reloading the same data again.
continue
if field.model in only_load and field.attname not in only_load[field.model]:
continue
alias = self.query.join_parent_model(opts, model, start_alias,
seen_models)
column = field.get_col(alias)
result.append(column)
return result
def get_distinct(self):
"""
Returns a quoted list of fields to use in DISTINCT ON part of the query.
Note that this method can alter the tables in the query, and thus it
must be called before get_from_clause().
"""
qn = self.quote_name_unless_alias
qn2 = self.connection.ops.quote_name
result = []
opts = self.query.get_meta()
for name in self.query.distinct_fields:
parts = name.split(LOOKUP_SEP)
_, targets, alias, joins, path, _ = self._setup_joins(parts, opts, None)
targets, alias, _ = self.query.trim_joins(targets, joins, path)
for target in targets:
result.append("%s.%s" % (qn(alias), qn2(target.column)))
return result
def find_ordering_name(self, name, opts, alias=None, default_order='ASC',
already_seen=None):
"""
Returns the table alias (the name might be ambiguous, the alias will
not be) and column name for ordering by the given 'name' parameter.
The 'name' is of the form 'field1__field2__...__fieldN'.
"""
name, order = get_order_dir(name, default_order)
descending = True if order == 'DESC' else False
pieces = name.split(LOOKUP_SEP)
field, targets, alias, joins, path, opts = self._setup_joins(pieces, opts, alias)
# If we get to this point and the field is a relation to another model,
# append the default ordering for that model unless the attribute name
# of the field is specified.
if field.rel and path and opts.ordering and name != field.attname:
# Firstly, avoid infinite loops.
if not already_seen:
already_seen = set()
join_tuple = tuple(self.query.alias_map[j].table_name for j in joins)
if join_tuple in already_seen:
raise FieldError('Infinite loop caused by ordering.')
already_seen.add(join_tuple)
results = []
for item in opts.ordering:
results.extend(self.find_ordering_name(item, opts, alias,
order, already_seen))
return results
targets, alias, _ = self.query.trim_joins(targets, joins, path)
return [(OrderBy(t.get_col(alias), descending=descending), False) for t in targets]
def _setup_joins(self, pieces, opts, alias):
"""
A helper method for get_order_by and get_distinct.
Note that get_ordering and get_distinct must produce same target
columns on same input, as the prefixes of get_ordering and get_distinct
must match. Executing SQL where this is not true is an error.
"""
if not alias:
alias = self.query.get_initial_alias()
field, targets, opts, joins, path = self.query.setup_joins(
pieces, opts, alias)
alias = joins[-1]
return field, targets, alias, joins, path, opts
def get_from_clause(self):
"""
Returns a list of strings that are joined together to go after the
"FROM" part of the query, as well as a list any extra parameters that
        need to be included. Subclasses can override this to create a
from-clause via a "select".
This should only be called after any SQL construction methods that
might change the tables we need. This means the select columns,
ordering and distinct must be done first.
"""
result = []
params = []
for alias in self.query.tables:
if not self.query.alias_refcount[alias]:
continue
try:
from_clause = self.query.alias_map[alias]
except KeyError:
# Extra tables can end up in self.tables, but not in the
# alias_map if they aren't in a join. That's OK. We skip them.
continue
clause_sql, clause_params = self.compile(from_clause)
result.append(clause_sql)
params.extend(clause_params)
for t in self.query.extra_tables:
alias, _ = self.query.table_alias(t)
# Only add the alias if it's not already present (the table_alias()
# call increments the refcount, so an alias refcount of one means
# this is the only reference).
if alias not in self.query.alias_map or self.query.alias_refcount[alias] == 1:
result.append(', %s' % self.quote_name_unless_alias(alias))
return result, params
def get_related_selections(self, select, opts=None, root_alias=None, cur_depth=1,
requested=None, restricted=None):
"""
Fill in the information needed for a select_related query. The current
depth is measured as the number of connections away from the root model
(for example, cur_depth=1 means we are looking at models with direct
connections to the root model).
"""
def _get_field_choices():
direct_choices = (f.name for f in opts.fields if f.is_relation)
reverse_choices = (
f.field.related_query_name()
for f in opts.related_objects if f.field.unique
)
return chain(direct_choices, reverse_choices)
related_klass_infos = []
if not restricted and self.query.max_depth and cur_depth > self.query.max_depth:
# We've recursed far enough; bail out.
return related_klass_infos
if not opts:
opts = self.query.get_meta()
root_alias = self.query.get_initial_alias()
only_load = self.query.get_loaded_field_names()
# Setup for the case when only particular related fields should be
# included in the related selection.
fields_found = set()
if requested is None:
if isinstance(self.query.select_related, dict):
requested = self.query.select_related
restricted = True
else:
restricted = False
def get_related_klass_infos(klass_info, related_klass_infos):
klass_info['related_klass_infos'] = related_klass_infos
for f in opts.fields:
field_model = f.model._meta.concrete_model
fields_found.add(f.name)
if restricted:
next = requested.get(f.name, {})
if not f.is_relation:
# If a non-related field is used like a relation,
# or if a single non-relational field is given.
if next or (cur_depth == 1 and f.name in requested):
raise FieldError(
"Non-relational field given in select_related: '%s'. "
"Choices are: %s" % (
f.name,
", ".join(_get_field_choices()) or '(none)',
)
)
else:
next = False
if not select_related_descend(f, restricted, requested,
only_load.get(field_model)):
continue
klass_info = {
'model': f.rel.to,
'field': f,
'reverse': False,
'from_parent': False,
}
related_klass_infos.append(klass_info)
select_fields = []
_, _, _, joins, _ = self.query.setup_joins(
[f.name], opts, root_alias)
alias = joins[-1]
columns = self.get_default_columns(start_alias=alias, opts=f.rel.to._meta)
for col in columns:
select_fields.append(len(select))
select.append((col, None))
klass_info['select_fields'] = select_fields
next_klass_infos = self.get_related_selections(
select, f.rel.to._meta, alias, cur_depth + 1, next, restricted)
get_related_klass_infos(klass_info, next_klass_infos)
if restricted:
related_fields = [
(o.field, o.related_model)
for o in opts.related_objects
if o.field.unique and not o.many_to_many
]
for f, model in related_fields:
if not select_related_descend(f, restricted, requested,
only_load.get(model), reverse=True):
continue
related_field_name = f.related_query_name()
fields_found.add(related_field_name)
_, _, _, joins, _ = self.query.setup_joins([related_field_name], opts, root_alias)
alias = joins[-1]
from_parent = issubclass(model, opts.model)
klass_info = {
'model': model,
'field': f,
'reverse': True,
'from_parent': from_parent,
}
related_klass_infos.append(klass_info)
select_fields = []
columns = self.get_default_columns(
start_alias=alias, opts=model._meta, from_parent=opts.model)
for col in columns:
select_fields.append(len(select))
select.append((col, None))
klass_info['select_fields'] = select_fields
next = requested.get(f.related_query_name(), {})
next_klass_infos = self.get_related_selections(
select, model._meta, alias, cur_depth + 1,
next, restricted)
get_related_klass_infos(klass_info, next_klass_infos)
fields_not_found = set(requested.keys()).difference(fields_found)
if fields_not_found:
invalid_fields = ("'%s'" % s for s in fields_not_found)
raise FieldError(
'Invalid field name(s) given in select_related: %s. '
'Choices are: %s' % (
', '.join(invalid_fields),
', '.join(_get_field_choices()) or '(none)',
)
)
return related_klass_infos
def deferred_to_columns(self):
"""
Converts the self.deferred_loading data structure to mapping of table
names to sets of column names which are to be loaded. Returns the
dictionary.
"""
columns = {}
self.query.deferred_to_data(columns, self.query.get_loaded_field_names_cb)
return columns
def get_converters(self, expressions):
converters = {}
for i, expression in enumerate(expressions):
if expression:
backend_converters = self.connection.ops.get_db_converters(expression)
field_converters = expression.get_db_converters(self.connection)
if backend_converters or field_converters:
converters[i] = (backend_converters, field_converters, expression)
return converters
def apply_converters(self, row, converters):
row = list(row)
for pos, (backend_converters, field_converters, field) in converters.items():
value = row[pos]
for converter in backend_converters:
value = converter(value, field, self.query.context)
for converter in field_converters:
value = converter(value, self.connection, self.query.context)
row[pos] = value
return tuple(row)
def results_iter(self, results=None):
"""
Returns an iterator over the results from executing this query.
"""
converters = None
if results is None:
results = self.execute_sql(MULTI)
fields = [s[0] for s in self.select[0:self.col_count]]
converters = self.get_converters(fields)
for rows in results:
for row in rows:
if converters:
row = self.apply_converters(row, converters)
yield row
def has_results(self):
"""
Backends (e.g. NoSQL) can override this in order to use optimized
versions of "query has any results."
"""
# This is always executed on a query clone, so we can modify self.query
self.query.add_extra({'a': 1}, None, None, None, None, None)
self.query.set_extra_mask(['a'])
return bool(self.execute_sql(SINGLE))
def execute_sql(self, result_type=MULTI):
"""
Run the query against the database and returns the result(s). The
return value is a single data item if result_type is SINGLE, or an
iterator over the results if the result_type is MULTI.
result_type is either MULTI (use fetchmany() to retrieve all rows),
SINGLE (only retrieve a single row), or None. In this last case, the
cursor is returned if any query is executed, since it's used by
subclasses such as InsertQuery). It's possible, however, that no query
is needed, as the filters describe an empty set. In that case, None is
returned, to avoid any unnecessary database interaction.
"""
if not result_type:
result_type = NO_RESULTS
try:
sql, params = self.as_sql()
if not sql:
raise EmptyResultSet
except EmptyResultSet:
if result_type == MULTI:
return iter([])
else:
return
cursor = self.connection.cursor()
try:
cursor.execute(sql, params)
except Exception:
cursor.close()
raise
if result_type == CURSOR:
# Caller didn't specify a result_type, so just give them back the
# cursor to process (and close).
return cursor
if result_type == SINGLE:
try:
val = cursor.fetchone()
if val:
return val[0:self.col_count]
return val
finally:
# done with the cursor
cursor.close()
if result_type == NO_RESULTS:
cursor.close()
return
result = cursor_iter(
cursor, self.connection.features.empty_fetchmany_value,
self.col_count
)
if not self.connection.features.can_use_chunked_reads:
try:
# If we are using non-chunked reads, we return the same data
# structure as normally, but ensure it is all read into memory
# before going any further.
return list(result)
finally:
# done with the cursor
cursor.close()
return result
def as_subquery_condition(self, alias, columns, compiler):
qn = compiler.quote_name_unless_alias
qn2 = self.connection.ops.quote_name
if len(columns) == 1:
sql, params = self.as_sql()
return '%s.%s IN (%s)' % (qn(alias), qn2(columns[0]), sql), params
for index, select_col in enumerate(self.query.select):
lhs_sql, lhs_params = self.compile(select_col)
rhs = '%s.%s' % (qn(alias), qn2(columns[index]))
self.query.where.add(
QueryWrapper('%s = %s' % (lhs_sql, rhs), lhs_params), 'AND')
sql, params = self.as_sql()
return 'EXISTS (%s)' % sql, params
class SQLInsertCompiler(SQLCompiler):
def __init__(self, *args, **kwargs):
self.return_id = False
super(SQLInsertCompiler, self).__init__(*args, **kwargs)
def placeholder(self, field, val):
if field is None:
# A field value of None means the value is raw.
return val
elif hasattr(field, 'get_placeholder'):
# Some fields (e.g. geo fields) need special munging before
# they can be inserted.
return field.get_placeholder(val, self, self.connection)
else:
# Return the common case for the placeholder
return '%s'
def as_sql(self):
# We don't need quote_name_unless_alias() here, since these are all
# going to be column names (so we can avoid the extra overhead).
qn = self.connection.ops.quote_name
opts = self.query.get_meta()
result = ['INSERT INTO %s' % qn(opts.db_table)]
has_fields = bool(self.query.fields)
fields = self.query.fields if has_fields else [opts.pk]
result.append('(%s)' % ', '.join(qn(f.column) for f in fields))
if has_fields:
params = values = [
[
f.get_db_prep_save(
getattr(obj, f.attname) if self.query.raw else f.pre_save(obj, True),
connection=self.connection
) for f in fields
]
for obj in self.query.objs
]
else:
values = [[self.connection.ops.pk_default_value()] for obj in self.query.objs]
params = [[]]
fields = [None]
can_bulk = (not any(hasattr(field, "get_placeholder") for field in fields) and
not self.return_id and self.connection.features.has_bulk_insert)
if can_bulk:
placeholders = [["%s"] * len(fields)]
else:
placeholders = [
[self.placeholder(field, v) for field, v in zip(fields, val)]
for val in values
]
# Oracle Spatial needs to remove some values due to #10888
params = self.connection.ops.modify_insert_params(placeholders, params)
if self.return_id and self.connection.features.can_return_id_from_insert:
params = params[0]
col = "%s.%s" % (qn(opts.db_table), qn(opts.pk.column))
result.append("VALUES (%s)" % ", ".join(placeholders[0]))
r_fmt, r_params = self.connection.ops.return_insert_id()
# Skip empty r_fmt to allow subclasses to customize behavior for
# 3rd party backends. Refs #19096.
if r_fmt:
result.append(r_fmt % col)
params += r_params
return [(" ".join(result), tuple(params))]
if can_bulk:
result.append(self.connection.ops.bulk_insert_sql(fields, len(values)))
return [(" ".join(result), tuple(v for val in values for v in val))]
else:
return [
(" ".join(result + ["VALUES (%s)" % ", ".join(p)]), vals)
for p, vals in zip(placeholders, params)
]
def execute_sql(self, return_id=False):
assert not (return_id and len(self.query.objs) != 1)
self.return_id = return_id
with self.connection.cursor() as cursor:
for sql, params in self.as_sql():
cursor.execute(sql, params)
if not (return_id and cursor):
return
if self.connection.features.can_return_id_from_insert:
return self.connection.ops.fetch_returned_insert_id(cursor)
return self.connection.ops.last_insert_id(cursor,
self.query.get_meta().db_table, self.query.get_meta().pk.column)
class SQLDeleteCompiler(SQLCompiler):
def as_sql(self):
"""
Creates the SQL for this query. Returns the SQL string and list of
parameters.
"""
assert len(self.query.tables) == 1, \
"Can only delete from one table at a time."
qn = self.quote_name_unless_alias
result = ['DELETE FROM %s' % qn(self.query.tables[0])]
where, params = self.compile(self.query.where)
if where:
result.append('WHERE %s' % where)
return ' '.join(result), tuple(params)
class SQLUpdateCompiler(SQLCompiler):
def as_sql(self):
"""
Creates the SQL for this query. Returns the SQL string and list of
parameters.
"""
self.pre_sql_setup()
if not self.query.values:
return '', ()
table = self.query.tables[0]
qn = self.quote_name_unless_alias
result = ['UPDATE %s' % qn(table)]
result.append('SET')
values, update_params = [], []
for field, model, val in self.query.values:
if hasattr(val, 'resolve_expression'):
val = val.resolve_expression(self.query, allow_joins=False, for_save=True)
if val.contains_aggregate:
raise FieldError("Aggregate functions are not allowed in this query")
elif hasattr(val, 'prepare_database_save'):
if field.rel:
val = val.prepare_database_save(field)
else:
raise TypeError("Database is trying to update a relational field "
"of type %s with a value of type %s. Make sure "
"you are setting the correct relations" %
(field.__class__.__name__, val.__class__.__name__))
else:
val = field.get_db_prep_save(val, connection=self.connection)
# Getting the placeholder for the field.
if hasattr(field, 'get_placeholder'):
placeholder = field.get_placeholder(val, self, self.connection)
else:
placeholder = '%s'
name = field.column
if hasattr(val, 'as_sql'):
sql, params = self.compile(val)
values.append('%s = %s' % (qn(name), sql))
update_params.extend(params)
elif val is not None:
values.append('%s = %s' % (qn(name), placeholder))
update_params.append(val)
else:
values.append('%s = NULL' % qn(name))
if not values:
return '', ()
result.append(', '.join(values))
where, params = self.compile(self.query.where)
if where:
result.append('WHERE %s' % where)
return ' '.join(result), tuple(update_params + params)
def execute_sql(self, result_type):
"""
Execute the specified update. Returns the number of rows affected by
the primary update query. The "primary update query" is the first
non-empty query that is executed. Row counts for any subsequent,
related queries are not available.
"""
cursor = super(SQLUpdateCompiler, self).execute_sql(result_type)
try:
rows = cursor.rowcount if cursor else 0
is_empty = cursor is None
finally:
if cursor:
cursor.close()
for query in self.query.get_related_updates():
aux_rows = query.get_compiler(self.using).execute_sql(result_type)
if is_empty and aux_rows:
rows = aux_rows
is_empty = False
return rows
def pre_sql_setup(self):
"""
If the update depends on results from other tables, we need to do some
munging of the "where" conditions to match the format required for
(portable) SQL updates. That is done here.
Further, if we are going to be running multiple updates, we pull out
the id values to update at this point so that they don't change as a
result of the progressive updates.
"""
refcounts_before = self.query.alias_refcount.copy()
# Ensure base table is in the query
self.query.get_initial_alias()
count = self.query.count_active_tables()
if not self.query.related_updates and count == 1:
return
query = self.query.clone(klass=Query)
query.select_related = False
query.clear_ordering(True)
query._extra = {}
query.select = []
query.add_fields([query.get_meta().pk.name])
super(SQLUpdateCompiler, self).pre_sql_setup()
must_pre_select = count > 1 and not self.connection.features.update_can_self_select
# Now we adjust the current query: reset the where clause and get rid
# of all the tables we don't need (since they're in the sub-select).
self.query.where = self.query.where_class()
if self.query.related_updates or must_pre_select:
# Either we're using the idents in multiple update queries (so
# don't want them to change), or the db backend doesn't support
# selecting from the updating table (e.g. MySQL).
idents = []
for rows in query.get_compiler(self.using).execute_sql(MULTI):
idents.extend(r[0] for r in rows)
self.query.add_filter(('pk__in', idents))
self.query.related_ids = idents
else:
# The fast path. Filters and updates in one query.
self.query.add_filter(('pk__in', query))
self.query.reset_refcounts(refcounts_before)
class SQLAggregateCompiler(SQLCompiler):
def as_sql(self):
"""
Creates the SQL for this query. Returns the SQL string and list of
parameters.
"""
# Empty SQL for the inner query is a marker that the inner query
# isn't going to produce any results. This can happen when doing
# LIMIT 0 queries (generated by qs[:0]) for example.
if not self.query.subquery:
raise EmptyResultSet
sql, params = [], []
for annotation in self.query.annotation_select.values():
agg_sql, agg_params = self.compile(annotation)
sql.append(agg_sql)
params.extend(agg_params)
self.col_count = len(self.query.annotation_select)
sql = ', '.join(sql)
params = tuple(params)
sql = 'SELECT %s FROM (%s) subquery' % (sql, self.query.subquery)
params = params + self.query.sub_params
return sql, params
def cursor_iter(cursor, sentinel, col_count):
"""
Yields blocks of rows from a cursor and ensures the cursor is closed when
done.
"""
try:
for rows in iter((lambda: cursor.fetchmany(GET_ITERATOR_CHUNK_SIZE)),
sentinel):
yield [r[0:col_count] for r in rows]
finally:
cursor.close()
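As a rough illustration of how this compiler is normally reached from the ORM, a sketch that obtains a SQLCompiler from a queryset's underlying Query and renders its SQL; the Author model and the 'book' relation are hypothetical names, and nothing below is defined in this module.

# Hypothetical usage; Author/book are illustrative names only.
from django.db.models import Count

qs = Author.objects.values('name').annotate(n=Count('book'))
compiler = qs.query.get_compiler(using='default')  # returns a SQLCompiler for that connection
sql, params = compiler.as_sql()                    # the same method defined above
print(sql, params)                                 # e.g. SELECT ... GROUP BY name, as described in get_group_by()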
avg_line_length: 43.270815 | max_line_length: 98 | alphanum_fraction: 0.5812

hexsha: 806b00d5480162c622b51b2d090e4b6e8ffd5d36 | size: 307 | ext: py | lang: Python
repo_path (stars/issues/forks): saefportal/datasets/migrations/0028_delete_historicaldataset.py
repo_name: harry-consulting/SAEF1 | head_hexsha: 055d6e492ba76f90e3248b9da2985fdfe0c6b430 | licenses: ["BSD-2-Clause"]
max_stars_count: null | max_issues_count: null | max_forks_count: 1 (2020-12-16T15:02:52.000Z to 2020-12-16T15:02:52.000Z)
content:
# Generated by Django 3.1.6 on 2021-09-04 06:06
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('datasets', '0027_auto_20210904_0559'),
]
operations = [
migrations.DeleteModel(
name='HistoricalDataset',
),
]
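For context, a hedged sketch of applying this migration programmatically; call_command is the standard Django management entry point, and the app label and migration name come from the file above.

# Apply the 'datasets' app's migrations up to and including this one.
from django.core.management import call_command
call_command('migrate', 'datasets', '0028_delete_historicaldataset')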
avg_line_length: 18.058824 | max_line_length: 48 | alphanum_fraction: 0.618893

hexsha: 99b5cc3e1c092f828f80b63f67390c4de80a8c3b | size: 1,711 | ext: py | lang: Python
repo_path (stars/issues/forks): lib/ovirtsdk4/http.py
repo_name: oliel/python-ovirt-engine-sdk4 | head_hexsha: c0b13982b45dee664ebc063bda7686124b402c14 | licenses: ["Apache-2.0"]
max_stars_count: 3 (2022-01-14T00:37:58.000Z to 2022-03-26T12:26:32.000Z) | max_issues_count: 29 (2021-07-20T12:42:44.000Z to 2022-03-28T13:01:33.000Z) | max_forks_count: 12 (2021-07-20T12:27:07.000Z to 2022-02-24T11:10:12.000Z)
content:
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class Request(object):
"""
This class represents an HTTP request.
This class is intended for internal use by other components of the SDK.
Refrain from using it directly as there is no backwards compatibility
guarantee.
"""
def __init__(
self,
method='GET',
path='',
query=None,
headers=None,
body=None,
):
self.method = method
self.path = path
self.query = query if query is not None else {}
self.headers = headers if headers is not None else {}
self.body = body
class Response(object):
"""
This class represents an HTTP response.
This class is intended for internal use by other components of the SDK.
Refrain from using it directly as there is no backwards compatibility
guarantee.
"""
def __init__(
self,
body=None,
code=None,
headers=None,
message=None
):
self.body = body
self.code = code
self.headers = headers if headers is not None else {}
self.message = message
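A small sketch of how these plain data holders might be filled by an SDK transport layer; all field values below are illustrative.

# Illustrative only: build a request and a matching response by hand.
request = Request(
    method='POST',
    path='/ovirt-engine/api/vms',
    query={'async': 'false'},
    headers={'Content-Type': 'application/xml'},
    body='<vm><name>myvm</name></vm>',
)
response = Response(
    body='<vm id="123"/>',
    code=201,
    headers={'Content-Type': 'application/xml'},
    message='Created',
)
assert response.code == 201 and request.query['async'] == 'false'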
avg_line_length: 26.734375 | max_line_length: 75 | alphanum_fraction: 0.647575

hexsha: 0497d8c9b0dcbd4c37320407646fe3f267287433 | size: 610 | ext: py | lang: Python
repo_path (stars/issues/forks): users/forms.py
repo_name: badruu/mathsz | head_hexsha: 60392d07ecdbb85c19e0a528124fec9e76103012 | licenses: ["MIT"]
max_stars_count: null | max_issues_count: 7 (2020-02-12T00:31:25.000Z to 2022-03-11T23:49:43.000Z) | max_forks_count: null
content:
from django import forms
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
from .models import *
from django.db import models
class UserRegisterForm(UserCreationForm):
email = forms.EmailField()
class Meta:
model = User
fields = ['username', 'email', 'password1', 'password2']
class UserUpdateForm(forms.ModelForm):
email = forms.EmailField()
class Meta:
model = User
fields = ['username', 'email']
class ProfileUpdateForm(forms.ModelForm):
class Meta:
model = Profile
fields = ['image']
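A short sketch of validating UserRegisterForm with in-memory data, assuming a configured Django project; the field values are made up.

# Hypothetical registration data; password1/password2 must match for the form to validate.
form = UserRegisterForm(data={
    'username': 'alice',
    'email': 'alice@example.com',
    'password1': 'a-sufficiently-strong-pass',
    'password2': 'a-sufficiently-strong-pass',
})
if form.is_valid():
    user = form.save()  # creates the auth.User row with the validated username/email/password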
avg_line_length: 24.4 | max_line_length: 64 | alphanum_fraction: 0.681967

hexsha: b312a09026444831aa68e529bd1b58a90ecbe6b2 | size: 1,994 | ext: py | lang: Python
repo_path (stars/issues/forks): test/functional/feature_listmyassets.py
repo_name: rishabhworking/kringleprojectcoin | head_hexsha: 8bb8b0f038f12d1e6ccd7e0aedfcef3c51387fc0 | licenses: ["MIT"]
max_stars_count: null | max_issues_count: null | max_forks_count: null | event datetimes: null
content:
#!/usr/bin/env python3
# Copyright (c) 2018 The Bitcoin Core developers
# Copyright (c) 2017 The Raven Core developers
# Copyright (c) 2018 The Rito Core developers
# Copyright (c) 2020 The KringleProjectCoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test listmyassets RPC command."""
from test_framework.test_framework import KringleProjectCoinTestFramework
from test_framework.util import *
from test_framework.mininode import *
from io import BytesIO
class ListMyAssetsTest(KringleProjectCoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
def listmyassets_filter_zeros(self):
"""Sometimes the asset cache will contain zero-quantity holdings for some assets (until they're flushed).
These shouldn't be returned by listmyassets.
"""
# activate assets
self.nodes[0].generate(500)
self.sync_all()
assert_equal(0, len(self.nodes[0].listmyassets()))
assert_equal(0, len(self.nodes[1].listmyassets()))
self.nodes[0].issue("FOO", 1000)
self.nodes[0].generate(10)
self.sync_all()
result = self.nodes[0].listmyassets()
assert_equal(2, len(result))
assert_contains_pair("FOO", 1000, result)
assert_contains_pair("FOO!", 1, result)
address_to = self.nodes[1].getnewaddress()
self.nodes[0].transfer("FOO", 1000, address_to)
self.nodes[0].generate(10)
self.sync_all()
result = self.nodes[0].listmyassets()
assert_equal(1, len(result))
assert_contains_pair("FOO!", 1, result)
result = self.nodes[1].listmyassets()
assert_equal(1, len(result))
assert_contains_pair("FOO", 1000, result)
def run_test(self):
self.listmyassets_filter_zeros()
if __name__ == '__main__':
ListMyAssetsTest().main()
avg_line_length: 33.233333 | max_line_length: 113 | alphanum_fraction: 0.681043

hexsha: 4f5ed8f131fa36d0e862458c0458d58200b32371 | size: 3,557 | ext: py | lang: Python
repo_path (stars/issues/forks): transformer/Beam.py
repo_name: delldu/Transformer | head_hexsha: 5e2ef01c85d4d09deec0b0e88505e64c8c7cc9da | licenses: ["Apache-2.0"]
max_stars_count: 3 (2018-12-05T06:16:56.000Z to 2019-03-01T23:16:07.000Z) | max_issues_count: 1 (2019-08-06T07:46:21.000Z to 2019-08-06T07:46:21.000Z) | max_forks_count: 1 (2019-09-16T07:09:57.000Z to 2019-09-16T07:09:57.000Z)
content:
""" Manage beam search info structure.
Heavily borrowed from OpenNMT-py.
For code in OpenNMT-py, please check the following link:
https://github.com/OpenNMT/OpenNMT-py/blob/master/onmt/Beam.py
"""
import pdb
import torch
# import numpy as np
import transformer.Constants as Constants
class Beam():
''' Beam search '''
def __init__(self, size, device=False):
self.size = size
self._done = False
# The score for each translation on the beam.
self.scores = torch.zeros((size,), dtype=torch.float, device=device)
self.all_scores = []
# The backpointers at each time-step.
self.prev_ks = []
# The outputs at each time-step.
self.next_ys = [torch.full((size,), Constants.PAD, dtype=torch.long, device=device)]
self.next_ys[0][0] = Constants.BOS
# pdb.set_trace()
# (Pdb) a
# self = <transformer.Beam.Beam object at 0x7f8b64061ba8>
# size = 5
# device = device(type='cuda')
# (Pdb) print(self.next_ys)
# [tensor([2, 0, 0, 0, 0], device='cuda:0')]
def get_current_state(self):
"Get the outputs for the current timestep."
return self.get_tentative_hypothesis()
def get_current_origin(self):
"Get the backpointers for the current timestep."
return self.prev_ks[-1]
@property
def done(self):
return self._done
def advance(self, word_prob):
"Update beam status and check if finished or not."
num_words = word_prob.size(1)
# Sum the previous scores.
if len(self.prev_ks) > 0:
beam_lk = word_prob + self.scores.unsqueeze(1).expand_as(word_prob)
else:
beam_lk = word_prob[0]
flat_beam_lk = beam_lk.view(-1)
        best_scores, best_scores_id = flat_beam_lk.topk(self.size, 0, True, True)  # top-k over the flattened (beam x vocab) scores
self.all_scores.append(self.scores)
self.scores = best_scores
# bestScoresId is flattened as a (beam x word) array,
# so we need to calculate which word and beam each score came from
prev_k = best_scores_id / num_words
self.prev_ks.append(prev_k)
self.next_ys.append(best_scores_id - prev_k * num_words)
# End condition is when top-of-beam is EOS.
if self.next_ys[-1][0].item() == Constants.EOS:
self._done = True
self.all_scores.append(self.scores)
return self._done
def sort_scores(self):
"Sort the scores."
return torch.sort(self.scores, 0, True)
def get_the_best_score_and_idx(self):
"Get the score of the best in the beam."
scores, ids = self.sort_scores()
return scores[1], ids[1]
def get_tentative_hypothesis(self):
"Get the decoded sequence for the current timestep."
if len(self.next_ys) == 1:
dec_seq = self.next_ys[0].unsqueeze(1)
else:
_, keys = self.sort_scores()
hyps = [self.get_hypothesis(k) for k in keys]
hyps = [[Constants.BOS] + h for h in hyps]
dec_seq = torch.LongTensor(hyps)
return dec_seq
def get_hypothesis(self, k):
""" Walk back to construct the full hypothesis. """
hyp = []
for j in range(len(self.prev_ks) - 1, -1, -1):
hyp.append(self.next_ys[j+1][k])
k = self.prev_ks[j][k]
return list(map(lambda x: x.item(), hyp[::-1]))
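A minimal sketch of how the Beam class above is typically driven during decoding. The decoder_step helper, the max_len limit and the CUDA device are assumptions for illustration only; nothing below comes from the original file.

# Hypothetical decode loop for the Beam class (decoder_step and max_len are assumed).
beam = Beam(size=5, device='cuda')
for _ in range(max_len):
    dec_seq = beam.get_current_state()      # (beam_size, cur_len) partial hypotheses
    word_prob = decoder_step(dec_seq)       # assumed: (beam_size, n_vocab) log-probabilities
    if beam.advance(word_prob):             # True once EOS reaches the top of the beam
        break
scores, ids = beam.sort_scores()
best_hyp = beam.get_hypothesis(ids[0])      # token ids of the highest-scoring hypothesis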
avg_line_length: 31.201754 | max_line_length: 92 | alphanum_fraction: 0.606972

hexsha: cb3580c9be95292afb3d234ff68b7c5fedc3eecb | size: 5,058 | ext: py | lang: Python
max_stars:  Packages/Anaconda/listeners/linting.py | prisis/sublime-text-packages | 99ae8a5496613e27a75e5bd91723549b21476e60 | ["MIT"] | null | null | null
max_issues: Packages/Anaconda/listeners/linting.py | prisis/sublime-text-packages | 99ae8a5496613e27a75e5bd91723549b21476e60 | ["MIT"] | 1 | 2016-02-10T09:50:09.000Z | 2016-02-10T09:50:09.000Z
max_forks:  Packages/Anaconda/listeners/linting.py | prisis/sublime-text-packages | 99ae8a5496613e27a75e5bd91723549b21476e60 | ["MIT"] | null | null | null
# Copyright (C) 2013 - Oscar Campos <oscar.campos@member.fsf.org>
# This program is Free Software see LICENSE file for details
import time
import sublime
import sublime_plugin
from ..anaconda_lib.helpers import (
check_linting, get_settings, check_linting_behaviour,
ONLY_CODE, NOT_SCRATCH, LINTING_ENABLED, is_code
)
from ..anaconda_lib.linting.sublime import (
ANACONDA, erase_lint_marks, run_linter,
last_selected_lineno, update_statusbar
)
class BackgroundLinter(sublime_plugin.EventListener):
"""Background linter, can be turned off via plugin settings
"""
check_auto_lint = False
def __init__(self, lang='Python', linter=run_linter):
super(BackgroundLinter, self).__init__()
self.lang = lang
self.run_linter = linter
self.last_selected_line = -1
sublime.set_timeout(self.lint, 1000)
def lint(self):
view = sublime.active_window().active_view()
if get_settings(view, 'anaconda_linting_behaviour') != 'always':
if not self.check_auto_lint:
self.check_auto_lint = True
return
delay = get_settings(view, 'anaconda_linter_delay', 0.5)
valid_code = is_code(view, lang=self.lang.lower())
if not ANACONDA['ALREADY_LINTED'] and valid_code:
if time.time() - ANACONDA['LAST_PULSE'] >= delay:
ANACONDA['ALREADY_LINTED'] = True
self.run_linter(view)
sublime.set_timeout(lambda: self.lint(), int(delay * 1000))
def on_modified(self, view):
"""
Called after changes have been made to a view.
Runs in a separate thread, and does not block the application.
"""
constraints = ONLY_CODE | NOT_SCRATCH | LINTING_ENABLED
if (check_linting(view, constraints, code=self.lang.lower())
and check_linting_behaviour(view, ['always'])):
# update the last selected line number
self.last_selected_line = -1
ANACONDA['LAST_PULSE'] = time.time()
ANACONDA['ALREADY_LINTED'] = False
if not get_settings(view, 'anaconda_linter_persistent', False):
erase_lint_marks(view)
if self.check_auto_lint:
self.lint()
else:
self._erase_marks_if_no_linting(view)
def on_load(self, view):
"""Called after load a file
"""
if (check_linting(view, ONLY_CODE, code=self.lang.lower())
and check_linting_behaviour(view, ['always', 'load-save'])):
if self.lang in view.settings().get('syntax'):
self.run_linter(view)
else:
self._erase_marks_if_no_linting(view)
def on_pre_close(self, view):
"""Called when the view is about to be closed
"""
self._erase_marks(view)
for severity in ['VIOLATIONS', 'WARNINGS', 'ERRORS']:
ANACONDA[severity][view.id()] = {}
def on_post_save(self, view):
"""Called post file save event
"""
if check_linting(
view, NOT_SCRATCH | LINTING_ENABLED, code=self.lang.lower()):
if self.lang in view.settings().get('syntax'):
if get_settings(
view, "anaconda_linter_show_errors_on_save", False):
self.run_linter(view, self._show_errors_list)
else:
self.run_linter(view)
else:
self._erase_marks_if_no_linting(view)
def _show_errors_list(self, parse_results, data):
"""Hook the parser_results callback and append some functions
"""
parse_results(data)
sublime.active_window().run_command('anaconda_get_lines')
def on_activated(self, view):
"""Called when a view gain the focus
"""
if (check_linting(
view, ONLY_CODE | LINTING_ENABLED, code=self.lang.lower())
and check_linting_behaviour(view, ['always'])):
if self.lang in view.settings().get('syntax'):
self.run_linter(view)
else:
self._erase_marks_if_no_linting(view)
def on_selection_modified(self, view):
"""Called on selection modified
"""
constraints = ONLY_CODE | NOT_SCRATCH | LINTING_ENABLED
if (not check_linting(view, constraints, code=self.lang.lower())
or self.lang not in view.settings().get('syntax')):
return
last_selected_line = last_selected_lineno(view)
if last_selected_line != self.last_selected_line:
self.last_selected_line = last_selected_line
update_statusbar(view)
def _erase_marks_if_no_linting(self, view):
"""Erase the anaconda marks if linting is disabled
"""
if not check_linting(view, LINTING_ENABLED, code=self.lang.lower()):
self._erase_marks(view)
def _erase_marks(self, view):
"""Just a wrapper for erase_lint_marks
"""
erase_lint_marks(view)
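For reference, a hypothetical user-settings fragment covering the keys this listener consults; the values are illustrative choices, not the plugin's documented defaults.

# Illustrative Anaconda settings read by BackgroundLinter above (example values only).
anaconda_settings = {
    "anaconda_linting_behaviour": "always",        # or "load-save"
    "anaconda_linter_delay": 0.5,                  # seconds between lint pulses
    "anaconda_linter_persistent": False,           # keep lint marks while typing
    "anaconda_linter_show_errors_on_save": False,  # pop the error list after saving
}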
avg_line_length: 33.946309 | max_line_length: 77 | alphanum_fraction: 0.61724

hexsha: 4309f69fcebb558b915fc4a8e2fb9758f532d505 | size: 4,644 | ext: py | lang: Python
max_stars:  rayvision_clarisse/utils.py | foxrenderfarm/rayvision_clarisse | d2f2dd4ef5394899b1b2e9ecc0d56c943ce56b1c | ["Apache-2.0"] | 1 | 2021-01-14T08:06:39.000Z | 2021-01-14T08:06:39.000Z
max_issues: rayvision_clarisse/utils.py | renderbus/rayvision_clarisse | 7971e4178a85ab9b98c7dd626a5c5fc655f67f8a | ["Apache-2.0"] | null | null | null
max_forks:  rayvision_clarisse/utils.py | renderbus/rayvision_clarisse | 7971e4178a85ab9b98c7dd626a5c5fc655f67f8a | ["Apache-2.0"] | null | null | null
"""Common method for rayvision_clarisse API."""
import sys
def get_encode(encode_str, py_version=3):
"""Get the encoding of the string decoding.
Args:
encode_str (str, unicode): String.
py_version (int): Python version, default is 3.
Returns:
str: Coding.
"""
if ((py_version == 2 and isinstance(encode_str, str)) or (
py_version == 3 and isinstance(encode_str, str))):
encode = "unicode"
else:
for code in ["utf-8", sys.getfilesystemencoding(), "gb18030",
"ascii", "gbk", "gb2312"]:
try:
encode_str.decode(code, 'ignore')
return code
except UnicodeDecodeError:
pass
encode = 'utf-8'
return encode
def str_to_unicode(encode_str, py_version=3):
"""Get the encoding of the string decoding.
Args:
encode_str (str, unicode): String.
py_version (int): Python version, default is 3.
Returns:
str: String.
"""
if (encode_str is None or encode_str == "" or encode_str == 'Null' or
encode_str == 'null'):
encode_str = ""
elif ((py_version == 2 and isinstance(encode_str, str)) or (
py_version == 3 and isinstance(encode_str, str))):
pass
else:
code = get_encode(encode_str)
encode_str = encode_str.decode(code, 'ignore')
return encode_str
def unicode_to_str(str1, logger=None, str_encode='system', py_version=3):
"""Unicode encoded string converted to str."""
if str1 is None or str1 == "" or str1 == 'Null' or str1 == 'null':
str1 = ""
elif ((py_version == 2 and isinstance(str1, str)) or (
py_version == 3 and isinstance(str1, str))):
try:
if str_encode.lower() == 'system':
str1 = str1.encode(sys.getfilesystemencoding(), 'ignore')
elif str_encode.lower() == 'utf-8':
str1 = str1.encode('utf-8', 'ignore')
elif str_encode.lower() == 'gbk':
str1 = str1.encode('gbk', 'ignore')
else:
str1 = str1.encode(str_encode, 'ignore')
except UnicodeDecodeError as err_message:
if logger:
logger.info('[err]unicode_to_str:encode %s to %s failed',
str1, str_encode)
logger.info(str(err_message))
elif ((py_version == 2 and isinstance(str1, str)) or (
py_version == 3 and isinstance(str1, bytes))):
pass
else:
if logger:
logger.info('%s is not unicode ', str1)
return str(str1)
def bytes_to_str(str1, logger=None, str_decode='default', py_version=3):
"""Bytes encoded string converted to str."""
if not ((py_version == 2 and isinstance(str1, str)) or
(py_version == 3 and isinstance(str1, str))):
try:
if str_decode != 'default':
str1 = str1.decode(str_decode.lower(), 'ignore')
else:
try:
str1 = str1.decode('utf-8', 'ignore')
except UnicodeDecodeError:
try:
str1 = str1.decode('gbk', 'ignore')
except UnicodeDecodeError:
str1 = str1.decode(sys.getfilesystemencoding(),
'ignore')
except UnicodeDecodeError as err_message:
if logger:
logger.info('[err]bytes_to_str:decode %s to str failed', str1)
logger.info(str(err_message))
return str1
def to_gbk(encode_str, py_version):
"""Convert string to gbk code."""
if ((py_version == 2 and isinstance(encode_str, str
)) or (py_version == 3 and
isinstance(encode_str, str))):
pass
else:
code = get_encode(encode_str)
encode_str = encode_str.decode(code).encode('GBK', 'ignore')
return encode_str
def convert_path(path):
"""Convert to the path the server will accept.
Args:
path (str): Local file path.
e.g.:
"D:/work/render/19183793/max/d/Work/c05/112132P-embery.jpg"
Returns:
str: Path to the server.
e.g.:
"/D/work/render/19183793/max/d/Work/c05/112132P-embery.jpg"
"""
lower_path = path.replace('\\', '/')
if lower_path[1] == ":":
path_lower = lower_path.replace(":", "")
path_server = "/" + path_lower
else:
path_server = lower_path[1:]
return path_server
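A small usage sketch for convert_path, reusing the example path from its docstring.

# Prints "/D/work/render/19183793/max/d/Work/c05/112132P-embery.jpg"
local_path = "D:/work/render/19183793/max/d/Work/c05/112132P-embery.jpg"
print(convert_path(local_path))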
avg_line_length: 32.93617 | max_line_length: 78 | alphanum_fraction: 0.540698

hexsha: f59f138c01128b792f3916ba9454dc1e6190b1f3 | size: 3,195 | ext: py | lang: Python
max_stars:  src/sample.py | RatJuggler/gpt-2 | bcbf47528c846c70305649c07701ac7228aad39e | ["MIT"] | null | null | null
max_issues: src/sample.py | RatJuggler/gpt-2 | bcbf47528c846c70305649c07701ac7228aad39e | ["MIT"] | null | null | null
max_forks:  src/sample.py | RatJuggler/gpt-2 | bcbf47528c846c70305649c07701ac7228aad39e | ["MIT"] | null | null | null
import tensorflow as tf
import model
def top_k_logits(logits, k):
if k == 0:
# no truncation
return logits
def _top_k():
values, _ = tf.nn.top_k(logits, k=k)
min_values = values[:, -1, tf.newaxis]
return tf.where(
logits < min_values,
tf.ones_like(logits, dtype=logits.dtype) * -1e10,
logits,
)
return tf.cond(
tf.equal(k, 0),
lambda: logits,
lambda: _top_k(),
)
def top_p_logits(logits, p):
"""Nucleus sampling"""
batch, _ = logits.shape.as_list()
sorted_logits = tf.contrib.framework.sort(logits, direction='DESCENDING', axis=-1)
cumulative_probs = tf.cumsum(tf.nn.softmax(sorted_logits, axis=-1), axis=-1)
indices = tf.stack([
tf.range(0, batch),
# number of indices to include
tf.maximum(tf.reduce_sum(tf.cast(cumulative_probs <= p, tf.int32), axis=-1) - 1, 0),
], axis=-1)
min_values = tf.gather_nd(sorted_logits, indices)
return tf.where(
logits < min_values,
tf.ones_like(logits) * -1e10,
logits,
)
def sample_sequence(*, hparams, length, start_token=None, batch_size=None, context=None, temperature=1, top_k=0, top_p=1):
if start_token is None:
assert context is not None, 'Specify exactly one of start_token and context!'
else:
assert context is None, 'Specify exactly one of start_token and context!'
context = tf.fill([batch_size, 1], start_token)
def step(hparams, tokens, past=None):
lm_output = model.model(hparams=hparams, X=tokens, past=past, reuse=tf.compat.v1.AUTO_REUSE)
logits = lm_output['logits'][:, :, :hparams.n_vocab]
presents = lm_output['present']
presents.set_shape(model.past_shape(hparams=hparams, batch_size=batch_size))
return {
'logits': logits,
'presents': presents,
}
with tf.name_scope('sample_sequence'):
def body(past, prev, output):
next_outputs = step(hparams, prev, past=past)
logits = next_outputs['logits'][:, -1, :] / tf.to_float(temperature)
logits = top_k_logits(logits, k=top_k)
logits = top_p_logits(logits, p=top_p)
samples = tf.multinomial(logits, num_samples=1, output_dtype=tf.int32)
return [
next_outputs['presents'] if past is None else tf.concat([past, next_outputs['presents']], axis=-2),
samples,
tf.concat([output, samples], axis=1)
]
past, prev, output = body(None, context, context)
def cond(*args):
return True
_, _, tokens = tf.while_loop(
cond=cond, body=body,
maximum_iterations=length - 1,
loop_vars=[
past,
prev,
output
],
shape_invariants=[
tf.TensorShape(model.past_shape(hparams=hparams, batch_size=batch_size)),
tf.TensorShape([batch_size, None]),
tf.TensorShape([batch_size, None]),
],
back_prop=False,
)
return tokens
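A rough, hypothetical TF1-style driver for sample_sequence. The encoder and hparams helpers and the checkpoint path are assumptions about the surrounding GPT-2 repository; only the sample_sequence call itself comes from this file.

# Hypothetical usage of sample_sequence (helper modules and paths are assumed).
import tensorflow as tf
import model, sample, encoder
enc = encoder.get_encoder('117M', 'models')            # assumed repo helper
hparams = model.default_hparams()                      # assumed repo helper
with tf.Session() as sess:
    context = tf.placeholder(tf.int32, [1, None])
    output = sample.sample_sequence(
        hparams=hparams, length=40, context=context,
        batch_size=1, temperature=1.0, top_k=40, top_p=1)
    saver = tf.train.Saver()
    saver.restore(sess, tf.train.latest_checkpoint('models/117M'))
    out = sess.run(output, feed_dict={context: [enc.encode("Hello")]})
    print(enc.decode(out[0]))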
avg_line_length: 32.938144 | max_line_length: 122 | alphanum_fraction: 0.578091

hexsha: 6a953193f3ca4bd4f24e4ef1bcdf473e173630a7 | size: 3,417 | ext: py | lang: Python
max_stars:  covid19/scrape.py | jimmytran16/covid-19-us-data | cbeaa303d88a895e5dc123623b6fd31ec534b812 | ["MIT"] | null | null | null
max_issues: covid19/scrape.py | jimmytran16/covid-19-us-data | cbeaa303d88a895e5dc123623b6fd31ec534b812 | ["MIT"] | null | null | null
max_forks:  covid19/scrape.py | jimmytran16/covid-19-us-data | cbeaa303d88a895e5dc123623b6fd31ec534b812 | ["MIT"] | null | null | null
from bs4 import BeautifulSoup
import requests
from pprint import pprint
class COVID:
def __init__(self):
pass
#UPDATED ON 8/7/2020
def get_covid_data(self):
"""
GIVES YOU DATA ON YESTERDAYS COVID STATUS
This script will scrap data off the worldometers website to attain each states covid status
It targets the table that is on the website that renders the data of all 50 states
"""
url = 'https://www.worldometers.info/coronavirus/country/us/'
data = requests.get(url) #get the data object from webpage of url
content = data.content #get the content (src code) of the webpage -- This content is in byte format
soup = BeautifulSoup(content,features="html.parser") #call an instance of bsoup, passing in the content
        all_states = soup.find_all('table',id="usa_table_countries_yesterday") #look for the table element with the specific id
content = bytes(str(all_states[0]).replace('\n',''),'utf8') #convert the string into byte representation, #strip all of the new lines in the string
soup = BeautifulSoup(content,features="html.parser") #pass the byte CONTENT to get the BeautifulSoup instance
list_of_elements_from_html = soup.find_all('td') #find all of the <td> elements within the table
fixed_list = self.__replace_all_special_characters_with_zero(list_of_elements_from_html=list_of_elements_from_html)
return self.__aggregrate_html_table_data_into_dict(fixed_list=fixed_list)
def __replace_all_special_characters_with_zero(self, list_of_elements_from_html):
fixed_list = []
        for i in list_of_elements_from_html[:len(list_of_elements_from_html)-96]: #iterate through the list and copy items to a new list, replacing all the empty spots with 0
if '[' not in i.text and i.text.strip() != '':
fixed_list.append(i.text)
else: #replace anything that has an empty space with '0'
fixed_list.append('0')
return fixed_list
def __aggregrate_html_table_data_into_dict(self, fixed_list):
"""
This function will ingest the data that is coming from the HTML and parse it into a dictionary
Parameters:
(fixed_list): this list contains all of the data for all of the states and its cases, new cases, death's, and new deaths
"""
state_stats = [] #set a empty list to populate the state's current cases, new cases, death's and new deaths
state_object = {} #dict to keep the state:[{state: [stats....]}]
counter = 0
        current_state = '' #keep track of the current state that is being processed
for state in fixed_list:
if counter == 1:
current_state = state.strip()
# append all the data from the table into to list
elif counter in [2,3,4,5,6,7,8,9,10,11,12]:
state_stats.append(state)
elif counter == 13:
state_stats.append(state)
state_object[current_state] = state_stats
state_stats = []
counter = 0
continue
counter = counter + 1
return state_object #returns back a dictionary of the STATES:[DATA]
if __name__ == '__main__':
covid = COVID()
covid.get_covid_data()
# pprint(covid.get_covid_data())
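A hypothetical consumer of get_covid_data(); the per-state column layout (total cases, new cases, deaths, and so on) is inferred from the worldometers table parsed above, not guaranteed by this file.

# Illustrative use of the scraper's return value (a dict of state name -> list of stats).
covid = COVID()
data = covid.get_covid_data()
for state, stats in list(data.items())[:3]:
    print(state, stats[:2])   # first two columns of the scraped row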
avg_line_length: 44.376623 | max_line_length: 168 | alphanum_fraction: 0.658472

hexsha: 96ba58339a5c289138db37ab2b7992cfa53837b4 | size: 5,894 | ext: py | lang: Python
max_stars:  electrumsv/gui/qt/status_bar.py | CherryDT/electrumsv | 6b778b1c363e22286c3e3ef1bc5a2fa56955ac48 | ["MIT"] | null | null | null
max_issues: electrumsv/gui/qt/status_bar.py | CherryDT/electrumsv | 6b778b1c363e22286c3e3ef1bc5a2fa56955ac48 | ["MIT"] | null | null | null
max_forks:  electrumsv/gui/qt/status_bar.py | CherryDT/electrumsv | 6b778b1c363e22286c3e3ef1bc5a2fa56955ac48 | ["MIT"] | null | null | null
from typing import Optional, Tuple
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QPixmap
from PyQt5.QtWidgets import (QGridLayout, QHBoxLayout, QLabel, QLineEdit, QSizePolicy, QStatusBar,
QToolButton, QWidget, QWidgetAction)
from electrumsv.i18n import _
from .util import icon_path
class BalancePopup(QWidget):
def __init__(self, main_window: 'ElectrumWindow', status_bar: 'StatusBar',
parent: QWidget) -> None:
super().__init__(parent)
grid_layout = QGridLayout()
grid_layout.addWidget(QLabel(_('Confirmed')), 0, 0, 1, 1)
grid_layout.addWidget(QLabel(_('Unconfirmed')), 1, 0, 1, 1)
grid_layout.addWidget(QLabel(_('Unmatured')), 2, 0, 1, 1)
balances = main_window.wallet.get_balance()
for i, balance in enumerate(balances):
bsv_status, fiat_status = main_window.get_amount_and_units(balance)
grid_layout.addWidget(QLabel(bsv_status), i, 1, 1, 1, Qt.AlignRight)
if status_bar._fiat_widget.isVisible():
grid_layout.addWidget(QLabel(fiat_status), i, 2, 1, 1, Qt.AlignRight)
self.setLayout(grid_layout)
class BalancePopupAction(QWidgetAction):
def __init__(self, main_window: 'ElectrumWindow', status_bar: 'StatusBar',
parent: Optional[QWidget]=None) -> None:
super().__init__(parent)
self._status_bar = status_bar
self._main_window = main_window
def createWidget(self, parent: QWidget) -> QWidget:
return BalancePopup(self._main_window, self._status_bar, parent)
class StatusBar(QStatusBar):
_balance_bsv_label: QLabel = None
_balance_equals_label: QLabel = None
_balance_fiat_label: QLabel = None
_balance_widget: QToolButton = None
_fiat_bsv_label: QLabel = None
_fiat_value_label: QLabel = None
_fiat_widget: QWidget = None
_network_label: QLabel = None
def __init__(self, main_window: 'ElectrumWindow') -> None:
super().__init__(None)
balance_widget = QToolButton()
balance_widget.setAutoRaise(True)
balance_widget.setPopupMode(QToolButton.MenuButtonPopup)
balance_icon_label = QLabel("")
balance_icon_label.setPixmap(QPixmap(icon_path("sb_balance.png")))
hbox = QHBoxLayout()
hbox.setSpacing(2)
hbox.setSizeConstraint(hbox.SetFixedSize)
hbox.addWidget(balance_icon_label)
self._balance_bsv_label = QLabel("")
hbox.addWidget(self._balance_bsv_label)
self._balance_equals_label = QLabel("")
self._balance_equals_label.setPixmap(QPixmap(icon_path("sb_approximate")))
hbox.addWidget(self._balance_equals_label)
self._balance_fiat_label = QLabel("")
hbox.addWidget(self._balance_fiat_label)
# This is to pad out the text on the RHS so that the menu indicator does not overlay it.
hbox.addWidget(QLabel(" "))
balance_widget.setLayout(hbox)
balance_widget.addAction(BalancePopupAction(main_window, self, balance_widget))
self._balance_widget = balance_widget
self.addPermanentWidget(balance_widget)
self._fiat_widget = QWidget()
self._fiat_widget.setVisible(False)
estimate_icon_label = QLabel("")
estimate_icon_label.setPixmap(QPixmap(icon_path("sb_fiat.png")))
hbox = QHBoxLayout()
hbox.setSpacing(2)
hbox.setSizeConstraint(hbox.SetFixedSize)
hbox.addWidget(estimate_icon_label)
self._fiat_bsv_label = QLabel("")
hbox.addWidget(self._fiat_bsv_label)
approximate_icon_label = QLabel("")
approximate_icon_label.setPixmap(QPixmap(icon_path("sb_approximate")))
hbox.addWidget(approximate_icon_label)
self._fiat_value_label = QLabel("")
fm = self._fiat_bsv_label.fontMetrics()
width = fm.width("1,000.00 CUR")
self._fiat_value_label.setMinimumWidth(width)
hbox.addWidget(self._fiat_value_label)
self._fiat_widget.setLayout(hbox)
self.addPermanentWidget(self._fiat_widget)
network_widget = QWidget()
network_icon_label = QLabel("")
network_icon_label.setPixmap(QPixmap(icon_path("sb_network.png")))
hbox = QHBoxLayout()
hbox.setSpacing(2)
hbox.addWidget(network_icon_label)
self._network_label = QLabel("")
sp = QSizePolicy(QSizePolicy.Preferred, QSizePolicy.Preferred)
sp.setHorizontalStretch(1)
self._network_label.setSizePolicy(sp)
hbox.addWidget(self._network_label)
network_widget.setLayout(hbox)
network_widget.setMinimumWidth(150)
self.addPermanentWidget(network_widget)
self.search_box = QLineEdit()
# self.search_box.textChanged.connect(self.do_search)
self.search_box.hide()
self.addPermanentWidget(self.search_box)
def set_balance_status(self, bsv_text: str, fiat_text: Optional[str]) -> None:
have_fiat_text = bool(fiat_text)
self._balance_bsv_label.setText(bsv_text)
self._balance_equals_label.setVisible(have_fiat_text)
self._balance_fiat_label.setVisible(have_fiat_text)
self._balance_fiat_label.setText(fiat_text if have_fiat_text else '')
def set_fiat_status(self, status: Optional[Tuple[str, str]]) -> None:
# None: Fiat is disabled.
# (None, None): Fiat is enabled, but no rate information yet.
if status is None or status[0] is None and status[1] is None:
self._fiat_widget.setVisible(False)
else:
self._fiat_widget.setVisible(True)
# The first call before we fetch our first rate, will show empty space for status text.
self._fiat_bsv_label.setText(status[0])
self._fiat_value_label.setText(status[1])
def set_network_status(self, text: str) -> None:
self._network_label.setText(text)
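Illustrative calls matching the set_fiat_status() contract described in the comments above; the rate strings are made-up values, not output produced by ElectrumSV itself.

# Example status tuples for StatusBar.set_fiat_status() (values are invented).
status_bar.set_fiat_status(None)                     # fiat display disabled
status_bar.set_fiat_status((None, None))             # enabled, but no rate fetched yet
status_bar.set_fiat_status(("1 BSV", "100.00 USD"))  # enabled with a fetched rate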
avg_line_length: 40.369863 | max_line_length: 99 | alphanum_fraction: 0.688836

hexsha: 609bc596781c02a702b30be33b012b8075d29112 | size: 5,358 | ext: py | lang: Python
max_stars:  docs/conf.py | tongwu2020/adversarial-robustness-toolbox | d9958493a96244476867bcf5970e7877f1b05dc8 | ["MIT"] | 1 | 2019-04-14T23:18:02.000Z | 2019-04-14T23:18:02.000Z
max_issues: docs/conf.py | tongwu2020/adversarial-robustness-toolbox | d9958493a96244476867bcf5970e7877f1b05dc8 | ["MIT"] | null | null | null
max_forks:  docs/conf.py | tongwu2020/adversarial-robustness-toolbox | d9958493a96244476867bcf5970e7877f1b05dc8 | ["MIT"] | null | null | null
# -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/stable/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
import art
# -- Project information -----------------------------------------------------
project = 'Adversarial Robustness Toolbox'
copyright = '2018, IBM Corporation'
author = 'Maria-Irina Nicolae'
# The short X.Y version
version = '0.7'
# The full version, including alpha/beta/rc tags
release = '0.7.0'
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.viewcode',
'sphinx.ext.autodoc'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path .
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
# html_theme = 'alabaster'
if os.environ.get('READTHEDOCS') != 'True':
try:
import sphinx_rtd_theme
except ImportError:
pass # assume we have sphinx >= 1.3
else:
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'adversarial-robustness-toolboxdoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'adversarial-robustness-toolbox.tex', 'adversarial-robustness-toolbox Documentation',
'Maria-Irina Nicolae', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'adversarial-robustness-toolbox', 'adversarial-robustness-toolbox Documentation',
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'adversarial-robustness-toolbox', 'adversarial-robustness-toolbox Documentation',
author, 'adversarial-robustness-toolbox', 'One line description of project.',
'Miscellaneous'),
]
# -- Extension configuration -------------------------------------------------
avg_line_length: 31.704142 | max_line_length: 102 | alphanum_fraction: 0.659201

hexsha: 1d721a3d6e4939477a63a5ddfd2d0e2c5424ee56 | size: 121 | ext: py | lang: Python
max_stars:  comments/admin.py | orionblastar/K666-OB | 2ee3ffa01d4dea799ac6946881dcf5f76d787168 | ["MIT"] | 8 | 2016-05-21T20:48:02.000Z | 2018-02-19T22:57:15.000Z
max_issues: comments/admin.py | orionblastar/K666 | 2ee3ffa01d4dea799ac6946881dcf5f76d787168 | ["MIT"] | 35 | 2016-05-23T06:34:43.000Z | 2020-07-10T05:56:44.000Z
max_forks:  comments/admin.py | orionblastar/K666 | 2ee3ffa01d4dea799ac6946881dcf5f76d787168 | ["MIT"] | null | null | null
from django.contrib import admin
from .models import Comment
admin.site.register(Comment)
# Register your models here.
avg_line_length: 17.285714 | max_line_length: 32 | alphanum_fraction: 0.801653

hexsha: 12be9e70d7d79e456d97ae16c644897284c15215 | size: 1,522 | ext: py | lang: Python
max_stars:  src/gui_tool/VideoCaptureManager.py | AutoDash/AutoDash | 3924795a04159f80ea3b65b2172747babd15f35f | ["Apache-2.0"] | 3 | 2020-02-12T01:24:46.000Z | 2020-02-13T00:50:46.000Z
max_issues: src/gui_tool/VideoCaptureManager.py | AutoDash/AutoDash | 3924795a04159f80ea3b65b2172747babd15f35f | ["Apache-2.0"] | 32 | 2020-02-20T10:20:56.000Z | 2022-02-10T01:42:46.000Z
max_forks:  src/gui_tool/VideoCaptureManager.py | AutoDash/AutoDash | 3924795a04159f80ea3b65b2172747babd15f35f | ["Apache-2.0"] | 1 | 2020-02-22T02:47:19.000Z | 2020-02-22T02:47:19.000Z
import numpy as np
from ..data.VideoFile import VideoFile
"""
Video capture manager, specific for the GUI tool
Note:
This class is 0 indexed
"""
class VideoCaptureManager(object):
def __init__(self, file_loc: str, start_index=None, end_index=None):
self.file_loc = file_loc
self.vfm = VideoFile(file_loc, start_index, end_index)
self.paused = False
def start_from(self, location: int = 0):
self.vfm.set_index(location)
def shift_frame_index(self, shift: int):
self.start_from(
max(
min(
self.get_frame_index() + shift,
self.get_frames_count()-1
),
0
)
)
def release(self):
self.vfm.release()
def next(self) -> np.ndarray:
if not self.paused:
frame = self.vfm.next()
else:
frame = self.vfm.current()
return frame
def get_frames_count(self) -> int:
return self.vfm.get_frame_count()
def get_frame_index(self) -> int:
return self.vfm.get_index()
def get_paused(self) -> bool:
return self.paused
def set_paused(self, paused: bool):
self.paused = paused
def is_open(self) -> bool:
return self.vfm.is_open() or \
(self.get_frame_index() == self.get_frames_count()-1 and self.get_paused())
def get_height(self):
return self.vfm.get_height()
def get_width(self):
return self.vfm.get_width()
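A minimal, hypothetical read loop over VideoCaptureManager; the video path is an assumption and the None check is only a defensive guard for the sketch.

# Hypothetical frame-reading loop (file path is an assumption).
vcm = VideoCaptureManager("dashcam.mp4")
vcm.start_from(0)
while vcm.is_open():
    frame = vcm.next()        # np.ndarray for the next (or current, if paused) frame
    if frame is None:
        break
    # ... process the frame here ...
vcm.release()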
avg_line_length: 29.843137 | max_line_length: 87 | alphanum_fraction: 0.586728

hexsha: bc77ca271b4390e4bc197a373acd3d92cfa54afb | size: 4,200 | ext: py | lang: Python
max_stars:  tools/mm2ports.py | anima-libera/ports | 782ea3291eb3f66a043a3bc8e3cf2d35ccd62815 | ["Unlicense"] | 4 | 2020-05-14T22:27:08.000Z | 2021-07-23T18:56:04.000Z
max_issues: tools/mm2ports.py | anima-libera/ports | 782ea3291eb3f66a043a3bc8e3cf2d35ccd62815 | ["Unlicense"] | null | null | null
max_forks:  tools/mm2ports.py | anima-libera/ports | 782ea3291eb3f66a043a3bc8e3cf2d35ccd62815 | ["Unlicense"] | 1 | 2021-05-14T09:33:07.000Z | 2021-05-14T09:33:07.000Z
""" Minsky Machine to Ports translator
This is a proof that Ports is Turing-complete !
"""
from ports import run_source
from generate_text import generate
REGISTER = """ ii{R}|o
{{
m* o:pl|p{R} o:ml|m{R} o:0l|0{R} pl-lpl ml-lml vlp-frec vlm-0l e-1.1*. # init
fp* lpl* vlp* lp* fp3-2.2*.
fm* lml* vlm* fm1-3.3*.
fb* vlp vlm-m{R} fp2-12.12*.
frec* ii{R}|o{{}} ii{R}-4.4* lp-p{R} fr-0{R} fb-5.5*.
fr* vlp-fb vlm-0l fm2-6.6*.
fp2* fp-8.8*. fp3* fp-9.9*. # to fp
fm1* fm-10.10*. fm2* fm-11.11*. # to fm
e* # end
}}
"""
# If you want to understand how it works, get some paper and make some drawings to disassemble that shit
# Missing numbers are... gone forever
def run_mm(mm):
""" TODO """
if "l" in mm["registers"]:
raise Exception("no register named \"l\" u lil shit")
registers = {}
for register in mm["registers"]:
registers[register] = 0
code = {}
for instr in mm["code"]:
code[instr[0]] = instr
ip = mm["code"][0][0]
while True:
if code[ip][1] == "+":
registers[code[ip][2]] += 1
ip = code[ip][3]
elif code[ip][1] == "-":
if registers[code[ip][2]] == 0:
ip = code[ip][4]
else:
registers[code[ip][2]] -= 1
ip = code[ip][3]
elif code[ip][1] == "p":
print(code[ip][2], end = "", flush = True)
ip = code[ip][3]
elif code[ip][1] == "h":
break
elif code[ip][1] == ".":
ip = code[ip][3]
else:
raise Exception("whats dat shit \"{}\"".format(code[ip][1]))
def mm2ports(mm):
""" TODO
NOTE don't name the register "l" or it will not work ! """
p = []
i = 1
p.append("m*")
for register in mm["registers"]:
p.append(REGISTER.format(R = register))
p.append("ii{R}-x{i}.x{i}*\n".format(R = register, i = i))
i += 1
for instr in mm["code"]:
p.append("{l}.{l}* ".format(l = instr[0])) # label
if instr[1] == "+": # increment
p.append("p{R}-x{i}.x{i}*".format(R = instr[2], i = i))
i += 1
elif instr[1] == "-": # decrement
p.append("0{R}-{l}.m{R}-x{i}.x{i}*".format(R = instr[2], i = i, l = instr[4]))
i += 1
elif instr[1] == "p": # print
code, i = generate(instr[2], start = i, pref = "x", readability = 0, new_i = True)
p.append(code)
p.append("of-x{i}.x{i}*".format(i = i))
i += 1
elif instr[1] == "h": # halt
p.append("o-x{i}.x{i}*".format(i = i))
i += 1
elif instr[1] == ".": # nop (goto)
pass
else:
raise Exception("whats dat shit \"{}\"".format(instr[1]))
p.append("{l}-x{i}.x{i}*\n".format(i = i, l = instr[3])) # goto
i += 1
p.append("0-o.0*")
return " ".join(p)
if __name__ == "__main__":
def letsgo(mm):
mm_ports = mm2ports(mm)
print("\x1b[36m" + mm_ports + "\x1b[39m")
print()
print("\x1b[7mport version\x1b[27m")
run_source(mm_ports)
print()
print("\x1b[7mpython 3 version\x1b[27m")
run_mm(mm)
print()
mm_tuto = {
"registers": ["a", "b"], # don't declare a register named "l" !
"code":
[
[10, "+", "a", 20], # label 10, increment "a" and goto 20
[20, "-", "a", 30, 40], # label 20, decrement "b" and goto 30, but if "b" was 0 then goto 40 instead
[30, "p", "hey\n", 40], # label 30, print "hey\n" and goto 40
[40, ".", 0, 50], # label 40, goto 50
[50, "h", 0, 0] # label 50, halt
]
}
mm_mult = {
"registers": ["a", "b", "c", "e"],
"code":
[
# a = 5; b = 3; c = 0; e = 0;
[10, "+", "a", 20],
[20, "+", "a", 30],
[30, "+", "a", 40],
[40, "+", "a", 60],
[60, "+", "a", 200],
[200, "+", "b", 210],
[210, "+", "b", 220],
[220, "+", "b", 300],
# while (a > 0) { a--;
[300, "-", "a", 400, 800],
# c += b; e += b; b = 0;
[400, "-", "b", 410, 600],
[410, "+", "c", 420],
[420, "+", "e", 400],
# b += e; e = 0;
[600, "-", "e", 610, 700],
[610, "+", "b", 600],
# }
[700, ".", 0, 300],
# while (c > 0) { c--;
[800, "-", "c", 1000, 1200],
# printf("o"); fflush(stdout); }
[1000, "p", "o", 800],
# printf("\n"); fflush(stdout); return;
[1200, "p", "\n", 1210],
[1210, "h", 0, 0],
# it should have printed a*b times the "o"
]
}
letsgo(mm_mult)
# TODO LIST
# TODO: make this script beautiful
# TODO: add a parser for https://esolangs.org/wiki/Portable_Minsky_Machine_Notation
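For completeness, a hedged example of running the tutorial machine through both interpreters, mirroring what letsgo() does for mm_mult; it assumes mm_tuto from the __main__ block above is in scope.

# Run the tutorial Minsky machine both ways (assumes mm_tuto is defined as above).
run_mm(mm_tuto)                  # prints "hey" once, then halts
run_source(mm2ports(mm_tuto))    # same behaviour via the Ports translation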
avg_line_length: 25 | max_line_length: 107 | alphanum_fraction: 0.512857

hexsha: 490a93ad4036516644cfd9feaced6f0d89ce3373 | size: 23,719 | ext: py | lang: Python
max_stars:  tests/test_likelihoods.py | lexxx233/GPflow | 7285baa5201de7e2e71dd5f903b228132a310edc | ["Apache-2.0"] | 1 | 2018-08-22T06:34:59.000Z | 2018-08-22T06:34:59.000Z
max_issues: tests/test_likelihoods.py | lexxx233/GPflow | 7285baa5201de7e2e71dd5f903b228132a310edc | ["Apache-2.0"] | null | null | null
max_forks:  tests/test_likelihoods.py | lexxx233/GPflow | 7285baa5201de7e2e71dd5f903b228132a310edc | ["Apache-2.0"] | 2 | 2019-03-09T11:46:11.000Z | 2021-12-20T10:22:34.000Z
# Copyright 2017 the GPflow authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import six
import tensorflow as tf
from numpy.testing import assert_allclose
import gpflow
from gpflow import settings
from gpflow.test_util import GPflowTestCase
class LikelihoodSetup(object):
def __init__(self, likelihood, Y, tolerance):
self.likelihood, self.Y, self.tolerance = likelihood, Y, tolerance
self.is_analytic = six.get_unbound_function(likelihood.predict_density) is not \
six.get_unbound_function(gpflow.likelihoods.Likelihood.predict_density)
def getLikelihoodSetups(includeMultiClass=True, addNonStandardLinks=False):
test_setups = []
rng = np.random.RandomState(1)
for likelihoodClass in gpflow.likelihoods.Likelihood.__subclasses__():
if likelihoodClass == gpflow.likelihoods.MonteCarloLikelihood:
continue # abstract base class
if likelihoodClass == gpflow.likelihoods.Ordinal:
test_setups.append(
LikelihoodSetup(likelihoodClass(np.array([-1, 1])),
rng.randint(0, 3, (10, 2)), 1e-6))
elif likelihoodClass == gpflow.likelihoods.SwitchedLikelihood:
continue # switched likelihood tested separately
elif likelihoodClass == gpflow.likelihoods.MultiClass:
if includeMultiClass:
sample = rng.randn(10, 2)
# Multiclass needs a less tight tolerance due to presence of clipping.
tolerance = 1e-3
test_setups.append(
LikelihoodSetup(likelihoodClass(2),
np.argmax(sample, 1).reshape(-1, 1), tolerance))
else:
# most likelihoods follow this standard:
test_setups.append(
LikelihoodSetup(likelihoodClass(),
rng.rand(10, 2).astype(settings.float_type), 1e-6))
if addNonStandardLinks:
test_setups.append(LikelihoodSetup(gpflow.likelihoods.Poisson(invlink=tf.square),
rng.rand(10, 2).astype(settings.float_type), 1e-6))
test_setups.append(LikelihoodSetup(gpflow.likelihoods.Exponential(invlink=tf.square),
rng.rand(10, 2).astype(settings.float_type), 1e-6))
test_setups.append(LikelihoodSetup(gpflow.likelihoods.Gamma(invlink=tf.square),
rng.rand(10, 2).astype(settings.float_type), 1e-6))
def sigmoid(x):
return 1. / (1 + tf.exp(-x))
test_setups.append(LikelihoodSetup(gpflow.likelihoods.Bernoulli(invlink=sigmoid),
rng.rand(10, 2).astype(settings.float_type), 1e-6))
return test_setups
class TestPredictConditional(GPflowTestCase):
"""
    Here we make sure that the conditional_mean and conditional_variance functions
give the same result as the predict_mean_and_var function if the prediction
has no uncertainty.
"""
def setUp(self):
self.test_graph = tf.Graph()
def prepare(self):
test_setups = getLikelihoodSetups(addNonStandardLinks=True)
rng = np.random.RandomState(0)
F = tf.placeholder(settings.float_type)
F_data = rng.randn(10, 2).astype(settings.float_type)
feed = {F: F_data}
return test_setups, F, feed
def test_mean(self):
with self.test_context() as session:
test_setups, F, feed = self.prepare()
for test_setup in test_setups:
l = test_setup.likelihood
l.compile()
mu1 = session.run(l.conditional_mean(F), feed_dict=feed)
zero = F * 0.
mu2, _ = session.run(l.predict_mean_and_var(F, zero), feed_dict=feed)
assert_allclose(mu1, mu2, test_setup.tolerance, test_setup.tolerance)
def test_variance(self):
with self.test_context() as session:
test_setups, F, feed = self.prepare()
for test_setup in test_setups:
l = test_setup.likelihood
l.compile()
zero = F * 0.
v1 = session.run(l.conditional_variance(F), feed_dict=feed)
v2 = session.run(l.predict_mean_and_var(F, zero)[1], feed_dict=feed)
assert_allclose(v1, v2, atol=test_setup.tolerance)
def test_var_exp(self):
"""
Here we make sure that the variational_expectations gives the same result
as logp if the latent function has no uncertainty.
"""
with self.test_context() as session:
test_setups, F, feed = self.prepare()
for test_setup in test_setups:
l = test_setup.likelihood
y = test_setup.Y
l.compile()
r1 = session.run(l.logp(F, y), feed_dict=feed)
zero = F * 0.
r2 = session.run(
l.variational_expectations(F, zero, test_setup.Y), feed_dict=feed)
assert_allclose(r1, r2, atol=test_setup.tolerance, rtol=test_setup.tolerance)
class TestQuadrature(GPflowTestCase):
"""
Where quadrature methods have been overwritten, make sure the new code
does something close to the quadrature
"""
def setUp(self):
self.test_graph = tf.Graph()
self.rng = np.random.RandomState()
self.Fmu, self.Fvar, self.Y = self.rng.randn(3, 10, 2).astype(settings.float_type)
self.Fvar = 0.01 * (self.Fvar ** 2)
with self.test_context():
self.test_setups = getLikelihoodSetups(includeMultiClass=False)
def test_var_exp(self):
for test_setup in self.test_setups:
with self.test_context() as session:
# get all the likelihoods where variational expectations has been overwritten
if not test_setup.is_analytic:
continue
l = test_setup.likelihood
y = test_setup.Y
# 'build' the functions
l.compile()
F1 = l.variational_expectations(self.Fmu, self.Fvar, y)
F2 = gpflow.likelihoods.Likelihood.variational_expectations(
l, self.Fmu, self.Fvar, y)
# compile and run the functions:
F1 = session.run(F1)
F2 = session.run(F2)
assert_allclose(F1, F2, test_setup.tolerance, test_setup.tolerance)
def test_pred_density(self):
# get all the likelihoods where predict_density has been overwritten.
for test_setup in self.test_setups:
with self.test_context() as session:
if not test_setup.is_analytic:
continue
l = test_setup.likelihood
y = test_setup.Y
l.compile()
# 'build' the functions
F1 = l.predict_density(self.Fmu, self.Fvar, y)
F2 = gpflow.likelihoods.Likelihood.predict_density(l, self.Fmu, self.Fvar, y)
# compile and run the functions:
F1 = session.run(F1)
F2 = session.run(F2)
assert_allclose(F1, F2, test_setup.tolerance, test_setup.tolerance)
def test_pred_mean_and_var(self):
# get all the likelihoods where predict_density has been overwritten.
for test_setup in self.test_setups:
with self.test_context() as session:
if not test_setup.is_analytic:
continue
l = test_setup.likelihood
l.compile()
# 'build' the functions
F1 = l.predict_mean_and_var(self.Fmu, self.Fvar)
F2 = gpflow.likelihoods.Likelihood.predict_mean_and_var(l, self.Fmu, self.Fvar)
# compile and run the functions:
F1 = session.run(F1)
F2 = session.run(F2)
assert_allclose(F1, F2, test_setup.tolerance, test_setup.tolerance)
class TestMonteCarlo(GPflowTestCase):
def setUp(self):
self.test_graph = tf.Graph()
self.rng = np.random.RandomState()
self.rng.seed(1)
self.Fmu, self.Fvar, self.Y = self.rng.randn(3, 10, 1).astype(settings.float_type)
self.Fvar = 0.01 * (self.Fvar ** 2)
def test_var_exp(self):
with self.test_context() as session:
tf.set_random_seed(1)
l = gpflow.likelihoods.GaussianMC(0.3)
l.num_monte_carlo_points = 1000000
# 'build' the functions
l.compile()
F1 = l.variational_expectations(self.Fmu, self.Fvar, self.Y)
F2 = gpflow.likelihoods.Gaussian.variational_expectations(
l, self.Fmu, self.Fvar, self.Y)
# compile and run the functions:
F1 = session.run(F1)
F2 = session.run(F2)
assert_allclose(F1, F2, rtol=5e-4, atol=1e-4)
def test_pred_density(self):
with self.test_context() as session:
tf.set_random_seed(1)
l = gpflow.likelihoods.GaussianMC(0.3)
l.num_monte_carlo_points = 1000000
l.compile()
# 'build' the functions
F1 = l.predict_density(self.Fmu, self.Fvar, self.Y)
F2 = gpflow.likelihoods.Gaussian.predict_density(l, self.Fmu, self.Fvar, self.Y)
# compile and run the functions:
F1 = session.run(F1)
F2 = session.run(F2)
assert_allclose(F1, F2, rtol=5e-4, atol=1e-4)
def test_pred_mean_and_var(self):
with self.test_context() as session:
tf.set_random_seed(1)
l = gpflow.likelihoods.GaussianMC(0.3)
l.num_monte_carlo_points = 1000000
l.compile()
# 'build' the functions
F1 = l.predict_mean_and_var(self.Fmu, self.Fvar)
F2 = gpflow.likelihoods.Gaussian.predict_mean_and_var(l, self.Fmu, self.Fvar)
# compile and run the functions:
F1m, F1v = session.run(F1)
F2m, F2v = session.run(F2)
assert_allclose(F1m, F2m, rtol=5e-4, atol=1e-4)
assert_allclose(F1v, F2v, rtol=5e-4, atol=1e-4)
class TestSoftMax(GPflowTestCase):
def setUp(self):
self.test_graph = tf.Graph()
self.rng = np.random.RandomState(1)
def prepare(self, dimF, dimY, num=10):
feed = {}
def make_tensor(data, dtype=settings.float_type):
tensor = tf.placeholder(dtype)
feed[tensor] = data.astype(dtype)
return tensor
dF = np.vstack((self.rng.randn(num - 3, dimF), np.array([[-3., 0.], [3, 0.], [0., 0.]]))) if dimF == 2 else \
self.rng.randn(num, dimF)
dY = np.vstack((self.rng.randn(num - 3, dimY), np.ones((3, dimY)))) > 0
F = make_tensor(dF)
Y = make_tensor(dY, settings.int_type) # 0 or 1
return F, Y, feed
def test_y_shape_assert(self):
"""
SoftMax assumes the class is given as a label (not, e.g., one-hot
encoded), and hence just uses the first column of Y. To prevent
silent errors, there is a tf assertion that ensures Y only has one
dimension. This test checks that this assert works as intended.
"""
with self.test_context() as sess:
F, Y, feed = self.prepare(dimF=5, dimY=2)
l = gpflow.likelihoods.SoftMax(5)
l.compile()
try:
sess.run(l.logp(F, Y), feed_dict=feed)
except tf.errors.InvalidArgumentError as e:
assert "assertion failed" in e.message
def test_bernoulli_equivalence(self):
with self.test_context() as sess:
F, Y, feed = self.prepare(dimF=2, dimY=1)
Fvar = tf.exp(tf.stack([F[:, 1], -10.0 + tf.zeros(tf.shape(F)[0], dtype=F.dtype)], axis=1))
F = tf.stack([F[:, 0], tf.zeros(tf.shape(F)[0], dtype=F.dtype)], axis=1)
Ylabel = 1 - Y # We need the 1 - Y, as we need to pass the *label* to SoftMax
def logistic_link(x):
return 1.0 / (1.0 + tf.exp(-x))
ls = gpflow.likelihoods.SoftMax(2)
ls.num_monte_carlo_points = 10000000
ls.compile()
lb = gpflow.likelihoods.Bernoulli(invlink=logistic_link)
lb.num_gauss_hermite_points = 50
lb.compile()
ls_cm = sess.run(ls.conditional_mean(F), feed_dict=feed)[:, :1]
lb_cm = sess.run(lb.conditional_mean(F[:, :1]), feed_dict=feed)
ls_cv = sess.run(ls.conditional_variance(F), feed_dict=feed)[:, :1]
lb_cv = sess.run(lb.conditional_variance(F[:, :1]), feed_dict=feed)
ls_lp = sess.run(ls.logp(F, Ylabel), feed_dict=feed)
lb_lp = sess.run(lb.logp(F[:, :1], Y), feed_dict=feed)
assert_allclose(ls_cm, lb_cm)
assert_allclose(ls_cv, lb_cv)
assert_allclose(ls_lp, lb_lp)
ls_pm, ls_pv = sess.run(ls.predict_mean_and_var(F, Fvar), feed_dict=feed)
lb_pm, lb_pv = sess.run(lb.predict_mean_and_var(F[:, :1], Fvar[:, :1]), feed_dict=feed)
assert_allclose(ls_pm[:, 0, None], lb_pm, rtol=1e-3)
assert_allclose(ls_pv[:, 0, None], lb_pv, rtol=1e-3)
ls_ve = sess.run(ls.variational_expectations(F, Fvar, Ylabel), feed_dict=feed)
lb_ve = sess.run(lb.variational_expectations(F[:, :1], Fvar[:, :1], Y), feed_dict=feed)
assert_allclose(ls_ve[:, 0, None], lb_ve, rtol=1e-3)
class TestRobustMaxMulticlass(GPflowTestCase):
"""
Some specialized tests to the multiclass likelihood with RobustMax inverse link function.
"""
def setUp(self):
self.test_graph = tf.Graph()
def testSymmetric(self):
"""
This test is based on the observation that for
symmetric inputs the class predictions must have equal probability.
"""
with self.test_context() as session:
nClasses = 5
nPoints = 10
tolerance = 1e-4
epsilon = 1e-3
F = tf.placeholder(settings.float_type)
F_data = np.ones((nPoints, nClasses))
feed = {F: F_data}
rng = np.random.RandomState(1)
Y = rng.randint(nClasses, size=(nPoints, 1))
l = gpflow.likelihoods.MultiClass(nClasses)
l.invlink.epsilon = epsilon
l.compile()
mu, _ = session.run(l.predict_mean_and_var(F, F), feed_dict=feed)
pred = session.run(l.predict_density(F, F, Y), feed_dict=feed)
variational_expectations = session.run(
l.variational_expectations(F, F, Y), feed_dict=feed)
expected_mu = (1. / nClasses * (1. - epsilon) + (1. - 1. / nClasses) * \
epsilon / (nClasses - 1)) * np.ones((nPoints, 1))
self.assertTrue(np.allclose(mu, expected_mu, tolerance,
tolerance)) # assert_allclose() would complain about shape mismatch
            expected_log_density = np.log(expected_mu)
            self.assertTrue(np.allclose(pred, expected_log_density, 1e-3, 1e-3))
validation_variational_expectation = 1. / nClasses * np.log(1. - epsilon) + \
(1. - 1. / nClasses) * np.log(epsilon / (nClasses - 1))
assert_allclose(
variational_expectations,
np.ones((nPoints, 1)) * validation_variational_expectation,
tolerance, tolerance)
def testPredictDensity(self):
tol = 1e-4
num_points = 100
mock_prob = 0.73
class MockRobustMax(gpflow.likelihoods.RobustMax):
def prob_is_largest(self, Y, Fmu, Fvar, gh_x, gh_w):
return tf.ones((num_points, 1), dtype=settings.float_type) * mock_prob
with self.test_context() as session:
epsilon = 0.231
num_classes = 5
l = gpflow.likelihoods.MultiClass(
num_classes, invlink=MockRobustMax(num_classes, epsilon))
l.compile()
F = tf.placeholder(settings.float_type)
y = tf.placeholder(settings.float_type)
F_data = np.ones((num_points, num_classes))
rng = np.random.RandomState(1)
Y_data = rng.randint(num_classes, size=(num_points, 1))
feed = {F: F_data, y: Y_data}
pred = session.run(l.predict_density(F, F, y), feed_dict=feed)
expected_prediction = -0.5499780059
# ^^^ evaluated on calculator:
# log((1-\epsilon) * 0.73 + (1-0.73) * \epsilon/(num_classes -1))
assert_allclose(pred, expected_prediction, tol, tol)
def testEpsK1Changes(self):
"""
Checks that eps K1 changes when epsilon changes. This used to not happen and had to be manually changed.
"""
with self.test_context() as session:
initial_eps = 1e-3
num_classes = 5
rm = gpflow.likelihoods.RobustMax(num_classes, initial_eps)
expected_eps_k1 = initial_eps / (num_classes - 1.)
actual_eps_k1 = session.run(rm._eps_K1)
self.assertAlmostEqual(expected_eps_k1, actual_eps_k1)
new_eps = 0.412
rm.epsilon.assign(new_eps, session=session)
expected_eps_k2 = new_eps / (num_classes - 1.)
actual_eps_k2 = session.run(rm._eps_K1)
self.assertAlmostEqual(expected_eps_k2, actual_eps_k2)
class TestMulticlassIndexFix(GPflowTestCase):
"""
A regression test for a bug in multiclass likelihood.
"""
def testA(self):
with self.test_context():
mu = tf.placeholder(settings.float_type)
var = tf.placeholder(settings.float_type)
Y = tf.placeholder(tf.int32)
lik = gpflow.likelihoods.MultiClass(3)
ve = lik.variational_expectations(mu, var, Y)
tf.gradients(tf.reduce_sum(ve), mu)
class TestSwitchedLikelihood(GPflowTestCase):
"""
SwitchedLikelihood is separately tested here.
Here, we make sure the partition-stitch works fine.
"""
def setUp(self):
self.test_graph = tf.Graph()
with self.test_context():
rng = np.random.RandomState(1)
self.Y_list = [rng.randn(3, 2), rng.randn(4, 2), rng.randn(5, 2)]
self.F_list = [rng.randn(3, 2), rng.randn(4, 2), rng.randn(5, 2)]
self.Fvar_list = [np.exp(rng.randn(3, 2)), np.exp(rng.randn(4, 2)),
np.exp(rng.randn(5, 2))]
self.Y_label = [np.ones((3, 1)) * 0, np.ones((4, 1)) * 1, np.ones((5, 1)) * 2]
self.Y_perm = list(range(3 + 4 + 5))
rng.shuffle(self.Y_perm)
# shuffle the original data
self.Y_sw = np.hstack([
np.concatenate(self.Y_list),
np.concatenate(self.Y_label)])[self.Y_perm, :]
self.F_sw = np.concatenate(self.F_list)[self.Y_perm, :]
self.Fvar_sw = np.concatenate(self.Fvar_list)[self.Y_perm, :]
# likelihoods
self.likelihoods = [gpflow.likelihoods.Gaussian(),
gpflow.likelihoods.Gaussian(),
gpflow.likelihoods.Gaussian()]
for lik in self.likelihoods:
lik.variance = np.exp(rng.randn(1)).squeeze()
self.switched_likelihood = gpflow.likelihoods.SwitchedLikelihood(self.likelihoods)
def test_logp(self):
# switchedlikelihood
with self.test_context() as session:
self.switched_likelihood.compile()
switched_rslt = session.run(self.switched_likelihood.logp(self.F_sw, self.Y_sw))
rslts = []
for lik, y, f in zip(self.likelihoods, self.Y_list, self.F_list):
rslts.append(session.run(lik.logp(f, y)))
assert_allclose(switched_rslt, np.concatenate(rslts)[self.Y_perm, :])
def test_predict_density(self):
with self.test_context() as session:
self.switched_likelihood.compile()
# switchedlikelihood
switched_rslt = session.run(
self.switched_likelihood.predict_density(self.F_sw, self.Fvar_sw, self.Y_sw))
# likelihood
rslts = []
for lik, y, f, fvar in zip(self.likelihoods,
self.Y_list,
self.F_list,
self.Fvar_list):
rslts.append(session.run(lik.predict_density(f, fvar, y)))
assert_allclose(switched_rslt, np.concatenate(rslts)[self.Y_perm, :])
def test_variational_expectations(self):
# switchedlikelihood
with self.test_context() as session:
self.switched_likelihood.compile()
switched_rslt = session.run(
self.switched_likelihood.variational_expectations(
self.F_sw, self.Fvar_sw, self.Y_sw))
rslts = []
for lik, y, f, fvar in zip(self.likelihoods,
self.Y_list,
self.F_list,
self.Fvar_list):
rslts.append(session.run(lik.variational_expectations(f, fvar, y)))
assert_allclose(switched_rslt, np.concatenate(rslts)[self.Y_perm, :])
class TestSwitchedLikelihoodRegression(GPflowTestCase):
"""
A Regression test when using Switched likelihood: the number of latent
functions in a GP model must be equal to the number of columns in Y minus
one. The final column of Y is used to index the switch. If the number of
latent functions does not match, an exception will be raised.
"""
def setUp(self):
self.test_graph = tf.Graph()
with self.test_context():
rng = np.random.RandomState(1)
self.X = rng.rand(100, 1)
self.Y = np.hstack((np.random.randn(100, 1), np.random.randint(0, 3, (100, 1))))
self.likelihoods = [gpflow.likelihoods.StudentT(),
gpflow.likelihoods.StudentT(),
gpflow.likelihoods.StudentT()]
self.switched_likelihood = gpflow.likelihoods.SwitchedLikelihood(self.likelihoods)
def test_correct_num_latent(self):
with self.test_context():
m = gpflow.models.VGP(self.X, self.Y, kern=gpflow.kernels.Matern12(1),
likelihood=self.switched_likelihood, num_latent=1)
m.compute_log_likelihood() # should compute something!
def test_bad_num_latent(self):
with self.test_context():
m = gpflow.models.VGP(self.X, self.Y, kern=gpflow.kernels.Matern12(1),
likelihood=self.switched_likelihood, num_latent=2)
with self.assertRaises(tf.errors.InvalidArgumentError):
m.compute_log_likelihood() # should die
if __name__ == "__main__":
tf.test.main()
avg_line_length: 43.282847 | max_line_length: 117 | alphanum_fraction: 0.587967

hexsha: 88edc7019cc9ba092355d995e9170628996dc3d7 | size: 4,116 | ext: py | lang: Python
max_stars:  bot/cogs/moderation_cog.py | HerbyZ/CubeGod | d2b94b5ad29521258b220e47116a778fa0070693 | ["MIT"] | 1 | 2021-07-02T11:10:27.000Z | 2021-07-02T11:10:27.000Z
max_issues: bot/cogs/moderation_cog.py | HerbyZ/CubeGod | d2b94b5ad29521258b220e47116a778fa0070693 | ["MIT"] | null | null | null
max_forks:  bot/cogs/moderation_cog.py | HerbyZ/CubeGod | d2b94b5ad29521258b220e47116a778fa0070693 | ["MIT"] | null | null | null
from discord.ext import commands
import discord
from database.managers import UserManager
from ..webhooks.log_hook import log
class ModerationCog(commands.Cog):
def __init__(self, bot):
self.bot = bot
@commands.command('clear')
@commands.has_permissions(manage_messages=True)
async def clear(self, ctx, amount=100, channel: discord.TextChannel = None):
if channel is None:
channel = ctx.channel
await ctx.message.delete()
await channel.purge(limit=amount)
author = f'{ctx.message.author.name}#{ctx.message.author.discriminator}'
await log(f'Command !clear was used in channel {channel} by user {author} with amount {amount}')
@commands.command('kick')
@commands.has_permissions(kick_members=True)
async def kick(self, ctx, member: discord.Member, reason=None):
await ctx.message.delete()
await member.kick(reason=reason)
author = f'{ctx.message.author.name}#{ctx.message.author.discriminator}'
await log(f'Command !kick was used by {author} on user {member.nick}')
@commands.command('ban')
@commands.has_permissions(ban_members=True)
async def ban(self, ctx, member: discord.Member, reason=None):
author = ctx.message.author
author_name = f'{author.name}#{author.discriminator}'
member_name = f'{member.name}#{member.discriminator}'
await ctx.message.delete()
# Ban user in db
try:
user = UserManager.find_one(member.id)
except ValueError:
user = UserManager.create(member.id)
if not user.is_banned:
user.ban(reason)
else:
await author.send(f'User {member_name} is already banned (in database).')
# Ban user on server
await member.ban(reason=reason)
await log(f'Command !ban was used by {author_name} on user {member_name}')
@commands.command('unban')
@commands.has_permissions(ban_members=True)
async def unban(self, ctx, user_id):
author = ctx.message.author
try:
int(user_id)
except ValueError:
return await author.send('You should use user\'s id instead of his name')
try:
user = UserManager.find_one(user_id)
if not user.is_banned:
return await author.send(f'User is not banned.')
except ValueError:
await author.send(f'User not found in database, trying to unban on server...')
user = await self.bot.fetch_user(user_id)
await ctx.guild.unban(user)
        await ctx.send('User was unbanned')
@commands.command('getbans')
@commands.has_permissions(ban_members=True)
async def get_bans(self, ctx, user_id):
author = ctx.message.author
try:
int(user_id)
except ValueError:
return await author.send('You should use user\'s id instead of his name')
try:
user = UserManager.find_one(user_id)
except ValueError:
return await author.send(f'User with id {user_id} not found.')
bans = user.get_bans()
if len(bans) == 0:
return await author.send(f'User with id {user_id} has no bans.')
bans_str = ''
for ban in bans:
bans_str += f'**Date**: {ban.date}; **Is active**: {ban.is_active}; ' \
f'**Id**: {ban.id}; **Reason**: {ban.reason}.\n'
await author.send(f'Bans list for user with id {user_id}\n' + bans_str)
await log(f'User {author.name}#{author.discriminator} requested bans history for user with id {user_id}.')
@commands.command('say')
@commands.has_permissions(manage_messages=True)
async def say(self, ctx, channel: discord.TextChannel, *, text):
await ctx.message.delete()
await channel.send(text)
author = f'{ctx.message.author.name}#{ctx.message.author.discriminator}'
await log(f'Command !say was used in channel {channel} by user {author} with text:\n```{text}```')
def setup(bot):
bot.add_cog(ModerationCog(bot))
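A hypothetical bootstrap showing how a discord.py bot would load this cog through its setup() hook; the extension dotted path, command prefix and token are assumptions.

# Assumed discord.py 1.x-style loading of the cog defined above.
from discord.ext import commands
bot = commands.Bot(command_prefix='!')
bot.load_extension('bot.cogs.moderation_cog')   # calls setup(bot) above
bot.run('YOUR_TOKEN_HERE')                      # placeholder token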
avg_line_length: 34.588235 | max_line_length: 114 | alphanum_fraction: 0.629495

hexsha: f1f94f610abfcdc3c570aed78cf4dcac844ec9c2 | size: 2,698 | ext: py | lang: Python
max_stars:  toontown/coghq/DistributedLawbotBossGavelAI.py | CrankySupertoon01/Toontown-2 | 60893d104528a8e7eb4aced5d0015f22e203466d | ["MIT"] | 1 | 2021-02-13T22:40:50.000Z | 2021-02-13T22:40:50.000Z
max_issues: toontown/coghq/DistributedLawbotBossGavelAI.py | CrankySupertoonArchive/Toontown-2 | 60893d104528a8e7eb4aced5d0015f22e203466d | ["MIT"] | 1 | 2018-07-28T20:07:04.000Z | 2018-07-30T18:28:34.000Z
max_forks:  toontown/coghq/DistributedLawbotBossGavelAI.py | CrankySupertoonArchive/Toontown-2 | 60893d104528a8e7eb4aced5d0015f22e203466d | ["MIT"] | 2 | 2019-12-02T01:39:10.000Z | 2021-02-13T22:41:00.000Z
from direct.directnotify import DirectNotifyGlobal
from panda3d.core import *
from direct.distributed.DistributedObjectAI import DistributedObjectAI
from toontown.toonbase import ToontownGlobals
from otp.otpbase import OTPGlobals
from direct.fsm import FSM
class DistributedLawbotBossGavelAI(DistributedObjectAI, FSM.FSM):
notify = DirectNotifyGlobal.directNotify.newCategory('DistributedLawbotBossGavelAI')
def __init__(self, air, boss, index):
DistributedObjectAI.__init__(self, air)
FSM.FSM.__init__(self, 'DistributedLawbotBossGavelAI')
self.boss = boss
self.index = index
cn = CollisionNode('controls')
cs = CollisionSphere(0, -6, 0, 6)
cn.addSolid(cs)
self.goonShield = NodePath(cn)
self.goonShield.setPosHpr(*ToontownGlobals.LawbotBossGavelPosHprs[self.index])
self.avId = 0
self.objectId = 0
def getBossCogId(self):
return self.boss.doId
def getIndex(self):
return self.index
def setState(self, state):
self.request(state)
def d_setState(self, state):
newState = state
if state == 'On':
newState = 'N'
elif state == 'Off':
newState = 'F'
self.sendUpdate('setState', [newState])
def b_setState(self, state):
self.request(state)
self.d_setState(state)
def turnOn(self):
self.b_setState('On')
def requestControl(self):
avId = self.air.getAvatarIdFromSender()
if avId in self.boss.involvedToons and self.avId == 0:
craneId = self.__getCraneId(avId)
if craneId == 0:
self.request('Controlled', avId)
def requestFree(self):
avId = self.air.getAvatarIdFromSender()
if avId == self.avId:
self.request('Free')
def removeToon(self, avId):
if avId == self.avId:
self.request('Free')
def __getCraneId(self, avId):
        if self.boss and self.boss.cranes is not None:
for crane in self.boss.cranes:
if crane.avId == avId:
return crane.doId
return 0
def enterOn(self):
pass
    def exitOn(self):
pass
def enterOff(self):
self.goonShield.detachNode()
def exitOff(self):
pass
def enterControlled(self, avId):
self.avId = avId
self.d_setState('C')
def exitControlled(self):
if self.objectId:
obj = self.air.doId2do[self.objectId]
obj.request('Dropped', self.avId, self.doId)
def enterFree(self):
self.avId = 0
self.d_setState('F')
def exitFree(self):
pass
| 27.252525 | 88 | 0.613788 |
0325a2716a72b06c470eef132129dc61366e4ff4 | 1,379 | py | Python | core/dns_utils.py | exatel/ph0neutria | 38ee193f994c8f6408238b13b7e6cd66c4f4dbad | ["Apache-2.0"] | 277 | 2017-02-02T14:26:20.000Z | 2021-12-26T13:54:30.000Z | core/dns_utils.py | exatel/ph0neutria | 38ee193f994c8f6408238b13b7e6cd66c4f4dbad | ["Apache-2.0"] | 27 | 2016-11-30T12:50:36.000Z | 2020-04-12T06:47:09.000Z | core/dns_utils.py | exatel/ph0neutria | 38ee193f994c8f6408238b13b7e6cd66c4f4dbad | ["Apache-2.0"] | 65 | 2016-11-30T12:43:34.000Z | 2021-05-21T21:04:30.000Z |
#!/usr/bin/python3
from .config_utils import get_base_config
from .log_utils import get_module_logger
import DNS
import os
import sys
import time
import validators
CDIR = os.path.dirname(os.path.realpath(__file__))
ROOTDIR = os.path.abspath(os.path.join(CDIR, os.pardir))
BASECONFIG = get_base_config(ROOTDIR)
LOGGING = get_module_logger(__name__)
DNS.defaults['server'] = ['8.8.8.8', '8.8.4.4']
DNS.defaults['timeout'] = 5
def forward_dns_lookup(host_name):
"""Perform a DNS lookup of a FQDN.
Params:
- host_name: (type: string) FQDN to perform lookup of.
Returns:
- result: (type: string) resulting IP address.
"""
try:
ip_list = DNS.dnslookup(host_name, 'A')
if len(ip_list) > 0:
for ip_addr in ip_list:
if validators.ipv4(ip_addr):
return ip_addr
except BaseException:
LOGGING.warning('DNS lookup of {0} failed.'.format(host_name))
return None
return None
def resolve_dns(host_name):
"""Perform a DNS lookup of a FQDN.
Params:
- host_name: (type: string) FQDN to perform lookup of.
Returns:
- result: (type: string) resulting IP address.
"""
if validators.ipv4(host_name):
return host_name
ip_addr = forward_dns_lookup(host_name)
if ip_addr is not None:
return ip_addr
return False
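# Minimal usage sketch (not part of the original module): resolve_dns() accepts
# either a FQDN or an IPv4 literal, so both calls below should return an address
# string. The host name used here is only an illustrative assumption.
if __name__ == '__main__':
    for target in ('example.com', '8.8.8.8'):
        print('{0} -> {1}'.format(target, resolve_dns(target)))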
| 20.893939 | 70 | 0.655547 |
de38cc41b034e827bf4c113b3a89b43d5899df59 | 242 | py | Python | giftrun-0.1/setup.py | olemb/giftrun | 003248357e3b632afee61f64d4feede2db59665d | ["MIT"] | 4 | 2017-02-23T13:10:20.000Z | 2019-05-27T18:25:13.000Z | giftrun-0.1/setup.py | olemb/giftrun | 003248357e3b632afee61f64d4feede2db59665d | ["MIT"] | null | null | null | giftrun-0.1/setup.py | olemb/giftrun | 003248357e3b632afee61f64d4feede2db59665d | ["MIT"] | 1 | 2021-12-07T11:57:35.000Z | 2021-12-07T11:57:35.000Z |
import sys
import os
from distutils.core import setup
setup(name='giftrun',
url='http://www.cs.uit.no/~olemb/',
author='Ole Martin Bjorndalen',
author_email='olemb@stud.cs.uit.no',
version='0.1',
license='MIT')
| 22 | 42 | 0.640496 |
e3c5be8b04b0c052846f77f5bd718004c7db596a | 3,563 | py | Python | infer.py | ssktotoro/neuro | 2d6d4cd060ee368d67c3437cac2b64eeb7d7180c | ["Apache-2.0"] | null | null | null | infer.py | ssktotoro/neuro | 2d6d4cd060ee368d67c3437cac2b64eeb7d7180c | ["Apache-2.0"] | 3 | 2020-11-09T06:07:13.000Z | 2021-05-12T03:16:38.000Z | infer.py | ssktotoro/neuro | 2d6d4cd060ee368d67c3437cac2b64eeb7d7180c | ["Apache-2.0"] | null | null | null |
import nibabel as nib
import numpy as np
from training.generator_coords import CoordsGenerator
import torch
class Predictor:
"""Only useful for serving... other methods would use a dataloader"""
def __init__(
self, model, volume_shape, subvolume_shape, n_subvolumes, n_classes
):
"""Docs."""
self.model = model
self.volume_shape = volume_shape
self.subvolume_shape = subvolume_shape
self.n_subvolumes = n_subvolumes
self.n_classes = n_classes
self.generator = CoordsGenerator(
self.volume_shape, self.subvolume_shape
)
if len(self.generator.get_coordinates(mode="test")) > n_subvolumes:
raise ValueError(
"n_subvolumes must be at least {coords_len}".format(
coords_len=len(self.generator.get_coordinates(mode="test"))
)
)
device_name = "cuda:0" if torch.cuda.is_available() else "cpu"
self.device = torch.device(device_name)
def generate_coords(self):
"""Generate coordinate for prediction"""
coords_list = []
for i in range(self.n_subvolumes):
coords = self.generator.get_coordinates(mode="test")
            if i >= len(coords):
                # The deterministic test coordinates are exhausted; request a
                # fresh set from the generator for the remaining subvolumes.
                coords = self.generator.get_coordinates()
                coords_list.append(coords)
else:
coords_list.append(np.expand_dims(coords[i], 0))
return coords_list
def preprocess_image(self, img):
"""Unit interval preprocessing"""
img = (img - img.min()) / (img.max() - img.min())
new_img = np.zeros(self.volume_shape)
new_img[: img.shape[0], : img.shape[1], : img.shape[2]] = img
return new_img
def predict(self, image_path):
"""Predict segmentation given an image_path"""
img = nib.load(image_path)
img = img.get_fdata()
normalized_img = self.preprocess_image(img)
coords_list = self.generate_coords()
one_hot_predicted_segmentation = torch.zeros(
tuple(np.insert(self.volume_shape, 0, self.n_classes)),
dtype=torch.uint8,
).to(self.device)
self.model.eval()
with torch.no_grad():
for coords in coords_list:
input_slice = np.expand_dims(
normalized_img[
coords[0][0][0] : coords[0][0][1],
coords[0][1][0] : coords[0][1][1],
coords[0][2][0] : coords[0][2][1],
],
0,
)
torch_slice = torch.from_numpy(
np.expand_dims(input_slice, 0).astype(np.float32)
).to(self.device)
_, predicted = torch.max(
torch.nn.functional.log_softmax(
self.model(torch_slice), dim=1
),
1,
)
for j in range(predicted.shape[0]):
c_j = coords[j]
for c in range(self.n_classes):
one_hot_predicted_segmentation[
c,
c_j[0, 0] : c_j[0, 1],
c_j[1, 0] : c_j[1, 1],
c_j[2, 0] : c_j[2, 1],
] += (predicted[j] == c)
predicted_segmentation = torch.max(
one_hot_predicted_segmentation, 0
)[1]
return predicted_segmentation
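# Minimal usage sketch (illustrative only; the checkpoint path, shapes and class
# count below are assumptions, not values taken from this repository):
#
#   model = torch.load("meshnet_checkpoint.pth", map_location="cpu")
#   predictor = Predictor(model, volume_shape=(256, 256, 256),
#                         subvolume_shape=(38, 38, 38),
#                         n_subvolumes=512, n_classes=3)
#   segmentation = predictor.predict("subject_T1.nii.gz")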
| 35.989899 | 79 | 0.521471 |
b8cef1da21f93fc9f82b34bf5cbdc39e707046fd | 773 | py | Python | almc/src/utils/scripts/wiki_scraper.py | almeswe/almc | 0a9238b5844d2fd4c03e56453df7b59111a000b5 | ["MIT"] | 1 | 2022-01-01T22:18:57.000Z | 2022-01-01T22:18:57.000Z | almc/src/utils/scripts/wiki_scraper.py | almeswe/almc | 0a9238b5844d2fd4c03e56453df7b59111a000b5 | ["MIT"] | null | null | null | almc/src/utils/scripts/wiki_scraper.py | almeswe/almc | 0a9238b5844d2fd4c03e56453df7b59111a000b5 | ["MIT"] | null | null | null |
import requests
from bs4 import BeautifulSoup
url = 'https://en.wikipedia.org/wiki/X86_instruction_listings'
response = requests.get(url, headers = {
'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/97.0.4692.99 Safari/537.36'
})
if response.status_code != 200:
raise Exception('Code was not 200...')
soup = BeautifulSoup(response.text, 'html.parser')
data = []
for tr in soup.find_all('tr'):
    span = tr.find('span', {'class': 'monospaced'})
    if span is not None:
        data.append(span.text)
print(f'Found {len(data)} items')
with open('results.txt', 'w', encoding='utf-8') as file:
for line in data:
file.write(f'{line}\n')
print('Done')
| 30.92 | 131 | 0.69599 |
355ef0cb84e6ba6c8d47dacef569b86b81d0556a | 132 | py | Python | pyrevolve/tol/__init__.py | braj29/robo_swimmers | b3c3fa91976884095eb6b5e67844167598ec573d | ["Apache-1.1"] | 24 | 2017-06-19T20:25:15.000Z | 2022-01-03T16:27:34.000Z | pyrevolve/tol/__init__.py | Ravi-Bossema/revolve | 46ece6f7dc137afcc0ddcde163ffcfe9548bd1ad | ["Apache-1.1"] | 51 | 2016-04-19T16:01:34.000Z | 2021-09-17T19:07:10.000Z | pyrevolve/tol/__init__.py | Ravi-Bossema/revolve | 46ece6f7dc137afcc0ddcde163ffcfe9548bd1ad | ["Apache-1.1"] | 37 | 2017-02-21T23:20:44.000Z | 2022-01-18T16:40:17.000Z |
from __future__ import absolute_import
from .logger import logger
from .logger import output_console
from .logger import log_debug
| 22 | 38 | 0.848485 |
73b51f521a9559489c7c1be410a78847cabe4e91 | 6,788 | py | Python | Proper/proper/prop_rectangle.py | RupertDodkins/medis | bdb1f00fb93506da2a1f251bc6780e70e97a16c5 | ["MIT"] | 1 | 2021-06-25T17:35:56.000Z | 2021-06-25T17:35:56.000Z | Proper/proper/prop_rectangle.py | RupertDodkins/medis | bdb1f00fb93506da2a1f251bc6780e70e97a16c5 | ["MIT"] | null | null | null | Proper/proper/prop_rectangle.py | RupertDodkins/medis | bdb1f00fb93506da2a1f251bc6780e70e97a16c5 | ["MIT"] | 2 | 2018-12-08T15:05:13.000Z | 2019-08-08T17:28:24.000Z |
# Copyright 2016, 2017 California Institute of Technology
# Users must agree to abide by the restrictions listed in the
# file "LegalStuff.txt" in the PROPER library directory.
#
# PROPER developed at Jet Propulsion Laboratory/California Inst. Technology
# Original IDL version by John Krist
# Python translation by Navtej Saini, with Luis Marchen and Nikta Amiri
import proper
import numpy as np
def prop_rectangle(wf, xsize, ysize, xc = np.nan, yc = np.nan, **kwargs):
"""Return an image containing a filled rectangle with antialiased edges.
By default, a clear rectangle (one inside, zero outside) is assumed.
Typically used to draw apertures or obscurations (like spiders). The
    rectangles are drawn antialiased, so that the edge values correspond to the
fraction of the pixel area covered by the rectangle.
Parameters
----------
wf : obj
WaveFront class object
xsize : float
X width of the rectangle in meters (unless norm is specified, in which
case this is the size relative to the beam diameter).
ysize : float
Y width of the rectangle in meters (unless norm is specified, in which
case this is the size relative to the beam diameter).
    xc : float
        X-center of the rectangle relative to the center of the beam in the
        wavefront array, in meters (unless norm is specified, in which case
        it is relative to the beam radius). Defaults to the center of the beam.
    yc : float
        Y-center of the rectangle relative to the center of the beam in the
        wavefront array, in meters (unless norm is specified, in which case
        it is relative to the beam radius). Defaults to the center of the beam.
        (0,0) is the center of a pixel.
Returns
-------
image : numpy ndarray
Returns an image array containing an antialiased rectangular mask with
the same dimensions as the wavefront array.
Other Parameters
----------------
DARK : bool
Specifies that the rectangle is filled with zeros and is 1.0 outside,
rather than the default of zero outside and 1.0 inside.
NORM : bool
Specifies that the rectangle dimensions and position are specified
relative to the beam radius.
ROTATION : float
        Specifies the angle in degrees counter-clockwise to rotate the rectangle
about its center.
"""
# grid size, beam radius and sampling
ngrid = proper.prop_get_gridsize(wf)
beamradius = proper.prop_get_beamradius(wf)
sampling = proper.prop_get_sampling(wf)
mag = 11
# beam radius in pixels
pr = beamradius / sampling
# get keyword argument values
if ("DARK" in kwargs and kwargs["DARK"]):
dark = kwargs["DARK"]
else:
dark = False
if ("NORM" in kwargs and kwargs["NORM"]):
norm = kwargs["NORM"]
else:
norm = False
if "ROTATION" in kwargs:
rotation = kwargs["ROTATION"]
else:
rotation = 0.0
# Set xcpix and ycpix values
if np.isnan(xc):
xcpix = int(ngrid/2)
else:
if norm:
xcpix = xc * pr + int(ngrid/2)
else:
xcpix = xc/sampling + int(ngrid/2)
if np.isnan(yc):
ycpix = int(ngrid/2)
else:
if norm:
ycpix = yc * pr + int(ngrid/2)
else:
ycpix = yc/sampling + int(ngrid/2)
# Set xradpix and yradpix
if norm:
xradpix = 0.5 * xsize * pr
yradpix = 0.5 * ysize * pr
else:
xradpix = 0.5 * xsize / sampling
yradpix = 0.5 * ysize / sampling
# Rotation angle in radians
angle_rad = rotation * np.pi / 180.
xp0 = np.array([-xradpix, -xradpix, xradpix, xradpix])
yp0 = np.array([-yradpix, yradpix, yradpix, -yradpix])
nvert = 4
xp = xp0 * np.cos(angle_rad) - yp0 * np.sin(angle_rad) + xcpix
yp = xp0 * np.sin(angle_rad) + yp0 * np.cos(angle_rad) + ycpix
image = np.zeros([ngrid, ngrid], dtype = np.float64)
left = np.where(yp == np.min(yp))
left = left[0][np.where(xp[left] == np.min(xp[left]))[0]]
left = left[0]
if left != nvert -1:
leftnext = left + 1
else:
leftnext = 0
right = left
if right != 0:
rightnext = right - 1
else:
rightnext = nvert - 1
if int(np.round(np.min(yp))) < 0:
imin = 0
else:
imin = int(np.round(np.min(yp)))
if int(np.round(np.max(yp))) > ngrid:
imax = ngrid
else:
imax = int(np.round(np.max(yp))) + 1
for ypix in range(imin, imax):
for ysub in range(0, mag):
y = ypix - 0.5 + (0.5 + ysub)/mag
if y < yp[left]:
continue
if y > np.max(yp):
break
if y >= yp[leftnext]:
left = leftnext
if left != nvert-1:
leftnext = left + 1
else:
leftnext = 0
if y >= yp[rightnext]:
right = rightnext
if right != 0:
rightnext = right - 1
else:
rightnext = nvert - 1
leftdy = yp[leftnext] - yp[left]
if leftdy != 0:
leftdx = xp[leftnext] - xp[left]
xleft = leftdx/leftdy * (y-yp[left]) + xp[left]
else:
xleft = xp[left]
rightdy = yp[rightnext] - yp[right]
if rightdy != 0:
rightdx = xp[rightnext] - xp[right]
xright = rightdx/rightdy * (y - yp[right]) + xp[right]
else:
xright = xp[right]
xleftpix = int(np.round(xleft))
xrightpix = int(np.round(xright))
if xleftpix != xrightpix:
if (xleftpix >= 0 and xleftpix < ngrid):
image[ypix,xleftpix] = image[ypix,xleftpix] + mag * ((xleftpix + 0.5) - xleft)
if (xrightpix >= 0 and xrightpix < ngrid):
image[ypix,xrightpix] = image[ypix,xrightpix] + mag * (xright - (xrightpix - 0.5))
if (xrightpix - xleftpix > 1 and xleftpix + 1 < ngrid and xrightpix > 0):
if xleftpix+1 < 0:
imin = 0
else:
imin = xleftpix+1
if xrightpix >= ngrid:
imax = ngrid - 1
else:
imax = xrightpix
image[ypix,imin:imax] = image[ypix,imin:imax] + mag
else:
if xleftpix >= 0 and xleftpix < ngrid:
image[ypix,xleftpix] = image[ypix,xleftpix] + mag * (xright - xleft)
image = image / float(mag)**2
if dark:
image = 1.0 - image
return image
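# Minimal usage sketch (illustrative; assumes ``wfo`` is an existing PROPER
# WaveFront object, e.g. one returned by proper.prop_begin, and that the sizes
# below are in meters):
#
#   spider = prop_rectangle(wfo, 0.02, 1.0, ROTATION=30.0, DARK=True)
#   proper.prop_multiply(wfo, spider)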
| 30.303571 | 102 | 0.551414 |
c4ebee7bd1533648ebc8a85449968a0c4ef863b5 | 196,919 | py | Python | neutron_lbaas/tests/unit/db/loadbalancer/test_db_loadbalancerv2.py | mail2nsrajesh/neutron-lbaas | 997a4556a07f1c830c4d1d84ae0c54570ceab169 | ["Apache-2.0"] | null | null | null | neutron_lbaas/tests/unit/db/loadbalancer/test_db_loadbalancerv2.py | mail2nsrajesh/neutron-lbaas | 997a4556a07f1c830c4d1d84ae0c54570ceab169 | ["Apache-2.0"] | null | null | null | neutron_lbaas/tests/unit/db/loadbalancer/test_db_loadbalancerv2.py | mail2nsrajesh/neutron-lbaas | 997a4556a07f1c830c4d1d84ae0c54570ceab169 | ["Apache-2.0"] | null | null | null |
# Copyright (c) 2014 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import contextlib
import copy
import mock
from neutron.api import extensions
from neutron.common import config
import neutron.db.l3_db # noqa
from neutron.tests.unit.db import test_db_base_plugin_v2
from neutron_lib import constants as n_constants
from neutron_lib import context
from neutron_lib import exceptions as n_exc
from neutron_lib.plugins import constants
from neutron_lib.plugins import directory
from oslo_config import cfg
from oslo_utils import uuidutils
import six
import testtools
import webob.exc
from neutron_lbaas._i18n import _
from neutron_lbaas.common.cert_manager import cert_manager
from neutron_lbaas.common import exceptions
from neutron_lbaas.db.loadbalancer import loadbalancer_dbv2
from neutron_lbaas.db.loadbalancer import models
from neutron_lbaas.drivers.logging_noop import driver as noop_driver
import neutron_lbaas.extensions
from neutron_lbaas.extensions import healthmonitor_max_retries_down
from neutron_lbaas.extensions import l7
from neutron_lbaas.extensions import lb_graph
from neutron_lbaas.extensions import lb_network_vip
from neutron_lbaas.extensions import loadbalancerv2
from neutron_lbaas.extensions import sharedpools
from neutron_lbaas.services.loadbalancer import constants as lb_const
from neutron_lbaas.services.loadbalancer import plugin as loadbalancer_plugin
from neutron_lbaas.tests import base
DB_CORE_PLUGIN_CLASS = 'neutron.db.db_base_plugin_v2.NeutronDbPluginV2'
DB_LB_PLUGIN_CLASS = (
"neutron_lbaas.services.loadbalancer."
"plugin.LoadBalancerPluginv2"
)
NOOP_DRIVER_CLASS = ('neutron_lbaas.drivers.logging_noop.driver.'
'LoggingNoopLoadBalancerDriver')
extensions_path = ':'.join(neutron_lbaas.extensions.__path__)
_subnet_id = "0c798ed8-33ba-11e2-8b28-000c291c4d14"
class LbaasTestMixin(object):
resource_keys = list(loadbalancerv2.RESOURCE_ATTRIBUTE_MAP.keys())
resource_keys.extend(l7.RESOURCE_ATTRIBUTE_MAP.keys())
resource_keys.extend(lb_graph.RESOURCE_ATTRIBUTE_MAP.keys())
resource_keys.extend(lb_network_vip.EXTENDED_ATTRIBUTES_2_0.keys())
resource_keys.extend(healthmonitor_max_retries_down.
EXTENDED_ATTRIBUTES_2_0.keys())
resource_prefix_map = dict(
(k, loadbalancerv2.LOADBALANCERV2_PREFIX)
for k in resource_keys)
def _get_loadbalancer_optional_args(self):
return ('description', 'vip_address', 'admin_state_up', 'name',
'listeners', 'vip_network_id', 'vip_subnet_id')
def _create_loadbalancer(self, fmt, subnet_id,
expected_res_status=None, **kwargs):
data = {'loadbalancer': {'vip_subnet_id': subnet_id,
'tenant_id': self._tenant_id}}
args = self._get_loadbalancer_optional_args()
for arg in args:
if arg in kwargs:
if kwargs[arg] is not None:
data['loadbalancer'][arg] = kwargs[arg]
else:
data['loadbalancer'].pop(arg, None)
lb_req = self.new_create_request('loadbalancers', data, fmt)
lb_res = lb_req.get_response(self.ext_api)
if expected_res_status:
self.assertEqual(expected_res_status, lb_res.status_int)
return lb_res
def _create_graph(self, fmt, subnet_id, expected_res_status=None,
**kwargs):
data = {'vip_subnet_id': subnet_id, 'tenant_id': self._tenant_id}
args = self._get_loadbalancer_optional_args()
for arg in args:
if arg in kwargs and kwargs[arg] is not None:
data[arg] = kwargs[arg]
data = {'graph': {'loadbalancer': data, 'tenant_id': self._tenant_id}}
lb_req = self.new_create_request('graphs', data, fmt)
lb_res = lb_req.get_response(self.ext_api)
if expected_res_status:
self.assertEqual(expected_res_status, lb_res.status_int)
return lb_res
def _get_listener_optional_args(self):
return ('name', 'description', 'default_pool_id', 'loadbalancer_id',
'connection_limit', 'admin_state_up',
'default_tls_container_ref', 'sni_container_refs')
def _create_listener(self, fmt, protocol, protocol_port,
loadbalancer_id=None, default_pool_id=None,
expected_res_status=None, **kwargs):
data = {'listener': {'protocol': protocol,
'protocol_port': protocol_port,
'tenant_id': self._tenant_id}}
if loadbalancer_id:
data['listener']['loadbalancer_id'] = loadbalancer_id
if default_pool_id:
data['listener']['default_pool_id'] = default_pool_id
args = self._get_listener_optional_args()
for arg in args:
if arg in kwargs and kwargs[arg] is not None:
data['listener'][arg] = kwargs[arg]
listener_req = self.new_create_request('listeners', data, fmt)
listener_res = listener_req.get_response(self.ext_api)
if expected_res_status:
self.assertEqual(expected_res_status, listener_res.status_int)
return listener_res
def _get_pool_optional_args(self):
return 'name', 'description', 'admin_state_up', 'session_persistence'
def _create_pool(self, fmt, protocol, lb_algorithm, listener_id=None,
loadbalancer_id=None, expected_res_status=None, **kwargs):
data = {'pool': {'protocol': protocol,
'lb_algorithm': lb_algorithm,
'tenant_id': self._tenant_id}}
if listener_id:
data['pool']['listener_id'] = listener_id
if loadbalancer_id:
data['pool']['loadbalancer_id'] = loadbalancer_id
args = self._get_pool_optional_args()
for arg in args:
if arg in kwargs and kwargs[arg] is not None:
data['pool'][arg] = kwargs[arg]
pool_req = self.new_create_request('pools', data, fmt)
pool_res = pool_req.get_response(self.ext_api)
if expected_res_status:
self.assertEqual(expected_res_status, pool_res.status_int)
return pool_res
def _get_member_optional_args(self):
return 'weight', 'admin_state_up', 'name'
def _create_member(self, fmt, pool_id, address, protocol_port, subnet_id,
expected_res_status=None, **kwargs):
data = {'member': {'address': address,
'protocol_port': protocol_port,
'subnet_id': subnet_id,
'tenant_id': self._tenant_id}}
args = self._get_member_optional_args()
for arg in args:
if arg in kwargs and kwargs[arg] is not None:
data['member'][arg] = kwargs[arg]
member_req = self.new_create_request('pools',
data,
fmt=fmt,
id=pool_id,
subresource='members')
member_res = member_req.get_response(self.ext_api)
if expected_res_status:
self.assertEqual(expected_res_status, member_res.status_int)
return member_res
def _get_healthmonitor_optional_args(self):
return ('weight', 'admin_state_up', 'expected_codes', 'url_path',
'http_method', 'name', 'max_retries_down')
def _create_healthmonitor(self, fmt, pool_id, type, delay, timeout,
max_retries, expected_res_status=None, **kwargs):
data = {'healthmonitor': {'type': type,
'delay': delay,
'timeout': timeout,
'max_retries': max_retries,
'pool_id': pool_id,
'tenant_id': self._tenant_id}}
args = self._get_healthmonitor_optional_args()
for arg in args:
if arg in kwargs and kwargs[arg] is not None:
data['healthmonitor'][arg] = kwargs[arg]
hm_req = self.new_create_request('healthmonitors', data, fmt=fmt)
hm_res = hm_req.get_response(self.ext_api)
if expected_res_status:
self.assertEqual(expected_res_status, hm_res.status_int)
return hm_res
def _add_optional_args(self, optional_args, data, **kwargs):
for arg in optional_args:
if arg in kwargs and kwargs[arg] is not None:
data[arg] = kwargs[arg]
def _get_l7policy_optional_args(self):
return ('name', 'description', 'redirect_pool_id',
'redirect_url', 'admin_state_up', 'position')
def _create_l7policy(self, fmt, listener_id, action,
expected_res_status=None, **kwargs):
data = {'l7policy': {'listener_id': listener_id,
'action': action,
'tenant_id': self._tenant_id}}
optional_args = self._get_l7policy_optional_args()
self._add_optional_args(optional_args, data['l7policy'], **kwargs)
l7policy_req = self.new_create_request('l7policies', data, fmt)
l7policy_res = l7policy_req.get_response(self.ext_api)
if expected_res_status:
self.assertEqual(l7policy_res.status_int, expected_res_status)
return l7policy_res
def _get_l7rule_optional_args(self):
return ('invert', 'key', 'admin_state_up')
def _create_l7policy_rule(self, fmt, l7policy_id, type, compare_type,
value, expected_res_status=None, **kwargs):
data = {'rule': {'type': type,
'compare_type': compare_type,
'value': value,
'tenant_id': self._tenant_id}}
optional_args = self._get_l7rule_optional_args()
self._add_optional_args(optional_args, data['rule'], **kwargs)
rule_req = self.new_create_request('l7policies', data, fmt,
id=l7policy_id,
subresource='rules')
rule_res = rule_req.get_response(self.ext_api)
if expected_res_status:
self.assertEqual(rule_res.status_int, expected_res_status)
return rule_res
@contextlib.contextmanager
def loadbalancer(self, fmt=None, subnet=None, no_delete=False, **kwargs):
if not fmt:
fmt = self.fmt
with test_db_base_plugin_v2.optional_ctx(
subnet, self.subnet) as tmp_subnet:
res = self._create_loadbalancer(fmt,
tmp_subnet['subnet']['id'],
**kwargs)
if res.status_int >= webob.exc.HTTPClientError.code:
exc = webob.exc.HTTPClientError(
explanation=_("Unexpected error code: %s") %
res.status_int
)
exc.code = res.status_int
exc.status_code = res.status_int
raise exc
lb = self.deserialize(fmt or self.fmt, res)
yield lb
if not no_delete:
self._delete('loadbalancers', lb['loadbalancer']['id'])
@contextlib.contextmanager
def graph(self, fmt=None, subnet=None, no_delete=False, **kwargs):
if not fmt:
fmt = self.fmt
with test_db_base_plugin_v2.optional_ctx(
subnet, self.subnet) as tmp_subnet:
res = self._create_graph(fmt, tmp_subnet['subnet']['id'],
**kwargs)
if res.status_int >= webob.exc.HTTPClientError.code:
exc = webob.exc.HTTPClientError(
explanation=_("Unexpected error code: %s") %
res.status_int
)
exc.code = res.status_int
exc.status_code = res.status_int
raise exc
graph = self.deserialize(fmt or self.fmt, res)
yield graph
if not no_delete:
# delete loadbalancer children if this was a loadbalancer
# graph create call
lb = graph['graph']['loadbalancer']
for listener in lb.get('listeners', []):
pool = listener.get('default_pool')
if pool:
hm = pool.get('healthmonitor')
if hm:
self._delete('healthmonitors', hm['id'])
members = pool.get('members', [])
for member in members:
self._delete('pools', pool['id'],
subresource='members',
sub_id=member['id'])
self._delete('pools', pool['id'])
policies = listener.get('l7policies', [])
for policy in policies:
r_pool = policy.get('redirect_pool')
if r_pool:
r_hm = r_pool.get('healthmonitor')
if r_hm:
self._delete('healthmonitors', r_hm['id'])
r_members = r_pool.get('members', [])
for r_member in r_members:
self._delete('pools', r_pool['id'],
subresource='members',
sub_id=r_member['id'])
self._delete('pools', r_pool['id'])
self._delete('l7policies', policy['id'])
self._delete('listeners', listener['id'])
self._delete('loadbalancers', lb['id'])
@contextlib.contextmanager
def listener(self, fmt=None, protocol='HTTP', loadbalancer_id=None,
protocol_port=80, default_pool_id=None, no_delete=False,
**kwargs):
if not fmt:
fmt = self.fmt
if loadbalancer_id and default_pool_id:
res = self._create_listener(fmt, protocol, protocol_port,
loadbalancer_id=loadbalancer_id,
default_pool_id=default_pool_id,
**kwargs)
elif loadbalancer_id:
res = self._create_listener(fmt, protocol, protocol_port,
loadbalancer_id=loadbalancer_id,
**kwargs)
else:
res = self._create_listener(fmt, protocol, protocol_port,
default_pool_id=default_pool_id,
**kwargs)
if res.status_int >= webob.exc.HTTPClientError.code:
raise webob.exc.HTTPClientError(
explanation=_("Unexpected error code: %s") % res.status_int
)
listener = self.deserialize(fmt or self.fmt, res)
yield listener
if not no_delete:
self._delete('listeners', listener['listener']['id'])
@contextlib.contextmanager
def pool(self, fmt=None, protocol='HTTP', lb_algorithm='ROUND_ROBIN',
no_delete=False, listener_id=None,
loadbalancer_id=None, **kwargs):
if not fmt:
fmt = self.fmt
if listener_id and loadbalancer_id:
res = self._create_pool(fmt,
protocol=protocol,
lb_algorithm=lb_algorithm,
listener_id=listener_id,
loadbalancer_id=loadbalancer_id,
**kwargs)
elif listener_id:
res = self._create_pool(fmt,
protocol=protocol,
lb_algorithm=lb_algorithm,
listener_id=listener_id,
**kwargs)
else:
res = self._create_pool(fmt,
protocol=protocol,
lb_algorithm=lb_algorithm,
loadbalancer_id=loadbalancer_id,
**kwargs)
if res.status_int >= webob.exc.HTTPClientError.code:
raise webob.exc.HTTPClientError(
explanation=_("Unexpected error code: %s") % res.status_int
)
pool = self.deserialize(fmt or self.fmt, res)
yield pool
if not no_delete:
self._delete('pools', pool['pool']['id'])
@contextlib.contextmanager
def member(self, fmt=None, pool_id='pool1id', address='127.0.0.1',
protocol_port=80, subnet=None, no_delete=False,
**kwargs):
if not fmt:
fmt = self.fmt
subnet = subnet or self.test_subnet
with test_db_base_plugin_v2.optional_ctx(
subnet, self.subnet) as tmp_subnet:
res = self._create_member(fmt,
pool_id=pool_id,
address=address,
protocol_port=protocol_port,
subnet_id=tmp_subnet['subnet']['id'],
**kwargs)
if res.status_int >= webob.exc.HTTPClientError.code:
raise webob.exc.HTTPClientError(
explanation=_("Unexpected error code: %s") % res.status_int
)
member = self.deserialize(fmt or self.fmt, res)
yield member
if not no_delete:
self._delete('pools', id=pool_id, subresource='members',
sub_id=member['member']['id'])
@contextlib.contextmanager
def healthmonitor(self, fmt=None, pool_id='pool1id', type='TCP', delay=1,
timeout=1, max_retries=2, no_delete=False, **kwargs):
if not fmt:
fmt = self.fmt
res = self._create_healthmonitor(fmt,
pool_id=pool_id,
type=type,
delay=delay,
timeout=timeout,
max_retries=max_retries,
**kwargs)
if res.status_int >= webob.exc.HTTPClientError.code:
raise webob.exc.HTTPClientError(
explanation=_("Unexpected error code: %s") % res.status_int
)
healthmonitor = self.deserialize(fmt or self.fmt, res)
yield healthmonitor
if not no_delete:
del_req = self.new_delete_request(
'healthmonitors', fmt=fmt,
id=healthmonitor['healthmonitor']['id'])
del_res = del_req.get_response(self.ext_api)
self.assertEqual(webob.exc.HTTPNoContent.code, del_res.status_int)
@contextlib.contextmanager
def l7policy(self, listener_id, fmt=None,
action=lb_const.L7_POLICY_ACTION_REJECT,
no_delete=False, **kwargs):
if not fmt:
fmt = self.fmt
res = self._create_l7policy(fmt,
listener_id=listener_id,
action=action,
**kwargs)
if res.status_int >= webob.exc.HTTPClientError.code:
raise webob.exc.HTTPClientError(
explanation=_("Unexpected error code: %s") % res.status_int
)
l7policy = self.deserialize(fmt or self.fmt, res)
yield l7policy
if not no_delete:
self.plugin.db.update_status(context.get_admin_context(),
models.L7Policy,
l7policy['l7policy']['id'],
n_constants.ACTIVE)
del_req = self.new_delete_request(
'l7policies',
fmt=fmt,
id=l7policy['l7policy']['id'])
del_res = del_req.get_response(self.ext_api)
self.assertEqual(del_res.status_int,
webob.exc.HTTPNoContent.code)
@contextlib.contextmanager
def l7policy_rule(self, l7policy_id, fmt=None, value='value1',
type=lb_const.L7_RULE_TYPE_HOST_NAME,
compare_type=lb_const.L7_RULE_COMPARE_TYPE_EQUAL_TO,
no_delete=False, **kwargs):
if not fmt:
fmt = self.fmt
res = self._create_l7policy_rule(fmt,
l7policy_id=l7policy_id,
type=type,
compare_type=compare_type,
value=value,
**kwargs)
if res.status_int >= webob.exc.HTTPClientError.code:
raise webob.exc.HTTPClientError(
explanation=_("Unexpected error code: %s") % res.status_int
)
rule = self.deserialize(fmt or self.fmt, res)
yield rule
if not no_delete:
self.plugin.db.update_status(context.get_admin_context(),
models.L7Rule,
rule['rule']['id'],
n_constants.ACTIVE)
del_req = self.new_delete_request(
'l7policies',
fmt=fmt,
id=l7policy_id,
subresource='rules',
sub_id=rule['rule']['id'])
del_res = del_req.get_response(self.ext_api)
self.assertEqual(del_res.status_int,
webob.exc.HTTPNoContent.code)
class ExtendedPluginAwareExtensionManager(object):
def __init__(self, extension_aliases):
self.extension_aliases = extension_aliases
def get_resources(self):
extensions_list = []
if 'shared_pools' in self.extension_aliases:
extensions_list.append(sharedpools)
if 'l7' in self.extension_aliases:
extensions_list.append(l7)
if 'lb-graph' in self.extension_aliases:
extensions_list.append(lb_graph)
if 'lb_network_vip' in self.extension_aliases:
extensions_list.append(lb_network_vip)
if 'hm_max_retries_down' in self.extension_aliases:
extensions_list.append(healthmonitor_max_retries_down)
for extension in extensions_list:
if 'RESOURCE_ATTRIBUTE_MAP' in extension.__dict__:
loadbalancerv2.RESOURCE_ATTRIBUTE_MAP.update(
extension.RESOURCE_ATTRIBUTE_MAP)
if 'SUB_RESOURCE_ATTRIBUTE_MAP' in extension.__dict__:
loadbalancerv2.SUB_RESOURCE_ATTRIBUTE_MAP.update(
extension.SUB_RESOURCE_ATTRIBUTE_MAP)
if 'EXTENDED_ATTRIBUTES_2_0' in extension.__dict__:
for key in loadbalancerv2.RESOURCE_ATTRIBUTE_MAP.keys():
loadbalancerv2.RESOURCE_ATTRIBUTE_MAP[key].update(
extension.EXTENDED_ATTRIBUTES_2_0.get(key, {}))
return loadbalancerv2.Loadbalancerv2.get_resources()
def get_actions(self):
return []
def get_request_extensions(self):
return []
class LbaasPluginDbTestCase(LbaasTestMixin, base.NeutronDbPluginV2TestCase):
def setUp(self, core_plugin=None, lb_plugin=None, lbaas_provider=None,
ext_mgr=None):
service_plugins = {'lb_plugin_name': DB_LB_PLUGIN_CLASS}
if not lbaas_provider:
lbaas_provider = (
constants.LOADBALANCERV2 +
':lbaas:' + NOOP_DRIVER_CLASS + ':default')
# override the default service provider
self.set_override([lbaas_provider])
# removing service-type because it resides in neutron and tests
        # don't care
LBPlugin = loadbalancer_plugin.LoadBalancerPluginv2
sea_index = None
for index, sea in enumerate(LBPlugin.supported_extension_aliases):
if sea == 'service-type':
sea_index = index
if sea_index:
del LBPlugin.supported_extension_aliases[sea_index]
super(LbaasPluginDbTestCase, self).setUp(
ext_mgr=ext_mgr,
service_plugins=service_plugins
)
if not ext_mgr:
self.plugin = loadbalancer_plugin.LoadBalancerPluginv2()
# This is necessary because the automatic extension manager
# finding algorithm below will find the loadbalancerv2
            # extension and fail to initialize the main API router with
# extensions' resources
ext_mgr = ExtendedPluginAwareExtensionManager(
LBPlugin.supported_extension_aliases)
app = config.load_paste_app('extensions_test_app')
self.ext_api = extensions.ExtensionMiddleware(app, ext_mgr=ext_mgr)
get_lbaas_agent_patcher = mock.patch(
'neutron_lbaas.agent_scheduler'
'.LbaasAgentSchedulerDbMixin.get_agent_hosting_loadbalancer')
mock_lbaas_agent = mock.MagicMock()
get_lbaas_agent_patcher.start().return_value = mock_lbaas_agent
mock_lbaas_agent.__getitem__.return_value = {'host': 'host'}
self._subnet_id = _subnet_id
def _update_loadbalancer_api(self, lb_id, data):
req = self.new_update_request('loadbalancers', data, lb_id)
resp = req.get_response(self.ext_api)
body = self.deserialize(self.fmt, req.get_response(self.ext_api))
return resp, body
def _delete_loadbalancer_api(self, lb_id):
req = self.new_delete_request('loadbalancers', lb_id)
resp = req.get_response(self.ext_api)
return resp
def _get_loadbalancer_api(self, lb_id):
req = self.new_show_request('loadbalancers', lb_id)
resp = req.get_response(self.ext_api)
body = self.deserialize(self.fmt, resp)
return resp, body
def _list_loadbalancers_api(self):
req = self.new_list_request('loadbalancers')
resp = req.get_response(self.ext_api)
body = self.deserialize(self.fmt, resp)
return resp, body
def _get_loadbalancer_stats_api(self, lb_id):
req = self.new_show_request('loadbalancers', lb_id,
subresource='stats')
resp = req.get_response(self.ext_api)
body = self.deserialize(self.fmt, resp)
return resp, body
def _get_loadbalancer_statuses_api(self, lb_id):
req = self.new_show_request('loadbalancers', lb_id,
subresource='statuses')
resp = req.get_response(self.ext_api)
body = self.deserialize(self.fmt, resp)
return resp, body
def _validate_statuses(self, lb_id, listener_id=None,
l7policy_id=None, l7rule_id=None,
pool_id=None, member_id=None, hm_id=None,
member_disabled=False, listener_disabled=False,
l7policy_disabled=False, l7rule_disabled=False,
loadbalancer_disabled=False):
resp, body = self._get_loadbalancer_statuses_api(lb_id)
lb_statuses = body['statuses']['loadbalancer']
self.assertEqual(n_constants.ACTIVE,
lb_statuses['provisioning_status'])
if loadbalancer_disabled:
self.assertEqual(lb_const.DISABLED,
lb_statuses['operating_status'])
else:
self.assertEqual(lb_const.ONLINE,
lb_statuses['operating_status'])
if listener_id:
listener_statuses = None
for listener in lb_statuses['listeners']:
if listener['id'] == listener_id:
listener_statuses = listener
self.assertIsNotNone(listener_statuses)
self.assertEqual(n_constants.ACTIVE,
listener_statuses['provisioning_status'])
if listener_disabled:
self.assertEqual(lb_const.DISABLED,
listener_statuses['operating_status'])
else:
self.assertEqual(lb_const.ONLINE,
listener_statuses['operating_status'])
if l7policy_id:
policy_statuses = None
for policy in listener_statuses['l7policies']:
if policy['id'] == l7policy_id:
policy_statuses = policy
self.assertIsNotNone(policy_statuses)
self.assertEqual(n_constants.ACTIVE,
policy_statuses['provisioning_status'])
if l7rule_id:
rule_statuses = None
for rule in policy_statuses['rules']:
if rule['id'] == l7rule_id:
rule_statuses = rule
self.assertIsNotNone(rule_statuses)
self.assertEqual(n_constants.ACTIVE,
rule_statuses['provisioning_status'])
if pool_id:
pool_statuses = None
for pool in lb_statuses['pools']:
if pool['id'] == pool_id:
pool_statuses = pool
self.assertIsNotNone(pool_statuses)
self.assertEqual(n_constants.ACTIVE,
pool_statuses['provisioning_status'])
self.assertEqual(lb_const.ONLINE,
pool_statuses['operating_status'])
if member_id:
member_statuses = None
for member in pool_statuses['members']:
if member['id'] == member_id:
member_statuses = member
self.assertIsNotNone(member_statuses)
self.assertEqual(n_constants.ACTIVE,
member_statuses['provisioning_status'])
if member_disabled:
self.assertEqual(lb_const.DISABLED,
member_statuses["operating_status"])
else:
self.assertEqual(lb_const.ONLINE,
member_statuses['operating_status'])
if hm_id:
hm_status = pool_statuses['healthmonitor']
self.assertEqual(n_constants.ACTIVE,
hm_status['provisioning_status'])
def test_assert_modification_allowed(self):
mock_lb = mock.MagicMock()
mock_lb.provisioning_status = n_constants.PENDING_UPDATE
mock_lb.id = uuidutils.generate_uuid()
LBPluginDBv2 = loadbalancer_dbv2.LoadBalancerPluginDbv2()
self.assertRaises(
loadbalancerv2.StateInvalid,
LBPluginDBv2.assert_modification_allowed, mock_lb)
# Check that this is a sub-exception of conflict to return 409
self.assertRaises(
n_exc.Conflict,
LBPluginDBv2.assert_modification_allowed, mock_lb)
class LbaasLoadBalancerTests(LbaasPluginDbTestCase):
def test_create_loadbalancer(self, **extras):
expected = {
'name': 'vip1',
'description': '',
'admin_state_up': True,
'provisioning_status': n_constants.ACTIVE,
'operating_status': lb_const.ONLINE,
'tenant_id': self._tenant_id,
'listeners': [],
'pools': [],
'provider': 'lbaas'
}
expected.update(extras)
with self.subnet() as subnet:
expected['vip_subnet_id'] = subnet['subnet']['id']
name = expected['name']
with self.loadbalancer(name=name, subnet=subnet, **extras) as lb:
lb_id = lb['loadbalancer']['id']
for k in ('id', 'vip_address', 'vip_subnet_id'):
self.assertTrue(lb['loadbalancer'].get(k, None))
expected['vip_port_id'] = lb['loadbalancer']['vip_port_id']
actual = dict((k, v)
for k, v in lb['loadbalancer'].items()
if k in expected)
self.assertEqual(expected, actual)
self._validate_statuses(lb_id)
return lb
def test_create_loadbalancer_with_vip_address(self):
self.test_create_loadbalancer(vip_address='10.0.0.7')
def test_create_loadbalancer_with_vip_address_outside_subnet(self):
with testtools.ExpectedException(webob.exc.HTTPClientError):
self.test_create_loadbalancer(vip_address='9.9.9.9')
def test_create_loadbalancer_with_no_vip_network_or_subnet(self):
with testtools.ExpectedException(webob.exc.HTTPClientError):
self.test_create_loadbalancer(
vip_network_id=None,
vip_subnet_id=None,
expected_res_status=400)
def test_create_loadbalancer_with_vip_network_id(self):
expected = {
'name': 'vip1',
'description': '',
'admin_state_up': True,
'provisioning_status': n_constants.ACTIVE,
'operating_status': lb_const.ONLINE,
'tenant_id': self._tenant_id,
'listeners': [],
'pools': [],
'provider': 'lbaas'
}
with self.subnet() as subnet:
expected['vip_subnet_id'] = subnet['subnet']['id']
name = expected['name']
extras = {
'vip_network_id': subnet['subnet']['network_id'],
'vip_subnet_id': None
}
with self.loadbalancer(name=name, subnet=subnet, **extras) as lb:
lb_id = lb['loadbalancer']['id']
for k in ('id', 'vip_address', 'vip_subnet_id'):
self.assertTrue(lb['loadbalancer'].get(k, None))
expected['vip_port_id'] = lb['loadbalancer']['vip_port_id']
actual = dict((k, v)
for k, v in lb['loadbalancer'].items()
if k in expected)
self.assertEqual(expected, actual)
self._validate_statuses(lb_id)
return lb
def test_update_loadbalancer(self):
name = 'new_loadbalancer'
description = 'a crazy loadbalancer'
expected_values = {'name': name,
'description': description,
'admin_state_up': False,
'provisioning_status': n_constants.ACTIVE,
'operating_status': lb_const.ONLINE,
'listeners': [],
'provider': 'lbaas'}
with self.subnet() as subnet:
expected_values['vip_subnet_id'] = subnet['subnet']['id']
with self.loadbalancer(subnet=subnet) as loadbalancer:
expected_values['vip_port_id'] = (
loadbalancer['loadbalancer']['vip_port_id'])
loadbalancer_id = loadbalancer['loadbalancer']['id']
data = {'loadbalancer': {'name': name,
'description': description,
'admin_state_up': False}}
resp, res = self._update_loadbalancer_api(loadbalancer_id,
data)
for k in expected_values:
self.assertEqual(expected_values[k],
res['loadbalancer'][k])
self._validate_statuses(loadbalancer_id,
loadbalancer_disabled=True)
def test_delete_loadbalancer(self):
with self.subnet() as subnet:
with self.loadbalancer(subnet=subnet,
no_delete=True) as loadbalancer:
loadbalancer_id = loadbalancer['loadbalancer']['id']
resp = self._delete_loadbalancer_api(loadbalancer_id)
self.assertEqual(webob.exc.HTTPNoContent.code, resp.status_int)
def test_delete_loadbalancer_when_loadbalancer_in_use(self):
with self.subnet() as subnet:
with self.loadbalancer(subnet=subnet) as loadbalancer:
lb_id = loadbalancer['loadbalancer']['id']
with self.listener(loadbalancer_id=lb_id):
ctx = context.get_admin_context()
self.assertRaises(loadbalancerv2.EntityInUse,
self.plugin.delete_loadbalancer,
ctx, lb_id)
self._validate_statuses(lb_id)
def test_show_loadbalancer(self):
name = 'lb_show'
description = 'lb_show description'
vip_address = '10.0.0.10'
expected_values = {'name': name,
'description': description,
'vip_address': '10.0.0.10',
'admin_state_up': True,
'provisioning_status': n_constants.ACTIVE,
'operating_status': lb_const.ONLINE,
'listeners': [],
'provider': 'lbaas'}
with self.subnet() as subnet:
vip_subnet_id = subnet['subnet']['id']
expected_values['vip_subnet_id'] = vip_subnet_id
with self.loadbalancer(subnet=subnet, name=name,
description=description,
vip_address=vip_address) as lb:
lb_id = lb['loadbalancer']['id']
expected_values['id'] = lb_id
expected_values['vip_port_id'] = (
lb['loadbalancer']['vip_port_id'])
resp, body = self._get_loadbalancer_api(lb_id)
for k in expected_values:
self.assertEqual(expected_values[k],
body['loadbalancer'][k])
def test_list_loadbalancers(self):
name = 'lb_show'
description = 'lb_show description'
vip_address = '10.0.0.10'
expected_values = {'name': name,
'description': description,
'vip_address': '10.0.0.10',
'admin_state_up': True,
'provisioning_status': n_constants.ACTIVE,
'operating_status': lb_const.ONLINE,
'listeners': [],
'provider': 'lbaas'}
with self.subnet() as subnet:
vip_subnet_id = subnet['subnet']['id']
expected_values['vip_subnet_id'] = vip_subnet_id
with self.loadbalancer(subnet=subnet, name=name,
description=description,
vip_address=vip_address) as lb:
lb_id = lb['loadbalancer']['id']
expected_values['id'] = lb_id
expected_values['vip_port_id'] = (
lb['loadbalancer']['vip_port_id'])
resp, body = self._list_loadbalancers_api()
self.assertEqual(1, len(body['loadbalancers']))
for k in expected_values:
self.assertEqual(expected_values[k],
body['loadbalancers'][0][k])
def test_list_loadbalancers_with_sort_emulated(self):
with self.subnet() as subnet:
with self.loadbalancer(subnet=subnet, name='lb1') as lb1:
with self.loadbalancer(subnet=subnet, name='lb2') as lb2:
with self.loadbalancer(subnet=subnet, name='lb3') as lb3:
self._test_list_with_sort(
'loadbalancer',
(lb1, lb2, lb3),
[('name', 'asc')]
)
def test_list_loadbalancers_with_pagination_emulated(self):
with self.subnet() as subnet:
with self.loadbalancer(subnet=subnet, name='lb1') as lb1:
with self.loadbalancer(subnet=subnet, name='lb2') as lb2:
with self.loadbalancer(subnet=subnet, name='lb3') as lb3:
self._test_list_with_pagination(
'loadbalancer',
(lb1, lb2, lb3),
('name', 'asc'), 2, 2
)
def test_list_loadbalancers_with_pagination_reverse_emulated(self):
with self.subnet() as subnet:
with self.loadbalancer(subnet=subnet, name='lb1') as lb1:
with self.loadbalancer(subnet=subnet, name='lb2') as lb2:
with self.loadbalancer(subnet=subnet, name='lb3') as lb3:
self._test_list_with_pagination_reverse(
'loadbalancer',
(lb1, lb2, lb3),
('name', 'asc'), 2, 2
)
def test_get_loadbalancer_stats(self):
expected_values = {'stats': {lb_const.STATS_TOTAL_CONNECTIONS: 0,
lb_const.STATS_ACTIVE_CONNECTIONS: 0,
lb_const.STATS_OUT_BYTES: 0,
lb_const.STATS_IN_BYTES: 0}}
with self.subnet() as subnet:
with self.loadbalancer(subnet=subnet) as lb:
lb_id = lb['loadbalancer']['id']
resp, body = self._get_loadbalancer_stats_api(lb_id)
self.assertEqual(expected_values, body)
def test_show_loadbalancer_with_listeners(self):
name = 'lb_show'
description = 'lb_show description'
vip_address = '10.0.0.10'
expected_values = {'name': name,
'description': description,
'vip_address': '10.0.0.10',
'admin_state_up': True,
'provisioning_status': n_constants.ACTIVE,
'operating_status': lb_const.ONLINE,
'listeners': []}
with self.subnet() as subnet:
vip_subnet_id = subnet['subnet']['id']
expected_values['vip_subnet_id'] = vip_subnet_id
with self.loadbalancer(subnet=subnet, name=name,
description=description,
vip_address=vip_address) as lb:
lb_id = lb['loadbalancer']['id']
expected_values['id'] = lb_id
with self.listener(loadbalancer_id=lb_id,
protocol_port=80) as listener1:
listener1_id = listener1['listener']['id']
expected_values['listeners'].append({'id': listener1_id})
with self.listener(loadbalancer_id=lb_id,
protocol_port=81) as listener2:
listener2_id = listener2['listener']['id']
expected_values['listeners'].append(
{'id': listener2_id})
resp, body = self._get_loadbalancer_api(lb_id)
for k in expected_values:
self.assertEqual(expected_values[k],
body['loadbalancer'][k])
def test_port_delete_via_port_api(self):
port = {
'id': 'my_port_id',
'device_owner': n_constants.DEVICE_OWNER_LOADBALANCERV2
}
ctx = context.get_admin_context()
port['device_owner'] = n_constants.DEVICE_OWNER_LOADBALANCERV2
myloadbalancers = [{'name': 'lb1'}]
plugin = mock.Mock()
directory.add_plugin(n_constants.CORE, plugin)
self.plugin.db.get_loadbalancers = (
mock.Mock(return_value=myloadbalancers))
plugin._get_port.return_value = port
self.assertRaises(n_exc.ServicePortInUse,
self.plugin.db.prevent_lbaasv2_port_deletion,
ctx,
port['id'])
class LoadBalancerDelegateVIPCreation(LbaasPluginDbTestCase):
def setUp(self):
driver_patcher = mock.patch.object(
noop_driver.LoggingNoopLoadBalancerManager,
'allocates_vip', new_callable=mock.PropertyMock)
driver_patcher.start().return_value = True
super(LoadBalancerDelegateVIPCreation, self).setUp()
def test_create_loadbalancer(self):
expected = {
'name': 'vip1',
'description': '',
'admin_state_up': True,
'provisioning_status': n_constants.ACTIVE,
'operating_status': lb_const.ONLINE,
'tenant_id': self._tenant_id,
'listeners': [],
'pools': [],
'provider': 'lbaas'
}
with self.subnet() as subnet:
expected['vip_subnet_id'] = subnet['subnet']['id']
name = expected['name']
with self.loadbalancer(name=name, subnet=subnet) as lb:
lb_id = lb['loadbalancer']['id']
for k in ('id', 'vip_subnet_id'):
self.assertTrue(lb['loadbalancer'].get(k, None))
self.assertIsNone(lb['loadbalancer'].get('vip_address'))
expected['vip_port_id'] = lb['loadbalancer']['vip_port_id']
actual = dict((k, v)
for k, v in lb['loadbalancer'].items()
if k in expected)
self.assertEqual(expected, actual)
self._validate_statuses(lb_id)
return lb
def test_delete_loadbalancer(self):
with self.subnet() as subnet:
with self.loadbalancer(subnet=subnet, no_delete=True) as lb:
lb_id = lb['loadbalancer']['id']
acontext = context.get_admin_context()
db_port = self.plugin.db._core_plugin.create_port(
acontext,
{'port': {'network_id': subnet['subnet']['network_id'],
'name': '', 'admin_state_up': True,
'device_id': lb_id, 'device_owner': '',
'mac_address': '', 'fixed_ips': [],
'tenant_id': acontext.tenant_id}})
port_id = db_port['id']
self.addCleanup(self.plugin.db._core_plugin.delete_port,
acontext, port_id)
self.plugin.db.update_loadbalancer(
acontext, lb_id,
{'loadbalancer': {'vip_port_id': port_id}})
self.plugin.db.delete_loadbalancer(
acontext, lb_id, delete_vip_port=True)
port = self.plugin.db._core_plugin.get_port(acontext, port_id)
self.assertIsNotNone(port)
class TestLoadBalancerGraphCreation(LbaasPluginDbTestCase):
def _assert_graphs_equal(self, expected_graph, observed_graph):
observed_graph_copy = copy.deepcopy(observed_graph)
for k in ('id', 'vip_address', 'vip_subnet_id'):
self.assertTrue(observed_graph_copy.get(k, None))
expected_graph['id'] = observed_graph_copy['id']
expected_graph['vip_port_id'] = observed_graph_copy['vip_port_id']
expected_listeners = expected_graph.pop('listeners', [])
observed_listeners = observed_graph_copy.pop('listeners', [])
actual = dict((k, v)
for k, v in observed_graph_copy.items()
if k in expected_graph)
self.assertEqual(expected_graph, actual)
for observed_listener in observed_listeners:
self.assertTrue(observed_listener.get('id'))
listener_id = observed_listener.pop('id')
default_pool = observed_listener.get('default_pool')
l7_policies = observed_listener.get('l7policies')
if default_pool:
self.assertTrue(default_pool.get('id'))
default_pool.pop('id')
hm = default_pool.get('healthmonitor')
if hm:
self.assertTrue(hm.get('id'))
hm.pop('id')
for member in default_pool.get('members', []):
self.assertTrue(member.get('id'))
member.pop('id')
if l7_policies:
for policy in l7_policies:
self.assertTrue(policy.get('id'))
self.assertTrue(policy.get('listener_id'))
self.assertEqual(listener_id, policy.get('listener_id'))
policy.pop('id')
policy.pop('listener_id')
r_pool = policy.get('redirect_pool')
rules = policy.get('rules')
if r_pool:
self.assertTrue(r_pool.get('id'))
r_pool.pop('id')
r_hm = r_pool.get('healthmonitor')
if r_hm:
self.assertTrue(r_hm.get('id'))
r_hm.pop('id')
for r_member in r_pool.get('members', []):
self.assertTrue(r_member.get('id'))
r_member.pop('id')
if rules:
for rule in rules:
self.assertTrue(rule.get('id'))
rule.pop('id')
self.assertIn(observed_listener, expected_listeners)
def _validate_graph_statuses(self, graph):
lb_id = graph['id']
for listener in graph.get('listeners', []):
kwargs = {'listener_id': listener['id']}
pool = listener.get('default_pool')
if pool:
kwargs['pool_id'] = pool['id']
                hm = pool.get('healthmonitor')
if hm:
kwargs['hm_id'] = hm['id']
for member in pool.get('members', []):
kwargs['member_id'] = member['id']
self._validate_statuses(lb_id, **kwargs)
if pool.get('members'):
continue
self._validate_statuses(lb_id, **kwargs)
def _get_expected_lb(self, expected_listeners):
expected_lb = {
'name': 'vip1',
'description': '',
'admin_state_up': True,
'provisioning_status': n_constants.ACTIVE,
'operating_status': lb_const.ONLINE,
'tenant_id': self._tenant_id,
'listeners': expected_listeners,
'provider': 'lbaas'
}
return expected_lb
def _get_listener_bodies(self, name='listener1', protocol_port=80,
create_default_pool=None,
expected_default_pool=None,
create_l7_policies=None,
expected_l7_policies=None):
create_listener = {
'name': name,
'protocol_port': protocol_port,
'protocol': lb_const.PROTOCOL_HTTP,
'tenant_id': self._tenant_id,
}
if create_default_pool:
create_listener['default_pool'] = create_default_pool
if create_l7_policies:
create_listener['l7policies'] = create_l7_policies
expected_listener = {
'description': '',
'default_tls_container_ref': None,
'sni_container_refs': [],
'connection_limit': -1,
'admin_state_up': True,
'l7policies': []
}
expected_listener.update(create_listener)
if expected_default_pool:
expected_listener['default_pool'] = expected_default_pool
expected_listener['default_tls_container_id'] = None
expected_listener['l7policies'] = expected_l7_policies or []
return create_listener, expected_listener
def _get_pool_bodies(self, name='pool1', create_members=None,
expected_members=None, create_hm=None,
expected_hm=None):
create_pool = {
'name': name,
'protocol': lb_const.PROTOCOL_HTTP,
'lb_algorithm': lb_const.LB_METHOD_ROUND_ROBIN,
'tenant_id': self._tenant_id
}
if create_members:
create_pool['members'] = create_members
if create_hm:
create_pool['healthmonitor'] = create_hm
expected_pool = {
'description': '',
'session_persistence': None,
'members': [],
'admin_state_up': True
}
expected_pool.update(create_pool)
if expected_members:
expected_pool['members'] = expected_members
if expected_hm:
expected_pool['healthmonitor'] = expected_hm
return create_pool, expected_pool
def _get_member_bodies(self, name='member1'):
create_member = {
'name': name,
'address': '10.0.0.1',
'protocol_port': 80,
'subnet_id': self._subnet_id,
'tenant_id': self._tenant_id
}
expected_member = {
'weight': 1,
'admin_state_up': True,
}
expected_member.update(create_member)
return create_member, expected_member
def _get_hm_bodies(self, name='hm1'):
create_hm = {
'name': name,
'type': lb_const.HEALTH_MONITOR_HTTP,
'delay': 1,
'timeout': 1,
'max_retries': 1,
'tenant_id': self._tenant_id,
'max_retries_down': 1
}
expected_hm = {
'http_method': 'GET',
'url_path': '/',
'expected_codes': '200',
'admin_state_up': True
}
expected_hm.update(create_hm)
return create_hm, expected_hm
def _get_l7policies_bodies(self, name='l7policy_name', create_rules=None,
expected_rules=None, create_r_pool=None,
expected_r_pool=None):
c_policy = {
'name': name,
'action': lb_const.L7_POLICY_ACTION_REDIRECT_TO_POOL,
'admin_state_up': True,
'tenant_id': self._tenant_id
}
if create_r_pool:
c_policy['redirect_pool'] = create_r_pool
if create_rules:
c_policy['rules'] = create_rules
e_policy = {
'description': '',
'position': 1
}
e_policy.update(c_policy)
if expected_r_pool:
e_policy['redirect_pool'] = expected_r_pool
if expected_rules:
e_policy['rules'] = expected_rules
create_l7policies = [c_policy]
expected_l7policies = [e_policy]
return create_l7policies, expected_l7policies
def _get_l7rules_bodes(self):
create_rule = {
'compare_type': lb_const.L7_RULE_COMPARE_TYPE_EQUAL_TO,
'type': lb_const.L7_RULE_TYPE_HOST_NAME,
'invert': False,
'value': 'localhost',
'admin_state_up': True,
'tenant_id': self._tenant_id
}
create_rules = [create_rule]
expected_rule = {
'key': None
}
expected_rule.update(create_rule)
expected_rules = [expected_rule]
return create_rules, expected_rules
def create_graph(self, expected_lb_graph, listeners):
with self.subnet() as subnet:
expected_lb_graph['vip_subnet_id'] = subnet['subnet']['id']
for listener in listeners:
for member in listener.get('default_pool',
{}).get('members', []):
member['subnet_id'] = subnet['subnet']['id']
for listener in expected_lb_graph.get('listeners', []):
for member in listener.get('default_pool',
{}).get('members', []):
member['subnet_id'] = subnet['subnet']['id']
name = expected_lb_graph.get('name')
kwargs = {'name': name, 'subnet': subnet, 'listeners': listeners}
with self.graph(**kwargs) as graph:
lb = graph['graph']['loadbalancer']
self._assert_graphs_equal(expected_lb_graph, lb)
self._validate_graph_statuses(lb)
return graph
def test_with_one_listener(self):
create_listener, expected_listener = self._get_listener_bodies()
expected_lb = self._get_expected_lb([expected_listener])
self.create_graph(expected_lb, [create_listener])
def test_with_many_listeners(self):
create_listener1, expected_listener1 = self._get_listener_bodies()
create_listener2, expected_listener2 = self._get_listener_bodies(
name='listener2', protocol_port=81)
expected_lb = self._get_expected_lb(
[expected_listener1, expected_listener2])
self.create_graph(expected_lb,
[create_listener1, create_listener2])
def test_with_many_listeners_same_port(self):
create_listener1, expected_listener1 = self._get_listener_bodies()
create_listener2, expected_listener2 = self._get_listener_bodies()
try:
self.create_graph(
{}, [create_listener1, create_listener2])
except webob.exc.HTTPClientError as exc:
self.assertEqual(exc.status_code, 409)
def test_with_one_listener_one_pool(self):
create_pool, expected_pool = self._get_pool_bodies()
create_listener, expected_listener = self._get_listener_bodies(
create_default_pool=create_pool,
expected_default_pool=expected_pool)
expected_lb = self._get_expected_lb([expected_listener])
self.create_graph(expected_lb, [create_listener])
def test_with_many_listeners_many_pools(self):
create_pool1, expected_pool1 = self._get_pool_bodies()
create_pool2, expected_pool2 = self._get_pool_bodies(name='pool2')
create_listener1, expected_listener1 = self._get_listener_bodies(
create_default_pool=create_pool1,
expected_default_pool=expected_pool1)
create_listener2, expected_listener2 = self._get_listener_bodies(
name='listener2', protocol_port=81,
create_default_pool=create_pool2,
expected_default_pool=expected_pool2)
expected_lb = self._get_expected_lb(
[expected_listener1, expected_listener2])
self.create_graph(
expected_lb, [create_listener1, create_listener2])
def test_with_one_listener_one_member(self):
create_member, expected_member = self._get_member_bodies()
create_pool, expected_pool = self._get_pool_bodies(
create_members=[create_member],
expected_members=[expected_member])
create_listener, expected_listener = self._get_listener_bodies(
create_default_pool=create_pool,
expected_default_pool=expected_pool)
expected_lb = self._get_expected_lb([expected_listener])
self.create_graph(expected_lb, [create_listener])
def test_with_one_listener_one_hm(self):
create_hm, expected_hm = self._get_hm_bodies()
create_pool, expected_pool = self._get_pool_bodies(
create_hm=create_hm,
expected_hm=expected_hm)
create_listener, expected_listener = self._get_listener_bodies(
create_default_pool=create_pool,
expected_default_pool=expected_pool)
expected_lb = self._get_expected_lb([expected_listener])
self.create_graph(expected_lb, [create_listener])
def test_with_one_of_everything(self):
create_member, expected_member = self._get_member_bodies()
create_hm, expected_hm = self._get_hm_bodies()
create_pool, expected_pool = self._get_pool_bodies(
create_members=[create_member],
expected_members=[expected_member],
create_hm=create_hm,
expected_hm=expected_hm)
create_r_member, expected_r_member = self._get_member_bodies(
name='r_member1')
create_r_hm, expected_r_hm = self._get_hm_bodies(name='r_hm1')
create_r_pool, expected_r_pool = self._get_pool_bodies(
create_members=[create_r_member],
expected_members=[expected_r_member],
create_hm=create_r_hm,
expected_hm=expected_r_hm)
create_rules, expected_rules = self._get_l7rules_bodes()
create_l7_policies, expected_l7_policies = self._get_l7policies_bodies(
create_rules=create_rules, expected_rules=expected_rules,
create_r_pool=create_r_pool, expected_r_pool=expected_r_pool)
create_listener, expected_listener = self._get_listener_bodies(
create_default_pool=create_pool,
expected_default_pool=expected_pool,
create_l7_policies=create_l7_policies,
expected_l7_policies=expected_l7_policies)
expected_lb = self._get_expected_lb([expected_listener])
self.create_graph(expected_lb, [create_listener])
class ListenerTestBase(LbaasPluginDbTestCase):
def setUp(self):
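        # Create a test network/subnet and two load balancers shared by the
        # listener tests below.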
super(ListenerTestBase, self).setUp()
network = self._make_network(self.fmt, 'test-net', True)
self.test_subnet = self._make_subnet(
self.fmt, network, gateway=n_constants.ATTR_NOT_SPECIFIED,
cidr='10.0.0.0/24')
self.test_subnet_id = self.test_subnet['subnet']['id']
lb_res = self._create_loadbalancer(
self.fmt, subnet_id=self.test_subnet_id)
self.lb = self.deserialize(self.fmt, lb_res)
self.lb_id = self.lb['loadbalancer']['id']
self.addCleanup(self._delete_loadbalancer_api, self.lb_id)
lb_res2 = self._create_loadbalancer(
self.fmt, subnet_id=self.test_subnet_id)
self.lb2 = self.deserialize(self.fmt, lb_res2)
self.lb_id2 = self.lb2['loadbalancer']['id']
def _create_listener_api(self, data):
req = self.new_create_request("listeners", data, self.fmt)
resp = req.get_response(self.ext_api)
body = self.deserialize(self.fmt, resp)
return resp, body
def _update_listener_api(self, listener_id, data):
req = self.new_update_request('listeners', data, listener_id)
resp = req.get_response(self.ext_api)
        body = self.deserialize(self.fmt, resp)
return resp, body
def _delete_listener_api(self, listener_id):
req = self.new_delete_request('listeners', listener_id)
resp = req.get_response(self.ext_api)
return resp
def _get_listener_api(self, listener_id):
req = self.new_show_request('listeners', listener_id)
resp = req.get_response(self.ext_api)
body = self.deserialize(self.fmt, resp)
return resp, body
def _list_listeners_api(self):
req = self.new_list_request('listeners')
resp = req.get_response(self.ext_api)
body = self.deserialize(self.fmt, resp)
return resp, body
class CertMock(cert_manager.Cert):
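    # Minimal Cert stub that returns placeholder strings for every accessor.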
def __init__(self, cert_container):
pass
def get_certificate(self):
return "mock"
def get_intermediates(self):
return "mock"
def get_private_key(self):
return "mock"
def get_private_key_passphrase(self):
return "mock"
class Exceptions(object):
def __iter__(self):
return self
class LbaasListenerTests(ListenerTestBase):
def test_create_listener(self, **extras):
expected = {
'protocol': 'HTTP',
'protocol_port': 80,
'admin_state_up': True,
'tenant_id': self._tenant_id,
'default_pool_id': None,
'loadbalancers': [{'id': self.lb_id}]
}
expected.update(extras)
with self.listener(loadbalancer_id=self.lb_id) as listener:
listener_id = listener['listener'].get('id')
self.assertTrue(listener_id)
actual = {}
for k, v in listener['listener'].items():
if k in expected:
actual[k] = v
self.assertEqual(expected, actual)
self._validate_statuses(self.lb_id, listener_id)
return listener
def test_create_listener_with_default_pool_no_lb(self, **extras):
listener_pool_res = self._create_pool(
self.fmt, lb_const.PROTOCOL_HTTP,
lb_const.LB_METHOD_ROUND_ROBIN,
loadbalancer_id=self.lb_id)
listener_pool = self.deserialize(self.fmt, listener_pool_res)
listener_pool_id = listener_pool['pool']['id']
expected = {
'protocol': 'HTTP',
'protocol_port': 80,
'admin_state_up': True,
'tenant_id': self._tenant_id,
'default_pool_id': listener_pool_id
}
expected.update(extras)
with self.listener(default_pool_id=listener_pool_id) as listener:
listener_id = listener['listener'].get('id')
self.assertTrue(listener_id)
actual = {}
for k, v in listener['listener'].items():
if k in expected:
actual[k] = v
self.assertEqual(actual, expected)
self._validate_statuses(self.lb_id, listener_id)
return listener
def test_create_listener_same_port_same_load_balancer(self):
with self.listener(loadbalancer_id=self.lb_id,
protocol_port=80):
self._create_listener(self.fmt, 'HTTP', 80,
loadbalancer_id=self.lb_id,
expected_res_status=409)
def test_create_listener_with_tls_no_default_container(self, **extras):
listener_data = {
'protocol': lb_const.PROTOCOL_TERMINATED_HTTPS,
'default_tls_container_ref': None,
'protocol_port': 443,
'admin_state_up': True,
'tenant_id': self._tenant_id,
'loadbalancer_id': self.lb_id,
}
listener_data.update(extras)
self.assertRaises(
loadbalancerv2.TLSDefaultContainerNotSpecified,
self.plugin.create_listener,
context.get_admin_context(),
{'listener': listener_data})
def test_create_listener_with_tls_missing_container(self, **extras):
default_tls_container_ref = uuidutils.generate_uuid()
class ReplaceClass(Exception):
def __init__(self, status_code, message):
self.status_code = status_code
self.message = message
cfg.CONF.set_override('service_name',
'lbaas',
'service_auth')
cfg.CONF.set_override('region',
'RegionOne',
'service_auth')
listener_data = {
'protocol': lb_const.PROTOCOL_TERMINATED_HTTPS,
'default_tls_container_ref': default_tls_container_ref,
'sni_container_refs': [],
'protocol_port': 443,
'admin_state_up': True,
'tenant_id': self._tenant_id,
'loadbalancer_id': self.lb_id
}
listener_data.update(extras)
exc = ReplaceClass(status_code=404, message='Cert Not Found')
with mock.patch('neutron_lbaas.services.loadbalancer.plugin.'
'CERT_MANAGER_PLUGIN.CertManager.get_cert',
side_effect=exc), \
mock.patch('neutron_lbaas.services.loadbalancer.plugin.'
'CERT_MANAGER_PLUGIN.CertManager.delete_cert'):
self.assertRaises(loadbalancerv2.TLSContainerNotFound,
self.plugin.create_listener,
context.get_admin_context(),
{'listener': listener_data})
def test_create_listener_with_tls_invalid_service_acct(self, **extras):
default_tls_container_ref = uuidutils.generate_uuid()
listener_data = {
'protocol': lb_const.PROTOCOL_TERMINATED_HTTPS,
'default_tls_container_ref': default_tls_container_ref,
'sni_container_refs': [],
'protocol_port': 443,
'admin_state_up': True,
'tenant_id': self._tenant_id,
'loadbalancer_id': self.lb_id
}
listener_data.update(extras)
with mock.patch('neutron_lbaas.services.loadbalancer.plugin.'
'CERT_MANAGER_PLUGIN.CertManager.get_cert') as \
get_cert_mock, \
mock.patch('neutron_lbaas.services.loadbalancer.plugin.'
'CERT_MANAGER_PLUGIN.CertManager.delete_cert'):
get_cert_mock.side_effect = Exception('RandomFailure')
self.assertRaises(loadbalancerv2.CertManagerError,
self.plugin.create_listener,
context.get_admin_context(),
{'listener': listener_data})
def test_create_listener_with_tls_invalid_container(self, **extras):
default_tls_container_ref = uuidutils.generate_uuid()
cfg.CONF.set_override('service_name',
'lbaas',
'service_auth')
cfg.CONF.set_override('region',
'RegionOne',
'service_auth')
listener_data = {
'protocol': lb_const.PROTOCOL_TERMINATED_HTTPS,
'default_tls_container_ref': default_tls_container_ref,
'sni_container_refs': [],
'protocol_port': 443,
'admin_state_up': True,
'tenant_id': self._tenant_id,
'loadbalancer_id': self.lb_id
}
listener_data.update(extras)
with mock.patch('neutron_lbaas.services.loadbalancer.plugin.'
'cert_parser.validate_cert') as validate_cert_mock, \
mock.patch('neutron_lbaas.services.loadbalancer.plugin.'
'CERT_MANAGER_PLUGIN.CertManager.get_cert') as \
get_cert_mock, \
mock.patch('neutron_lbaas.services.loadbalancer.plugin.'
'CERT_MANAGER_PLUGIN.CertManager.delete_cert') as \
rm_consumer_mock:
get_cert_mock.start().return_value = CertMock(
'mock_cert')
validate_cert_mock.side_effect = exceptions.MisMatchedKey
self.assertRaises(loadbalancerv2.TLSContainerInvalid,
self.plugin.create_listener,
context.get_admin_context(),
{'listener': listener_data})
rm_consumer_mock.assert_called_once_with(
cert_ref=listener_data['default_tls_container_ref'],
project_id=self._tenant_id,
resource_ref=cert_manager.CertManager.get_service_url(
self.lb_id))
def test_create_listener_with_tls(self, **extras):
default_tls_container_ref = uuidutils.generate_uuid()
sni_tls_container_ref_1 = uuidutils.generate_uuid()
sni_tls_container_ref_2 = uuidutils.generate_uuid()
expected = {
'protocol': lb_const.PROTOCOL_TERMINATED_HTTPS,
'default_tls_container_ref': default_tls_container_ref,
'sni_container_refs': [sni_tls_container_ref_1,
sni_tls_container_ref_2]}
extras['default_tls_container_ref'] = default_tls_container_ref
extras['sni_container_refs'] = [sni_tls_container_ref_1,
sni_tls_container_ref_2]
with mock.patch('neutron_lbaas.services.loadbalancer.plugin.'
'cert_parser.validate_cert') as validate_cert_mock, \
mock.patch('neutron_lbaas.services.loadbalancer.plugin.'
'CERT_MANAGER_PLUGIN.CertManager.get_cert') as \
get_cert_mock:
get_cert_mock.start().return_value = CertMock(
'mock_cert')
validate_cert_mock.start().return_value = True
with self.listener(protocol=lb_const.PROTOCOL_TERMINATED_HTTPS,
loadbalancer_id=self.lb_id, protocol_port=443,
**extras) as listener:
self.assertEqual(
expected,
dict((k, v)
for k, v in listener['listener'].items()
if k in expected)
)
def test_create_listener_loadbalancer_id_does_not_exist(self):
self._create_listener(self.fmt, 'HTTP', 80,
loadbalancer_id=uuidutils.generate_uuid(),
expected_res_status=404)
def test_can_create_listener_with_pool_loadbalancer_match(self):
with self.subnet() as subnet:
with self.loadbalancer(subnet=subnet) as loadbalancer:
lb_id = loadbalancer['loadbalancer']['id']
with self.pool(loadbalancer_id=lb_id) as p1:
p_id = p1['pool']['id']
with self.listener(default_pool_id=p_id,
loadbalancer_id=lb_id):
pass
def test_cannot_create_listener_with_pool_loadbalancer_mismatch(self):
with self.subnet() as subnet, \
self.loadbalancer(subnet=subnet) as lb1, \
self.loadbalancer(subnet=subnet) as lb2:
lb_id1 = lb1['loadbalancer']['id']
lb_id2 = lb2['loadbalancer']['id']
with self.pool(loadbalancer_id=lb_id1) as p1:
p_id = p1['pool']['id']
data = {'listener': {'name': '',
'protocol_port': 80,
'protocol': 'HTTP',
'connection_limit': 100,
'admin_state_up': True,
'tenant_id': self._tenant_id,
'default_pool_id': p_id,
'loadbalancer_id': lb_id2}}
resp, body = self._create_listener_api(data)
self.assertEqual(resp.status_int,
webob.exc.HTTPBadRequest.code)
def test_update_listener(self):
name = 'new_listener'
expected_values = {'name': name,
'protocol_port': 80,
'protocol': 'HTTP',
'connection_limit': 100,
'admin_state_up': False,
'tenant_id': self._tenant_id,
'loadbalancers': [{'id': self.lb_id}]}
with self.listener(name=name, loadbalancer_id=self.lb_id) as listener:
listener_id = listener['listener']['id']
data = {'listener': {'name': name,
'connection_limit': 100,
'admin_state_up': False}}
resp, body = self._update_listener_api(listener_id, data)
for k in expected_values:
self.assertEqual(expected_values[k], body['listener'][k])
self._validate_statuses(self.lb_id, listener_id,
listener_disabled=True)
def test_update_listener_with_tls(self):
default_tls_container_ref = uuidutils.generate_uuid()
sni_tls_container_ref_1 = uuidutils.generate_uuid()
sni_tls_container_ref_2 = uuidutils.generate_uuid()
sni_tls_container_ref_3 = uuidutils.generate_uuid()
sni_tls_container_ref_4 = uuidutils.generate_uuid()
sni_tls_container_ref_5 = uuidutils.generate_uuid()
listener_data = {
'protocol': lb_const.PROTOCOL_TERMINATED_HTTPS,
'default_tls_container_ref': default_tls_container_ref,
'sni_container_refs': [sni_tls_container_ref_1,
sni_tls_container_ref_2],
'protocol_port': 443,
'admin_state_up': True,
'tenant_id': self._tenant_id,
'loadbalancer_id': self.lb_id
}
with mock.patch('neutron_lbaas.services.loadbalancer.plugin.'
'cert_parser.validate_cert') as validate_cert_mock, \
mock.patch('neutron_lbaas.services.loadbalancer.plugin.'
'CERT_MANAGER_PLUGIN.CertManager.get_cert') as \
get_cert_mock:
get_cert_mock.start().return_value = CertMock(
'mock_cert')
validate_cert_mock.start().return_value = True
# Default container and two SNI containers
# Test order and validation behavior.
listener = self.plugin.create_listener(context.get_admin_context(),
{'listener': listener_data})
self.assertEqual([sni_tls_container_ref_1,
sni_tls_container_ref_2],
listener['sni_container_refs'])
# Default container and two other SNI containers
# Test order and validation behavior.
listener_data.pop('loadbalancer_id')
listener_data.pop('protocol')
listener_data.pop('provisioning_status')
listener_data.pop('operating_status')
listener_data['sni_container_refs'] = [sni_tls_container_ref_3,
sni_tls_container_ref_4]
listener = self.plugin.update_listener(
context.get_admin_context(),
listener['id'],
{'listener': listener_data}
)
self.assertEqual([sni_tls_container_ref_3,
sni_tls_container_ref_4],
listener['sni_container_refs'])
# Default container, two old SNI containers ordered differently
# and one new SNI container.
# Test order and validation behavior.
listener_data.pop('protocol')
listener_data['sni_container_refs'] = [sni_tls_container_ref_4,
sni_tls_container_ref_3,
sni_tls_container_ref_5]
listener = self.plugin.update_listener(context.get_admin_context(),
listener['id'],
{'listener': listener_data})
self.assertEqual([sni_tls_container_ref_4,
sni_tls_container_ref_3,
sni_tls_container_ref_5],
listener['sni_container_refs'])
def test_update_listener_with_empty_tls(self):
default_tls_container_ref = uuidutils.generate_uuid()
sni_tls_container_ref_1 = uuidutils.generate_uuid()
sni_tls_container_ref_2 = uuidutils.generate_uuid()
sni_tls_container_ref_3 = uuidutils.generate_uuid()
sni_tls_container_ref_4 = uuidutils.generate_uuid()
listener_data = {
'protocol': lb_const.PROTOCOL_TERMINATED_HTTPS,
'default_tls_container_ref': default_tls_container_ref,
'sni_container_refs': [sni_tls_container_ref_1,
sni_tls_container_ref_2],
'protocol_port': 443,
'admin_state_up': True,
'tenant_id': self._tenant_id,
'loadbalancer_id': self.lb_id
}
with mock.patch('neutron_lbaas.services.loadbalancer.plugin.'
'cert_parser.validate_cert') as validate_cert_mock,\
mock.patch('neutron_lbaas.services.loadbalancer.plugin.'
'CERT_MANAGER_PLUGIN.CertManager.'
'get_cert') as get_cert_mock:
get_cert_mock.start().return_value = CertMock(
'mock_cert')
validate_cert_mock.start().return_value = True
# Default container and two SNI containers
# Test order and validation behavior.
listener = self.plugin.create_listener(
context.get_admin_context(), {'listener': listener_data})
expected = [sni_tls_container_ref_1, sni_tls_container_ref_2]
self.assertEqual(expected, listener['sni_container_refs'])
# Default container and two other SNI containers
# Test order and validation behavior.
listener_data.pop('loadbalancer_id')
listener_data.pop('protocol')
listener_data.pop('provisioning_status')
listener_data.pop('operating_status')
listener_data['sni_container_refs'] = [
sni_tls_container_ref_3, sni_tls_container_ref_4]
listener_data['default_tls_container_ref'] = ''
listener = self.plugin.update_listener(
context.get_admin_context(),
listener['id'],
{'listener': listener_data}
)
self.assertEqual('', listener['default_tls_container_ref'])
def test_delete_listener(self):
with self.listener(no_delete=True,
loadbalancer_id=self.lb_id) as listener:
listener_id = listener['listener']['id']
resp = self._delete_listener_api(listener_id)
self.assertEqual(webob.exc.HTTPNoContent.code, resp.status_int)
resp, body = self._get_loadbalancer_api(self.lb_id)
self.assertEqual(0, len(body['loadbalancer']['listeners']))
def test_delete_listener_with_l7policy(self):
with self.listener(loadbalancer_id=self.lb_id,
no_delete=True) as listener:
with self.l7policy(listener['listener']['id'], no_delete=True):
ctx = context.get_admin_context()
self.assertRaises(
loadbalancerv2.EntityInUse,
self.plugin.delete_listener,
ctx, listener['listener']['id'])
def test_show_listener(self):
name = 'show_listener'
expected_values = {'name': name,
'protocol_port': 80,
'protocol': 'HTTP',
'connection_limit': -1,
'admin_state_up': True,
'tenant_id': self._tenant_id,
'default_pool_id': None,
'loadbalancers': [{'id': self.lb_id}]}
with self.listener(name=name, loadbalancer_id=self.lb_id) as listener:
listener_id = listener['listener']['id']
resp, body = self._get_listener_api(listener_id)
for k in expected_values:
self.assertEqual(expected_values[k], body['listener'][k])
def test_list_listeners(self):
name = 'list_listeners'
expected_values = {'name': name,
'protocol_port': 80,
'protocol': 'HTTP',
'connection_limit': -1,
'admin_state_up': True,
'tenant_id': self._tenant_id,
'loadbalancers': [{'id': self.lb_id}]}
with self.listener(name=name, loadbalancer_id=self.lb_id) as listener:
listener_id = listener['listener']['id']
expected_values['id'] = listener_id
resp, body = self._list_listeners_api()
listener_list = body['listeners']
self.assertEqual(1, len(listener_list))
for k in expected_values:
self.assertEqual(expected_values[k], listener_list[0][k])
def test_list_listeners_with_sort_emulated(self):
with self.listener(name='listener1', protocol_port=81,
loadbalancer_id=self.lb_id) as listener1:
with self.listener(name='listener2',
protocol_port=82,
loadbalancer_id=self.lb_id) as listener2:
with self.listener(name='listener3',
protocol_port=83,
loadbalancer_id=self.lb_id) as listener3:
self._test_list_with_sort(
'listener',
(listener1, listener2, listener3),
[('protocol_port', 'asc'), ('name', 'desc')]
)
def test_list_listeners_with_pagination_emulated(self):
with self.listener(name='listener1', protocol_port=80,
loadbalancer_id=self.lb_id) as listener1:
with self.listener(name='listener2', protocol_port=81,
loadbalancer_id=self.lb_id) as listener2:
with self.listener(name='listener3', protocol_port=82,
loadbalancer_id=self.lb_id) as listener3:
self._test_list_with_pagination(
'listener',
(listener1, listener2, listener3),
('name', 'asc'), 2, 2
)
def test_list_listeners_with_pagination_reverse_emulated(self):
with self.listener(name='listener1', protocol_port=80,
loadbalancer_id=self.lb_id) as listener1:
with self.listener(name='listener2', protocol_port=81,
loadbalancer_id=self.lb_id) as listener2:
with self.listener(name='listener3', protocol_port=82,
loadbalancer_id=self.lb_id) as listener3:
self._test_list_with_pagination(
'listener',
(listener3, listener2, listener1),
('name', 'desc'), 2, 2
)
class LbaasL7Tests(ListenerTestBase):
def test_create_l7policy_invalid_listener_id(self, **extras):
self._create_l7policy(self.fmt, uuidutils.generate_uuid(),
lb_const.L7_POLICY_ACTION_REJECT,
expected_res_status=webob.exc.HTTPNotFound.code)
def test_create_l7policy_redirect_no_pool(self, **extras):
l7policy_data = {
'name': '',
'action': lb_const.L7_POLICY_ACTION_REDIRECT_TO_POOL,
'description': '',
'position': 1,
'redirect_pool_id': None,
'redirect_url': 'http://radware.com',
'tenant_id': self._tenant_id,
'admin_state_up': True,
}
l7policy_data.update(extras)
with self.listener(loadbalancer_id=self.lb_id) as listener:
ctx = context.get_admin_context()
l7policy_data['listener_id'] = listener['listener']['id']
l7policy_data['action'] = (
lb_const.L7_POLICY_ACTION_REDIRECT_TO_POOL)
self.assertRaises(
l7.L7PolicyRedirectPoolIdMissing,
self.plugin.create_l7policy,
ctx, {'l7policy': l7policy_data})
def test_create_l7policy_redirect_invalid_pool(self, **extras):
l7policy_data = {
'name': '',
'action': lb_const.L7_POLICY_ACTION_REDIRECT_TO_POOL,
'description': '',
'position': 1,
'redirect_pool_id': None,
'tenant_id': self._tenant_id,
'admin_state_up': True,
}
l7policy_data.update(extras)
with self.listener(loadbalancer_id=self.lb_id) as listener:
ctx = context.get_admin_context()
l7policy_data['listener_id'] = listener['listener']['id']
# Test pool redirect action with invalid pool id specified
l7policy_data['redirect_pool_id'] = uuidutils.generate_uuid()
self.assertRaises(
loadbalancerv2.EntityNotFound,
self.plugin.create_l7policy,
ctx, {'l7policy': l7policy_data})
def test_create_l7policy_redirect_foreign_pool(self, **extras):
l7policy_data = {
'name': '',
'action': lb_const.L7_POLICY_ACTION_REDIRECT_TO_POOL,
'description': '',
'position': 1,
'redirect_pool_id': None,
'tenant_id': self._tenant_id,
'admin_state_up': True,
}
l7policy_data.update(extras)
with self.listener(loadbalancer_id=self.lb_id) as listener:
ctx = context.get_admin_context()
l7policy_data['listener_id'] = listener['listener']['id']
# Test pool redirect action with another loadbalancer pool id
with self.pool(loadbalancer_id=self.lb_id2) as p:
l7policy_data['redirect_pool_id'] = p['pool']['id']
self.assertRaises(
sharedpools.ListenerAndPoolMustBeOnSameLoadbalancer,
self.plugin.create_l7policy,
ctx, {'l7policy': l7policy_data})
def test_create_l7policy_redirect_no_url(self, **extras):
l7policy_data = {
'name': '',
'action': lb_const.L7_POLICY_ACTION_REDIRECT_TO_URL,
'description': '',
'position': 1,
'redirect_pool_id': None,
'redirect_url': 'http://radware.com',
'tenant_id': self._tenant_id,
'admin_state_up': True,
}
l7policy_data.update(extras)
with self.listener(loadbalancer_id=self.lb_id) as listener:
ctx = context.get_admin_context()
l7policy_data['listener_id'] = listener['listener']['id']
# Test url redirect action without url specified
del l7policy_data['redirect_url']
l7policy_data['action'] = lb_const.L7_POLICY_ACTION_REDIRECT_TO_URL
self.assertRaises(
l7.L7PolicyRedirectUrlMissing,
self.plugin.create_l7policy,
ctx, {'l7policy': l7policy_data})
def test_create_l7policy_redirect_invalid_url(self, **extras):
l7policy_data = {
'name': '',
'action': lb_const.L7_POLICY_ACTION_REDIRECT_TO_URL,
'description': '',
'position': 1,
'redirect_pool_id': None,
'redirect_url': 'http://radware.com',
'tenant_id': self._tenant_id,
'admin_state_up': True,
}
l7policy_data.update(extras)
with self.listener(loadbalancer_id=self.lb_id) as listener:
l7policy_data['listener_id'] = listener['listener']['id']
# Test url redirect action with invalid url specified
try:
with self.l7policy(listener['listener']['id'],
action=lb_const.L7_POLICY_ACTION_REDIRECT_TO_URL,
redirect_url='https:/acme.com'):
                    self.fail('Expected HTTPClientError for invalid URL')
except webob.exc.HTTPClientError:
pass
def test_create_l7policy_invalid_position(self, **extras):
l7policy_data = {
'name': '',
'action': lb_const.L7_POLICY_ACTION_REDIRECT_TO_URL,
'description': '',
'position': 1,
'redirect_pool_id': None,
'redirect_url': 'http://radware.com',
'tenant_id': self._tenant_id,
'admin_state_up': True,
}
l7policy_data.update(extras)
with self.listener(loadbalancer_id=self.lb_id) as listener:
l7policy_data['listener_id'] = listener['listener']['id']
# Test invalid zero position for policy
try:
with self.l7policy(listener['listener']['id'], position=0):
                    self.fail('Expected HTTPClientError for zero position')
except webob.exc.HTTPClientError:
pass
def test_create_l7policy(self, **extras):
expected = {
'action': lb_const.L7_POLICY_ACTION_REJECT,
'redirect_pool_id': None,
'redirect_url': None,
'tenant_id': self._tenant_id,
}
expected.update(extras)
with self.listener(loadbalancer_id=self.lb_id) as listener:
listener_id = listener['listener']['id']
with self.l7policy(listener_id) as p:
expected['listener_id'] = listener_id
actual = {}
for k, v in p['l7policy'].items():
if k in expected:
actual[k] = v
self.assertEqual(actual, expected)
self._validate_statuses(self.lb_id, listener_id,
p['l7policy']['id'])
def test_create_l7policy_pool_redirect(self, **extras):
expected = {
'action': lb_const.L7_POLICY_ACTION_REDIRECT_TO_POOL,
'redirect_pool_id': None,
'redirect_url': None,
'tenant_id': self._tenant_id,
}
expected.update(extras)
with self.listener(loadbalancer_id=self.lb_id) as listener:
listener_id = listener['listener']['id']
with self.pool(loadbalancer_id=self.lb_id) as pool:
pool_id = pool['pool']['id']
with self.l7policy(
listener_id,
action=lb_const.L7_POLICY_ACTION_REDIRECT_TO_POOL,
redirect_pool_id=pool_id) as p:
expected['listener_id'] = listener_id
expected['redirect_pool_id'] = pool_id
actual = {}
for k, v in p['l7policy'].items():
if k in expected:
actual[k] = v
self.assertEqual(actual, expected)
def test_l7policy_pool_deletion(self, **extras):
expected = {
'action': lb_const.L7_POLICY_ACTION_REDIRECT_TO_POOL,
'redirect_pool_id': None,
'redirect_url': None,
'tenant_id': self._tenant_id,
}
expected.update(extras)
with self.listener(loadbalancer_id=self.lb_id) as listener1, \
self.listener(loadbalancer_id=self.lb_id, protocol_port=8080) as listener2, \
self.pool(loadbalancer_id=self.lb_id, no_delete=True) as pool1, \
self.pool(loadbalancer_id=self.lb_id) as pool2, \
self.l7policy(listener1['listener']['id'],
action=lb_const.L7_POLICY_ACTION_REDIRECT_TO_POOL,
redirect_pool_id=pool1['pool']['id']) as policy1, \
self.l7policy(listener1['listener']['id'],
action=lb_const.L7_POLICY_ACTION_REDIRECT_TO_POOL,
redirect_pool_id=pool2['pool']['id']), \
self.l7policy(listener2['listener']['id'],
action=lb_const.L7_POLICY_ACTION_REDIRECT_TO_POOL,
redirect_pool_id=pool1['pool']['id']) as policy3:
ctx = context.get_admin_context()
self.plugin.delete_pool(ctx, pool1['pool']['id'])
l7policy1 = self.plugin.get_l7policy(
ctx, policy1['l7policy']['id'])
self.assertEqual(l7policy1['action'],
lb_const.L7_POLICY_ACTION_REJECT)
self.assertIsNone(l7policy1['redirect_pool_id'])
l7policy3 = self.plugin.get_l7policy(
ctx, policy3['l7policy']['id'])
self.assertEqual(l7policy3['action'],
lb_const.L7_POLICY_ACTION_REJECT)
self.assertIsNone(l7policy3['redirect_pool_id'])
def test_create_l7policies_ordering(self, **extras):
with self.listener(loadbalancer_id=self.lb_id) as listener:
listener_id = listener['listener']['id']
with self.l7policy(listener_id, name="1"), \
self.l7policy(listener_id, name="2"), \
self.l7policy(listener_id, name="3"), \
self.l7policy(listener_id, position=1, name="4"), \
self.l7policy(listener_id, position=2, name="5"), \
self.l7policy(listener_id, position=4, name="6"), \
self.l7policy(listener_id, name="7"), \
self.l7policy(listener_id, position=8, name="8"), \
self.l7policy(listener_id, position=1, name="9"), \
self.l7policy(listener_id, position=1, name="10"):
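                # Policies created with an explicit position are inserted at
                # that slot and push later policies down; unpositioned ones
                # are appended, yielding the name order asserted below.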
listener_db = self.plugin.db._get_resource(
context.get_admin_context(),
models.Listener, listener['listener']['id'])
names = ['10', '9', '4', '5', '1', '6', '2', '3', '7', '8']
for pos in range(0, 10):
self.assertEqual(
listener_db.l7_policies[pos]['position'], pos + 1)
self.assertEqual(
listener_db.l7_policies[pos]['name'], names[pos])
def test_update_l7policy(self, **extras):
expected = {
'admin_state_up': False,
'action': lb_const.L7_POLICY_ACTION_REDIRECT_TO_URL,
'redirect_pool_id': None,
'redirect_url': 'redirect_url',
'tenant_id': self._tenant_id,
'position': 1,
}
expected.update(extras)
with self.listener(loadbalancer_id=self.lb_id) as listener:
listener_id = listener['listener']['id']
with self.l7policy(listener_id) as p:
l7policy_id = p['l7policy']['id']
data = {
'l7policy': {
'action': lb_const.L7_POLICY_ACTION_REDIRECT_TO_URL,
'redirect_url': 'redirect_url',
'admin_state_up': False}}
ctx = context.get_admin_context()
self.plugin.update_l7policy(ctx, l7policy_id, data)
l7policy = self.plugin.get_l7policy(ctx, l7policy_id)
actual = {}
for k, v in l7policy.items():
if k in expected:
actual[k] = v
self.assertEqual(actual, expected)
self._validate_statuses(self.lb_id, listener_id,
p['l7policy']['id'],
l7policy_disabled=True)
def test_update_l7policies_ordering(self, **extras):
expected = {
'action': lb_const.L7_POLICY_ACTION_REJECT,
'redirect_pool_id': None,
'redirect_url': '',
'tenant_id': self._tenant_id,
}
expected.update(extras)
with self.listener(loadbalancer_id=self.lb_id) as listener:
listener_id = listener['listener']['id']
with self.l7policy(listener_id, name="1") as p1, \
self.l7policy(listener_id, name="2") as p2, \
self.l7policy(listener_id, name="3"), \
self.l7policy(listener_id, name="4"), \
self.l7policy(listener_id, name="5") as p5, \
self.l7policy(listener_id, name="6") as p6, \
self.l7policy(listener_id, name="7"), \
self.l7policy(listener_id, name="8"), \
self.l7policy(listener_id, name="9"), \
self.l7policy(listener_id, name="10") as p10:
c = context.get_admin_context()
listener_db = self.plugin.db._get_resource(
context.get_admin_context(),
models.Listener, listener['listener']['id'])
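                # Move individual policies to new positions one at a time;
                # each update reorders the listener's policy list, and the
                # final name order is asserted at the end.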
expected['position'] = 1
self.plugin.db.update_status(
c, models.L7Policy, p2['l7policy']['id'],
lb_const.OFFLINE)
self.plugin.update_l7policy(c, p2['l7policy']['id'],
{'l7policy': expected})
expected['position'] = 3
self.plugin.db.update_status(
c, models.L7Policy, p1['l7policy']['id'],
lb_const.OFFLINE)
self.plugin.update_l7policy(c, p1['l7policy']['id'],
{'l7policy': expected})
expected['position'] = 4
self.plugin.db.update_status(
c, models.L7Policy, p6['l7policy']['id'],
lb_const.OFFLINE)
self.plugin.update_l7policy(c, p6['l7policy']['id'],
{'l7policy': expected})
expected['position'] = 11
self.plugin.db.update_status(
c, models.L7Policy, p2['l7policy']['id'],
lb_const.OFFLINE)
self.plugin.update_l7policy(c, p2['l7policy']['id'],
{'l7policy': expected})
expected['position'] = 1
self.plugin.db.update_status(
c, models.L7Policy, p1['l7policy']['id'],
lb_const.OFFLINE)
self.plugin.update_l7policy(c, p1['l7policy']['id'],
{'l7policy': expected})
expected['position'] = 8
self.plugin.db.update_status(
c, models.L7Policy, p5['l7policy']['id'],
lb_const.OFFLINE)
self.plugin.update_l7policy(c, p5['l7policy']['id'],
{'l7policy': expected})
expected['position'] = 3
self.plugin.db.update_status(
c, models.L7Policy, p10['l7policy']['id'],
lb_const.OFFLINE)
self.plugin.update_l7policy(c, p10['l7policy']['id'],
{'l7policy': expected})
listener_db = self.plugin.db._get_resource(
context.get_admin_context(),
models.Listener, listener['listener']['id'])
names = ['1', '3', '10', '6', '4', '7', '8', '9', '5', '2']
for pos in range(0, 10):
self.assertEqual(
listener_db.l7_policies[pos]['position'], pos + 1)
self.assertEqual(
listener_db.l7_policies[pos]['name'], names[pos])
def test_delete_l7policy(self, **extras):
expected = {
'position': 1,
'action': lb_const.L7_POLICY_ACTION_REJECT,
'redirect_pool_id': None,
'redirect_url': '',
'tenant_id': self._tenant_id,
}
expected.update(extras)
with self.listener(loadbalancer_id=self.lb_id) as listener:
listener_id = listener['listener']['id']
with self.l7policy(listener_id, name="0"), \
self.l7policy(listener_id, name="1"), \
self.l7policy(listener_id, name="2"), \
self.l7policy(listener_id, name="3", no_delete=True) as p3, \
self.l7policy(listener_id, name="4"), \
self.l7policy(listener_id, name="5", no_delete=True) as p5, \
self.l7policy(listener_id, name="6"):
c = context.get_admin_context()
self.plugin.db.update_status(
c, models.L7Policy, p3['l7policy']['id'],
lb_const.OFFLINE)
self.plugin.delete_l7policy(c, p3['l7policy']['id'])
self.plugin.db.update_status(
c, models.L7Policy, p5['l7policy']['id'],
lb_const.OFFLINE)
self.plugin.delete_l7policy(c, p5['l7policy']['id'])
listener_db = self.plugin.db._get_resource(
context.get_admin_context(),
models.Listener, listener['listener']['id'])
names = ['0', '1', '2', '4', '6']
                for pos in range(len(names)):
self.assertEqual(
listener_db.l7_policies[pos]['position'], pos + 1)
self.assertEqual(
listener_db.l7_policies[pos]['name'], names[pos])
self.assertRaises(
loadbalancerv2.EntityNotFound,
self.plugin.get_l7policy,
c, p3['l7policy']['id'])
self.assertRaises(
loadbalancerv2.EntityNotFound,
self.plugin.get_l7policy,
c, p5['l7policy']['id'])
def test_show_l7policy(self, **extras):
expected = {
'position': 1,
'action': lb_const.L7_POLICY_ACTION_REJECT,
'redirect_pool_id': None,
'redirect_url': None,
'tenant_id': self._tenant_id,
}
expected.update(extras)
with self.listener(loadbalancer_id=self.lb_id) as listener:
listener_id = listener['listener']['id']
expected['listener_id'] = listener_id
with self.l7policy(listener_id, name="0") as p:
req = self.new_show_request('l7policies',
p['l7policy']['id'],
fmt=self.fmt)
res = self.deserialize(self.fmt,
req.get_response(self.ext_api))
actual = {}
for k, v in res['l7policy'].items():
if k in expected:
actual[k] = v
self.assertEqual(expected, actual)
return p
def test_list_l7policies_with_sort_emulated(self):
with self.listener(loadbalancer_id=self.lb_id) as listener:
listener_id = listener['listener']['id']
with self.l7policy(listener_id, name="b") as p1, \
self.l7policy(listener_id, name="c") as p2, \
self.l7policy(listener_id, name="a") as p3:
self._test_list_with_sort('l7policy', (p3, p1, p2),
[('name', 'asc')],
resources='l7policies')
def test_list_l7policies_with_pagination_emulated(self):
with self.listener(loadbalancer_id=self.lb_id) as listener:
listener_id = listener['listener']['id']
with self.l7policy(listener_id, name="b") as p1, \
self.l7policy(listener_id, name="c") as p2, \
self.l7policy(listener_id, name="e") as p3, \
self.l7policy(listener_id, name="d") as p4, \
self.l7policy(listener_id, name="f") as p5, \
self.l7policy(listener_id, name="g") as p6, \
self.l7policy(listener_id, name="a") as p7:
self._test_list_with_pagination(
'l7policy', (p6, p5, p3, p4, p2, p1, p7),
('name', 'desc'), 2, 4, resources='l7policies')
def test_list_l7policies_with_pagination_reverse_emulated(self):
with self.listener(loadbalancer_id=self.lb_id) as listener:
listener_id = listener['listener']['id']
with self.l7policy(listener_id, name="b") as p1, \
self.l7policy(listener_id, name="c") as p2, \
self.l7policy(listener_id, name="e") as p3, \
self.l7policy(listener_id, name="d") as p4, \
self.l7policy(listener_id, name="f") as p5, \
self.l7policy(listener_id, name="g") as p6, \
self.l7policy(listener_id, name="a") as p7:
self._test_list_with_pagination_reverse(
'l7policy', (p6, p5, p3, p4, p2, p1, p7),
('name', 'desc'), 2, 4, resources='l7policies')
def test_create_l7rule_invalid_policy_id(self, **extras):
with self.listener(loadbalancer_id=self.lb_id) as listener:
with self.l7policy(listener['listener']['id']):
self._create_l7policy_rule(
self.fmt, uuidutils.generate_uuid(),
lb_const.L7_RULE_TYPE_HOST_NAME,
lb_const.L7_RULE_COMPARE_TYPE_REGEX,
'value',
expected_res_status=webob.exc.HTTPNotFound.code)
def test_create_invalid_l7rule(self, **extras):
rule = {
'type': lb_const.L7_RULE_TYPE_HEADER,
'compare_type': lb_const.L7_RULE_COMPARE_TYPE_REGEX,
'value': '*'
}
with self.listener(loadbalancer_id=self.lb_id) as listener:
with self.l7policy(listener['listener']['id']) as policy:
policy_id = policy['l7policy']['id']
ctx = context.get_admin_context()
# test invalid regex
self.assertRaises(
l7.L7RuleInvalidRegex,
self.plugin.db.create_l7policy_rule,
ctx, rule, policy_id)
# test missing key for HEADER type
rule['value'] = '/*/'
self.assertRaises(
l7.L7RuleKeyMissing,
self.plugin.db.create_l7policy_rule,
ctx, rule, policy_id)
# test missing key for COOKIE type
rule['type'] = lb_const.L7_RULE_TYPE_COOKIE
self.assertRaises(
l7.L7RuleKeyMissing,
self.plugin.db.create_l7policy_rule,
ctx, rule, policy_id)
# test invalid key for HEADER type
rule['type'] = lb_const.L7_RULE_TYPE_HEADER
rule['key'] = '/'
self.assertRaises(
l7.L7RuleInvalidKey,
self.plugin.db.create_l7policy_rule,
ctx, rule, policy_id)
# test invalid value for COOKIE type
rule['compare_type'] =\
lb_const.L7_RULE_COMPARE_TYPE_CONTAINS
rule['type'] = lb_const.L7_RULE_TYPE_COOKIE
rule['key'] = 'a'
rule['value'] = ';'
self.assertRaises(
l7.L7RuleInvalidCookieValue,
self.plugin.db.create_l7policy_rule,
ctx, rule, policy_id)
# test invalid value for !COOKIE type
rule['type'] = lb_const.L7_RULE_TYPE_PATH
rule['value'] = ' '
self.assertRaises(
l7.L7RuleInvalidHeaderValue,
self.plugin.db.create_l7policy_rule,
ctx, rule, policy_id)
                # test invalid value for !COOKIE type, quoted
rule['value'] = ' '
self.assertRaises(
l7.L7RuleInvalidHeaderValue,
self.plugin.db.create_l7policy_rule,
ctx, rule, policy_id)
# test unsupported compare type for FILE type
rule['type'] = lb_const.L7_RULE_TYPE_FILE_TYPE
self.assertRaises(
l7.L7RuleUnsupportedCompareType,
self.plugin.db.create_l7policy_rule,
ctx, rule, policy_id)
def test_create_l7rule(self, **extras):
expected = {
'type': lb_const.L7_RULE_TYPE_HOST_NAME,
'compare_type': lb_const.L7_RULE_COMPARE_TYPE_EQUAL_TO,
'key': None,
'value': 'value1'
}
with self.listener(loadbalancer_id=self.lb_id) as listener:
with self.l7policy(listener['listener']['id']) as policy:
policy_id = policy['l7policy']['id']
with self.l7policy_rule(policy_id) as r_def, \
self.l7policy_rule(policy_id,
key='key1') as r_key, \
self.l7policy_rule(policy_id,
value='value2') as r_value, \
self.l7policy_rule(policy_id,
type=lb_const.L7_RULE_TYPE_PATH) as r_type, \
self.l7policy_rule(policy_id, compare_type=lb_const.
L7_RULE_COMPARE_TYPE_REGEX) as r_compare_type, \
self.l7policy_rule(policy_id,
invert=True) as r_invert:
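                    # Each rule overrides a single attribute of the default
                    # body; fetch each back and verify only that attribute
                    # (plus defaults) differs.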
ctx = context.get_admin_context()
rdb = self.plugin.get_l7policy_rule(
ctx, r_def['rule']['id'], policy_id)
actual = {}
for k, v in rdb.items():
if k in expected:
actual[k] = v
self.assertEqual(actual, expected)
rdb = self.plugin.get_l7policy_rule(
ctx, r_key['rule']['id'], policy_id)
expected['key'] = 'key1'
actual = {}
for k, v in rdb.items():
if k in expected:
actual[k] = v
self.assertEqual(actual, expected)
rdb = self.plugin.get_l7policy_rule(
ctx, r_value['rule']['id'], policy_id)
expected['key'] = None
expected['value'] = 'value2'
actual = {}
for k, v in rdb.items():
if k in expected:
actual[k] = v
self.assertEqual(actual, expected)
rdb = self.plugin.get_l7policy_rule(
ctx, r_type['rule']['id'], policy_id)
expected['value'] = 'value1'
expected['type'] = lb_const.L7_RULE_TYPE_PATH
actual = {}
for k, v in rdb.items():
if k in expected:
actual[k] = v
self.assertEqual(actual, expected)
rdb = self.plugin.get_l7policy_rule(
ctx, r_compare_type['rule']['id'], policy_id)
expected['type'] = lb_const.L7_RULE_TYPE_HOST_NAME
expected['compare_type'] =\
lb_const.L7_RULE_COMPARE_TYPE_REGEX
actual = {}
for k, v in rdb.items():
if k in expected:
actual[k] = v
self.assertEqual(actual, expected)
rdb = self.plugin.get_l7policy_rule(
ctx, r_invert['rule']['id'], policy_id)
expected['invert'] = True
expected['compare_type'] =\
lb_const.L7_RULE_COMPARE_TYPE_EQUAL_TO
actual = {}
for k, v in rdb.items():
if k in expected:
actual[k] = v
self.assertEqual(actual, expected)
def test_invalid_update_l7rule(self, **extras):
rule = {
'type': lb_const.L7_RULE_TYPE_HEADER,
'compare_type': lb_const.L7_RULE_COMPARE_TYPE_REGEX,
'value': '*'
}
with self.listener(loadbalancer_id=self.lb_id) as listener:
with self.l7policy(listener['listener']['id']) as policy:
policy_id = policy['l7policy']['id']
with self.l7policy_rule(policy_id) as r:
rule_id = r['rule']['id']
ctx = context.get_admin_context()
# test invalid regex
self.assertRaises(
l7.L7RuleInvalidRegex,
self.plugin.db.update_l7policy_rule,
ctx, rule_id, rule, policy_id)
# test missing key for HEADER type
rule['value'] = '/*/'
self.assertRaises(
l7.L7RuleKeyMissing,
self.plugin.db.update_l7policy_rule,
ctx, rule_id, rule, policy_id)
# test missing key for COOKIE type
rule['type'] = lb_const.L7_RULE_TYPE_COOKIE
self.assertRaises(
l7.L7RuleKeyMissing,
self.plugin.db.update_l7policy_rule,
ctx, rule_id, rule, policy_id)
# test invalid key for HEADER type
rule['type'] = lb_const.L7_RULE_TYPE_HEADER
rule['key'] = '/'
self.assertRaises(
l7.L7RuleInvalidKey,
self.plugin.db.update_l7policy_rule,
ctx, rule_id, rule, policy_id)
# test invalid value for COOKIE type
rule['compare_type'] =\
lb_const.L7_RULE_COMPARE_TYPE_CONTAINS
rule['type'] = lb_const.L7_RULE_TYPE_COOKIE
rule['key'] = 'a'
rule['value'] = ';'
self.assertRaises(
l7.L7RuleInvalidCookieValue,
self.plugin.db.update_l7policy_rule,
ctx, rule_id, rule, policy_id)
# test invalid value for !COOKIE type
rule['type'] = lb_const.L7_RULE_TYPE_PATH
rule['value'] = ' '
self.assertRaises(
l7.L7RuleInvalidHeaderValue,
self.plugin.db.update_l7policy_rule,
ctx, rule_id, rule, policy_id)
                        # test invalid value for !COOKIE type, quoted
rule['value'] = ' '
self.assertRaises(
l7.L7RuleInvalidHeaderValue,
self.plugin.db.update_l7policy_rule,
ctx, rule_id, rule, policy_id)
# test unsupported compare type for FILE type
rule['type'] = lb_const.L7_RULE_TYPE_FILE_TYPE
self.assertRaises(
l7.L7RuleUnsupportedCompareType,
self.plugin.db.update_l7policy_rule,
ctx, rule_id, rule, policy_id)
def test_update_l7rule(self, **extras):
with self.listener(loadbalancer_id=self.lb_id) as listener:
with self.l7policy(listener['listener']['id']) as policy:
policy_id = policy['l7policy']['id']
with self.l7policy_rule(policy_id) as r:
req = self.new_show_request('l7policies',
policy_id,
fmt=self.fmt)
policy_show = self.deserialize(
self.fmt,
req.get_response(self.ext_api)
)
self.assertEqual(
len(policy_show['l7policy']['rules']), 1)
expected = {}
expected['type'] = lb_const.L7_RULE_TYPE_HEADER
expected['compare_type'] = (
lb_const.L7_RULE_COMPARE_TYPE_REGEX)
expected['value'] = '/.*/'
expected['key'] = 'HEADER1'
expected['invert'] = True
expected['admin_state_up'] = False
req = self.new_update_request(
'l7policies', {'rule': expected},
policy_id, subresource='rules',
sub_id=r['rule']['id'])
res = self.deserialize(
self.fmt,
req.get_response(self.ext_api)
)
actual = {}
for k, v in res['rule'].items():
if k in expected:
actual[k] = v
self.assertEqual(actual, expected)
self._validate_statuses(self.lb_id,
listener['listener']['id'],
policy_id, r['rule']['id'],
l7rule_disabled=True)
def test_delete_l7rule(self):
with self.listener(loadbalancer_id=self.lb_id) as listener:
with self.l7policy(listener['listener']['id']) as policy:
policy_id = policy['l7policy']['id']
with self.l7policy_rule(policy_id, no_delete=True) as r0, \
self.l7policy_rule(policy_id, no_delete=True):
req = self.new_show_request('l7policies',
policy_id,
fmt=self.fmt)
policy_update = self.deserialize(
self.fmt,
req.get_response(self.ext_api)
)
self.assertEqual(
len(policy_update['l7policy']['rules']), 2)
req = self.new_delete_request('l7policies',
policy_id,
subresource='rules',
sub_id=r0['rule']['id'])
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int,
webob.exc.HTTPNoContent.code)
req = self.new_show_request('l7policies',
policy_id,
fmt=self.fmt)
policy_update = self.deserialize(
self.fmt,
req.get_response(self.ext_api)
)
self.assertEqual(
len(policy_update['l7policy']['rules']), 1)
def test_list_l7rules_with_sort_emulated(self):
with self.listener(loadbalancer_id=self.lb_id) as listener:
listener_id = listener['listener']['id']
with self.l7policy(listener_id) as policy:
policy_id = policy['l7policy']['id']
with self.l7policy_rule(policy_id, value="b") as r1, \
self.l7policy_rule(policy_id, value="c") as r2, \
self.l7policy_rule(policy_id, value="a") as r3:
self._test_list_with_sort('l7policy', (r3, r1, r2),
[('value', 'asc')],
id=policy_id,
resources='l7policies',
subresource='rule',
subresources='rules')
def test_list_l7rules_with_pagination_emulated(self):
with self.listener(loadbalancer_id=self.lb_id) as listener:
listener_id = listener['listener']['id']
with self.l7policy(listener_id) as policy:
policy_id = policy['l7policy']['id']
with self.l7policy_rule(policy_id, value="b") as r1, \
self.l7policy_rule(policy_id, value="c") as r2, \
self.l7policy_rule(policy_id, value="e") as r3, \
self.l7policy_rule(policy_id, value="d") as r4, \
self.l7policy_rule(policy_id, value="f") as r5, \
self.l7policy_rule(policy_id, value="g") as r6, \
self.l7policy_rule(policy_id, value="a") as r7:
self._test_list_with_pagination(
'l7policy', (r6, r5, r3, r4, r2, r1, r7),
('value', 'desc'), 2, 4,
id=policy_id,
resources='l7policies',
subresource='rule',
subresources='rules')
def test_list_l7rules_with_pagination_reverse_emulated(self):
with self.listener(loadbalancer_id=self.lb_id) as listener:
listener_id = listener['listener']['id']
with self.l7policy(listener_id) as p:
policy_id = p['l7policy']['id']
with self.l7policy_rule(policy_id, value="b") as r1, \
self.l7policy_rule(policy_id, value="c") as r2, \
self.l7policy_rule(policy_id, value="e") as r3, \
self.l7policy_rule(policy_id, value="d") as r4, \
self.l7policy_rule(policy_id, value="f") as r5, \
self.l7policy_rule(policy_id, value="g") as r6, \
self.l7policy_rule(policy_id, value="a") as r7:
self._test_list_with_pagination_reverse(
'l7policy', (r6, r5, r3, r4, r2, r1, r7),
('value', 'desc'), 2, 4,
id=policy_id,
resources='l7policies',
subresource='rule',
subresources='rules')
class PoolTestBase(ListenerTestBase):
def setUp(self):
super(PoolTestBase, self).setUp()
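        # Create one HTTP listener on each load balancer; pools in these
        # tests attach to self.listener_id by default.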
listener_res = self._create_listener(self.fmt, lb_const.PROTOCOL_HTTP,
80, self.lb_id)
self.def_listener = self.deserialize(self.fmt, listener_res)
self.listener_id = self.def_listener['listener']['id']
self.addCleanup(self._delete_listener_api, self.listener_id)
listener_res2 = self._create_listener(self.fmt, lb_const.PROTOCOL_HTTP,
80, self.lb_id2)
self.def_listener2 = self.deserialize(self.fmt, listener_res2)
self.listener_id2 = self.def_listener2['listener']['id']
self.loadbalancer_id = self.lb_id
self.loadbalancer_id2 = self.lb_id2
def _create_pool_api(self, data):
req = self.new_create_request("pools", data, self.fmt)
resp = req.get_response(self.ext_api)
body = self.deserialize(self.fmt, resp)
return resp, body
def _update_pool_api(self, pool_id, data):
req = self.new_update_request('pools', data, pool_id)
resp = req.get_response(self.ext_api)
body = self.deserialize(self.fmt, resp)
return resp, body
def _delete_pool_api(self, pool_id):
req = self.new_delete_request('pools', pool_id)
resp = req.get_response(self.ext_api)
return resp
def _get_pool_api(self, pool_id):
req = self.new_show_request('pools', pool_id)
resp = req.get_response(self.ext_api)
body = self.deserialize(self.fmt, resp)
return resp, body
def _list_pools_api(self):
req = self.new_list_request('pools')
resp = req.get_response(self.ext_api)
body = self.deserialize(self.fmt, resp)
return resp, body
class LbaasPoolTests(PoolTestBase):
def test_create_pool(self, **extras):
expected = {
'name': '',
'description': '',
'protocol': 'HTTP',
'lb_algorithm': 'ROUND_ROBIN',
'admin_state_up': True,
'tenant_id': self._tenant_id,
'healthmonitor_id': None,
'members': []
}
expected.update(extras)
with self.pool(listener_id=self.listener_id, **extras) as pool:
pool_id = pool['pool'].get('id')
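            # The API normalizes session_persistence: a missing cookie_name
            # comes back as None, so mirror that in the expected body.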
if ('session_persistence' in expected.keys() and
expected['session_persistence'] is not None and
not expected['session_persistence'].get('cookie_name')):
expected['session_persistence']['cookie_name'] = None
self.assertTrue(pool_id)
actual = {}
for k, v in pool['pool'].items():
if k in expected:
actual[k] = v
self.assertEqual(expected, actual)
self._validate_statuses(self.lb_id, self.listener_id,
pool_id=pool_id)
return pool
def test_create_pool_with_loadbalancer_no_listener(self, **extras):
expected = {
'name': '',
'description': '',
'protocol': 'HTTP',
'lb_algorithm': 'ROUND_ROBIN',
'admin_state_up': True,
'tenant_id': self._tenant_id,
'healthmonitor_id': None,
'members': []
}
expected.update(extras)
with self.pool(loadbalancer_id=self.loadbalancer_id, **extras) as pool:
pool_id = pool['pool'].get('id')
if 'session_persistence' in expected:
if not expected['session_persistence'].get('cookie_name'):
expected['session_persistence']['cookie_name'] = None
self.assertTrue(pool_id)
actual = {}
for k, v in pool['pool'].items():
if k in expected:
actual[k] = v
self.assertEqual(expected, actual)
self._validate_statuses(self.lb_id, None, pool_id=pool_id)
return pool
def test_show_pool(self, **extras):
expected = {
'name': '',
'description': '',
'protocol': 'HTTP',
'lb_algorithm': 'ROUND_ROBIN',
'admin_state_up': True,
'tenant_id': self._tenant_id,
'listeners': [{'id': self.listener_id}],
'healthmonitor_id': None,
'members': []
}
expected.update(extras)
with self.pool(listener_id=self.listener_id) as pool:
pool_id = pool['pool']['id']
resp, body = self._get_pool_api(pool_id)
actual = {}
for k, v in body['pool'].items():
if k in expected:
actual[k] = v
self.assertEqual(expected, actual)
return pool
def test_update_pool(self, **extras):
expected = {
'name': '',
'description': '',
'protocol': 'HTTP',
'lb_algorithm': 'LEAST_CONNECTIONS',
'admin_state_up': True,
'tenant_id': self._tenant_id,
'listeners': [{'id': self.listener_id}],
'healthmonitor_id': None,
'members': []
}
expected.update(extras)
with self.pool(listener_id=self.listener_id) as pool:
pool_id = pool['pool']['id']
self.assertTrue(pool_id)
data = {'pool': {'lb_algorithm': 'LEAST_CONNECTIONS'}}
resp, body = self._update_pool_api(pool_id, data)
actual = {}
for k, v in body['pool'].items():
if k in expected:
actual[k] = v
self.assertEqual(expected, actual)
self._validate_statuses(self.lb_id, self.listener_id,
pool_id=pool_id)
return pool
def test_delete_pool(self):
with self.pool(no_delete=True, listener_id=self.listener_id) as pool:
pool_id = pool['pool']['id']
ctx = context.get_admin_context()
qry = ctx.session.query(models.PoolV2)
qry = qry.filter_by(id=pool_id)
self.assertIsNotNone(qry.first())
resp = self._delete_pool_api(pool_id)
self.assertEqual(webob.exc.HTTPNoContent.code, resp.status_int)
qry = ctx.session.query(models.PoolV2)
qry = qry.filter_by(id=pool['pool']['id'])
self.assertIsNone(qry.first())
def test_delete_pool_and_members(self):
with self.pool(listener_id=self.listener_id, no_delete=True) as pool:
pool_id = pool['pool']['id']
with self.member(pool_id=pool_id, no_delete=True) as member:
member_id = member['member']['id']
ctx = context.get_admin_context()
                # delete_pool only marks the status; the driver is expected
                # to remove the row. The LoggingNoopDriver used in these
                # tests deletes from the db immediately.
self.plugin.delete_pool(ctx, pool_id)
# verify member got deleted as well
self.assertRaises(
loadbalancerv2.EntityNotFound,
self.plugin.db.get_pool_member,
ctx, member_id)
def test_delete_pool_and_hm(self):
with self.pool(listener_id=self.listener_id) as pool:
pool_id = pool['pool']['id']
with self.healthmonitor(pool_id=pool_id):
# verify pool deletion is prevented if HM is associated
ctx = context.get_admin_context()
self.assertRaises(
loadbalancerv2.EntityInUse,
self.plugin.delete_pool,
ctx, pool_id)
def test_cannot_add_multiple_pools_to_listener(self):
with self.pool(listener_id=self.listener_id):
data = {'pool': {'name': '',
'description': '',
'protocol': 'HTTP',
'lb_algorithm': 'ROUND_ROBIN',
'admin_state_up': True,
'tenant_id': self._tenant_id,
'listener_id': self.listener_id}}
resp, body = self._create_pool_api(data)
self.assertEqual(webob.exc.HTTPConflict.code, resp.status_int)
def test_create_pool_with_pool_protocol_mismatch(self):
with self.listener(protocol=lb_const.PROTOCOL_HTTPS,
loadbalancer_id=self.lb_id,
protocol_port=443) as listener:
listener_id = listener['listener']['id']
data = {'pool': {'listener_id': listener_id,
'protocol': lb_const.PROTOCOL_HTTP,
'lb_algorithm': lb_const.LB_METHOD_ROUND_ROBIN,
'tenant_id': self._tenant_id}}
resp, body = self._create_pool_api(data)
self.assertEqual(webob.exc.HTTPConflict.code, resp.status_int)
def test_create_pool_with_protocol_invalid(self):
data = {'pool': {
'name': '',
'description': '',
'protocol': 'BLANK',
'lb_algorithm': 'LEAST_CONNECTIONS',
'admin_state_up': True,
'tenant_id': self._tenant_id
}}
resp, body = self._create_pool_api(data)
self.assertEqual(webob.exc.HTTPBadRequest.code, resp.status_int)
def test_can_create_pool_with_listener_loadbalancer_match(self):
with self.subnet() as subnet:
with self.loadbalancer(subnet=subnet) as loadbalancer:
lb_id = loadbalancer['loadbalancer']['id']
with self.listener(loadbalancer_id=lb_id) as l1:
l_id = l1['listener']['id']
with self.pool(listener_id=l_id,
loadbalancer_id=lb_id):
pass
def test_cannot_create_pool_with_listener_loadbalancer_mismatch(self):
with self.subnet() as subnet:
with self.loadbalancer(subnet=subnet) as lb1, \
self.loadbalancer(subnet=subnet) as lb2:
lb_id1 = lb1['loadbalancer']['id']
lb_id2 = lb2['loadbalancer']['id']
with self.listener(loadbalancer_id=lb_id1) as l1:
l_id = l1['listener']['id']
data = {'pool': {'name': '',
'description': '',
'protocol': 'HTTP',
'lb_algorithm': 'ROUND_ROBIN',
'admin_state_up': True,
'tenant_id': self._tenant_id,
'listener_id': l_id,
'loadbalancer_id': lb_id2}}
resp, body = self._create_pool_api(data)
self.assertEqual(resp.status_int,
webob.exc.HTTPBadRequest.code)
def test_create_pool_with_session_persistence(self):
self.test_create_pool(session_persistence={'type': 'HTTP_COOKIE'})
def test_create_pool_with_session_persistence_none(self):
self.test_create_pool(session_persistence=None)
def test_create_pool_with_session_persistence_with_app_cookie(self):
sp = {'type': 'APP_COOKIE', 'cookie_name': 'sessionId'}
self.test_create_pool(session_persistence=sp)
def test_create_pool_with_session_persistence_unsupported_type(self):
with testtools.ExpectedException(webob.exc.HTTPClientError):
self.test_create_pool(session_persistence={'type': 'UNSUPPORTED'})
def test_create_pool_with_unnecessary_cookie_name(self):
sp = {'type': "SOURCE_IP", 'cookie_name': 'sessionId'}
with testtools.ExpectedException(webob.exc.HTTPClientError):
self.test_create_pool(session_persistence=sp)
def test_create_pool_with_session_persistence_without_cookie_name(self):
sp = {'type': "APP_COOKIE"}
with testtools.ExpectedException(webob.exc.HTTPClientError):
self.test_create_pool(session_persistence=sp)
def test_validate_session_persistence_valid_with_cookie_name(self):
sp = {'type': 'APP_COOKIE', 'cookie_name': 'MyCookie'}
self.assertIsNone(
self.plugin._validate_session_persistence_info(sp_info=sp))
def test_validate_session_persistence_invalid_with_cookie_name(self):
sp = {'type': 'HTTP', 'cookie_name': 'MyCookie'}
with testtools.ExpectedException(
loadbalancerv2.SessionPersistenceConfigurationInvalid):
self.plugin._validate_session_persistence_info(sp_info=sp)
def test_validate_session_persistence_invalid_without_cookie_name(self):
sp = {'type': 'APP_COOKIE'}
with testtools.ExpectedException(
loadbalancerv2.SessionPersistenceConfigurationInvalid):
self.plugin._validate_session_persistence_info(sp_info=sp)
def test_reset_session_persistence(self):
name = 'pool4'
sp = {'type': "HTTP_COOKIE"}
update_info = {'pool': {'session_persistence': None}}
with self.pool(name=name, session_persistence=sp,
listener_id=self.listener_id) as pool:
pool_id = pool['pool']['id']
sp['cookie_name'] = None
# Ensure that pool has been created properly
self.assertEqual(pool['pool']['session_persistence'],
sp)
# Try resetting session_persistence
resp, body = self._update_pool_api(pool_id, update_info)
self.assertIsNone(body['pool'].get('session_persistence'))
def test_update_no_change_session_persistence(self):
name = 'pool4'
sp = {'type': "HTTP_COOKIE"}
update_info = {'pool': {'lb_algorithm': 'ROUND_ROBIN'}}
with self.pool(name=name, session_persistence=sp,
listener_id=self.listener_id) as pool:
pool_id = pool['pool']['id']
sp['cookie_name'] = None
# Ensure that pool has been created properly
self.assertEqual(pool['pool']['session_persistence'],
sp)
# Try updating something other than session_persistence
resp, body = self._update_pool_api(pool_id, update_info)
# Make sure session_persistence is unchanged
self.assertEqual(pool['pool']['session_persistence'],
sp)
def test_update_pool_with_protocol(self):
with self.pool(listener_id=self.listener_id) as pool:
pool_id = pool['pool']['id']
data = {'pool': {'protocol': 'BLANK'}}
resp, body = self._update_pool_api(pool_id, data)
self.assertEqual(webob.exc.HTTPBadRequest.code, resp.status_int)
def test_list_pools(self):
name = 'list_pools'
expected_values = {'name': name,
'protocol': 'HTTP',
'description': 'apool',
'lb_algorithm': 'ROUND_ROBIN',
'admin_state_up': True,
'tenant_id': self._tenant_id,
'session_persistence': {'cookie_name': None,
'type': 'HTTP_COOKIE'},
'loadbalancers': [{'id': self.lb_id}],
'members': []}
with self.pool(name=name, listener_id=self.listener_id,
description='apool',
session_persistence={'type': 'HTTP_COOKIE'},
members=[]) as pool:
pool_id = pool['pool']['id']
expected_values['id'] = pool_id
resp, body = self._list_pools_api()
pool_list = body['pools']
self.assertEqual(1, len(pool_list))
for k in expected_values:
self.assertEqual(expected_values[k], pool_list[0][k])
def test_list_pools_with_sort_emulated(self):
with self.listener(loadbalancer_id=self.lb_id,
protocol_port=81,
protocol=lb_const.PROTOCOL_HTTPS) as l1, \
self.listener(loadbalancer_id=self.lb_id,
protocol_port=82,
protocol=lb_const.PROTOCOL_TCP) as l2, \
self.listener(loadbalancer_id=self.lb_id,
protocol_port=83,
protocol=lb_const.PROTOCOL_HTTP) as l3, \
self.pool(listener_id=l1['listener']['id'],
protocol=lb_const.PROTOCOL_HTTPS) as p1, \
self.pool(listener_id=l2['listener']['id'],
protocol=lb_const.PROTOCOL_TCP) as p2, \
self.pool(listener_id=l3['listener']['id'],
protocol=lb_const.PROTOCOL_HTTP) as p3:
self._test_list_with_sort('pool', (p2, p1, p3),
[('protocol', 'desc')])
def test_list_pools_with_pagination_emulated(self):
with self.listener(loadbalancer_id=self.lb_id,
protocol_port=81,
protocol=lb_const.PROTOCOL_HTTPS) as l1, \
self.listener(loadbalancer_id=self.lb_id,
protocol_port=82,
protocol=lb_const.PROTOCOL_TCP) as l2, \
self.listener(loadbalancer_id=self.lb_id,
protocol_port=83,
protocol=lb_const.PROTOCOL_HTTP) as l3, \
self.pool(listener_id=l1['listener']['id'],
protocol=lb_const.PROTOCOL_HTTPS) as p1, \
self.pool(listener_id=l2['listener']['id'],
protocol=lb_const.PROTOCOL_TCP) as p2, \
self.pool(listener_id=l3['listener']['id'],
protocol=lb_const.PROTOCOL_HTTP) as p3:
self._test_list_with_pagination('pool',
(p3, p1, p2),
('protocol', 'asc'), 2, 2)
def test_list_pools_with_pagination_reverse_emulated(self):
with self.listener(loadbalancer_id=self.lb_id,
protocol_port=81,
protocol=lb_const.PROTOCOL_HTTPS) as l1, \
self.listener(loadbalancer_id=self.lb_id,
protocol_port=82,
protocol=lb_const.PROTOCOL_TCP) as l2, \
self.listener(loadbalancer_id=self.lb_id,
protocol_port=83,
protocol=lb_const.PROTOCOL_HTTP) as l3, \
self.pool(listener_id=l1['listener']['id'],
protocol=lb_const.PROTOCOL_HTTPS) as p1, \
self.pool(listener_id=l2['listener']['id'],
protocol=lb_const.PROTOCOL_TCP) as p2, \
self.pool(listener_id=l3['listener']['id'],
protocol=lb_const.PROTOCOL_HTTP) as p3:
self._test_list_with_pagination_reverse('pool',
(p3, p1, p2),
('protocol', 'asc'),
2, 2)
def test_get_listener_shows_default_pool(self):
with self.pool(listener_id=self.listener_id) as pool:
pool_id = pool['pool']['id']
resp, body = self._get_listener_api(self.listener_id)
self.assertEqual(pool_id, body['listener']['default_pool_id'])
class MemberTestBase(PoolTestBase):
def setUp(self):
super(MemberTestBase, self).setUp()
pool_res = self._create_pool(
self.fmt, lb_const.PROTOCOL_HTTP,
lb_const.LB_METHOD_ROUND_ROBIN,
self.listener_id,
self.lb_id,
session_persistence={'type':
lb_const.SESSION_PERSISTENCE_HTTP_COOKIE})
self.pool = self.deserialize(self.fmt, pool_res)
self.pool_id = self.pool['pool']['id']
alt_listener_res = self._create_listener(
self.fmt, lb_const.PROTOCOL_HTTP,
self.def_listener['listener']['protocol_port'] + 1,
self.lb_id
)
self.alt_listener = self.deserialize(self.fmt, alt_listener_res)
self.alt_listener_id = self.alt_listener['listener']['id']
alt_pool_res = self._create_pool(
self.fmt, lb_const.PROTOCOL_HTTP,
lb_const.LB_METHOD_ROUND_ROBIN,
self.alt_listener_id,
session_persistence={'type':
lb_const.SESSION_PERSISTENCE_HTTP_COOKIE})
self.alt_pool = self.deserialize(self.fmt, alt_pool_res)
self.alt_pool_id = self.alt_pool['pool']['id']
def tearDown(self):
self._delete('pools', self.alt_pool_id)
self._delete('pools', self.pool_id)
super(MemberTestBase, self).tearDown()
def _create_member_api(self, pool_id, data):
req = self.new_create_request("pools", data, self.fmt, id=pool_id,
subresource='members')
resp = req.get_response(self.ext_api)
body = self.deserialize(self.fmt, resp)
return resp, body
def _update_member_api(self, pool_id, member_id, data):
req = self.new_update_request('pools', data, pool_id,
subresource='members', sub_id=member_id)
resp = req.get_response(self.ext_api)
body = self.deserialize(self.fmt, resp)
return resp, body
def _delete_member_api(self, pool_id, member_id):
req = self.new_delete_request('pools', pool_id, subresource='members',
sub_id=member_id)
resp = req.get_response(self.ext_api)
return resp
def _get_member_api(self, pool_id, member_id):
req = self.new_show_request('pools', pool_id, subresource='members',
sub_id=member_id)
resp = req.get_response(self.ext_api)
body = self.deserialize(self.fmt, resp)
return resp, body
def _list_members_api(self, pool_id):
req = self.new_list_request('pools', id=pool_id, subresource='members')
resp = req.get_response(self.ext_api)
body = self.deserialize(self.fmt, resp)
return resp, body
class LbaasMemberTests(MemberTestBase):
def test_create_member(self, **extras):
expected = {
'address': '127.0.0.1',
'protocol_port': 80,
'weight': 1,
'admin_state_up': True,
'tenant_id': self._tenant_id,
'subnet_id': '',
'name': 'member1'
}
expected.update(extras)
expected['subnet_id'] = self.test_subnet_id
with self.member(pool_id=self.pool_id, name='member1') as member:
member_id = member['member'].get('id')
self.assertTrue(member_id)
actual = {}
for k, v in member['member'].items():
if k in expected:
actual[k] = v
self.assertEqual(expected, actual)
self._validate_statuses(self.lb_id, self.listener_id,
pool_id=self.pool_id,
member_id=member_id)
return member
def test_create_member_with_existing_address_port_pool_combination(self):
with self.member(pool_id=self.pool_id) as member1:
member1 = member1['member']
member_data = {
'address': member1['address'],
'protocol_port': member1['protocol_port'],
'weight': 1,
'subnet_id': member1['subnet_id'],
'admin_state_up': True,
'tenant_id': member1['tenant_id']
}
self.assertRaises(
loadbalancerv2.MemberExists,
self.plugin.create_pool_member,
context.get_admin_context(),
self.pool_id,
{'member': member_data})
def test_create_member_nonexistent_subnet(self):
member_data = {
'address': '127.0.0.1',
'protocol_port': 80,
'weight': 1,
'subnet_id': uuidutils.generate_uuid(),
'admin_state_up': True,
'tenant_id': self._tenant_id
}
self.assertRaises(
loadbalancerv2.EntityNotFound,
self.plugin.create_pool_member,
context.get_admin_context(),
self.pool_id,
{'member': member_data})
def test_create_member_nonexistent_pool(self):
member_data = {
'address': '127.0.0.1',
'protocol_port': 80,
'weight': 1,
'subnet_id': self.test_subnet_id,
'admin_state_up': True,
'tenant_id': self._tenant_id
}
self.assertRaises(
loadbalancerv2.EntityNotFound,
self.plugin.create_pool_member,
context.get_admin_context(),
uuidutils.generate_uuid(),
{'member': member_data})
def test_update_member(self):
keys = [('address', "127.0.0.1"),
('tenant_id', self._tenant_id),
('protocol_port', 80),
('weight', 10),
('admin_state_up', False),
('name', 'member2')]
with self.member(pool_id=self.pool_id) as member:
member_id = member['member']['id']
resp, pool1_update = self._get_pool_api(self.pool_id)
self.assertEqual(1, len(pool1_update['pool']['members']))
data = {'member': {'weight': 10, 'admin_state_up': False,
'name': 'member2'}}
resp, body = self._update_member_api(self.pool_id, member_id, data)
for k, v in keys:
self.assertEqual(v, body['member'][k])
resp, pool1_update = self._get_pool_api(self.pool_id)
self.assertEqual(1, len(pool1_update['pool']['members']))
self._validate_statuses(self.lb_id, self.listener_id,
pool_id=self.pool_id,
member_id=member_id, member_disabled=True)
def test_delete_member(self):
with self.member(pool_id=self.pool_id, no_delete=True) as member:
member_id = member['member']['id']
resp = self._delete_member_api(self.pool_id, member_id)
self.assertEqual(webob.exc.HTTPNoContent.code, resp.status_int)
resp, pool_update = self._get_pool_api(self.pool_id)
self.assertEqual(0, len(pool_update['pool']['members']))
def test_show_member(self):
keys = [('address', "127.0.0.1"),
('tenant_id', self._tenant_id),
('protocol_port', 80),
('weight', 1),
('admin_state_up', True),
('name', 'member1')]
with self.member(pool_id=self.pool_id,
name='member1') as member:
member_id = member['member']['id']
resp, body = self._get_member_api(self.pool_id, member_id)
for k, v in keys:
self.assertEqual(v, body['member'][k])
def test_list_members(self):
with self.member(pool_id=self.pool_id,
name='member1', protocol_port=81):
resp, body = self._list_members_api(self.pool_id)
self.assertEqual(1, len(body['members']))
def test_list_members_only_for_pool(self):
with self.member(pool_id=self.alt_pool_id):
with self.member(pool_id=self.pool_id,
protocol_port=81) as in_member:
resp, body = self._list_members_api(self.pool_id)
self.assertEqual(len(body['members']), 1)
self.assertIn(in_member['member'], body['members'])
def test_list_members_with_sort_emulated(self):
with self.member(pool_id=self.pool_id, protocol_port=81) as m1:
with self.member(pool_id=self.pool_id, protocol_port=82) as m2:
with self.member(pool_id=self.pool_id, protocol_port=83) as m3:
self._test_list_with_sort(
'pool', (m3, m2, m1),
[('protocol_port', 'desc')],
id=self.pool_id,
subresource='member')
def test_list_members_with_pagination_emulated(self):
with self.member(pool_id=self.pool_id, protocol_port=81) as m1:
with self.member(pool_id=self.pool_id, protocol_port=82) as m2:
with self.member(pool_id=self.pool_id, protocol_port=83) as m3:
self._test_list_with_pagination(
'pool', (m1, m2, m3), ('protocol_port', 'asc'),
2, 2,
id=self.pool_id, subresource='member'
)
def test_list_members_with_pagination_reverse_emulated(self):
with self.member(pool_id=self.pool_id, protocol_port=81) as m1:
with self.member(pool_id=self.pool_id, protocol_port=82) as m2:
with self.member(pool_id=self.pool_id, protocol_port=83) as m3:
self._test_list_with_pagination_reverse(
'pool', (m1, m2, m3), ('protocol_port', 'asc'),
2, 2,
id=self.pool_id, subresource='member'
)
def test_list_members_invalid_pool_id(self):
resp, body = self._list_members_api('WRONG_POOL_ID')
self.assertEqual(webob.exc.HTTPNotFound.code, resp.status_int)
resp, body = self._list_members_api(self.pool_id)
self.assertEqual(webob.exc.HTTPOk.code, resp.status_int)
def test_get_member_invalid_pool_id(self):
with self.member(pool_id=self.pool_id) as member:
member_id = member['member']['id']
resp, body = self._get_member_api('WRONG_POOL_ID', member_id)
self.assertEqual(webob.exc.HTTPNotFound.code, resp.status_int)
resp, body = self._get_member_api(self.pool_id, member_id)
self.assertEqual(webob.exc.HTTPOk.code, resp.status_int)
def test_create_member_invalid_pool_id(self):
data = {'member': {'address': '127.0.0.1',
'protocol_port': 80,
'weight': 1,
'admin_state_up': True,
'tenant_id': self._tenant_id,
'subnet_id': self.test_subnet_id}}
resp, body = self._create_member_api('WRONG_POOL_ID', data)
self.assertEqual(webob.exc.HTTPNotFound.code, resp.status_int)
def test_update_member_invalid_pool_id(self):
with self.member(pool_id=self.pool_id) as member:
member_id = member['member']['id']
data = {'member': {'weight': 1}}
resp, body = self._update_member_api(
'WRONG_POOL_ID', member_id, data)
self.assertEqual(webob.exc.HTTPNotFound.code, resp.status_int)
def test_create_member_invalid_name(self):
data = {'member': {'address': '127.0.0.1',
'protocol_port': 80,
'weight': 1,
'admin_state_up': True,
'tenant_id': self._tenant_id,
'subnet_id': self.test_subnet_id,
'name': 123}}
resp, body = self._create_member_api('POOL_ID', data)
self.assertEqual(webob.exc.HTTPBadRequest.code, resp.status_int)
def test_delete_member_invalid_pool_id(self):
with self.member(pool_id=self.pool_id) as member:
member_id = member['member']['id']
resp = self._delete_member_api('WRONG_POOL_ID', member_id)
self.assertEqual(webob.exc.HTTPNotFound.code, resp.status_int)
def test_get_pool_shows_members(self):
with self.member(pool_id=self.pool_id,
name='member1') as member:
expected = {'id': member['member']['id']}
resp, body = self._get_pool_api(self.pool_id)
self.assertIn(expected, body['pool']['members'])
class HealthMonitorTestBase(MemberTestBase):
def _create_healthmonitor_api(self, data):
req = self.new_create_request("healthmonitors", data, self.fmt)
resp = req.get_response(self.ext_api)
body = self.deserialize(self.fmt, resp)
return resp, body
def _update_healthmonitor_api(self, hm_id, data):
req = self.new_update_request('healthmonitors', data, hm_id)
resp = req.get_response(self.ext_api)
body = self.deserialize(self.fmt, resp)
return resp, body
def _delete_healthmonitor_api(self, hm_id):
req = self.new_delete_request('healthmonitors', hm_id)
resp = req.get_response(self.ext_api)
return resp
def _get_healthmonitor_api(self, hm_id):
req = self.new_show_request('healthmonitors', hm_id)
resp = req.get_response(self.ext_api)
body = self.deserialize(self.fmt, resp)
return resp, body
def _list_healthmonitors_api(self):
req = self.new_list_request('healthmonitors')
resp = req.get_response(self.ext_api)
body = self.deserialize(self.fmt, resp)
return resp, body
class TestLbaasHealthMonitorTests(HealthMonitorTestBase):
def test_create_healthmonitor(self, **extras):
expected = {
'type': 'HTTP',
'delay': 1,
'timeout': 1,
'max_retries': 2,
'http_method': 'GET',
'url_path': '/',
'expected_codes': '200',
'admin_state_up': True,
'tenant_id': self._tenant_id,
'pools': [{'id': self.pool_id}],
'name': 'monitor1'
}
expected.update(extras)
with self.healthmonitor(pool_id=self.pool_id, type='HTTP',
name='monitor1', **extras) as healthmonitor:
hm_id = healthmonitor['healthmonitor'].get('id')
self.assertTrue(hm_id)
actual = {}
for k, v in healthmonitor['healthmonitor'].items():
if k in expected:
actual[k] = v
self.assertEqual(expected, actual)
self._validate_statuses(self.lb_id, self.listener_id,
pool_id=self.pool_id,
hm_id=hm_id)
_, pool = self._get_pool_api(self.pool_id)
self.assertEqual(
{'type': lb_const.SESSION_PERSISTENCE_HTTP_COOKIE,
'cookie_name': None},
pool['pool'].get('session_persistence'))
return healthmonitor
def test_show_healthmonitor(self, **extras):
expected = {
'type': 'HTTP',
'delay': 1,
'timeout': 1,
'max_retries': 2,
'http_method': 'GET',
'url_path': '/',
'expected_codes': '200',
'admin_state_up': True,
'tenant_id': self._tenant_id,
'pools': [{'id': self.pool_id}],
'name': 'monitor1'
}
expected.update(extras)
with self.healthmonitor(pool_id=self.pool_id, type='HTTP',
name='monitor1') as healthmonitor:
hm_id = healthmonitor['healthmonitor']['id']
resp, body = self._get_healthmonitor_api(hm_id)
actual = {}
for k, v in body['healthmonitor'].items():
if k in expected:
actual[k] = v
self.assertEqual(expected, actual)
return healthmonitor
def test_update_healthmonitor(self, **extras):
expected = {
'type': 'HTTP',
'delay': 30,
'timeout': 10,
'max_retries': 4,
'http_method': 'GET',
'url_path': '/index.html',
'expected_codes': '200,404',
'admin_state_up': True,
'tenant_id': self._tenant_id,
'pools': [{'id': self.pool_id}],
'name': 'monitor2'
}
expected.update(extras)
with self.healthmonitor(pool_id=self.pool_id, type='HTTP',
name='monitor1') as healthmonitor:
hm_id = healthmonitor['healthmonitor']['id']
data = {'healthmonitor': {'delay': 30,
'timeout': 10,
'max_retries': 4,
'expected_codes': '200,404',
'url_path': '/index.html',
'name': 'monitor2'}}
resp, body = self._update_healthmonitor_api(hm_id, data)
actual = {}
for k, v in body['healthmonitor'].items():
if k in expected:
actual[k] = v
self.assertEqual(expected, actual)
self._validate_statuses(self.lb_id, self.listener_id,
pool_id=self.pool_id,
hm_id=hm_id)
return healthmonitor
def test_delete_healthmonitor(self):
with self.healthmonitor(pool_id=self.pool_id,
no_delete=True) as healthmonitor:
hm_id = healthmonitor['healthmonitor']['id']
resp = self._delete_healthmonitor_api(hm_id)
self.assertEqual(webob.exc.HTTPNoContent.code, resp.status_int)
def test_create_healthmonitor_with_type_tcp(self, **extras):
expected = {
'type': 'TCP',
'delay': 1,
'timeout': 1,
'max_retries': 2,
'admin_state_up': True,
'tenant_id': self._tenant_id,
'pools': [{'id': self.pool_id}],
'name': 'monitor1'
}
expected.update(extras)
with self.healthmonitor(pool_id=self.pool_id,
type='TCP',
name='monitor1') as healthmonitor:
hm_id = healthmonitor['healthmonitor'].get('id')
self.assertTrue(hm_id)
actual = {}
for k, v in healthmonitor['healthmonitor'].items():
if k in expected:
actual[k] = v
self.assertEqual(expected, actual)
self._validate_statuses(self.lb_id, self.listener_id,
pool_id=self.pool_id, hm_id=hm_id)
return healthmonitor
def test_show_healthmonitor_with_type_tcp(self, **extras):
expected = {
'type': 'TCP',
'delay': 1,
'timeout': 1,
'max_retries': 2,
'admin_state_up': True,
'tenant_id': self._tenant_id,
'pools': [{'id': self.pool_id}],
'name': 'monitor1'
}
expected.update(extras)
with self.healthmonitor(pool_id=self.pool_id,
type='TCP',
name='monitor1') as healthmonitor:
hm_id = healthmonitor['healthmonitor']['id']
resp, body = self._get_healthmonitor_api(hm_id)
actual = {}
for k, v in body['healthmonitor'].items():
if k in expected:
actual[k] = v
self.assertEqual(expected, actual)
return healthmonitor
def test_update_healthmonitor_with_type_tcp(self, **extras):
expected = {
'type': 'TCP',
'delay': 30,
'timeout': 10,
'max_retries': 4,
'admin_state_up': True,
'tenant_id': self._tenant_id,
'pools': [{'id': self.pool_id}],
'name': 'monitor2'
}
expected.update(extras)
with self.healthmonitor(pool_id=self.pool_id,
type='TCP',
name='monitor1') as healthmonitor:
hm_id = healthmonitor['healthmonitor']['id']
data = {'healthmonitor': {'delay': 30,
'timeout': 10,
'max_retries': 4,
'name': 'monitor2'}}
resp, body = self._update_healthmonitor_api(hm_id, data)
actual = {}
for k, v in body['healthmonitor'].items():
if k in expected:
actual[k] = v
self.assertEqual(expected, actual)
self._validate_statuses(self.lb_id, self.listener_id,
pool_id=self.pool_id, hm_id=hm_id)
return healthmonitor
def test_create_health_monitor_with_timeout_invalid(self):
data = {'healthmonitor': {'type': 'HTTP',
'delay': 1,
'timeout': -1,
'max_retries': 2,
'admin_state_up': True,
'tenant_id': self._tenant_id,
'pool_id': self.pool_id}}
resp, body = self._create_healthmonitor_api(data)
self.assertEqual(webob.exc.HTTPBadRequest.code, resp.status_int)
def test_update_health_monitor_with_timeout_invalid(self):
with self.healthmonitor(pool_id=self.pool_id) as healthmonitor:
hm_id = healthmonitor['healthmonitor']['id']
data = {'healthmonitor': {'delay': 10,
'timeout': -1,
'max_retries': 2,
'admin_state_up': False}}
resp, body = self._update_healthmonitor_api(hm_id, data)
self.assertEqual(webob.exc.HTTPBadRequest.code, resp.status_int)
def test_create_health_monitor_with_delay_invalid(self):
data = {'healthmonitor': {'type': 'HTTP',
'delay': -1,
'timeout': 1,
'max_retries': 2,
'admin_state_up': True,
'tenant_id': self._tenant_id,
'pool_id': self.pool_id}}
resp, body = self._create_healthmonitor_api(data)
self.assertEqual(webob.exc.HTTPBadRequest.code, resp.status_int)
def test_update_health_monitor_with_delay_invalid(self):
with self.healthmonitor(pool_id=self.pool_id) as healthmonitor:
hm_id = healthmonitor['healthmonitor']['id']
data = {'healthmonitor': {'delay': -1,
'timeout': 1,
'max_retries': 2,
'admin_state_up': False}}
resp, body = self._update_healthmonitor_api(hm_id, data)
self.assertEqual(webob.exc.HTTPBadRequest.code, resp.status_int)
def test_create_health_monitor_with_max_retries_invalid(self):
data = {'healthmonitor': {'type': 'HTTP',
'delay': 1,
'timeout': 1,
'max_retries': 20,
'admin_state_up': True,
'tenant_id': self._tenant_id,
'pool_id': self.pool_id}}
resp, body = self._create_healthmonitor_api(data)
self.assertEqual(webob.exc.HTTPBadRequest.code, resp.status_int)
def test_update_health_monitor_with_max_retries_invalid(self):
with self.healthmonitor(pool_id=self.pool_id) as healthmonitor:
hm_id = healthmonitor['healthmonitor']['id']
data = {'healthmonitor': {'delay': 1,
'timeout': 1,
'max_retries': 20,
'admin_state_up': False}}
resp, body = self._update_healthmonitor_api(hm_id, data)
self.assertEqual(webob.exc.HTTPBadRequest.code, resp.status_int)
def test_create_health_monitor_with_type_invalid(self):
data = {'healthmonitor': {'type': 1,
'delay': 1,
'timeout': 1,
'max_retries': 2,
'admin_state_up': True,
'tenant_id': self._tenant_id,
'pool_id': self.pool_id}}
resp, body = self._create_healthmonitor_api(data)
self.assertEqual(webob.exc.HTTPBadRequest.code, resp.status_int)
def test_update_health_monitor_with_type_invalid(self):
with self.healthmonitor(pool_id=self.pool_id) as healthmonitor:
hm_id = healthmonitor['healthmonitor']['id']
data = {'healthmonitor': {'type': 1,
'delay': 1,
'timeout': 1,
'max_retries': 2,
'admin_state_up': False}}
resp, body = self._update_healthmonitor_api(hm_id, data)
self.assertEqual(webob.exc.HTTPBadRequest.code, resp.status_int)
def test_create_health_monitor_with_http_method_non_default(self):
data = {'healthmonitor': {'type': 'HTTP',
'http_method': 'POST',
'delay': 2,
'timeout': 1,
'max_retries': 2,
'tenant_id': self._tenant_id,
'pool_id': self.pool_id}}
resp, body = self._create_healthmonitor_api(data)
self.assertEqual(201, resp.status_int)
self._delete('healthmonitors', body['healthmonitor']['id'])
def test_create_health_monitor_with_http_method_invalid(self):
data = {'healthmonitor': {'type': 'HTTP',
'http_method': 'FOO',
'delay': 1,
'timeout': 1,
'max_retries': 2,
'admin_state_up': True,
'tenant_id': self._tenant_id,
'pool_id': self.pool_id}}
resp, body = self._create_healthmonitor_api(data)
self.assertEqual(webob.exc.HTTPBadRequest.code, resp.status_int)
def test_update_health_monitor_with_http_method_invalid(self):
with self.healthmonitor(pool_id=self.pool_id) as healthmonitor:
hm_id = healthmonitor['healthmonitor']['id']
data = {'healthmonitor': {'type': 'HTTP',
'http_method': 'FOO',
'delay': 1,
'timeout': 1,
'max_retries': 2,
'admin_state_up': False}}
resp, body = self._update_healthmonitor_api(hm_id, data)
self.assertEqual(webob.exc.HTTPBadRequest.code, resp.status_int)
def test_create_health_monitor_with_url_path_non_default(self):
data = {'healthmonitor': {'type': 'HTTP',
'url_path': '/a/b_c-d/e%20f',
'delay': 2,
'timeout': 1,
'max_retries': 2,
'tenant_id': self._tenant_id,
'pool_id': self.pool_id}}
resp, body = self._create_healthmonitor_api(data)
self.assertEqual(201, resp.status_int)
self._delete('healthmonitors', body['healthmonitor']['id'])
def test_create_health_monitor_with_url_path_invalid(self):
data = {'healthmonitor': {'type': 'HTTP',
'url_path': 1,
'delay': 1,
'timeout': 1,
'max_retries': 2,
'admin_state_up': True,
'tenant_id': self._tenant_id,
'pool_id': self.pool_id}}
resp, body = self._create_healthmonitor_api(data)
self.assertEqual(webob.exc.HTTPBadRequest.code, resp.status_int)
def test_update_health_monitor_with_url_path_invalid(self):
with self.healthmonitor(pool_id=self.pool_id) as healthmonitor:
hm_id = healthmonitor['healthmonitor']['id']
data = {'healthmonitor': {'url_path': 1,
'delay': 1,
'timeout': 1,
'max_retries': 2,
'admin_state_up': False}}
resp, body = self._update_healthmonitor_api(hm_id, data)
self.assertEqual(webob.exc.HTTPBadRequest.code, resp.status_int)
def test_create_healthmonitor_invalid_pool_id(self):
data = {'healthmonitor': {'type': lb_const.HEALTH_MONITOR_TCP,
'delay': 1,
'timeout': 1,
'max_retries': 1,
'tenant_id': self._tenant_id,
'pool_id': uuidutils.generate_uuid()}}
resp, body = self._create_healthmonitor_api(data)
self.assertEqual(webob.exc.HTTPNotFound.code, resp.status_int)
def test_create_healthmonitor_invalid_name(self):
data = {'healthmonitor': {'type': lb_const.HEALTH_MONITOR_TCP,
'delay': 1,
'timeout': 1,
'max_retries': 1,
'tenant_id': self._tenant_id,
'pool_id': self.pool_id,
'name': 123}}
resp, body = self._create_healthmonitor_api(data)
self.assertEqual(webob.exc.HTTPBadRequest.code, resp.status_int)
def test_create_health_monitor_with_max_retries_down(self, **extras):
expected = {
'type': 'HTTP',
'delay': 1,
'timeout': 1,
'max_retries': 2,
'http_method': 'GET',
'url_path': '/',
'expected_codes': '200',
'admin_state_up': True,
'tenant_id': self._tenant_id,
'pools': [{'id': self.pool_id}],
'name': 'monitor1',
'max_retries_down': 1
}
expected.update(extras)
with self.healthmonitor(pool_id=self.pool_id, type='HTTP',
name='monitor1', max_retries_down=1,
**extras) as healthmonitor:
hm_id = healthmonitor['healthmonitor'].get('id')
self.assertTrue(hm_id)
actual = {}
for k, v in healthmonitor['healthmonitor'].items():
if k in expected:
actual[k] = v
self.assertEqual(expected, actual)
self._validate_statuses(self.lb_id, self.listener_id,
pool_id=self.pool_id,
hm_id=hm_id)
_, pool = self._get_pool_api(self.pool_id)
self.assertEqual(
{'type': lb_const.SESSION_PERSISTENCE_HTTP_COOKIE,
'cookie_name': None},
pool['pool'].get('session_persistence'))
return healthmonitor
def test_only_one_healthmonitor_per_pool(self):
with self.healthmonitor(pool_id=self.pool_id):
data = {'healthmonitor': {'type': lb_const.HEALTH_MONITOR_TCP,
'delay': 1,
'timeout': 1,
'max_retries': 1,
'tenant_id': self._tenant_id,
'pool_id': self.pool_id}}
resp, body = self._create_healthmonitor_api(data)
self.assertEqual(webob.exc.HTTPConflict.code, resp.status_int)
def test_get_healthmonitor(self):
expected = {
'type': 'HTTP',
'delay': 1,
'timeout': 1,
'max_retries': 2,
'http_method': 'GET',
'url_path': '/',
'expected_codes': '200',
'admin_state_up': True,
'tenant_id': self._tenant_id,
'pools': [{'id': self.pool_id}],
'name': 'monitor1',
'max_retries_down': 3
}
with self.healthmonitor(pool_id=self.pool_id, type='HTTP',
name='monitor1') as healthmonitor:
hm_id = healthmonitor['healthmonitor']['id']
expected['id'] = hm_id
resp, body = self._get_healthmonitor_api(hm_id)
self.assertEqual(expected, body['healthmonitor'])
def test_list_healthmonitors(self):
expected = {
'type': 'HTTP',
'delay': 1,
'timeout': 1,
'max_retries': 2,
'http_method': 'GET',
'url_path': '/',
'expected_codes': '200',
'admin_state_up': True,
'tenant_id': self._tenant_id,
'pools': [{'id': self.pool_id}],
'name': '',
'max_retries_down': 3
}
with self.healthmonitor(pool_id=self.pool_id,
type='HTTP') as healthmonitor:
hm_id = healthmonitor['healthmonitor']['id']
expected['id'] = hm_id
resp, body = self._list_healthmonitors_api()
self.assertEqual([expected], body['healthmonitors'])
def test_get_pool_shows_healthmonitor_id(self):
with self.healthmonitor(pool_id=self.pool_id) as healthmonitor:
hm_id = healthmonitor['healthmonitor']['id']
resp, body = self._get_pool_api(self.pool_id)
self.assertEqual(hm_id, body['pool']['healthmonitor_id'])
def test_update_healthmonitor_status(self):
with self.healthmonitor(pool_id=self.pool_id) as healthmonitor:
hm_id = healthmonitor['healthmonitor'].get('id')
ctx = context.get_admin_context()
self.plugin.db.update_status(
ctx, models.HealthMonitorV2, hm_id,
provisioning_status=n_constants.ACTIVE,
operating_status=lb_const.DEGRADED)
db_hm = self.plugin.db.get_healthmonitor(ctx, hm_id)
self.assertEqual(n_constants.ACTIVE, db_hm.provisioning_status)
self.assertFalse(hasattr(db_hm, 'operating_status'))
def test_create_healthmonitor_admin_state_down(self):
self.test_create_healthmonitor(admin_state_up=False)
class LbaasStatusesTest(MemberTestBase):
def setUp(self):
super(LbaasStatusesTest, self).setUp()
self.lbs_to_clean = []
self.addCleanup(self.cleanup_lbs)
def cleanup_lbs(self):
for lb_dict in self.lbs_to_clean:
self._delete_populated_lb(lb_dict)
def test_disable_lb(self):
ctx = context.get_admin_context()
lb_dict = self._create_new_populated_loadbalancer()
lb_id = lb_dict['id']
opt = {'admin_state_up': False}
self.plugin.db.update_loadbalancer(ctx, lb_id, opt)
statuses = self._get_loadbalancer_statuses_api(lb_id)[1]
n_disabled = self._countDisabledChildren(statuses, 0)
self.assertEqual(11, n_disabled)
def _countDisabledChildren(self, obj, count):
if isinstance(obj, dict):
for key, value in obj.items():
if key == "operating_status":
count += 1
continue
count = self._countDisabledChildren(value, count)
if isinstance(obj, list):
for value in obj:
count = self._countDisabledChildren(value, count)
return count
def test_disable_trickles_down(self):
lb_dict = self._create_new_populated_loadbalancer()
lb_id = lb_dict['id']
self._update_loadbalancer_api(lb_id,
{'loadbalancer': {
'admin_state_up': False}})
statuses = self._get_loadbalancer_statuses_api(lb_id)[1]
self._assertDisabled(self._traverse_statuses(statuses))
self._assertDisabled(self._traverse_statuses(statuses,
listener='listener_HTTP'))
self._assertDisabled(self._traverse_statuses(
statuses, listener='listener_HTTPS'))
self._assertDisabled(self._traverse_statuses(statuses,
listener='listener_HTTP',
pool='pool_HTTP'))
self._assertDisabled(self._traverse_statuses(statuses,
listener='listener_HTTPS',
pool='pool_HTTPS'))
self._assertDisabled(self._traverse_statuses(statuses,
listener='listener_HTTP',
pool='pool_HTTP',
member='127.0.0.1'))
self._assertDisabled(self._traverse_statuses(statuses,
listener='listener_HTTPS',
pool='pool_HTTPS',
member='127.0.0.4'))
self._assertDisabled(self._traverse_statuses(statuses,
listener='listener_HTTP',
pool='pool_HTTP',
healthmonitor=True))
self._assertDisabled(self._traverse_statuses(statuses,
listener='listener_HTTPS',
pool='pool_HTTPS',
healthmonitor=True))
def test_disable_not_calculated_in_degraded(self):
lb_dict = self._create_new_populated_loadbalancer()
lb_id = lb_dict['id']
listener_id = lb_dict['listeners'][0]['id']
listener = 'listener_HTTP'
self._update_listener_api(listener_id,
{'listener': {'admin_state_up': False}})
statuses = self._get_loadbalancer_statuses_api(lb_id)[1]
self._assertOnline(self._traverse_statuses(statuses))
self._update_listener_api(listener_id,
{'listener': {'admin_state_up': True}})
pool_id = lb_dict['listeners'][0]['pools'][0]['id']
pool = 'pool_HTTP'
member_id = lb_dict['listeners'][0]['pools'][0]['members'][0]['id']
member = '127.0.0.1'
self._update_member_api(pool_id, member_id,
{'member': {'admin_state_up': False}})
statuses = self._get_loadbalancer_statuses_api(lb_id)[1]
self._assertOnline(self._traverse_statuses(statuses))
self._assertOnline(self._traverse_statuses(statuses,
listener=listener))
self._assertOnline(self._traverse_statuses(statuses,
listener=listener,
pool=pool))
self._assertDisabled(self._traverse_statuses(statuses,
listener=listener,
pool=pool,
member=member))
def test_that_failures_trickle_up_on_prov_errors(self):
ctx = context.get_admin_context()
ERROR = n_constants.ERROR
lb_dict = self._create_new_populated_loadbalancer()
lb_id = lb_dict['id']
statuses = self._get_loadbalancer_statuses_api(lb_id)[1]
stat = self._traverse_statuses(statuses, listener="listener_HTTP",
pool="pool_HTTP", member='127.0.0.1')
member_id = stat['id']
self.plugin.db.update_status(ctx, models.MemberV2, member_id,
provisioning_status=ERROR)
statuses = self._get_loadbalancer_statuses_api(lb_id)[1]
#Assert the parents of the member are degraded
self._assertDegraded(self._traverse_statuses(statuses,
listener='listener_HTTP',
pool='pool_HTTP'))
self._assertDegraded(self._traverse_statuses(statuses,
listener='listener_HTTP'))
self._assertDegraded(self._traverse_statuses(statuses))
#Verify siblings are not degraded
self._assertNotDegraded(self._traverse_statuses(statuses,
listener='listener_HTTPS', pool='pool_HTTPS'))
self._assertNotDegraded(self._traverse_statuses(statuses,
listener='listener_HTTPS'))
def test_that_failures_trickle_up_on_non_ONLINE_prov_status(self):
ctx = context.get_admin_context()
lb_dict = self._create_new_populated_loadbalancer()
lb_id = lb_dict['id']
statuses = self._get_loadbalancer_statuses_api(lb_id)[1]
stat = self._traverse_statuses(statuses, listener="listener_HTTP",
pool="pool_HTTP", member='127.0.0.1')
member_id = stat['id']
self.plugin.db.update_status(ctx, models.MemberV2, member_id,
operating_status=lb_const.OFFLINE)
statuses = self._get_loadbalancer_statuses_api(lb_id)[1]
#Assert the parents of the member are degraded
self._assertDegraded(self._traverse_statuses(statuses,
listener='listener_HTTP',
pool='pool_HTTP'))
self._assertDegraded(self._traverse_statuses(statuses,
listener='listener_HTTP'))
self._assertDegraded(self._traverse_statuses(statuses))
#Verify siblings are not degraded
self._assertNotDegraded(self._traverse_statuses(statuses,
listener='listener_HTTPS', pool='pool_HTTPS'))
self._assertNotDegraded(self._traverse_statuses(statuses,
listener='listener_HTTPS'))
def test_degraded_with_pool_error(self):
ctx = context.get_admin_context()
ERROR = n_constants.ERROR
lb_dict = self._create_new_populated_loadbalancer()
lb_id = lb_dict['id']
statuses = self._get_loadbalancer_statuses_api(lb_id)[1]
stat = self._traverse_statuses(statuses, listener="listener_HTTP",
pool="pool_HTTP")
pool_id = stat['id']
self.plugin.db.update_status(ctx, models.PoolV2, pool_id,
provisioning_status=ERROR)
statuses = self._get_loadbalancer_statuses_api(lb_id)[1]
#Assert the parents of the pool are degraded
self._assertDegraded(self._traverse_statuses(statuses,
listener='listener_HTTP'))
self._assertDegraded(self._traverse_statuses(statuses))
#Verify siblings are not degraded
self._assertNotDegraded(self._traverse_statuses(statuses,
listener='listener_HTTPS'))
def _assertOnline(self, obj):
OS = "operating_status"
if OS in obj:
self.assertEqual(lb_const.ONLINE, obj[OS])
def _assertDegraded(self, obj):
OS = "operating_status"
if OS in obj:
self.assertEqual(lb_const.DEGRADED, obj[OS])
def _assertNotDegraded(self, obj):
OS = "operating_status"
if OS in obj:
self.assertNotEqual(lb_const.DEGRADED, obj[OS])
def _assertDisabled(self, obj):
OS = "operating_status"
if OS in obj:
self.assertEqual(lb_const.DISABLED, obj[OS])
def _delete_populated_lb(self, lb_dict):
lb_id = lb_dict['id']
for pool in lb_dict['pools']:
pool_id = pool['id']
for member in pool['members']:
member_id = member['id']
self._delete_member_api(pool_id, member_id)
self._delete_pool_api(pool_id)
for listener in lb_dict['listeners']:
listener_id = listener['id']
self._delete_listener_api(listener_id)
self._delete_loadbalancer_api(lb_id)
def _traverse_statuses(self, statuses, listener=None, pool=None,
member=None, healthmonitor=False):
lb = statuses['statuses']['loadbalancer']
if listener is None:
return copy.copy(lb)
listener_list = lb['listeners']
for listener_obj in listener_list:
if listener_obj['name'] == listener:
if pool is None:
return copy.copy(listener_obj)
pool_list = listener_obj['pools']
for pool_obj in pool_list:
if pool_obj['name'] == pool:
if healthmonitor:
return copy.copy(pool_obj['healthmonitor'])
if member is None:
return copy.copy(pool_obj)
member_list = pool_obj['members']
for member_obj in member_list:
if member_obj['address'] == member:
return copy.copy(member_obj)
pool_list = lb['pools']
for pool_obj in pool_list:
if pool_obj['name'] == pool:
if healthmonitor:
return copy.copy(pool_obj['healthmonitor'])
if member is None:
return copy.copy(pool_obj)
member_list = pool_obj['members']
for member_obj in member_list:
if member_obj['address'] == member:
return copy.copy(member_obj)
raise KeyError
def _create_new_populated_loadbalancer(self):
oct4 = 1
subnet_id = self.test_subnet_id
HTTP = lb_const.PROTOCOL_HTTP
HTTPS = lb_const.PROTOCOL_HTTPS
ROUND_ROBIN = lb_const.LB_METHOD_ROUND_ROBIN
fmt = self.fmt
lb_dict = {}
lb_res = self._create_loadbalancer(
self.fmt, subnet_id=self.test_subnet_id,
name='test_loadbalancer')
lb = self.deserialize(fmt, lb_res)
lb_id = lb['loadbalancer']['id']
lb_dict['id'] = lb_id
lb_dict['listeners'] = []
lb_dict['pools'] = []
for prot, port in [(HTTP, 80), (HTTPS, 443)]:
res = self._create_listener(fmt, prot, port, lb_id,
name="listener_%s" % prot)
listener = self.deserialize(fmt, res)
listener_id = listener['listener']['id']
lb_dict['listeners'].append({'id': listener_id, 'pools': []})
res = self._create_pool(fmt, prot, ROUND_ROBIN, listener_id,
loadbalancer_id=lb_id,
name="pool_%s" % prot)
pool = self.deserialize(fmt, res)
pool_id = pool['pool']['id']
members = []
lb_dict['listeners'][-1]['pools'].append({'id': pool['pool']['id'],
'members': members})
lb_dict['pools'].append({'id': pool['pool']['id'],
'members': members})
res = self._create_healthmonitor(fmt, pool_id, type=prot, delay=1,
timeout=1, max_retries=1)
health_monitor = self.deserialize(fmt, res)
lb_dict['listeners'][-1]['pools'][-1]['health_monitor'] = {
'id': health_monitor['healthmonitor']['id']}
lb_dict['pools'][-1]['health_monitor'] = {
'id': health_monitor['healthmonitor']['id']}
for i in six.moves.range(0, 3):
address = "127.0.0.%i" % oct4
oct4 += 1
res = self._create_member(fmt, pool_id, address, port,
subnet_id)
member = self.deserialize(fmt, res)
members.append({'id': member['member']['id']})
self.lbs_to_clean.append(lb_dict)
return lb_dict
| 45.010057 | 93 | 0.537764 |
d670709c8374b8a90ec39a424bcd0315902095c7 | 10689 | py | Python | util/visualizer.py | supri-a/TXM2SEM | 300dbbbad6996d025381551bf968759b56d35d4d | ["MIT"] | 1 | 2021-08-13T06:13:21.000Z | 2021-08-13T06:13:21.000Z | util/visualizer.py | supri-a/TXM2SEM | 300dbbbad6996d025381551bf968759b56d35d4d | ["MIT"] | null | null | null | util/visualizer.py | supri-a/TXM2SEM | 300dbbbad6996d025381551bf968759b56d35d4d | ["MIT"] | null | null | null |
import numpy as np
import os
import sys
import ntpath
import time
from . import util, html
from subprocess import Popen, PIPE
from skimage.transform import resize
if sys.version_info[0] == 2:
VisdomExceptionBase = Exception
else:
VisdomExceptionBase = ConnectionError
def save_images(webpage, visuals, image_path, aspect_ratio=1.0, width=256):
"""Save images to the disk.
Parameters:
        webpage (the HTML class) -- the HTML webpage class that stores these images (see html.py for more details)
visuals (OrderedDict) -- an ordered dictionary that stores (name, images (either tensor or numpy) ) pairs
image_path (str) -- the string is used to create image paths
aspect_ratio (float) -- the aspect ratio of saved images
width (int) -- the images will be resized to width x width
This function will save images stored in 'visuals' to the HTML file specified by 'webpage'.
"""
image_dir = webpage.get_image_dir()
short_path = ntpath.basename(image_path[0])
name = os.path.splitext(short_path)[0]
webpage.add_header(name)
ims, txts, links = [], [], []
for label, im_data in visuals.items():
im = util.tensor2im(im_data)
image_name = '%s_%s.png' % (name, label)
save_path = os.path.join(image_dir, image_name)
h, w, _ = im.shape
if aspect_ratio > 1.0:
im = resize(im, (h, int(w * aspect_ratio)), order=3)
if aspect_ratio < 1.0:
im = resize(im, (int(h / aspect_ratio), w), order=3)
util.save_image(im, save_path)
ims.append(image_name)
txts.append(label)
links.append(image_name)
webpage.add_images(ims, txts, links, width=width)
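# --- Illustrative usage sketch (editor's addition, not part of the original module) ---
# save_images() above expects an html.HTML webpage, an OrderedDict of visuals and the source
# image path. The helper below is only a hedged sketch: `model` and `dataset` are assumed to
# expose set_input()/test()/get_current_visuals()/get_image_paths(); those names are
# assumptions, while html.HTML, util and save_images() come from this codebase.
def _example_save_images_usage(web_dir, model, dataset, name='experiment'):
    """Minimal sketch: run a model over a dataset and dump each sample's visuals to HTML."""
    webpage = html.HTML(web_dir, 'Results for %s' % name)
    for data in dataset:
        model.set_input(data)                     # assumed model API
        model.test()                              # assumed model API
        visuals = model.get_current_visuals()     # OrderedDict of label -> image tensor
        img_path = model.get_image_paths()        # list whose first entry names the sample
        save_images(webpage, visuals, img_path, aspect_ratio=1.0, width=256)
    webpage.save()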
class Visualizer():
"""This class includes several functions that can display/save images and print/save logging information.
It uses a Python library 'visdom' for display, and a Python library 'dominate' (wrapped in 'HTML') for creating HTML files with images.
"""
def __init__(self, opt):
"""Initialize the Visualizer class
Parameters:
opt -- stores all the experiment flags; needs to be a subclass of BaseOptions
Step 1: Cache the training/test options
Step 2: connect to a visdom server
        Step 3: create an HTML object for saving HTML files
Step 4: create a logging file to store training losses
"""
self.opt = opt # cache the option
self.display_id = opt.display_id
self.use_html = opt.isTrain and not opt.no_html
self.win_size = opt.display_winsize
self.name = opt.name
self.port = opt.display_port
self.saved = False
if self.display_id > 0: # connect to a visdom server given <display_port> and <display_server>
import visdom
self.ncols = opt.display_ncols
self.vis = visdom.Visdom(server=opt.display_server, port=opt.display_port, env=opt.display_env)
if not self.vis.check_connection():
self.create_visdom_connections()
if self.use_html: # create an HTML object at <checkpoints_dir>/web/; images will be saved under <checkpoints_dir>/web/images/
self.web_dir = os.path.join(opt.checkpoints_dir, opt.name, 'web')
self.img_dir = os.path.join(self.web_dir, 'images')
print('create web directory %s...' % self.web_dir)
util.mkdirs([self.web_dir, self.img_dir])
# create a logging file to store training losses
self.log_name = os.path.join(opt.checkpoints_dir, opt.name, 'loss_log.txt')
with open(self.log_name, "a") as log_file:
now = time.strftime("%c")
log_file.write('================ Training Loss (%s) ================\n' % now)
def reset(self):
"""Reset the self.saved status"""
self.saved = False
def create_visdom_connections(self):
"""If the program could not connect to Visdom server, this function will start a new server at port < self.port > """
cmd = sys.executable + ' -m visdom.server -p %d &>/dev/null &' % self.port
print('\n\nCould not connect to Visdom server. \n Trying to start a server....')
print('Command: %s' % cmd)
Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE)
def display_current_results(self, visuals, epoch, save_result):
"""Display current results on visdom; save current results to an HTML file.
Parameters:
visuals (OrderedDict) - - dictionary of images to display or save
epoch (int) - - the current epoch
save_result (bool) - - if save the current results to an HTML file
"""
if self.display_id > 0: # show images in the browser using visdom
ncols = self.ncols
if ncols > 0: # show all the images in one visdom panel
ncols = min(ncols, len(visuals))
h, w = next(iter(visuals.values())).shape[:2]
table_css = """<style>
table {border-collapse: separate; border-spacing: 4px; white-space: nowrap; text-align: center}
table td {width: % dpx; height: % dpx; padding: 4px; outline: 4px solid black}
</style>""" % (w, h) # create a table css
# create a table of images.
title = self.name
label_html = ''
label_html_row = ''
images = []
idx = 0
for label, image in visuals.items():
image_numpy = util.tensor2im(image)
label_html_row += '<td>%s</td>' % label
images.append(image_numpy.transpose([2, 0, 1]))
idx += 1
if idx % ncols == 0:
label_html += '<tr>%s</tr>' % label_html_row
label_html_row = ''
white_image = np.ones_like(image_numpy.transpose([2, 0, 1])) * 255
while idx % ncols != 0:
images.append(white_image)
label_html_row += '<td></td>'
idx += 1
if label_html_row != '':
label_html += '<tr>%s</tr>' % label_html_row
try:
self.vis.images(images, nrow=ncols, win=self.display_id + 1,
padding=2, opts=dict(title=title + ' images'))
label_html = '<table>%s</table>' % label_html
self.vis.text(table_css + label_html, win=self.display_id + 2,
opts=dict(title=title + ' labels'))
except VisdomExceptionBase:
self.create_visdom_connections()
else: # show each image in a separate visdom panel;
idx = 1
try:
for label, image in visuals.items():
image_numpy = util.tensor2im(image)
self.vis.image(image_numpy.transpose([2, 0, 1]), opts=dict(title=label),
win=self.display_id + idx)
idx += 1
except VisdomExceptionBase:
self.create_visdom_connections()
if self.use_html and (save_result or not self.saved): # save images to an HTML file if they haven't been saved.
self.saved = True
# save images to the disk
for label, image in visuals.items():
image_numpy = util.tensor2im(image)
img_path = os.path.join(self.img_dir, 'epoch%.3d_%s.png' % (epoch, label))
util.save_image(image_numpy, img_path)
# update website
webpage = html.HTML(self.web_dir, 'Experiment name = %s' % self.name, refresh=1)
for n in range(epoch, 0, -1):
webpage.add_header('epoch [%d]' % n)
ims, txts, links = [], [], []
for label, image_numpy in visuals.items():
                    image_numpy = util.tensor2im(image_numpy)  # convert the tensor for the current label
img_path = 'epoch%.3d_%s.png' % (n, label)
ims.append(img_path)
txts.append(label)
links.append(img_path)
webpage.add_images(ims, txts, links, width=self.win_size)
webpage.save()
def plot_current_losses(self, epoch, counter_ratio, losses):
"""display the current losses on visdom display: dictionary of error labels and values
Parameters:
epoch (int) -- current epoch
            counter_ratio (float) -- progress (percentage) in the current epoch, between 0 and 1
losses (OrderedDict) -- training losses stored in the format of (name, float) pairs
"""
if not hasattr(self, 'plot_data'):
self.plot_data = {'X': [], 'Y': [], 'legend': list(losses.keys())}
self.plot_data['X'].append(epoch + counter_ratio)
self.plot_data['Y'].append([losses[k] for k in self.plot_data['legend']])
try:
self.vis.line(
X=np.stack([np.array(self.plot_data['X'])] * len(self.plot_data['legend']), 1),
Y=np.array(self.plot_data['Y']),
opts={
'title': self.name + ' loss over time',
'legend': self.plot_data['legend'],
'xlabel': 'epoch',
'ylabel': 'loss'},
win=self.display_id)
except VisdomExceptionBase:
self.create_visdom_connections()
# losses: same format as |losses| of plot_current_losses
def print_current_losses(self, epoch, iters, losses, t_comp, t_data):
"""print current losses on console; also save the losses to the disk
Parameters:
epoch (int) -- current epoch
iters (int) -- current training iteration during this epoch (reset to 0 at the end of every epoch)
losses (OrderedDict) -- training losses stored in the format of (name, float) pairs
t_comp (float) -- computational time per data point (normalized by batch_size)
t_data (float) -- data loading time per data point (normalized by batch_size)
"""
message = '(epoch: %d, iters: %d, time: %.3f, data: %.3f) ' % (epoch, iters, t_comp, t_data)
for k, v in losses.items():
message += '%s: %.3f ' % (k, v)
print(message) # print the message
with open(self.log_name, "a") as log_file:
log_file.write('%s\n' % message) # save the message
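# --- Illustrative training-loop sketch (editor's addition, not part of the original module) ---
# Shows how the Visualizer above is typically driven from a training loop. `opt`, `model` and
# `dataset` are assumptions: opt.batch_size/display_freq/print_freq and the model's
# set_input()/optimize_parameters()/get_current_visuals()/get_current_losses() are assumed
# names, while every visualizer call is taken from the class defined above.
def _example_training_loop(opt, model, dataset, n_epochs=5):
    visualizer = Visualizer(opt)
    total_iters = 0
    for epoch in range(1, n_epochs + 1):
        visualizer.reset()                        # allow one HTML save per epoch
        for i, data in enumerate(dataset):
            iter_start_time = time.time()
            model.set_input(data)                 # assumed model API
            model.optimize_parameters()           # assumed model API
            total_iters += opt.batch_size
            if total_iters % opt.display_freq == 0:
                visualizer.display_current_results(model.get_current_visuals(), epoch, True)
            if total_iters % opt.print_freq == 0:
                losses = model.get_current_losses()
                t_comp = (time.time() - iter_start_time) / opt.batch_size
                visualizer.print_current_losses(epoch, total_iters, losses, t_comp, 0.0)
                if opt.display_id > 0:
                    visualizer.plot_current_losses(epoch, float(i) / len(dataset), losses)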
| 46.881579 | 139 | 0.567499 |
d5c9e59d6e15e05d472c3a74e268a3a69668c95d | 14461 | py | Python | wpc/models.py | paked/WPC-fix | bca46731bce6a76f6b7eb79123b79b9c6377bc52 | ["MIT"] | null | null | null | wpc/models.py | paked/WPC-fix | bca46731bce6a76f6b7eb79123b79b9c6377bc52 | ["MIT"] | null | null | null | wpc/models.py | paked/WPC-fix | bca46731bce6a76f6b7eb79123b79b9c6377bc52 | ["MIT"] | null | null | null |
from wpc import db, app, login_manager
from wpc.utils import requests_get_with_retries
from flask.ext.login import UserMixin, current_user
from sqlalchemy.orm.properties import ColumnProperty
import humanize
from datetime import datetime, timedelta
@login_manager.user_loader
def load_user(reddit_username):
return Streamer.query.filter_by(reddit_username=reddit_username).first()
stream_tag = db.Table('stream_tag',
db.Column('stream_id', db.Integer(), db.ForeignKey('stream.id')),
db.Column('tag_name', db.String(256), db.ForeignKey('tag.name')))
stream_sub = db.Table('stream_sub',
db.Column('stream_id', db.Integer(), db.ForeignKey('stream.id')),
db.Column('submission_id', db.String(6), db.ForeignKey('submission.submission_id')))
class Submission(db.Model):
submission_id = db.Column(db.String(6), primary_key=True)
def __repr__(self):
return '<Submission %r>' % (self.submission_id)
class Stream(db.Model):
id = db.Column(db.Integer, primary_key=True)
type = db.Column(db.String(50))
scheduled_start_time = db.Column(db.DateTime())
actual_start_time = db.Column(db.DateTime())
status = db.Column(db.Enum('upcoming', 'live', 'completed', name='stream_status'))
title = db.Column(db.String(200))
submissions = db.relationship('Submission', secondary=stream_sub, backref=db.backref('streams', lazy='dynamic'))
streamer_id = db.Column('streamer_id', db.Integer(), db.ForeignKey('streamer.id'))
streamer = db.relationship('Streamer', backref=db.backref('streams', lazy='dynamic'))
tags = db.relationship('Tag', secondary=stream_tag, backref=db.backref('streams', lazy='dynamic'))
current_viewers = db.Column(db.Integer)
__mapper_args__ = {
'polymorphic_on': type,
'polymorphic_identity': 'stream'
}
def format_start_time(self, countdown=True, start_time=True):
if not self.scheduled_start_time or (not countdown and not start_time):
return None
if countdown:
return humanize.naturaltime(datetime.utcnow() - self.scheduled_start_time) +\
((", " + datetime.strftime(self.scheduled_start_time, "%Y-%m-%d %H:%M UTC")) if start_time else "")
else:
return datetime.strftime(self.scheduled_start_time, "%Y-%m-%d %H:%M UTC")
def add_submission(self, submission):
if submission not in self.submissions:
self.submissions.append(submission)
class YoutubeStream(Stream):
ytid = db.Column(db.String(11), unique=True)
def __init__(self, id):
self.ytid = id
self.submissions = []
def __eq__(self, other):
return type(self) == type(other) and self.ytid == other.ytid
def __hash__(self):
return hash(self.ytid)
def __repr__(self):
return '<YoutubeStream %d %r>' % (self.id, self.ytid)
def _update_status(self):
app.logger.info("Updating status for {}".format(self))
try:
            r = requests_get_with_retries(
                "https://www.googleapis.com/youtube/v3/videos?id={}&part=snippet,liveStreamingDetails&key={}".format(
                    self.ytid,
                    app.config['YOUTUBE_KEY']),
                retries_num=15)  # retry the API call up to 15 times
r.raise_for_status()
except Exception as e:
            app.logger.error("Error while updating {}".format(self))
app.logger.exception(e)
raise
if not r.json()['items']:
self.status = 'completed'
return
for item in r.json()['items']:
self.title = item['snippet']['title']
if 'liveStreamingDetails' in item:
self.scheduled_start_time = item['liveStreamingDetails']['scheduledStartTime']
if 'concurrentViewers' in item['liveStreamingDetails']:
self.current_viewers = item['liveStreamingDetails']['concurrentViewers']
if item['snippet']['liveBroadcastContent'] == 'live':
self.status = 'live'
self.actual_start_time = item['liveStreamingDetails']['actualStartTime']
elif item['snippet']['liveBroadcastContent'] == 'upcoming':
self.status = 'upcoming'
else:
self.status = 'completed'
            # add the channel to the streamer table if needed, and fix it up if needed
if self.streamer is not None:
yc = item['snippet']['channelId']
streamer = Streamer.query.filter_by(youtube_channel=yc).first()
# if there is streamer with that channel
if streamer:
self.streamer = streamer
# there is no streamer with that channel
elif not self.streamer.checked:
self.streamer.youtube_channel = yc
self.streamer.youtube_name = item['snippet']['channelTitle']
def _get_flair(self):
fst = self.format_start_time(start_time=False)
status_to_flair = {"live": (u"Live", u"one"),
"completed": (u"Recording Available", u"four"),
"upcoming": (fst if fst else u"Upcoming", u"two"),
None: (None, None)}
return status_to_flair[self.status]
def normal_url(self):
return "http://www.youtube.com/watch?v={}".format(self.ytid)
def html_code(self, autoplay=False):
return """
<iframe width="640" height="390"
src="http://www.youtube.com/embed/{}?rel=0&autoplay={}">
</iframe>
""".format(self.ytid, int(autoplay))
__mapper_args__ = {
'polymorphic_identity': 'youtube_stream'
}
class TwitchStream(Stream):
channel = db.Column(db.String(25), unique=True)
last_time_live = db.Column(db.DateTime())
def __init__(self, channel):
self.channel = channel
self.status = 'upcoming'
self.submissions = []
def __eq__(self, other):
return type(self) == type(other) and self.channel == other.channel
def __hash__(self):
return hash(self.channel)
def __repr__(self):
return '<TwitchStream {} {}>'.format(self.id, self.channel)
def _update_title_from_channel(self):
r = requests_get_with_retries("https://api.twitch.tv/kraken/channels/{}".format(self.channel))
r.raise_for_status()
stream = r.json()
if stream is not None:
if stream['status'] is not None:
self.title = stream['status']
def _update_status(self):
app.logger.info("Updating status for {}".format(self))
try:
r = requests_get_with_retries("https://api.twitch.tv/kraken/streams/{}".format(self.channel))
r.raise_for_status()
except Exception as e:
app.logger.error("Error while updating {}".format(self))
app.logger.exception(e)
raise
stream = r.json()['stream']
if stream is not None:
self.status = 'live'
self.title = stream['channel']['status']
self.current_viewers = stream['viewers']
self.last_time_live = datetime.utcnow()
if self.actual_start_time is None:
self.actual_start_time = self.last_time_live
else:
if self.status == 'live':
                # workaround for streams that briefly drop offline and come back shortly after
if datetime.utcnow() - self.last_time_live > timedelta(minutes=12):
self.status = 'completed'
if self.status == 'upcoming':
self._update_title_from_channel()
        # add the channel to the streamer table if needed, and fix it up if needed
if self.streamer is not None:
streamer = Streamer.query.filter_by(twitch_channel=self.channel).first()
# if there is streamer with that channel
if streamer:
self.streamer = streamer
# there is no streamer with that channel
elif not self.streamer.checked:
self.streamer.twitch_channel = self.channel
def _get_flair(self):
fst = self.format_start_time(start_time=False)
status_to_flair = {"live": (u"Live", u"one"),
"completed": (u"Finished", u"three"),
"upcoming": (fst if fst else u"Upcoming", u"two"),
None: (None, None)}
return status_to_flair[self.status]
def add_submission(self, submission):
if submission not in self.submissions:
self.status = 'upcoming'
self.scheduled_start_time = None
self.actual_start_time = None
Stream.add_submission(self, submission)
def normal_url(self):
return "http://www.twitch.tv/" + self.channel
def html_code(self, autoplay=False):
return """
<object type="application/x-shockwave-flash"
height="390"
width="640"
id="live_embed_player_flash"
data="http://www.twitch.tv/widgets/live_embed_player.swf?channel={}"
bgcolor="#000000">
<param name="allowFullScreen"
value="true" />
<param name="allowScriptAccess"
value="always" />
<param name="allowNetworking"
value="all" />
<param name="movie"
value="http://www.twitch.tv/widgets/live_embed_player.swf" />
<param name="flashvars"
value="hostname=www.twitch.tv&channel={}&auto_play={}" />
</object>
""".format(self.channel, self.channel, "true" if autoplay else "false")
__mapper_args__ = {
'polymorphic_identity': 'twitch_stream'
}
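# --- Illustrative sketch (editor's addition, not part of the original module) ---
# Stream is mapped with polymorphic_on=type, so a single query against Stream yields concrete
# YoutubeStream/TwitchStream instances and callers can use normal_url()/html_code() without
# isinstance checks. This helper is only a usage sketch; it assumes an application context
# with a bound database session.
def _example_live_stream_embeds():
    live_streams = (Stream.query
                    .filter_by(status='live')
                    .order_by(Stream.current_viewers.desc())
                    .all())
    return [(s.normal_url(), s.html_code(autoplay=False)) for s in live_streams]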
class MozillaStreamHack(object):
def html_code(self, autoplay=None):
return '''<iframe src="https://air.mozilla.org/the-joy-of-coding-mconley-livehacks-on-firefox-episode-4-20150311/video/" width="640" height="380" frameborder="0" allowfullscreen></iframe>''' # NOQA
def normal_url(self):
return "https://air.mozilla.org/the-joy-of-coding-mconley-livehacks-on-firefox-episode-4-20150311/"
class CaseInsensitiveComparator(ColumnProperty.Comparator):
def __eq__(self, other):
return db.func.lower(self.__clause_element__()) == db.func.lower(other)
class Subscriber(db.Model):
id = db.Column(db.Integer, primary_key=True)
email = db.column_property(db.Column(db.String(256), unique=True, nullable=False), comparator_factory=CaseInsensitiveComparator)
def __repr__(self):
return '<Subscriber %d %r>' % (self.id, self.email)
class Streamer(db.Model, UserMixin):
id = db.Column(db.Integer, primary_key=True)
reddit_username = db.column_property(db.Column(db.String(20), unique=True), comparator_factory=CaseInsensitiveComparator)
twitch_channel = db.column_property(db.Column(db.String(25), unique=True), comparator_factory=CaseInsensitiveComparator)
youtube_channel = db.Column(db.String(24), unique=True)
youtube_name = db.Column(db.String(30))
info = db.Column(db.Text())
checked = db.Column(db.Boolean(), default=False)
def __init__(self, reddit_username, checked=False):
self.reddit_username = reddit_username
self.checked = checked
def __repr__(self):
return '<Streamer %d %r>' % (self.id, self.reddit_username)
def get_id(self):
return self.reddit_username
def populate(self, form):
self.info = form.info.data
tc = form.twitch_channel_extract()
        # delete the inappropriate twitch stream
if tc != self.twitch_channel:
ts = self.streams.filter_by(type='twitch_stream').first()
if ts:
ts.streamer = None
# rebind tstream
streamer = Streamer.query.filter_by(twitch_channel=tc).first()
if streamer and streamer != current_user:
streamer.twitch_channel = None
for ts in streamer.streams.filter_by(type='twitch_stream'):
ts.streamer = self
self.twitch_channel = tc if tc else None
yc = form.youtube_channel_extract()
        # delete inappropriate youtube streams
if yc != self.youtube_channel:
for ys in self.streams.filter_by(type='youtube_stream'):
ys.streamer = None
# rebind ystreams
streamer = Streamer.query.filter_by(youtube_channel=yc).first()
if streamer and streamer != current_user:
            # reuse the cached name to avoid extra API requests
yn = streamer.youtube_name
if yn is not None:
self.youtube_name = yn
self.youtube_channel = streamer.youtube_channel
streamer.youtube_name = None
streamer.youtube_channel = None
for ys in streamer.streams.filter_by(type='youtube_stream'):
ys.streamer = self
# get yc name
if yc and (yc != self.youtube_channel or self.youtube_name is None):
try:
r = requests_get_with_retries(
"https://www.googleapis.com/youtube/v3/channels?id={}&part=snippet&key={}".format(
yc,
app.config['YOUTUBE_KEY'],
retries_num=15))
r.raise_for_status()
except Exception as e:
app.logger.error("Error while updating {}".format(Streamer))
app.logger.exception(e)
raise
for item in r.json()['items']:
self.youtube_name = item['snippet']['title']
self.youtube_channel = yc if yc else None
class Tag(db.Model):
__tablename__ = 'tag'
name = db.column_property(db.Column(db.String(256), primary_key=True), comparator_factory=CaseInsensitiveComparator)
def __init__(self, name):
self.name = name
def __repr__(self):
return '<Tag {}>'.format(self.name)
def get_or_create(model, **kwargs):
instance = model.query.filter_by(**kwargs).first()
if instance is None:
instance = model(**kwargs)
db.session.add(instance)
return instance
| 38.256614
| 206
| 0.599889
|
1cf8d1430c578400d1a93d62790a3f2c7305156f
| 886
|
py
|
Python
|
examples/sample_pandas_type_check.py
|
skmatz/gokart
|
ba1dc497dca1c7901bc861f49b1f081adc2a1888
|
[
"MIT"
] | null | null | null |
examples/sample_pandas_type_check.py
|
skmatz/gokart
|
ba1dc497dca1c7901bc861f49b1f081adc2a1888
|
[
"MIT"
] | null | null | null |
examples/sample_pandas_type_check.py
|
skmatz/gokart
|
ba1dc497dca1c7901bc861f49b1f081adc2a1888
|
[
"MIT"
] | null | null | null |
from typing import Dict, Any
import gokart
import pandas as pd
# Please define a class which inherits `gokart.PandasTypeConfig`.
# **In practice, please import `SamplePandasTypeConfig` in `__init__`.**
class SamplePandasTypeConfig(gokart.PandasTypeConfig):
task_namespace = 'sample_pandas_type_check'
@classmethod
def type_dict(cls) -> Dict[str, Any]:
return {'int_column': int}
class SampleTask(gokart.TaskOnKart):
# Please set the same `task_namespace` as `SamplePandasTypeConfig`.
task_namespace = 'sample_pandas_type_check'
def run(self):
df = pd.DataFrame(dict(int_column=['a']))
self.dump(df) # This line causes PandasTypeError, because expected type is `int`, but `str` is passed.
if __name__ == '__main__':
gokart.run([
'sample_pandas_type_check.SampleTask',
'--local-scheduler',
'--rerun'])
| 28.580645
| 111
| 0.697517
|
275df02d92a3e83bf35d3a51c03bf43e2eed629a
| 513
|
py
|
Python
|
src/main/python/karellen/kombu/transport/django/migrations/__init__.py
|
jonashaag/karellen-kombu-ext
|
edabd7e17b5565d9d4c1861c35bc9d6852a0a061
|
[
"Apache-2.0"
] | 6
|
2017-01-27T22:13:22.000Z
|
2017-11-23T17:22:46.000Z
|
src/main/python/karellen/kombu/transport/django/migrations/__init__.py
|
jonashaag/karellen-kombu-ext
|
edabd7e17b5565d9d4c1861c35bc9d6852a0a061
|
[
"Apache-2.0"
] | 7
|
2016-12-19T03:53:22.000Z
|
2018-09-24T14:29:12.000Z
|
src/main/python/karellen/kombu/transport/django/migrations/__init__.py
|
jonashaag/karellen-kombu-ext
|
edabd7e17b5565d9d4c1861c35bc9d6852a0a061
|
[
"Apache-2.0"
] | 4
|
2016-12-17T05:07:14.000Z
|
2018-02-20T10:49:38.000Z
|
from __future__ import absolute_import, unicode_literals
SOUTH_ERROR_MESSAGE = """
For South support, customize the SOUTH_MIGRATION_MODULES setting
to point to the correct migrations module:
SOUTH_MIGRATION_MODULES = {
'karellen_kombu_transport_django': 'karellen.kombu.transport.django.south_migrations',
}
"""
try:
from django.db import migrations # noqa
except ImportError:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured(SOUTH_ERROR_MESSAGE)
| 30.176471
| 94
| 0.791423
|
1845499d05da93f8af09966ee987cd2975e5209c
| 7,056
|
py
|
Python
|
LAUNCHER.py
|
EinKung/AI-Image-Denoising
|
e679091b0bd21c5259da2804b5c6de79b6f1ba4c
|
[
"Apache-2.0"
] | null | null | null |
LAUNCHER.py
|
EinKung/AI-Image-Denoising
|
e679091b0bd21c5259da2804b5c6de79b6f1ba4c
|
[
"Apache-2.0"
] | null | null | null |
LAUNCHER.py
|
EinKung/AI-Image-Denoising
|
e679091b0bd21c5259da2804b5c6de79b6f1ba4c
|
[
"Apache-2.0"
] | null | null | null |
from UI import UI
from PyQt5.QtWidgets import *
from PyQt5.QtCore import *
from PyQt5.QtGui import *
import sys,os,shutil,torch
import UTILS as utils
import PIL.Image as Image
import numpy as np
from NETWORK import *
class Launcher(QWidget,UI):
def __init__(self):
utils.logMaker('INFO','APPLICATION LAUNCHED')
super(Launcher,self).__init__()
self.setupUi(self)
self.setFixedSize(self.width(),self.height())
self.setWindowIcon(QIcon(config.icon_path))
self.setWindowFlags(Qt.FramelessWindowHint)
self.open.clicked.connect(self.callManage)
self.denoising.clicked.connect(self.callManage)
self.save.clicked.connect(self.callManage)
self.exit.clicked.connect(self.callManage)
self.timer.timeout.connect(self.tipClose)
self.thumbnailPath=None
self.filePath=None
self.denoised_thumbnailPath=None
self.denoisedPath=None
self.denoised_fileName=None
def callManage(self):
if self.sender()==self.open:
filePath = QFileDialog.getOpenFileName(self,'选择文件','','*.jpg;*.jpeg;*.png')[0].replace('/','\\')
if filePath=='':
return
utils.logMaker('INFO','FILE CHOSEN',[filePath])
self.thumbnailPath=None
self.filePath=None
self.denoised_thumbnailPath=None
self.denoisedPath=None
self.denoised_fileName=None
utils.logMaker('INFO','CACHE DELETED',os.listdir(config.cache_dir))
for cache in os.listdir(config.cache_dir):
os.remove(os.path.join(config.cache_dir,cache))
self.filePath=filePath
self.thumbnailPath=utils.thumbnail(filePath)
utils.logMaker('INFO','THUMBNAIL FILE CREATED',[self.thumbnailPath])
self.orgPic.clear()
self.denoisedPic.clear()
self.pixOrg=QPixmap(self.thumbnailPath)
self.orgPic.setPixmap(self.pixOrg)
if self.sender()==self.denoising:
if self.filePath==None:
				utils.logMaker('WARNING','FILE NOT CHOSEN')
self.saveTip.setText('未选择文件')
self.saveTip.setVisible(True)
self.timer.start(1500)
else:
self.buttonDisable.show()
self.saveTip.setVisible(True)
self.denoisedPath=self.runNet(self.filePath)
self.saveTip.setText('处理完成')
self.timer.start(1500)
self.denoised_fileName=self.denoisedPath.split('\\')[-1]
self.denoised_thumbnailPath=utils.thumbnail(self.denoisedPath)
utils.logMaker('INFO','DENOISED THUMBNAIL FILE CREATED',[self.denoised_thumbnailPath])
self.denoisedPic.clear()
self.pixDen=QPixmap(self.denoised_thumbnailPath)
self.denoisedPic.setPixmap(self.pixDen)
if self.sender()==self.save:
if self.denoisedPath==None:
				utils.logMaker('WARNING','NETWORK NOT LAUNCHED YET')
self.saveTip.setText('未处理')
self.saveTip.setVisible(True)
self.timer.start(1500)
else:
savePath=QFileDialog.getSaveFileName(self,'保存','{}'.format(self.denoised_fileName),'*.jpg;;*.jpeg;;*.png')[0].replace('/','\\')
if savePath=='':
return
saveDir=savePath.rsplit('\\',1)[0]
shutil.copy(self.denoisedPath,saveDir)
os.rename(os.path.join(saveDir,self.denoised_fileName),savePath)
utils.logMaker('INFO','DENOISED FILE SAVED',[savePath])
self.saveTip.setText('保存成功')
self.saveTip.setVisible(True)
self.timer.start(1500)
if self.sender()==self.exit:
self.close()
utils.logMaker('INFO','APPLICATION CLOSED')
def mousePressEvent(self, event):
if event.button()==Qt.LeftButton:
self.mouseFlag=True
self.mousePosition=event.globalPos()-self.pos()
event.accept()
self.setCursor(QCursor(Qt.OpenHandCursor))
def mouseMoveEvent(self, QMouseEvent):
if Qt.LeftButton and self.mouseFlag:
self.move(QMouseEvent.globalPos()-self.mousePosition)
QMouseEvent.accept()
def mouseReleaseEvent(self, QMouseEvent):
self.mouseFlag=False
self.setCursor(QCursor(Qt.ArrowCursor))
def tipClose(self):
self.saveTip.setVisible(False)
self.buttonDisable.close()
self.timer.stop()
def netInit(self):
utils.logMaker('INFO','NETWORK FILES INITIALIZING...')
self.device=torch.device('cuda' if torch.cuda.is_available() else 'cpu')
self.generator_srgan=torch.load(config.network_srg_path,map_location=self.device.type)
self.generator_idgan=torch.load(config.network_idg_path,map_location=self.device.type)
def runNet(self,path):
fileName=path.split('\\')[-1]
data=torch.tensor(np.array(Image.open(path).convert('RGB'),dtype=np.float32).transpose([2,0,1])/255-0.5).unsqueeze(dim=0).to(self.device)
self.saveTip.setText('网络计算中....')
data_denoised=self.generator_idgan(data)
data_upscale=self.generator_srgan(data_denoised)
utils.logMaker('INFO','OPERATION SUCCESSFULLY')
pic_array=(data_upscale[0].cpu().detach().numpy()+0.5)*255
picDenoised=Image.fromarray(pic_array.transpose([1,2,0]).astype(np.uint8))
cachePath=os.path.join(config.cache_dir,'denoised_{}'.format(fileName))
picDenoised.save(cachePath)
utils.logMaker('INFO','DENOISED FILE SAVED IN CACHE',[cachePath])
return cachePath
def completion(self):
check=utils.completionCheck()
if not check[0]:
self.ok=QPushButton("确定")
self.ok.setStyleSheet("background-color:rgb(110,200,209);color:white;")
self.tipBox=QMessageBox()
self.tipBox.setWindowFlags(Qt.FramelessWindowHint)
self.tipBox.setText("文件缺失")
self.tipBox.setWindowTitle("提示")
self.tipBox.setStyleSheet("background-color:rgb(51,51,51);color:white;")
self.tipBox.addButton(self.ok,QMessageBox.AcceptRole)
self.tipBox.setIcon(QMessageBox.NoIcon)
self.tipBox.show()
utils.logMaker('ERROR','FILES NOT EXIST',check[1])
utils.logMaker('INFO','EXCEPTION CLOSED')
else:
self.netInit()
self.show()
if __name__=="__main__":
app=QApplication(sys.argv)
launcher=Launcher()
launcher.completion()
app.exec_()
utils.logMaker('INFO','CACHE DELETED',os.listdir(config.cache_dir))
for cache in os.listdir(config.cache_dir):
os.remove(os.path.join(config.cache_dir,cache))
| 40.786127
| 146
| 0.6053
|
4981676803b3df1bc71cdc7bbd6b7826c814f9f9
| 2,258
|
py
|
Python
|
oauth_provider/runtests/settings.py
|
TimSC/django-oauth10a-mod
|
3b9786b33cbd3d2ab6de7a667f9737167b349db0
|
[
"BSD-3-Clause"
] | null | null | null |
oauth_provider/runtests/settings.py
|
TimSC/django-oauth10a-mod
|
3b9786b33cbd3d2ab6de7a667f9737167b349db0
|
[
"BSD-3-Clause"
] | null | null | null |
oauth_provider/runtests/settings.py
|
TimSC/django-oauth10a-mod
|
3b9786b33cbd3d2ab6de7a667f9737167b349db0
|
[
"BSD-3-Clause"
] | null | null | null |
import os
import django
ROOT_PATH = os.path.dirname(__file__)
TEMPLATE_DEBUG = DEBUG = True
MANAGERS = ADMINS = ()
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'testdb.sqlite', # Or path to database file if using sqlite3.
'USER': '', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
TIME_ZONE = 'America/Chicago'
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
USE_I18N = True
MEDIA_ROOT = ''
MEDIA_URL = ''
ADMIN_MEDIA_PREFIX = '/media/'
SECRET_KEY = '2+@4vnr#v8e273^+a)g$8%dre^dwcn#d&n#8+l6jk7r#$p&3zk'
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'oauth_provider.backends.XAuthAuthenticationBackend',
)
MIDDLEWARE = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
)
if django.VERSION[0] == 1 and django.VERSION[1] < 10:
MIDDLEWARE_CLASSES = MIDDLEWARE
ROOT_URLCONF = 'urls'
TEMPLATE_DIRS = (os.path.join(ROOT_PATH, 'templates'),)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'oauth_provider',
'oauth_provider.tests'
)
OAUTH_UNSAFE_REDIRECTS = True
OAUTH_NONCE_VALID_PERIOD = 120
if django.VERSION >= (1, 5):
# custom user model for tests issue #22
INSTALLED_APPS += ('test_app',)
AUTH_USER_MODEL = 'test_app.TestUser'
if django.VERSION < (1, 7):
INSTALLED_APPS += ('south',)
# try:
# import xmlrunner
# except ImportError:
# pass
# else:
# TEST_RUNNER = 'xmlrunner.extra.djangotestrunner.XMLTestRunner'
# TEST_OUTPUT_VERBOSE = True
# TEST_OUTPUT_DESCRIPTIONS = True
# TEST_OUTPUT_DIR = 'junitxml'
| 28.225
| 108
| 0.658547
|
d9439c09fbc12500a002359d22ce39f2ecbb7f5f
| 150
|
py
|
Python
|
docs/plots/plot_400m2.py
|
simontorres/goodman-lamps-lib
|
895a3835123e078b781d77a9d22a7a0d956563ba
|
[
"BSD-3-Clause"
] | null | null | null |
docs/plots/plot_400m2.py
|
simontorres/goodman-lamps-lib
|
895a3835123e078b781d77a9d22a7a0d956563ba
|
[
"BSD-3-Clause"
] | 1
|
2019-07-19T14:51:29.000Z
|
2019-08-12T21:00:33.000Z
|
docs/plots/plot_400m2.py
|
simontorres/goodman-lamps-lib
|
895a3835123e078b781d77a9d22a7a0d956563ba
|
[
"BSD-3-Clause"
] | 1
|
2021-09-20T03:30:39.000Z
|
2021-09-20T03:30:39.000Z
|
from __future__ import absolute_import
from goodman_lamps.goodman_lamps import create_plot
if __name__ == '__main__':
create_plot(mode='400M2')
| 21.428571
| 51
| 0.8
|
d7e77cd920114a0a30bf8b16f8bbed6e5e2d3491
| 76
|
py
|
Python
|
ocrd_olahd_client/__init__.py
|
OCR-D/ocrd_olahd_client
|
9809d0c1d7008deb55ba4210ae373ec264b68701
|
[
"Apache-2.0"
] | 1
|
2022-01-26T10:05:36.000Z
|
2022-01-26T10:05:36.000Z
|
ocrd_olahd_client/__init__.py
|
OCR-D/ocrd_olahd_client
|
9809d0c1d7008deb55ba4210ae373ec264b68701
|
[
"Apache-2.0"
] | 1
|
2020-09-21T16:36:42.000Z
|
2020-09-21T16:36:42.000Z
|
ocrd_olahd_client/__init__.py
|
OCR-D/ocrd_olahd_client
|
9809d0c1d7008deb55ba4210ae373ec264b68701
|
[
"Apache-2.0"
] | 1
|
2020-10-08T08:30:17.000Z
|
2020-10-08T08:30:17.000Z
|
from .client import OlaHdClient
from .processor import OlaHdClientProcessor
| 25.333333
| 43
| 0.868421
|
774ee97185a52cca5e743225c7623adb64395ab1
| 614
|
py
|
Python
|
study/chainer_study/chainer_study-2.py
|
strawsyz/straw
|
db313c78c2e3c0355cd10c70ac25a15bb5632d41
|
[
"MIT"
] | 2
|
2020-04-06T09:09:19.000Z
|
2020-07-24T03:59:55.000Z
|
study/chainer_study/chainer_study-2.py
|
strawsyz/straw
|
db313c78c2e3c0355cd10c70ac25a15bb5632d41
|
[
"MIT"
] | null | null | null |
study/chainer_study/chainer_study-2.py
|
strawsyz/straw
|
db313c78c2e3c0355cd10c70ac25a15bb5632d41
|
[
"MIT"
] | null | null | null |
import numpy as np
from chainer import Chain, Variable
import chainer.links as L
# Defining your own neural networks using `Chain` class
class MyChain(Chain):
def __init__(self):
super(MyChain, self).__init__()
with self.init_scope():
self.l1 = L.Linear(2, 2)
self.l2 = L.Linear(2, 1)
def __call__(self, x):
h = self.l1(x)
return self.l2(h)
x = Variable(np.array([[1, 2], [3, 4]], dtype=np.float32))
model = MyChain()
y = model(x)
print('x: ', x.data) # input is x0 & x1
print('y: ', y.data) # output is y0 & y1
| 22.740741
| 58
| 0.609121
|
12165ff9bb97ab8dcadf92bd17bb032500609509
| 638
|
py
|
Python
|
eelbrain/_trf/tests/test_error_functions.py
|
reddigari/Eelbrain
|
6c02b99955d4b5dc7e3054042c182e1a4629b13c
|
[
"BSD-3-Clause"
] | null | null | null |
eelbrain/_trf/tests/test_error_functions.py
|
reddigari/Eelbrain
|
6c02b99955d4b5dc7e3054042c182e1a4629b13c
|
[
"BSD-3-Clause"
] | null | null | null |
eelbrain/_trf/tests/test_error_functions.py
|
reddigari/Eelbrain
|
6c02b99955d4b5dc7e3054042c182e1a4629b13c
|
[
"BSD-3-Clause"
] | null | null | null |
# Author: Christian Brodbeck <christianbrodbeck@nyu.edu>
from nose.tools import assert_almost_equal
import numpy as np
from eelbrain._trf._boosting_opt import l1, l2
PRECISION = 10
# numpy-based error functions
#############################
def np_l2(x):
return np.dot(x, x[:, None])[0]
def np_l1(x):
return np.abs(x).sum()
# test function
###############
def test_error_functions():
"Test optimized error functions"
x = np.random.normal(0., 1., 100)
index = np.array(((0, 100),), np.int64)
assert_almost_equal(l1(x, index), np_l1(x), PRECISION)
assert_almost_equal(l2(x, index), np_l2(x), PRECISION)
| 22
| 58
| 0.647335
|
7d7d5e9a2e2f3aa6427d638ad9264991cf02b6a8
| 707
|
py
|
Python
|
src/action/migrations/0007_auto_20171206_2221.py
|
japesone/ontask_b
|
17af441f9893c521d2e14011e7790ba4077e3318
|
[
"MIT"
] | 3
|
2018-08-24T10:48:40.000Z
|
2020-05-29T06:33:23.000Z
|
src/action/migrations/0007_auto_20171206_2221.py
|
japesone/ontask_b
|
17af441f9893c521d2e14011e7790ba4077e3318
|
[
"MIT"
] | null | null | null |
src/action/migrations/0007_auto_20171206_2221.py
|
japesone/ontask_b
|
17af441f9893c521d2e14011e7790ba4077e3318
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2017-12-06 11:21
from __future__ import unicode_literals
import django.contrib.postgres.fields.jsonb
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('action', '0006_action_columns'),
]
operations = [
migrations.AlterModelOptions(
name='action',
options={'ordering': ('name',)},
),
migrations.AddField(
model_name='action',
name='filter',
field=django.contrib.postgres.fields.jsonb.JSONField(blank=True, default=dict, help_text='Preselect rows satisfying this condition', null=True),
),
]
| 27.192308
| 156
| 0.630835
|
1b125acb4853d3b0d787f27a4a2f17eea6df7a4e
| 7,089
|
py
|
Python
|
lib/gzMyModule/gzMyModuleClient.py
|
Polygrant/gzMyModule
|
9d334c07b4179ad5f05abdbbca65571baa369d80
|
[
"MIT"
] | null | null | null |
lib/gzMyModule/gzMyModuleClient.py
|
Polygrant/gzMyModule
|
9d334c07b4179ad5f05abdbbca65571baa369d80
|
[
"MIT"
] | null | null | null |
lib/gzMyModule/gzMyModuleClient.py
|
Polygrant/gzMyModule
|
9d334c07b4179ad5f05abdbbca65571baa369d80
|
[
"MIT"
] | null | null | null |
############################################################
#
# Autogenerated by the KBase type compiler -
# any changes made here will be overwritten
#
############################################################
try:
import json as _json
except ImportError:
import sys
sys.path.append('simplejson-2.3.3')
import simplejson as _json
import requests as _requests
import urlparse as _urlparse
import random as _random
import base64 as _base64
from ConfigParser import ConfigParser as _ConfigParser
import os as _os
_CT = 'content-type'
_AJ = 'application/json'
_URL_SCHEME = frozenset(['http', 'https'])
def _get_token(user_id, password,
auth_svc='https://nexus.api.globusonline.org/goauth/token?' +
'grant_type=client_credentials'):
# This is bandaid helper function until we get a full
# KBase python auth client released
auth = _base64.encodestring(user_id + ':' + password)
headers = {'Authorization': 'Basic ' + auth}
ret = _requests.get(auth_svc, headers=headers, allow_redirects=True)
status = ret.status_code
if status >= 200 and status <= 299:
tok = _json.loads(ret.text)
elif status == 403:
raise Exception('Authentication failed: Bad user_id/password ' +
'combination for user %s' % (user_id))
else:
raise Exception(ret.text)
return tok['access_token']
def _read_rcfile(file=_os.environ['HOME'] + '/.authrc'): # @ReservedAssignment
# Another bandaid to read in the ~/.authrc file if one is present
authdata = None
if _os.path.exists(file):
try:
with open(file) as authrc:
rawdata = _json.load(authrc)
# strip down whatever we read to only what is legit
authdata = {x: rawdata.get(x) for x in (
'user_id', 'token', 'client_secret', 'keyfile',
'keyfile_passphrase', 'password')}
except Exception, e:
print "Error while reading authrc file %s: %s" % (file, e)
return authdata
def _read_inifile(file=_os.environ.get( # @ReservedAssignment
'KB_DEPLOYMENT_CONFIG', _os.environ['HOME'] +
'/.kbase_config')):
# Another bandaid to read in the ~/.kbase_config file if one is present
authdata = None
if _os.path.exists(file):
try:
config = _ConfigParser()
config.read(file)
# strip down whatever we read to only what is legit
authdata = {x: config.get('authentication', x)
if config.has_option('authentication', x)
else None for x in ('user_id', 'token',
'client_secret', 'keyfile',
'keyfile_passphrase', 'password')}
except Exception, e:
print "Error while reading INI file %s: %s" % (file, e)
return authdata
class ServerError(Exception):
def __init__(self, name, code, message, data=None, error=None):
self.name = name
self.code = code
self.message = '' if message is None else message
self.data = data or error or ''
# data = JSON RPC 2.0, error = 1.1
def __str__(self):
return self.name + ': ' + str(self.code) + '. ' + self.message + \
'\n' + self.data
class _JSONObjectEncoder(_json.JSONEncoder):
def default(self, obj):
if isinstance(obj, set):
return list(obj)
if isinstance(obj, frozenset):
return list(obj)
return _json.JSONEncoder.default(self, obj)
class gzMyModule(object):
def __init__(self, url=None, timeout=30 * 60, user_id=None,
password=None, token=None, ignore_authrc=False,
trust_all_ssl_certificates=False):
if url is None:
raise ValueError('A url is required')
scheme, _, _, _, _, _ = _urlparse.urlparse(url)
if scheme not in _URL_SCHEME:
raise ValueError(url + " isn't a valid http url")
self.url = url
self.timeout = int(timeout)
self._headers = dict()
self.trust_all_ssl_certificates = trust_all_ssl_certificates
# token overrides user_id and password
if token is not None:
self._headers['AUTHORIZATION'] = token
elif user_id is not None and password is not None:
self._headers['AUTHORIZATION'] = _get_token(user_id, password)
elif 'KB_AUTH_TOKEN' in _os.environ:
self._headers['AUTHORIZATION'] = _os.environ.get('KB_AUTH_TOKEN')
elif not ignore_authrc:
authdata = _read_inifile()
if authdata is None:
authdata = _read_rcfile()
if authdata is not None:
if authdata.get('token') is not None:
self._headers['AUTHORIZATION'] = authdata['token']
elif(authdata.get('user_id') is not None
and authdata.get('password') is not None):
self._headers['AUTHORIZATION'] = _get_token(
authdata['user_id'], authdata['password'])
if self.timeout < 1:
raise ValueError('Timeout value must be at least 1 second')
def _call(self, method, params, json_rpc_context = None):
arg_hash = {'method': method,
'params': params,
'version': '1.1',
'id': str(_random.random())[2:]
}
if json_rpc_context:
arg_hash['context'] = json_rpc_context
body = _json.dumps(arg_hash, cls=_JSONObjectEncoder)
ret = _requests.post(self.url, data=body, headers=self._headers,
timeout=self.timeout,
verify=not self.trust_all_ssl_certificates)
if ret.status_code == _requests.codes.server_error:
json_header = None
if _CT in ret.headers:
json_header = ret.headers[_CT]
if _CT in ret.headers and ret.headers[_CT] == _AJ:
err = _json.loads(ret.text)
if 'error' in err:
raise ServerError(**err['error'])
else:
raise ServerError('Unknown', 0, ret.text)
else:
raise ServerError('Unknown', 0, ret.text)
if ret.status_code != _requests.codes.OK:
ret.raise_for_status()
ret.encoding = 'utf-8'
resp = _json.loads(ret.text)
if 'result' not in resp:
raise ServerError('Unknown', 0, 'An unknown server error occurred')
return resp['result']
def filter_contigs(self, params, json_rpc_context = None):
if json_rpc_context and type(json_rpc_context) is not dict:
raise ValueError('Method filter_contigs: argument json_rpc_context is not type dict as required.')
resp = self._call('gzMyModule.filter_contigs',
[params], json_rpc_context)
return resp[0]
| 39.383333
| 110
| 0.572436
|
2dca1b3dfa8b9cad2e6dedab6b168f6c197962fe
| 12,236
|
py
|
Python
|
fastai/medical/imaging.py
|
raphaelr/fastai
|
0eec9f87e10d0095220c1460e8b80bb66aed3460
|
[
"Apache-2.0"
] | null | null | null |
fastai/medical/imaging.py
|
raphaelr/fastai
|
0eec9f87e10d0095220c1460e8b80bb66aed3460
|
[
"Apache-2.0"
] | null | null | null |
fastai/medical/imaging.py
|
raphaelr/fastai
|
0eec9f87e10d0095220c1460e8b80bb66aed3460
|
[
"Apache-2.0"
] | null | null | null |
# AUTOGENERATED! DO NOT EDIT! File to edit: nbs/60_medical.imaging.ipynb (unless otherwise specified).
__all__ = ['DcmDataset', 'DcmTag', 'DcmMultiValue', 'dcmread', 'get_dicom_files', 'get_dicom_files', 'TensorDicom',
'PILDicom', 'pixels', 'scaled_px', 'array_freqhist_bins', 'dicom_windows', 'TensorCTScan', 'PILCTScan',
'uniform_blur2d', 'gauss_blur2d', 'mask2bbox', 'crop_resize', 'shape']
# Cell
from ..basics import *
from ..vision.all import *
from ..data.transforms import *
import pydicom,kornia,skimage
from pydicom.dataset import Dataset as DcmDataset
from pydicom.tag import BaseTag as DcmTag
from pydicom.multival import MultiValue as DcmMultiValue
from PIL import Image
try:
import cv2
cv2.setNumThreads(0)
except: pass
# Cell
#nbdev_comment _all_ = ['DcmDataset', 'DcmTag', 'DcmMultiValue', 'dcmread', 'get_dicom_files']
# Cell
def get_dicom_files(path, recurse=True, folders=None):
"Get dicom files in `path` recursively, only in `folders`, if specified."
return get_files(path, extensions=[".dcm"], recurse=recurse, folders=folders)
# Cell
@patch
def dcmread(fn:Path, force = False):
"Open a `DICOM` file"
return pydicom.dcmread(str(fn), force)
# Cell
class TensorDicom(TensorImage):
"Inherits from `TensorImage` and converts the `pixel_array` into a `TensorDicom`"
_show_args = {'cmap':'gray'}
# Cell
class PILDicom(PILBase):
_open_args,_tensor_cls,_show_args = {},TensorDicom,TensorDicom._show_args
@classmethod
def create(cls, fn:(Path,str,bytes), mode=None)->None:
"Open a `DICOM file` from path `fn` or bytes `fn` and load it as a `PIL Image`"
if isinstance(fn,bytes): im = Image.fromarray(pydicom.dcmread(pydicom.filebase.DicomBytesIO(fn)).pixel_array)
if isinstance(fn,(Path,str)): im = Image.fromarray(dcmread(fn).pixel_array)
im.load()
im = im._new(im.im)
return cls(im.convert(mode) if mode else im)
PILDicom._tensor_cls = TensorDicom
# Cell
@patch_property
def pixels(self:DcmDataset):
"`pixel_array` as a tensor"
return tensor(self.pixel_array.astype(np.float32))
# Cell
@patch_property
def scaled_px(self:DcmDataset):
"`pixels` scaled by `RescaleSlope` and `RescaleIntercept`"
img = self.pixels
return img if self.Modality == "CR" else img * self.RescaleSlope + self.RescaleIntercept
# Cell
def array_freqhist_bins(self, n_bins=100):
"A numpy based function to split the range of pixel values into groups, such that each group has around the same number of pixels"
imsd = np.sort(self.flatten())
t = np.array([0.001])
t = np.append(t, np.arange(n_bins)/n_bins+(1/2/n_bins))
t = np.append(t, 0.999)
t = (len(imsd)*t+0.5).astype(np.int)
return np.unique(imsd[t])
# Cell
@patch
def freqhist_bins(self:Tensor, n_bins=100):
"A function to split the range of pixel values into groups, such that each group has around the same number of pixels"
imsd = self.view(-1).sort()[0]
t = torch.cat([tensor([0.001]),
torch.arange(n_bins).float()/n_bins+(1/2/n_bins),
tensor([0.999])])
t = (len(imsd)*t).long()
return imsd[t].unique()
# Cell
@patch
def hist_scaled_pt(self:Tensor, brks=None):
# Pytorch-only version - switch to this if/when interp_1d can be optimized
if brks is None: brks = self.freqhist_bins()
brks = brks.to(self.device)
ys = torch.linspace(0., 1., len(brks)).to(self.device)
return self.flatten().interp_1d(brks, ys).reshape(self.shape).clamp(0.,1.)
# Cell
@patch
def hist_scaled(self:Tensor, brks=None):
"Scales a tensor using `freqhist_bins` to values between 0 and 1"
if self.device.type=='cuda': return self.hist_scaled_pt(brks)
if brks is None: brks = self.freqhist_bins()
ys = np.linspace(0., 1., len(brks))
x = self.numpy().flatten()
x = np.interp(x, brks.numpy(), ys)
return tensor(x).reshape(self.shape).clamp(0.,1.)
# Cell
@patch
def hist_scaled(self:DcmDataset, brks=None, min_px=None, max_px=None):
"Pixels scaled to a `min_px` and `max_px` value"
px = self.scaled_px
if min_px is not None: px[px<min_px] = min_px
if max_px is not None: px[px>max_px] = max_px
return px.hist_scaled(brks=brks)
# Cell
@patch
def windowed(self:Tensor, w, l):
px = self.clone()
px_min = l - w//2
px_max = l + w//2
px[px<px_min] = px_min
px[px>px_max] = px_max
return (px-px_min) / (px_max-px_min)
# Cell
@patch
def windowed(self:DcmDataset, w, l):
return self.scaled_px.windowed(w,l)
# Cell
# From https://radiopaedia.org/articles/windowing-ct
dicom_windows = types.SimpleNamespace(
brain=(80,40),
subdural=(254,100),
stroke=(8,32),
brain_bone=(2800,600),
brain_soft=(375,40),
lungs=(1500,-600),
mediastinum=(350,50),
abdomen_soft=(400,50),
liver=(150,30),
spine_soft=(250,50),
spine_bone=(1800,400)
)
# Cell
class TensorCTScan(TensorImageBW): _show_args = {'cmap':'bone'}
# Cell
class PILCTScan(PILBase): _open_args,_tensor_cls,_show_args = {},TensorCTScan,TensorCTScan._show_args
# Cell
@patch
@delegates(show_image)
def show(self:DcmDataset, scale=True, cmap=plt.cm.bone, min_px=-1100, max_px=None, **kwargs):
px = (self.windowed(*scale) if isinstance(scale,tuple)
else self.hist_scaled(min_px=min_px,max_px=max_px,brks=scale) if isinstance(scale,(ndarray,Tensor))
else self.hist_scaled(min_px=min_px,max_px=max_px) if scale
else self.scaled_px)
show_image(px, cmap=cmap, **kwargs)
# Cell
@patch
@delegates(show_image, show_images)
def show(self:DcmDataset, frames=1, scale=True, cmap=plt.cm.bone, min_px=-1100, max_px=None, **kwargs):
"""Adds functionality to view dicom images where each file may have more than 1 frame"""
px = (self.windowed(*scale) if isinstance(scale,tuple)
else self.hist_scaled(min_px=min_px,max_px=max_px,brks=scale) if isinstance(scale,(ndarray,Tensor))
else self.hist_scaled(min_px=min_px,max_px=max_px) if scale
else self.scaled_px)
if px.ndim > 2:
gh=[]
p = px.shape; print(f'{p[0]} frames per file')
for i in range(frames): u = px[i]; gh.append(u)
show_images(gh, **kwargs)
else:
print('1 frame per file')
show_image(px, cmap=cmap, **kwargs)
# Cell
@patch
def pct_in_window(dcm:DcmDataset, w, l):
"% of pixels in the window `(w,l)`"
px = dcm.scaled_px
return ((px > l-w//2) & (px < l+w//2)).float().mean().item()
# Cell
def uniform_blur2d(x,s):
"Uniformly apply blurring"
w = x.new_ones(1,1,1,s)/s
# Factor 2d conv into 2 1d convs
x = unsqueeze(x, dim=0, n=4-x.dim())
r = (F.conv2d(x, w, padding=s//2))
r = (F.conv2d(r, w.transpose(-1,-2), padding=s//2)).cpu()[:,0]
return r.squeeze()
# Cell
def gauss_blur2d(x,s):
"Apply gaussian_blur2d kornia filter"
s2 = int(s/4)*2+1
x2 = unsqueeze(x, dim=0, n=4-x.dim())
res = kornia.filters.gaussian_blur2d(x2, (s2,s2), (s,s), 'replicate')
return res.squeeze()
# Cell
@patch
def mask_from_blur(x:Tensor, window, sigma=0.3, thresh=0.05, remove_max=True):
p = x.windowed(*window)
if remove_max: p[p==1] = 0
return gauss_blur2d(p, s=sigma*x.shape[-1])>thresh
# Cell
@patch
def mask_from_blur(x:DcmDataset, window, sigma=0.3, thresh=0.05, remove_max=True):
return to_device(x.scaled_px).mask_from_blur(window, sigma, thresh, remove_max=remove_max)
# Cell
def _px_bounds(x, dim):
c = x.sum(dim).nonzero().cpu()
idxs,vals = torch.unique(c[:,0],return_counts=True)
vs = torch.split_with_sizes(c[:,1],tuple(vals))
d = {k.item():v for k,v in zip(idxs,vs)}
default_u = tensor([0,x.shape[-1]-1])
b = [d.get(o,default_u) for o in range(x.shape[0])]
b = [tensor([o.min(),o.max()]) for o in b]
return torch.stack(b)
# Cell
def mask2bbox(mask):
no_batch = mask.dim()==2
if no_batch: mask = mask[None]
bb1 = _px_bounds(mask,-1).t()
bb2 = _px_bounds(mask,-2).t()
res = torch.stack([bb1,bb2],dim=1).to(mask.device)
return res[...,0] if no_batch else res
# Cell
def _bbs2sizes(crops, init_sz, use_square=True):
bb = crops.flip(1)
szs = (bb[1]-bb[0])
if use_square: szs = szs.max(0)[0][None].repeat((2,1))
overs = (szs+bb[0])>init_sz
bb[0][overs] = init_sz-szs[overs]
lows = (bb[0]/float(init_sz))
return lows,szs/float(init_sz)
# Cell
def crop_resize(x, crops, new_sz):
# NB assumes square inputs. Not tested for non-square anythings!
bs = x.shape[0]
lows,szs = _bbs2sizes(crops, x.shape[-1])
if not isinstance(new_sz,(list,tuple)): new_sz = (new_sz,new_sz)
id_mat = tensor([[1.,0,0],[0,1,0]])[None].repeat((bs,1,1)).to(x.device)
with warnings.catch_warnings():
warnings.filterwarnings('ignore', category=UserWarning)
sp = F.affine_grid(id_mat, (bs,1,*new_sz))+1.
grid = sp*unsqueeze(szs.t(),1,n=2)+unsqueeze(lows.t()*2.,1,n=2)
return F.grid_sample(x.unsqueeze(1), grid-1)
# Cell
@patch
def to_nchan(x:Tensor, wins, bins=None):
res = [x.windowed(*win) for win in wins]
if not isinstance(bins,int) or bins!=0: res.append(x.hist_scaled(bins).clamp(0,1))
dim = [0,1][x.dim()==3]
return TensorCTScan(torch.stack(res, dim=dim))
# Cell
@patch
def to_nchan(x:DcmDataset, wins, bins=None):
return x.scaled_px.to_nchan(wins, bins)
# Cell
@patch
def to_3chan(x:Tensor, win1, win2, bins=None):
return x.to_nchan([win1,win2],bins=bins)
# Cell
@patch
def to_3chan(x:DcmDataset, win1, win2, bins=None):
return x.scaled_px.to_3chan(win1, win2, bins)
# Cell
@patch
def save_jpg(x:(Tensor,DcmDataset), path, wins, bins=None, quality=90):
fn = Path(path).with_suffix('.jpg')
x = (x.to_nchan(wins, bins)*255).byte()
im = Image.fromarray(x.permute(1,2,0).numpy(), mode=['RGB','CMYK'][x.shape[0]==4])
im.save(fn, quality=quality)
# Cell
@patch
def to_uint16(x:(Tensor,DcmDataset), bins=None):
d = x.hist_scaled(bins).clamp(0,1) * 2**16
return d.numpy().astype(np.uint16)
# Cell
@patch
def save_tif16(x:(Tensor,DcmDataset), path, bins=None, compress=True):
fn = Path(path).with_suffix('.tif')
Image.fromarray(x.to_uint16(bins)).save(str(fn), compression='tiff_deflate' if compress else None)
# Cell
@patch
def set_pixels(self:DcmDataset, px):
self.PixelData = px.tobytes()
self.Rows,self.Columns = px.shape
DcmDataset.pixel_array = property(DcmDataset.pixel_array.fget, set_pixels)
# Cell
@patch
def zoom(self:DcmDataset, ratio):
with warnings.catch_warnings():
warnings.simplefilter("ignore", UserWarning)
self.pixel_array = ndimage.zoom(self.pixel_array, ratio)
# Cell
@patch
def zoom_to(self:DcmDataset, sz):
if not isinstance(sz,(list,tuple)): sz=(sz,sz)
rows,cols = sz
self.zoom((rows/self.Rows,cols/self.Columns))
# Cell
@patch_property
def shape(self:DcmDataset): return self.Rows,self.Columns
# Cell
def _cast_dicom_special(x):
cls = type(x)
if not cls.__module__.startswith('pydicom'): return x
if cls.__base__ == object: return x
return cls.__base__(x)
def _split_elem(res,k,v):
if not isinstance(v,DcmMultiValue): return
res[f'Multi{k}'] = 1
for i,o in enumerate(v): res[f'{k}{"" if i==0 else i}']=o
# Cell
@patch
def as_dict(self:DcmDataset, px_summ=True, window=dicom_windows.brain):
pxdata = (0x7fe0,0x0010)
vals = [self[o] for o in self.keys() if o != pxdata]
its = [(v.keyword,v.value) for v in vals]
res = dict(its)
res['fname'] = self.filename
for k,v in its: _split_elem(res,k,v)
if not px_summ: return res
stats = 'min','max','mean','std'
try:
pxs = self.pixel_array
for f in stats: res['img_'+f] = getattr(pxs,f)()
res['img_pct_window'] = self.pct_in_window(*window)
except Exception as e:
for f in stats: res['img_'+f] = 0
print(res,e)
for k in res: res[k] = _cast_dicom_special(res[k])
return res
# Cell
def _dcm2dict(fn, **kwargs): return fn.dcmread().as_dict(**kwargs)
# Cell
@delegates(parallel)
def _from_dicoms(cls, fns, n_workers=0, **kwargs):
return pd.DataFrame(parallel(_dcm2dict, fns, n_workers=n_workers, **kwargs))
pd.DataFrame.from_dicoms = classmethod(_from_dicoms)
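# --- Editor's note: hedged usage sketch, not part of the original fastai module ---
# Illustrates the Tensor.windowed patch and the dicom_windows presets defined above:
# pixel values are clipped to the window (w, l) and rescaled into [0, 1].
# The synthetic tensor below merely stands in for a CT slice and is an assumption for illustration.
if __name__ == '__main__':
    import torch
    fake_ct = torch.linspace(-1000., 1000., steps=9).reshape(3, 3)  # fake Hounsfield-unit values
    brain_view = fake_ct.windowed(*dicom_windows.brain)             # window w=80, level l=40
    print(brain_view.min().item(), brain_view.max().item())         # both values lie within [0, 1]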
| 32.892473
| 134
| 0.666558
|
ea4d4f40c164f0566dadf25e7dbfad215e9dd6fb
| 5,514
|
py
|
Python
|
apsdocs/source/conf.py
|
dgursoy/apsdocs
|
c4e1f74dda98d49f470e5861a4b5b658765dabc4
|
[
"BSD-3-Clause"
] | null | null | null |
apsdocs/source/conf.py
|
dgursoy/apsdocs
|
c4e1f74dda98d49f470e5861a4b5b658765dabc4
|
[
"BSD-3-Clause"
] | null | null | null |
apsdocs/source/conf.py
|
dgursoy/apsdocs
|
c4e1f74dda98d49f470e5861a4b5b658765dabc4
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# apsdocs documentation build configuration file, created by
# sphinx-quickstart on Sat May 11 13:26:24 2019.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.imgmath',
'sphinx.ext.viewcode',
'sphinx.ext.githubpages']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'apsdocs'
copyright = '2019, Doga Gursoy'
author = 'Doga Gursoy'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'apsdocsdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'apsdocs.tex', 'apsdocs Documentation',
'Doga Gursoy', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'apsdocs', 'apsdocs Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'apsdocs', 'apsdocs Documentation',
author, 'apsdocs', 'One line description of project.',
'Miscellaneous'),
]
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}
| 29.174603
| 79
| 0.679362
|
478a6931151350d535b019d0787ff01041b12887
| 5,185
|
py
|
Python
|
AprioriAlgorithm/Apriori.py
|
AvisenaAlwi/DataMining
|
5a2dc404c7c120a3ff99f93b9a4627804cd1486a
|
[
"Apache-2.0"
] | 2
|
2019-03-22T12:29:08.000Z
|
2019-04-05T06:27:01.000Z
|
AprioriAlgorithm/Apriori.py
|
AvisenaAlwi/DataMining
|
5a2dc404c7c120a3ff99f93b9a4627804cd1486a
|
[
"Apache-2.0"
] | null | null | null |
AprioriAlgorithm/Apriori.py
|
AvisenaAlwi/DataMining
|
5a2dc404c7c120a3ff99f93b9a4627804cd1486a
|
[
"Apache-2.0"
] | null | null | null |
class Apriori:
def __init__(self, minimum_support=3):
"""
		Class constructor
:param minimum_support: minimum support, default = 3
"""
self.__minimum_support = minimum_support
self.__transaction = dict()
self.__items = set()
def add_transaction(self, items):
"""
		Add a new transaction
		:param items: a list of item names
:return: None
"""
if self.__transaction:
last_tid = max(self.__transaction)
else:
last_tid = 0
self.__transaction[last_tid + 1] = items
def print_transaction(self):
print("TID\t\tItems")
for tid, items in self.__transaction.items():
str_items = ", ".join(sorted(list(items)))
print("{} : {}".format(str(tid), str_items))
def __print_dict(self, d):
"""
		Pretty-print a C / L dictionary
		:param d: the dictionary to print
:return: None
"""
for froset, value in d.items():
items = ', '.join(list(froset))
print("{%s} : \033[1m%d\033[0m" % (items, value))
def __count_support_of_itemset(self, other):
"""
		Count how many transactions contain the itemset given in `other`
		:param other: must be a set() or frozenset()
:return: int
"""
count = 0
for tid, items in self.__transaction.items():
if set(items).issuperset(other):
count += 1
return count
def __create_C2(self, L):
list_key = list(L.keys())
result_set = set()
for key_first in list_key:
key_first = list(key_first)[0]
for key_second in list_key:
key_second = list(key_second)[0]
if key_second != key_first:
new_item_set = {key_first, key_second}
result_set.add(frozenset(new_item_set))
return result_set
def __create_candidate(self, L, k):
"""
		Build candidate itemsets from the previous L
		:param L: L-1
		:param k: current iteration index
:return:
"""
indexes = range(k - 2)
c = set()
for index, (itemset_i, sup) in enumerate(L.items()):
itemset_i = list(itemset_i)
same_items_i = []
for i in indexes:
same_items_i.append(itemset_i[i])
setz = set(itemset_i)
ada_yang_kembar = False
for indexj, itemset_j in enumerate(L):
itemset_j = list(itemset_j)
same_items_j = []
for i in indexes:
same_items_j.append(itemset_j[i])
if index != indexj:
if same_items_i == same_items_j:
ada_yang_kembar = True
itemsetj = list(itemset_j)
setz.update(itemsetj)
break
if not ada_yang_kembar:
setz.clear()
if setz:
c.add(frozenset(setz))
return c
def scan(self, min_support=-1):
"""
		Scan the transactions to obtain the C and L sets from the transaction data
:return:
"""
if min_support < 0:
min_support = self.__minimum_support
print("\nScan dengan minimum support : %d" % min_support)
C = dict()
L = dict()
idx = 0
		# Loop while L is non-empty or idx == 0
while L or idx == 0:
idx += 1
if idx == 1:
				# Build C1
for tid, items in self.__transaction.items():
for item in items:
item = frozenset({item})
if item in C:
C[item] += 1
else:
C[item] = 1
elif idx == 2:
				# Build C2 with initial support_count = 0
C = dict.fromkeys(self.__create_C2(L), 0)
else:
				# Build C3..Cn with initial support_count = 0
C = dict.fromkeys(self.__create_candidate(L, idx), 0)
			# Count how many transactions contain each itemset in C
for k, val in C.items():
C[k] = self.__count_support_of_itemset(k)
			# Drop itemsets in C below the minimum support and keep the rest in L
L.clear()
for key, val in C.items():
if val >= min_support:
L[key] = val
if C:
print("\n" + "=" * 50)
print("C%d :" % idx)
self.__print_dict(C)
print("L%d :" % idx)
if L:
self.__print_dict(L)
else:
print("Kosong karena semua kandidat tidak memenuhi minimum support")
else:
print("\nTidak dapat membentuk kandidat selanjutnya. Program Berhenti.")
| 34.337748
| 102
| 0.502797
|
f6dc3daf31589353c4a8c13d5ff652f7648700d4
| 1,280
|
py
|
Python
|
localstack/mock/apis/dynamodbstreams_api.py
|
WesleyCharlesBlake/localstack
|
7402ce0edae4f314538ced4a84b4bbb5b0ade0dd
|
[
"Apache-2.0"
] | 1
|
2017-05-03T12:17:59.000Z
|
2017-05-03T12:17:59.000Z
|
localstack/mock/apis/dynamodbstreams_api.py
|
WesleyCharlesBlake/localstack
|
7402ce0edae4f314538ced4a84b4bbb5b0ade0dd
|
[
"Apache-2.0"
] | null | null | null |
localstack/mock/apis/dynamodbstreams_api.py
|
WesleyCharlesBlake/localstack
|
7402ce0edae4f314538ced4a84b4bbb5b0ade0dd
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
import os
import json
import uuid
import logging
from flask import Flask, jsonify, request
from localstack.utils.aws import aws_stack
# NOTE: DEFAULT_PORT_DYNAMODBSTREAMS is referenced in __main__ below but not defined here;
# it is assumed to be provided by localstack's constants module.
from localstack.constants import DEFAULT_PORT_DYNAMODBSTREAMS
APP_NAME = 'ddb_streams_mock'
app = Flask(APP_NAME)
DDB_STREAMS = []
def add_dynamodb_stream(table_name, view_type='NEW_AND_OLD_IMAGES', enabled=True):
if enabled:
stream = {
'StreamArn': aws_stack.dynamodb_stream_arn(table_name=table_name),
'TableName': table_name,
'StreamLabel': 'TODO'
}
DDB_STREAMS.append(stream)
@app.route('/', methods=['POST'])
def post_request():
action = request.headers.get('x-amz-target')
data = json.loads(request.data)
result = None
if action == 'DynamoDBStreams_20120810.ListStreams':
result = {
'Streams': DDB_STREAMS,
'LastEvaluatedStreamArn': 'TODO'
}
else:
print('WARNING: Unknown operation "%s"' % action)
return jsonify(result)
def serve(port, quiet=True):
if quiet:
log = logging.getLogger('werkzeug')
log.setLevel(logging.ERROR)
app.run(port=int(port), threaded=True, host='0.0.0.0')
if __name__ == '__main__':
port = DEFAULT_PORT_DYNAMODBSTREAMS
print("Starting server on port %s" % port)
serve(port)
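# --- Editor's note: hedged usage sketch, not part of the original localstack file ---
# Exercises the mock above from a separate process once serve(port) is running.
# Only the '/' route and the 'x-amz-target' action handled in post_request() are used;
# the port number 4570 is an assumption for illustration.
def _example_list_streams(port=4570):
    import requests
    headers = {'x-amz-target': 'DynamoDBStreams_20120810.ListStreams'}
    resp = requests.post('http://localhost:%s/' % port, data=json.dumps({}), headers=headers)
    return resp.json()  # e.g. {'Streams': [...], 'LastEvaluatedStreamArn': 'TODO'}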
| 24.615385
| 82
| 0.650781
|
87e127b12471b83d3bf79066ac981346502d0161
| 5,636
|
py
|
Python
|
top_block.py
|
xueyuecanfeng/C-LQI
|
f489c6447428d6affb2159e9d8f895caab2868c7
|
[
"BSD-2-Clause"
] | 2
|
2021-11-30T02:35:48.000Z
|
2021-11-30T02:53:02.000Z
|
top_block.py
|
xueyuecanfeng/C-LQI
|
f489c6447428d6affb2159e9d8f895caab2868c7
|
[
"BSD-2-Clause"
] | null | null | null |
top_block.py
|
xueyuecanfeng/C-LQI
|
f489c6447428d6affb2159e9d8f895caab2868c7
|
[
"BSD-2-Clause"
] | null | null | null |
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
##################################################
# GNU Radio Python Flow Graph
# Title: Top Block
# Generated: Fri Aug 10 08:11:40 2018
##################################################
if __name__ == '__main__':
import ctypes
import sys
if sys.platform.startswith('linux'):
try:
x11 = ctypes.cdll.LoadLibrary('libX11.so')
x11.XInitThreads()
except:
print "Warning: failed to XInitThreads()"
from PyQt4 import Qt
from gnuradio import blocks
from gnuradio import eng_notation
from gnuradio import fft
from gnuradio import gr
from gnuradio.eng_option import eng_option
from gnuradio.fft import window
from gnuradio.filter import firdes
from optparse import OptionParser
import bitTobyte
import byteTo6bit
import carrier48
import carrier_corresponding
import deconvolution
import decycleprefix
import deinterleaver
import demapping
import descramble
import sys
class top_block(gr.top_block, Qt.QWidget):
def __init__(self):
gr.top_block.__init__(self, "Top Block")
Qt.QWidget.__init__(self)
self.setWindowTitle("Top Block")
try:
self.setWindowIcon(Qt.QIcon.fromTheme('gnuradio-grc'))
except:
pass
self.top_scroll_layout = Qt.QVBoxLayout()
self.setLayout(self.top_scroll_layout)
self.top_scroll = Qt.QScrollArea()
self.top_scroll.setFrameStyle(Qt.QFrame.NoFrame)
self.top_scroll_layout.addWidget(self.top_scroll)
self.top_scroll.setWidgetResizable(True)
self.top_widget = Qt.QWidget()
self.top_scroll.setWidget(self.top_widget)
self.top_layout = Qt.QVBoxLayout(self.top_widget)
self.top_grid_layout = Qt.QGridLayout()
self.top_layout.addLayout(self.top_grid_layout)
self.settings = Qt.QSettings("GNU Radio", "top_block")
self.restoreGeometry(self.settings.value("geometry").toByteArray())
##################################################
# Variables
##################################################
self.samp_rate = samp_rate = 32000
##################################################
# Blocks
##################################################
self.fft_vxx_0_1 = fft.fft_vcc(64, True, (window.rectangular(64)), True, 1)
self.descramble_descramble_0 = descramble.descramble(9)
self.demapping_my_demapping_64_0 = demapping.my_demapping_64(64)
self.deinterleaver_deinterleaver_0 = deinterleaver.deinterleaver()
self.decycleprefix_decycleprefix_0 = decycleprefix.decycleprefix()
self.deconvolution_deconvolution_0 = deconvolution.deconvolution()
self.carrier_corresponding_carrier_correponding_0 = carrier_corresponding.carrier_correponding()
self.carrier48_carrier48_0 = carrier48.carrier48()
self.byteTo6bit_byteTo6bit_0 = byteTo6bit.byteTo6bit()
self.blocks_stream_to_vector_0 = blocks.stream_to_vector(gr.sizeof_gr_complex*1, 80)
self.blocks_file_source_0 = blocks.file_source(gr.sizeof_gr_complex*1, "/home/zj/学姐实验/8.10/zigbee9_double", False)
self.blocks_file_sink_0_0_0_1_0_0 = blocks.file_sink(gr.sizeof_char*27, "/home/zj/学姐实验/8.10/zigbee9_webee", False)
self.blocks_file_sink_0_0_0_1_0_0.set_unbuffered(False)
self.bitTobyte_bitTobyte_1 = bitTobyte.bitTobyte()
##################################################
# Connections
##################################################
self.connect((self.bitTobyte_bitTobyte_1, 0), (self.blocks_file_sink_0_0_0_1_0_0, 0))
self.connect((self.blocks_file_source_0, 0), (self.blocks_stream_to_vector_0, 0))
self.connect((self.blocks_stream_to_vector_0, 0), (self.decycleprefix_decycleprefix_0, 0))
self.connect((self.byteTo6bit_byteTo6bit_0, 0), (self.deinterleaver_deinterleaver_0, 0))
self.connect((self.carrier48_carrier48_0, 0), (self.byteTo6bit_byteTo6bit_0, 0))
self.connect((self.carrier_corresponding_carrier_correponding_0, 0), (self.carrier48_carrier48_0, 0))
self.connect((self.deconvolution_deconvolution_0, 0), (self.descramble_descramble_0, 0))
self.connect((self.decycleprefix_decycleprefix_0, 0), (self.fft_vxx_0_1, 0))
self.connect((self.deinterleaver_deinterleaver_0, 0), (self.deconvolution_deconvolution_0, 0))
self.connect((self.demapping_my_demapping_64_0, 0), (self.carrier_corresponding_carrier_correponding_0, 0))
self.connect((self.descramble_descramble_0, 0), (self.bitTobyte_bitTobyte_1, 0))
self.connect((self.fft_vxx_0_1, 0), (self.demapping_my_demapping_64_0, 0))
def closeEvent(self, event):
self.settings = Qt.QSettings("GNU Radio", "top_block")
self.settings.setValue("geometry", self.saveGeometry())
event.accept()
def get_samp_rate(self):
return self.samp_rate
def set_samp_rate(self, samp_rate):
self.samp_rate = samp_rate
def main(top_block_cls=top_block, options=None):
from distutils.version import StrictVersion
if StrictVersion(Qt.qVersion()) >= StrictVersion("4.5.0"):
style = gr.prefs().get_string('qtgui', 'style', 'raster')
Qt.QApplication.setGraphicsSystem(style)
qapp = Qt.QApplication(sys.argv)
tb = top_block_cls()
tb.start()
tb.show()
def quitting():
tb.stop()
tb.wait()
qapp.connect(qapp, Qt.SIGNAL("aboutToQuit()"), quitting)
qapp.exec_()
if __name__ == '__main__':
main()
| 41.138686
| 122
| 0.649042
|
a187a89f90aa4b860effbeef276000b0dff8557f
| 1,345
|
py
|
Python
|
ScrappingData.py
|
Harshit161999/Air_Quality_Index
|
17acc726ccf6fe19fcc1294966b886bb7bdb0b0f
|
[
"MIT"
] | 1
|
2020-12-31T10:47:11.000Z
|
2020-12-31T10:47:11.000Z
|
ScrappingData.py
|
Harshit161999/Air_Quality_Index
|
17acc726ccf6fe19fcc1294966b886bb7bdb0b0f
|
[
"MIT"
] | null | null | null |
ScrappingData.py
|
Harshit161999/Air_Quality_Index
|
17acc726ccf6fe19fcc1294966b886bb7bdb0b0f
|
[
"MIT"
] | 5
|
2021-01-13T18:47:13.000Z
|
2021-08-03T11:33:51.000Z
|
from bs4 import BeautifulSoup
import pandas as pd
import numpy as np
import os
import sys
years = list(range(2010,2019))
months = ["%02d"%x for x in range(1, 13)]
for year in years:
for month in months:
file_html = open('Data/Html_Data/{}/{}.html'.format(year,month), 'rb')
plain_text = file_html.read()
soup = BeautifulSoup(plain_text, 'lxml')
table = soup.find('table', class_ = 'medias mensuales numspan')
tbody = table.find_all('tr')
tcols = [table.find('tr').find_all('th')[i].text for i in range(15)]
days_in_month = len(tbody) - 3 # (1 for cols, last 2 for avg of the month)
tdata_of_month = [[table.find_all('tr')[day].find_all('td')[i].text for i in range (15)] for day in range(1, days_in_month + 1)]
df_month = pd.DataFrame(tdata_of_month, columns = tcols)
df_month.drop(columns = ['VG', 'RA', 'SN', 'TS', 'FG'], inplace = True)
df_month.replace('', np.nan, inplace = True)
df_month.dropna(subset = tcols[1 : 9], inplace = True)
if not os.path.exists("Data/Scrapped_Data/{}".format(year)):
os.makedirs("Data/Scrapped_Data/{}".format(year))
df_month.to_csv("Data/Scrapped_Data/{}/{}.csv".format(year, month), index = False)
sys.stdout.flush()
| 37.361111
| 136
| 0.600743
|
35300795bf44e47b49b460b6d3a8707caa0a4894
| 12,501
|
py
|
Python
|
report_merger.py
|
xenoxine/GyoiThon
|
94567e18261be5152bb809b5b6f056a60c710122
|
[
"Apache-2.0"
] | 666
|
2018-03-14T04:54:49.000Z
|
2022-03-31T07:54:55.000Z
|
report_merger.py
|
FDlucifer/GyoiThon
|
3abd7032905b718fb04eeba9c4658799fc748336
|
[
"Apache-2.0"
] | 47
|
2018-03-23T08:39:31.000Z
|
2021-07-11T06:56:07.000Z
|
report_merger.py
|
FDlucifer/GyoiThon
|
3abd7032905b718fb04eeba9c4658799fc748336
|
[
"Apache-2.0"
] | 261
|
2018-03-14T00:44:25.000Z
|
2022-03-28T08:06:38.000Z
|
#!/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import traceback
import re
import codecs
import glob
import configparser
import pandas as pd
from util import Utilty
# Type of printing.
OK = 'ok' # [*]
NOTE = 'note' # [+]
FAIL = 'fail' # [-]
WARNING = 'warn' # [!]
NONE = 'none' # No label.
# Merge report.
class MergeReport:
def __init__(self, utility):
self.utility = utility
# Read config file.
config = configparser.ConfigParser()
self.file_name = os.path.basename(__file__)
self.full_path = os.path.dirname(os.path.abspath(__file__))
config.read(os.path.join(self.full_path, 'config.ini'))
# Define report header.
self.header = ['No', '海外/国内', '会社名/組織名', 'カテゴリ', 'FQDN (URL)', 'リダイレクト/トップURL (URL)',
'ソース (URL)', 'FQDN (IPアドレス)', 'トップURL (IPアドレス)', 'フォーム (認証)', 'Basic (認証)',
'開発/本番 (環境)', 'クラウド (環境)', '製品 (CMS)', '管理画面 (CMS)', '不要なコンテンツ',
'ディレクトリ一覧の表示', 'エラーメッセージ', '不適切なコメント', 'Apache (製品)', 'PHP (製品)',
'OpenSSL (製品)', 'nginx (製品)', 'IIS (製品)', '.NET (製品)',
'MVC (製品)', 'WordPress (製品)', 'その他 (製品)', '備考']
# Must product name.
self.require_prduct = ['apache@http_server', 'php@php', 'openssl@openssl', 'nginx@nginx',
'microsoft@internet_information_server', 'microsoft@asp.net', 'microsoft@mvc',
'wordpress@wordpress']
# Basic authentication regex.
self.basic_regex = 'WWW-Authenticate\:\s(Basic|Bearer|Digest|HOBA|Mutual|AWS4-HMAC-SHA256)\s'
self.basic_proxy_regex = 'Proxy-Authenticate\:\s(Basic|Bearer|Digest|HOBA|Mutual|AWS4-HMAC-SHA256)\s'
try:
self.local_header = (config['Report']['header']).split('@')
self.report_dir = os.path.join(self.full_path, config['Report']['report_path'])
self.in_report = os.path.join(self.report_dir, config['Report']['report_name'])
out_report_name = 'gyoithon_merge_report_{}.csv'.format(self.utility.get_current_date('%Y%m%d%H%M%S'))
self.out_report = os.path.join(self.report_dir, out_report_name)
except Exception as e:
self.utility.print_message(FAIL, 'Reading config.ini is failure : {}'.format(e))
self.utility.write_log(40, 'Reading config.ini is failure : {}'.format(e))
sys.exit(1)
# Create report's header.
def create_report_header(self):
self.utility.print_message(NOTE, 'Create report header : {}'.format(self.out_report))
self.utility.write_log(20, '[In] Create report header [{}].'.format(self.out_report))
# Create report header.
if os.path.exists(self.out_report) is False:
pd.DataFrame([], columns=self.header).to_csv(self.out_report, mode='w', index=False, encoding='utf_8_sig')
self.utility.write_log(20, '[Out] Create report header [{}].'.format(self.out_report))
# Get target report (local report).
def get_target_report(self):
# Gather reporting items.
csv_file_list = glob.glob(self.in_report)
# Create DataFrame.
try:
for report_idx, file in enumerate(csv_file_list):
self.utility.print_message(OK, '{}/{} Processing: {}'.format(report_idx+1, len(csv_file_list), file))
record = []
df_local = pd.read_csv(file, names=self.local_header, header=0, sep=',', encoding='utf-8')
record.append(self.extract_report_element(report_idx+1, df_local))
# Add record.
pd.DataFrame(record).to_csv(self.out_report, mode='a', header=False, index=False, encoding='utf_8_sig')
except Exception as e:
t, v, tb = sys.exc_info()
self.utility.print_message(FAIL, 'Invalid file error: {}'.format(e))
self.utility.print_message(FAIL, traceback.format_exception(t, v, tb))
self.utility.print_message(FAIL, traceback.format_tb(e.__traceback__))
return
# Extract report's element from local reports.
def extract_report_element(self, report_idx, df_local):
record = []
record.insert(0, report_idx) # No.
        record.insert(1, '-')                          # Overseas/Domestic
        record.insert(2, '-')                          # Company/Organization name
        record.insert(3, '-')                          # Category
        record.insert(4, df_local['fqdn'][0])          # FQDN.
        record.insert(5, df_local['origin_url'][0])    # Top URL
        record.insert(6, '-')                          # Source
        record.insert(7, df_local['ip_addr'][0])       # FQDN (IP address).
        origin_url_ip = (df_local['origin_url'][0]).replace(df_local['fqdn'][0], df_local['ip_addr'][0], 1)
        record.insert(8, origin_url_ip)                # Top URL (IP address).
# Check login form.
if self.check_login_form(df_local):
record.insert(9, '有')
else:
record.insert(9, '-')
# Check Basic authentication.
if self.check_basic_auth(df_local):
record.insert(10, '有')
else:
record.insert(10, '-')
        record.insert(11, '-')                         # Development/Production
        record.insert(12, df_local['cloud_type'][0])   # Cloud
# Check CMS product.
cms_info = list(map(list, set(map(tuple, self.check_cms(df_local)))))
if len(cms_info) != 0:
cms_product = []
cms_manage_page = []
for cms in cms_info:
cms_product.append(cms[0])
cms_manage_page.append(cms[1])
            record.insert(13, '\n'.join(cms_product))      # CMS product name
            record.insert(14, '\n'.join(cms_manage_page))  # CMS admin page
else:
record.insert(13, '-')
record.insert(14, '-')
# Check unnecessary contents.
record.insert(15, '\n'.join(self.check_unnecessary_content(df_local)))
        record.insert(16, '-')  # TODO: directory listing
# Unnecessary comment and error message.
un_comment, error_msg = self.check_comment_error(df_local)
record.insert(17, '\n'.join(error_msg))
record.insert(18, '\n'.join(un_comment))
# Check products.
        require_list, other_list = self.check_require_product(df_local)
for idx in range(len(require_list)):
if idx == 0: # Apache
self.set_require_prod(idx, 19, require_list, record)
elif idx == 1: # PHP
self.set_require_prod(idx, 20, require_list, record)
elif idx == 2: # OpenSSL
self.set_require_prod(idx, 21, require_list, record)
elif idx == 3: # nginx
self.set_require_prod(idx, 22, require_list, record)
elif idx == 4: # IIS
self.set_require_prod(idx, 23, require_list, record)
elif idx == 5: # .NET
self.set_require_prod(idx, 24, require_list, record)
elif idx == 6: # MVC
self.set_require_prod(idx, 25, require_list, record)
elif idx == 7: # WordPress
self.set_require_prod(idx, 26, require_list, record)
# Other products.
if len(other_list) != 0:
record.insert(27, '\n'.join(other_list))
else:
record.insert(27, '-')
# Note.
record.insert(28, '-')
return record
# Set requirement product.
def set_require_prod(self, prod_idx, rec_idx, require_list, record):
if require_list[prod_idx][0]:
if len(require_list[prod_idx][1]) != 0:
record.insert(rec_idx, '\n'.join(require_list[prod_idx][1]))
else:
record.insert(rec_idx, '○')
else:
record.insert(rec_idx, '-')
# Check login form.
def check_login_form(self, df_local):
df_login = df_local[df_local['origin_login'] != 'Log : - %\nUrl : 0.0 %']
if len(df_login) != 0:
return True
else:
return False
# Check Basic authentication.
def check_basic_auth(self, df_local):
is_basic_auth = False
for log_path in df_local['log']:
with codecs.open(log_path, 'r', encoding='utf-8') as fin:
log_file = fin.read()
obj_match = re.search(self.basic_regex, log_file, flags=re.IGNORECASE)
if obj_match is not None:
is_basic_auth = True
break
obj_match = re.search(self.basic_proxy_regex, log_file, flags=re.IGNORECASE)
if obj_match is not None:
is_basic_auth = True
break
return is_basic_auth
# Check CMS.
def check_cms(self, df_local):
cms_info = []
df_cms = df_local[df_local['prod_type'] == 'CMS']
if len(df_cms) != 0:
for idx, cms_record in df_cms.iterrows():
local_record = []
local_record.insert(0, cms_record['prod_name'] + '/' + cms_record['prod_version'])
if 'Url : 100%' in cms_record['origin_login']:
local_record.insert(1, cms_record['url'])
else:
local_record.insert(1, '-')
cms_info.append(local_record)
return cms_info
# Check unnecessary contents.
def check_unnecessary_content(self, df_local):
un_contents = df_local[(df_local['method'] == 'Direct') | (df_local['method'] == 'Search')]['url']
return list(set(un_contents))
# Check unnecessary comments and error messages.
def check_comment_error(self, df_local):
comments = list(set(df_local['wrong_comment']))
error_msg = list(set(df_local['error_msg']))
return [s for s in comments if s != '-'], [s for s in error_msg if s != '-']
    # Check required products.
    def check_require_product(self, df_local):
# Apache, PHP, OpenSSL, nginx, IIS, ASP.NET, WordPress.
require_list = {0: [False, []], 1: [False, []], 2: [False, []], 3: [False, []],
4: [False, []], 5: [False, []], 6: [False, []], 7: [False, []]}
# Other products.
other_list = []
        # Check required products.
        for idx, target_product in enumerate(self.require_product):
target_item = target_product.split('@')
df_selected_record = df_local[(df_local['vendor_name'] == target_item[0]) &
(df_local['prod_name'] == target_item[1])]
version_list = []
if len(df_selected_record) != 0:
require_list[idx][0] = True
for pd_idx, record in df_selected_record.iterrows():
if record['prod_version'] != '*':
version_list.append('"' + str(record['prod_version']) + '"')
require_list[idx][1].extend(list(set(version_list)))
# Check other products.
df_rec = df_local[~((df_local['vendor_name'] == 'apache') & (df_local['prod_name'] == 'http_server')) &
~((df_local['vendor_name'] == 'php') & (df_local['prod_name'] == 'php')) &
~((df_local['vendor_name'] == 'openssl') & (df_local['prod_name'] == 'openssl')) &
~((df_local['vendor_name'] == 'nginx') & (df_local['prod_name'] == 'nginx')) &
~((df_local['vendor_name'] == 'microsoft') & (df_local['prod_name'] == 'internet_information_server')) &
~((df_local['vendor_name'] == 'microsoft') & (df_local['prod_name'] == 'asp.net')) &
~((df_local['vendor_name'] == 'microsoft') & (df_local['prod_name'] == 'mvc')) &
~((df_local['vendor_name'] == 'wordpress') & (df_local['prod_name'] == 'wordpress'))]
if len(df_rec) != 0:
for other_idx, record in df_rec.iterrows():
if record['prod_name'] != '-':
other_list.append(record['vendor_name'] + ' ' + record['prod_name'] + '/' + record['prod_version'])
return require_list, list(set(other_list))
# main.
if __name__ == '__main__':
merge = MergeReport(Utilty())
# Create report header.
merge.create_report_header()
# Merge report.
merge.get_target_report()
print('finish!!')
| 43.70979
| 130
| 0.553476
|
c79f278fc5d1bc6610f0866272d80b643d665158
| 1,531
|
py
|
Python
|
SoundOff-seperate_files_classes/Testing/interface_testing.py
|
DuquesneS22CPMA536/SoundOff
|
f36dd4be13abe964a1a0407515fe29afe5852e3b
|
[
"MIT"
] | null | null | null |
SoundOff-seperate_files_classes/Testing/interface_testing.py
|
DuquesneS22CPMA536/SoundOff
|
f36dd4be13abe964a1a0407515fe29afe5852e3b
|
[
"MIT"
] | 48
|
2022-03-17T18:07:59.000Z
|
2022-03-31T18:52:40.000Z
|
SoundOff-seperate_files_classes/Testing/interface_testing.py
|
DuquesneS22CPMA536/SoundOff
|
f36dd4be13abe964a1a0407515fe29afe5852e3b
|
[
"MIT"
] | null | null | null |
# Using pyautogui to go through a simple run of using the app on Gabbie's computer to compare a file to all platforms, LUFS and peak
# all coordinates were found using an example program from https://pyautogui.readthedocs.io/en/latest/mouse.html
import pyautogui as gui, time
screenWidth, screenHeight = gui.size() # get screen size
# opening and maximizing SoundOff_Application folder in file explorer
gui.moveTo(33, 1058)
gui.click()
gui.typewrite('SoundOff_Application', interval=0.25)
gui.press('enter')
time.sleep(2)
gui.keyDown('alt')
gui.press(' ')
gui.press('x')
gui.keyUp('alt')
# opening and maximizing SoundOff
gui.moveTo(352, 261) # coordinates of SoundOff.exe when folder is maximized
gui.doubleClick()
time.sleep(10) # waiting 10 seconds for SoundOff window to open
gui.moveTo(470, 543) # switching active window to SoundOff window
gui.click()
time.sleep(2)
gui.keyDown('alt')
gui.press(' ')
gui.press('x')
gui.keyUp('alt')
# selecting a file
gui.moveTo(990, 389) # coordinates of "Select a file" button when SoundOff window is maximized
gui.click()
time.sleep(2)
gui.moveTo(100, 478) # coordinates of folder with file when file explorer is opened
gui.click()
time.sleep(2)
gui.moveTo(379, 287) # coordinates of file within folder when file explorer is opened
gui.doubleClick()
time.sleep(10) # waiting 10 seconds to calculate and display "Select Report" window
# selecting all platforms
gui.moveTo(96, 723) # coordinates of "Select All Platforms" button
gui.doubleClick()
| 36.452381
| 133
| 0.744611
|
c1c1d45fa6f7b21219892043ec9cd24f2dee886f
| 488
|
py
|
Python
|
tests/integration/test_scroll_to_element.py
|
KalinkinaMaria/selene
|
859e1102c85740b52af8d0f08dd6b6490b4bd2ff
|
[
"MIT"
] | null | null | null |
tests/integration/test_scroll_to_element.py
|
KalinkinaMaria/selene
|
859e1102c85740b52af8d0f08dd6b6490b4bd2ff
|
[
"MIT"
] | 1
|
2021-06-02T04:21:17.000Z
|
2021-06-02T04:21:17.000Z
|
tests/integration/test_scroll_to_element.py
|
vkarpenko/selene
|
4776357430c940be38f38be9981006dd156f9730
|
[
"MIT"
] | null | null | null |
import logging
import os
from selene import config
from selene.browser import open_url, driver
from selene.support.jquery_style_selectors import s
start_page = 'file://' + os.path.abspath(os.path.dirname(__file__)) + '/../resources/start_page.html'
def setup_module(m):
config.browser_name = "chrome"
def test_can_scroll_to():
open_url(start_page)
logging.warning(driver().current_url)
driver().set_window_size(300, 400)
s("#invisible_link").scroll_to().click()
| 24.4
| 101
| 0.743852
|
c390c33eec2a0bba1a490400cf744ae92060ac32
| 2,317
|
py
|
Python
|
cmake/scripts/convert.py
|
RavenX8/osirosenew
|
b4ef1aade379e0eb4753b24c30ec43faca77aa37
|
[
"Apache-2.0"
] | 49
|
2017-02-07T15:10:04.000Z
|
2021-11-12T06:00:30.000Z
|
cmake/scripts/convert.py
|
RavenX8/osirosenew
|
b4ef1aade379e0eb4753b24c30ec43faca77aa37
|
[
"Apache-2.0"
] | 104
|
2016-10-31T01:42:59.000Z
|
2021-08-28T13:29:09.000Z
|
cmake/scripts/convert.py
|
RavenX8/osirosenew
|
b4ef1aade379e0eb4753b24c30ec43faca77aa37
|
[
"Apache-2.0"
] | 36
|
2016-11-01T11:25:34.000Z
|
2022-03-09T22:38:51.000Z
|
#!/usr/bin/env python
# coding: utf-8
# originally from:
# http://www.warp1337.com/content/how-use-ctest-jenkins-xunit-or-junit-plugin
# improved by:
# Jorge Araya Navarro <elcorreo@deshackra.com>
# Veni, Sancte Spiritus.
from lxml import etree
import argparse
from os.path import expanduser
from os.path import join
import logging
# configure logging
logging.basicConfig(format="%(levelname)s: %(message)s",
level=logging.ERROR)
desc = ("Converts ctest XML file to xUnit/JUnit XML "
        "compatible file to use with Jenkins-CI. "
        "Did you find any bugs? Please report them on: "
        "https://bitbucket.org/shackra/ctest-jenkins/issues")
# configure argument parser.
parser = argparse.ArgumentParser(description=desc)
parser.add_argument("-x", "--xslt", help="the XSLT file to use", required=True)
parser.add_argument("-t", "--tag", help=("the directory where 'Testing/TAG' "
"file is. Remember to call ctest with"
" '-T test' option to generate it"),
required=True)
parsed = parser.parse_args()
# expanding user symbol "~"
parsed.xslt = expanduser(parsed.xslt)
parsed.tag = expanduser(parsed.tag)
# opening the TAG file
directory = None
try:
with open(join(parsed.tag, "Testing", "TAG")) as tagfile:
directory = tagfile.readline().strip()
except NotADirectoryError:
logging.error(
"'Testing/TAG' wasn't found on directory '{}'.".format(parsed.tag))
exit(1)
except FileNotFoundError:
logging.error(
"File '{}' not found.".format(join(parsed.tag, "Testing", "TAG")))
exit(1)
xmldoc = None
transform = None
try:
with open(join(parsed.tag, "Testing", directory, "Test.xml"))\
as testxmlfile:
xmldoc = etree.parse(testxmlfile)
except FileNotFoundError:
logging.error("File {} not found. Was it deleted or moved?".format(
join(parsed.tag, "Testing", directory, "Test.xml")))
exit(1)
try:
with open(parsed.xslt) as xsltfile:
xslt_root = etree.XML(xsltfile.read())
transform = etree.XSLT(xslt_root)
except FileNotFoundError:
logging.error("File {} not found.".format(parsed.xslt))
exit(1)
result_tree = transform(xmldoc)
print(result_tree)
| 30.893333
| 79
| 0.649115
|
33cdd692e161c973c063793c401294425feaded4
| 3,595
|
py
|
Python
|
fuze/angr-modified/concretization_strategies/__init__.py
|
Xelentra1/Linux_kernel_exploits
|
5b52fb0fe5210bb9103c57158fe5c4016ca06105
|
[
"MIT"
] | 444
|
2018-05-13T00:16:03.000Z
|
2022-03-30T01:31:50.000Z
|
fuze/angr-modified/concretization_strategies/__init__.py
|
Xelentra1/Linux_kernel_exploits
|
5b52fb0fe5210bb9103c57158fe5c4016ca06105
|
[
"MIT"
] | 6
|
2019-04-19T00:20:16.000Z
|
2022-01-19T02:04:00.000Z
|
fuze/angr-modified/concretization_strategies/__init__.py
|
Xelentra1/Linux_kernel_exploits
|
5b52fb0fe5210bb9103c57158fe5c4016ca06105
|
[
"MIT"
] | 97
|
2018-07-23T02:44:29.000Z
|
2022-03-31T10:18:07.000Z
|
class SimConcretizationStrategy(object):
"""
Concretization strategies control the resolution of symbolic memory indices
in SimuVEX. By subclassing this class and setting it as a concretization strategy
(on state.memory.read_strategies and state.memory.write_strategies), SimuVEX's
memory index concretization behavior can be modified.
"""
def __init__(self, filter=None, exact=True): #pylint:disable=redefined-builtin
"""
Initializes the base SimConcretizationStrategy.
        :param filter: A function, taking arguments of (SimMemory, claripy.AST) that determines
if this strategy can handle resolving the provided AST.
:param exact: A flag (default: True) that determines if the convenience resolution
functions provided by this class use exact or approximate resolution.
"""
self._exact = exact
self._filter = filter
def _min(self, memory, addr, **kwargs):
"""
Gets the minimum solution of an address.
"""
return memory.state.se.min(addr, exact=kwargs.pop('exact', self._exact), **kwargs)
def _max(self, memory, addr, **kwargs):
"""
Gets the maximum solution of an address.
"""
return memory.state.se.max(addr, exact=kwargs.pop('exact', self._exact), **kwargs)
def _any(self, memory, addr, **kwargs):
"""
Gets any solution of an address.
"""
return memory.state.se.eval(addr, exact=kwargs.pop('exact', self._exact), **kwargs)
def _eval(self, memory, addr, n, **kwargs):
"""
Gets n solutions for an address.
"""
return memory.state.se.eval_upto(addr, n, exact=kwargs.pop('exact', self._exact), **kwargs)
def _range(self, memory, addr, **kwargs):
"""
Gets the (min, max) range of solutions for an address.
"""
return (self._min(memory, addr, **kwargs), self._max(memory, addr, **kwargs))
def concretize(self, memory, addr):
"""
Concretizes the address into a list of values.
If this strategy cannot handle this address, returns None.
"""
if self._filter is None or self._filter(memory, addr):
return self._concretize(memory, addr)
def _concretize(self, memory, addr):
"""
Should be implemented by child classes to handle concretization.
"""
raise NotImplementedError()
def copy(self):
"""
Returns a copy of the strategy, if there is data that should be kept separate between
states. If not, returns self.
"""
return self
def merge(self, others):
"""
Merges this strategy with others (if there is data that should be kept separate between
states. If not, is a no-op.
"""
pass
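# Illustrative sketch only (not part of angr): a minimal subclass following the
# contract documented above, resolving every symbolic address to its minimum
# satisfying value. concretize() expects _concretize() to return a list of values.
class SimConcretizationStrategyMinExample(SimConcretizationStrategy):
    def _concretize(self, memory, addr):
        # Single-element list holding the smallest solution of the address.
        return [self._min(memory, addr)]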
from .any import SimConcretizationStrategyAny
from .controlled_data import SimConcretizationStrategyControlledData
from .eval import SimConcretizationStrategyEval
from .max import SimConcretizationStrategyMax
from .nonzero import SimConcretizationStrategyNonzero
from .nonzero_range import SimConcretizationStrategyNonzeroRange
from .norepeats import SimConcretizationStrategyNorepeats
from .norepeats_range import SimConcretizationStrategyNorepeatsRange
from .range import SimConcretizationStrategyRange
from .single import SimConcretizationStrategySingle
from .solutions import SimConcretizationStrategySolutions
from .kuafffp import SimConcretizationStrategyKuafffp
from .kuafffp2 import SimConcretizationStrategyKuafffp2
| 39.076087
| 99
| 0.679277
|
fa8a217c77baab4068a7bb5b5460a98255fa7a26
| 16,109
|
py
|
Python
|
twilio/rest/api/v2010/account/recording/add_on_result/payload/__init__.py
|
BrimmingDev/twilio-python
|
3226b5fed92b3c2ce64f03e6b19fc4792ef7647f
|
[
"MIT"
] | 1,362
|
2015-01-04T10:25:18.000Z
|
2022-03-24T10:07:08.000Z
|
twilio/rest/api/v2010/account/recording/add_on_result/payload/__init__.py
|
BrimmingDev/twilio-python
|
3226b5fed92b3c2ce64f03e6b19fc4792ef7647f
|
[
"MIT"
] | 299
|
2015-01-30T09:52:39.000Z
|
2022-03-31T23:03:02.000Z
|
env/lib/python3.9/site-packages/twilio/rest/api/v2010/account/recording/add_on_result/payload/__init__.py
|
giannicrivello/AudioShack_BE
|
b50ba91b6904ac069fc37c98a691729932297b6a
|
[
"MIT"
] | 622
|
2015-01-03T04:43:09.000Z
|
2022-03-29T14:11:00.000Z
|
# coding=utf-8
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from twilio.base import deserialize
from twilio.base import values
from twilio.base.instance_context import InstanceContext
from twilio.base.instance_resource import InstanceResource
from twilio.base.list_resource import ListResource
from twilio.base.page import Page
class PayloadList(ListResource):
def __init__(self, version, account_sid, reference_sid, add_on_result_sid):
"""
Initialize the PayloadList
:param Version version: Version that contains the resource
:param account_sid: The SID of the Account that created the resource
:param reference_sid: The SID of the recording to which the AddOnResult resource that contains the payload belongs
:param add_on_result_sid: The SID of the AddOnResult to which the payload belongs
:returns: twilio.rest.api.v2010.account.recording.add_on_result.payload.PayloadList
:rtype: twilio.rest.api.v2010.account.recording.add_on_result.payload.PayloadList
"""
super(PayloadList, self).__init__(version)
# Path Solution
self._solution = {
'account_sid': account_sid,
'reference_sid': reference_sid,
'add_on_result_sid': add_on_result_sid,
}
self._uri = '/Accounts/{account_sid}/Recordings/{reference_sid}/AddOnResults/{add_on_result_sid}/Payloads.json'.format(**self._solution)
def stream(self, limit=None, page_size=None):
"""
Streams PayloadInstance records from the API as a generator stream.
This operation lazily loads records as efficiently as possible until the limit
is reached.
The results are returned as a generator, so this operation is memory efficient.
:param int limit: Upper limit for the number of records to return. stream()
guarantees to never return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, stream() will attempt to read the
limit with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.api.v2010.account.recording.add_on_result.payload.PayloadInstance]
"""
limits = self._version.read_limits(limit, page_size)
page = self.page(page_size=limits['page_size'], )
return self._version.stream(page, limits['limit'])
def list(self, limit=None, page_size=None):
"""
Lists PayloadInstance records from the API as a list.
Unlike stream(), this operation is eager and will load `limit` records into
memory before returning.
:param int limit: Upper limit for the number of records to return. list() guarantees
never to return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, list() will attempt to read the limit
with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.api.v2010.account.recording.add_on_result.payload.PayloadInstance]
"""
return list(self.stream(limit=limit, page_size=page_size, ))
def page(self, page_token=values.unset, page_number=values.unset,
page_size=values.unset):
"""
Retrieve a single page of PayloadInstance records from the API.
Request is executed immediately
:param str page_token: PageToken provided by the API
:param int page_number: Page Number, this value is simply for client state
:param int page_size: Number of records to return, defaults to 50
:returns: Page of PayloadInstance
:rtype: twilio.rest.api.v2010.account.recording.add_on_result.payload.PayloadPage
"""
data = values.of({'PageToken': page_token, 'Page': page_number, 'PageSize': page_size, })
response = self._version.page(method='GET', uri=self._uri, params=data, )
return PayloadPage(self._version, response, self._solution)
def get_page(self, target_url):
"""
Retrieve a specific page of PayloadInstance records from the API.
Request is executed immediately
:param str target_url: API-generated URL for the requested results page
:returns: Page of PayloadInstance
:rtype: twilio.rest.api.v2010.account.recording.add_on_result.payload.PayloadPage
"""
response = self._version.domain.twilio.request(
'GET',
target_url,
)
return PayloadPage(self._version, response, self._solution)
def get(self, sid):
"""
Constructs a PayloadContext
:param sid: The unique string that identifies the resource to fetch
:returns: twilio.rest.api.v2010.account.recording.add_on_result.payload.PayloadContext
:rtype: twilio.rest.api.v2010.account.recording.add_on_result.payload.PayloadContext
"""
return PayloadContext(
self._version,
account_sid=self._solution['account_sid'],
reference_sid=self._solution['reference_sid'],
add_on_result_sid=self._solution['add_on_result_sid'],
sid=sid,
)
def __call__(self, sid):
"""
Constructs a PayloadContext
:param sid: The unique string that identifies the resource to fetch
:returns: twilio.rest.api.v2010.account.recording.add_on_result.payload.PayloadContext
:rtype: twilio.rest.api.v2010.account.recording.add_on_result.payload.PayloadContext
"""
return PayloadContext(
self._version,
account_sid=self._solution['account_sid'],
reference_sid=self._solution['reference_sid'],
add_on_result_sid=self._solution['add_on_result_sid'],
sid=sid,
)
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Api.V2010.PayloadList>'
class PayloadPage(Page):
def __init__(self, version, response, solution):
"""
Initialize the PayloadPage
:param Version version: Version that contains the resource
:param Response response: Response from the API
:param account_sid: The SID of the Account that created the resource
:param reference_sid: The SID of the recording to which the AddOnResult resource that contains the payload belongs
:param add_on_result_sid: The SID of the AddOnResult to which the payload belongs
:returns: twilio.rest.api.v2010.account.recording.add_on_result.payload.PayloadPage
:rtype: twilio.rest.api.v2010.account.recording.add_on_result.payload.PayloadPage
"""
super(PayloadPage, self).__init__(version, response)
# Path Solution
self._solution = solution
def get_instance(self, payload):
"""
Build an instance of PayloadInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.api.v2010.account.recording.add_on_result.payload.PayloadInstance
:rtype: twilio.rest.api.v2010.account.recording.add_on_result.payload.PayloadInstance
"""
return PayloadInstance(
self._version,
payload,
account_sid=self._solution['account_sid'],
reference_sid=self._solution['reference_sid'],
add_on_result_sid=self._solution['add_on_result_sid'],
)
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Api.V2010.PayloadPage>'
class PayloadContext(InstanceContext):
def __init__(self, version, account_sid, reference_sid, add_on_result_sid, sid):
"""
Initialize the PayloadContext
:param Version version: Version that contains the resource
:param account_sid: The SID of the Account that created the resource to fetch
:param reference_sid: The SID of the recording to which the AddOnResult resource that contains the payload to fetch belongs
:param add_on_result_sid: The SID of the AddOnResult to which the payload to fetch belongs
:param sid: The unique string that identifies the resource to fetch
:returns: twilio.rest.api.v2010.account.recording.add_on_result.payload.PayloadContext
:rtype: twilio.rest.api.v2010.account.recording.add_on_result.payload.PayloadContext
"""
super(PayloadContext, self).__init__(version)
# Path Solution
self._solution = {
'account_sid': account_sid,
'reference_sid': reference_sid,
'add_on_result_sid': add_on_result_sid,
'sid': sid,
}
self._uri = '/Accounts/{account_sid}/Recordings/{reference_sid}/AddOnResults/{add_on_result_sid}/Payloads/{sid}.json'.format(**self._solution)
def fetch(self):
"""
Fetch the PayloadInstance
:returns: The fetched PayloadInstance
:rtype: twilio.rest.api.v2010.account.recording.add_on_result.payload.PayloadInstance
"""
payload = self._version.fetch(method='GET', uri=self._uri, )
return PayloadInstance(
self._version,
payload,
account_sid=self._solution['account_sid'],
reference_sid=self._solution['reference_sid'],
add_on_result_sid=self._solution['add_on_result_sid'],
sid=self._solution['sid'],
)
def delete(self):
"""
Deletes the PayloadInstance
:returns: True if delete succeeds, False otherwise
:rtype: bool
"""
return self._version.delete(method='DELETE', uri=self._uri, )
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
return '<Twilio.Api.V2010.PayloadContext {}>'.format(context)
class PayloadInstance(InstanceResource):
def __init__(self, version, payload, account_sid, reference_sid,
add_on_result_sid, sid=None):
"""
Initialize the PayloadInstance
:returns: twilio.rest.api.v2010.account.recording.add_on_result.payload.PayloadInstance
:rtype: twilio.rest.api.v2010.account.recording.add_on_result.payload.PayloadInstance
"""
super(PayloadInstance, self).__init__(version)
# Marshaled Properties
self._properties = {
'sid': payload.get('sid'),
'add_on_result_sid': payload.get('add_on_result_sid'),
'account_sid': payload.get('account_sid'),
'label': payload.get('label'),
'add_on_sid': payload.get('add_on_sid'),
'add_on_configuration_sid': payload.get('add_on_configuration_sid'),
'content_type': payload.get('content_type'),
'date_created': deserialize.rfc2822_datetime(payload.get('date_created')),
'date_updated': deserialize.rfc2822_datetime(payload.get('date_updated')),
'reference_sid': payload.get('reference_sid'),
'subresource_uris': payload.get('subresource_uris'),
}
# Context
self._context = None
self._solution = {
'account_sid': account_sid,
'reference_sid': reference_sid,
'add_on_result_sid': add_on_result_sid,
'sid': sid or self._properties['sid'],
}
@property
def _proxy(self):
"""
Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: PayloadContext for this PayloadInstance
:rtype: twilio.rest.api.v2010.account.recording.add_on_result.payload.PayloadContext
"""
if self._context is None:
self._context = PayloadContext(
self._version,
account_sid=self._solution['account_sid'],
reference_sid=self._solution['reference_sid'],
add_on_result_sid=self._solution['add_on_result_sid'],
sid=self._solution['sid'],
)
return self._context
@property
def sid(self):
"""
:returns: The unique string that identifies the resource
:rtype: unicode
"""
return self._properties['sid']
@property
def add_on_result_sid(self):
"""
:returns: The SID of the AddOnResult to which the payload belongs
:rtype: unicode
"""
return self._properties['add_on_result_sid']
@property
def account_sid(self):
"""
:returns: The SID of the Account that created the resource
:rtype: unicode
"""
return self._properties['account_sid']
@property
def label(self):
"""
:returns: The string that describes the payload
:rtype: unicode
"""
return self._properties['label']
@property
def add_on_sid(self):
"""
:returns: The SID of the Add-on to which the result belongs
:rtype: unicode
"""
return self._properties['add_on_sid']
@property
def add_on_configuration_sid(self):
"""
:returns: The SID of the Add-on configuration
:rtype: unicode
"""
return self._properties['add_on_configuration_sid']
@property
def content_type(self):
"""
:returns: The MIME type of the payload
:rtype: unicode
"""
return self._properties['content_type']
@property
def date_created(self):
"""
:returns: The RFC 2822 date and time in GMT that the resource was created
:rtype: datetime
"""
return self._properties['date_created']
@property
def date_updated(self):
"""
:returns: The RFC 2822 date and time in GMT that the resource was last updated
:rtype: datetime
"""
return self._properties['date_updated']
@property
def reference_sid(self):
"""
:returns: The SID of the recording to which the AddOnResult resource that contains the payload belongs
:rtype: unicode
"""
return self._properties['reference_sid']
@property
def subresource_uris(self):
"""
:returns: A list of related resources identified by their relative URIs
:rtype: unicode
"""
return self._properties['subresource_uris']
def fetch(self):
"""
Fetch the PayloadInstance
:returns: The fetched PayloadInstance
:rtype: twilio.rest.api.v2010.account.recording.add_on_result.payload.PayloadInstance
"""
return self._proxy.fetch()
def delete(self):
"""
Deletes the PayloadInstance
:returns: True if delete succeeds, False otherwise
:rtype: bool
"""
return self._proxy.delete()
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
return '<Twilio.Api.V2010.PayloadInstance {}>'.format(context)
| 36.363431
| 150
| 0.638711
|
a8b55a9d94fe23f767f45db6a7bca99528ab3c34
| 2,848
|
py
|
Python
|
src/oci/blockchain/models/peer_summary.py
|
LaudateCorpus1/oci-python-sdk
|
b0d3ce629d5113df4d8b83b7a6502b2c5bfa3015
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
src/oci/blockchain/models/peer_summary.py
|
LaudateCorpus1/oci-python-sdk
|
b0d3ce629d5113df4d8b83b7a6502b2c5bfa3015
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
src/oci/blockchain/models/peer_summary.py
|
LaudateCorpus1/oci-python-sdk
|
b0d3ce629d5113df4d8b83b7a6502b2c5bfa3015
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
# coding: utf-8
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class PeerSummary(object):
"""
Peer summary information for returning in a list.
"""
def __init__(self, **kwargs):
"""
Initializes a new PeerSummary object with values from keyword arguments.
The following keyword arguments are supported (corresponding to the getters/setters of this class):
:param peer_key:
The value to assign to the peer_key property of this PeerSummary.
:type peer_key: str
:param lifecycle_state:
The value to assign to the lifecycle_state property of this PeerSummary.
:type lifecycle_state: str
"""
self.swagger_types = {
'peer_key': 'str',
'lifecycle_state': 'str'
}
self.attribute_map = {
'peer_key': 'peerKey',
'lifecycle_state': 'lifecycleState'
}
self._peer_key = None
self._lifecycle_state = None
@property
def peer_key(self):
"""
Gets the peer_key of this PeerSummary.
Peer identifier
:return: The peer_key of this PeerSummary.
:rtype: str
"""
return self._peer_key
@peer_key.setter
def peer_key(self, peer_key):
"""
Sets the peer_key of this PeerSummary.
Peer identifier
:param peer_key: The peer_key of this PeerSummary.
:type: str
"""
self._peer_key = peer_key
@property
def lifecycle_state(self):
"""
Gets the lifecycle_state of this PeerSummary.
The current state of the peer.
:return: The lifecycle_state of this PeerSummary.
:rtype: str
"""
return self._lifecycle_state
@lifecycle_state.setter
def lifecycle_state(self, lifecycle_state):
"""
Sets the lifecycle_state of this PeerSummary.
The current state of the peer.
:param lifecycle_state: The lifecycle_state of this PeerSummary.
:type: str
"""
self._lifecycle_state = lifecycle_state
def __repr__(self):
return formatted_flat_dict(self)
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| 27.921569
| 245
| 0.636938
|
123829feb5acb422658c566e80a68e9d65016264
| 1,097
|
py
|
Python
|
hard-gists/3181989/snippet.py
|
jjhenkel/dockerizeme
|
eaa4fe5366f6b9adf74399eab01c712cacaeb279
|
[
"Apache-2.0"
] | 21
|
2019-07-08T08:26:45.000Z
|
2022-01-24T23:53:25.000Z
|
hard-gists/3181989/snippet.py
|
jjhenkel/dockerizeme
|
eaa4fe5366f6b9adf74399eab01c712cacaeb279
|
[
"Apache-2.0"
] | 5
|
2019-06-15T14:47:47.000Z
|
2022-02-26T05:02:56.000Z
|
hard-gists/3181989/snippet.py
|
jjhenkel/dockerizeme
|
eaa4fe5366f6b9adf74399eab01c712cacaeb279
|
[
"Apache-2.0"
] | 17
|
2019-05-16T03:50:34.000Z
|
2021-01-14T14:35:12.000Z
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
import sys
import StringIO
import requests
import PIL.Image
import tesserwrap
#: https://github.com/gregjurman/tesserwrap
tesseract = tesserwrap.tesseract()
def distinguish_captcha(image_url, show_origin_image=True):
#: preprocess
image_bytes = requests.get(image_url).content
origin_image = PIL.Image.open(StringIO.StringIO(image_bytes))
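    #: brighten (x1.5), binarize at threshold 200, then convert to a 1-bit image before OCR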
image = origin_image.point(lambda p: p * 1.5)\
.point(lambda p: 255 if p > 200 else 0)\
.convert("1")
#: distinguish the text
text = tesseract.ocr_image(image)
#: show the origin image
if show_origin_image:
origin_image.show()
return text.strip()
def main():
url = raw_input("Please input the url of captcha:\n > ").strip()
print >> sys.stderr, ">>> Press Ctrl + C to stop."
print >> sys.stderr, ">>> Press any key to continue."
while True:
raw_input()
print distinguish_captcha(url)
if __name__ == "__main__":
try:
print main()
except KeyboardInterrupt:
print >> sys.stderr, ">>> Exit."
| 24.377778
| 68
| 0.652689
|
913e65e98f353c782d908b60b3a496b5e11de0c0
| 2,706
|
py
|
Python
|
fserver/util.py
|
Carrotor116/fserver
|
86934023fb080088854b4bae7867ad3667561111
|
[
"MIT"
] | 1
|
2020-08-06T01:25:14.000Z
|
2020-08-06T01:25:14.000Z
|
fserver/util.py
|
Carrotor116/fserver
|
86934023fb080088854b4bae7867ad3667561111
|
[
"MIT"
] | 1
|
2020-07-26T09:27:01.000Z
|
2020-07-26T09:27:01.000Z
|
fserver/util.py
|
Carrotor116/fserver
|
86934023fb080088854b4bae7867ad3667561111
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import print_function
import os
import re
import sys
from fserver import conf
def debug(*args):
if conf.DEBUG:
pretty_print(sys.stdout, 'debug', *args)
def warning(*args):
pretty_print(sys.stderr, 'warning', *args)
def pretty_print(file, level, *args):
level = level.upper()
msg = ' '.join(str(_) for _ in args)
    msg = msg.replace('\n', '\n{}: '.format(level))
print('{}: '.format(level) + msg, file=file)
def _get_ip_v4_ipconfig():
ips = set()
try:
ip_cmd = os.popen('ipconfig 2>&1').read().split('\n')
for line in ip_cmd:
if 'ip' not in line.lower():
continue
[ips.add(i) for i in line.replace('\r', '').split(' ') # filter ip mask
if is_ip_v4(i) and not i.startswith('255') and not i.endswith('.0')]
# [ips.append(s[s.index(':') + 2:]) for s in ip_cmd if 'ipv4' in s.lower()]
ips.add('127.0.0.1')
except Exception as e:
debug(e)
return ips
def _get_ip_v4_ifconfig():
ips = set()
sh = r"""ifconfig 2>&1 | \
awk -F '[ :]' 'BEGIN{print "succeed"}/inet /{ for (i=1;i<=NF;i++){ if ($i~/[0-9]\./) {print $i;break }} }' 2>&1 """
try:
ip_cmd = os.popen(sh).read()
if 'succeed' in ip_cmd:
[ips.add(i) for i in ip_cmd.split('\n') if i != '' and i != 'succeed']
ips.add('127.0.0.1')
except Exception as e:
debug(e)
return ips
def _get_ip_v4_ip_add():
ips = set()
sh = r"""ip -4 add 2>&1 |awk 'BEGIN{print "succeed"} $2 ~/^[0-9]+\./ {print $2}' | awk -F/ '{print $1}'"""
try:
ip_cmd = os.popen(sh).read()
if 'succeed' in ip_cmd:
[ips.add(i) for i in ip_cmd.split('\n') if i != '' and i != 'succeed']
ips.add('127.0.0.1')
except Exception as e:
debug(e)
return ips
def get_ip_v4():
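    # Collect local IPv4 addresses with the OS-appropriate helper, then drop link-local (169.254.x.x) entries.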
if os.name == 'nt':
ips = _get_ip_v4_ipconfig()
elif os.name == 'posix':
ips = _get_ip_v4_ip_add() | _get_ip_v4_ifconfig()
else:
        raise RuntimeError('unsupported os: {}'.format(os.name))
for ip in [i for i in ips]:
if ip.startswith('169.254.'):
ips.remove(ip)
return ips
def is_ip_v4(string):
r = re.match(r'((?:(?:25[0-5]|2[0-4]\d|(?:1\d{2}|[1-9]?\d))\.){3}(?:25[0-5]|2[0-4]\d|(?:1\d{2}|[1-9]?\d)))', string)
if r is not None and r.span()[1] == len(string):
return True
else:
return False
if __name__ == '__main__':
print(_get_ip_v4_ipconfig())
print(_get_ip_v4_ip_add())
print(_get_ip_v4_ifconfig())
print(is_ip_v4('127.1.1.1'))
print(is_ip_v4('127.a.1.1'))
print(is_ip_v4('0.0.0.0'))
| 27.06
| 120
| 0.539542
|
75c936e7858d5f7d0e582b87aebbbc08107d9123
| 7,899
|
py
|
Python
|
projects/tutorials/running_inference_tutorial.py
|
Aakash-Roy/allenact
|
3da8c72bcd32e33036c6b6ea32010c6b391be757
|
[
"MIT"
] | null | null | null |
projects/tutorials/running_inference_tutorial.py
|
Aakash-Roy/allenact
|
3da8c72bcd32e33036c6b6ea32010c6b391be757
|
[
"MIT"
] | null | null | null |
projects/tutorials/running_inference_tutorial.py
|
Aakash-Roy/allenact
|
3da8c72bcd32e33036c6b6ea32010c6b391be757
|
[
"MIT"
] | null | null | null |
# literate: tutorials/running-inference-on-a-pretrained-model.md
# %%
"""# Tutorial: Inference with a pre-trained model."""
# %%
"""
In this tutorial we will run inference on a pre-trained model for the PointNav task
in the RoboTHOR environment. In this task the agent is tasked with going to a specific location
within a realistic 3D environment.
For information on how to train a PointNav Model see [this tutorial](training-a-pointnav-model.md)
We will need to [install the full AllenAct library](../installation/installation-allenact.md#full-library),
the `robothor_plugin` requirements via
```bash
pip install -r allenact_plugins/robothor_plugin/extra_requirements.txt
```
and [download the
RoboTHOR Pointnav dataset](../installation/download-datasets.md) before we get started.
For this tutorial we will download the weights of a model trained on the debug dataset.
This can be done with a handy script in the `pretrained_model_ckpts` directory:
```bash
bash pretrained_model_ckpts/download_navigation_model_ckpts.sh robothor-pointnav-rgb-resnet
```
This will download the weights for an RGB model that has been
trained on the PointNav task in RoboTHOR to `pretrained_model_ckpts/robothor-pointnav-rgb-resnet`
Next we need to run the inference, using the PointNav experiment config from the
[tutorial on making a PointNav experiment](training-a-pointnav-model.md).
We can do this with the following command:
```bash
PYTHONPATH=. python allenact/main.py -o <PATH_TO_OUTPUT> -b <BASE_DIRECTORY_OF_YOUR_EXPERIMENT> -c <PATH_TO_CHECKPOINT> --eval
```
Where `<PATH_TO_OUTPUT>` is the location where the results of the test will be dumped, `<PATH_TO_CHECKPOINT>` is the
location of the downloaded model weights, and `<BASE_DIRECTORY_OF_YOUR_EXPERIMENT>` is a path to the directory where
our experiment definition is stored.
For our current setup the following command would work:
```bash
PYTHONPATH=. python allenact/main.py \
training_a_pointnav_model \
-o pretrained_model_ckpts/robothor-pointnav-rgb-resnet/ \
-b projects/tutorials \
-c pretrained_model_ckpts/robothor-pointnav-rgb-resnet/checkpoints/PointNavRobothorRGBPPO/2020-08-31_12-13-30/exp_PointNavRobothorRGBPPO__stage_00__steps_000039031200.pt \
--eval
```
For testing on all saved checkpoints we pass a directory to `--checkpoint` rather than just a single file:
```bash
PYTHONPATH=. python allenact/main.py \
training_a_pointnav_model \
-o pretrained_model_ckpts/robothor-pointnav-rgb-resnet/ \
-b projects/tutorials \
-c pretrained_model_ckpts/robothor-pointnav-rgb-resnet/checkpoints/PointNavRobothorRGBPPO/2020-08-31_12-13-30 \
--eval
```
## Visualization
We also show examples of visualizations that can be extracted from the `"valid"` and `"test"` modes. Currently,
visualization is still undergoing design changes and does not support multi-agent tasks, but the available functionality
is sufficient for pointnav in RoboThor.
Following up on the example above, we can make a specialized pointnav `ExperimentConfig` where we instantiate
the base visualization class, `VizSuite`, defined in
[`allenact.utils.viz_utils`](https://github.com/allenai/allenact/tree/master/allenact/utils/viz_utils.py), when in `test` mode.
Each visualization type can be thought of as a plugin to the base `VizSuite`. For example, all `episode_ids` passed to
`VizSuite` will be processed with each of the instantiated visualization types (possibly with the exception of the
`AgentViewViz`). In the example below we show how to instantiate different visualization types from 4 different data
sources.
The data sources available to `VizSuite` are:
* Task output (e.g. 2D trajectories)
* Vector task (e.g. egocentric views)
* Rollout storage (e.g. recurrent memory, taken action logprobs...)
* `ActorCriticOutput` (e.g. action probabilities)
The visualization types included below are:
* `TrajectoryViz`: Generic 2D trajectory view.
* `AgentViewViz`: RGB egocentric view.
* `ActorViz`: Action probabilities from `ActorCriticOutput[CategoricalDistr]`.
* `TensorViz1D`: Evolution of a point from RolloutStorage over time.
* `TensorViz2D`: Evolution of a vector from RolloutStorage over time.
* `ThorViz`: Specialized 2D trajectory view
[for RoboThor](https://github.com/allenai/allenact/tree/master/allenact_plugins/robothor_plugin/robothor_viz.py).
Note that we need to explicitly set the `episode_ids` that we wish to visualize. For `AgentViewViz` we have the option
of using a different (typically shorter) list of episodes or of enforcing the ones used for the rest of the visualizations.
"""
# %% hide
from typing import Optional
from allenact.utils.viz_utils import (
VizSuite,
TrajectoryViz,
ActorViz,
AgentViewViz,
TensorViz1D,
TensorViz2D,
)
from allenact_plugins.robothor_plugin.robothor_viz import ThorViz
from projects.tutorials.training_a_pointnav_model import (
PointNavRoboThorRGBPPOExperimentConfig,
)
# %%
class PointNavRoboThorRGBPPOVizExperimentConfig(PointNavRoboThorRGBPPOExperimentConfig):
"""ExperimentConfig used to demonstrate how to set up visualization code.
# Attributes
viz_ep_ids : Scene names that will be visualized.
viz_video_ids : Scene names that will have videos visualizations associated with them.
"""
viz_ep_ids = [
"FloorPlan_Train1_1_3",
"FloorPlan_Train1_1_4",
"FloorPlan_Train1_1_5",
"FloorPlan_Train1_1_6",
]
viz_video_ids = [["FloorPlan_Train1_1_3"], ["FloorPlan_Train1_1_4"]]
viz: Optional[VizSuite] = None
def get_viz(self, mode):
if self.viz is not None:
return self.viz
self.viz = VizSuite(
episode_ids=self.viz_ep_ids,
mode=mode,
# Basic 2D trajectory visualizer (task output source):
base_trajectory=TrajectoryViz(
path_to_target_location=("task_info", "target",),
),
# Egocentric view visualizer (vector task source):
            egocentric=AgentViewViz(
max_video_length=100, episode_ids=self.viz_video_ids
),
# Default action probability visualizer (actor critic output source):
action_probs=ActorViz(figsize=(3.25, 10), fontsize=18),
# Default taken action logprob visualizer (rollout storage source):
taken_action_logprobs=TensorViz1D(),
# Same episode mask visualizer (rollout storage source):
episode_mask=TensorViz1D(rollout_source=("masks",)),
# Default recurrent memory visualizer (rollout storage source):
rnn_memory=TensorViz2D(rollout_source=("memory", "single_belief")),
# Specialized 2D trajectory visualizer (task output source):
thor_trajectory=ThorViz(
figsize=(16, 8),
viz_rows_cols=(448, 448),
scenes=("FloorPlan_Train{}_{}", 1, 1, 1, 1),
),
)
return self.viz
def machine_params(self, mode="train", **kwargs):
res = super().machine_params(mode, **kwargs)
if mode == "test":
res.set_visualizer(self.get_viz(mode))
return res
# %%
"""
Running test on the same downloaded models, but using the visualization-enabled `ExperimentConfig` with
```bash
PYTHONPATH=. python allenact/main.py \
running_inference_tutorial \
-o pretrained_model_ckpts/robothor-pointnav-rgb-resnet/ \
-b projects/tutorials \
-c pretrained_model_ckpts/robothor-pointnav-rgb-resnet/checkpoints/PointNavRobothorRGBPPO/2020-08-31_12-13-30/exp_PointNavRobothorRGBPPO__stage_00__steps_000039031200.pt \
--eval
```
generates different types of visualization and logs them in tensorboard. If everything is properly setup and
tensorboard includes the `robothor-pointnav-rgb-resnet` folder, under the `IMAGES` tab, we should see something similar
to

"""
| 39.893939
| 171
| 0.745284
|
a826c2c2a30ce8e8923d5ebda73945230a98309f
| 5,866
|
py
|
Python
|
model_vsrl4hico.py
|
thilinicooray/mac-network-pytorch
|
0e4bf3f7f301570b652490f697758361c866f3c1
|
[
"MIT"
] | null | null | null |
model_vsrl4hico.py
|
thilinicooray/mac-network-pytorch
|
0e4bf3f7f301570b652490f697758361c866f3c1
|
[
"MIT"
] | null | null | null |
model_vsrl4hico.py
|
thilinicooray/mac-network-pytorch
|
0e4bf3f7f301570b652490f697758361c866f3c1
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn as nn
from attention import Attention, NewAttention
from language_model import WordEmbedding, QuestionEmbedding
from classifier import SimpleClassifier
from fc import FCNet
import torch.nn.functional as F
import torchvision as tv
import utils
import numpy as np
import model_verb_directcnn
import model_roles_recqa_noself
class vgg16_modified(nn.Module):
def __init__(self):
super(vgg16_modified, self).__init__()
vgg = tv.models.vgg16_bn(pretrained=True)
self.vgg_features = vgg.features
def rep_size(self):
return 1024
def base_size(self):
return 512
def forward(self,x):
#return self.dropout2(self.relu2(self.lin2(self.dropout1(self.relu1(self.lin1(self.vgg_features(x).view(-1, 512*7*7)))))))
features = self.vgg_features(x)
return features
class TopDown(nn.Module):
def __init__(self,
vocab_size,
embed_hidden=300,
mlp_hidden=512):
super(TopDown, self).__init__()
self.vocab_size = vocab_size
self.q_emb = nn.LSTM(embed_hidden, mlp_hidden,
batch_first=True, bidirectional=True)
self.lstm_proj = nn.Linear(mlp_hidden * 2, mlp_hidden)
self.verb_transform = nn.Linear(embed_hidden, mlp_hidden)
self.v_att = NewAttention(mlp_hidden, mlp_hidden, mlp_hidden)
'''self.q_net = FCNet([mlp_hidden, mlp_hidden])
self.v_net = FCNet([mlp_hidden, mlp_hidden])
self.classifier = SimpleClassifier(
mlp_hidden, 2 * mlp_hidden, self.vocab_size, 0.5)'''
self.classifier = nn.Sequential(
nn.Linear(mlp_hidden * 7 *7 + mlp_hidden, mlp_hidden*8),
nn.BatchNorm1d(mlp_hidden*8),
nn.ReLU(inplace=True),
nn.Dropout(0.5),
nn.Linear(mlp_hidden * 8, mlp_hidden*8),
nn.BatchNorm1d(mlp_hidden*8),
nn.ReLU(inplace=True),
nn.Dropout(0.5),
)
def forward(self, img, q):
batch_size = img.size(0)
w_emb = q
self.q_emb.flatten_parameters()
lstm_out, (h, _) = self.q_emb(w_emb)
q_emb = h.permute(1, 0, 2).contiguous().view(batch_size, -1)
q_emb = self.lstm_proj(q_emb)
att = self.v_att(img, q_emb)
v_emb = (att * img)
v_emb = v_emb.permute(0, 2, 1)
v_emb = v_emb.contiguous().view(-1, 512*7*7)
v_emb_with_q = torch.cat([v_emb, q_emb], -1)
logits = self.classifier(v_emb_with_q)
return logits
class BaseModel(nn.Module):
def __init__(self, encoder,
gpu_mode,
embed_hidden=300,
mlp_hidden = 512
):
super(BaseModel, self).__init__()
self.normalize = tv.transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
#self.normalize = tv.transforms.Normalize(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5])
self.train_transform = tv.transforms.Compose([
tv.transforms.RandomRotation(10),
tv.transforms.RandomResizedCrop(224),
tv.transforms.RandomHorizontalFlip(),
tv.transforms.ToTensor(),
self.normalize,
])
self.dev_transform = tv.transforms.Compose([
tv.transforms.Resize(224),
tv.transforms.CenterCrop(224),
tv.transforms.ToTensor(),
self.normalize,
])
self.encoder = encoder
self.gpu_mode = gpu_mode
self.mlp_hidden = mlp_hidden
self.verbq_word_count = len(self.encoder.verb_q_words)
self.n_verbs = self.encoder.get_num_verbs()
self.verb_module = model_verb_directcnn.BaseModel(self.encoder, self.gpu_mode)
self.role_module = model_roles_recqa_noself.BaseModel(self.encoder, self.gpu_mode)
self.verb_module.eval()
self.role_module.eval()
self.conv = vgg16_modified()
'''for param in self.verb_module.parameters():
param.require_grad = False
for param in self.role_module.parameters():
param.require_grad = False
for param in self.conv.parameters():
param.require_grad = False'''
self.verb_vqa = TopDown(self.n_verbs)
self.verb_q_emb = nn.Embedding(self.verbq_word_count + 1, embed_hidden, padding_idx=self.verbq_word_count)
self.classifier = nn.Linear(self.mlp_hidden*8, 600)
def train_preprocess(self):
return self.train_transform
def dev_preprocess(self, ):
return self.dev_transform
    def forward(self, img):
        # Predict verbs with the pretrained verb module and keep the top-ranked verb.
        verb_pred_prev = self.verb_module(img)
        sorted_idx = torch.sort(verb_pred_prev, 1, True)[1]
        verbs = sorted_idx[:,0]
        # Predict role labels for the chosen verb with the pretrained role module.
        role_pred = self.role_module(img, verbs)
        label_idx = torch.max(role_pred,-1)[1]
        # Build the verb-question token indices from the predicted verb and role labels.
        verb_q_idx = self.encoder.get_verbq_idx(verbs, label_idx)
        if self.gpu_mode >= 0:
            verb_q_idx = verb_q_idx.to(torch.device('cuda'))
        # Extract convolutional image features and flatten the spatial grid into a token sequence.
        img_embd = self.conv(img)
        batch_size, n_channel, conv_h, conv_w = img_embd.size()
        img_embd = img_embd.view(batch_size, n_channel, -1)
        img_embd = img_embd.permute(0, 2, 1)
        # Embed the question and re-predict the verb with the attention-based VQA module.
        q_emb = self.verb_q_emb(verb_q_idx)
        verb_pred_logit = self.verb_vqa(img_embd, q_emb)
        verb_pred = self.classifier(verb_pred_logit)
        return verb_pred
def calculate_loss(self, verb_pred, gt_verbs):
batch_size = verb_pred.size()[0]
loss = 0
#print('eval pred verbs :', pred_verbs)
for i in range(batch_size):
verb_loss = 0
verb_loss += utils.binary_cross_entropy(verb_pred[i], gt_verbs[i])
loss += verb_loss
final_loss = loss/batch_size
#print('loss final :', final_loss)
return final_loss
| 32.955056
| 130
| 0.620525
|
9a4b73e18784c648d5a663bc348ee884f9243c48
| 1,020
|
py
|
Python
|
myproject/myproject/urls.py
|
natchakorn19156/myrepo2
|
5113575d47745441c0df777c1ce51e450a0dd1c7
|
[
"BSD-2-Clause"
] | null | null | null |
myproject/myproject/urls.py
|
natchakorn19156/myrepo2
|
5113575d47745441c0df777c1ce51e450a0dd1c7
|
[
"BSD-2-Clause"
] | null | null | null |
myproject/myproject/urls.py
|
natchakorn19156/myrepo2
|
5113575d47745441c0df777c1ce51e450a0dd1c7
|
[
"BSD-2-Clause"
] | null | null | null |
"""myproject URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
from myapp import views
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^current_datetime/', views.current_datetime, name='current_datetime'),
url(r'^list_products/',views.list_products,name = 'list_products'),
url(r'^products_detail/',views.products_detail,name = 'products_detail')
]
| 39.230769
| 80
| 0.716667
|
96c41becc4e4f3f51687918206e29dae2533126c
| 2,626
|
py
|
Python
|
tests/config/test_config.py
|
duruyi/pycasbin
|
a16bfaa669c37ac1598684e36b0319430ab749e5
|
[
"Apache-2.0"
] | null | null | null |
tests/config/test_config.py
|
duruyi/pycasbin
|
a16bfaa669c37ac1598684e36b0319430ab749e5
|
[
"Apache-2.0"
] | null | null | null |
tests/config/test_config.py
|
duruyi/pycasbin
|
a16bfaa669c37ac1598684e36b0319430ab749e5
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2021 The casbin Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from casbin.config import Config
from unittest import TestCase
class TestConfig(TestCase):
def test_new_config(self):
path = os.path.split(os.path.realpath(__file__))[0]
path = os.path.abspath(path + "/test.ini")
config = Config.new_config(path)
# default::key
self.assertEqual(config.get("debug"), "true")
self.assertEqual(config.get("url"), "act.wiki")
# reids::key
self.assertEqual(config.get("redis::redis.key"), "push1,push2")
self.assertEqual(config.get("mysql::mysql.dev.host"), "127.0.0.1")
self.assertEqual(config.get("mysql::mysql.master.host"), "10.0.0.1")
# math::key test
self.assertEqual(config.get("math::math.i64"), "64")
self.assertEqual(config.get("math::math.f64"), "64.1")
# other::key test
self.assertEqual(config.get("other::name"), "ATC自动化测试^-^&($#……#")
self.assertEqual(config.get("other::key1"), "test key")
config.set("other::key1", "new test key")
self.assertEqual(config.get("other::key1"), "new test key")
config.set("other::key1", "test key")
self.assertEqual(config.get("multi1::name"), "r.sub==p.sub && r.obj==p.obj")
self.assertEqual(config.get("multi2::name"), "r.sub==p.sub && r.obj==p.obj")
self.assertEqual(config.get("multi3::name"), "r.sub==p.sub && r.obj==p.obj")
self.assertEqual(config.get("multi4::name"), "")
self.assertEqual(config.get("multi5::name"), "r.sub==p.sub && r.obj==p.obj")
self.assertEqual(config.get_bool("multi5::name"), False)
self.assertEqual(
config.get_string("multi5::name"), "r.sub==p.sub && r.obj==p.obj"
)
self.assertEqual(
config.get_strings("multi5::name"), ["r.sub==p.sub && r.obj==p.obj"]
)
with self.assertRaises(ValueError):
config.get_int("multi5::name")
with self.assertRaises(ValueError):
config.get_float("multi5::name")
| 39.19403
| 84
| 0.634044
|
6cc9d1eac90c3bd64a5354287fb70d47d7de88a5
| 1,516
|
py
|
Python
|
xlsxwriter/test/comparison/test_chart_axis13.py
|
hugovk/XlsxWriter
|
e97cc66637d9895480ee32cfb5e561d652d3787b
|
[
"BSD-2-Clause"
] | null | null | null |
xlsxwriter/test/comparison/test_chart_axis13.py
|
hugovk/XlsxWriter
|
e97cc66637d9895480ee32cfb5e561d652d3787b
|
[
"BSD-2-Clause"
] | null | null | null |
xlsxwriter/test/comparison/test_chart_axis13.py
|
hugovk/XlsxWriter
|
e97cc66637d9895480ee32cfb5e561d652d3787b
|
[
"BSD-2-Clause"
] | null | null | null |
###############################################################################
#
# Tests for XlsxWriter.
#
# SPDX-License-Identifier: BSD-2-Clause
# Copyright (c), 2013-2022, John McNamara, jmcnamara@cpan.org
#
from ..excel_comparison_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
"""
Test file created by XlsxWriter against a file created by Excel.
"""
def setUp(self):
self.set_filename('chart_axis13.xlsx')
def test_create_file(self):
"""Test the creation of a simple XlsxWriter file."""
workbook = Workbook(self.got_filename)
worksheet = workbook.add_worksheet()
chart = workbook.add_chart({'type': 'scatter'})
chart.axis_ids = [54045312, 54043776]
data = [
[1, 2, 3, 4, 5],
[2, 4, 6, 8, 10],
[3, 6, 9, 12, 15],
]
worksheet.write_column('A1', data[0])
worksheet.write_column('B1', data[1])
worksheet.write_column('C1', data[2])
chart.add_series({
'categories': '=Sheet1!$A$1:$A$5',
'values': '=Sheet1!$B$1:$B$5',
})
chart.add_series({
'categories': '=Sheet1!$A$1:$A$5',
'values': '=Sheet1!$C$1:$C$5',
})
chart.set_y_axis({'min': 0, 'max': 16})
chart.set_x_axis({'min': 0, 'max': 6})
worksheet.insert_chart('E9', chart)
workbook.close()
self.assertExcelEqual()
| 24.451613
| 79
| 0.53628
|
cca8cc94fc2218c9952104210c727dd01a3f1a42
| 10,362
|
py
|
Python
|
tests/test_wallet.py
|
massanchik/monero-python
|
5699c26f6ba0a64f50ac065ebe0419daf01fd993
|
[
"BSD-3-Clause"
] | 3
|
2020-05-02T02:48:16.000Z
|
2021-06-15T16:50:11.000Z
|
tests/test_wallet.py
|
massanchik/monero-python
|
5699c26f6ba0a64f50ac065ebe0419daf01fd993
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_wallet.py
|
massanchik/monero-python
|
5699c26f6ba0a64f50ac065ebe0419daf01fd993
|
[
"BSD-3-Clause"
] | 1
|
2019-03-08T12:04:19.000Z
|
2019-03-08T12:04:19.000Z
|
from datetime import datetime
from decimal import Decimal
import unittest
import warnings
from monero.wallet import Wallet
from monero.account import Account
from monero.address import address
from monero.numbers import PaymentID
from monero.transaction import IncomingPayment, Transaction
class FiltersTestCase(unittest.TestCase):
def setUp(self):
class MockBackend(object):
def __init__(self):
self.transfers = []
tx = Transaction(
timestamp=datetime(2018, 1, 29, 15, 0, 25),
height=1087606,
hash='a0b876ebcf7c1d499712d84cedec836f9d50b608bb22d6cb49fd2feae3ffed14',
fee=Decimal('0.00352891'))
pm = IncomingPayment(
amount=Decimal('1'),
local_address=address('Bf6ngv7q2TBWup13nEm9AjZ36gLE6i4QCaZ7XScZUKDUeGbYEHmPRdegKGwLT8tBBK7P6L32RELNzCR6QzNFkmogDjvypyV'),
payment_id=PaymentID('0166d8da6c0045c51273dd65d6f63734beb8a84e0545a185b2cfd053fced9f5d'),
transaction=tx)
self.transfers.append(pm)
tx = Transaction(
timestamp=datetime(2018, 1, 29, 14, 57, 47),
height=1087601,
hash='f34b495cec77822a70f829ec8a5a7f1e727128d62e6b1438e9cb7799654d610e',
fee=Decimal('0.008661870000'))
pm = IncomingPayment(
amount=Decimal('3.000000000000'),
local_address=address('BhE3cQvB7VF2uuXcpXp28Wbadez6GgjypdRS1F1Mzqn8Advd6q8VfaX8ZoEDobjejrMfpHeNXoX8MjY8q8prW1PEALgr1En'),
payment_id=PaymentID('f75ad90e25d71a12'),
transaction=tx)
self.transfers.append(pm)
tx = Transaction(
timestamp=datetime(2018, 1, 29, 13, 17, 18),
height=1087530,
hash='5c3ab739346e9d98d38dc7b8d36a4b7b1e4b6a16276946485a69797dbf887cd8',
fee=Decimal('0.000962550000'))
pm = IncomingPayment(
amount=Decimal('10.000000000000'),
local_address=address('9tQoHWyZ4yXUgbz9nvMcFZUfDy5hxcdZabQCxmNCUukKYicXegsDL7nQpcUa3A1pF6K3fhq3scsyY88tdB1MqucULcKzWZC'),
payment_id=PaymentID('f75ad90e25d71a12'),
transaction=tx)
self.transfers.append(pm)
tx = Transaction(
timestamp=datetime(2018, 1, 29, 13, 17, 18),
height=1087530,
hash='4ea70add5d0c7db33557551b15cd174972fcfc73bf0f6a6b47b7837564b708d3',
fee=Decimal('0.000962550000'))
pm = IncomingPayment(
amount=Decimal('4.000000000000'),
local_address=address('9tQoHWyZ4yXUgbz9nvMcFZUfDy5hxcdZabQCxmNCUukKYicXegsDL7nQpcUa3A1pF6K3fhq3scsyY88tdB1MqucULcKzWZC'),
payment_id=PaymentID('f75ad90e25d71a12'),
transaction=tx)
self.transfers.append(pm)
tx = Transaction(
timestamp=datetime(2018, 1, 29, 13, 17, 18),
height=1087530,
hash='e9a71c01875bec20812f71d155bfabf42024fde3ec82475562b817dcc8cbf8dc',
fee=Decimal('0.000962550000'))
pm = IncomingPayment(
amount=Decimal('2.120000000000'),
local_address=address('9tQoHWyZ4yXUgbz9nvMcFZUfDy5hxcdZabQCxmNCUukKYicXegsDL7nQpcUa3A1pF6K3fhq3scsyY88tdB1MqucULcKzWZC'),
payment_id=PaymentID('cb248105ea6a9189'),
transaction=tx)
self.transfers.append(pm)
tx = Transaction(
timestamp=datetime(2018, 1, 29, 14, 57, 47),
height=1087601,
hash='5ef7ead6a041101ed326568fbb59c128403cba46076c3f353cd110d969dac808',
fee=Decimal('0.000962430000'))
pm = IncomingPayment(
amount=Decimal('7.000000000000'),
local_address=address('BhE3cQvB7VF2uuXcpXp28Wbadez6GgjypdRS1F1Mzqn8Advd6q8VfaX8ZoEDobjejrMfpHeNXoX8MjY8q8prW1PEALgr1En'),
payment_id=PaymentID('0000000000000000'),
transaction=tx)
self.transfers.append(pm)
tx = Transaction(
timestamp=datetime(2018, 1, 29, 13, 17, 18),
height=1087530,
hash='cc44568337a186c2e1ccc080b43b4ae9db26a07b7afd7edeed60ce2fc4a6477f',
fee=Decimal('0.000962550000'))
pm = IncomingPayment(
amount=Decimal('10.000000000000'),
local_address=address('9tQoHWyZ4yXUgbz9nvMcFZUfDy5hxcdZabQCxmNCUukKYicXegsDL7nQpcUa3A1pF6K3fhq3scsyY88tdB1MqucULcKzWZC'),
payment_id=PaymentID('0000000000000000'),
transaction=tx)
self.transfers.append(pm)
tx = Transaction(
timestamp=datetime(2018, 1, 29, 21, 13, 28),
height=None,
hash='d29264ad317e8fdb55ea04484c00420430c35be7b3fe6dd663f99aebf41a786c',
fee=Decimal('0.000961950000'))
pm = IncomingPayment(
amount=Decimal('3.140000000000'),
local_address=address('9tQoHWyZ4yXUgbz9nvMcFZUfDy5hxcdZabQCxmNCUukKYicXegsDL7nQpcUa3A1pF6K3fhq3scsyY88tdB1MqucULcKzWZC'),
payment_id=PaymentID('03f6649304ea4cb2'),
transaction=tx)
self.transfers.append(pm)
def height(self):
return 1087607
def accounts(self):
return [Account(self, 0)]
def transfers_in(self, account, pmtfilter):
return list(pmtfilter.filter(self.transfers))
self.wallet = Wallet(MockBackend())
def test_filter_none(self):
pmts = self.wallet.incoming()
self.assertEqual(len(pmts), 7)
def test_filter_payment_id(self):
pmts = self.wallet.incoming(payment_id='cb248105ea6a9189')
self.assertEqual(len(pmts), 1)
self.assertEqual(
pmts[0].transaction.hash,
'e9a71c01875bec20812f71d155bfabf42024fde3ec82475562b817dcc8cbf8dc')
pmts = self.wallet.incoming(payment_id='f75ad90e25d71a12')
self.assertEqual(len(pmts), 3)
pmts = self.wallet.incoming(payment_id=('cb248105ea6a9189', 'f75ad90e25d71a12'))
self.assertEqual(len(pmts), 4)
self.assertEqual(
pmts,
self.wallet.incoming(payment_id=(PaymentID('cb248105ea6a9189'), 'f75ad90e25d71a12')))
def test_filter_address(self):
pmts = self.wallet.incoming(
local_address='BhE3cQvB7VF2uuXcpXp28Wbadez6GgjypdRS1F1Mzqn8Advd6q8VfaX8ZoEDobjejrMfpHeNXoX8MjY8q8prW1PEALgr1En')
self.assertEqual(len(pmts), 2)
self.assertEqual(
pmts,
self.wallet.incoming(
local_address=address('BhE3cQvB7VF2uuXcpXp28Wbadez6GgjypdRS1F1Mzqn8Advd6q8VfaX8ZoEDobjejrMfpHeNXoX8MjY8q8prW1PEALgr1En')))
pmts = self.wallet.incoming(
local_address=(
'BhE3cQvB7VF2uuXcpXp28Wbadez6GgjypdRS1F1Mzqn8Advd6q8VfaX8ZoEDobjejrMfpHeNXoX8MjY8q8prW1PEALgr1En',
'Bf6ngv7q2TBWup13nEm9AjZ36gLE6i4QCaZ7XScZUKDUeGbYEHmPRdegKGwLT8tBBK7P6L32RELNzCR6QzNFkmogDjvypyV'))
self.assertEqual(len(pmts), 3)
def test_filter_mempool(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
pmts = self.wallet.incoming()
self.assertEqual(len(pmts), 7)
for p in pmts:
self.assertGreater(self.wallet.confirmations(p.transaction), 0)
pmts = self.wallet.incoming(unconfirmed=True)
self.assertEqual(len(pmts), 8)
pmts = self.wallet.incoming(unconfirmed=True, confirmed=False)
self.assertEqual(len(pmts), 1)
self.assertEqual(
pmts[0].transaction.hash,
'd29264ad317e8fdb55ea04484c00420430c35be7b3fe6dd663f99aebf41a786c')
self.assertEqual(self.wallet.confirmations(pmts[0]), 0)
self.assertEqual(self.wallet.confirmations(pmts[0].transaction), 0)
self.assertEqual(len(w), 0)
pmts = self.wallet.incoming(unconfirmed=True, confirmed=False, min_height=1)
self.assertEqual(len(pmts), 0)
self.assertEqual(len(w), 1)
self.assertIs(w[0].category, RuntimeWarning)
pmts = self.wallet.incoming(unconfirmed=True, confirmed=False, max_height=99999999999999)
self.assertEqual(len(pmts), 0)
self.assertEqual(len(w), 2)
self.assertIs(w[1].category, RuntimeWarning)
pmts = self.wallet.incoming(payment_id='03f6649304ea4cb2')
self.assertEqual(len(pmts), 0)
pmts = self.wallet.incoming(unconfirmed=True, payment_id='03f6649304ea4cb2')
self.assertEqual(len(pmts), 1)
pmts = self.wallet.incoming(
local_address='9tQoHWyZ4yXUgbz9nvMcFZUfDy5hxcdZabQCxmNCUukKYicXegsDL7nQpcUa3A1pF6K3fhq3scsyY88tdB1MqucULcKzWZC')
self.assertEqual(len(pmts), 4)
pmts = self.wallet.incoming(
unconfirmed=True,
local_address='9tQoHWyZ4yXUgbz9nvMcFZUfDy5hxcdZabQCxmNCUukKYicXegsDL7nQpcUa3A1pF6K3fhq3scsyY88tdB1MqucULcKzWZC')
self.assertEqual(len(pmts), 5)
pmts = self.wallet.incoming(
local_address='9tQoHWyZ4yXUgbz9nvMcFZUfDy5hxcdZabQCxmNCUukKYicXegsDL7nQpcUa3A1pF6K3fhq3scsyY88tdB1MqucULcKzWZC',
payment_id='03f6649304ea4cb2')
self.assertEqual(len(pmts), 0)
pmts = self.wallet.incoming(
unconfirmed=True,
local_address='9tQoHWyZ4yXUgbz9nvMcFZUfDy5hxcdZabQCxmNCUukKYicXegsDL7nQpcUa3A1pF6K3fhq3scsyY88tdB1MqucULcKzWZC',
payment_id='03f6649304ea4cb2')
self.assertEqual(len(pmts), 1)
self.assertEqual(len(w), 2)
def test_filter_excessive(self):
self.assertRaises(ValueError, self.wallet.incoming, excessive_argument='foo')
| 52.333333
| 141
| 0.62073
|
b5b3d82529a573395cd05894956ea8bbc5f68957
| 3,197
|
py
|
Python
|
member/models.py
|
kaqfa/supervise_backend
|
0d3e57648f96c943e6f5f2da1276386acdc95b3f
|
[
"Apache-2.0"
] | null | null | null |
member/models.py
|
kaqfa/supervise_backend
|
0d3e57648f96c943e6f5f2da1276386acdc95b3f
|
[
"Apache-2.0"
] | null | null | null |
member/models.py
|
kaqfa/supervise_backend
|
0d3e57648f96c943e6f5f2da1276386acdc95b3f
|
[
"Apache-2.0"
] | null | null | null |
from django.db import models
from django.contrib.auth.hashers import PBKDF2PasswordHasher
from django.contrib.auth.models import User
from django.utils.crypto import get_random_string
import progress
class Expertise(models.Model):
"""Bidang keahlian supervisor"""
name = models.CharField(max_length=100)
description = models.TextField(null=True, blank=True)
def __str__(self):
return self.name
class Member(models.Model):
"""Pengganti User, include student dan supervisor"""
LEVEL_CHOICES = (('st', 'mahasiswa'), ('sp', 'pembimbing'))
STATUS_CHOICES = (('a', 'aktif'), ('b', 'banned'), ('n', 'nonaktif'),
('g', 'lulus'))
user = models.OneToOneField(User)
nim = models.CharField(max_length=20, null=True, blank=True)
npp = models.CharField(max_length=20, null=True, blank=True)
address = models.TextField(null=True, blank=True)
phone = models.CharField(max_length=20, null=True, blank=True)
expertise = models.ManyToManyField(Expertise, blank=True)
level = models.CharField(max_length=2, choices=LEVEL_CHOICES)
status = models.CharField(max_length=1, default='a', choices=STATUS_CHOICES)
supervisor = models.ForeignKey("self", null=True, blank=True)
def __str__(self):
return self.user.username
def name(self):
return self.user.first_name+" "+self.user.last_name
def get_progress_data(self):
try:
thesis = {'topic': self.thesis.topic, 'title': self.thesis.title,
'abstract': self.thesis.abstract, 'field': None,
'save_date': self.thesis.save_date}
data = {'username': self.user.username, 'nim': self.nim,
'name': self.name(), 'thesis': thesis,
'number_of_task': self.student_num_of_task(),
'number_of_task_done': self.student_num_of_done_task()}
except progress.models.Thesis.DoesNotExist:
data = {'username': self.user.username, 'nim': self.nim,
'name': self.name(), 'thesis': None,
'number_of_task': self.student_num_of_task(),
'number_of_task_done': self.student_num_of_done_task()}
return data
def student_num_of_task(self):
if self.level == 'st':
return self.studenttask_set.all().count()
return 0
    def student_num_of_done_task(self):
        if self.level == 'st':
            return self.studenttask_set.filter(status=2).count()
        return 0
class StudentProposal(models.Model):
STATUS_CHOICES = (('p', 'menunggu'), ('a', 'diterima'), ('r', 'ditolak'))
student = models.ForeignKey(Member, related_name='%(class)s_student')
supervisor = models.ForeignKey(Member, related_name='%(class)s_supervisor')
status = models.CharField(max_length=1, choices=STATUS_CHOICES)
propose_date = models.DateTimeField(auto_now_add=True)
response_date = models.DateTimeField(auto_now=True)
def response(self, code):
self.status = code
self.save()
if code == 'a':
student = self.student
student.supervisor = self.supervisor
student.save()
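# --- Hedged usage sketch (not part of the original module) ---
# Accepting a proposal assigns the proposed supervisor to the student:
#
#     proposal = StudentProposal.objects.get(pk=1)   # hypothetical lookup
#     proposal.response('a')                         # 'a' = accepted ("diterima")
#     proposal.student.refresh_from_db()
#     assert proposal.student.supervisor == proposal.supervisor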
| 38.518072
| 80
| 0.640601
|
67c59d46f6dfafa3295ef5bf59702024d3f2b4da
| 886
|
py
|
Python
|
tests/test_iters.py
|
cwstryker/skidl
|
b5854ecd71e9da70d5391ad67ab30a8985a61ee8
|
[
"MIT"
] | null | null | null |
tests/test_iters.py
|
cwstryker/skidl
|
b5854ecd71e9da70d5391ad67ab30a8985a61ee8
|
[
"MIT"
] | null | null | null |
tests/test_iters.py
|
cwstryker/skidl
|
b5854ecd71e9da70d5391ad67ab30a8985a61ee8
|
[
"MIT"
] | null | null | null |
import pytest
from skidl import *
from .setup_teardown import *
def test_iters_1():
"""Test bus iterator."""
b_size = 4
b = Bus("chplx", b_size)
for hi in b:
for lo in b:
if hi != lo:
led = Part("device", "LED")
hi += led["A"]
lo += led["K"]
for l in b:
assert len(l) == 2 * (b_size - 1)
def test_iters_2():
"""Test pin iterator."""
q = Part("device", "Q_NPN_CEB")
s = 0
for p1 in q:
for p2 in q:
if p1 != p2:
s += 1
assert s == len(q) * (len(q) - 1)
def test_iters_3():
"""Test net iterator."""
b = Net()
for hi in b:
for lo in b:
if hi != lo:
led = Part("device", "LED")
hi += led["A"]
lo += led["K"]
for l in b:
assert len(l) == 0
| 20.136364
| 43
| 0.419865
|
1b498ab44512aeda880e2eddbdb83852176e7dda
| 4,556
|
py
|
Python
|
plugins/elasticsearch/unit_test/test_search_documents.py
|
rapid7/insightconnect-plugins
|
b4732be7716bc949264c09526de6b9afbf9f705f
|
[
"MIT"
] | 46
|
2019-06-05T20:47:58.000Z
|
2022-03-29T10:18:01.000Z
|
plugins/elasticsearch/unit_test/test_search_documents.py
|
rapid7/insightconnect-plugins
|
b4732be7716bc949264c09526de6b9afbf9f705f
|
[
"MIT"
] | 386
|
2019-06-07T20:20:39.000Z
|
2022-03-30T17:35:01.000Z
|
plugins/elasticsearch/unit_test/test_search_documents.py
|
rapid7/insightconnect-plugins
|
b4732be7716bc949264c09526de6b9afbf9f705f
|
[
"MIT"
] | 43
|
2019-07-09T14:13:58.000Z
|
2022-03-28T12:04:46.000Z
|
import sys
import os
from unittest.mock import patch
from komand_elasticsearch.actions import SearchDocuments
from insightconnect_plugin_runtime.exceptions import PluginException
from komand_elasticsearch.actions.search_documents.schema import Input, Output
from unit_test.util import Util
from unittest import TestCase
sys.path.append(os.path.abspath("../"))
class TestSearchDocuments(TestCase):
expected = {
Output.TOOK: 2,
Output.TIMED_OUT: False,
Output.SHARDS: {"total": 1, "successful": 1, "skipped": 0, "failed": 0},
Output.HITS: {
"total": {"value": 2},
"max_score": 1.0,
"hits": [
{
"_index": "test-index",
"_type": "_doc",
"_id": "VWx5O3oBrBTgS4Hhf6Hp",
"_score": 1.0,
"_source": {"id": 1, "message": "Some message"},
},
{
"_index": "test-index",
"_type": "_doc",
"_id": "Vmx6O3oBrBTgS4HhWKFJ",
"_score": 1.0,
"_source": {"id": 1, "message": "Some message"},
},
],
},
}
expected_with_route = {
Output.TOOK: 2,
Output.TIMED_OUT: False,
Output.SHARDS: {"total": 1, "successful": 1, "skipped": 0, "failed": 0},
Output.HITS: {
"total": {"value": 2},
"max_score": 1.0,
"hits": [
{
"_index": "test-index",
"_type": "_doc",
"_id": "VWx5O3oBrBTgS4Hhf6Hp",
"_score": 1.0,
"_routing": "test-route",
"_source": {"id": 1, "message": "Some message"},
},
],
},
}
@classmethod
@patch("requests.request", side_effect=Util.mocked_requests_get)
def setUpClass(cls, mock_request) -> None:
cls.action = Util.default_connector(SearchDocuments())
@patch("requests.request", side_effect=Util.mocked_requests_get)
def test_search_documents(self, mock_request):
actual = self.action.run(
{Input.INDEX: "search", Input.QUERY: {"query": {"match_all": {}}}, Input.ROUTING: None}
)
self.assertEqual(actual, self.expected)
@patch("requests.request", side_effect=Util.mocked_requests_get)
def test_search_documents_with_route(self, mock_request):
actual = self.action.run(
{Input.INDEX: "search-with-route", Input.QUERY: {"query": {"match_all": {}}}, Input.ROUTING: "test-route"}
)
self.assertEqual(actual, self.expected_with_route)
@patch("requests.request", side_effect=Util.mocked_requests_get)
def test_search_documents_without_route(self, mock_request):
actual = self.action.run({Input.INDEX: "search-without-route", Input.QUERY: {"query": {"match_all": {}}}})
self.assertEqual(actual, self.expected)
@patch("requests.request", side_effect=Util.mocked_requests_get)
def test_search_documents_with_route_none(self, mock_request):
actual = self.action.run(
{Input.INDEX: "search-without-route", Input.QUERY: {"query": {"match_all": {}}}, Input.ROUTING: None}
)
self.assertEqual(actual, self.expected)
@patch("requests.request", side_effect=Util.mocked_requests_get)
def test_search_documents_empty(self, mock_request):
actual = self.action.run({Input.INDEX: "empty", Input.QUERY: {"query": {"match_all": {}}}, Input.ROUTING: None})
self.assertEqual(
actual,
{
"hits": {"hits": [], "max_score": 0, "total": {"value": 0}},
"shards": {},
"timed_out": "false",
"took": 0,
},
)
@patch("requests.request", side_effect=Util.mocked_requests_get)
def test_search_documents_wrong_object(self, mock_request):
actual = self.action.run(
{Input.INDEX: "wrong_object", Input.QUERY: {"query": {"match_all": {}}}, Input.ROUTING: None}
)
self.assertEqual(
{
"hits": {
"hits": [{"_score": 0}, {".name": 1.0, "_score": 0, "name": 1}],
"max_score": 0,
"total": {"value": 0},
},
"shards": {},
"timed_out": "false",
"took": 0,
},
actual,
)
| 36.741935
| 120
| 0.530509
|
7084b0161e3e90da86988ef6a4f9bd895da55c97
| 187
|
py
|
Python
|
tests/utils.py
|
CuriBio/curibio.sdk
|
09384eac0defe1ec835d2e6a3f926eaa72b6a923
|
[
"MIT"
] | null | null | null |
tests/utils.py
|
CuriBio/curibio.sdk
|
09384eac0defe1ec835d2e6a3f926eaa72b6a923
|
[
"MIT"
] | 106
|
2020-05-29T14:21:10.000Z
|
2021-11-10T00:44:00.000Z
|
tests/utils.py
|
CuriBio/curibio.sdk
|
09384eac0defe1ec835d2e6a3f926eaa72b6a923
|
[
"MIT"
] | 1
|
2021-07-01T16:26:49.000Z
|
2021-07-01T16:26:49.000Z
|
# -*- coding: utf-8 -*-
def get_cell_value(sheet, zero_based_row, zero_based_col):
return sheet.cell(row=zero_based_row + 1, column=zero_based_col + 1).value
| 31.166667
| 78
| 0.68984
|
fddf54f3c86c304fcd99a5912ec38c988e8023a0
| 1,175
|
py
|
Python
|
rwio/reader.py
|
viciouspetal/time-machine-py
|
906b5cba479e2d80df8108b14dcbb1a95b5458bd
|
[
"Apache-2.0"
] | null | null | null |
rwio/reader.py
|
viciouspetal/time-machine-py
|
906b5cba479e2d80df8108b14dcbb1a95b5458bd
|
[
"Apache-2.0"
] | null | null | null |
rwio/reader.py
|
viciouspetal/time-machine-py
|
906b5cba479e2d80df8108b14dcbb1a95b5458bd
|
[
"Apache-2.0"
] | null | null | null |
class Reader:
def __init__(self, filename):
self.filename = filename
self.f = None
try:
self.f = open(self.filename, 'rt')
except FileNotFoundError as err:
print("Could not locate" + self.filename + ". Ensure that it exists.")
def close(self):
if self.f is not None:
self.f.close()
def read(self):
lines = []
if self.f is not None:
try:
lines = self.f.readlines()
lines = self.remove_empty_lines_from_list(lines)
except IOError as err:
print("Could read from " + self.filename + ".")
print("Ensure that it exists and is not opened by other programs.")
return lines
def print_content(self):
lines = self.read()
for line in lines:
print(line)
self.close()
def remove_empty_lines_from_list(self, source_list):
target_list = []
for item in source_list:
item = item.replace("\n", "")
if item is not None and len(item) > 0:
target_list.append(item)
return target_list
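# --- Hedged usage sketch (not part of the original module) ---
# Minimal illustration of the Reader API above; the file name is hypothetical.
# A missing file only prints a warning, after which read() returns [].
if __name__ == "__main__":
    reader = Reader("example.txt")   # hypothetical path
    for line in reader.read():       # read() drops empty lines
        print(line)
    reader.close()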
| 30.128205
| 83
| 0.538723
|
5732171973a3b1b0d612685d39683323ab6126b3
| 4,899
|
py
|
Python
|
facesdk/retinaface/models/retinaface.py
|
foamliu/FaceSDK
|
af83d440eafe8523e4323df7c4c8dc5370fb3d70
|
[
"MIT"
] | null | null | null |
facesdk/retinaface/models/retinaface.py
|
foamliu/FaceSDK
|
af83d440eafe8523e4323df7c4c8dc5370fb3d70
|
[
"MIT"
] | null | null | null |
facesdk/retinaface/models/retinaface.py
|
foamliu/FaceSDK
|
af83d440eafe8523e4323df7c4c8dc5370fb3d70
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn as nn
import torch.nn.functional as F
import torchvision.models._utils as _utils
from facesdk.retinaface.models.net import FPN as FPN
from facesdk.retinaface.models.net import MobileNetV1 as MobileNetV1
from facesdk.retinaface.models.net import SSH as SSH
class ClassHead(nn.Module):
def __init__(self, inchannels=512, num_anchors=3):
super(ClassHead, self).__init__()
self.num_anchors = num_anchors
self.conv1x1 = nn.Conv2d(inchannels, self.num_anchors * 2, kernel_size=(1, 1), stride=1, padding=0)
def forward(self, x):
out = self.conv1x1(x)
out = out.permute(0, 2, 3, 1).contiguous()
return out.view(out.shape[0], -1, 2)
class BboxHead(nn.Module):
def __init__(self, inchannels=512, num_anchors=3):
super(BboxHead, self).__init__()
self.conv1x1 = nn.Conv2d(inchannels, num_anchors * 4, kernel_size=(1, 1), stride=1, padding=0)
def forward(self, x):
out = self.conv1x1(x)
out = out.permute(0, 2, 3, 1).contiguous()
return out.view(out.shape[0], -1, 4)
class LandmarkHead(nn.Module):
def __init__(self, inchannels=512, num_anchors=3):
super(LandmarkHead, self).__init__()
self.conv1x1 = nn.Conv2d(inchannels, num_anchors * 10, kernel_size=(1, 1), stride=1, padding=0)
def forward(self, x):
out = self.conv1x1(x)
out = out.permute(0, 2, 3, 1).contiguous()
return out.view(out.shape[0], -1, 10)
class RetinaFace(nn.Module):
def __init__(self, cfg=None, phase='train'):
"""
:param cfg: Network related settings.
:param phase: train or test.
"""
super(RetinaFace, self).__init__()
self.phase = phase
backbone = MobileNetV1()
# if cfg['name'] == 'mobilenet0.25':
# backbone = MobileNetV1()
# if cfg['pretrain']:
# checkpoint = torch.load("./weights/mobilenetV1X0.25_pretrain.tar", map_location=torch.device('cpu'))
# from collections import OrderedDict
# new_state_dict = OrderedDict()
# for k, v in checkpoint['state_dict'].items():
# name = k[7:] # remove module.
# new_state_dict[name] = v
# # load params
# backbone.load_state_dict(new_state_dict)
# elif cfg['name'] == 'Resnet50':
# import torchvision.models as models
# backbone = models.resnet50(pretrained=cfg['pretrain'])
self.body = _utils.IntermediateLayerGetter(backbone, cfg['return_layers'])
in_channels_stage2 = cfg['in_channel']
in_channels_list = [
in_channels_stage2 * 2,
in_channels_stage2 * 4,
in_channels_stage2 * 8,
]
out_channels = cfg['out_channel']
self.fpn = FPN(in_channels_list, out_channels)
self.ssh1 = SSH(out_channels, out_channels)
self.ssh2 = SSH(out_channels, out_channels)
self.ssh3 = SSH(out_channels, out_channels)
self.ClassHead = self._make_class_head(fpn_num=3, inchannels=cfg['out_channel'])
self.BboxHead = self._make_bbox_head(fpn_num=3, inchannels=cfg['out_channel'])
self.LandmarkHead = self._make_landmark_head(fpn_num=3, inchannels=cfg['out_channel'])
def _make_class_head(self, fpn_num=3, inchannels=64, anchor_num=2):
classhead = nn.ModuleList()
for i in range(fpn_num):
classhead.append(ClassHead(inchannels, anchor_num))
return classhead
def _make_bbox_head(self, fpn_num=3, inchannels=64, anchor_num=2):
bboxhead = nn.ModuleList()
for i in range(fpn_num):
bboxhead.append(BboxHead(inchannels, anchor_num))
return bboxhead
def _make_landmark_head(self, fpn_num=3, inchannels=64, anchor_num=2):
landmarkhead = nn.ModuleList()
for i in range(fpn_num):
landmarkhead.append(LandmarkHead(inchannels, anchor_num))
return landmarkhead
def forward(self, inputs):
out = self.body(inputs)
# FPN
fpn = self.fpn(out)
# SSH
feature1 = self.ssh1(fpn[0])
feature2 = self.ssh2(fpn[1])
feature3 = self.ssh3(fpn[2])
features = [feature1, feature2, feature3]
bbox_regressions = torch.cat([self.BboxHead[i](feature) for i, feature in enumerate(features)], dim=1)
classifications = torch.cat([self.ClassHead[i](feature) for i, feature in enumerate(features)], dim=1)
ldm_regressions = torch.cat([self.LandmarkHead[i](feature) for i, feature in enumerate(features)], dim=1)
if self.phase == 'train':
output = (bbox_regressions, classifications, ldm_regressions)
else:
output = (bbox_regressions, F.softmax(classifications, dim=-1), ldm_regressions)
return output
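# --- Hedged usage sketch (not part of the original module) ---
# Minimal illustration of constructing and running the model above. The cfg
# values mirror the common mobilenet0.25 configuration and are assumptions;
# only the keys read by __init__ ('return_layers', 'in_channel', 'out_channel')
# are required.
if __name__ == "__main__":
    cfg_mnet = {
        'return_layers': {'stage1': 1, 'stage2': 2, 'stage3': 3},
        'in_channel': 32,
        'out_channel': 64,
    }
    net = RetinaFace(cfg=cfg_mnet, phase='test')
    net.eval()
    dummy = torch.randn(1, 3, 640, 640)   # one 640x640 RGB image
    with torch.no_grad():
        boxes, scores, landmarks = net(dummy)
    print(boxes.shape, scores.shape, landmarks.shape)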
| 38.273438
| 118
| 0.62972
|
22617986b82f8c89c96bcda10438ae78e3af19e5
| 394
|
py
|
Python
|
terrascript/resource/pagerduty.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 507
|
2017-07-26T02:58:38.000Z
|
2022-01-21T12:35:13.000Z
|
terrascript/resource/pagerduty.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 135
|
2017-07-20T12:01:59.000Z
|
2021-10-04T22:25:40.000Z
|
terrascript/resource/pagerduty.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 81
|
2018-02-20T17:55:28.000Z
|
2022-01-31T07:08:40.000Z
|
# terrascript/resource/pagerduty.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:24:19 UTC)
#
# For imports without namespace, e.g.
#
# >>> import terrascript.resource.pagerduty
#
# instead of
#
# >>> import terrascript.resource.PagerDuty.pagerduty
#
# This is only available for 'official' and 'partner' providers.
from terrascript.resource.PagerDuty.pagerduty import *
| 26.266667
| 73
| 0.753807
|
e7fcd0146935fb8e42a6c7b2cd9b0d4691df9360
| 135
|
py
|
Python
|
urlabridge/app/urls.py
|
kaitlinlogie/url-abridge
|
dc50f1862de3303edeb4e90d3b75f336e808117b
|
[
"MIT"
] | null | null | null |
urlabridge/app/urls.py
|
kaitlinlogie/url-abridge
|
dc50f1862de3303edeb4e90d3b75f336e808117b
|
[
"MIT"
] | null | null | null |
urlabridge/app/urls.py
|
kaitlinlogie/url-abridge
|
dc50f1862de3303edeb4e90d3b75f336e808117b
|
[
"MIT"
] | null | null | null |
from django.urls import path
from . import views
urlpatterns = [
path('<str:path>/', views.page_redirect, name='page_redirect')
]
| 19.285714
| 66
| 0.711111
|
6222cd4912205c27e4e6687fad4047a0180b53a2
| 5,787
|
py
|
Python
|
haystack/views.py
|
gthb/django-haystack
|
41814ab4c2b2942f8229658a76749a1fe2889ef8
|
[
"BSD-3-Clause"
] | null | null | null |
haystack/views.py
|
gthb/django-haystack
|
41814ab4c2b2942f8229658a76749a1fe2889ef8
|
[
"BSD-3-Clause"
] | null | null | null |
haystack/views.py
|
gthb/django-haystack
|
41814ab4c2b2942f8229658a76749a1fe2889ef8
|
[
"BSD-3-Clause"
] | null | null | null |
from django.conf import settings
from django.core.paginator import Paginator, InvalidPage
from django.http import Http404
from django.shortcuts import render_to_response
from django.template import RequestContext
from haystack.forms import ModelSearchForm
from haystack.query import EmptySearchQuerySet
RESULTS_PER_PAGE = getattr(settings, 'HAYSTACK_SEARCH_RESULTS_PER_PAGE', 20)
class SearchView(object):
template = 'search/search.html'
extra_context = {}
query = ''
results = EmptySearchQuerySet()
request = None
form = None
def __init__(self, template=None, load_all=True, form_class=ModelSearchForm, searchqueryset=None, context_class=RequestContext):
self.load_all = load_all
self.form_class = form_class
self.context_class = context_class
self.searchqueryset = searchqueryset
if template:
self.template = template
def __name__(self):
return "SearchView"
def __call__(self, request):
"""
Generates the actual response to the search.
Relies on internal, overridable methods to construct the response.
"""
self.request = request
self.form = self.build_form()
self.query = self.get_query()
self.results = self.get_results()
return self.create_response()
def build_form(self, form_kwargs=None):
"""
Instantiates the form the class should use to process the search query.
"""
data = None
kwargs = {
'load_all': self.load_all,
}
if form_kwargs:
kwargs.update(form_kwargs)
if len(self.request.GET):
data = self.request.GET
if self.searchqueryset is not None:
kwargs['searchqueryset'] = self.searchqueryset
return self.form_class(data, **kwargs)
def get_query(self):
"""
Returns the query provided by the user.
Returns an empty string if the query is invalid.
"""
if self.form.is_valid():
return self.form.cleaned_data['q']
return ''
def get_results(self):
"""
Fetches the results via the form.
Returns an empty list if there's no query to search with.
"""
if self.query:
return self.form.search()
return EmptySearchQuerySet()
def build_page(self):
"""
Paginates the results appropriately.
In case someone does not want to use Django's built-in pagination, it
should be a simple matter to override this method to do what they would
like.
"""
paginator = Paginator(self.results, RESULTS_PER_PAGE)
try:
page = paginator.page(self.request.GET.get('page', 1))
except InvalidPage:
raise Http404
return (paginator, page)
def extra_context(self):
"""
Allows the addition of more context variables as needed.
Must return a dictionary.
"""
return {}
def create_response(self):
"""
Generates the actual HttpResponse to send back to the user.
"""
(paginator, page) = self.build_page()
context = {
'query': self.query,
'form': self.form,
'page': page,
'paginator': paginator,
}
context.update(self.extra_context())
return render_to_response(self.template, context, context_instance=self.context_class(self.request))
def search_view_factory(view_class=SearchView, *args, **kwargs):
def search_view(request):
return view_class(*args, **kwargs)(request)
return search_view
class FacetedSearchView(SearchView):
def __name__(self):
return "FacetedSearchView"
def extra_context(self):
extra = super(FacetedSearchView, self).extra_context()
extra['facets'] = self.results.facet_counts()
return extra
def basic_search(request, template='search/search.html', load_all=True, form_class=ModelSearchForm, searchqueryset=None, context_class=RequestContext, extra_context=None):
"""
    A more traditional view that also demonstrates an alternative
    way to use Haystack.
    Useful as an example for basing heavily custom views off of.
Also has the benefit of thread-safety, which the ``SearchView`` class may
not be.
Template:: ``search/search.html``
Context::
* form
An instance of the ``form_class``. (default: ``ModelSearchForm``)
* page
The current page of search results.
* paginator
A paginator instance for the results.
* query
The query received by the form.
"""
query = ''
results = EmptySearchQuerySet()
if request.GET.get('q'):
form = form_class(request.GET, searchqueryset=searchqueryset, load_all=load_all)
if form.is_valid():
query = form.cleaned_data['q']
results = form.search()
else:
form = form_class(searchqueryset=searchqueryset, load_all=load_all)
paginator = Paginator(results, RESULTS_PER_PAGE)
try:
page = paginator.page(int(request.GET.get('page', 1)))
except InvalidPage:
raise Http404("No such page of results!")
context = {
'form': form,
'page': page,
'paginator': paginator,
'query': query,
}
if extra_context:
context.update(extra_context)
return render_to_response(template, context, context_instance=context_class(request))
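# --- Hedged usage sketch (not part of the original module) ---
# Minimal illustration of wiring the views above into a URLconf. The import
# path and URL names are hypothetical, and the old-style ``patterns`` syntax
# matches the Django era this module targets (render_to_response et al.).
#
#     from django.conf.urls.defaults import patterns, url
#     from haystack.views import SearchView, search_view_factory, basic_search
#
#     urlpatterns = patterns('',
#         url(r'^search/$', search_view_factory(SearchView, load_all=False),
#             name='haystack_search'),
#         url(r'^basic/$', basic_search, name='haystack_basic_search'),
#     )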
| 29.52551
| 171
| 0.609988
|
67b69a42fa8706284d84c7017eef3eb3b0581239
| 459
|
py
|
Python
|
sitewebapp/migrations/0013_auto_20210130_0409.py
|
deucaleon18/debsoc-nitdgp-website
|
41bd6ade7f4af143ef34aff01848f830cc533add
|
[
"MIT"
] | 2
|
2020-12-05T05:34:56.000Z
|
2020-12-09T10:27:43.000Z
|
sitewebapp/migrations/0013_auto_20210130_0409.py
|
deucaleon18/debsoc-nitdgp-website
|
41bd6ade7f4af143ef34aff01848f830cc533add
|
[
"MIT"
] | 3
|
2021-06-28T16:47:23.000Z
|
2021-06-28T16:48:51.000Z
|
sitewebapp/migrations/0013_auto_20210130_0409.py
|
deucaleon18/debsoc-nitdgp-website
|
41bd6ade7f4af143ef34aff01848f830cc533add
|
[
"MIT"
] | 9
|
2021-01-29T17:06:30.000Z
|
2021-08-21T18:23:26.000Z
|
# Generated by Django 2.2.15 on 2021-01-29 22:39
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('sitewebapp', '0012_auto_20210130_0352'),
]
operations = [
migrations.AlterField(
model_name='auditionrounds',
name='candidate',
field=models.ManyToManyField(blank=True, related_name='candidates', to='sitewebapp.Candidates'),
),
]
| 24.157895
| 108
| 0.638344
|
3da526205a2fbd524c296cf02a37c719b8119863
| 1,186
|
py
|
Python
|
postgresql/importcsv.py
|
williamszostak/discogs-xml2db
|
b637a3944320c32e3d44328a6f00420ffc1f8c17
|
[
"Apache-2.0"
] | 133
|
2015-01-23T15:57:08.000Z
|
2022-03-13T13:38:58.000Z
|
postgresql/importcsv.py
|
williamszostak/discogs-xml2db
|
b637a3944320c32e3d44328a6f00420ffc1f8c17
|
[
"Apache-2.0"
] | 82
|
2015-01-06T13:52:40.000Z
|
2021-11-16T12:33:34.000Z
|
postgresql/importcsv.py
|
williamszostak/discogs-xml2db
|
b637a3944320c32e3d44328a6f00420ffc1f8c17
|
[
"Apache-2.0"
] | 64
|
2015-03-20T22:16:15.000Z
|
2021-11-06T17:15:37.000Z
|
#!/usr/bin/env python
import bz2
import sys
import os
import pathlib
from psycopg2 import sql
from dbconfig import connect_db, Config
# since we run this as a script, we need to add the parent folder
# so we can import discogsxml2db from it
parent_path = str(pathlib.Path(__file__).absolute().parent.parent)
sys.path.insert(1, parent_path)
from discogsxml2db.exporter import csv_headers # noqa
def load_csv(filename, db):
print("Importing data from {}".format(filename))
base, fname = os.path.split(filename)
table, ext = fname.split('.', 1)
if ext.startswith('csv'):
q = sql.SQL("COPY {} ({}) FROM STDIN WITH CSV HEADER").format(
sql.Identifier(table),
sql.SQL(', ').join(map(sql.Identifier, csv_headers[table])))
if ext == 'csv':
fp = open(filename, encoding='utf-8')
elif ext == 'csv.bz2':
fp = bz2.BZ2File(filename)
cursor = db.cursor()
cursor.copy_expert(q, fp)
db.commit()
root = os.path.realpath(os.path.dirname(__file__))
config = Config(os.path.join(root, 'postgresql.conf'))
db = connect_db(config)
for filename in sys.argv[1:]:
load_csv(os.path.abspath(filename), db)
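# --- Hedged usage note (not part of the original script) ---
# Each argument must be a CSV (optionally bz2-compressed) export whose base
# name matches the destination table, e.g. "artist.csv" or "release.csv.bz2";
# the file is streamed into PostgreSQL via COPY using the column order from
# discogsxml2db.exporter.csv_headers. Typical invocation (paths hypothetical):
#
#     python importcsv.py /data/artist.csv /data/release.csv.bz2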
| 27.581395
| 76
| 0.668634
|
01dfb038b664c5762aea2cb78924b0f788b6953b
| 1,604
|
py
|
Python
|
dotviewer/graphpage.py
|
m4sterchain/mesapy
|
ed546d59a21b36feb93e2309d5c6b75aa0ad95c9
|
[
"Apache-2.0",
"OpenSSL"
] | 381
|
2018-08-18T03:37:22.000Z
|
2022-02-06T23:57:36.000Z
|
dotviewer/graphpage.py
|
m4sterchain/mesapy
|
ed546d59a21b36feb93e2309d5c6b75aa0ad95c9
|
[
"Apache-2.0",
"OpenSSL"
] | 16
|
2018-09-22T18:12:47.000Z
|
2022-02-22T20:03:59.000Z
|
dotviewer/graphpage.py
|
m4sterchain/mesapy
|
ed546d59a21b36feb93e2309d5c6b75aa0ad95c9
|
[
"Apache-2.0",
"OpenSSL"
] | 30
|
2018-08-20T03:16:34.000Z
|
2022-01-12T17:39:22.000Z
|
class GraphPage(object):
"""Base class for the client-side content of one of the 'pages'
(one graph) sent over to and displayed by the external process.
"""
save_tmp_file = None
def __init__(self, *args, **kwds):
self.args = args
self.kwds = kwds
def content(self):
"""Compute the content of the page.
This doesn't modify the page in place; it returns a new GraphPage.
"""
if hasattr(self, 'source'):
return self
else:
new = self.__class__()
new.source = '' # '''dot source'''
new.links = {} # {'word': 'statusbar text'}
new.compute(*self.args, **self.kwds) # defined in subclasses
return new
def followlink(self, word):
raise KeyError
def display(self):
"Display a graph page."
import graphclient, msgstruct
try:
graphclient.display_page(self, save_tmp_file=self.save_tmp_file)
except msgstruct.RemoteError, e:
import sys
print >> sys.stderr, "Exception in the graph viewer:", str(e)
def display_background(self):
"Display a graph page in a background thread."
try:
import thread
thread.start_new_thread(self.display, ())
except ImportError:
self.display()
class DotFileGraphPage(GraphPage):
def compute(self, dotfile):
import codecs
from strunicode import RAW_ENCODING
f = codecs.open(dotfile, 'r', RAW_ENCODING)
self.source = f.read()
f.close()
| 30.846154
| 76
| 0.581671
|
a866f3a1f4ffb1458ae2a7368fc215ef629682a1
| 11,405
|
py
|
Python
|
bikeshed/biblio.py
|
dontcallmedom/bikeshed
|
ae3b4eaa941e5ea380931f30ba9a0086026a44aa
|
[
"CC0-1.0"
] | 2
|
2016-07-21T16:44:51.000Z
|
2019-04-29T15:04:51.000Z
|
bikeshed/biblio.py
|
dontcallmedom/bikeshed
|
ae3b4eaa941e5ea380931f30ba9a0086026a44aa
|
[
"CC0-1.0"
] | null | null | null |
bikeshed/biblio.py
|
dontcallmedom/bikeshed
|
ae3b4eaa941e5ea380931f30ba9a0086026a44aa
|
[
"CC0-1.0"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import division, unicode_literals
import re
import copy
from collections import defaultdict, deque
from .messages import *
from .htmlhelpers import *
class BiblioEntry(object):
def __init__(self, preferredURL="dated", **kwargs):
self.linkText = None
self.title = None
self.authors = []
self.etAl = False
self.status = None
self.date = None
self.dated_url = None
self.current_url = None
self.url = None
self.other = None
for key, val in kwargs.items():
if key == "authors":
setattr(self, key, val)
elif key == "etAl":
self.etAl = val
else:
setattr(self, key, val)
if preferredURL == "dated":
self.url = self.dated_url or self.current_url
else:
self.url = self.current_url or self.dated_url
def __str__(self):
str = ""
etAl = self.etAl
if len(self.authors) == 1:
str += self.authors[0]
elif len(self.authors) < 4:
str += "; ".join(self.authors)
elif len(self.authors) != 0:
str += self.authors[0]
etAl = True
if str != "":
str += "; et al. " if etAl else ". "
if self.url:
str += "<a href='{0}'>{1}</a>. ".format(self.url, self.title)
else:
str += "{0}. ".format(self.title)
if self.date:
str += self.date + ". "
if self.status:
str += self.status + ". "
if self.other:
str += self.other + " "
if self.url:
str += "URL: <a href='{0}'>{0}</a>".format(self.url)
return str
def toHTML(self):
ret = []
str = ""
etAl = self.etAl
if len(self.authors) == 1:
str += self.authors[0]
elif len(self.authors) < 4:
str += "; ".join(self.authors)
elif len(self.authors) != 0:
str += self.authors[0]
etAl = True
if str != "":
str += "; et al. " if etAl else ". "
ret.append(str)
if self.url:
ret.append(E.a({"href":self.url}, self.title))
ret.append(". ")
else:
ret.append(self.title + ". ")
str = ""
if self.date:
str += self.date + ". "
if self.status:
str += self.status + ". "
if self.other:
str += self.other + " "
ret.append(str)
if self.url:
ret.append("URL: ")
ret.append(E.a({"href":self.url}, self.url))
return ret
def valid(self):
if self.title is None:
return False
return True
class SpecBasedBiblioEntry(BiblioEntry):
'''
Generates a "fake" biblio entry from a spec reference,
for when we don't have "real" bibliography data for a reference.
'''
def __init__(self, spec, preferredURL="dated"):
self.spec = spec
self.linkText = spec['vshortname']
self._valid = True
if preferredURL == "dated" and spec.get("TR", None) is not None:
self.url = spec['TR']
elif spec.get('ED', None) is not None:
self.url = spec['ED']
elif spec.get('TR', None) is not None:
self.url = spec['TR']
else:
self._valid = False
def valid(self):
return self._valid
def toHTML(self):
return [
self.spec['description'],
" URL: ",
E.a({"href":self.url}, self.url)
]
class StringBiblioEntry(BiblioEntry):
'''
Generates a barebones biblio entry from a preformatted biblio string.
This only exists because SpecRef still has a few of them;
don't use it on purpose for real things in the future.
'''
def __init__(self, data, linkText, **kwargs):
self.data = data
self.linkText = linkText
def valid(self):
return True
def toHTML(self):
return parseHTML(self.data)
def __str__(self):
return self.data
def processReferBiblioFile(lines, storage, order):
singularReferCodes = {
"U": "dated_url",
"T": "title",
"D": "date",
"S": "status",
"L": "linkText",
"O": "other",
}
pluralReferCodes = {
"A": "authors",
"Q": "authors",
}
unusedReferCodes = set("BCIJNPRVX")
biblio = None
for i,line in enumerate(lines):
line = line.strip()
if line == "":
# Empty line
if biblio is not None:
storage[biblio['linkText'].lower()].append(biblio)
biblio = None
continue
elif line.startswith("#") or line.startswith("%#"):
# Comment
continue
else:
if biblio is None:
biblio = defaultdict(list)
biblio['order'] = order
biblio['biblioFormat'] = "dict"
match = re.match(r"%(\w)\s+(.*)", line)
if match:
letter, value = match.groups()
else:
die("Biblio line in unexpected format:\n{0}", line)
continue
if letter in singularReferCodes:
biblio[singularReferCodes[letter]] = value
elif letter in pluralReferCodes:
biblio[pluralReferCodes[letter]].append(value)
elif letter in unusedReferCodes:
pass
else:
die("Unknown line type ")
if biblio is not None:
storage[biblio['linkText'].lower()] = biblio
return storage
def processSpecrefBiblioFile(text, storage, order):
'''
A SpecRef file is a JSON object, where keys are ids
    and values are either <alias>, <legacyRef>, or <ref>.
<alias>: {
*aliasOf: <id>,
*id: <id>
}
<legacyRef>: <string>
<ref>: {
id: <id>,
authors: [<string>],
etAl: <bool>,
href: <url>,
*title: <string>,
date: <date>,
deliveredBy: [<wg>],
status: <string>,
publisher: <string>,
obsoletes: [<id>],
obsoletedBy: [<id>],
versionOf: <id>,
versions: [<id>],
edDraft: <url>
}
<date>: /^([1-3]?\d\s)?((?:January|February|March|April|May|June|July|August|September|October|November|December)\s)?\d+$/
<wg>: {*url:<url>, *shortname:<string>}
'''
import json
try:
datas = json.loads(text)
except Exception, e:
die("Couldn't read the local JSON file:\n{0}", str(e))
return storage
# JSON field name: BiblioEntry name
fields = {
"authors": "authors",
"etAl": "etAl",
"href": "dated_url",
"edDraft": "current_url",
"title": "title",
"date": "date",
"status": "status"
}
# Required BiblioEntry fields
requiredFields = ["url", "title"]
aliases = {}
for biblioKey, data in datas.items():
biblio = {"linkText": biblioKey, "order": order}
if isinstance(data, basestring):
# Handle <legacyRef>
biblio['biblioFormat'] = "string"
biblio['data'] = data.replace("\n", " ")
elif "aliasOf" in data:
# Handle <alias>
if biblioKey.lower() == data["aliasOf"].lower():
# SpecRef uses aliases to handle capitalization differences,
# which I don't care about.
continue
biblio["biblioFormat"] = "alias"
biblio["aliasOf"] = data["aliasOf"].lower()
else:
# Handle <ref>
biblio['biblioFormat'] = "dict"
for jsonField, biblioField in fields.items():
if jsonField in data:
biblio[biblioField] = data[jsonField]
if "versionOf" in data:
# "versionOf" entries are all dated urls,
# so you want the href *all* the time.
biblio["current_url"] = data["href"]
storage[biblioKey.lower()].append(biblio)
return storage
def loadBiblioDataFile(lines, storage):
try:
while True:
fullKey = lines.next()
prefix, key = fullKey[0], fullKey[2:].strip()
if prefix == "d":
b = {
"linkText": lines.next(),
"date": lines.next(),
"status": lines.next(),
"title": lines.next(),
"dated_url": lines.next(),
"current_url": lines.next(),
"other": lines.next(),
"etAl": lines.next() != "\n",
"order": 3,
"biblioFormat": "dict",
"authors": []
}
while True:
line = lines.next()
if line == b"-\n":
break
b['authors'].append(line)
elif prefix == "s":
b = {
"linkText": lines.next(),
"data": lines.next(),
"biblioFormat": "string",
"order": 3
}
line = lines.next() # Eat the -
elif prefix == "a":
b = {
"linkText": lines.next(),
"aliasOf": lines.next(),
"biblioFormat": "alias",
"order": 3
}
line = lines.next() # Eat the -
else:
die("Unknown biblio prefix '{0}' on key '{1}'", prefix, fullKey)
continue
storage[key].append(b)
except StopIteration:
pass
def levenshtein(a,b):
"Calculates the Levenshtein distance between a and b."
n, m = len(a), len(b)
if n > m:
# Make sure n <= m, to use O(min(n,m)) space
a,b = b,a
n,m = m,n
current = range(n+1)
for i in range(1,m+1):
previous, current = current, [i]+[0]*n
for j in range(1,n+1):
add, delete = previous[j]+1, current[j-1]+1
change = previous[j-1]
if a[j-1] != b[i-1]:
change = change + 1
current[j] = min(add, delete, change)
return current[n]
def findCloseBiblios(biblioKeys, target, n=5):
'''
Finds biblio entries close to the target.
Returns all biblios with target as the substring,
plus the 5 closest ones per levenshtein distance.
'''
target = target.lower()
names = []
superStrings = []
def addName(name, distance):
tuple = (name, distance)
if len(names) < n:
names.append(tuple)
names.sort(key=lambda x:x[1])
elif distance >= names[-1][1]:
pass
else:
for i, entry in enumerate(names):
if distance < entry[1]:
names.insert(i, tuple)
names.pop()
break
return names
for name in biblioKeys:
if target in name:
superStrings.append(name)
else:
addName(name, levenshtein(name, target))
return sorted(s.strip() for s in superStrings) + [n.strip() for n,d in names]
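# --- Hedged usage sketch (not part of the original module) ---
# Minimal illustration of the two helpers above; the biblio keys are made up.
if __name__ == "__main__":
    print(levenshtein("kitten", "sitting"))   # 3
    print(findCloseBiblios(["css-flexbox-1", "css-grid-1", "svg2"], "CSS-Grid"))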
| 29.24359
| 126
| 0.485401
|
98d2a716f36f69782e6d4ff042dc258619f7a21f
| 528
|
py
|
Python
|
setup.py
|
marksteve/flask-redisconfig
|
2ccbd63d83df3582df574fcfaaaf8804a001a980
|
[
"MIT"
] | 1
|
2015-05-16T16:21:00.000Z
|
2015-05-16T16:21:00.000Z
|
setup.py
|
marksteve/flask-redisconfig
|
2ccbd63d83df3582df574fcfaaaf8804a001a980
|
[
"MIT"
] | null | null | null |
setup.py
|
marksteve/flask-redisconfig
|
2ccbd63d83df3582df574fcfaaaf8804a001a980
|
[
"MIT"
] | null | null | null |
from setuptools import setup
setup(
name='Flask-RedisConfig',
version='0.2.0',
url='https://github.com/marksteve/flask-redisconfig',
license='MIT',
author='Mark Steve Samson',
author_email='hello@marksteve.com',
description='Redis-backed config for Flask applications',
long_description=open('README.rst', 'r').read(),
py_modules=['flask_redisconfig'],
zip_safe=False,
platforms='any',
install_requires=[
'durabledict',
'redis',
],
classifiers=[
],
)
| 24
| 61
| 0.642045
|
c24b4c13939eefb8ab43b963c679dcfe34d2d452
| 1,418
|
py
|
Python
|
test/test_basic.py
|
Lukasa/certitude
|
33845ae0a6b123f084f10c7dd27f11485dd73ca6
|
[
"MIT"
] | 22
|
2015-12-23T22:30:13.000Z
|
2019-02-09T02:49:53.000Z
|
test/test_basic.py
|
Lukasa/certitude
|
33845ae0a6b123f084f10c7dd27f11485dd73ca6
|
[
"MIT"
] | 12
|
2015-12-23T22:31:15.000Z
|
2020-10-27T21:05:34.000Z
|
test/test_basic.py
|
Lukasa/certitude
|
33845ae0a6b123f084f10c7dd27f11485dd73ca6
|
[
"MIT"
] | 1
|
2016-04-25T13:12:31.000Z
|
2016-04-25T13:12:31.000Z
|
# -*- coding: utf-8 -*-
"""
Test basic Certitude functionality.
"""
import certitude
class TestValidation(object):
def test_basic_validation(self, certifi_chain):
"""
We can safely validate a good certificate chain.
Note that this certificate chain belongs to certifi.io, and will expire
in 2018.
"""
assert certitude.validate_cert_chain(
certifi_chain, u'certifi.io'
)
def test_hostname_validation(self, certifi_chain):
"""
We fail to validate if the hostname doesn't match the provided cert.
"""
assert not certitude.validate_cert_chain(
certifi_chain, u'http2bin.org'
)
def test_reject_expired(self, expired):
"""
We fail to validate expired certificates.
"""
assert not certitude.validate_cert_chain(
expired, u'expired.badssl.com'
)
def test_reject_invalid_host(self, wrong_host):
"""
We fail to validate certificates that don't match their host.
"""
assert not certitude.validate_cert_chain(
wrong_host, u'wrong.host.badssl.com'
)
def test_reject_self_signed(self, self_signed):
"""
We fail to validate self-signed certs.
"""
assert not certitude.validate_cert_chain(
self_signed, u'self-signed.badssl.com'
)
| 27.803922
| 79
| 0.614245
|
dab5307ea6e7d0223a2af7408f5fbbe88e22cbba
| 6,195
|
py
|
Python
|
backend/lambdas/data_mappers/handlers.py
|
dkmeena/amazon-s3-find-and-forget
|
8af4791e3f7f78373553c21afd680633a973434f
|
[
"Apache-2.0"
] | 165
|
2020-05-29T08:12:17.000Z
|
2022-03-30T22:35:57.000Z
|
backend/lambdas/data_mappers/handlers.py
|
dkmeena/amazon-s3-find-and-forget
|
8af4791e3f7f78373553c21afd680633a973434f
|
[
"Apache-2.0"
] | 101
|
2020-06-24T12:59:49.000Z
|
2022-03-28T13:32:15.000Z
|
backend/lambdas/data_mappers/handlers.py
|
dkmeena/amazon-s3-find-and-forget
|
8af4791e3f7f78373553c21afd680633a973434f
|
[
"Apache-2.0"
] | 23
|
2020-06-18T10:53:49.000Z
|
2022-03-29T03:38:04.000Z
|
"""
DataMapper handlers
"""
import json
import os
import boto3
from boto_utils import DecimalEncoder, get_user_info, running_job_exists
from decorators import (
with_logging,
request_validator,
catch_errors,
add_cors_headers,
json_body_loader,
load_schema,
)
dynamodb_resource = boto3.resource("dynamodb")
table = dynamodb_resource.Table(os.getenv("DataMapperTable"))
glue_client = boto3.client("glue")
PARQUET_HIVE_SERDE = "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe"
JSON_HIVE_SERDE = "org.apache.hive.hcatalog.data.JsonSerDe"
JSON_OPENX_SERDE = "org.openx.data.jsonserde.JsonSerDe"
SUPPORTED_SERDE_LIBS = [PARQUET_HIVE_SERDE, JSON_HIVE_SERDE, JSON_OPENX_SERDE]
@with_logging
@add_cors_headers
@request_validator(load_schema("get_data_mapper"))
@catch_errors
def get_data_mapper_handler(event, context):
data_mapper_id = event["pathParameters"]["data_mapper_id"]
item = table.get_item(Key={"DataMapperId": data_mapper_id}).get("Item")
if not item:
return {"statusCode": 404}
return {"statusCode": 200, "body": json.dumps(item, cls=DecimalEncoder)}
@with_logging
@add_cors_headers
@request_validator(load_schema("list_data_mappers"))
@catch_errors
def get_data_mappers_handler(event, context):
qs = event.get("queryStringParameters")
if not qs:
qs = {}
page_size = int(qs.get("page_size", 10))
scan_params = {"Limit": page_size}
start_at = qs.get("start_at")
if start_at:
scan_params["ExclusiveStartKey"] = {"DataMapperId": start_at}
items = table.scan(**scan_params).get("Items", [])
if len(items) < page_size:
next_start = None
else:
next_start = items[-1]["DataMapperId"]
return {
"statusCode": 200,
"body": json.dumps(
{"DataMappers": items, "NextStart": next_start}, cls=DecimalEncoder
),
}
@with_logging
@add_cors_headers
@json_body_loader
@request_validator(load_schema("create_data_mapper"))
@catch_errors
def put_data_mapper_handler(event, context):
path_params = event["pathParameters"]
body = event["body"]
validate_mapper(body)
item = {
"DataMapperId": path_params["data_mapper_id"],
"Columns": body["Columns"],
"QueryExecutor": body["QueryExecutor"],
"QueryExecutorParameters": body["QueryExecutorParameters"],
"CreatedBy": get_user_info(event),
"RoleArn": body["RoleArn"],
"Format": body.get("Format", "parquet"),
"DeleteOldVersions": body.get("DeleteOldVersions", True),
"IgnoreObjectNotFoundExceptions": body.get(
"IgnoreObjectNotFoundExceptions", False
),
}
table.put_item(Item=item)
return {"statusCode": 201, "body": json.dumps(item)}
@with_logging
@add_cors_headers
@request_validator(load_schema("delete_data_mapper"))
@catch_errors
def delete_data_mapper_handler(event, context):
if running_job_exists():
raise ValueError("Cannot delete Data Mappers whilst there is a job in progress")
data_mapper_id = event["pathParameters"]["data_mapper_id"]
table.delete_item(Key={"DataMapperId": data_mapper_id})
return {"statusCode": 204}
def validate_mapper(mapper):
existing_s3_locations = get_existing_s3_locations(mapper["DataMapperId"])
if mapper["QueryExecutorParameters"].get("DataCatalogProvider") == "glue":
table_details = get_table_details_from_mapper(mapper)
new_location = get_glue_table_location(table_details)
serde_lib, serde_params = get_glue_table_format(table_details)
for partition in mapper["QueryExecutorParameters"].get("PartitionKeys", []):
if partition not in get_glue_table_partition_keys(table_details):
raise ValueError("Partition Key {} doesn't exist".format(partition))
if any([is_overlap(new_location, e) for e in existing_s3_locations]):
raise ValueError(
"A data mapper already exists which covers this S3 location"
)
if serde_lib not in SUPPORTED_SERDE_LIBS:
raise ValueError(
"The format for the specified table is not supported. The SerDe lib must be one of {}".format(
", ".join(SUPPORTED_SERDE_LIBS)
)
)
if serde_lib == JSON_OPENX_SERDE:
not_allowed_json_params = {
"ignore.malformed.json": "TRUE",
"dots.in.keys": "TRUE",
}
for param, value in not_allowed_json_params.items():
if param in serde_params and serde_params[param] == value:
raise ValueError(
"The parameter {} cannot be {} for SerDe library {}".format(
param, value, JSON_OPENX_SERDE
)
)
if any([k for k, v in serde_params.items() if k.startswith("mapping.")]):
raise ValueError(
"Column mappings are not supported for SerDe library {}".format(
JSON_OPENX_SERDE
)
)
def get_existing_s3_locations(current_data_mapper_id):
items = table.scan()["Items"]
glue_mappers = [
get_table_details_from_mapper(mapper)
for mapper in items
if mapper["QueryExecutorParameters"].get("DataCatalogProvider") == "glue"
and mapper["DataMapperId"] != current_data_mapper_id
]
return [get_glue_table_location(m) for m in glue_mappers]
def get_table_details_from_mapper(mapper):
db = mapper["QueryExecutorParameters"]["Database"]
table_name = mapper["QueryExecutorParameters"]["Table"]
return glue_client.get_table(DatabaseName=db, Name=table_name)
def get_glue_table_location(t):
return t["Table"]["StorageDescriptor"]["Location"]
def get_glue_table_format(t):
return (
t["Table"]["StorageDescriptor"]["SerdeInfo"]["SerializationLibrary"],
t["Table"]["StorageDescriptor"]["SerdeInfo"]["Parameters"],
)
def get_glue_table_partition_keys(t):
return [x["Name"] for x in t["Table"]["PartitionKeys"]]
def is_overlap(a, b):
return a in b or b in a
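# --- Hedged usage note (not part of the original module) ---
# is_overlap treats two S3 locations as overlapping when either string
# contains the other, which is how validate_mapper rejects nested prefixes:
#
#     is_overlap("s3://bucket/data/", "s3://bucket/data/2020/")   # True
#     is_overlap("s3://bucket/data/", "s3://bucket/other/")       # False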
| 34.226519
| 110
| 0.664407
|
fdc79055badbe970b76b4d7981f049bd071e0f4f
| 1,076
|
py
|
Python
|
Chapter13_code/ch13_r03_consume_parameters_passed_to_your_handlers/models/ir_http.py
|
PacktPublishing/Odoo-Development-Cookbook
|
5553110c0bc352c4541f11904e236cad3c443b8b
|
[
"MIT"
] | 55
|
2016-05-23T16:05:50.000Z
|
2021-07-19T00:16:46.000Z
|
Chapter13_code/ch13_r03_consume_parameters_passed_to_your_handlers/models/ir_http.py
|
kogkog098/Odoo-Development-Cookbook
|
166c9b98efbc9108b30d719213689afb1f1c294d
|
[
"MIT"
] | 1
|
2016-12-09T02:14:21.000Z
|
2018-07-02T09:02:20.000Z
|
Chapter13_code/ch13_r03_consume_parameters_passed_to_your_handlers/models/ir_http.py
|
kogkog098/Odoo-Development-Cookbook
|
166c9b98efbc9108b30d719213689afb1f1c294d
|
[
"MIT"
] | 52
|
2016-06-01T20:03:59.000Z
|
2020-10-31T23:58:25.000Z
|
# -*- coding: utf-8 -*-
# © 2015 Therp BV <http://therp.nl>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
import werkzeug
from openerp import api, fields, models
from openerp.http import request
class ModelNameSearchConverter(werkzeug.routing.BaseConverter):
def __init__(self, url_map, model):
super(ModelNameSearchConverter, self).__init__(url_map)
self.model = model
self.regex = r'((\w|\+)+)'
def to_python(self, value):
result = request.env[self.model].browse(
map(lambda x: x[0], request.env[self.model].sudo().name_search(
value.replace('+', ' '), operator='=ilike', limit=1)))
if not result:
raise werkzeug.exceptions.NotFound()
return result
def to_url(self, value):
return value.name_get()[1].replace(' ', '+')
class IrHttp(models.Model):
_inherit = 'ir.http'
def _get_converters(self):
result = super(IrHttp, self)._get_converters()
result['model_name'] = ModelNameSearchConverter
return result
| 31.647059
| 75
| 0.637546
|
3ef3370df30f81c9d08582bdea08934321bb429c
| 328
|
py
|
Python
|
project_root/restaurant/migrations/0005_auto_20190807_1217.py
|
saharisrael31/marketing-service-api
|
1fb6c016c2bf65134bf43da5b245a78bbcdc5294
|
[
"bzip2-1.0.6"
] | null | null | null |
project_root/restaurant/migrations/0005_auto_20190807_1217.py
|
saharisrael31/marketing-service-api
|
1fb6c016c2bf65134bf43da5b245a78bbcdc5294
|
[
"bzip2-1.0.6"
] | null | null | null |
project_root/restaurant/migrations/0005_auto_20190807_1217.py
|
saharisrael31/marketing-service-api
|
1fb6c016c2bf65134bf43da5b245a78bbcdc5294
|
[
"bzip2-1.0.6"
] | null | null | null |
# Generated by Django 2.2.4 on 2019-08-07 09:17
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('restaurant', '0004_auto_20190807_1150'),
]
operations = [
migrations.RenameModel(
old_name='Tags',
new_name='Tag',
),
]
| 18.222222
| 50
| 0.591463
|
2d3959561386c83bd4dc0435e18dae998a447d8e
| 687
|
py
|
Python
|
cameo/clipboardtool.py
|
muchu1983/104_cameo
|
8c7f78de198a5bd8d870589402e3b7e8b59f520a
|
[
"BSD-3-Clause"
] | null | null | null |
cameo/clipboardtool.py
|
muchu1983/104_cameo
|
8c7f78de198a5bd8d870589402e3b7e8b59f520a
|
[
"BSD-3-Clause"
] | null | null | null |
cameo/clipboardtool.py
|
muchu1983/104_cameo
|
8c7f78de198a5bd8d870589402e3b7e8b59f520a
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Copyright (C) 2015, MuChu Hsu
Contributed by Muchu Hsu (muchu1983@gmail.com)
This file is part of BSD license
<https://opensource.org/licenses/BSD-3-Clause>
"""
import win32clipboard as cb
import win32con
# Clipboard access helper
class ClipboardTool:
    # Put a unicode string onto the clipboard
def setUnicodeText(self, strUnicode=u""):
cb.OpenClipboard()
cb.EmptyClipboard()
cb.SetClipboardData(win32con.CF_UNICODETEXT, strUnicode)
cb.CloseClipboard()
    # Read the unicode string back from the clipboard
def getUnicodeText(self):
cb.OpenClipboard()
uText = cb.GetClipboardData(win32con.CF_UNICODETEXT)
cb.CloseClipboard()
return uText
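# --- Hedged usage sketch (not part of the original module) ---
# Round-trips a unicode string through the Windows clipboard; requires pywin32
# and an interactive Windows session.
if __name__ == "__main__":
    tool = ClipboardTool()
    tool.setUnicodeText(u"hello clipboard")
    print(tool.getUnicodeText())   # -> hello clipboard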
| 25.444444
| 64
| 0.663755
|
8fd5e84b8774f1886548c4691d00c843b1c45008
| 1,384
|
py
|
Python
|
venv/Lib/site-packages/plotnine/themes/theme_linedraw.py
|
EkremBayar/bayar
|
aad1a32044da671d0b4f11908416044753360b39
|
[
"MIT"
] | null | null | null |
venv/Lib/site-packages/plotnine/themes/theme_linedraw.py
|
EkremBayar/bayar
|
aad1a32044da671d0b4f11908416044753360b39
|
[
"MIT"
] | null | null | null |
venv/Lib/site-packages/plotnine/themes/theme_linedraw.py
|
EkremBayar/bayar
|
aad1a32044da671d0b4f11908416044753360b39
|
[
"MIT"
] | null | null | null |
from .elements import element_line, element_rect, element_text, element_blank
from .theme import theme
from .theme_gray import theme_gray
class theme_linedraw(theme_gray):
"""
A theme with only black lines of various widths on white backgrounds
Parameters
----------
base_size : int, optional
Base font size. All text sizes are a scaled versions of
the base font size. Default is 11.
base_family : str, optional
Base font family.
"""
def __init__(self, base_size=11, base_family=None):
theme_gray.__init__(self, base_size, base_family)
self.add_theme(theme(
axis_text=element_text(color='black', size=base_size*0.8),
axis_ticks=element_line(color='black', size=0.5),
axis_ticks_minor=element_blank(),
legend_key=element_rect(color='black', size=0.5),
panel_background=element_rect(fill='white'),
panel_border=element_rect(fill='None', color='black', size=1),
panel_grid_major=element_line(color='black', size=0.1),
panel_grid_minor=element_line(color='black', size=0.02),
strip_background=element_rect(
fill='black', color='black', size=1),
strip_text_x=element_text(color='white'),
strip_text_y=element_text(color='white', angle=-90)
), inplace=True)
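# Illustrative usage sketch (editor's addition, not part of the original module).
# The theme is applied like any other plotnine theme; mtcars and its 'wt'/'mpg'
# columns are just a convenient bundled example dataset.
#
#     >>> from plotnine import ggplot, aes, geom_point
#     >>> from plotnine.data import mtcars
#     >>> ggplot(mtcars, aes('wt', 'mpg')) + geom_point() + theme_linedraw()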
| 39.542857
| 77
| 0.651734
|
3524ba851a7d5c87d4c4f73fa78da2b9553042f0
| 5,202
|
py
|
Python
|
sportsreference/fb/fb_utils.py
|
atklaus/sportsreference
|
22a45ea83ce1608c3176f00d4f414d5b9463605c
|
[
"MIT"
] | null | null | null |
sportsreference/fb/fb_utils.py
|
atklaus/sportsreference
|
22a45ea83ce1608c3176f00d4f414d5b9463605c
|
[
"MIT"
] | null | null | null |
sportsreference/fb/fb_utils.py
|
atklaus/sportsreference
|
22a45ea83ce1608c3176f00d4f414d5b9463605c
|
[
"MIT"
] | null | null | null |
from difflib import get_close_matches
from .squad_ids import SQUAD_IDS
def _parse_squad_name(team_id):
"""
Parse and clean the team's name.
To try and match requested team names with the master squad ID list, passed
names should be parsed to remove the common 'FC' and 'CF' tags, as well as
force all strings to be lowercase and excess whitespace removed.
Parameters
----------
team_id : string
The requested team's name to be parsed.
Returns
-------
string
Returns a ``string`` of the parsed team's name.
"""
name = team_id.replace(' FC', '')
name = name.replace('FC ', '')
name = name.replace(' CF', '')
name = name.replace('CF ', '')
name = name.lower()
name = name.strip()
return name
def lookup_squad_id(name, quiet=False):
"""
Attempt to match a team name with a squad ID.
A simple utility to make it easier to find squad IDs given a team name.
By supplying a team name, this function will return the squad ID if a
match can be found, or return a dictionary of the top 5 closest teams if a
match cannot be made. For example, specifying 'Tottenham Hotspur' will
return Tottenham's squad ID of '361ca564'. However, specifying 'Tottenham'
doesn't technically match an official team name, and the closest matches
will be returned instead, with Tottenham Hotspur being the first result.
Due to the massive number of teams listed on fbref.com, the incorrect team
    could be accidentally pulled by what appears to be the proper name. For
example, 'Barcelona' is the name of one of the largest clubs in the world,
located in Barcelona, Spain. However, 'Barcelona' could also refer to
Barcelona Sporting Club (commonly referred to as just 'Barcelona' locally)
who competes in the Ecuadorian Serie A. By using the squad ID, the intended
team is guaranteed to be used.
This helper function does not rely on case for the words, so 'Tottenham
Hotspur' will return the same result as 'tottenham hotspur'. Also, common
tags such as 'FC' and 'CF' are removed, so there is no need to specify
those components.
In the case a match can't be made, a dictionary of suggestions will be
returned instead of the squad ID. The dictionary is intended to be used
to find the best alternatives for later use. The keys are the suggested
names and values are the squad IDs. This allows direct usage of a squad ID
in subsequent calls to various classes in the Football module in
sportsreference instead of attempting to lookup a name. As there can be
multiple return types, it is recommended to check the type of the returned
value before further calculations. If the return is of type ``string``, it
is the 8-digit squad ID. If it is of type ``dictionary``, it is a key-value
object containing suggestions.
Parameters
----------
name : string
A ``string`` of the name of a squad to lookup, such as 'Tottenham
Hotspur'.
quiet : boolean
A ``boolean`` value which suppresses text output while True.
Returns
-------
string or dictionary
Returns a ``string`` of the squad's 8-digit ID if a match could be
found for the requested team. If a match could not be found, a
``dictionary`` is returned with the key-value pairs for the top 5
closest teams as keys and their respective IDs as values.
"""
filtered_name = _parse_squad_name(name)
if filtered_name in SQUAD_IDS:
return SQUAD_IDS[filtered_name]
closest_matches = get_close_matches(filtered_name, SQUAD_IDS.keys(), 5)
squad_match_ids = {}
output = 'Exact match not found - Printing closest matches:\n'
for team in closest_matches:
output += team.title() + ' - ' + SQUAD_IDS[team] + '\n'
squad_match_ids[team.title()] = SQUAD_IDS[team]
if not quiet:
print(output)
return squad_match_ids
def _lookup_team(team_id):
"""
Find the squad ID for the requested team.
Every team on fbref.com has its own unique squad ID, which is a 8-digit
code containing alphanumeric numbers. The user can either supply the
8-digit code as-is, or provide the team's full name. If the squad ID is
provided and matches a master list of IDs, the squad ID will be returned
as-is for later use in the class. If the name is passed, it will first be
parsed to try and match the team with a team in the master squad ID list.
If no squad is found, an error will be raised indicating the requested team
cannot be found.
Parameters
----------
team_id : string
A ``string`` of either the team's ID or the name of the team.
Returns
-------
string
Returns a ``string`` of the squad's 8-digit ID.
"""
if team_id.lower() in SQUAD_IDS.values():
return team_id.lower()
name = lookup_squad_id(team_id)
if type(name) == str:
return name
error_message = ('Team ID of "%s" not found. Did you mean one of the '
'following?\n%s' % (team_id, name))
raise ValueError(error_message)
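# Illustrative usage sketch (editor's addition, not part of the original module).
# The Tottenham squad ID comes from the docstring above; other results depend on
# the contents of SQUAD_IDS.
#
#     >>> from sportsreference.fb.fb_utils import lookup_squad_id
#     >>> lookup_squad_id('Tottenham Hotspur')
#     '361ca564'
#     >>> lookup_squad_id('Tottenham', quiet=True)   # inexact name -> dict of suggestions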
| 40.015385
| 79
| 0.681276
|
933bd705f6fc7001439d3f5d097cad74587129e2
| 5,218
|
py
|
Python
|
tensorflow/python/training/adagrad.py
|
uve/tensorflow
|
e08079463bf43e5963acc41da1f57e95603f8080
|
[
"Apache-2.0"
] | null | null | null |
tensorflow/python/training/adagrad.py
|
uve/tensorflow
|
e08079463bf43e5963acc41da1f57e95603f8080
|
[
"Apache-2.0"
] | null | null | null |
tensorflow/python/training/adagrad.py
|
uve/tensorflow
|
e08079463bf43e5963acc41da1f57e95603f8080
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Adagrad for TensorFlow."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.training import optimizer
from tensorflow.python.training import training_ops
from tensorflow.python.util.tf_export import tf_export
@tf_export(v1=["train.AdagradOptimizer"])
class AdagradOptimizer(optimizer.Optimizer):
"""Optimizer that implements the Adagrad algorithm.
See this [paper](http://www.jmlr.org/papers/volume12/duchi11a/duchi11a.pdf)
or this
[intro](https://ppasupat.github.io/a9online/uploads/proximal_notes.pdf).
"""
def __init__(self, learning_rate, initial_accumulator_value=0.1,
use_locking=False, name="Adagrad"):
"""Construct a new Adagrad optimizer.
Args:
learning_rate: A `Tensor` or a floating point value. The learning rate.
initial_accumulator_value: A floating point value.
Starting value for the accumulators, must be positive.
use_locking: If `True` use locks for update operations.
name: Optional name prefix for the operations created when applying
gradients. Defaults to "Adagrad".
Raises:
ValueError: If the `initial_accumulator_value` is invalid.
@compatibility(eager)
When eager execution is enabled, `learning_rate` can be a callable that
takes no arguments and returns the actual value to use. This can be useful
for changing these values across different invocations of optimizer
functions.
@end_compatibility
"""
if initial_accumulator_value <= 0.0:
raise ValueError("initial_accumulator_value must be positive: %s" %
initial_accumulator_value)
super(AdagradOptimizer, self).__init__(use_locking, name)
self._learning_rate = learning_rate
self._initial_accumulator_value = initial_accumulator_value
# Created in Initialize.
self._learning_rate_tensor = None
def _create_slots(self, var_list):
for v in var_list:
dtype = v.dtype.base_dtype
if v.get_shape().is_fully_defined():
init = init_ops.constant_initializer(self._initial_accumulator_value,
dtype=dtype)
else:
init = self._init_constant_op(v, dtype)
self._get_or_make_slot_with_initializer(v, init, v.get_shape(), dtype,
"accumulator", self._name)
def _init_constant_op(self, v, dtype):
def init():
# Use a Tensor instead of initializer if variable does not have
# static shape.
init_constant = gen_array_ops.fill(array_ops.shape(v),
self._initial_accumulator_value)
return math_ops.cast(init_constant, dtype)
return init
def _prepare(self):
learning_rate = self._call_if_callable(self._learning_rate)
self._learning_rate_tensor = ops.convert_to_tensor(
learning_rate, name="learning_rate")
def _apply_dense(self, grad, var):
acc = self.get_slot(var, "accumulator")
return training_ops.apply_adagrad(
var,
acc,
math_ops.cast(self._learning_rate_tensor, var.dtype.base_dtype),
grad,
use_locking=self._use_locking)
def _resource_apply_dense(self, grad, var):
acc = self.get_slot(var, "accumulator")
return training_ops.resource_apply_adagrad(
var.handle,
acc.handle,
math_ops.cast(self._learning_rate_tensor, grad.dtype.base_dtype),
grad,
use_locking=self._use_locking)
def _apply_sparse(self, grad, var):
acc = self.get_slot(var, "accumulator")
return training_ops.sparse_apply_adagrad(
var,
acc,
math_ops.cast(self._learning_rate_tensor, var.dtype.base_dtype),
grad.values,
grad.indices,
use_locking=self._use_locking)
def _resource_apply_sparse(self, grad, var, indices):
acc = self.get_slot(var, "accumulator")
return training_ops.resource_sparse_apply_adagrad(
var.handle,
acc.handle,
math_ops.cast(self._learning_rate_tensor, grad.dtype),
grad,
indices,
use_locking=self._use_locking)
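# Illustrative usage sketch (editor's addition, not part of the original module).
# This is the graph-mode (TF1) optimizer, so it is normally driven through the
# base Optimizer API; `loss` stands for any scalar tensor in the graph.
#
#     >>> opt = AdagradOptimizer(learning_rate=0.1, initial_accumulator_value=0.1)
#     >>> train_op = opt.minimize(loss)
#     >>> # sess.run(train_op) inside a tf.compat.v1.Session training loop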
| 39.233083
| 81
| 0.680529
|
a71cea7a5428d5408679f83023bba2cb3e153756
| 8,730
|
py
|
Python
|
check_project.py
|
sw360/sw360python
|
bb0a249561016fb8637c3a8fdf0b1d00ecfbf345
|
[
"MIT"
] | 3
|
2021-06-16T22:36:03.000Z
|
2022-02-11T11:13:14.000Z
|
check_project.py
|
sw360/sw360python
|
bb0a249561016fb8637c3a8fdf0b1d00ecfbf345
|
[
"MIT"
] | 6
|
2021-06-23T15:10:10.000Z
|
2022-03-09T08:36:55.000Z
|
check_project.py
|
sw360/sw360python
|
bb0a249561016fb8637c3a8fdf0b1d00ecfbf345
|
[
"MIT"
] | 1
|
2022-03-07T06:57:52.000Z
|
2022-03-07T06:57:52.000Z
|
# -------------------------------------------------------------------------------
# (c) 2019-2020 Siemens AG
# All Rights Reserved.
# Author: thomas.graf@siemens.com
#
# Licensed under the MIT license.
# SPDX-License-Identifier: MIT
# -------------------------------------------------------------------------------
"""
Check a project on SW360
usage: check_project.py [-h] [-n NAME] [-v VERSION] [-id PROJECT_ID]
[-t SW360_TOKEN] [-url SW360_URL]
Check a project on SW360, display component clearing status
optional arguments:
-h, --help show this help message and exit
-n NAME, --name NAME name of the project
-v VERSION, --version VERSION
version of the project
-id PROJECT_ID SW360 id of the project, supersedes name and version
parameters
-t SW360_TOKEN, --token SW360_TOKEN
use this token for access to SW360
-url SW360_URL use this URL for access to SW360
Examples
Productive system:
python check_project.py -n tr-card -v 1.0
Staging system:
python check_project.py -n tr-card -v 1.0 -t <token> -url https://stage.sw360.siemens.com
"""
import argparse
import os
import sys
from colorama import init, Fore, Style
import requests
import sw360
# Do you use an oauth flow? This is usually False if you get your SW360 token
# in the SW360 preferences and true if you get it via a separate OAuth2 flow
OAUTH2 = False
# initialize colorama
init()
class CheckProject():
"""Check a project on SW360, display component clearing status"""
def __init__(self):
self.client = None
self.project_id = ""
self.project = None
self.sw360_url = "https://sw360.siemens.com"
@classmethod
def get_clearing_state(cls, proj, href):
"""Returns the clearing state of the given component/release"""
rel = proj["linkedReleases"]
for key in rel:
if key["release"] == href:
return key["mainlineState"]
return None
def has_source_code(self, href):
"""Returns true if a source code attachment is available"""
rel = self.client.get_release_by_url(href)
if "_embedded" not in rel:
return False
if "sw360:attachments" not in rel["_embedded"]:
return False
att = rel["_embedded"]["sw360:attachments"]
for key in att:
if key["attachmentType"] == "SOURCE":
return True
return False
def show_linked_projects(self, project):
"""Show linked projects of the given project"""
if "sw360:projects" in project["_embedded"]:
linked_projects = project["_embedded"]["sw360:projects"]
if linked_projects:
print("\n Linked projects: ")
for key in linked_projects:
print(" " + key["name"] + ", " + key["version"])
else:
print("\n No linked projects")
def show_linked_releases(self, project):
"""Show linked releases of the given project"""
if "sw360:releases" in project["_embedded"]:
print("\n Components: ")
releases = project["_embedded"]["sw360:releases"]
releases.sort(key=lambda s: s["name"].lower())
for key in releases:
href = key["_links"]["self"]["href"]
state = self.get_clearing_state(project, href)
prereq = ""
if state == "OPEN":
print(Fore.LIGHTYELLOW_EX, end="", flush=True)
if not self.has_source_code(href):
print(Fore.LIGHTRED_EX, end="", flush=True)
prereq = "; No source provided"
else:
prereq = ""
print(" " + key["name"] + "; " + key["version"] + "; "
+ state + prereq + Fore.RESET)
else:
print(" No linked releases")
def show_project_status(self, project_id):
"""Retrieve and display project status"""
try:
project = self.client.get_project(project_id)
except sw360.SW360Error as swex:
print(Fore.LIGHTRED_EX + " ERROR: unable to access project!")
sys.exit(" " + str(swex) + Style.RESET_ALL)
print(" Project name: " + project["name"] + ", " + project["version"])
if "projectResponsible" in project:
print(" Project responsible: " + project["projectResponsible"])
print(" Project owner: " + project["projectOwner"])
print(" Clearing state: " + project["clearingState"])
self.show_linked_projects(project)
self.show_linked_releases(project)
def login(self, token=None, url=None):
"""Login to SW360"""
if token:
sw360_api_token = token
else:
sw360_api_token = os.environ["SW360ProductionToken"]
if url:
self.sw360_url = url
if self.sw360_url[-1] != "/":
self.sw360_url += "/"
if not sw360_api_token:
sys.exit(Fore.LIGHTRED_EX + " No SW360 API token specified!" + Style.RESET_ALL)
self.client = sw360.SW360(self.sw360_url, sw360_api_token, oauth2=OAUTH2)
try:
result = self.client.login_api()
return result
except sw360.SW360Error as swex:
if swex.response and (swex.response.status_code == requests.codes['unauthorized']):
sys.exit(
Fore.LIGHTRED_EX +
" You are not authorized!" +
Style.RESET_ALL)
else:
sys.exit(
Fore.LIGHTRED_EX +
" Error authorizing user!" +
Style.RESET_ALL)
def find_project(self, name, version):
"""Find the project with the matching name and version on SW360"""
projects = self.client.get_projects_by_name(name)
if not projects:
sys.exit(Fore.YELLOW + " No matching project found!" + Style.RESET_ALL)
print(" Searching for projects")
for project in projects:
href = project["_links"]["self"]["href"]
if "version" not in project:
print(
" "
+ project["name"]
+ " => ID = "
+ self.client.get_id_from_href(href)
)
else:
pid = self.client.get_id_from_href(href)
print(
" "
+ project["name"]
+ ", "
+ project["version"]
+ " => ID = "
+ pid
)
if project["version"].lower() == version:
return pid
return None
@classmethod
def parse_commandline(cls):
"""Parse command line arguments"""
parser = argparse.ArgumentParser(
description="Check a project on SW360, display component clearing status"
)
parser.add_argument("-n", "--name", help="name of the project")
parser.add_argument("-v", "--version", help="version of the project")
parser.add_argument(
"-id",
dest="project_id",
help="SW360 id of the project, supersedes name and version parameters",
)
parser.add_argument(
"-t",
"--token",
dest="sw360_token",
help="use this token for access to SW360",
)
parser.add_argument(
"-url", dest="sw360_url", help="use this URL for access to SW360"
)
args = parser.parse_args()
if not args.project_id:
if not args.name:
sys.exit(Fore.LIGHTRED_EX + " No project name specified!" + Style.RESET_ALL)
if not args.version:
sys.exit(Fore.LIGHTRED_EX + " No project version specified!" + Style.RESET_ALL)
return args
def main(self):
"""Main method"""
print("\nCheck a project on SW360")
args = self.parse_commandline()
self.login(token=args.sw360_token, url=args.sw360_url)
if args.project_id:
self.project_id = args.project_id
else:
self.project_id = self.find_project(args.name, args.version)
if not self.project_id:
sys.exit(Fore.LIGHTRED_EX + " ERROR: no (unique) project found!" + Style.RESET_ALL)
print("")
self.show_project_status(self.project_id)
if __name__ == "__main__":
APP = CheckProject()
APP.main()
| 33.968872
| 96
| 0.541466
|
347cafdb78021568e092a461aa66ae3368629afb
| 348
|
py
|
Python
|
neobank/users/urls.py
|
ovallesgustavo/neobank
|
dcb9df023d10d8bfc0930976af1f245eb9b58c7b
|
[
"MIT"
] | null | null | null |
neobank/users/urls.py
|
ovallesgustavo/neobank
|
dcb9df023d10d8bfc0930976af1f245eb9b58c7b
|
[
"MIT"
] | null | null | null |
neobank/users/urls.py
|
ovallesgustavo/neobank
|
dcb9df023d10d8bfc0930976af1f245eb9b58c7b
|
[
"MIT"
] | null | null | null |
from django.urls import path
from neobank.users.views import user_detail_view, user_redirect_view, user_update_view
app_name = "users"
urlpatterns = [
path("~redirect/", view=user_redirect_view, name="redirect"),
path("~update/", view=user_update_view, name="update"),
path("<str:username>/", view=user_detail_view, name="detail"),
]
| 31.636364
| 86
| 0.735632
|
9207e015bb44ac3e3f5156082b8ce61c4bf2055b
| 945
|
py
|
Python
|
sudoku_solver/contrib/sites/migrations/0002_set_site_domain_and_name.py
|
d3prof3t/django_sudoku_solver
|
275c7ed358a93d55f52e29723c43df54c11b554e
|
[
"BSD-3-Clause"
] | null | null | null |
sudoku_solver/contrib/sites/migrations/0002_set_site_domain_and_name.py
|
d3prof3t/django_sudoku_solver
|
275c7ed358a93d55f52e29723c43df54c11b554e
|
[
"BSD-3-Clause"
] | null | null | null |
sudoku_solver/contrib/sites/migrations/0002_set_site_domain_and_name.py
|
d3prof3t/django_sudoku_solver
|
275c7ed358a93d55f52e29723c43df54c11b554e
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations
def update_site_forward(apps, schema_editor):
"""Set site domain and name."""
Site = apps.get_model("sites", "Site")
Site.objects.update_or_create(
id=settings.SITE_ID,
defaults={
"domain": "example.com",
"name": "Sudoku Solver"
}
)
def update_site_backward(apps, schema_editor):
"""Revert site domain and name to default."""
Site = apps.get_model("sites", "Site")
Site.objects.update_or_create(
id=settings.SITE_ID,
defaults={
"domain": "example.com",
"name": "example.com"
}
)
class Migration(migrations.Migration):
dependencies = [
('sites', '0001_initial'),
]
operations = [
migrations.RunPython(update_site_forward, update_site_backward),
]
| 23.04878
| 72
| 0.614815
|
55dc1d6087096752f6f2b78432a1258e6b8c530a
| 1,939
|
py
|
Python
|
authors/apps/articles/tests/test_social_share.py
|
SilasKenneth/ah-technocrats
|
c199e6dd432bdb4a5e1152f90cb1716b09af2c4e
|
[
"BSD-3-Clause"
] | 1
|
2018-12-04T15:29:57.000Z
|
2018-12-04T15:29:57.000Z
|
authors/apps/articles/tests/test_social_share.py
|
SilasKenneth/ah-technocrats
|
c199e6dd432bdb4a5e1152f90cb1716b09af2c4e
|
[
"BSD-3-Clause"
] | 52
|
2018-11-27T08:00:25.000Z
|
2021-06-10T20:58:16.000Z
|
authors/apps/articles/tests/test_social_share.py
|
SilasKenneth/ah-technocrats
|
c199e6dd432bdb4a5e1152f90cb1716b09af2c4e
|
[
"BSD-3-Clause"
] | 4
|
2019-07-15T10:24:22.000Z
|
2020-02-04T19:15:12.000Z
|
from django.urls import reverse
from authors.apps.authentication.models import User
from .base_test import BaseTestCase
from rest_framework import status
class TestSocialSharingArticles(BaseTestCase):
def test_get_fb_share_link(self):
share_to_facebook, token = self.get_share_endpoint('facebook')
response = self.test_client.get(share_to_facebook, format='json', HTTP_AUTHORIZATION=token)
resp = str(response.data)
self.assertIn('facebook', resp)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_get_twitter_share_link(self):
share_to_twitter, token = self.get_share_endpoint('twitter')
response = self.test_client.get(share_to_twitter, format='json', HTTP_AUTHORIZATION=token)
resp = str(response.data)
self.assertIn('twitter', resp)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_get_gplus_share_link(self):
share_to_google, token = self.get_share_endpoint('gplus')
response = self.test_client.get(share_to_google, format='json', HTTP_AUTHORIZATION=token)
resp = str(response.data)
self.assertIn('google', resp)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_get_reddit_share_link(self):
share_to_reddit, token = self.get_share_endpoint('reddit')
response = self.test_client.get(share_to_reddit, format='json', HTTP_AUTHORIZATION=token)
resp = str(response.data)
self.assertIn('reddit', resp)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_get_email_share_link(self):
share_to_email, token = self.get_share_endpoint('email')
response = self.test_client.get(share_to_email, format='json', HTTP_AUTHORIZATION=token)
resp = str(response.data)
self.assertIn('mailto', resp)
self.assertEqual(response.status_code, status.HTTP_200_OK)
| 47.292683
| 99
| 0.728726
|
e6f96913332720a17b5070cdcbfb54d8f8a4b659
| 4,647
|
py
|
Python
|
Config.py
|
shenlong95/SSL-NAG
|
c8ce6cbb2cca3ccd24649d78fe653b786eb524a8
|
[
"MIT"
] | 1
|
2021-08-09T10:32:22.000Z
|
2021-08-09T10:32:22.000Z
|
Config.py
|
shenlong95/SSL-NAG
|
c8ce6cbb2cca3ccd24649d78fe653b786eb524a8
|
[
"MIT"
] | null | null | null |
Config.py
|
shenlong95/SSL-NAG
|
c8ce6cbb2cca3ccd24649d78fe653b786eb524a8
|
[
"MIT"
] | 2
|
2020-08-25T00:57:48.000Z
|
2021-07-29T02:57:01.000Z
|
import re
import argparse
__all__ = ['parse_commandline_args', 'parse_dict_args']
def create_parser():
parser = argparse.ArgumentParser(description='PyTorch ImageNet Training')
# Technical details
parser.add_argument('-j', '--workers', default=4, type=int, metavar='N',
help='number of data loading workers (default: 4)')
parser.add_argument('--is-parallel', default=False, type=str2bool,
help='use data parallel', metavar='BOOL')
parser.add_argument('--checkpoint-epochs', default=1, type=int,
metavar='EPOCHS', help='checkpoint frequency in epochs, 0 to turn checkpointing off (default: 1)')
parser.add_argument('-g', '--gpu', default=0, type=int, metavar='N',
help='gpu number (default: 0)')
# Data
parser.add_argument('--dataset', metavar='DATASET', default='imagenet',
choices=['cifar10'])
parser.add_argument('-b', '--batch-size', default=256, type=int,
metavar='N', help='mini-batch size (default: 256)')
parser.add_argument('--labeled-batch-size', default=128, type=int,
metavar='N', help='batch size for labeled data (default: 128)')
parser.add_argument('--print-freq', default=20, type=int,
metavar='N', help='display frequence (default: 20)')
parser.add_argument('--labels', type=str, default='', metavar='DIR')
parser.add_argument('--train-subdir', type=str, metavar='DIR')
parser.add_argument('--eval-subdir', type=str, metavar='DIR')
# Architecture
parser.add_argument('--arch', '-a', metavar='ARCH', default='lenet')
# Optimization
parser.add_argument('--epochs', default=90, type=int, metavar='N',
help='number of total epochs to run')
parser.add_argument('--lr', '--learning-rate', default=0.1, type=float,
metavar='LR', help='max learning rate')
parser.add_argument('--loss', default="mse", type=str, metavar='TYPE',
choices=['soft'])
parser.add_argument('--optim', default="sgd", type=str, metavar='TYPE',
choices=['sgd', 'adam'])
parser.add_argument('--momentum', default=0.9, type=float, metavar='M',
help='momentum')
parser.add_argument('--weight-decay', '--wd', default=1e-4, type=float,
metavar='W', help='weight decay (default: 1e-4)')
parser.add_argument('--nesterov', default=False, type=str2bool,
help='use nesterov momentum', metavar='BOOL')
    # LR scheduler
parser.add_argument('--lr-scheduler', default="cos", type=str, metavar='TYPE',
choices=['cos', 'multistep', 'none'])
parser.add_argument('--min-lr', '--minimum-learning-rate', default=1e-7, type=float,
metavar='LR', help='minimum learning rate')
parser.add_argument('--steps', default="0,",
type=lambda x: [int(s) for s in x.split(',')],
metavar='N', help='milestones')
parser.add_argument('--gamma', default=0.1, type=float,
help='factor of learning rate decay')
# Pseudo-Label
parser.add_argument('--t1', default=100, type=float, metavar='M',
help='T1')
    parser.add_argument('--t2', default=600, type=float, metavar='M',
                        help='T2')
parser.add_argument('--af', default=0.3, type=float, metavar='M',
help='af')
parser.add_argument('--n-labels', default=50000, type=int,
help='labeled num')
return parser
def parse_commandline_args():
return create_parser().parse_args()
def parse_dict_args(**kwargs):
def to_cmdline_kwarg(key, value):
if len(key) == 1:
key = "-{}".format(key)
else:
key = "--{}".format(re.sub(r"_", "-", key))
value = str(value)
return key, value
kwargs_pairs = (to_cmdline_kwarg(key, value)
for key, value in kwargs.items())
cmdline_args = list(sum(kwargs_pairs, ()))
print("Using these args: ", " ".join(cmdline_args))
return create_parser().parse_args(cmdline_args)
def str2bool(v):
if v.lower() in ('yes', 'true', 't', 'y', '1'):
return True
elif v.lower() in ('no', 'false', 'f', 'n', '0'):
return False
else:
raise argparse.ArgumentTypeError('Boolean value expected.')
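# Illustrative usage sketch (editor's addition, not part of the original module).
# parse_dict_args converts keyword arguments into the equivalent command line
# before handing them to the parser defined above; the values are arbitrary.
#
#     >>> args = parse_dict_args(dataset='cifar10', batch_size=128, lr=0.05)
#     Using these args:  --dataset cifar10 --batch-size 128 --lr 0.05
#     >>> args.batch_size
#     128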
| 44.682692
| 123
| 0.560362
|
9c28050429b1d3c4949de38e57c3f2be413a46ab
| 264
|
py
|
Python
|
examples/spot/stream/isolated_margin/new_isolated_margin_listen_key.py
|
Banging12/binance-connector-python
|
dc6fbbd0bb64fb08d73ad8b31e0b81d776efa30b
|
[
"MIT"
] | 512
|
2021-06-15T08:52:44.000Z
|
2022-03-31T09:49:53.000Z
|
examples/spot/stream/isolated_margin/new_isolated_margin_listen_key.py
|
Banging12/binance-connector-python
|
dc6fbbd0bb64fb08d73ad8b31e0b81d776efa30b
|
[
"MIT"
] | 75
|
2021-06-20T13:49:50.000Z
|
2022-03-30T02:45:31.000Z
|
examples/spot/stream/isolated_margin/new_isolated_margin_listen_key.py
|
Banging12/binance-connector-python
|
dc6fbbd0bb64fb08d73ad8b31e0b81d776efa30b
|
[
"MIT"
] | 156
|
2021-06-18T11:56:36.000Z
|
2022-03-29T16:34:22.000Z
|
#!/usr/bin/env python
import logging
from binance.spot import Spot as Client
from binance.lib.utils import config_logging
config_logging(logging, logging.DEBUG)
key = ""
client = Client(key)
logging.info(client.new_isolated_margin_listen_key(symbol="BTCUSDT"))
| 22
| 69
| 0.799242
|
f5e8578ce0e567e41bfef6c64729d22705952827
| 1,721
|
py
|
Python
|
data_preparation/main.py
|
Iddoyadlin/hebrew-w2v
|
c887d3dd3bc427289f1c07a4e71c1672604db92b
|
[
"Apache-2.0"
] | 7
|
2022-03-21T17:46:55.000Z
|
2022-03-26T19:46:42.000Z
|
data_preparation/main.py
|
Iddoyadlin/hebrew-w2v
|
c887d3dd3bc427289f1c07a4e71c1672604db92b
|
[
"Apache-2.0"
] | 1
|
2022-03-22T13:44:59.000Z
|
2022-03-22T14:13:32.000Z
|
data_preparation/main.py
|
Iddoyadlin/hebrew-w2v
|
c887d3dd3bc427289f1c07a4e71c1672604db92b
|
[
"Apache-2.0"
] | null | null | null |
from base import get_absolute_path
from create_corpus import download_corpus, corpus_to_text
import split_corpus
from postprocess_hebpipe import process_files
from tokenize_hebpipe import run_hebpipe_command
config = {
"WIKIFILE": "hewiki-latest-pages-articles.xml.bz2",
"CORPUS_OUTPUT": get_absolute_path("wiki.he.txt"),
"SPLIT_CORPUS_EVERY": 100,
"SPLIT_CORPUS_OUTPUT_FOLDER": get_absolute_path("output"),
"TOKENIZED_CORPUS_FOLDER": get_absolute_path("tokenized_output"),
"TOKENIZED_CORPUS": get_absolute_path("wiki_tokenized.he.txt")
}
if __name__=="__main__":
### download and save to file ###
WIKIFILE = config['WIKIFILE']
zipped_corpus_path = get_absolute_path(WIKIFILE)
raw_corpus_path = config['CORPUS_OUTPUT']
download_corpus(url=f"https://dumps.wikimedia.org/hewiki/latest/{WIKIFILE}", dump_path=zipped_corpus_path)
corpus_to_text(raw_corpus_path, zipped_corpus_path)
##################################
### split corpus, so it will be easier to process with hebpipe ###
SPLIT_EVERY = config['SPLIT_CORPUS_EVERY']
split_corpus_folder = config["SPLIT_CORPUS_OUTPUT_FOLDER"]
split_corpus.split(raw_corpus_path, split_corpus_folder, SPLIT_EVERY)
###################################################################
### get hebpipe tokenization command ###
tokenized_corpus_folder = config["TOKENIZED_CORPUS_FOLDER"]
run_hebpipe_command(split_corpus_folder, tokenized_corpus_folder)
#############################
### post process tokenized hebpipe output ###
tokenized_corpus = config["TOKENIZED_CORPUS"]
process_files(tokenized_corpus_folder, tokenized_corpus)
#############################################
| 43.025
| 110
| 0.687391
|
f61a8b8155bf7d2e89d88b68401c70d9bef70b3a
| 1,340
|
py
|
Python
|
terraform/terraform.py
|
Ersandeep977/Arth-2.0-Python-Menu-project
|
1f317e7d70bdabe04442f0ea2b7e1b318cb4ac59
|
[
"Apache-2.0"
] | null | null | null |
terraform/terraform.py
|
Ersandeep977/Arth-2.0-Python-Menu-project
|
1f317e7d70bdabe04442f0ea2b7e1b318cb4ac59
|
[
"Apache-2.0"
] | null | null | null |
terraform/terraform.py
|
Ersandeep977/Arth-2.0-Python-Menu-project
|
1f317e7d70bdabe04442f0ea2b7e1b318cb4ac59
|
[
"Apache-2.0"
] | null | null | null |
import os
import time
print("...."*80)
print("hello....WELCOME TO 'terraform WORLD'...by SANDEEP KUMAR Patel..")
print("...."*80)
while True:
print('''
Enter "yes" for run terraform command
or
Enter "No" for exit terraform
''')
ch = input('Enter your choice "yes/no" :- ')
if ch == 'yes':
print(" ..terrafrom init launch.... ")
print("#"*150)
time.sleep(1)
os.system('terraform init')
print("#"*150)
print("...."*80)
print("NEXT ..Show the validate.... ")
print("...."*80)
print("#"*150)
time.sleep(5)
os.system('terraform validate')
print("#"*150)
print("...."*80)
print("NEXT ..Show the plan.... ")
print("...."*80)
print("#"*150)
time.sleep(5)
os.system('terraform plan')
print("NEXT ..Show the apply.... ")
print("...."*80)
print("#"*150)
time.sleep(5)
os.system('terraform apply -auto-approve')
print("NEXT ..Show the destroy.... ")
print("...."*80)
print("#"*150)
time.sleep(5)
os.system('terraform destroy -auto-approve')
elif ch =='no':
        print('Thank you for using this application \n wait 3 seconds and press the "Enter" button')
time.sleep(3)
break
| 28.510638
| 94
| 0.500746
|
64f608df3bbc60bf25e2597d7c43e045c1b9da0c
| 9,293
|
py
|
Python
|
PyFlow/__init__.py
|
QuentinTournier40/AnimationFreeCAD
|
8eaff8356ec68b948a721b83a6888b652278db8a
|
[
"Apache-2.0"
] | null | null | null |
PyFlow/__init__.py
|
QuentinTournier40/AnimationFreeCAD
|
8eaff8356ec68b948a721b83a6888b652278db8a
|
[
"Apache-2.0"
] | null | null | null |
PyFlow/__init__.py
|
QuentinTournier40/AnimationFreeCAD
|
8eaff8356ec68b948a721b83a6888b652278db8a
|
[
"Apache-2.0"
] | 1
|
2022-02-03T08:03:30.000Z
|
2022-02-03T08:03:30.000Z
|
## Copyright 2015-2019 Ilgar Lunin, Pedro Cabrera
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
## http://www.apache.org/licenses/LICENSE-2.0
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
"""Common utils working with packages.
"""
# this line adds extension-packages not installed inside the PyFlow directory
__path__ = __import__('pkgutil').extend_path(__path__, __name__)
import importlib
import pkgutil
import collections
from copy import copy
import os
import json
from PyFlow.Packages import *
__all__ = [
"INITIALIZE",
"GET_PACKAGES",
"GET_PACKAGE_CHECKED",
"CreateRawPin",
"getPinDefaultValueByType",
"findPinClassByType",
"getRawNodeInstance",
"getAllPinClasses",
"getHashableDataTypes",
]
__PACKAGES = {}
__PACKAGE_PATHS = {}
__HASHABLE_TYPES = []
def GET_PACKAGES():
return __PACKAGES
def GET_PACKAGE_PATH(packageName):
if packageName in __PACKAGE_PATHS:
return __PACKAGE_PATHS[packageName]
def GET_PACKAGE_CHECKED(package_name):
assert package_name in __PACKAGES
return __PACKAGES[package_name]
def getAllPinClasses():
result = []
for package in list(__PACKAGES.values()):
result += list(package.GetPinClasses().values())
return result
def findPinClassByType(dataType):
for package_name, package in GET_PACKAGES().items():
pins = package.GetPinClasses()
if dataType in pins:
return pins[dataType]
return None
def getPinDefaultValueByType(dataType):
pin = findPinClassByType(dataType)
if pin:
return pin.pinDataTypeHint()[1]
return None
def getHashableDataTypes():
if len(__HASHABLE_TYPES) == 0:
for pin in getAllPinClasses():
t = pin.internalDataStructure()
if t is not type(None) and t is not None:
if isinstance(pin.internalDataStructure()(), collections.abc.Hashable):
__HASHABLE_TYPES.append(pin.__name__)
return copy(__HASHABLE_TYPES)
def getPinFromData(data):
for pin in [pin for pin in getAllPinClasses() if pin.IsValuePin()]:
pType = pin.internalDataStructure()
if data == pType:
return pin
def CreateRawPin(name, owningNode, dataType, direction, **kwds):
pinClass = findPinClassByType(dataType)
if pinClass is None:
return None
inst = pinClass(name, owningNode, direction, **kwds)
return inst
def getRawNodeInstance(nodeClassName, packageName=None, libName=None, **kwargs):
from PyFlow.Core.NodeBase import NodeBase
package = GET_PACKAGE_CHECKED(packageName)
# try find function first
if libName is not None:
for key, lib in package.GetFunctionLibraries().items():
foos = lib.getFunctions()
if libName == key and nodeClassName in foos:
return NodeBase.initializeFromFunction(foos[nodeClassName])
# try find node class
nodes = package.GetNodeClasses()
if nodeClassName in nodes:
return nodes[nodeClassName](nodeClassName, **kwargs)
# try find exported py nodes
packagePath = GET_PACKAGE_PATH(packageName)
pyNodesPath = os.path.join(packagePath, "PyNodes")
if os.path.exists(pyNodesPath):
for path, dirs, files in os.walk(pyNodesPath):
for pyNodeFileName in files:
pyNodeName, _ = os.path.splitext(pyNodeFileName)
if nodeClassName == pyNodeName:
pythonNode = getRawNodeInstance("pythonNode", "PyFlowBase")
pyNodeFullPath = os.path.join(path, pyNodeFileName)
with open(pyNodeFullPath, "r") as f:
pythonNode._nodeData = f.read()
return pythonNode
# try find exported compound nodes
compoundNodesPath = os.path.join(packagePath, "Compounds")
if os.path.exists(compoundNodesPath):
for path, dirs, files in os.walk(compoundNodesPath):
for compoundNodeFileName in files:
compoundNodeName, _ = os.path.splitext(compoundNodeFileName)
compoundNodeFullPath = os.path.join(path, compoundNodeFileName)
with open(compoundNodeFullPath, 'r') as f:
compoundData = json.load(f)
if compoundData["name"] == nodeClassName:
compoundNode = getRawNodeInstance("compound", "PyFlowBase")
compoundNodeFullPath = os.path.join(path, compoundNodeFileName)
with open(compoundNodeFullPath, "r") as f:
jsonString = f.read()
compoundNode._rawGraphJson = json.loads(jsonString)
return compoundNode
def INITIALIZE(additionalPackageLocations=[], software=""):
from PyFlow.UI.Tool import REGISTER_TOOL
from PyFlow.UI.Widgets.InputWidgets import REGISTER_UI_INPUT_WIDGET_PIN_FACTORY
from PyFlow.UI.Canvas.UINodeBase import REGISTER_UI_NODE_FACTORY
from PyFlow.UI.Canvas.UIPinBase import REGISTER_UI_PIN_FACTORY
from PyFlow import ConfigManager
from PySide2.QtWidgets import QMessageBox
packagePaths = Packages.__path__
def ensurePackagePath(inPath):
for subFolder in os.listdir(inPath):
subFolderPath = os.path.join(inPath, subFolder)
if os.path.isdir(subFolderPath):
if "PyFlow" in os.listdir(subFolderPath):
subFolderPath = os.path.join(subFolderPath, "PyFlow", "Packages")
if os.path.exists(subFolderPath):
return subFolderPath
return inPath
def recursePackagePaths(inPath):
paths = []
for subFolder in os.listdir(inPath):
subFolderPath = os.path.join(inPath, subFolder)
if os.path.isdir(subFolderPath):
if "PyFlow" in os.listdir(subFolderPath):
subFolderPath = os.path.join(subFolderPath, "PyFlow", "Packages")
if os.path.exists(subFolderPath):
paths.append(subFolderPath)
return paths
# check for additional package locations
if "PYFLOW_PACKAGES_PATHS" in os.environ:
delim = ';'
pathsString = os.environ["PYFLOW_PACKAGES_PATHS"]
# remove delimiters from right
pathsString = pathsString.rstrip(delim)
for packagesRoot in pathsString.split(delim):
if os.path.exists(packagesRoot):
paths = recursePackagePaths(packagesRoot)
packagePaths.extend(paths)
for packagePathId in range(len(additionalPackageLocations)):
packagePath = additionalPackageLocations[packagePathId]
packagePath = ensurePackagePath(packagePath)
additionalPackageLocations[packagePathId] = packagePath
packagePaths.extend(additionalPackageLocations)
for importer, modname, ispkg in pkgutil.iter_modules(packagePaths):
try:
if ispkg:
mod = importer.find_module(modname).load_module(modname)
package = getattr(mod, modname)()
__PACKAGES[modname] = package
__PACKAGE_PATHS[modname] = os.path.normpath(mod.__path__[0])
except Exception as e:
QMessageBox.critical(None, str("Fatal error"), "Error On Module %s :\n%s" % (modname, str(e)))
continue
registeredInternalPinDataTypes = set()
for name, package in __PACKAGES.items():
packageName = package.__class__.__name__
for node in package.GetNodeClasses().values():
node._packageName = packageName
for pin in package.GetPinClasses().values():
pin._packageName = packageName
if pin.IsValuePin():
internalType = pin.internalDataStructure()
if internalType in registeredInternalPinDataTypes:
raise Exception("Pin with {0} internal data type already been registered".format(internalType))
registeredInternalPinDataTypes.add(internalType)
uiPinsFactory = package.UIPinsFactory()
if uiPinsFactory is not None:
REGISTER_UI_PIN_FACTORY(packageName, uiPinsFactory)
uiPinInputWidgetsFactory = package.PinsInputWidgetFactory()
if uiPinInputWidgetsFactory is not None:
REGISTER_UI_INPUT_WIDGET_PIN_FACTORY(packageName, uiPinInputWidgetsFactory)
uiNodesFactory = package.UINodesFactory()
if uiNodesFactory is not None:
REGISTER_UI_NODE_FACTORY(packageName, uiNodesFactory)
for toolClass in package.GetToolClasses().values():
supportedSoftwares = toolClass.supportedSoftwares()
if "any" not in supportedSoftwares:
if software not in supportedSoftwares:
continue
REGISTER_TOOL(packageName, toolClass)
getHashableDataTypes()
| 36.876984
| 115
| 0.660605
|
0d09137eaa0f2bd55bfda14761e85ae5146d4642
| 3,493
|
py
|
Python
|
deepchem/molnet/load_function/qm9_datasets.py
|
rbharath/deepchem
|
8c3bcdd557abdbb380b64c2d460a73aff76fb303
|
[
"MIT"
] | 3
|
2017-10-07T13:48:48.000Z
|
2021-06-13T09:09:03.000Z
|
deepchem/molnet/load_function/qm9_datasets.py
|
rbharath/deepchem
|
8c3bcdd557abdbb380b64c2d460a73aff76fb303
|
[
"MIT"
] | 1
|
2017-08-29T22:08:24.000Z
|
2017-08-29T22:08:24.000Z
|
deepchem/molnet/load_function/qm9_datasets.py
|
rbharath/deepchem
|
8c3bcdd557abdbb380b64c2d460a73aff76fb303
|
[
"MIT"
] | 5
|
2017-03-19T01:48:13.000Z
|
2019-02-22T01:12:03.000Z
|
"""
qm9 dataset loader.
"""
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import os
import deepchem
def load_qm9(featurizer='CoulombMatrix', split='random', reload=True):
"""Load qm9 datasets."""
# Featurize qm9 dataset
print("About to featurize qm9 dataset.")
if "DEEPCHEM_DATA_DIR" in os.environ:
data_dir = os.environ["DEEPCHEM_DATA_DIR"]
else:
data_dir = "/tmp"
if reload:
save_dir = os.path.join(data_dir, "qm9/" + featurizer + "/" + split)
if featurizer in ['CoulombMatrix', 'BPSymmetryFunction']:
dataset_file = os.path.join(data_dir, "gdb9.sdf")
if not os.path.exists(dataset_file):
os.system(
'wget -P ' + data_dir +
' http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/gdb9.tar.gz '
)
os.system('tar -zxvf ' + os.path.join(data_dir, 'gdb9.tar.gz') + ' -C ' +
data_dir)
else:
dataset_file = os.path.join(data_dir, "qm9.csv")
if not os.path.exists(dataset_file):
os.system(
'wget -P ' + data_dir +
' http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/qm9.csv '
)
qm9_tasks = [
"A", "B", "C", "mu", "alpha", "homo", "lumo", "gap", "r2", "zpve", "cv",
"u0_atom", "u298_atom", "h298_atom", "g298_atom"
]
if reload:
loaded, all_dataset, transformers = deepchem.utils.save.load_dataset_from_disk(
save_dir)
if loaded:
return qm9_tasks, all_dataset, transformers
if featurizer == 'CoulombMatrix':
featurizer = deepchem.feat.CoulombMatrix(29)
loader = deepchem.data.SDFLoader(
tasks=qm9_tasks,
smiles_field="smiles",
mol_field="mol",
featurizer=featurizer)
elif featurizer == 'BPSymmetryFunction':
featurizer = deepchem.feat.BPSymmetryFunction(29)
loader = deepchem.data.SDFLoader(
tasks=qm9_tasks,
smiles_field="smiles",
mol_field="mol",
featurizer=featurizer)
else:
if featurizer == 'ECFP':
featurizer = deepchem.feat.CircularFingerprint(size=1024)
elif featurizer == 'GraphConv':
featurizer = deepchem.feat.ConvMolFeaturizer()
elif featurizer == 'Weave':
featurizer = deepchem.feat.WeaveFeaturizer()
elif featurizer == 'Raw':
featurizer = deepchem.feat.RawFeaturizer()
loader = deepchem.data.CSVLoader(
tasks=qm9_tasks, smiles_field="smiles", featurizer=featurizer)
dataset = loader.featurize(dataset_file)
splitters = {
'index': deepchem.splits.IndexSplitter(),
'random': deepchem.splits.RandomSplitter(),
'stratified': deepchem.splits.SingletaskStratifiedSplitter(
task_number=11)
}
splitter = splitters[split]
train_dataset, valid_dataset, test_dataset = splitter.train_valid_test_split(
dataset)
transformers = [
deepchem.trans.NormalizationTransformer(
transform_y=True, dataset=train_dataset)
]
for transformer in transformers:
train_dataset = transformer.transform(train_dataset)
valid_dataset = transformer.transform(valid_dataset)
test_dataset = transformer.transform(test_dataset)
if reload:
deepchem.utils.save.save_dataset_to_disk(
save_dir, train_dataset, valid_dataset, test_dataset, transformers)
return qm9_tasks, (train_dataset, valid_dataset, test_dataset), transformers
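# Illustrative usage sketch (editor's addition, not part of the original module).
# The loader downloads the raw data on first use (into $DEEPCHEM_DATA_DIR or
# /tmp), so the call below assumes network access.
#
#     >>> tasks, (train, valid, test), transformers = load_qm9(
#     ...     featurizer='ECFP', split='random', reload=True)
#     >>> len(tasks)
#     15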
| 34.584158
| 89
| 0.65846
|
478a8d8800d49d7dcfd43b464684c7df83569538
| 565
|
py
|
Python
|
tests/test_pymd_helper.py
|
tbnorth/pymd_helper
|
088583bc08e71056387fdcba3c2cbeb37b7b336e
|
[
"MIT"
] | null | null | null |
tests/test_pymd_helper.py
|
tbnorth/pymd_helper
|
088583bc08e71056387fdcba3c2cbeb37b7b336e
|
[
"MIT"
] | null | null | null |
tests/test_pymd_helper.py
|
tbnorth/pymd_helper
|
088583bc08e71056387fdcba3c2cbeb37b7b336e
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
"""Tests for `pymd_helper` package."""
import pytest
from pymd_helper import pymd_helper
@pytest.fixture
def response():
"""Sample pytest fixture.
See more at: http://doc.pytest.org/en/latest/fixture.html
"""
# import requests
# return requests.get('https://github.com/audreyr/cookiecutter-pypackage')
def test_content(response):
"""Sample pytest test function with the pytest fixture as an argument."""
# from bs4 import BeautifulSoup
# assert 'GitHub' in BeautifulSoup(response.content).title.string
| 22.6
| 78
| 0.713274
|
881c0d0c5b0205a6f68d642f89fce61fe1c000d3
| 207
|
py
|
Python
|
mposp_root/wordcount/urls.py
|
hanshendrickx/gitDP002
|
9703791a2270495830142917a7b12ba1eb2ca3f5
|
[
"CC0-1.0"
] | null | null | null |
mposp_root/wordcount/urls.py
|
hanshendrickx/gitDP002
|
9703791a2270495830142917a7b12ba1eb2ca3f5
|
[
"CC0-1.0"
] | null | null | null |
mposp_root/wordcount/urls.py
|
hanshendrickx/gitDP002
|
9703791a2270495830142917a7b12ba1eb2ca3f5
|
[
"CC0-1.0"
] | null | null | null |
from django.urls import path
from . import views
urlpatterns = [
path('', views.homepage, name='home'),
path('count/', views.count, name='count'),
path('about/', views.about, name='about'),
]
| 18.818182
| 46
| 0.63285
|
8f9f6448f0f3e948b6ca1d1b6676bb77fe9d5ae7
| 283
|
py
|
Python
|
simple_3dviz/io/utils.py
|
nazcaspider/simple-3dviz
|
3c40007259a1f754311623f74d24b06f7b98be14
|
[
"MIT"
] | 66
|
2020-03-31T14:33:20.000Z
|
2022-03-25T15:48:06.000Z
|
simple_3dviz/io/utils.py
|
nazcaspider/simple-3dviz
|
3c40007259a1f754311623f74d24b06f7b98be14
|
[
"MIT"
] | 6
|
2020-04-05T18:20:10.000Z
|
2021-12-17T19:54:54.000Z
|
simple_3dviz/io/utils.py
|
nazcaspider/simple-3dviz
|
3c40007259a1f754311623f74d24b06f7b98be14
|
[
"MIT"
] | 9
|
2020-04-03T14:31:28.000Z
|
2022-03-15T19:16:41.000Z
|
def _get_file(filename, mode="r"):
if isinstance(filename, str):
return open(filename, mode)
return filename
def _close_file(filename, f):
"""Close the file if filename is a string."""
if hasattr(f, "close") and isinstance(filename, str):
f.close()
| 25.727273
| 57
| 0.646643
|
7c0ce5726ecb942e126237485c03180b8e3f7b26
| 585
|
py
|
Python
|
src/cltk/phonology/enm/phonology.py
|
yelircaasi/cltk
|
1583aa24682543a1f33434a21918f039ca27d60c
|
[
"MIT"
] | 757
|
2015-11-20T00:58:52.000Z
|
2022-03-31T06:34:24.000Z
|
src/cltk/phonology/enm/phonology.py
|
yelircaasi/cltk
|
1583aa24682543a1f33434a21918f039ca27d60c
|
[
"MIT"
] | 950
|
2015-11-17T05:38:29.000Z
|
2022-03-14T16:09:34.000Z
|
src/cltk/phonology/enm/phonology.py
|
yelircaasi/cltk
|
1583aa24682543a1f33434a21918f039ca27d60c
|
[
"MIT"
] | 482
|
2015-11-22T18:13:02.000Z
|
2022-03-20T21:22:02.000Z
|
"""Middle English phonology tools
"""
from typing import List
from cltk.phonology.syllabify import Syllabifier
__author__ = ["Clément Besnier <clem@clementbesnier.fr>"]
class MiddleEnglishSyllabifier:
"""
Middle English syllabifier
"""
def __init__(self):
self.syllabifier = Syllabifier(language="enm")
def syllabify(self, word: str) -> List[str]:
return self.syllabifier.syllabify(word)
def __repr__(self):
return f"<MiddleEnglishSyllabifier>"
def __call__(self, word: str) -> List[str]:
return self.syllabify(word)
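# Illustrative usage sketch (editor's addition, not part of the original module).
# The class delegates to the generic cltk Syllabifier configured for "enm", so
# the cltk models for Middle English must be available; "whilom" is an arbitrary
# Middle English word and the exact syllable split depends on those models.
#
#     >>> syllabifier = MiddleEnglishSyllabifier()
#     >>> syllables = syllabifier("whilom")   # __call__ forwards to syllabify()
#     >>> isinstance(syllables, list)
#     True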
| 22.5
| 57
| 0.68547
|
3b2f848b62343d06f2496cf37e4adcdd65b261e5
| 3,834
|
py
|
Python
|
PyForge/FolderTree.py
|
sgthakare20/Pyforge
|
e3ce15586ccc07f39e0faf18885b472baa60ff5d
|
[
"MIT"
] | 1
|
2020-04-13T13:02:43.000Z
|
2020-04-13T13:02:43.000Z
|
PyForge/FolderTree.py
|
sgthakare20/Pyforge
|
e3ce15586ccc07f39e0faf18885b472baa60ff5d
|
[
"MIT"
] | null | null | null |
PyForge/FolderTree.py
|
sgthakare20/Pyforge
|
e3ce15586ccc07f39e0faf18885b472baa60ff5d
|
[
"MIT"
] | 2
|
2021-06-22T14:39:53.000Z
|
2021-06-22T15:28:21.000Z
|
# -*- coding: utf-8 -*-
"""Module containing classes to implementing and navigating data structures from Autodesk Forge BIM360 platform."""
from PyForge.ForgeFolders import FoldersApi
class FolderTree():
"""This class sets up a simple folder tree with children and parent folders linked to the BIM360 API."""
def __init__(self, folder, parent=None, children=None):
"""
        Initialize a FolderTree instance with a given BIM360 Api folder object, optionally attaching the parent and the folder's children.
Args:
folder (dict(JsonApiObject)): The BIM360 Api folder object this instance is linked to in the form of a dict.
parent (FolderTree, optional): The FolderTree object that is this instance's parent. Defaults to None.
children (list(FolderTree), optional): List of FolderTree objects that are this instance's children. Defaults to None.
Raises:
ValueError: Raised if the given folder argument is of NoneType.
Returns:
None.
"""
if folder is not None:
self.folder = folder
else:
raise ValueError("FolderTree needs a folder object to be initialized")
self.parent = parent
if children is None:
self.children = []
else:
self.children = children
def get_children(self, token, project_id):
"""
Get the children folder JsonApi objects of this FolderTree instance in the form of a list of dicts.
Args:
token (str): Authentication token for Autodesk Forge API.
project_id (str): The project id for the project the folder is in.
Raises:
ValueError: Is raised if token or project_id are NoneType.
Returns:
children_folders (list(dict(JsonApiObject))): Children folders of this FolderTree instance in the form of a list of dicts
containing JsonApiObjects.
"""
if token is None:
raise ValueError("Please give a authorization token.")
if project_id is None:
raise ValueError("Please enter a project id.")
type_filter = 'folders'
folders_api = FoldersApi(token)
folder_data, folder_versions = folders_api.get_folder_contents(project_id, self.folder['id'], type_filter)
children_folders = []
for data in folder_data:
children_folders.append(data)
return children_folders
def populate(self, token, project_id):
"""
        Populate this FolderTree instance recursively down all of its children in the Autodesk BIM360 folder structure.
Args:
token (str): Authentication token for Autodesk Forge API.
project_id (str): The project id for the project the folder is in.
Returns:
None.
"""
children_list = self.get_children(token, project_id)
for child in children_list:
new_child = FolderTree(child, self)
self.children.append(new_child)
new_child.populate(token, project_id)
def search_tree(self, folder_name):
"""
        Search for the FolderTree instance with the given name among this FolderTree's children, recursively.
Args:
folder_name (str): The name of the Autodesk BIM360 folder to be searched for.
Returns:
FolderTree: FolderTree instance with the given name.
"""
folder_tree = None
for child in self.children:
if child.folder['attributes']['name'] == folder_name:
return child
else:
folder_tree = child.search_tree(folder_name)
if folder_tree is not None:
return folder_tree
return folder_tree
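# Illustrative usage sketch (editor's addition, not part of the original module).
# `token`, `project_id` and `root_folder` are placeholders: the folder dict is
# whatever FoldersApi returned for the project's root folder, and "Plans" is an
# arbitrary folder name to search for.
#
#     >>> tree = FolderTree(root_folder)
#     >>> tree.populate(token, project_id)      # walk the whole BIM360 folder tree
#     >>> plans = tree.search_tree("Plans")     # FolderTree instance or None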
| 35.174312
| 140
| 0.632499
|
2f9eb89b67bbeedc992b6d393f494d616767af45
| 1,790
|
py
|
Python
|
novaclient/v1_1/fping.py
|
CiscoSystems/python-novaclient
|
df8db07495984c6158e4c1e1d218c10594439c5e
|
[
"Apache-1.1"
] | null | null | null |
novaclient/v1_1/fping.py
|
CiscoSystems/python-novaclient
|
df8db07495984c6158e4c1e1d218c10594439c5e
|
[
"Apache-1.1"
] | null | null | null |
novaclient/v1_1/fping.py
|
CiscoSystems/python-novaclient
|
df8db07495984c6158e4c1e1d218c10594439c5e
|
[
"Apache-1.1"
] | null | null | null |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Fping interface.
"""
from novaclient import base
class Fping(base.Resource):
"""
A server to fping.
"""
HUMAN_ID = True
def __repr__(self):
return "<Fping: %s>" % self.id
class FpingManager(base.ManagerWithFind):
"""
Manage :class:`Fping` resources.
"""
resource_class = Fping
def list(self, all_tenants=False, include=[], exclude=[]):
"""
Fping all servers.
:rtype: list of :class:`Fping`.
"""
params = []
if all_tenants:
params.append("all_tenants=1")
if include:
params.append("include=%s" % ",".join(include))
elif exclude:
params.append("exclude=%s" % ",".join(exclude))
uri = "/os-fping"
if params:
uri = "%s?%s" % (uri, "&".join(params))
return self._list(uri, "servers")
def get(self, server):
"""
Fping a specific server.
        :param server: ID of the server to fping.
:rtype: :class:`Fping`
"""
return self._get("/os-fping/%s" % base.getid(server), "server")
| 26.716418
| 78
| 0.603352
|
74fb25fb5d5b051dd38c0964cb494ea2efaa1e5c
| 2,046
|
py
|
Python
|
geekcomputers/ping_subnet.py
|
enyaooshigaolo/MyPython
|
67dc3f6ff596545ab70e11a573a6031232128711
|
[
"Apache-2.0"
] | 12
|
2018-05-11T22:35:32.000Z
|
2021-04-07T13:44:51.000Z
|
ping_subnet.py
|
rajasree-r/Python
|
3d4ee06249cf5697c72bae4e2732e814dde97dec
|
[
"MIT"
] | 1
|
2018-11-15T01:54:25.000Z
|
2018-11-15T01:54:25.000Z
|
ping_subnet.py
|
rajasree-r/Python
|
3d4ee06249cf5697c72bae4e2732e814dde97dec
|
[
"MIT"
] | 3
|
2020-04-19T19:39:34.000Z
|
2022-03-18T19:21:16.000Z
|
# Script Name : ping_subnet.py
# Author : Craig Richards
# Created : 12th January 2012
# Last Modified :
# Version : 1.0
# Modifications :
# Description : After supplying the first 3 octets it will scan the final range for available addresses
import os # Load the Library Module
import subprocess # Load the Library Module
import sys # Load the Library Module
filename = sys.argv[0] # Sets a variable for the script name
if '-h' in sys.argv or '--h' in sys.argv or '-help' in sys.argv or '--help' in sys.argv: # Help Menu if called
print '''
You need to supply the first octets of the address Usage : ''' + filename + ''' 111.111.111 '''
sys.exit(0)
else:
if (len(sys.argv) < 2): # If no arguments are passed then display the help and instructions on how to run the script
sys.exit (' You need to supply the first octets of the address Usage : ' + filename + ' 111.111.111')
subnet = sys.argv[1] # Set the variable subnet as the three octets you pass it
if os.name == "posix": # Check the os, if it's linux then
myping = "ping -c 2 " # This is the ping command
elif os.name in ("nt", "dos", "ce"): # Check the os, if it's windows then
myping = "ping -n 2 " # This is the ping command
f = open('ping_' + subnet + '.log', 'w') # Open a logfile
for ip in range(2,255): # Set the ip variable for the range of numbers
ret = subprocess.call(myping + str(subnet) + "." + str(ip) ,
shell=True, stdout=f, stderr=subprocess.STDOUT) # Run the command pinging the servers
if ret == 0: # Depending on the response
f.write (subnet + "." + str(ip) + " is alive" + "\n") # Write out that you can receive a reponse
else:
f.write (subnet + "." + str(ip) + " did not respond" + "\n") # Write out you can't reach the box
| 49.902439
| 140
| 0.57087
|
66dd4d47e5284c2ca3afd1a7fc97ab195c4ca668
| 2,182
|
py
|
Python
|
pydesktime/desktime.py
|
utek/pydesktime
|
69563b66034920fb1f7f6c16f2ddaee82c92f5d0
|
[
"MIT"
] | null | null | null |
pydesktime/desktime.py
|
utek/pydesktime
|
69563b66034920fb1f7f6c16f2ddaee82c92f5d0
|
[
"MIT"
] | null | null | null |
pydesktime/desktime.py
|
utek/pydesktime
|
69563b66034920fb1f7f6c16f2ddaee82c92f5d0
|
[
"MIT"
] | null | null | null |
import requests
import datetime
import calendar
class DeskTime(object):
MAIN_URL = 'https://desktime.com/api/2/json/?{params}'
def __init__(self, app_key, username, password):
self.api_key = self._login(app_key, username, password)
if self.api_key is None:
raise Exception("Authorization error")
pass
def _login(self, app_key, username, password):
auth = 'appkey={appkey}&action={action}&email={email}&password={password}'
auth = auth.format(appkey=app_key, action='authorize',
email=username, password=password)
auth_url = self.MAIN_URL.format(params=auth)
res = requests.get(auth_url)
data = res.json()
if not data.get(u'error', None):
return data.get('api_key', None)
return None
    def getAllDataForDate(self, date=None):
        # Resolve the default per call; a datetime default in the signature
        # would be evaluated only once, at import time.
        if date is None:
            date = datetime.datetime.now().date()
        employees = 'apikey={apikey}&action=employees&date={date}'
employees = employees.format(apikey=self.api_key, action='employees',
date=date.isoformat())
url = self.MAIN_URL.format(params=employees)
res = requests.get(url)
data = res.json()
if not data.get('error', None):
return data
return None
def getMonth(self, year, month, with_weekends=False):
monthrange = calendar.monthrange(year, month)
today = datetime.datetime.now().date()
data = []
resdata = {}
for dayindex in range(monthrange[1]):
day = dayindex + 1
date = datetime.date(year, month, day)
if date > today and date.year == today.year and today.month == date.month:
continue
elif date > today:
return None
if not with_weekends and date.weekday() in (5, 6):
continue
data.append(self.getAllDataForDate(date))
for elem in data:
resdata[elem.get('date')] = elem.get('employees')
return data
def getEmployee(self, employee_id):
raise(NotImplementedError)
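# Illustrative usage sketch (editor's addition, not part of the original module).
# The credentials are placeholders; the constructor raises if DeskTime does not
# return an api_key for them.
#
#     >>> dt = DeskTime("my-app-key", "user@example.com", "secret")
#     >>> today = dt.getAllDataForDate()                  # defaults to today's date
#     >>> january = dt.getMonth(2020, 1, with_weekends=False)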
| 37.62069
| 87
| 0.57516
|
3c22c66a79ea06a0ff9d8b5a4e2777efa04935d4
| 2,894
|
py
|
Python
|
pyxplr/explore_missing.py
|
UBC-MDS/pyxplr
|
7e0d94316bce7b3ed343a6a6b5c3afce5c71afd8
|
[
"MIT"
] | 1
|
2020-02-25T23:47:18.000Z
|
2020-02-25T23:47:18.000Z
|
pyxplr/explore_missing.py
|
UBC-MDS/pyxplr
|
7e0d94316bce7b3ed343a6a6b5c3afce5c71afd8
|
[
"MIT"
] | 46
|
2020-02-25T23:43:52.000Z
|
2020-03-27T01:24:24.000Z
|
pyxplr/explore_missing.py
|
UBC-MDS/pyxplr
|
7e0d94316bce7b3ed343a6a6b5c3afce5c71afd8
|
[
"MIT"
] | null | null | null |
"""
Created on February 28, 2020
@author: Braden Tam
Implementation of the explore_missing function in the pyxplr package.
"""
import pandas as pd
import numpy as np
def explore_missing(df, num_rows=0, df_type="location"):
"""
explore_missing will identify missing observations within df. It will
return 1 of 2 tables: (location) 1 table of the exact location in the
dataframe where there is missing data or (count) another table showing
    how many observations are missing and the proportion of how much data is
missing for each feature.
Arguments
---------
df : pandas.DataFrame
The target dataframe to explore
num_rows : integer
The number of rows above and below the missing value to output
df_type: str
The desired type of output (location or count)
Returns
-------
type : pandas.DataFrame
The resultant dataframe
Raises
------
ValueError
num_rows must be a positive integer
num_rows must be of type int
There are no missing values in the dataframe
TypeError
Data must be a pandas DataFrame
NameError
Type must be either "count" or "location"
Examples
--------
>>> test = pd.DataFrame({'col1': [1, 2, None, 3, 4],
>>> 'col2': [2, 3, 4, 5, 6]})
>>> explore_missing(test, num_rows = 1)
>>> explore_missing(test, df_type = "count")
"""
if not isinstance(df, pd.DataFrame):
raise TypeError("Data must be a pandas DataFrame")
if not (df_type == "count") | (df_type == "location"):
raise NameError('Type must be either "count" or "location"')
if not isinstance(num_rows, int):
raise ValueError("num_rows must be of type int")
if num_rows < 0:
raise ValueError("num_rows must be a positive integer")
columns_empty_string = np.where(df.applymap(lambda x: x == ''))[0]
columns_nan = np.where(df.isnull())[0]
indices = np.append(columns_empty_string, columns_nan)
if len(indices) == 0:
raise ValueError("There are no missing values in the dataframe")
new_indices = np.empty(0)
for index in indices:
for num in np.array(range(1, num_rows + 1)):
new_indices = np.append(new_indices, index - num)
new_indices = np.append(new_indices, index + num)
rows = np.unique(np.append(new_indices, indices))
# avoids index error
rows = rows[(rows >= 0) & (rows < len(df))]
# number of missing values
total = np.sum(df.applymap(lambda x: x == '')) + np.sum(df.isnull())
if df_type == "count":
return pd.DataFrame({'Number of missing values': total,
'Proportion of missing data': total
/ len(df)})
# location of missing data
if df_type == "location":
return pd.DataFrame(df.iloc[rows])
| 31.802198
| 75
| 0.62405
|