| hexsha (stringlengths 40..40) | size (int64 4..1.02M) | ext (stringclasses 8 values) | lang (stringclasses 1 value) | max_stars_repo_path (stringlengths 4..209) | max_stars_repo_name (stringlengths 5..121) | max_stars_repo_head_hexsha (stringlengths 40..40) | max_stars_repo_licenses (listlengths 1..10) | max_stars_count (int64 1..191k ⌀) | max_stars_repo_stars_event_min_datetime (stringlengths 24..24 ⌀) | max_stars_repo_stars_event_max_datetime (stringlengths 24..24 ⌀) | max_issues_repo_path (stringlengths 4..209) | max_issues_repo_name (stringlengths 5..121) | max_issues_repo_head_hexsha (stringlengths 40..40) | max_issues_repo_licenses (listlengths 1..10) | max_issues_count (int64 1..67k ⌀) | max_issues_repo_issues_event_min_datetime (stringlengths 24..24 ⌀) | max_issues_repo_issues_event_max_datetime (stringlengths 24..24 ⌀) | max_forks_repo_path (stringlengths 4..209) | max_forks_repo_name (stringlengths 5..121) | max_forks_repo_head_hexsha (stringlengths 40..40) | max_forks_repo_licenses (listlengths 1..10) | max_forks_count (int64 1..105k ⌀) | max_forks_repo_forks_event_min_datetime (stringlengths 24..24 ⌀) | max_forks_repo_forks_event_max_datetime (stringlengths 24..24 ⌀) | content (stringlengths 4..1.02M) | avg_line_length (float64 1.07..66.1k) | max_line_length (int64 4..266k) | alphanum_fraction (float64 0.01..1) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
8122f2b68bfa38f9ba76a6e17b31b28ed8249abd
| 3,718
|
py
|
Python
|
stable_baselines/common/mpi_running_mean_std.py
|
allogn/stable-baselines
|
ea1162638d4ab1df7deeca61dac3308054931233
|
[
"MIT"
] | null | null | null |
stable_baselines/common/mpi_running_mean_std.py
|
allogn/stable-baselines
|
ea1162638d4ab1df7deeca61dac3308054931233
|
[
"MIT"
] | null | null | null |
stable_baselines/common/mpi_running_mean_std.py
|
allogn/stable-baselines
|
ea1162638d4ab1df7deeca61dac3308054931233
|
[
"MIT"
] | null | null | null |
import mpi4py.MPI
import tensorflow as tf
import numpy as np
import stable_baselines.common.tf_util as tf_util
class RunningMeanStd(object):
def __init__(self, epsilon=1e-2, shape=()):
"""
calculates the running mean and std of a data stream
https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Parallel_algorithm
:param epsilon: (float) helps with arithmetic issues
:param shape: (tuple) the shape of the data stream's output
"""
self._sum = tf.get_variable(
dtype=tf.float64,
shape=shape,
initializer=tf.constant_initializer(0.0),
name="runningsum", trainable=False)
self._sumsq = tf.get_variable(
dtype=tf.float64,
shape=shape,
initializer=tf.constant_initializer(epsilon),
name="runningsumsq", trainable=False)
self._count = tf.get_variable(
dtype=tf.float64,
shape=(),
initializer=tf.constant_initializer(epsilon),
name="count", trainable=False)
self.shape = shape
self.mean = tf.cast(self._sum / self._count, tf.float32)
self.std = tf.sqrt(tf.maximum(tf.cast(self._sumsq / self._count, tf.float32) - tf.square(self.mean),
1e-2))
newsum = tf.placeholder(shape=self.shape, dtype=tf.float64, name='sum')
newsumsq = tf.placeholder(shape=self.shape, dtype=tf.float64, name='var')
newcount = tf.placeholder(shape=[], dtype=tf.float64, name='count')
self.incfiltparams = tf_util.function([newsum, newsumsq, newcount], [],
updates=[tf.assign_add(self._sum, newsum),
tf.assign_add(self._sumsq, newsumsq),
tf.assign_add(self._count, newcount)])
def update(self, data):
"""
update the running mean and std
:param data: (np.ndarray) the data
"""
data = data.astype('float64')
data_size = int(np.prod(self.shape))
totalvec = np.zeros(data_size * 2 + 1, 'float64')
addvec = np.concatenate([data.sum(axis=0).ravel(), np.square(data).sum(axis=0).ravel(),
np.array([len(data)], dtype='float64')])
mpi4py.MPI.COMM_WORLD.Allreduce(addvec, totalvec, op=mpi4py.MPI.SUM)
self.incfiltparams(totalvec[0: data_size].reshape(self.shape),
totalvec[data_size: 2 * data_size].reshape(self.shape), totalvec[2 * data_size])
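# Editor's illustrative sketch (not part of the original file): one way to use the
# class above to normalize a batch of observations. `observations` is a hypothetical
# np.ndarray of shape (batch,) + shape; everything else is defined in this module.
# It runs inside a temporary session via the same decorator used by test_dist below.
@tf_util.in_session
def example_normalize(observations):
    rms = RunningMeanStd(epsilon=1e-2, shape=observations.shape[1:])
    tf_util.initialize()
    rms.update(observations)
    # mean and std are TF tensors; evaluate them in the active session
    return (observations - rms.mean.eval()) / rms.std.eval()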
@tf_util.in_session
def test_dist():
"""
test the running mean std
"""
# np.random.seed(0)
p_1, p_2, p_3 = (np.random.randn(3, 1), np.random.randn(4, 1), np.random.randn(5, 1))
q_1, q_2, q_3 = (np.random.randn(6, 1), np.random.randn(7, 1), np.random.randn(8, 1))
comm = mpi4py.MPI.COMM_WORLD
assert comm.Get_size() == 2
if comm.Get_rank() == 0:
x_1, x_2, x_3 = p_1, p_2, p_3
elif comm.Get_rank() == 1:
x_1, x_2, x_3 = q_1, q_2, q_3
else:
assert False
rms = RunningMeanStd(epsilon=0.0, shape=(1,))
tf_util.initialize()
rms.update(x_1)
rms.update(x_2)
rms.update(x_3)
bigvec = np.concatenate([p_1, p_2, p_3, q_1, q_2, q_3])
def checkallclose(var_1, var_2):
print(var_1, var_2)
return np.allclose(var_1, var_2)
assert checkallclose(
bigvec.mean(axis=0),
rms.mean.eval(),
)
assert checkallclose(
bigvec.std(axis=0),
rms.std.eval(),
)
if __name__ == "__main__":
# Run with mpirun -np 2 python <filename>
test_dist()
| 35.075472
| 108
| 0.579613
|
3a824a04a6e6edae3a6bf723ba9c7ebdf14b9e43
| 3,436
|
py
|
Python
|
contrib/performance/loadtest/trafficlogger.py
|
eventable/CalendarServer
|
384444edb1966b530bc391789afbe3fb9cd6fd3e
|
[
"Apache-2.0"
] | 1
|
2017-02-18T19:22:19.000Z
|
2017-02-18T19:22:19.000Z
|
contrib/performance/loadtest/trafficlogger.py
|
eventable/CalendarServer
|
384444edb1966b530bc391789afbe3fb9cd6fd3e
|
[
"Apache-2.0"
] | null | null | null |
contrib/performance/loadtest/trafficlogger.py
|
eventable/CalendarServer
|
384444edb1966b530bc391789afbe3fb9cd6fd3e
|
[
"Apache-2.0"
] | null | null | null |
##
# Copyright (c) 2011-2015 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##
"""
This module implements a reactor wrapper which will cause all traffic on
connections set up using that reactor to be logged.
"""
__all__ = ['loggedReactor']
from weakref import ref
from StringIO import StringIO
from collections import namedtuple
from zope.interface import providedBy
from twisted.python.components import proxyForInterface
from twisted.internet.interfaces import IReactorTCP
from twisted.protocols.policies import WrappingFactory, TrafficLoggingProtocol
logstate = namedtuple('logstate', 'active finished')
def loggedReactor(reactor):
"""
Construct and return a wrapper around the given C{reactor} which provides
all of the same interfaces, but which will log all traffic over outgoing
TCP connections it establishes.
"""
bases = []
for iface in providedBy(reactor):
if iface is IReactorTCP:
bases.append(_TCPTrafficLoggingReactor)
else:
bases.append(proxyForInterface(iface, '_reactor'))
if bases:
return type('(Logged Reactor)', tuple(bases), {})(reactor)
return reactor
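# Editor's illustrative sketch (not part of the original module): wrap a reactor,
# connect through it, and read back the captured traffic later. `someClientFactory`
# is a placeholder for any Twisted ClientFactory; the function is not called here.
def _example_usage(someClientFactory):
    from twisted.internet import reactor
    logged = loggedReactor(reactor)
    logged.connectTCP("example.com", 80, someClientFactory)
    # Once connections have run, getLogFiles() exposes the StringIO buffers
    # holding the logged traffic.
    state = logged.getLogFiles()
    return [logfile.getvalue() for logfile in state.finished]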
class _TCPTrafficLoggingReactor(proxyForInterface(IReactorTCP, '_reactor')):
"""
A mixin for a reactor wrapper which defines C{connectTCP} so as to cause
traffic to be logged.
"""
_factories = None
@property
def factories(self):
if self._factories is None:
self._factories = []
return self._factories
def getLogFiles(self):
active = []
finished = []
for factoryref in self.factories:
factory = factoryref()
active.extend(factory.logs)
finished.extend(factory.finishedLogs)
return logstate(active, finished)
def connectTCP(self, host, port, factory, *args, **kwargs):
wrapper = _TrafficLoggingFactory(factory)
self.factories.append(ref(wrapper, self.factories.remove))
return self._reactor.connectTCP(
host, port, wrapper, *args, **kwargs)
class _TrafficLoggingFactory(WrappingFactory):
"""
A wrapping factory which applies L{TrafficLoggingProtocolWrapper}.
"""
LOGFILE_LIMIT = 20
protocol = TrafficLoggingProtocol
noisy = False
def __init__(self, wrappedFactory):
WrappingFactory.__init__(self, wrappedFactory)
self.logs = []
self.finishedLogs = []
def unregisterProtocol(self, protocol):
WrappingFactory.unregisterProtocol(self, protocol)
self.logs.remove(protocol.logfile)
self.finishedLogs.append(protocol.logfile)
del self.finishedLogs[:-self.LOGFILE_LIMIT]
def buildProtocol(self, addr):
logfile = StringIO()
self.logs.append(logfile)
return self.protocol(
self, self.wrappedFactory.buildProtocol(addr), logfile, None, 0)
| 29.367521
| 78
| 0.697031
|
b94e379d662ad93ed81e27699a584bfb7e876480
| 2,859
|
py
|
Python
|
tests/terminals.py
|
vovanz/invoke
|
ba1f2742837d51f04e229a8d1f8c0d4d132833fc
|
[
"BSD-2-Clause"
] | null | null | null |
tests/terminals.py
|
vovanz/invoke
|
ba1f2742837d51f04e229a8d1f8c0d4d132833fc
|
[
"BSD-2-Clause"
] | null | null | null |
tests/terminals.py
|
vovanz/invoke
|
ba1f2742837d51f04e229a8d1f8c0d4d132833fc
|
[
"BSD-2-Clause"
] | null | null | null |
# Skip on Windows CI, it may blow up on one of these
from invoke.terminals import WINDOWS
import pytest
pytestmark = pytest.mark.skipif(
WINDOWS, reason="Low level terminal tests only work well on POSIX"
)
import fcntl
import termios
from mock import Mock, patch
from pytest import skip
from invoke.terminals import pty_size, bytes_to_read
class platform:
class pty_size:
@patch('fcntl.ioctl', wraps=fcntl.ioctl)
def calls_fcntl_with_TIOCGWINSZ(self, ioctl):
# Test the default (Unix) implementation because that's all we
# can realistically do here.
pty_size()
assert ioctl.call_args_list[0][0][1] == termios.TIOCGWINSZ
@patch('sys.stdout')
@patch('fcntl.ioctl')
def defaults_to_80x24_when_stdout_not_a_tty(self, ioctl, stdout):
# Make sure stdout acts like a real stream (means failure is
# more obvious)
stdout.fileno.return_value = 1
# Ensure it fails the isatty() test too
stdout.isatty.return_value = False
# Test
assert pty_size() == (80, 24)
@patch('sys.stdout')
@patch('fcntl.ioctl')
def uses_default_when_stdout_lacks_fileno(self, ioctl, stdout):
# i.e. when accessing it throws AttributeError
stdout.fileno.side_effect = AttributeError
assert pty_size() == (80, 24)
@patch('sys.stdout')
@patch('fcntl.ioctl')
def uses_default_when_stdout_triggers_ioctl_error(
self, ioctl, stdout
):
ioctl.side_effect = TypeError
assert pty_size() == (80, 24)
class bytes_to_read_:
@patch('invoke.terminals.fcntl')
def returns_1_when_stream_lacks_fileno(self, fcntl):
# A fileno() that exists but returns a non-int is a quick way
# to fail util.has_fileno().
assert bytes_to_read(Mock(fileno=lambda: None)) == 1
assert not fcntl.ioctl.called
@patch('invoke.terminals.fcntl')
def returns_1_when_stream_has_fileno_but_is_not_a_tty(self, fcntl):
# It blows up otherwise anyways (struct.unpack gets mad because
# result isn't a string of the right length) but let's make
# ioctl die similarly to the real world case we're testing for
# here (#425)
fcntl.ioctl.side_effect = IOError(
"Operation not supported by device"
)
stream = Mock(
isatty=lambda: False,
fileno=lambda: 17, # arbitrary
)
assert bytes_to_read(stream) == 1
assert not fcntl.ioctl.called
def returns_FIONREAD_result_when_stream_is_a_tty(self):
skip()
def returns_1_on_windows(self):
skip()
| 35.296296
| 75
| 0.613501
|
524335bc558a1af0298af9bfc1637adf1e41a3d3
| 1,181
|
py
|
Python
|
tools/perf/measurements/polymer_load.py
|
aranajhonny/chromium
|
caf5bcb822f79b8997720e589334266551a50a13
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 1
|
2019-01-16T03:57:39.000Z
|
2019-01-16T03:57:39.000Z
|
tools/perf/measurements/polymer_load.py
|
aranajhonny/chromium
|
caf5bcb822f79b8997720e589334266551a50a13
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 1
|
2018-02-10T21:00:08.000Z
|
2018-03-20T05:09:50.000Z
|
tools/perf/measurements/polymer_load.py
|
aranajhonny/chromium
|
caf5bcb822f79b8997720e589334266551a50a13
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null |
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page
from telemetry.page import page_measurement
from telemetry.value import scalar
class PageForPolymerLoad(page.Page):
def __init__(self, url, page_set):
super(PageForPolymerLoad, self).__init__(
url=url,
page_set=page_set)
self.script_to_evaluate_on_commit = '''
document.addEventListener("polymer-ready", function() {
var unused = document.body.offsetHeight;
window.__polymer_ready_time = performance.now();
setTimeout(function() {
window.__polymer_ready = true;
}, 1000);
})
'''
def RunNavigateSteps(self, action_runner):
action_runner.NavigateToPage(self)
action_runner.WaitForJavaScriptCondition('window.__polymer_ready')
class PolymerLoadMeasurement(page_measurement.PageMeasurement):
def MeasurePage(self, _, tab, results):
result = int(tab.EvaluateJavaScript('__polymer_ready_time'))
results.AddValue(scalar.ScalarValue(
results.current_page, 'Total', 'ms', result))
| 32.805556
| 72
| 0.72735
|
9b3ffa203380bc0db81387724dec29bf915a1269
| 1,244
|
py
|
Python
|
code_examples/python_generator/parse_cdp_file.py
|
emilgaripov/emilgaripov.github.io
|
b0e4bf353cca894ec0199a73e71dc4f963e559a8
|
[
"MIT"
] | 18
|
2017-02-19T15:58:54.000Z
|
2022-02-13T22:15:19.000Z
|
code_examples/python_generator/parse_cdp_file.py
|
emilgaripov/emilgaripov.github.io
|
b0e4bf353cca894ec0199a73e71dc4f963e559a8
|
[
"MIT"
] | 3
|
2020-02-26T14:42:54.000Z
|
2021-09-28T00:32:23.000Z
|
code_examples/python_generator/parse_cdp_file.py
|
emilgaripov/emilgaripov.github.io
|
b0e4bf353cca894ec0199a73e71dc4f963e559a8
|
[
"MIT"
] | 27
|
2017-05-03T15:38:41.000Z
|
2022-02-08T02:53:38.000Z
|
import re
from pprint import pprint
def get_cdp_neighbor(sh_cdp_neighbor_detail):
with open(sh_cdp_neighbor_detail) as f:
line = ''
while True:
while not 'Device ID' in line:
line = f.readline()
neighbor = ''
neighbor += line
for line in f:
if line.startswith('-----'):
break
neighbor += line
yield neighbor
line = f.readline()
if not line:
return
def parse_cdp(output):
regex = ('Device ID: (?P<device>\S+)'
'|IP address: (?P<ip>\S+)'
'|Platform: (?P<platform>\S+ \S+),'
'|Cisco IOS Software, (?P<ios>.+), RELEASE')
result = {}
match_iter = re.finditer(regex, output)
for match in match_iter:
if match.lastgroup == 'device':
device = match.group(match.lastgroup)
result[device] = {}
elif device:
result[device][match.lastgroup] = match.group(match.lastgroup)
return result
filename = 'sh_cdp_neighbors_detail.txt'
result = get_cdp_neighbor(filename)
all_cdp = {}
for cdp in result:
all_cdp.update(parse_cdp(cdp))
pprint(all_cdp)
| 24.88
| 74
| 0.538585
|
ceeb58e18ed81e3df8bfd2a4d77dd3abc779b938
| 9,447
|
py
|
Python
|
parser/utils/field.py
|
ironsword666/CWS
|
83f86e726191125d81833829e4ddf4dd004b937d
|
[
"MIT"
] | null | null | null |
parser/utils/field.py
|
ironsword666/CWS
|
83f86e726191125d81833829e4ddf4dd004b937d
|
[
"MIT"
] | null | null | null |
parser/utils/field.py
|
ironsword666/CWS
|
83f86e726191125d81833829e4ddf4dd004b937d
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from collections import Counter
from parser.utils.vocab import Vocab
from parser.utils.fn import tohalfwidth
from parser.utils.common import bos, eos
import torch
class RawField(object):
def __init__(self, name, fn=None):
super(RawField, self).__init__()
self.name = name
self.fn = fn
def __repr__(self):
return f"({self.name}): {self.__class__.__name__}()"
def preprocess(self, sequence):
if self.fn is not None:
sequence = self.fn(sequence)
return sequence
def transform(self, sequences):
return [self.preprocess(sequence) for sequence in sequences]
class Field(RawField):
def __init__(self, name, pad=None, unk=None, bos=None, eos=None,
lower=False, tohalfwidth=False, use_vocab=True, tokenize=None, fn=None):
self.name = name
self.pad = pad
self.unk = unk
self.bos = bos
self.eos = eos
self.lower = lower
self.tohalfwidth = tohalfwidth
self.use_vocab = use_vocab
self.tokenize = tokenize
self.fn = fn
self.specials = [token for token in [pad, unk, bos, eos]
if token is not None]
def __repr__(self):
s, params = f"({self.name}): {self.__class__.__name__}(", []
if self.pad is not None:
params.append(f"pad={self.pad}")
if self.unk is not None:
params.append(f"unk={self.unk}")
if self.bos is not None:
params.append(f"bos={self.bos}")
if self.eos is not None:
params.append(f"eos={self.eos}")
if self.lower:
params.append(f"lower={self.lower}")
if not self.use_vocab:
params.append(f"use_vocab={self.use_vocab}")
if self.tohalfwidth:
params.append(f"tohalfwidth={self.tohalfwidth}")
s += f", ".join(params)
s += f")"
return s
@property
def pad_index(self):
return self.specials.index(self.pad) if self.pad is not None else 0
@property
def unk_index(self):
return self.specials.index(self.unk) if self.unk is not None else 0
@property
def bos_index(self):
return self.specials.index(self.bos)
@property
def eos_index(self):
return self.specials.index(self.eos)
def preprocess(self, sequence):
if self.fn is not None:
sequence = self.fn(sequence)
if self.tokenize is not None:
sequence = self.tokenize(sequence)
if self.lower:
sequence = [str.lower(token) for token in sequence]
if self.tohalfwidth:
sequence = [tohalfwidth(token) for token in sequence]
return sequence
def build(self, corpus, min_freq=1, embed=None):
sequences = getattr(corpus, self.name)
counter = Counter(token
for sequence in sequences
for token in self.preprocess(sequence))
self.vocab = Vocab(counter, min_freq, self.specials, self.unk_index)
if not embed:
self.embed = None
else:
tokens = self.preprocess(embed.tokens)
# if the `unk` token has existed in the pretrained,
# then replace it with a self-defined one
if embed.unk:
tokens[embed.unk_index] = self.unk
self.vocab.extend(tokens)
self.embed = torch.zeros(len(self.vocab), embed.dim)
self.embed[self.vocab.token2id(tokens)] = embed.vectors
self.embed /= torch.std(self.embed)
def transform(self, sequences):
sequences = [self.preprocess(sequence) for sequence in sequences]
if self.use_vocab:
sequences = [self.vocab.token2id(sequence)
for sequence in sequences]
if self.bos:
sequences = [[self.bos_index] + sequence for sequence in sequences]
if self.eos:
sequences = [sequence + [self.eos_index] for sequence in sequences]
sequences = [torch.tensor(sequence) for sequence in sequences]
return sequences
class NGramField(Field):
def __init__(self, *args, **kwargs):
self.n = kwargs.pop('n') if 'n' in kwargs else 1
super(NGramField, self).__init__(*args, **kwargs)
def build(self, corpus, min_freq=1, dict_file=None, embed=None):
sequences = getattr(corpus, self.name)
counter = Counter()
sequences = [self.preprocess(sequence) for sequence in sequences]
n_pad = self.n - 1
for sequence in sequences:
chars = list(sequence) + [eos] * n_pad
bichars = ["".join(chars[i + s] for s in range(self.n))
for i in range(len(chars) - n_pad)]
counter.update(bichars)
if dict_file is not None:
counter &= self.read_dict(dict_file)
self.vocab = Vocab(counter, min_freq, self.specials, self.unk_index)
if not embed:
self.embed = None
else:
tokens = self.preprocess(embed.tokens)
# if the `unk` token has existed in the pretrained,
# then replace it with a self-defined one
if embed.unk:
tokens[embed.unk_index] = self.unk
self.vocab.extend(tokens)
self.embed = torch.zeros(len(self.vocab), embed.dim)
self.embed[self.vocab.token2id(tokens)] = embed.vectors
self.embed /= torch.std(self.embed)
def read_dict(self, dict_file):
word_list = dict()
with open(dict_file, encoding='utf-8') as dict_in:
for line in dict_in:
line = line.split()
if len(line) == 3:
word_list[line[0]] = 100
return Counter(word_list)
def __repr__(self):
s, params = f"({self.name}): {self.__class__.__name__}(", []
params.append(f"n={self.n}")
if self.pad is not None:
params.append(f"pad={self.pad}")
if self.unk is not None:
params.append(f"unk={self.unk}")
if self.bos is not None:
params.append(f"bos={self.bos}")
if self.eos is not None:
params.append(f"eos={self.eos}")
if self.lower:
params.append(f"lower={self.lower}")
if not self.use_vocab:
params.append(f"use_vocab={self.use_vocab}")
if self.tohalfwidth:
params.append(f"tohalfwidth={self.tohalfwidth}")
s += f", ".join(params)
s += f")"
return s
def transform(self, sequences):
sequences = [self.preprocess(sequence) for sequence in sequences]
n_pad = (self.n - 1)
for sent_idx, sequence in enumerate(sequences):
chars = list(sequence) + [eos] * n_pad
sequences[sent_idx] = ["".join(chars[i + s] for s in range(self.n))
for i in range(len(chars) - n_pad)]
if self.use_vocab:
sequences = [self.vocab.token2id(sequence)
for sequence in sequences]
if self.bos:
sequences = [[self.bos_index] + sequence for sequence in sequences]
if self.eos:
sequences = [sequence + [self.eos_index] for sequence in sequences]
sequences = [torch.tensor(sequence) for sequence in sequences]
return sequences
class SegmentField(Field):
"""[summary]
Examples:
>>> sentence = ["我", "喜欢", "这个", "游戏"]
>>> sequence = [(0, 1), (1, 3), (3, 5), (5, 7)]
>>> spans = field.transform([sequence])[0]
>>> spans
tensor([[False, True, False, False, False, False, False, False],
[False, False, False, True, False, False, False, False],
[False, False, False, False, False, False, False, False],
[False, False, False, False, False, True, False, False],
[False, False, False, False, False, False, False, False],
[False, False, False, False, False, False, False, True],
[False, False, False, False, False, False, False, False],
[False, False, False, False, False, False, False, False]])
"""
def build(self, corpus, min_freq=1):
"""do nothing
"""
return
def transform(self, sequences):
sequences = [self.preprocess(sequence) for sequence in sequences]
spans = []
for sequence in sequences:
seq_len = sequence[-1][1] + 1
span_chart = torch.full((seq_len, seq_len), self.pad_index).bool()
for i, j in sequence:
span_chart[i, j] = 1
spans.append(span_chart)
return spans
class BertField(Field):
def transform(self, sequences):
subwords, lens = [], []
sequences = [list(sequence)
for sequence in sequences]
for sequence in sequences:
# TODO bert
sequence = self.preprocess(sequence)
sequence = [piece if piece else self.preprocess(self.pad)
for piece in sequence]
subwords.append(sequence)
subwords = [torch.tensor(pieces) for pieces in subwords]
mask = [torch.ones(len(pieces)).gt(0) for pieces in subwords]
return list(zip(subwords, mask))
| 34.988889
| 89
| 0.566529
|
60db17fc9ef7ec95553064150cc0129c7c1bdcf1
| 5,539
|
py
|
Python
|
TEST/BETAsite/RUN_SQEP.py
|
takehuge/PYQUM
|
bfc9d9b1c2f4246c7aac3a371baaf587c99f8069
|
[
"MIT"
] | null | null | null |
TEST/BETAsite/RUN_SQEP.py
|
takehuge/PYQUM
|
bfc9d9b1c2f4246c7aac3a371baaf587c99f8069
|
[
"MIT"
] | null | null | null |
TEST/BETAsite/RUN_SQEP.py
|
takehuge/PYQUM
|
bfc9d9b1c2f4246c7aac3a371baaf587c99f8069
|
[
"MIT"
] | null | null | null |
from numpy import sqrt, array, mean, sum, min, max
from pyqum.instrument.logger import set_status
from pyqum.instrument.toolbox import waveform, gotocdata
from pyqum.instrument.analyzer import curve, IQAParray
# 3. Test Square-wave Pulsing
from pyqum.directive.characterize import SQE_Pulse
# Set RUN mode:
set_status("SQE_Pulse", dict(pause=False))
# Assemble C-Structure:
CStructure = ['Flux-Bias', 'XY-Frequency', 'XY-Power', 'RO-Frequency', 'RO-Power',
'Pulse-Period', 'RO-ifLevel', 'RO-Pulse-Delay', 'RO-Pulse-Width', 'XY-ifLevel', 'XY-Pulse-Delay', 'XY-Pulse-Width',
'LO-Frequency', 'LO-Power', 'ADC-delay', 'Average', 'Sampling-Time']
# Sweep Flux:
F_CORDER = {'C-Structure': CStructure,
'Flux-Bias':'-0.006 to 0.006 * 100', 'XY-Frequency':'opt,', 'XY-Power':'opt,', 'RO-Frequency':'4.885 4.8979', 'RO-Power':'-30',
'Pulse-Period':'15994', 'RO-ifLevel':'1', 'RO-Pulse-Delay':'0', 'RO-Pulse-Width':'15994', 'XY-ifLevel':'1', 'XY-Pulse-Delay':'0', 'XY-Pulse-Width':'15994',
'LO-Frequency':'lockro,', 'LO-Power':'-30', 'ADC-delay':'0', 'Average':'12000', 'Sampling-Time':'2 to 2000 * 999'
}
F_COMMENT = "Flux dependency of Qubit for different Readout frequency"
F_TAG = 'Flux-bias'
# Sweep RO-frequency:
S_CORDER = {'C-Structure': CStructure,
'Flux-Bias':'0.002757', 'XY-Frequency':'4.45', 'XY-Power':'-100 0', 'RO-Frequency':'4.89 to 4.9 * 20', 'RO-Power':'-30',
'Pulse-Period':'15994', 'RO-ifLevel':'1', 'RO-Pulse-Delay':'0', 'RO-Pulse-Width':'15994', 'XY-ifLevel':'1', 'XY-Pulse-Delay':'0', 'XY-Pulse-Width':'15994',
'LO-Frequency':'lockro,', 'LO-Power':'-30', 'ADC-delay':'0', 'Average':'3000', 'Sampling-Time':'2 to 2000 * 999'
}
S_COMMENT = "Search for dressed cavity frequency at flux=2.757mA"
S_TAG = 'cavity, spectroscopy'
# Sweep XY-frequency:
X_CORDER = {'C-Structure': CStructure,
'Flux-Bias':'0.002757', 'XY-Frequency':'4.4 to 4.5 * 50', 'XY-Power':'0', 'RO-Frequency':'4.8979', 'RO-Power':'-30',
'Pulse-Period':'15994', 'RO-ifLevel':'1', 'RO-Pulse-Delay':'0', 'RO-Pulse-Width':'15994', 'XY-ifLevel':'1', 'XY-Pulse-Delay':'0', 'XY-Pulse-Width':'15994',
'LO-Frequency':'lockro,', 'LO-Power':'-30', 'ADC-delay':'0', 'Average':'3000', 'Sampling-Time':'2 to 2000 * 999'
}
X_COMMENT = "Search for qubit frequency at flux=2.757mA using dressed cavity"
X_TAG = 'two-tone, spectroscopy'
# Sweep XY-pulse:
XP_CORDER = {'C-Structure': CStructure,
'Flux-Bias':'-0.00205', 'XY-Frequency':'5 to 5.3 * 50', 'XY-Power':'0', 'RO-Frequency':'4.893766', 'RO-Power':'-15',
'Pulse-Period':'63994', 'RO-ifLevel':'1', 'RO-Pulse-Delay':'0', 'RO-Pulse-Width':'1600', 'XY-ifLevel':'1', 'XY-Pulse-Delay':'0', 'XY-Pulse-Width':'63994',
'LO-Frequency':'lockro,', 'LO-Power':'-25', 'ADC-delay':'0', 'Average':'10000', 'Sampling-Time':'2 to 3200 * 1599'
}
XP_COMMENT = "Searching higher Qubit-frequency with Pulse Readout and Continuus Pumping"
XP_TAG = 'ro-pulse'
# Test XY-Pulses
TP_CORDER = {'C-Structure': CStructure,
'Flux-Bias':'0', 'XY-Frequency':'4.128', 'XY-Power':'0', 'RO-Frequency':'opt,', 'RO-Power':'opt,',
'Pulse-Period':'15994', 'RO-ifLevel':'1', 'RO-Pulse-Delay':'0', 'RO-Pulse-Width':'3000', 'XY-ifLevel':'1', 'XY-Pulse-Delay':'0', 'XY-Pulse-Width':'100 to 1000 * 19',
'LO-Frequency':'4.128', 'LO-Power':'-10', 'ADC-delay':'0', 'Average':'100', 'Sampling-Time':'2 to 2000 * 999'
}
TP_COMMENT = "Testing AWG consistency of pulse generation"
# Two-tone:
TT_CORDER = {'C-Structure': CStructure,
'Flux-Bias':'-0.002 to 0 * 48', 'XY-Frequency':'4.1 to 5.3 * 240', 'XY-Power':'10', 'RO-Frequency':'4.93', 'RO-Power':'-30',
'Pulse-Period':'63994', 'RO-ifLevel':'1', 'RO-Pulse-Delay':'lockxypwd+50,', 'RO-Pulse-Width':'1600', 'XY-ifLevel':'1', 'XY-Pulse-Delay':'0', 'XY-Pulse-Width':'0 35',
'LO-Frequency':'lockro,', 'LO-Power':'-40', 'ADC-delay':'0', 'Average':'6000', 'Sampling-Time':'2 to 3200 * 1599'
}
TT_COMMENT = "Two-tone to search for Qubit frequency after flux offset shifting (Ground and Excited state)"
TT_TAG = "Two-tone"
# Rabi
R_CORDER = {'C-Structure': CStructure,
'Flux-Bias':'0.002757', 'XY-Frequency':'4.45', 'XY-Power':'-100 0 5', 'RO-Frequency':'4.8937 4.8979', 'RO-Power':'-30',
'Pulse-Period':'63994', 'RO-ifLevel':'1', 'RO-Pulse-Delay':'lockxypwd+0,', 'RO-Pulse-Width':'1600', 'XY-ifLevel':'1', 'XY-Pulse-Delay':'0', 'XY-Pulse-Width':'0 to 600 * 300',
'LO-Frequency':'lockro,', 'LO-Power':'-30', 'ADC-delay':'0', 'Average':'12000', 'Sampling-Time':'2 to 3200 * 1599'
}
R_COMMENT = "Checking Lower Qubit-frequency"
R_TAG = "Rabi"
# T1
T1_CORDER = {'C-Structure': CStructure,
'Flux-Bias':'-0.0031', 'XY-Frequency':'4.19', 'XY-Power':'10', 'RO-Frequency':'4.93', 'RO-Power':'-20',
'Pulse-Period':'63994', 'RO-ifLevel':'1', 'RO-Pulse-Delay':'85 to 5000 * 200', 'RO-Pulse-Width':'1600', 'XY-ifLevel':'1', 'XY-Pulse-Delay':'0', 'XY-Pulse-Width':'35',
'LO-Frequency':'lockro,', 'LO-Power':'-20', 'ADC-delay':'lockropdelay,', 'Average':'10000', 'Sampling-Time':'2 to 3200 * 1599'
}
T1_COMMENT = "Measure T1 with time offset and higher xy-power with cavity detuning"
T1_TAG = "T1"
# New measurement:
SQE_Pulse('abc', corder=R_CORDER, comment=R_COMMENT, tag=R_TAG, dayindex=-1, testeach=False)
| 58.305263
| 186
| 0.6039
|
a7eca502c9ff98729f4c4db202f212d9c501b3be
| 11,219
|
py
|
Python
|
slimta/relay/smtp/client.py
|
fisele/compare-slimta
|
4a59bd8e0d0b01551ed6d045b7e008846c8b4a80
|
[
"MIT"
] | null | null | null |
slimta/relay/smtp/client.py
|
fisele/compare-slimta
|
4a59bd8e0d0b01551ed6d045b7e008846c8b4a80
|
[
"MIT"
] | null | null | null |
slimta/relay/smtp/client.py
|
fisele/compare-slimta
|
4a59bd8e0d0b01551ed6d045b7e008846c8b4a80
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2012 Ian C. Good
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
from __future__ import absolute_import
from socket import getfqdn, error as socket_error
from functools import wraps
from gevent import Timeout
from gevent.socket import create_connection
from slimta.smtp import SmtpError
from slimta.smtp.reply import Reply, timed_out, connection_failed
from slimta.smtp.client import Client
from slimta import logging
from ..pool import RelayPoolClient
from . import SmtpRelayError
__all__ = ['SmtpRelayClient']
log = logging.getSocketLogger(__name__)
hostname = getfqdn()
def current_command(cmd):
def deco(old_f):
@wraps(old_f)
def new_f(self, *args, **kwargs):
prev = self.current_command
self.current_command = cmd
ret = old_f(self, *args, **kwargs)
self.current_command = prev
return ret
return new_f
return deco
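# Editor's illustrative sketch (not part of the original module): the decorator
# above records which SMTP command a method is executing, so error replies can
# name the command that was in flight. A toy class makes the effect visible.
class _CurrentCommandExample(object):
    current_command = None

    @current_command(b'NOOP')
    def ping(self):
        # While this method runs, self.current_command == b'NOOP'; the previous
        # value is restored when the call returns.
        return self.current_command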
class SmtpRelayClient(RelayPoolClient):
_client_class = Client
def __init__(self, address, queue, socket_creator=None, ehlo_as=None,
context=None, tls_immediately=False, tls_required=False,
connect_timeout=10.0, command_timeout=10.0,
data_timeout=None, idle_timeout=None,
credentials=None, binary_encoder=None):
super(SmtpRelayClient, self).__init__(queue, idle_timeout)
self.address = address
self.socket_creator = socket_creator or create_connection
self.socket = None
self.client = None
self.ehlo_as = ehlo_as or hostname
self.context = context
self.tls_immediately = tls_immediately
self.tls_required = tls_required
self.connect_timeout = connect_timeout
self.command_timeout = command_timeout
self.data_timeout = data_timeout or command_timeout
self.credentials = credentials
self.binary_encoder = binary_encoder
self.current_command = None
@current_command(b'[CONNECT]')
def _connect(self):
with Timeout(self.connect_timeout):
self.socket = self.socket_creator(self.address)
log.connect(self.socket, self.address)
self.client = self._client_class(self.socket, self.address)
@current_command(b'[BANNER]')
def _banner(self):
with Timeout(self.command_timeout):
banner = self.client.get_banner()
if banner.is_error():
raise SmtpRelayError.factory(banner)
@current_command(b'EHLO')
def _ehlo(self):
try:
ehlo_as = self.ehlo_as(self.address)
except TypeError:
ehlo_as = self.ehlo_as
with Timeout(self.command_timeout):
ehlo = self.client.ehlo(ehlo_as)
if ehlo.is_error():
if ehlo.code == '500':
return self._helo(ehlo_as)
raise SmtpRelayError.factory(ehlo)
return ehlo
@current_command(b'HELO')
def _helo(self, ehlo_as):
with Timeout(self.command_timeout):
helo = self.client.helo(ehlo_as)
if helo.is_error():
raise SmtpRelayError.factory(helo)
return helo
@current_command(b'STARTTLS')
def _starttls(self):
with Timeout(self.command_timeout):
starttls = self.client.starttls(self.context)
if starttls.is_error() and self.tls_required:
raise SmtpRelayError.factory(starttls)
@current_command(b'AUTH')
def _authenticate(self):
try:
credentials = self.credentials()
except TypeError:
credentials = self.credentials
with Timeout(self.command_timeout):
auth = self.client.auth(*credentials)
if auth.is_error():
raise SmtpRelayError.factory(auth)
def _handshake(self):
if self.tls_immediately:
self.client.encrypt(self.context)
self._banner()
self._ehlo()
else:
self._banner()
self._ehlo()
if self.tls_required or 'STARTTLS' in self.client.extensions:
self._starttls()
self._ehlo()
if self.credentials:
self._authenticate()
@current_command(b'RSET')
def _rset(self):
with Timeout(self.command_timeout):
self.client.rset()
@current_command(b'MAIL')
def _mailfrom(self, sender):
with Timeout(self.command_timeout):
mailfrom = self.client.mailfrom(sender)
if mailfrom and mailfrom.is_error():
raise SmtpRelayError.factory(mailfrom)
return mailfrom
@current_command(b'RCPT')
def _rcptto(self, rcpt):
with Timeout(self.command_timeout):
return self.client.rcptto(rcpt)
@current_command(b'DATA')
def _data(self):
with Timeout(self.command_timeout):
return self.client.data()
def _check_replies(self, mailfrom, rcpttos, data):
if mailfrom.is_error():
raise SmtpRelayError.factory(mailfrom)
for rcptto in rcpttos:
if not rcptto.is_error():
break
else:
raise SmtpRelayError.factory(rcpttos[0])
if data.is_error():
raise SmtpRelayError.factory(data)
@current_command(b'[SEND_DATA]')
def _send_empty_data(self):
with Timeout(self.data_timeout):
self.client.send_empty_data()
@current_command(b'[SEND_DATA]')
def _send_message_data(self, envelope):
header_data, message_data = envelope.flatten()
with Timeout(self.data_timeout):
send_data = self.client.send_data(
header_data, message_data)
self.client._flush_pipeline()
if isinstance(send_data, Reply) and send_data.is_error():
raise SmtpRelayError.factory(send_data)
return send_data
def _handle_encoding(self, envelope):
if '8BITMIME' not in self.client.extensions:
try:
envelope.encode_7bit(self.binary_encoder)
except UnicodeError:
reply = Reply('554', '5.6.3 Conversion not allowed',
command=b'[data conversion]',
address=self.address)
raise SmtpRelayError.factory(reply)
def _send_envelope(self, rcpt_results, envelope):
data = None
mailfrom = self._mailfrom(envelope.sender)
rcpttos = [self._rcptto(rcpt) for rcpt in envelope.recipients]
try:
data = self._data()
self._check_replies(mailfrom, rcpttos, data)
except SmtpRelayError:
if data and not data.is_error():
self._send_empty_data()
raise
for i, rcpt_reply in enumerate(rcpttos):
rcpt = envelope.recipients[i]
if rcpt_reply.is_error():
rcpt_results[rcpt] = SmtpRelayError.factory(rcpt_reply)
def _deliver(self, result, envelope):
rcpt_results = dict.fromkeys(envelope.recipients)
try:
self._handle_encoding(envelope)
self._send_envelope(rcpt_results, envelope)
msg_result = self._send_message_data(envelope)
except SmtpRelayError as e:
result.set_exception(e)
self._rset()
else:
for key, value in rcpt_results.items():
if value is None:
rcpt_results[key] = msg_result
result.set(rcpt_results)
def _check_server_timeout(self):
try:
if self.client.has_reply_waiting():
with Timeout(self.command_timeout):
self.client.get_reply()
return True
except SmtpError:
return True
return False
def _disconnect(self):
try:
with Timeout(self.command_timeout):
self.client.quit()
except (Timeout, Exception):
pass
finally:
if self.client:
self.client.io.close()
def _get_error_reply(self, exc):
try:
if self.client.last_error.code == '421':
return self.client.last_error
except Exception:
pass
return Reply('421', '4.3.0 '+str(exc),
command=self.current_command, address=self.address)
def _run(self):
result, envelope = self.poll()
if not result:
return
reraise = True
try:
self._connect()
self._handshake()
while result:
if self._check_server_timeout():
self.queue.appendleft((result, envelope))
break
self._deliver(result, envelope)
if self.idle_timeout is None:
break
result, envelope = self.poll()
except SmtpRelayError as e:
result.set_exception(e)
except SmtpError as e:
if not result.ready():
reply = self._get_error_reply(e)
relay_error = SmtpRelayError.factory(reply)
result.set_exception(relay_error)
except Timeout:
if not result.ready():
reply = Reply(command=self.current_command,
address=self.address).copy(timed_out)
relay_error = SmtpRelayError.factory(reply)
result.set_exception(relay_error)
except socket_error as exc:
log.error(self.socket, exc, self.address)
if not result.ready():
reply = Reply(command=self.current_command,
address=self.address).copy(connection_failed)
relay_error = SmtpRelayError.factory(reply)
result.set_exception(relay_error)
except Exception as e:
if not result.ready():
result.set_exception(e)
reraise = False
raise
finally:
try:
self._disconnect()
except Exception:
if reraise:
raise
# vim:et:fdm=marker:sts=4:sw=4:ts=4
| 35.169279
| 79
| 0.611017
|
daf06add899ebe9ebfd88d41af3dd325f3202606
| 1,208
|
py
|
Python
|
airflow/contrib/sensors/sagemaker_transform_sensor.py
|
emilioego/airflow
|
3457c7847cd24413ff5b622e65c27d8370f94502
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 79
|
2021-10-15T07:32:27.000Z
|
2022-03-28T04:10:19.000Z
|
airflow/contrib/sensors/sagemaker_transform_sensor.py
|
emilioego/airflow
|
3457c7847cd24413ff5b622e65c27d8370f94502
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 153
|
2021-10-15T05:23:46.000Z
|
2022-02-23T06:07:10.000Z
|
airflow/contrib/sensors/sagemaker_transform_sensor.py
|
emilioego/airflow
|
3457c7847cd24413ff5b622e65c27d8370f94502
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 23
|
2021-10-15T02:36:37.000Z
|
2022-03-17T02:59:27.000Z
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module is deprecated. Please use `airflow.providers.amazon.aws.sensors.sagemaker_transform`."""
import warnings
# pylint: disable=unused-import
from airflow.providers.amazon.aws.sensors.sagemaker_transform import SageMakerTransformSensor # noqa
warnings.warn(
"This module is deprecated. Please use `airflow.providers.amazon.aws.sensors.sagemaker_transform`.",
DeprecationWarning,
stacklevel=2,
)
| 40.266667
| 104
| 0.778146
|
17bd47ef05cd906768fdec80b041f4c0465f9f16
| 2,724
|
py
|
Python
|
tests/test_eddb_systems.py
|
kominetz/elite-tools
|
5cefef9b45027b10479ccef8bdcdc94914245c43
|
[
"Apache-2.0"
] | 2
|
2020-08-17T17:23:58.000Z
|
2020-08-17T17:38:48.000Z
|
tests/test_eddb_systems.py
|
kominetz/elite-tools
|
5cefef9b45027b10479ccef8bdcdc94914245c43
|
[
"Apache-2.0"
] | 30
|
2020-05-06T16:25:10.000Z
|
2021-12-25T15:08:31.000Z
|
tests/test_eddb_systems.py
|
kominetz/elite-tools
|
5cefef9b45027b10479ccef8bdcdc94914245c43
|
[
"Apache-2.0"
] | null | null | null |
import pytest
import pandas as pd
from elitetools import eddb
from elitetools.eddb import find_system_by_name, query_systems_by_name, query_nearby_systems, query_systems_by_faction, load_feeds
def setup_module():
load_feeds()
class TestFindSystemByName:
def test(self):
sol_system = find_system_by_name('Sol')
assert sol_system is not None
assert sol_system['name'] == 'Sol'
assert sol_system['needs_permit'] == True
sd_system = find_system_by_name('Shinrarta Dezhra')
assert sd_system is not None
assert sd_system['name'] == 'Shinrarta Dezhra'
assert sd_system['needs_permit'] == True
def test_system_not_found(self):
with pytest.raises(KeyError):
find_system_by_name('NOT_A_REAL_SYSTEM')
def test_case_insensitive(self):
assert (find_system_by_name('sOL')) is not None
class TestQuerySystemsByNames:
def test(self):
cs_from_list = query_systems_by_name(['Achenar', 'Alioth', 'Sol'])
assert len(cs_from_list) == 3
assert cs_from_list['Achenar']['name'] == 'Achenar'
assert cs_from_list['Alioth']['name'] == 'Alioth'
assert cs_from_list['Sol']['name'] == 'Sol'
assert cs_from_list['Sol']['x'] == 0.0
assert cs_from_list['Sol']['y'] == 0.0
assert cs_from_list['Sol']['z'] == 0.0
cs_from_string = query_systems_by_name('Achenar, Alioth, Sol')
assert len(cs_from_string) == 3
assert cs_from_string['Achenar'] == cs_from_list['Achenar']
assert cs_from_string['Alioth'] == cs_from_list['Alioth']
assert cs_from_string['Sol'] == cs_from_list['Sol']
cs_empty = query_systems_by_name([])
assert len(cs_empty) == 0
def test_systems_not_found(self):
with pytest.raises(KeyError):
query_systems_by_name('NOT_A_SYSTEM')
with pytest.raises(KeyError):
query_systems_by_name(['Sol', 'NOT_A_SYSTEM', 'Shinrarta Dezhra'])
def test_case_insensitive(self):
cs = query_systems_by_name('AcHeNar, aLIOTH, SOL')
assert len(cs) == 3
class TestQueryNearbySystems:
def test(self):
nearby_systems = query_nearby_systems('Azrael', 20)
# Nearby systems should not change due to BGS activity; whether a major update could change them is unknown.
assert len(nearby_systems) == 3
assert "Dvorsi" in nearby_systems
class TestQuerySystemsByFaction:
def test(self):
faction_systems = query_systems_by_faction("The Order of Mobius")
# A faction is always in at least one system.
assert len(faction_systems) > 0
# A faction is always in its home system.
assert "Azrael" in faction_systems
| 34.923077
| 130
| 0.664097
|
b5132482a1a83f712f4bc812f5074170ab875cd3
| 7,979
|
py
|
Python
|
authtools/forms.py
|
tamiryspino/django-authtools
|
cfb4878c0c835c0877c0726cb023cf55d507e36c
|
[
"BSD-2-Clause"
] | null | null | null |
authtools/forms.py
|
tamiryspino/django-authtools
|
cfb4878c0c835c0877c0726cb023cf55d507e36c
|
[
"BSD-2-Clause"
] | null | null | null |
authtools/forms.py
|
tamiryspino/django-authtools
|
cfb4878c0c835c0877c0726cb023cf55d507e36c
|
[
"BSD-2-Clause"
] | null | null | null |
from __future__ import unicode_literals
from django import forms, VERSION as DJANGO_VERSION
from django.forms.utils import flatatt
from django.contrib.auth.forms import (
ReadOnlyPasswordHashField, ReadOnlyPasswordHashWidget,
PasswordResetForm as OldPasswordResetForm,
UserChangeForm as DjangoUserChangeForm,
AuthenticationForm as DjangoAuthenticationForm,
)
from django.contrib.auth import get_user_model, password_validation
from django.contrib.auth.hashers import identify_hasher, UNUSABLE_PASSWORD_PREFIX
from django.utils.translation import ugettext_lazy as _, ugettext
from django.utils.html import format_html
User = get_user_model()
def is_password_usable(pw):
"""Decide whether a password is usable only by the unusable password prefix.
We can't use django.contrib.auth.hashers.is_password_usable either, because
it not only checks against the unusable password, but checks for a valid
hasher too. We need different error messages in those cases.
"""
return not pw.startswith(UNUSABLE_PASSWORD_PREFIX)
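# Editor's illustrative note (not part of the original module): Django stores
# unusable passwords as UNUSABLE_PASSWORD_PREFIX ("!") followed by random
# characters, so the prefix check above is all that is needed here, e.g.
#   is_password_usable(UNUSABLE_PASSWORD_PREFIX + 'abc')  -> False
#   is_password_usable('pbkdf2_sha256$36000$salt$hash')   -> True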
class BetterReadOnlyPasswordHashWidget(ReadOnlyPasswordHashWidget):
"""
A ReadOnlyPasswordHashWidget that has a less intimidating output.
"""
def render(self, name, value, attrs=None, renderer=None):
final_attrs = flatatt(self.build_attrs(attrs))
if not value or not is_password_usable(value):
summary = ugettext("No password set.")
else:
try:
identify_hasher(value)
except ValueError:
summary = ugettext("Invalid password format or unknown"
" hashing algorithm.")
else:
summary = ugettext('*************')
return format_html('<div{attrs}><strong>{summary}</strong></div>',
attrs=final_attrs, summary=summary)
class UserCreationForm(forms.ModelForm):
"""
A form for creating new users. Includes all the required
fields, plus a repeated password.
"""
error_messages = {
'password_mismatch': _("The two password fields didn't match."),
'duplicate_username': _("A user with that %(username)s already exists."),
}
password1 = forms.CharField(label=_("Password"), widget=forms.PasswordInput)
password2 = forms.CharField(label=_("Password confirmation"),
widget=forms.PasswordInput,
help_text=_("Enter the same password as above,"
" for verification."))
class Meta:
model = User
fields = (User.USERNAME_FIELD,) + tuple(User.REQUIRED_FIELDS)
def __init__(self, *args, **kwargs):
super(UserCreationForm, self).__init__(*args, **kwargs)
def validate_uniqueness_of_username_field(value):
# Since User.username is unique, this check is redundant,
# but it sets a nicer error message than the ORM. See #13147.
try:
User._default_manager.get_by_natural_key(value)
except User.DoesNotExist:
return value
raise forms.ValidationError(self.error_messages['duplicate_username'] % {
'username': User.USERNAME_FIELD,
})
self.fields[User.USERNAME_FIELD].validators.append(validate_uniqueness_of_username_field)
def clean_password2(self):
# Check that the two password entries match
password1 = self.cleaned_data.get("password1")
password2 = self.cleaned_data.get("password2")
if password1 and password2 and password1 != password2:
raise forms.ValidationError(self.error_messages['password_mismatch'])
return password2
def _post_clean(self):
super(UserCreationForm, self)._post_clean()
# Validate the password after self.instance is updated with form data
# by super().
password = self.cleaned_data.get('password2')
if password:
try:
password_validation.validate_password(password, self.instance)
except forms.ValidationError as error:
self.add_error('password2', error)
def save(self, commit=True):
# Save the provided password in hashed format
user = super(UserCreationForm, self).save(commit=False)
user.set_password(self.cleaned_data["password1"])
if commit:
user.save()
return user
class CaseInsensitiveUsernameFieldCreationForm(UserCreationForm):
"""
This form is the same as UserCreationForm, except that usernames are lowercased before they
are saved. This is to disallow the existence of email address usernames which differ only in
case.
"""
def clean_USERNAME_FIELD(self):
username = self.cleaned_data.get(User.USERNAME_FIELD)
if username:
username = username.lower()
return username
# set the correct clean method on the class so that child classes can override and call super()
setattr(
CaseInsensitiveUsernameFieldCreationForm,
'clean_' + User.USERNAME_FIELD,
CaseInsensitiveUsernameFieldCreationForm.clean_USERNAME_FIELD
)
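# Editor's illustrative sketch (not part of the original module): because the clean
# method is attached under the real field name above, a child class can override it
# and still call super(). This example assumes USERNAME_FIELD is 'email'; use the
# actual field name otherwise.
class ExampleSignupForm(CaseInsensitiveUsernameFieldCreationForm):
    def clean_email(self):
        email = super(ExampleSignupForm, self).clean_email()
        return email.strip() if email else email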
# alias for the old name for backwards-compatibility
CaseInsensitiveEmailUserCreationForm = CaseInsensitiveUsernameFieldCreationForm
class UserChangeForm(forms.ModelForm):
"""
A form for updating users. Includes all the fields on
the user, but replaces the password field with admin's
password hash display field.
"""
password = ReadOnlyPasswordHashField(label=_("Password"),
widget=BetterReadOnlyPasswordHashWidget)
class Meta:
model = User
fields = '__all__'
def __init__(self, *args, **kwargs):
super(UserChangeForm, self).__init__(*args, **kwargs)
f = self.fields.get('user_permissions', None)
if f is not None:
f.queryset = f.queryset.select_related('content_type')
def clean_password(self):
# Regardless of what the user provides, return the initial value.
# This is done here, rather than on the field, because the
# field does not have access to the initial value
return self.initial["password"]
class AdminUserChangeForm(UserChangeForm):
def __init__(self, *args, **kwargs):
super(AdminUserChangeForm, self).__init__(*args, **kwargs)
if not self.fields['password'].help_text:
self.fields['password'].help_text = \
DjangoUserChangeForm.base_fields['password'].help_text
class FriendlyPasswordResetForm(OldPasswordResetForm):
error_messages = dict(getattr(OldPasswordResetForm, 'error_messages', {}))
error_messages['unknown'] = _("This email address doesn't have an "
"associated user account. Are you "
"sure you've registered?")
def clean_email(self):
"""Return an error message if the email address being reset is unknown.
This is to revert https://code.djangoproject.com/ticket/19758
The bug #19758 tries not to leak emails through password reset because
only usernames are unique in Django's default user model.
django-authtools leaks email addresses through the registration form.
In the case of django-authtools, not warning the user doesn't add any
security, and it worsens the user experience.
"""
email = self.cleaned_data['email']
results = list(self.get_users(email))
if not results:
raise forms.ValidationError(self.error_messages['unknown'])
return email
class AuthenticationForm(DjangoAuthenticationForm):
def __init__(self, request=None, *args, **kwargs):
super(AuthenticationForm, self).__init__(request, *args, **kwargs)
username_field = User._meta.get_field(User.USERNAME_FIELD)
self.fields['username'].widget = username_field.formfield().widget
| 38.73301
| 97
| 0.673393
|
38b7cf2fa10b5648af47c7a2310b434b1cbb3b50
| 70,389
|
py
|
Python
|
tests/Node.py
|
kushnirenko/remprotocol
|
ec450227a40bb18527b473266b07b982efc1d093
|
[
"MIT"
] | null | null | null |
tests/Node.py
|
kushnirenko/remprotocol
|
ec450227a40bb18527b473266b07b982efc1d093
|
[
"MIT"
] | null | null | null |
tests/Node.py
|
kushnirenko/remprotocol
|
ec450227a40bb18527b473266b07b982efc1d093
|
[
"MIT"
] | null | null | null |
import copy
import decimal
import subprocess
import time
import os
import re
import datetime
import json
import signal
from core_symbol import CORE_SYMBOL
from testUtils import Utils
from testUtils import Account
from testUtils import EnumType
from testUtils import addEnum
from testUtils import unhandledEnumType
from testUtils import ReturnType
class BlockType(EnumType):
pass
addEnum(BlockType, "head")
addEnum(BlockType, "lib")
# pylint: disable=too-many-public-methods
class Node(object):
# pylint: disable=too-many-instance-attributes
# pylint: disable=too-many-arguments
def __init__(self, host, port, pid=None, cmd=None, walletMgr=None, enableMongo=False, mongoHost="localhost", mongoPort=27017, mongoDb="EOStest"):
self.host=host
self.port=port
self.pid=pid
self.cmd=cmd
if Utils.Debug: Utils.Print("new Node host=%s, port=%s, pid=%s, cmd=%s" % (self.host, self.port, self.pid, self.cmd))
self.killed=False # marks node as killed
self.enableMongo=enableMongo
self.mongoHost=mongoHost
self.mongoPort=mongoPort
self.mongoDb=mongoDb
self.endpointHttp="http://%s:%d" % (self.host, self.port)
self.endpointArgs="--url %s" % (self.endpointHttp)
self.mongoEndpointArgs=""
self.infoValid=None
self.lastRetrievedHeadBlockNum=None
self.lastRetrievedLIB=None
self.lastRetrievedHeadBlockProducer=""
self.transCache={}
self.walletMgr=walletMgr
self.missingTransaction=False
self.popenProc=None # initial process is started by launcher, this will only be set on relaunch
if self.enableMongo:
self.mongoEndpointArgs += "--host %s --port %d %s" % (mongoHost, mongoPort, mongoDb)
def eosClientArgs(self):
walletArgs=" " + self.walletMgr.getWalletEndpointArgs() if self.walletMgr is not None else ""
return self.endpointArgs + walletArgs + " " + Utils.MiscEosClientArgs
def __str__(self):
#return "Host: %s, Port:%d, Pid:%s, Cmd:\"%s\"" % (self.host, self.port, self.pid, self.cmd)
return "Host: %s, Port:%d, Pid:%s" % (self.host, self.port, self.pid)
@staticmethod
def validateTransaction(trans):
assert trans
assert isinstance(trans, dict), print("Input type is %s" % type(trans))
executed="executed"
def printTrans(trans, status):
Utils.Print("ERROR: Valid transaction should be \"%s\" but it was \"%s\"." % (executed, status))
Utils.Print("Transaction: %s" % (json.dumps(trans, indent=1)))
transStatus=Node.getTransStatus(trans)
assert transStatus == executed, printTrans(trans, transStatus)
@staticmethod
def __printTransStructureError(trans, context):
Utils.Print("ERROR: Failure in expected transaction structure. Missing trans%s." % (context))
Utils.Print("Transaction: %s" % (json.dumps(trans, indent=1)))
class Context:
def __init__(self, obj, desc):
self.obj=obj
self.sections=[obj]
self.keyContext=[]
self.desc=desc
def __json(self):
return "%s=\n%s" % (self.desc, json.dumps(self.obj, indent=1))
def __keyContext(self):
msg=""
for key in self.keyContext:
if msg=="":
msg="["
else:
msg+="]["
msg+=key
if msg!="":
msg+="]"
return msg
def __contextDesc(self):
return "%s%s" % (self.desc, self.__keyContext())
def add(self, newKey):
assert isinstance(newKey, str), print("ERROR: Trying to use %s as a key" % (newKey))
subSection=self.sections[-1]
assert isinstance(subSection, dict), print("ERROR: Calling \"add\" method when context is not a dictionary. %s in %s" % (self.__contextDesc(), self.__json()))
assert newKey in subSection, print("ERROR: %s does not contain key \"%s\". %s" % (self.__contextDesc(), newKey, self.__json()))
current=subSection[newKey]
self.sections.append(current)
self.keyContext.append(newKey)
return current
def index(self, i):
assert isinstance(i, int), print("ERROR: Trying to use \"%s\" as a list index" % (i))
cur=self.getCurrent()
assert isinstance(cur, list), print("ERROR: Calling \"index\" method when context is not a list. %s in %s" % (self.__contextDesc(), self.__json()))
listLen=len(cur)
assert i < listLen, print("ERROR: Index %s is beyond the size of the current list (%s). %s in %s" % (i, listLen, self.__contextDesc(), self.__json()))
return self.sections.append(cur[i])
def getCurrent(self):
return self.sections[-1]
@staticmethod
def getTransStatus(trans):
cntxt=Node.Context(trans, "trans")
cntxt.add("processed")
cntxt.add("receipt")
return cntxt.add("status")
@staticmethod
def getTransBlockNum(trans):
cntxt=Node.Context(trans, "trans")
cntxt.add("processed")
cntxt.add("action_traces")
cntxt.index(0)
return cntxt.add("block_num")
@staticmethod
def stdinAndCheckOutput(cmd, subcommand):
"""Passes input to stdin, executes cmd. Returns tuple with return code(int), stdout(byte stream) and stderr(byte stream)."""
assert(cmd)
assert(isinstance(cmd, list))
assert(subcommand)
assert(isinstance(subcommand, str))
outs=None
errs=None
ret=0
try:
popen=subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
outs,errs=popen.communicate(input=subcommand.encode("utf-8"))
ret=popen.wait()
except subprocess.CalledProcessError as ex:
msg=ex.output
return (ex.returncode, msg, None)
return (ret, outs, errs)
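# Editor's illustrative note (not part of the original class): for example,
#   rc, out, err = Node.stdinAndCheckOutput(["cat"], "hello")
# runs `cat`, feeds it "hello" on stdin, and returns (0, b"hello", b"").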
@staticmethod
def normalizeJsonObject(extJStr):
tmpStr=extJStr
tmpStr=re.sub(r'ObjectId\("(\w+)"\)', r'"ObjectId-\1"', tmpStr)
tmpStr=re.sub(r'ISODate\("([\w|\-|\:|\.]+)"\)', r'"ISODate-\1"', tmpStr)
tmpStr=re.sub(r'NumberLong\("(\w+)"\)', r'"NumberLong-\1"', tmpStr)
tmpStr=re.sub(r'NumberLong\((\w+)\)', r'\1', tmpStr)
return tmpStr
@staticmethod
def runMongoCmdReturnJson(cmd, subcommand, trace=False, exitOnError=False):
"""Run mongodb subcommand and return response."""
assert(cmd)
assert(isinstance(cmd, list))
assert(subcommand)
assert(isinstance(subcommand, str))
retId,outs,errs=Node.stdinAndCheckOutput(cmd, subcommand)
if retId != 0:
errorMsg="mongodb call failed. cmd=[ %s ] subcommand=\"%s\" - %s" % (", ".join(cmd), subcommand, errs)
if exitOnError:
Utils.cmdError(errorMsg)
Utils.errorExit(errorMsg)
Utils.Print("ERROR: %s" % (errMsg))
return None
outStr=Node.byteArrToStr(outs)
if not outStr:
return None
extJStr=Utils.filterJsonObjectOrArray(outStr)
if not extJStr:
return None
jStr=Node.normalizeJsonObject(extJStr)
if not jStr:
return None
if trace: Utils.Print ("RAW > %s"% (outStr))
if trace: Utils.Print ("JSON> %s"% jStr)
try:
jsonData=json.loads(jStr)
except json.decoder.JSONDecodeError as _:
Utils.Print ("ERROR: JSONDecodeError")
Utils.Print ("Raw MongoDB response: > %s"% (outStr))
Utils.Print ("Normalized MongoDB response: > %s"% (jStr))
raise
return jsonData
@staticmethod
def getTransId(trans):
"""Retrieve transaction id from dictionary object."""
assert trans
assert isinstance(trans, dict), print("Input type is %s" % type(trans))
assert "transaction_id" in trans, print("trans does not contain key %s. trans={%s}" % ("transaction_id", json.dumps(trans, indent=2, sort_keys=True)))
transId=trans["transaction_id"]
return transId
@staticmethod
def isTrans(obj):
"""Identify if this is a transaction dictionary."""
if obj is None or not isinstance(obj, dict):
return False
return True if "transaction_id" in obj else False
@staticmethod
def byteArrToStr(arr):
return arr.decode("utf-8")
def validateAccounts(self, accounts):
assert(accounts)
assert(isinstance(accounts, list))
for account in accounts:
assert(account)
assert(isinstance(account, Account))
if Utils.Debug: Utils.Print("Validating account %s" % (account.name))
accountInfo=self.getEosAccount(account.name, exitOnError=True)
try:
if not self.enableMongo:
assert(accountInfo["account_name"] == account.name)
else:
assert(accountInfo["name"] == account.name)
except (AssertionError, TypeError, KeyError) as _:
Utils.Print("account validation failed. account: %s" % (account.name))
raise
# pylint: disable=too-many-branches
def getBlock(self, blockNum, silentErrors=False, exitOnError=False):
"""Given a blockId will return block details."""
assert(isinstance(blockNum, int))
if not self.enableMongo:
tmpVar = Utils.Debug
Utils.Debug = False
cmdDesc="get block"
cmd="%s %d" % (cmdDesc, blockNum)
msg="(block number=%s)" % (blockNum);
res = self.processCleosCmd(cmd, cmdDesc, silentErrors=silentErrors, exitOnError=exitOnError, exitMsg=msg)
Utils.Debug = tmpVar
return res
else:
cmd="%s %s" % (Utils.MongoPath, self.mongoEndpointArgs)
subcommand='db.blocks.findOne( { "block_num": %d } )' % (blockNum)
if Utils.Debug: Utils.Print("cmd: echo '%s' | %s" % (subcommand, cmd))
start=time.perf_counter()
try:
block=Node.runMongoCmdReturnJson(cmd.split(), subcommand, exitOnError=exitOnError)
if Utils.Debug:
end=time.perf_counter()
Utils.Print("cmd Duration: %.3f sec" % (end-start))
if block is not None:
return block
except subprocess.CalledProcessError as ex:
if not silentErrors:
end=time.perf_counter()
msg=ex.output.decode("utf-8")
errorMsg="Exception during get db node get block. cmd Duration: %.3f sec. %s" % (end-start, msg)
if exitOnError:
Utils.cmdError(errorMsg)
Utils.errorExit(errorMsg)
else:
Utils.Print("ERROR: %s" % (errorMsg))
return None
return None
def getBlockByIdMdb(self, blockId, silentErrors=False):
cmd="%s %s" % (Utils.MongoPath, self.mongoEndpointArgs)
subcommand='db.blocks.findOne( { "block_id": "%s" } )' % (blockId)
if Utils.Debug: Utils.Print("cmd: echo '%s' | %s" % (subcommand, cmd))
start=time.perf_counter()
try:
trans=Node.runMongoCmdReturnJson(cmd.split(), subcommand)
if Utils.Debug:
end=time.perf_counter()
Utils.Print("cmd Duration: %.3f sec" % (end-start))
if trans is not None:
return trans
except subprocess.CalledProcessError as ex:
if not silentErrors:
end=time.perf_counter()
msg=ex.output.decode("utf-8")
Utils.Print("ERROR: Exception during db get block by id. cmd Duration: %.3f sec. %s" % (end-start, msg))
return None
return None
def isBlockPresent(self, blockNum, blockType=BlockType.head):
"""Does node have head_block_num/last_irreversible_block_num >= blockNum"""
assert isinstance(blockNum, int)
assert isinstance(blockType, BlockType)
assert (blockNum > 0)
info=self.getInfo(silentErrors=True, exitOnError=True)
node_block_num=0
try:
if blockType==BlockType.head:
node_block_num=int(info["head_block_num"])
elif blockType==BlockType.lib:
node_block_num=int(info["last_irreversible_block_num"])
else:
unhandledEnumType(blockType)
except (TypeError, KeyError) as _:
Utils.Print("Failure in get info parsing %s block. %s" % (blockType.type, info))
raise
present = True if blockNum <= node_block_num else False
if Utils.Debug and blockType==BlockType.lib:
decorator=""
if not present:
decorator="not "
Utils.Print("Block %d is %sfinalized." % (blockNum, decorator))
return present
def isBlockFinalized(self, blockNum):
"""Is blockNum finalized"""
return self.isBlockPresent(blockNum, blockType=BlockType.lib)
# pylint: disable=too-many-branches
def getTransaction(self, transId, silentErrors=False, exitOnError=False, delayedRetry=True):
assert(isinstance(transId, str))
exitOnErrorForDelayed=not delayedRetry and exitOnError
timeout=3
if not self.enableMongo:
cmdDesc="get transaction"
cmd="%s %s" % (cmdDesc, transId)
msg="(transaction id=%s)" % (transId);
for i in range(0,(int(60/timeout) - 1)):
trans=self.processCleosCmd(cmd, cmdDesc, silentErrors=silentErrors, exitOnError=exitOnErrorForDelayed, exitMsg=msg)
if trans is not None or not delayedRetry:
return trans
if Utils.Debug: Utils.Print("Could not find transaction with id %s, delay and retry" % (transId))
time.sleep(timeout)
self.missingTransaction=True
# either it is there or the transaction has timed out
return self.processCleosCmd(cmd, cmdDesc, silentErrors=silentErrors, exitOnError=exitOnError, exitMsg=msg)
else:
for i in range(0,(int(60/timeout) - 1)):
trans=self.getTransactionMdb(transId, silentErrors=silentErrors, exitOnError=exitOnErrorForDelayed)
if trans is not None or not delayedRetry:
return trans
if Utils.Debug: Utils.Print("Could not find transaction with id %s in mongodb, delay and retry" % (transId))
time.sleep(timeout)
return self.getTransactionMdb(transId, silentErrors=silentErrors, exitOnError=exitOnError)
def getTransactionMdb(self, transId, silentErrors=False, exitOnError=False):
"""Get transaction from MongoDB. Since DB only contains finalized blocks, transactions can take a while to appear in DB."""
cmd="%s %s" % (Utils.MongoPath, self.mongoEndpointArgs)
#subcommand='db.Transactions.findOne( { $and : [ { "trx_id": "%s" }, {"irreversible":true} ] } )' % (transId)
subcommand='db.transactions.findOne( { "trx_id": "%s" } )' % (transId)
if Utils.Debug: Utils.Print("cmd: echo '%s' | %s" % (subcommand, cmd))
start=time.perf_counter()
try:
trans=Node.runMongoCmdReturnJson(cmd.split(), subcommand, exitOnError=exitOnError)
if Utils.Debug:
end=time.perf_counter()
Utils.Print("cmd Duration: %.3f sec" % (end-start))
if trans is not None:
return trans
except subprocess.CalledProcessError as ex:
end=time.perf_counter()
msg=ex.output.decode("utf-8")
errorMsg="Exception during get db node get trans in mongodb with transaction id=%s. cmd Duration: %.3f sec. %s" % (transId, end-start, msg)
if exitOnError:
Utils.cmdError("" % (errorMsg))
Utils.errorExit("Failed to retrieve transaction in mongodb for transaction id=%s" % (transId))
elif not silentErrors:
Utils.Print("ERROR: %s" % (errorMsg))
return None
def isTransInBlock(self, transId, blockId):
"""Check if transId is within block identified by blockId"""
assert(transId)
assert(isinstance(transId, str))
assert(blockId)
assert(isinstance(blockId, int))
block=self.getBlock(blockId, exitOnError=True)
transactions=None
key=""
try:
if not self.enableMongo:
key="[transactions]"
transactions=block["transactions"]
else:
key="[blocks][transactions]"
transactions=block["block"]["transactions"]
except (AssertionError, TypeError, KeyError) as _:
Utils.Print("block%s not found. Block: %s" % (key,block))
raise
if transactions is not None:
for trans in transactions:
assert(trans)
try:
myTransId=trans["trx"]["id"]
if transId == myTransId:
return True
except (TypeError, KeyError) as _:
Utils.Print("transaction%s not found. Transaction: %s" % (key, trans))
return False
def getBlockIdByTransId(self, transId, delayedRetry=True):
"""Given a transaction Id (string), will return the actual block id (int) containing the transaction"""
assert(transId)
assert(isinstance(transId, str))
trans=self.getTransaction(transId, exitOnError=True, delayedRetry=delayedRetry)
refBlockNum=None
key=""
try:
if not self.enableMongo:
key="[trx][trx][ref_block_num]"
refBlockNum=trans["trx"]["trx"]["ref_block_num"]
else:
key="[ref_block_num]"
refBlockNum=trans["ref_block_num"]
refBlockNum=int(refBlockNum)+1
except (TypeError, ValueError, KeyError) as _:
Utils.Print("transaction%s not found. Transaction: %s" % (key, trans))
return None
headBlockNum=self.getHeadBlockNum()
assert(headBlockNum)
try:
headBlockNum=int(headBlockNum)
except(ValueError) as _:
Utils.Print("ERROR: Block info parsing failed. %s" % (headBlockNum))
raise
if Utils.Debug: Utils.Print("Reference block num %d, Head block num: %d" % (refBlockNum, headBlockNum))
for blockNum in range(refBlockNum, headBlockNum+1):
if self.isTransInBlock(str(transId), blockNum):
if Utils.Debug: Utils.Print("Found transaction %s in block %d" % (transId, blockNum))
return blockNum
return None
def getBlockIdByTransIdMdb(self, transId):
"""Given a transaction Id (string), will return block id (int) containing the transaction. This is specific to MongoDB."""
assert(transId)
assert(isinstance(transId, str))
trans=self.getTransactionMdb(transId)
if not trans: return None
refBlockNum=None
try:
refBlockNum=trans["ref_block_num"]
refBlockNum=int(refBlockNum)+1
except (TypeError, ValueError, KeyError) as _:
Utils.Print("transaction[ref_block_num] not found. Transaction: %s" % (trans))
return None
headBlockNum=self.getHeadBlockNum()
assert(headBlockNum)
try:
headBlockNum=int(headBlockNum)
except(ValueError) as _:
Utils.Print("Info parsing failed. %s" % (headBlockNum))
for blockNum in range(refBlockNum, headBlockNum+1):
if self.isTransInBlock(str(transId), blockNum):
return blockNum
return None
def isTransInAnyBlock(self, transId):
"""Check if transaction (transId) is in a block."""
assert(transId)
assert(isinstance(transId, (str,int)))
# if not self.enableMongo:
blockId=self.getBlockIdByTransId(transId)
# else:
# blockId=self.getBlockIdByTransIdMdb(transId)
return True if blockId else False
def isTransFinalized(self, transId):
"""Check if transaction (transId) has been finalized."""
assert(transId)
assert(isinstance(transId, str))
blockId=self.getBlockIdByTransId(transId)
if not blockId:
return False
assert(isinstance(blockId, int))
return self.isBlockPresent(blockId, blockType=BlockType.lib)
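    # Usage sketch (illustrative; assumes a node handle and a transId from a prior push):
    #   node.isTransInAnyBlock(transId)   # True once the trans shows up in some block
    #   node.isTransFinalized(transId)    # True only once that block has become irreversible (LIB)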
# Create & initialize account and return creation transactions. Return transaction json object
def createInitializeAccount(self, account, creatorAccount, stakedDeposit=1000, waitForTransBlock=False, stakeQuantity=200, exitOnError=False):
cmdDesc="system newaccount"
cmd='%s -j %s %s %s %s --stake "%s %s" --transfer' % (
cmdDesc, creatorAccount.name, account.name, account.ownerPublicKey,
account.activePublicKey, stakeQuantity, CORE_SYMBOL)
msg="(creator account=%s, account=%s)" % (creatorAccount.name, account.name);
trans=self.processCleosCmd(cmd, cmdDesc, silentErrors=False, exitOnError=exitOnError, exitMsg=msg)
self.trackCmdTransaction(trans)
transId=Node.getTransId(trans)
if stakedDeposit > 0:
self.waitForTransInBlock(transId) # seems like account creation needs to be finalized before transfer can happen
trans = self.transferFunds(creatorAccount, account, Node.currencyIntToStr(stakedDeposit, CORE_SYMBOL), "init")
transId=Node.getTransId(trans)
return self.waitForTransBlockIfNeeded(trans, waitForTransBlock, exitOnError=exitOnError)
def createAccount(self, account, creatorAccount, stakedDeposit=1000, waitForTransBlock=False, exitOnError=False):
"""Create account and return creation transactions. Return transaction json object.
waitForTransBlock: wait on creation transaction id to appear in a block."""
cmdDesc="create account"
cmd="%s -j %s %s %s %s" % (
cmdDesc, creatorAccount.name, account.name, account.ownerPublicKey, account.activePublicKey)
msg="(creator account=%s, account=%s)" % (creatorAccount.name, account.name);
trans=self.processCleosCmd(cmd, cmdDesc, silentErrors=False, exitOnError=exitOnError, exitMsg=msg)
self.trackCmdTransaction(trans)
transId=Node.getTransId(trans)
if stakedDeposit > 0:
            self.waitForTransInBlock(transId) # seems like account creation needs to be finalized before transfer can happen
trans = self.transferFunds(creatorAccount, account, "%0.04f %s" % (stakedDeposit/10000, CORE_SYMBOL), "init")
self.trackCmdTransaction(trans)
transId=Node.getTransId(trans)
return self.waitForTransBlockIfNeeded(trans, waitForTransBlock, exitOnError=exitOnError)
def getEosAccount(self, name, exitOnError=False, returnType=ReturnType.json, avoidMongo=False):
assert(isinstance(name, str))
if not self.enableMongo or avoidMongo:
cmdDesc="get account"
jsonFlag="-j" if returnType==ReturnType.json else ""
cmd="%s %s %s" % (cmdDesc, jsonFlag, name)
msg="( getEosAccount(name=%s) )" % (name);
return self.processCleosCmd(cmd, cmdDesc, silentErrors=False, exitOnError=exitOnError, exitMsg=msg, returnType=returnType)
else:
assert returnType == ReturnType.json, "MongoDB only supports a returnType of ReturnType.json"
return self.getEosAccountFromDb(name, exitOnError=exitOnError)
def getEosAccountFromDb(self, name, exitOnError=False):
cmd="%s %s" % (Utils.MongoPath, self.mongoEndpointArgs)
subcommand='db.accounts.findOne({"name" : "%s"})' % (name)
if Utils.Debug: Utils.Print("cmd: echo '%s' | %s" % (subcommand, cmd))
try:
timeout = 3
for i in range(0,(int(60/timeout) - 1)):
start=time.perf_counter()
trans=Node.runMongoCmdReturnJson(cmd.split(), subcommand, exitOnError=exitOnError)
if trans is not None:
if Utils.Debug:
end=time.perf_counter()
Utils.Print("cmd Duration: %.3f sec" % (end-start))
return trans
time.sleep(timeout)
return trans
except subprocess.CalledProcessError as ex:
msg=ex.output.decode("utf-8")
if exitOnError:
end=time.perf_counter()
Utils.cmdError("Exception during get account from db for %s. cmd Duration: %.3f sec. %s" % (name, end-start, msg))
Utils.errorExit("Failed during get account from db for %s. %s" % (name, msg))
Utils.Print("ERROR: Exception during get account from db for %s. %s" % (name, msg))
return None
def getTable(self, contract, scope, table, exitOnError=False):
cmdDesc = "get table"
cmd="%s %s %s %s" % (cmdDesc, contract, scope, table)
msg="contract=%s, scope=%s, table=%s" % (contract, scope, table);
return self.processCleosCmd(cmd, cmdDesc, exitOnError=exitOnError, exitMsg=msg)
def getTableAccountBalance(self, contract, scope):
assert(isinstance(contract, str))
assert(isinstance(scope, str))
table="accounts"
trans = self.getTable(contract, scope, table, exitOnError=True)
try:
return trans["rows"][0]["balance"]
except (TypeError, KeyError) as _:
print("transaction[rows][0][balance] not found. Transaction: %s" % (trans))
raise
def getCurrencyBalance(self, contract, account, symbol=CORE_SYMBOL, exitOnError=False):
"""returns raw output from get currency balance e.g. '99999.9950 CUR'"""
assert(contract)
assert(isinstance(contract, str))
assert(account)
assert(isinstance(account, str))
assert(symbol)
assert(isinstance(symbol, str))
cmdDesc = "get currency balance"
cmd="%s %s %s %s" % (cmdDesc, contract, account, symbol)
msg="contract=%s, account=%s, symbol=%s" % (contract, account, symbol);
return self.processCleosCmd(cmd, cmdDesc, exitOnError=exitOnError, exitMsg=msg, returnType=ReturnType.raw)
def getCurrencyStats(self, contract, symbol=CORE_SYMBOL, exitOnError=False):
"""returns Json output from get currency stats."""
assert(contract)
assert(isinstance(contract, str))
assert(symbol)
assert(isinstance(symbol, str))
cmdDesc = "get currency stats"
cmd="%s %s %s" % (cmdDesc, contract, symbol)
msg="contract=%s, symbol=%s" % (contract, symbol);
return self.processCleosCmd(cmd, cmdDesc, exitOnError=exitOnError, exitMsg=msg)
# Verifies account. Returns "get account" json return object
def verifyAccount(self, account):
assert(account)
if not self.enableMongo:
ret=self.getEosAccount(account.name)
if ret is not None:
account_name=ret["account_name"]
if account_name is None:
Utils.Print("ERROR: Failed to verify account creation.", account.name)
return None
return ret
else:
return self.verifyAccountMdb(account)
def verifyAccountMdb(self, account):
assert(account)
ret=self.getEosAccountFromDb(account.name)
if ret is not None:
account_name=ret["name"]
if account_name is None:
Utils.Print("ERROR: Failed to verify account creation.", account.name)
return None
return ret
return None
def waitForTransInBlock(self, transId, timeout=None):
"""Wait for trans id to be finalized."""
assert(isinstance(transId, str))
lam = lambda: self.isTransInAnyBlock(transId)
ret=Utils.waitForBool(lam, timeout)
return ret
def waitForTransFinalization(self, transId, timeout=None):
"""Wait for trans id to be finalized."""
assert(isinstance(transId, str))
lam = lambda: self.isTransFinalized(transId)
ret=Utils.waitForBool(lam, timeout)
return ret
def waitForNextBlock(self, timeout=None, blockType=BlockType.head):
num=self.getBlockNum(blockType=blockType)
        lam = lambda: self.getBlockNum(blockType=blockType) > num
ret=Utils.waitForBool(lam, timeout)
return ret
def waitForBlock(self, blockNum, timeout=None, blockType=BlockType.head, reportInterval=None, sleepTime=0.5):
lam = lambda: self.getBlockNum(blockType=blockType) > blockNum
blockDesc = "head" if blockType == BlockType.head else "LIB"
count = 0
class WaitReporter:
def __init__(self, node, reportInterval):
self.count = 0
self.node = node
self.reportInterval = reportInterval
def __call__(self):
self.count += 1
if self.count % self.reportInterval == 0:
info = self.node.getInfo()
Utils.Print("Waiting on %s block num %d, get info = {\n%s\n}" % (blockDesc, blockNum, info))
reporter = WaitReporter(self, reportInterval) if reportInterval is not None else None
ret=Utils.waitForBool(lam, timeout, reporter=reporter, sleepTime=sleepTime)
return ret
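    # Usage sketch (illustrative values): wait for LIB to pass block 1000, logging get info
    # every 10 polls (with the default sleepTime of 0.5 sec that is roughly every 5 seconds):
    #   node.waitForBlock(1000, timeout=60, blockType=BlockType.lib, reportInterval=10)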
def waitForIrreversibleBlock(self, blockNum, timeout=None, blockType=BlockType.head):
return self.waitForBlock(blockNum, timeout=timeout, blockType=blockType)
    # Transfer funds. Returns "transfer" json return object
def transferFunds(self, source, destination, amountStr, memo="memo", force=False, waitForTransBlock=False, exitOnError=True):
assert isinstance(amountStr, str)
assert(source)
assert(isinstance(source, Account))
assert(destination)
assert(isinstance(destination, Account))
cmd="%s %s -v transfer -j %s %s" % (
Utils.EosClientPath, self.eosClientArgs(), source.name, destination.name)
cmdArr=cmd.split()
cmdArr.append(amountStr)
cmdArr.append(memo)
if force:
cmdArr.append("-f")
s=" ".join(cmdArr)
if Utils.Debug: Utils.Print("cmd: %s" % (s))
trans=None
start=time.perf_counter()
try:
trans=Utils.runCmdArrReturnJson(cmdArr)
if Utils.Debug:
end=time.perf_counter()
Utils.Print("cmd Duration: %.3f sec" % (end-start))
self.trackCmdTransaction(trans)
except subprocess.CalledProcessError as ex:
end=time.perf_counter()
msg=ex.output.decode("utf-8")
Utils.Print("ERROR: Exception during funds transfer. cmd Duration: %.3f sec. %s" % (end-start, msg))
if exitOnError:
Utils.cmdError("could not transfer \"%s\" from %s to %s" % (amountStr, source, destination))
Utils.errorExit("Failed to transfer \"%s\" from %s to %s" % (amountStr, source, destination))
return None
if trans is None:
Utils.cmdError("could not transfer \"%s\" from %s to %s" % (amountStr, source, destination))
Utils.errorExit("Failed to transfer \"%s\" from %s to %s" % (amountStr, source, destination))
return self.waitForTransBlockIfNeeded(trans, waitForTransBlock, exitOnError=exitOnError)
@staticmethod
def currencyStrToInt(balanceStr):
"""Converts currency string of form "12.3456 SYS" to int 123456"""
assert(isinstance(balanceStr, str))
balanceStr=balanceStr.split()[0]
#balance=int(decimal.Decimal(balanceStr[1:])*10000)
balance=int(decimal.Decimal(balanceStr)*10000)
return balance
@staticmethod
def currencyIntToStr(balance, symbol):
"""Converts currency int of form 123456 to string "12.3456 SYS" where SYS is symbol string"""
assert(isinstance(balance, int))
assert(isinstance(symbol, str))
balanceStr="%.04f %s" % (balance/10000.0, symbol)
return balanceStr
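    # Round-trip sketch (illustrative): balances are fixed-point integers with four decimal
    # places, so the two helpers above invert each other:
    #   Node.currencyStrToInt("12.3456 SYS") -> 123456
    #   Node.currencyIntToStr(123456, "SYS") -> "12.3456 SYS"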
def validateFunds(self, initialBalances, transferAmount, source, accounts):
"""Validate each account has the expected SYS balance. Validate cumulative balance matches expectedTotal."""
assert(source)
assert(isinstance(source, Account))
assert(accounts)
assert(isinstance(accounts, list))
assert(len(accounts) > 0)
assert(initialBalances)
assert(isinstance(initialBalances, dict))
assert(isinstance(transferAmount, int))
currentBalances=self.getEosBalances([source] + accounts)
assert(currentBalances)
assert(isinstance(currentBalances, dict))
assert(len(initialBalances) == len(currentBalances))
if len(currentBalances) != len(initialBalances):
Utils.Print("ERROR: validateFunds> accounts length mismatch. Initial: %d, current: %d" % (len(initialBalances), len(currentBalances)))
return False
for key, value in currentBalances.items():
initialBalance = initialBalances[key]
assert(initialBalances)
expectedInitialBalance = value - transferAmount
if key is source:
expectedInitialBalance = value + (transferAmount*len(accounts))
if (initialBalance != expectedInitialBalance):
Utils.Print("ERROR: validateFunds> Expected: %d, actual: %d for account %s" %
(expectedInitialBalance, initialBalance, key.name))
                return False
        return True
def getEosBalances(self, accounts):
"""Returns a dictionary with account balances keyed by accounts"""
assert(accounts)
assert(isinstance(accounts, list))
balances={}
for account in accounts:
balance = self.getAccountEosBalance(account.name)
balances[account]=balance
return balances
# Gets accounts mapped to key. Returns json object
def getAccountsByKey(self, key, exitOnError=False):
cmdDesc = "get accounts"
cmd="%s %s" % (cmdDesc, key)
msg="key=%s" % (key);
return self.processCleosCmd(cmd, cmdDesc, exitOnError=exitOnError, exitMsg=msg)
# Get actions mapped to an account (remcli get actions)
def getActions(self, account, pos=-1, offset=-1, exitOnError=False):
assert(isinstance(account, Account))
assert(isinstance(pos, int))
assert(isinstance(offset, int))
if not self.enableMongo:
cmdDesc = "get actions"
cmd="%s -j %s %d %d" % (cmdDesc, account.name, pos, offset)
msg="account=%s, pos=%d, offset=%d" % (account.name, pos, offset);
return self.processCleosCmd(cmd, cmdDesc, exitOnError=exitOnError, exitMsg=msg)
else:
return self.getActionsMdb(account, pos, offset, exitOnError=exitOnError)
def getActionsMdb(self, account, pos=-1, offset=-1, exitOnError=False):
assert(isinstance(account, Account))
assert(isinstance(pos, int))
assert(isinstance(offset, int))
cmd="%s %s" % (Utils.MongoPath, self.mongoEndpointArgs)
subcommand='db.action_traces.find({$or: [{"act.data.from":"%s"},{"act.data.to":"%s"}]}).sort({"_id":%d}).limit(%d)' % (account.name, account.name, pos, abs(offset))
if Utils.Debug: Utils.Print("cmd: echo '%s' | %s" % (subcommand, cmd))
start=time.perf_counter()
try:
actions=Node.runMongoCmdReturnJson(cmd.split(), subcommand, exitOnError=exitOnError)
if Utils.Debug:
end=time.perf_counter()
Utils.Print("cmd Duration: %.3f sec" % (end-start))
if actions is not None:
return actions
except subprocess.CalledProcessError as ex:
end=time.perf_counter()
msg=ex.output.decode("utf-8")
errorMsg="Exception during get db actions. cmd Duration: %.3f sec. %s" % (end-start, msg)
if exitOnError:
Utils.cmdError(errorMsg)
Utils.errorExit(errorMsg)
else:
Utils.Print("ERROR: %s" % (errorMsg))
return None
# Gets accounts mapped to key. Returns array
def getAccountsArrByKey(self, key):
trans=self.getAccountsByKey(key)
assert(trans)
assert("account_names" in trans)
accounts=trans["account_names"]
return accounts
def getServants(self, name, exitOnError=False):
cmdDesc = "get servants"
cmd="%s %s" % (cmdDesc, name)
msg="name=%s" % (name);
return self.processCleosCmd(cmd, cmdDesc, exitOnError=exitOnError, exitMsg=msg)
def getServantsArr(self, name):
trans=self.getServants(name, exitOnError=True)
servants=trans["controlled_accounts"]
return servants
def getAccountEosBalanceStr(self, scope):
"""Returns SYS currency0000 account balance from remcli get table command. Returned balance is string following syntax "98.0311 SYS". """
assert isinstance(scope, str)
amount=self.getTableAccountBalance("rem.token", scope)
if Utils.Debug: Utils.Print("getNodeAccountEosBalance %s %s" % (scope, amount))
assert isinstance(amount, str)
return amount
def getAccountEosBalance(self, scope):
"""Returns SYS currency0000 account balance from remcli get table command. Returned balance is an integer e.g. 980311. """
balanceStr=self.getAccountEosBalanceStr(scope)
balance=Node.currencyStrToInt(balanceStr)
return balance
def getAccountCodeHash(self, account):
cmd="%s %s get code %s" % (Utils.EosClientPath, self.eosClientArgs(), account)
if Utils.Debug: Utils.Print("cmd: %s" % (cmd))
start=time.perf_counter()
try:
retStr=Utils.checkOutput(cmd.split())
if Utils.Debug:
end=time.perf_counter()
Utils.Print("cmd Duration: %.3f sec" % (end-start))
#Utils.Print ("get code> %s"% retStr)
p=re.compile(r'code\shash: (\w+)\n', re.MULTILINE)
m=p.search(retStr)
if m is None:
msg="Failed to parse code hash."
Utils.Print("ERROR: "+ msg)
return None
return m.group(1)
except subprocess.CalledProcessError as ex:
end=time.perf_counter()
msg=ex.output.decode("utf-8")
Utils.Print("ERROR: Exception during code hash retrieval. cmd Duration: %.3f sec. %s" % (end-start, msg))
return None
# publish contract and return transaction as json object
def publishContract(self, account, contractDir, wasmFile, abiFile, waitForTransBlock=False, shouldFail=False):
cmd="%s %s -v set contract -j %s %s" % (Utils.EosClientPath, self.eosClientArgs(), account, contractDir)
cmd += "" if wasmFile is None else (" "+ wasmFile)
cmd += "" if abiFile is None else (" " + abiFile)
if Utils.Debug: Utils.Print("cmd: %s" % (cmd))
trans=None
start=time.perf_counter()
try:
trans=Utils.runCmdReturnJson(cmd, trace=False)
self.trackCmdTransaction(trans)
if Utils.Debug:
end=time.perf_counter()
Utils.Print("cmd Duration: %.3f sec" % (end-start))
except subprocess.CalledProcessError as ex:
if not shouldFail:
end=time.perf_counter()
msg=ex.output.decode("utf-8")
Utils.Print("ERROR: Exception during set contract. cmd Duration: %.3f sec. %s" % (end-start, msg))
return None
else:
retMap={}
retMap["returncode"]=ex.returncode
retMap["cmd"]=ex.cmd
retMap["output"]=ex.output
# commented below as they are available only in Python3.5 and above
# retMap["stdout"]=ex.stdout
# retMap["stderr"]=ex.stderr
return retMap
if shouldFail:
Utils.Print("ERROR: The publish contract did not fail as expected.")
return None
Node.validateTransaction(trans)
return self.waitForTransBlockIfNeeded(trans, waitForTransBlock, exitOnError=False)
def getTableRows(self, contract, scope, table):
jsonData=self.getTable(contract, scope, table)
if jsonData is None:
return None
rows=jsonData["rows"]
return rows
def getTableRow(self, contract, scope, table, idx):
if idx < 0:
Utils.Print("ERROR: Table index cannot be negative. idx: %d" % (idx))
return None
rows=self.getTableRows(contract, scope, table)
if rows is None or idx >= len(rows):
Utils.Print("ERROR: Retrieved table does not contain row %d" % idx)
return None
row=rows[idx]
return row
def getTableColumns(self, contract, scope, table):
row=self.getTableRow(contract, scope, table, 0)
keys=list(row.keys())
return keys
    # returns tuple of (success flag, transaction json on success or error message on failure)
def pushMessage(self, account, action, data, opts, silentErrors=False):
cmd="%s %s push action -j %s %s" % (Utils.EosClientPath, self.eosClientArgs(), account, action)
cmdArr=cmd.split()
if data is not None:
cmdArr.append(data)
if opts is not None:
cmdArr += opts.split()
s=" ".join(cmdArr)
if Utils.Debug: Utils.Print("cmd: %s" % (cmdArr))
start=time.perf_counter()
try:
trans=Utils.runCmdArrReturnJson(cmdArr)
self.trackCmdTransaction(trans, ignoreNonTrans=True)
if Utils.Debug:
end=time.perf_counter()
Utils.Print("cmd Duration: %.3f sec" % (end-start))
return (True, trans)
except subprocess.CalledProcessError as ex:
msg=ex.output.decode("utf-8")
if not silentErrors:
end=time.perf_counter()
Utils.Print("ERROR: Exception during push message. cmd Duration=%.3f sec. %s" % (end - start, msg))
return (False, msg)
def setPermission(self, account, code, pType, requirement, waitForTransBlock=False, exitOnError=False):
cmdDesc="set action permission"
cmd="%s -j %s %s %s %s" % (cmdDesc, account, code, pType, requirement)
trans=self.processCleosCmd(cmd, cmdDesc, silentErrors=False, exitOnError=exitOnError)
self.trackCmdTransaction(trans)
return self.waitForTransBlockIfNeeded(trans, waitForTransBlock, exitOnError=exitOnError)
def delegatebw(self, fromAccount, stakeQuantity, toAccount=None, transferTo=False, waitForTransBlock=False, exitOnError=False):
if toAccount is None:
toAccount=fromAccount
cmdDesc="system delegatebw"
transferStr="--transfer" if transferTo else ""
cmd="%s -j %s %s \"%s %s\" %s" % (
cmdDesc, fromAccount.name, toAccount.name, stakeQuantity, CORE_SYMBOL, transferStr)
msg="fromAccount=%s, toAccount=%s" % (fromAccount.name, toAccount.name);
trans=self.processCleosCmd(cmd, cmdDesc, exitOnError=exitOnError, exitMsg=msg)
self.trackCmdTransaction(trans)
return self.waitForTransBlockIfNeeded(trans, waitForTransBlock, exitOnError=exitOnError)
def undelegatebw(self, fromAccount, netQuantity, cpuQuantity, toAccount=None, waitForTransBlock=False, exitOnError=False):
if toAccount is None:
toAccount=fromAccount
cmdDesc="system undelegatebw"
cmd="%s -j %s %s \"%s %s\" \"%s %s\"" % (
cmdDesc, fromAccount.name, toAccount.name, netQuantity, CORE_SYMBOL, cpuQuantity, CORE_SYMBOL)
msg="fromAccount=%s, toAccount=%s" % (fromAccount.name, toAccount.name);
trans=self.processCleosCmd(cmd, cmdDesc, exitOnError=exitOnError, exitMsg=msg)
self.trackCmdTransaction(trans)
return self.waitForTransBlockIfNeeded(trans, waitForTransBlock, exitOnError=exitOnError)
def regproducer(self, producer, url, location, waitForTransBlock=False, exitOnError=False):
cmdDesc="system regproducer"
cmd="%s -j %s %s %s %s" % (
cmdDesc, producer.name, producer.activePublicKey, url, location)
msg="producer=%s" % (producer.name);
trans=self.processCleosCmd(cmd, cmdDesc, exitOnError=exitOnError, exitMsg=msg)
self.trackCmdTransaction(trans)
return self.waitForTransBlockIfNeeded(trans, waitForTransBlock, exitOnError=exitOnError)
def vote(self, account, producers, waitForTransBlock=False, exitOnError=False):
cmdDesc = "system voteproducer prods"
cmd="%s -j %s %s" % (
cmdDesc, account.name, " ".join(producers))
msg="account=%s, producers=[ %s ]" % (account.name, ", ".join(producers));
trans=self.processCleosCmd(cmd, cmdDesc, exitOnError=exitOnError, exitMsg=msg)
self.trackCmdTransaction(trans)
return self.waitForTransBlockIfNeeded(trans, waitForTransBlock, exitOnError=exitOnError)
def processCleosCmd(self, cmd, cmdDesc, silentErrors=True, exitOnError=False, exitMsg=None, returnType=ReturnType.json):
assert(isinstance(returnType, ReturnType))
cmd="%s %s %s" % (Utils.EosClientPath, self.eosClientArgs(), cmd)
if Utils.Debug: Utils.Print("cmd: %s" % (cmd))
if exitMsg is not None:
exitMsg="Context: " + exitMsg
else:
exitMsg=""
trans=None
start=time.perf_counter()
try:
if returnType==ReturnType.json:
trans=Utils.runCmdReturnJson(cmd, silentErrors=silentErrors)
elif returnType==ReturnType.raw:
trans=Utils.runCmdReturnStr(cmd)
else:
unhandledEnumType(returnType)
if Utils.Debug:
end=time.perf_counter()
Utils.Print("cmd Duration: %.3f sec" % (end-start))
except subprocess.CalledProcessError as ex:
if not silentErrors:
end=time.perf_counter()
msg=ex.output.decode("utf-8")
errorMsg="Exception during \"%s\". Exception message: %s. cmd Duration=%.3f sec. %s" % (cmdDesc, msg, end-start, exitMsg)
if exitOnError:
Utils.cmdError(errorMsg)
Utils.errorExit(errorMsg)
else:
Utils.Print("ERROR: %s" % (errorMsg))
return None
if exitOnError and trans is None:
Utils.cmdError("could not \"%s\". %s" % (cmdDesc,exitMsg))
Utils.errorExit("Failed to \"%s\"" % (cmdDesc))
return trans
def killNodeOnProducer(self, producer, whereInSequence, blockType=BlockType.head, silentErrors=True, exitOnError=False, exitMsg=None, returnType=ReturnType.json):
assert(isinstance(producer, str))
assert(isinstance(whereInSequence, int))
assert(isinstance(blockType, BlockType))
assert(isinstance(returnType, ReturnType))
basedOnLib="true" if blockType==BlockType.lib else "false"
payload="{ \"producer\":\"%s\", \"where_in_sequence\":%d, \"based_on_lib\":\"%s\" }" % (producer, whereInSequence, basedOnLib)
return self.processCurlCmd("test_control", "kill_node_on_producer", payload, silentErrors=silentErrors, exitOnError=exitOnError, exitMsg=exitMsg, returnType=returnType)
def processCurlCmd(self, resource, command, payload, silentErrors=True, exitOnError=False, exitMsg=None, returnType=ReturnType.json):
cmd="curl %s/v1/%s/%s -d '%s' -X POST -H \"Content-Type: application/json\"" % \
(self.endpointHttp, resource, command, payload)
if Utils.Debug: Utils.Print("cmd: %s" % (cmd))
rtn=None
start=time.perf_counter()
try:
if returnType==ReturnType.json:
rtn=Utils.runCmdReturnJson(cmd, silentErrors=silentErrors)
elif returnType==ReturnType.raw:
rtn=Utils.runCmdReturnStr(cmd)
else:
unhandledEnumType(returnType)
if Utils.Debug:
end=time.perf_counter()
Utils.Print("cmd Duration: %.3f sec" % (end-start))
printReturn=json.dumps(rtn) if returnType==ReturnType.json else rtn
Utils.Print("cmd returned: %s" % (printReturn))
except subprocess.CalledProcessError as ex:
if not silentErrors:
end=time.perf_counter()
msg=ex.output.decode("utf-8")
errorMsg="Exception during \"%s\". %s. cmd Duration=%.3f sec." % (cmd, msg, end-start)
if exitOnError:
Utils.cmdError(errorMsg)
Utils.errorExit(errorMsg)
else:
Utils.Print("ERROR: %s" % (errorMsg))
return None
if exitMsg is not None:
exitMsg=": " + exitMsg
else:
exitMsg=""
if exitOnError and rtn is None:
Utils.cmdError("could not \"%s\" - %s" % (cmd,exitMsg))
Utils.errorExit("Failed to \"%s\"" % (cmd))
return rtn
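    # Illustrative example of the request this composes: with resource="producer",
    # command="create_snapshot" and payload="{}" (see createSnapshot below) it runs roughly
    #   curl <endpointHttp>/v1/producer/create_snapshot -d '{}' -X POST -H "Content-Type: application/json"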
def txnGenCreateTestAccounts(self, genAccount, genKey, silentErrors=True, exitOnError=False, exitMsg=None, returnType=ReturnType.json):
assert(isinstance(genAccount, str))
assert(isinstance(genKey, str))
assert(isinstance(returnType, ReturnType))
payload="[ \"%s\", \"%s\" ]" % (genAccount, genKey)
return self.processCurlCmd("txn_test_gen", "create_test_accounts", payload, silentErrors=silentErrors, exitOnError=exitOnError, exitMsg=exitMsg, returnType=returnType)
def txnGenStart(self, salt, period, batchSize, silentErrors=True, exitOnError=False, exitMsg=None, returnType=ReturnType.json):
assert(isinstance(salt, str))
assert(isinstance(period, int))
assert(isinstance(batchSize, int))
assert(isinstance(returnType, ReturnType))
payload="[ \"%s\", %d, %d ]" % (salt, period, batchSize)
return self.processCurlCmd("txn_test_gen", "start_generation", payload, silentErrors=silentErrors, exitOnError=exitOnError, exitMsg=exitMsg, returnType=returnType)
def waitForTransBlockIfNeeded(self, trans, waitForTransBlock, exitOnError=False):
if not waitForTransBlock:
return trans
transId=Node.getTransId(trans)
if not self.waitForTransInBlock(transId):
if exitOnError:
Utils.cmdError("transaction with id %s never made it to a block" % (transId))
Utils.errorExit("Failed to find transaction with id %s in a block before timeout" % (transId))
return None
return trans
def getInfo(self, silentErrors=False, exitOnError=False):
cmdDesc = "get info"
tmpVar = Utils.Debug
Utils.Debug = False
info=self.processCleosCmd(cmdDesc, cmdDesc, silentErrors=silentErrors, exitOnError=exitOnError)
Utils.Debug = tmpVar
if info is None:
self.infoValid=False
else:
self.infoValid=True
self.lastRetrievedHeadBlockNum=int(info["head_block_num"])
self.lastRetrievedLIB=int(info["last_irreversible_block_num"])
self.lastRetrievedHeadBlockProducer=info["head_block_producer"]
return info
def getBlockFromDb(self, idx):
cmd="%s %s" % (Utils.MongoPath, self.mongoEndpointArgs)
subcommand="db.blocks.find().sort({\"_id\":%d}).limit(1).pretty()" % (idx)
if Utils.Debug: Utils.Print("cmd: echo \"%s\" | %s" % (subcommand, cmd))
start=time.perf_counter()
try:
trans=Node.runMongoCmdReturnJson(cmd.split(), subcommand)
if Utils.Debug:
end=time.perf_counter()
Utils.Print("cmd Duration: %.3f sec" % (end-start))
return trans
except subprocess.CalledProcessError as ex:
end=time.perf_counter()
msg=ex.output.decode("utf-8")
Utils.Print("ERROR: Exception during get db block. cmd Duration: %.3f sec. %s" % (end-start, msg))
return None
def checkPulse(self, exitOnError=False):
info=self.getInfo(True, exitOnError=exitOnError)
return False if info is None else True
def getHeadBlockNum(self):
"""returns head block number(string) as returned by remcli get info."""
if not self.enableMongo:
info=self.getInfo(exitOnError=True)
if info is not None:
headBlockNumTag="head_block_num"
return info[headBlockNumTag]
else:
# Either this implementation or the one in getIrreversibleBlockNum are likely wrong.
time.sleep(1)
block=self.getBlockFromDb(-1)
if block is not None:
blockNum=block["block_num"]
return blockNum
return None
def getIrreversibleBlockNum(self):
if not self.enableMongo:
info=self.getInfo(exitOnError=True)
if info is not None:
Utils.Print("current lib: %d" % (info["last_irreversible_block_num"]))
return info["last_irreversible_block_num"]
else:
# Either this implementation or the one in getHeadBlockNum are likely wrong.
block=self.getBlockFromDb(-1)
if block is not None:
blockNum=block["block_num"]
return blockNum
return None
def getBlockNum(self, blockType=BlockType.head):
assert isinstance(blockType, BlockType)
if blockType==BlockType.head:
return self.getHeadBlockNum()
elif blockType==BlockType.lib:
return self.getIrreversibleBlockNum()
else:
unhandledEnumType(blockType)
def kill(self, killSignal):
if Utils.Debug: Utils.Print("Killing node: %s" % (self.cmd))
assert(self.pid is not None)
try:
if self.popenProc is not None:
self.popenProc.send_signal(killSignal)
self.popenProc.wait()
else:
os.kill(self.pid, killSignal)
except OSError as ex:
Utils.Print("ERROR: Failed to kill node (%s)." % (self.cmd), ex)
return False
# wait for kill validation
def myFunc():
try:
os.kill(self.pid, 0) #check if process with pid is running
except OSError as _:
return True
return False
if not Utils.waitForBool(myFunc):
Utils.Print("ERROR: Failed to validate node shutdown.")
return False
# mark node as killed
self.pid=None
self.killed=True
return True
def interruptAndVerifyExitStatus(self, timeout=60):
if Utils.Debug: Utils.Print("terminating node: %s" % (self.cmd))
assert self.popenProc is not None, "node: \"%s\" does not have a popenProc, this may be because it is only set after a relaunch." % (self.cmd)
self.popenProc.send_signal(signal.SIGINT)
try:
outs, _ = self.popenProc.communicate(timeout=timeout)
assert self.popenProc.returncode == 0, "Expected terminating \"%s\" to have an exit status of 0, but got %d" % (self.cmd, self.popenProc.returncode)
except subprocess.TimeoutExpired:
Utils.errorExit("Terminate call failed on node: %s" % (self.cmd))
# mark node as killed
self.pid=None
self.killed=True
def verifyAlive(self, silent=False):
logStatus=not silent and Utils.Debug
pid=self.pid
if logStatus: Utils.Print("Checking if node(pid=%s) is alive(killed=%s): %s" % (self.pid, self.killed, self.cmd))
if self.killed or self.pid is None:
self.killed=True
self.pid=None
return False
try:
os.kill(self.pid, 0)
except ProcessLookupError as ex:
# mark node as killed
self.pid=None
self.killed=True
if logStatus: Utils.Print("Determined node(formerly pid=%s) is killed" % (pid))
return False
except PermissionError as ex:
if logStatus: Utils.Print("Determined node(formerly pid=%s) is alive" % (pid))
return True
if logStatus: Utils.Print("Determined node(pid=%s) is alive" % (self.pid))
return True
def getBlockProducerByNum(self, blockNum, timeout=None, waitForBlock=True, exitOnError=True):
if waitForBlock:
self.waitForBlock(blockNum, timeout=timeout, blockType=BlockType.head)
block=self.getBlock(blockNum, exitOnError=exitOnError)
blockProducer=block["producer"]
if blockProducer is None and exitOnError:
Utils.cmdError("could not get producer for block number %s" % (blockNum))
Utils.errorExit("Failed to get block's producer")
return blockProducer
def getBlockProducer(self, timeout=None, waitForBlock=True, exitOnError=True, blockType=BlockType.head):
blockNum=self.getBlockNum(blockType=blockType)
block=self.getBlock(blockNum, exitOnError=exitOnError, blockType=blockType)
blockProducer=block["producer"]
if blockProducer is None and exitOnError:
Utils.cmdError("could not get producer for block number %s" % (blockNum))
Utils.errorExit("Failed to get block's producer")
return blockProducer
def getNextCleanProductionCycle(self, trans):
        rounds=21*12*2  # max blocks to wait: 21 producers x 12 blocks per producer x 2 rounds (ensures at least 2/3+1 of producers have produced)
if trans is not None:
transId=Node.getTransId(trans)
self.waitForTransFinalization(transId, timeout=rounds/2)
else:
transId="Null"
irreversibleBlockNum=self.getIrreversibleBlockNum()
# The voted schedule should be promoted now, then need to wait for that to become irreversible
votingTallyWindow=120 #could be up to 120 blocks before the votes were tallied
promotedBlockNum=self.getHeadBlockNum()+votingTallyWindow
self.waitForIrreversibleBlock(promotedBlockNum, timeout=rounds/2)
ibnSchedActive=self.getIrreversibleBlockNum()
blockNum=self.getHeadBlockNum()
Utils.Print("Searching for clean production cycle blockNum=%s ibn=%s transId=%s promoted bn=%s ibn for schedule active=%s" % (blockNum,irreversibleBlockNum,transId,promotedBlockNum,ibnSchedActive))
blockProducer=self.getBlockProducerByNum(blockNum)
blockNum+=1
Utils.Print("Advance until the next block producer is retrieved")
while blockProducer == self.getBlockProducerByNum(blockNum):
blockNum+=1
blockProducer=self.getBlockProducerByNum(blockNum)
return blockNum
# TBD: make nodeId an internal property
# pylint: disable=too-many-locals
    # If nodeosPath is None, the existing remnode path is reused
def relaunch(self, nodeId, chainArg=None, newChain=False, timeout=Utils.systemWaitTimeout, addSwapFlags=None, cachePopen=False, nodeosPath=None):
assert(self.pid is None)
assert(self.killed)
assert isinstance(nodeId, int) or (isinstance(nodeId, str) and nodeId == "bios"), "Invalid Node ID is passed"
if Utils.Debug: Utils.Print("Launching node process, Id: {}".format(nodeId))
cmdArr=[]
splittedCmd=self.cmd.split()
if nodeosPath: splittedCmd[0] = nodeosPath
myCmd=" ".join(splittedCmd)
toAddOrSwap=copy.deepcopy(addSwapFlags) if addSwapFlags is not None else {}
if not newChain:
skip=False
swapValue=None
for i in splittedCmd:
Utils.Print("\"%s\"" % (i))
if skip:
skip=False
continue
if "--genesis-json" == i or "--genesis-timestamp" == i:
skip=True
continue
if swapValue is None:
cmdArr.append(i)
else:
cmdArr.append(swapValue)
swapValue=None
if i in toAddOrSwap:
swapValue=toAddOrSwap[i]
del toAddOrSwap[i]
for k,v in toAddOrSwap.items():
cmdArr.append(k)
cmdArr.append(v)
myCmd=" ".join(cmdArr)
cmd=myCmd + ("" if chainArg is None else (" " + chainArg))
self.launchCmd(cmd, nodeId, cachePopen)
def isNodeAlive():
"""wait for node to be responsive."""
try:
return True if self.checkPulse() else False
except (TypeError) as _:
pass
return False
isAlive=Utils.waitForBool(isNodeAlive, timeout, sleepTime=0.1)
if isAlive:
Utils.Print("Node relaunch was successfull.")
else:
Utils.Print("ERROR: Node relaunch Failed.")
# Ensure the node process is really killed
if self.popenProc:
self.popenProc.send_signal(signal.SIGTERM)
self.popenProc.wait()
self.pid=None
return False
self.cmd=cmd
self.killed=False
return True
@staticmethod
def unstartedFile(nodeId):
assert(isinstance(nodeId, int))
startFile=Utils.getNodeDataDir(nodeId, "start.cmd")
if not os.path.exists(startFile):
Utils.errorExit("Cannot find unstarted node since %s file does not exist" % startFile)
return startFile
def launchUnstarted(self, nodeId, cachePopen=False):
Utils.Print("launchUnstarted cmd: %s" % (self.cmd))
self.launchCmd(self.cmd, nodeId, cachePopen)
def launchCmd(self, cmd, nodeId, cachePopen=False):
dataDir=Utils.getNodeDataDir(nodeId)
dt = datetime.datetime.now()
dateStr=Utils.getDateString(dt)
stdoutFile="%s/stdout.%s.txt" % (dataDir, dateStr)
stderrFile="%s/stderr.%s.txt" % (dataDir, dateStr)
with open(stdoutFile, 'w') as sout, open(stderrFile, 'w') as serr:
Utils.Print("cmd: %s" % (cmd))
popen=subprocess.Popen(cmd.split(), stdout=sout, stderr=serr)
if cachePopen:
self.popenProc=popen
self.pid=popen.pid
if Utils.Debug: Utils.Print("start Node host=%s, port=%s, pid=%s, cmd=%s" % (self.host, self.port, self.pid, self.cmd))
def trackCmdTransaction(self, trans, ignoreNonTrans=False):
if trans is None:
if Utils.Debug: Utils.Print(" cmd returned transaction: %s" % (trans))
return
if ignoreNonTrans and not Node.isTrans(trans):
if Utils.Debug: Utils.Print(" cmd returned a non-transaction")
return
transId=Node.getTransId(trans)
if Utils.Debug:
status=Node.getTransStatus(trans)
blockNum=Node.getTransBlockNum(trans)
if transId in self.transCache.keys():
replaceMsg="replacing previous trans=\n%s" % json.dumps(self.transCache[transId], indent=2, sort_keys=True)
else:
replaceMsg=""
Utils.Print(" cmd returned transaction id: %s, status: %s, (possible) block num: %s %s" % (transId, status, blockNum, replaceMsg))
self.transCache[transId]=trans
def reportStatus(self):
Utils.Print("Node State:")
Utils.Print(" cmd : %s" % (self.cmd))
self.verifyAlive(silent=True)
Utils.Print(" killed: %s" % (self.killed))
Utils.Print(" host : %s" % (self.host))
Utils.Print(" port : %s" % (self.port))
Utils.Print(" pid : %s" % (self.pid))
status="last getInfo returned None" if not self.infoValid else "at last call to getInfo"
Utils.Print(" hbn : %s (%s)" % (self.lastRetrievedHeadBlockNum, status))
Utils.Print(" lib : %s (%s)" % (self.lastRetrievedLIB, status))
# Require producer_api_plugin
def scheduleProtocolFeatureActivations(self, featureDigests=[]):
param = { "protocol_features_to_activate": featureDigests }
self.processCurlCmd("producer", "schedule_protocol_feature_activations", json.dumps(param))
# Require producer_api_plugin
def getSupportedProtocolFeatures(self, excludeDisabled=False, excludeUnactivatable=False):
param = {
"exclude_disabled": excludeDisabled,
"exclude_unactivatable": excludeUnactivatable
}
res = self.processCurlCmd("producer", "get_supported_protocol_features", json.dumps(param))
return res
# This will return supported protocol features in a dict (feature codename as the key), i.e.
# {
# "PREACTIVATE_FEATURE": {...},
# "ONLY_LINK_TO_EXISTING_PERMISSION": {...},
# }
# Require producer_api_plugin
def getSupportedProtocolFeatureDict(self, excludeDisabled=False, excludeUnactivatable=False):
protocolFeatureDigestDict = {}
supportedProtocolFeatures = self.getSupportedProtocolFeatures(excludeDisabled, excludeUnactivatable)
for protocolFeature in supportedProtocolFeatures:
for spec in protocolFeature["specification"]:
if (spec["name"] == "builtin_feature_codename"):
codename = spec["value"]
protocolFeatureDigestDict[codename] = protocolFeature
break
return protocolFeatureDigestDict
def waitForHeadToAdvance(self, timeout=6):
currentHead = self.getHeadBlockNum()
def isHeadAdvancing():
return self.getHeadBlockNum() > currentHead
return Utils.waitForBool(isHeadAdvancing, timeout)
def waitForLibToAdvance(self, timeout=30):
currentLib = self.getIrreversibleBlockNum()
def isLibAdvancing():
return self.getIrreversibleBlockNum() > currentLib
return Utils.waitForBool(isLibAdvancing, timeout)
# Require producer_api_plugin
def activatePreactivateFeature(self):
protocolFeatureDigestDict = self.getSupportedProtocolFeatureDict()
preactivateFeatureDigest = protocolFeatureDigestDict["PREACTIVATE_FEATURE"]["feature_digest"]
assert preactivateFeatureDigest
self.scheduleProtocolFeatureActivations([preactivateFeatureDigest])
# Wait for the next block to be produced so the scheduled protocol feature is activated
assert self.waitForHeadToAdvance(), print("ERROR: TIMEOUT WAITING FOR PREACTIVATE")
# Return an array of feature digests to be preactivated in a correct order respecting dependencies
# Require producer_api_plugin
def getAllBuiltinFeatureDigestsToPreactivate(self):
protocolFeatures = []
supportedProtocolFeatures = self.getSupportedProtocolFeatures()
for protocolFeature in supportedProtocolFeatures:
for spec in protocolFeature["specification"]:
if (spec["name"] == "builtin_feature_codename"):
codename = spec["value"]
# Filter out "PREACTIVATE_FEATURE"
if codename != "PREACTIVATE_FEATURE":
protocolFeatures.append(protocolFeature["feature_digest"])
break
return protocolFeatures
# Require PREACTIVATE_FEATURE to be activated and require rem.bios with preactivate_feature
def preactivateProtocolFeatures(self, featureDigests:list):
for digest in featureDigests:
Utils.Print("push activate action with digest {}".format(digest))
data="{{\"feature_digest\":{}}}".format(digest)
opts="--permission rem@active"
trans=self.pushMessage("rem", "activate", data, opts)
if trans is None or not trans[0]:
Utils.Print("ERROR: Failed to preactive digest {}".format(digest))
return None
self.waitForHeadToAdvance()
# Require PREACTIVATE_FEATURE to be activated and require rem.bios with preactivate_feature
def preactivateAllBuiltinProtocolFeature(self):
allBuiltinProtocolFeatureDigests = self.getAllBuiltinFeatureDigestsToPreactivate()
self.preactivateProtocolFeatures(allBuiltinProtocolFeatureDigests)
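    # Typical activation sequence (sketch; assumes producer_api_plugin and a rem.bios
    # contract exposing the "activate" action, as the comments above require):
    #   node.activatePreactivateFeature()            # schedule PREACTIVATE_FEATURE and wait a block
    #   node.preactivateAllBuiltinProtocolFeature()  # push "activate" for every remaining builtin digest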
def getLatestBlockHeaderState(self):
headBlockNum = self.getHeadBlockNum()
cmdDesc = "get block {} --header-state".format(headBlockNum)
latestBlockHeaderState = self.processCleosCmd(cmdDesc, cmdDesc)
return latestBlockHeaderState
def getActivatedProtocolFeatures(self):
latestBlockHeaderState = self.getLatestBlockHeaderState()
return latestBlockHeaderState["activated_protocol_features"]["protocol_features"]
def modifyBuiltinPFSubjRestrictions(self, nodeId, featureCodename, subjectiveRestriction={}):
jsonPath = os.path.join(Utils.getNodeConfigDir(nodeId),
"protocol_features",
"BUILTIN-{}.json".format(featureCodename))
protocolFeatureJson = []
with open(jsonPath) as f:
protocolFeatureJson = json.load(f)
protocolFeatureJson["subjective_restrictions"].update(subjectiveRestriction)
with open(jsonPath, "w") as f:
json.dump(protocolFeatureJson, f, indent=2)
# Require producer_api_plugin
def createSnapshot(self):
param = { }
return self.processCurlCmd("producer", "create_snapshot", json.dumps(param))
| 44.103383 | 208 | 0.617866 |
4f947e6d8d47067ef3f72e026b1c63cd677cfdf9 | 524 | py | Python | djangoapp_sample/urls.py | sveetch/sveetch-djangoapp-sample | 19e3a03208aade78449d6617f09e830c235012f7 | ["MIT"] | null | null | null | djangoapp_sample/urls.py | sveetch/sveetch-djangoapp-sample | 19e3a03208aade78449d6617f09e830c235012f7 | ["MIT"] | null | null | null | djangoapp_sample/urls.py | sveetch/sveetch-djangoapp-sample | 19e3a03208aade78449d6617f09e830c235012f7 | ["MIT"] | 1 | 2021-04-18T21:20:37.000Z | 2021-04-18T21:20:37.000Z |
"""
Application URLs
"""
from django.urls import path, include
from .views import (
BlogIndexView, BlogDetailView,
ArticleDetailView,
)
from .routers import router
app_name = "djangoapp_sample"
urlpatterns = [
path("", BlogIndexView.as_view(), name="blog-index"),
path("api/", include(router.urls)),
path("<int:blog_pk>/", BlogDetailView.as_view(), name="blog-detail"),
path(
"<int:blog_pk>/<int:article_pk>/",
ArticleDetailView.as_view(),
name="article-detail"
),
]
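# Illustrative usage note (not part of the original module): with app_name set above these
# routes reverse under the "djangoapp_sample" namespace, e.g.
#   reverse("djangoapp_sample:article-detail", kwargs={"blog_pk": 1, "article_pk": 2})
# which resolves to "<prefix>/1/2/" depending on where this urlconf is included.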
| 20.153846 | 73 | 0.650763 |
6b6e64b0c8ad04ca822a0e3973f4d0287c677695 | 199 | py | Python | python201/__init__.py | glentner/python-102 | ccf506296bb6ac2a3bbe5c8b1cb64909837660d6 | ["MIT"] | 5 | 2020-07-27T19:59:23.000Z | 2022-03-27T18:39:29.000Z | python201/__init__.py | lgorenstein/python201 | ccf506296bb6ac2a3bbe5c8b1cb64909837660d6 | ["MIT"] | 2 | 2020-07-26T17:51:02.000Z | 2020-07-27T16:20:04.000Z | python201/__init__.py | lgorenstein/python201 | ccf506296bb6ac2a3bbe5c8b1cb64909837660d6 | ["MIT"] | 4 | 2020-07-25T23:31:06.000Z | 2020-07-27T02:38:44.000Z |
# SPDX-FileCopyrightText: 2019-2021 Python201 Contributors
# SPDX-License-Identifier: MIT
"""python201 package initialization."""
# internal libs
from .core import logging # force initialization
| 22.111111 | 58 | 0.78392 |
31e1dc171d22863aeee8b0aaa928819b3b1d814e | 9,302 | py | Python | test/jit/test_autodiff_subgraph_slicing.py | mrTsjolder/pytorch | 778f9eab6c036964d6dbca06549fb138f1e21c67 | ["Intel"] | 1 | 2021-04-11T08:27:46.000Z | 2021-04-11T08:27:46.000Z | test/jit/test_autodiff_subgraph_slicing.py | mrTsjolder/pytorch | 778f9eab6c036964d6dbca06549fb138f1e21c67 | ["Intel"] | 1 | 2022-01-18T12:17:29.000Z | 2022-01-18T12:17:29.000Z | test/jit/test_autodiff_subgraph_slicing.py | mrTsjolder/pytorch | 778f9eab6c036964d6dbca06549fb138f1e21c67 | ["Intel"] | 2 | 2021-07-02T10:18:21.000Z | 2021-08-18T10:10:28.000Z |
import os
import sys
import unittest
from torch.testing._internal.common_utils import GRAPH_EXECUTOR, ProfilingMode, enable_profiling_mode_for_profiling_tests
import torch
# Make the helper files in test/ importable
pytorch_test_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.append(pytorch_test_dir)
from torch.testing._internal.jit_utils import JitTestCase, disable_autodiff_subgraph_inlining
from torch.testing import FileCheck
from torch.testing._internal.common_utils import num_profiled_runs
if __name__ == '__main__':
raise RuntimeError("This test file is not meant to be run directly, use:\n\n"
"\tpython test/test_jit.py TESTNAME\n\n"
"instead.")
@unittest.skipIf(GRAPH_EXECUTOR == ProfilingMode.SIMPLE, "Simple Executor doesn't support gradients")
class TestAutodiffSubgraphSlicing(JitTestCase):
    # TODO: It would be better if we could test directly on graphs instead of the
    # current end-to-end fashion.
def _perform_ad_subgraph_slicing(self, fn, *input_sizes):
with disable_autodiff_subgraph_inlining():
with enable_profiling_mode_for_profiling_tests():
ge = torch.jit.script(fn)
inputs = [torch.randn(size, requires_grad=True) for size in input_sizes]
ge(*inputs, profile_and_replay=True)
return ge.graph_for(*inputs)
def assertGraphSize(self, graph, size):
nodes = list(filter(lambda n: (n.kind() != "prim::BailOut" and
n.kind() != "prim::BailoutTemplate" and
n.kind() != "prim::TypeCheck" and
n.kind() != "prim::RequiresGradCheck"),
graph.nodes()))
self.assertEqual(len(list(nodes)), size)
def test_chunk_constant_script_ad(self):
@torch.jit.script
def func(x):
x1, x2 = torch.chunk(x, 2)
return (x1, x2)
input = torch.rand(6, 10).requires_grad_()
with disable_autodiff_subgraph_inlining():
with enable_profiling_mode_for_profiling_tests():
output = func(input, profile_and_replay=True)
self.assertAutodiffNode(func.graph_for(input), True, ['prim::ConstantChunk'], [])
@unittest.skipIf(GRAPH_EXECUTOR != ProfilingMode.PROFILING,
"Requires fusion optimization pass to be effective")
def test_differentiable_graph_ops_requires_grad(self):
x = torch.randn(8, 2, dtype=torch.float).requires_grad_()
y = torch.randn(8, 2, dtype=torch.float)
def t(x : torch.Tensor, y : torch.Tensor):
o = x + 1.0
o1 = torch.relu(o)
o = y + 1.5
o2 = torch.relu(o)
o3 = o1 + o2
return o1, o2, o3
with enable_profiling_mode_for_profiling_tests():
t_jit = torch.jit.script(t)
jit_o = t_jit(x, y)
jit_o = t_jit(x, y)
o = t(x, y)
FileCheck().check("prim::DifferentiableGraph").run(t_jit.graph_for(x, y))
# validate the differentiableGraphOps are marking proper requires_grad
for oo, jit_oo in zip(o, jit_o):
self.assertEqual(oo.requires_grad, jit_oo.requires_grad)
self.assertEqual(oo, jit_oo)
# one more runs to trigger fusion
jit_o = t_jit(x, y)
for oo, jit_oo in zip(o, jit_o):
self.assertEqual(oo.dtype, jit_oo.dtype)
self.assertEqual(oo.requires_grad, jit_oo.requires_grad)
self.assertEqual(oo, jit_oo)
@unittest.skipIf(GRAPH_EXECUTOR == ProfilingMode.PROFILING, "Simple Executor doesn't support gradients")
def test_prune_grad(self):
@torch.jit.script
def t(input, bias):
return torch.nn.functional.relu(input + bias)
input = torch.randn(2, 8, requires_grad=True)
bias = torch.randn(8, requires_grad=False) # bias does NOT require grad
NUM_PROFILED_RUNS = 1
with num_profiled_runs(NUM_PROFILED_RUNS):
WARMUP = 3 # 2 runs to reach backward + 1 to optimize it
for x in range(WARMUP):
o = t(input, bias)
o.sum().backward()
fwd_plan = list(t.get_debug_state().execution_plans.values())[0]
bwd_graph = list(fwd_plan.code.grad_executor_states()[0].execution_plans.values())[0].graph
tup = next(bwd_graph.outputs())
self.assertEqual(len(list(tup.node().inputs())), 1)
def test_simple_merge(self):
# o --> o
def fn(x, y, z):
a = x * y
b = a * z
return b
graph = self._perform_ad_subgraph_slicing(fn, 1, 1, 1)
self.assertGraphSize(graph, 1)
self.assertGraphContainsExactly(graph, 'prim::DifferentiableGraph', 1)
def test_simple_no_merge(self):
# o: autodiff supported. x: not autodiff supported.
# o --> x
def fn(x, y, z):
a = x * y
b = torch.zeros([abs(int(y))])
return a, b
graph = self._perform_ad_subgraph_slicing(fn, 1, 1, 1)
g_str = str(graph)
FileCheck().check("aten::Int").check("aten::zeros").check_not("aten::mul").run(g_str[0:g_str.find("return")])
self.assertGraphContainsExactly(graph, 'prim::DifferentiableGraph', 1)
def test_does_not_merge_unrelated(self):
# o o
def fn(w, x, y, z):
a = x * y
b = w * z
return a, b
graph = self._perform_ad_subgraph_slicing(fn, 1, 1, 1, 1)
self.assertGraphSize(graph, 3)
self.assertGraphContainsExactly(graph, 'prim::DifferentiableGraph', 2)
def test_merges_without_cycles(self):
# o --> o --> o
# | ^
# \_________/
def fn(w, x, y):
a = w * x
b = a * y
c = a * b
return c
graph = self._perform_ad_subgraph_slicing(fn, 1, 1, 1)
self.assertGraphSize(graph, 1)
self.assertGraphContainsExactly(graph, 'prim::DifferentiableGraph', 1)
def test_merges_dense(self):
# o o
# |\ /|
# | \ / |
# | /\ |
# vv vv
# o o
def fn(x, y):
a, b = x.chunk(2)
c, d = y.chunk(2)
return a + c, b + d
graph = self._perform_ad_subgraph_slicing(fn, 2, 2)
self.assertGraphSize(graph, 2)
self.assertGraphContainsExactly(graph, 'prim::DifferentiableGraph', 1)
def test_does_not_create_cycles(self):
# o --> x --> o
# | ^
# \_________/
def fn(w, x, y):
a = w * x
b = torch.zeros(abs(int(a)))
c = a * b
return c
graph = self._perform_ad_subgraph_slicing(fn, 1, 1, 1)
self.assertGraphContainsExactly(graph, 'prim::DifferentiableGraph', 2)
def test_merges_up(self):
# o --> x o
# | ^
# \_________/
def fn(w, x, y, z):
a = w * x
b = torch.zeros(abs(int(y)))
c = a * z
return b, c
graph = self._perform_ad_subgraph_slicing(fn, 1, 1, 1, 1)
g_str = str(graph)
FileCheck().check_not("aten::add").run(g_str[0:g_str.find("return")])
self.assertGraphContainsExactly(graph, 'prim::DifferentiableGraph', 1)
def test_merges_down(self):
# o x --> o
# | ^
# \_________/
def fn(v, w, x, y):
a = v * w
b = torch.ones(int(y))
c = b * a
return a, c
graph = self._perform_ad_subgraph_slicing(fn, 1, 1, 1, 1)
num_nodes = 4 if GRAPH_EXECUTOR == ProfilingMode.PROFILING else 3
# add moved down
g_str = str(graph)
FileCheck().check_not("aten::add").run(g_str[0:g_str.find("return")])
self.assertGraphContainsExactly(graph, 'prim::DifferentiableGraph', 1)
def test_respects_lexical_scoping(self):
def fn(x, k):
y = x * 1.1
if bool(k):
k = k + y
z = y * k
return z, k
graph = self._perform_ad_subgraph_slicing(fn, 1, 1)
# We should not have combined the two multiplications into
# the same group; they should each be a separate DiffGraph
self.assertGraphContainsExactly(graph, 'prim::DifferentiableGraph', 3)
def test_merge_respects_aliasing(self):
def fn(x, k, cond):
y = x * 1.1
y = y * k
y = y * 2.2
if bool(cond):
z1 = y[0]
z2 = y[1]
z1.add_(3)
out = z2 + k + 3.3
out = out * out
return out
graph = self._perform_ad_subgraph_slicing(fn, [2, 2], [2, 2], 1)
# z2 did not get merged into the subgraph
FileCheck().check("prim::If").check("aten::select").check_next("aten::select")\
.check_next("aten::add_").check("Differentiable").run(graph)
self.assertGraphContainsExactly(graph, 'prim::DifferentiableGraph', 2)
| 36.912698
| 121
| 0.56203
|
e532ed41955cac513ad624688b323946500b1296
| 27,242
|
py
|
Python
|
beets/dbcore/query.py
|
Kraymer/beets
|
e7ea7ab5f22342136b2a8f038bbbac19a0c66555
|
[
"MIT"
] | null | null | null |
beets/dbcore/query.py
|
Kraymer/beets
|
e7ea7ab5f22342136b2a8f038bbbac19a0c66555
|
[
"MIT"
] | null | null | null |
beets/dbcore/query.py
|
Kraymer/beets
|
e7ea7ab5f22342136b2a8f038bbbac19a0c66555
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# This file is part of beets.
# Copyright 2016, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""The Query type hierarchy for DBCore.
"""
from __future__ import division, absolute_import, print_function
import re
from operator import mul
from beets import util
from datetime import datetime, timedelta
import unicodedata
from functools import reduce
import six
if not six.PY2:
buffer = memoryview # sqlite won't accept memoryview in python 2
class ParsingError(ValueError):
"""Abstract class for any unparseable user-requested album/query
specification.
"""
class InvalidQueryError(ParsingError):
"""Represent any kind of invalid query.
The query should be a unicode string or a list, which will be space-joined.
"""
def __init__(self, query, explanation):
if isinstance(query, list):
query = " ".join(query)
message = u"'{0}': {1}".format(query, explanation)
super(InvalidQueryError, self).__init__(message)
class InvalidQueryArgumentValueError(ParsingError):
"""Represent a query argument that could not be converted as expected.
It exists to be caught in upper stack levels so a meaningful (i.e. with the
query) InvalidQueryError can be raised.
"""
def __init__(self, what, expected, detail=None):
message = u"'{0}' is not {1}".format(what, expected)
if detail:
message = u"{0}: {1}".format(message, detail)
super(InvalidQueryArgumentValueError, self).__init__(message)
class Query(object):
"""An abstract class representing a query into the item database.
"""
def clause(self):
"""Generate an SQLite expression implementing the query.
Return (clause, subvals) where clause is a valid sqlite
WHERE clause implementing the query and subvals is a list of
items to be substituted for ?s in the clause.
"""
return None, ()
def match(self, item):
"""Check whether this query matches a given Item. Can be used to
perform queries on arbitrary sets of Items.
"""
raise NotImplementedError
def __repr__(self):
return "{0.__class__.__name__}()".format(self)
def __eq__(self, other):
return type(self) == type(other)
def __hash__(self):
return 0
class FieldQuery(Query):
"""An abstract query that searches in a specific field for a
pattern. Subclasses must provide a `value_match` class method, which
determines whether a certain pattern string matches a certain value
string. Subclasses may also provide `col_clause` to implement the
same matching functionality in SQLite.
"""
def __init__(self, field, pattern, fast=True):
self.field = field
self.pattern = pattern
self.fast = fast
def col_clause(self):
return None, ()
def clause(self):
if self.fast:
return self.col_clause()
else:
# Matching a flexattr. This is a slow query.
return None, ()
@classmethod
def value_match(cls, pattern, value):
"""Determine whether the value matches the pattern. Both
arguments are strings.
"""
raise NotImplementedError()
def match(self, item):
return self.value_match(self.pattern, item.get(self.field))
def __repr__(self):
return ("{0.__class__.__name__}({0.field!r}, {0.pattern!r}, "
"{0.fast})".format(self))
def __eq__(self, other):
return super(FieldQuery, self).__eq__(other) and \
self.field == other.field and self.pattern == other.pattern
def __hash__(self):
return hash((self.field, hash(self.pattern)))
class MatchQuery(FieldQuery):
"""A query that looks for exact matches in an item field."""
def col_clause(self):
return self.field + " = ?", [self.pattern]
@classmethod
def value_match(cls, pattern, value):
return pattern == value
class NoneQuery(FieldQuery):
def __init__(self, field, fast=True):
super(NoneQuery, self).__init__(field, None, fast)
def col_clause(self):
return self.field + " IS NULL", ()
def match(self, item):
try:
return item[self.field] is None
except KeyError:
return True
def __repr__(self):
return "{0.__class__.__name__}({0.field!r}, {0.fast})".format(self)
class StringFieldQuery(FieldQuery):
"""A FieldQuery that converts values to strings before matching
them.
"""
@classmethod
def value_match(cls, pattern, value):
"""Determine whether the value matches the pattern. The value
may have any type.
"""
return cls.string_match(pattern, util.as_string(value))
@classmethod
def string_match(cls, pattern, value):
"""Determine whether the value matches the pattern. Both
arguments are strings. Subclasses implement this method.
"""
raise NotImplementedError()
class SubstringQuery(StringFieldQuery):
"""A query that matches a substring in a specific item field."""
def col_clause(self):
pattern = (self.pattern
.replace('\\', '\\\\')
.replace('%', '\\%')
.replace('_', '\\_'))
search = '%' + pattern + '%'
clause = self.field + " like ? escape '\\'"
subvals = [search]
return clause, subvals
@classmethod
def string_match(cls, pattern, value):
return pattern.lower() in value.lower()
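# Illustrative sketch (not part of the original beets source): how the LIKE
# wildcard escaping above plays out for a pattern that contains '%'.  The
# 'title' field name is just an example.
def _example_substring_escaping():
    q = SubstringQuery('title', '100%')
    clause, subvals = q.col_clause()
    assert clause == "title like ? escape '\\'"
    assert subvals == ['%100\\%%']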
class RegexpQuery(StringFieldQuery):
"""A query that matches a regular expression in a specific item
field.
Raises InvalidQueryError when the pattern is not a valid regular
expression.
"""
def __init__(self, field, pattern, fast=True):
super(RegexpQuery, self).__init__(field, pattern, fast)
pattern = self._normalize(pattern)
try:
self.pattern = re.compile(self.pattern)
except re.error as exc:
# Invalid regular expression.
raise InvalidQueryArgumentValueError(pattern,
u"a regular expression",
format(exc))
@staticmethod
def _normalize(s):
"""Normalize a Unicode string's representation (used on both
patterns and matched values).
"""
return unicodedata.normalize('NFC', s)
@classmethod
def string_match(cls, pattern, value):
return pattern.search(cls._normalize(value)) is not None
class BooleanQuery(MatchQuery):
"""Matches a boolean field. Pattern should either be a boolean or a
string reflecting a boolean.
"""
def __init__(self, field, pattern, fast=True):
super(BooleanQuery, self).__init__(field, pattern, fast)
if isinstance(pattern, six.string_types):
self.pattern = util.str2bool(pattern)
self.pattern = int(self.pattern)
class BytesQuery(MatchQuery):
"""Match a raw bytes field (i.e., a path). This is a necessary hack
to work around the `sqlite3` module's desire to treat `bytes` and
`unicode` equivalently in Python 2. Always use this query instead of
`MatchQuery` when matching on BLOB values.
"""
def __init__(self, field, pattern):
super(BytesQuery, self).__init__(field, pattern)
# Use a buffer/memoryview representation of the pattern for SQLite
# matching. This instructs SQLite to treat the blob as binary
# rather than encoded Unicode.
if isinstance(self.pattern, (six.text_type, bytes)):
if isinstance(self.pattern, six.text_type):
self.pattern = self.pattern.encode('utf-8')
self.buf_pattern = buffer(self.pattern)
elif isinstance(self.pattern, buffer):
self.buf_pattern = self.pattern
self.pattern = bytes(self.pattern)
def col_clause(self):
return self.field + " = ?", [self.buf_pattern]
class NumericQuery(FieldQuery):
"""Matches numeric fields. A syntax using Ruby-style range ellipses
(``..``) lets users specify one- or two-sided ranges. For example,
``year:2001..`` finds music released since the turn of the century.
Raises InvalidQueryError when the pattern does not represent an int or
a float.
"""
def _convert(self, s):
"""Convert a string to a numeric type (float or int).
Return None if `s` is empty.
Raise an InvalidQueryError if the string cannot be converted.
"""
# This is really just a bit of fun premature optimization.
if not s:
return None
try:
return int(s)
except ValueError:
try:
return float(s)
except ValueError:
raise InvalidQueryArgumentValueError(s, u"an int or a float")
def __init__(self, field, pattern, fast=True):
super(NumericQuery, self).__init__(field, pattern, fast)
parts = pattern.split('..', 1)
if len(parts) == 1:
# No range.
self.point = self._convert(parts[0])
self.rangemin = None
self.rangemax = None
else:
# One- or two-sided range.
self.point = None
self.rangemin = self._convert(parts[0])
self.rangemax = self._convert(parts[1])
def match(self, item):
if self.field not in item:
return False
value = item[self.field]
if isinstance(value, six.string_types):
value = self._convert(value)
if self.point is not None:
return value == self.point
else:
if self.rangemin is not None and value < self.rangemin:
return False
if self.rangemax is not None and value > self.rangemax:
return False
return True
def col_clause(self):
if self.point is not None:
return self.field + '=?', (self.point,)
else:
if self.rangemin is not None and self.rangemax is not None:
return (u'{0} >= ? AND {0} <= ?'.format(self.field),
(self.rangemin, self.rangemax))
elif self.rangemin is not None:
return u'{0} >= ?'.format(self.field), (self.rangemin,)
elif self.rangemax is not None:
return u'{0} <= ?'.format(self.field), (self.rangemax,)
else:
return u'1', ()
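# Illustrative sketch (not part of the original beets source) of the ellipsis
# range syntax described in the NumericQuery docstring above; the field names
# are hypothetical.
def _example_numeric_query_ranges():
    q = NumericQuery('year', '2001..')           # one-sided range
    assert (q.point, q.rangemin, q.rangemax) == (None, 2001, None)
    assert q.col_clause() == ('year >= ?', (2001,))
    q2 = NumericQuery('bitrate', '128..320')     # two-sided range
    assert q2.col_clause() == ('bitrate >= ? AND bitrate <= ?', (128, 320))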
class CollectionQuery(Query):
"""An abstract query class that aggregates other queries. Can be
indexed like a list to access the sub-queries.
"""
def __init__(self, subqueries=()):
self.subqueries = subqueries
# Act like a sequence.
def __len__(self):
return len(self.subqueries)
def __getitem__(self, key):
return self.subqueries[key]
def __iter__(self):
return iter(self.subqueries)
def __contains__(self, item):
return item in self.subqueries
def clause_with_joiner(self, joiner):
"""Return a clause created by joining together the clauses of
all subqueries with the string joiner (padded by spaces).
"""
clause_parts = []
subvals = []
for subq in self.subqueries:
subq_clause, subq_subvals = subq.clause()
if not subq_clause:
# Fall back to slow query.
return None, ()
clause_parts.append('(' + subq_clause + ')')
subvals += subq_subvals
clause = (' ' + joiner + ' ').join(clause_parts)
return clause, subvals
def __repr__(self):
return "{0.__class__.__name__}({0.subqueries!r})".format(self)
def __eq__(self, other):
return super(CollectionQuery, self).__eq__(other) and \
self.subqueries == other.subqueries
def __hash__(self):
"""Since subqueries are mutable, this object should not be hashable.
However, for convenience purposes, it can be hashed.
"""
return reduce(mul, map(hash, self.subqueries), 1)
class AnyFieldQuery(CollectionQuery):
"""A query that matches if a given FieldQuery subclass matches in
any field. The individual field query class is provided to the
constructor.
"""
def __init__(self, pattern, fields, cls):
self.pattern = pattern
self.fields = fields
self.query_class = cls
subqueries = []
for field in self.fields:
subqueries.append(cls(field, pattern, True))
super(AnyFieldQuery, self).__init__(subqueries)
def clause(self):
return self.clause_with_joiner('or')
def match(self, item):
for subq in self.subqueries:
if subq.match(item):
return True
return False
def __repr__(self):
return ("{0.__class__.__name__}({0.pattern!r}, {0.fields!r}, "
"{0.query_class.__name__})".format(self))
def __eq__(self, other):
return super(AnyFieldQuery, self).__eq__(other) and \
self.query_class == other.query_class
def __hash__(self):
return hash((self.pattern, tuple(self.fields), self.query_class))
class MutableCollectionQuery(CollectionQuery):
"""A collection query whose subqueries may be modified after the
query is initialized.
"""
def __setitem__(self, key, value):
self.subqueries[key] = value
def __delitem__(self, key):
del self.subqueries[key]
class AndQuery(MutableCollectionQuery):
"""A conjunction of a list of other queries."""
def clause(self):
return self.clause_with_joiner('and')
def match(self, item):
return all([q.match(item) for q in self.subqueries])
class OrQuery(MutableCollectionQuery):
"""A conjunction of a list of other queries."""
def clause(self):
return self.clause_with_joiner('or')
def match(self, item):
return any([q.match(item) for q in self.subqueries])
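# Illustrative sketch (not part of the original beets source): combining the
# query classes defined above.  clause_with_joiner() wraps each sub-clause in
# parentheses and concatenates the substitution values; the field names here
# are hypothetical.
def _example_collection_query_clause():
    q = AndQuery([MatchQuery('artist', 'Beatles'), NumericQuery('year', '1965..')])
    clause, subvals = q.clause()
    assert clause == '(artist = ?) and (year >= ?)'
    assert subvals == ['Beatles', 1965]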
class NotQuery(Query):
"""A query that matches the negation of its `subquery`, as a shorcut for
performing `not(subquery)` without using regular expressions.
"""
def __init__(self, subquery):
self.subquery = subquery
def clause(self):
clause, subvals = self.subquery.clause()
if clause:
return 'not ({0})'.format(clause), subvals
else:
# If there is no clause, there is nothing to negate. All the logic
# is handled by match() for slow queries.
return clause, subvals
def match(self, item):
return not self.subquery.match(item)
def __repr__(self):
return "{0.__class__.__name__}({0.subquery!r})".format(self)
def __eq__(self, other):
return super(NotQuery, self).__eq__(other) and \
self.subquery == other.subquery
def __hash__(self):
return hash(('not', hash(self.subquery)))
class TrueQuery(Query):
"""A query that always matches."""
def clause(self):
return '1', ()
def match(self, item):
return True
class FalseQuery(Query):
"""A query that never matches."""
def clause(self):
return '0', ()
def match(self, item):
return False
# Time/date queries.
def _to_epoch_time(date):
"""Convert a `datetime` object to an integer number of seconds since
the (local) Unix epoch.
"""
if hasattr(date, 'timestamp'):
# The `timestamp` method exists on Python 3.3+.
return int(date.timestamp())
else:
epoch = datetime.fromtimestamp(0)
delta = date - epoch
return int(delta.total_seconds())
def _parse_periods(pattern):
"""Parse a string containing two dates separated by two dots (..).
Return a pair of `Period` objects.
"""
parts = pattern.split('..', 1)
if len(parts) == 1:
instant = Period.parse(parts[0])
return (instant, instant)
else:
start = Period.parse(parts[0])
end = Period.parse(parts[1])
return (start, end)
class Period(object):
"""A period of time given by a date, time and precision.
Example: 2014-01-01 10:50:30 with precision 'month' represents all
instants of time during January 2014.
"""
precisions = ('year', 'month', 'day')
date_formats = ('%Y', '%Y-%m', '%Y-%m-%d')
def __init__(self, date, precision):
"""Create a period with the given date (a `datetime` object) and
precision (a string, one of "year", "month", or "day").
"""
if precision not in Period.precisions:
raise ValueError(u'Invalid precision {0}'.format(precision))
self.date = date
self.precision = precision
@classmethod
def parse(cls, string):
"""Parse a date and return a `Period` object, or `None` if the
string is empty, or raise an InvalidQueryArgumentValueError if
the string could not be parsed to a date.
"""
if not string:
return None
date = None
for ordinal, date_format in enumerate(cls.date_formats):
try:
date = datetime.strptime(string, date_format)
break
except ValueError:
# Parsing failed.
pass
if date is None:
raise InvalidQueryArgumentValueError(string,
'a valid datetime string')
precision = cls.precisions[ordinal]
return cls(date, precision)
def open_right_endpoint(self):
"""Based on the precision, convert the period to a precise
`datetime` for use as a right endpoint in a right-open interval.
"""
precision = self.precision
date = self.date
if 'year' == self.precision:
return date.replace(year=date.year + 1, month=1)
elif 'month' == precision:
if (date.month < 12):
return date.replace(month=date.month + 1)
else:
return date.replace(year=date.year + 1, month=1)
elif 'day' == precision:
return date + timedelta(days=1)
else:
raise ValueError(u'unhandled precision {0}'.format(precision))
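# Illustrative sketch (not part of the original beets source): parsing a
# month-precision period and computing its right-open endpoint, as described
# in the Period docstring above.
def _example_period_parsing():
    p = Period.parse('2014-01')
    assert p.precision == 'month'
    assert p.date == datetime(2014, 1, 1)
    assert p.open_right_endpoint() == datetime(2014, 2, 1)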
class DateInterval(object):
"""A closed-open interval of dates.
A left endpoint of None means since the beginning of time.
A right endpoint of None means towards infinity.
"""
def __init__(self, start, end):
if start is not None and end is not None and not start < end:
raise ValueError(u"start date {0} is not before end date {1}"
.format(start, end))
self.start = start
self.end = end
@classmethod
def from_periods(cls, start, end):
"""Create an interval with two Periods as the endpoints.
"""
end_date = end.open_right_endpoint() if end is not None else None
start_date = start.date if start is not None else None
return cls(start_date, end_date)
def contains(self, date):
if self.start is not None and date < self.start:
return False
if self.end is not None and date >= self.end:
return False
return True
def __str__(self):
return '[{0}, {1})'.format(self.start, self.end)
class DateQuery(FieldQuery):
"""Matches date fields stored as seconds since Unix epoch time.
Dates can be specified as ``year-month-day`` strings where only year
is mandatory.
The value of a date field can be matched against a date interval by
using an ellipsis interval syntax similar to that of NumericQuery.
"""
def __init__(self, field, pattern, fast=True):
super(DateQuery, self).__init__(field, pattern, fast)
start, end = _parse_periods(pattern)
self.interval = DateInterval.from_periods(start, end)
def match(self, item):
if self.field not in item:
return False
timestamp = float(item[self.field])
date = datetime.utcfromtimestamp(timestamp)
return self.interval.contains(date)
_clause_tmpl = "{0} {1} ?"
def col_clause(self):
clause_parts = []
subvals = []
if self.interval.start:
clause_parts.append(self._clause_tmpl.format(self.field, ">="))
subvals.append(_to_epoch_time(self.interval.start))
if self.interval.end:
clause_parts.append(self._clause_tmpl.format(self.field, "<"))
subvals.append(_to_epoch_time(self.interval.end))
if clause_parts:
# One- or two-sided interval.
clause = ' AND '.join(clause_parts)
else:
# Match any date.
clause = '1'
return clause, subvals
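# Illustrative sketch (not part of the original beets source): the clause that
# the date ellipsis syntax compiles to.  The 'added' field name is an example.
def _example_date_query_clause():
    q = DateQuery('added', '2008..2009')
    clause, subvals = q.col_clause()
    assert clause == 'added >= ? AND added < ?'
    assert subvals == [_to_epoch_time(datetime(2008, 1, 1)),
                       _to_epoch_time(datetime(2010, 1, 1))]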
class DurationQuery(NumericQuery):
"""NumericQuery that allow human-friendly (M:SS) time interval formats.
Converts the range(s) to a float value, and delegates on NumericQuery.
Raises InvalidQueryError when the pattern does not represent an int, float
or M:SS time interval.
"""
def _convert(self, s):
"""Convert a M:SS or numeric string to a float.
Return None if `s` is empty.
Raise an InvalidQueryError if the string cannot be converted.
"""
if not s:
return None
try:
return util.raw_seconds_short(s)
except ValueError:
try:
return float(s)
except ValueError:
raise InvalidQueryArgumentValueError(
s,
u"a M:SS string or a float")
# Sorting.
class Sort(object):
"""An abstract class representing a sort operation for a query into
the item database.
"""
def order_clause(self):
"""Generates a SQL fragment to be used in a ORDER BY clause, or
None if no fragment is used (i.e., this is a slow sort).
"""
return None
def sort(self, items):
"""Sort the list of objects and return a list.
"""
return sorted(items)
def is_slow(self):
"""Indicate whether this query is *slow*, meaning that it cannot
be executed in SQL and must be executed in Python.
"""
return False
def __hash__(self):
return 0
def __eq__(self, other):
return type(self) == type(other)
class MultipleSort(Sort):
"""Sort that encapsulates multiple sub-sorts.
"""
def __init__(self, sorts=None):
self.sorts = sorts or []
def add_sort(self, sort):
self.sorts.append(sort)
def _sql_sorts(self):
"""Return the list of sub-sorts for which we can be (at least
partially) fast.
A contiguous suffix of fast (SQL-capable) sub-sorts is
executable in SQL. The remaining sub-sorts, even if they are fast
independently, must be executed slowly.
"""
sql_sorts = []
for sort in reversed(self.sorts):
if sort.order_clause() is not None:
sql_sorts.append(sort)
else:
break
sql_sorts.reverse()
return sql_sorts
def order_clause(self):
order_strings = []
for sort in self._sql_sorts():
order = sort.order_clause()
order_strings.append(order)
return ", ".join(order_strings)
def is_slow(self):
for sort in self.sorts:
if sort.is_slow():
return True
return False
def sort(self, items):
slow_sorts = []
switch_slow = False
for sort in reversed(self.sorts):
if switch_slow:
slow_sorts.append(sort)
elif sort.order_clause() is None:
switch_slow = True
slow_sorts.append(sort)
else:
pass
for sort in slow_sorts:
items = sort.sort(items)
return items
def __repr__(self):
return 'MultipleSort({!r})'.format(self.sorts)
def __hash__(self):
return hash(tuple(self.sorts))
def __eq__(self, other):
return super(MultipleSort, self).__eq__(other) and \
self.sorts == other.sorts
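# Illustrative sketch (not part of the original beets source): only the
# trailing run of fast sub-sorts is handed to SQL; everything before the first
# slow sort is re-sorted in Python, as described in _sql_sorts() above.  It
# uses the FieldSort subclasses defined further below in this module.
def _example_multiple_sort_split():
    ms = MultipleSort([SlowFieldSort('play_count'), FixedFieldSort('year')])
    assert [type(s) for s in ms._sql_sorts()] == [FixedFieldSort]
    assert ms.is_slow()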
class FieldSort(Sort):
"""An abstract sort criterion that orders by a specific field (of
any kind).
"""
def __init__(self, field, ascending=True, case_insensitive=True):
self.field = field
self.ascending = ascending
self.case_insensitive = case_insensitive
def sort(self, objs):
# TODO: Conversion and null-detection here. In Python 3,
# comparisons with None fail. We should also support flexible
# attributes with different types without falling over.
def key(item):
field_val = item.get(self.field, '')
if self.case_insensitive and isinstance(field_val, six.text_type):
field_val = field_val.lower()
return field_val
return sorted(objs, key=key, reverse=not self.ascending)
def __repr__(self):
return '<{0}: {1}{2}>'.format(
type(self).__name__,
self.field,
'+' if self.ascending else '-',
)
def __hash__(self):
return hash((self.field, self.ascending))
def __eq__(self, other):
return super(FieldSort, self).__eq__(other) and \
self.field == other.field and \
self.ascending == other.ascending
class FixedFieldSort(FieldSort):
"""Sort object to sort on a fixed field.
"""
def order_clause(self):
order = "ASC" if self.ascending else "DESC"
if self.case_insensitive:
field = '(CASE ' \
'WHEN TYPEOF({0})="text" THEN LOWER({0}) ' \
'WHEN TYPEOF({0})="blob" THEN LOWER({0}) ' \
'ELSE {0} END)'.format(self.field)
else:
field = self.field
return "{0} {1}".format(field, order)
class SlowFieldSort(FieldSort):
"""A sort criterion by some model field other than a fixed field:
i.e., a computed or flexible field.
"""
def is_slow(self):
return True
class NullSort(Sort):
"""No sorting. Leave results unsorted."""
def sort(self, items):
return items
def __nonzero__(self):
return self.__bool__()
def __bool__(self):
return False
def __eq__(self, other):
return type(self) == type(other) or other is None
def __hash__(self):
return 0
| 31.348677
| 79
| 0.607004
|
52c815b7c2f8028a5b1b366811f6f95ba6994b4f
| 3,290
|
py
|
Python
|
samples/reservation/reservation.py
|
RedShiftCompany/flask-ask
|
82d092152993e2a9e7faa4181ce0598d5d77dd1b
|
[
"Apache-2.0"
] | null | null | null |
samples/reservation/reservation.py
|
RedShiftCompany/flask-ask
|
82d092152993e2a9e7faa4181ce0598d5d77dd1b
|
[
"Apache-2.0"
] | 1
|
2020-04-20T19:40:46.000Z
|
2020-04-20T19:40:46.000Z
|
samples/reservation/reservation.py
|
RedShiftCompany/flask-ask
|
82d092152993e2a9e7faa4181ce0598d5d77dd1b
|
[
"Apache-2.0"
] | null | null | null |
import logging
import datetime
from flask import Flask
import sys
sys.path.append('lib/flask_ask')
from flask_ask import Ask, statement, question, session, state
app = Flask(__name__)
ask = Ask(app, '/')
logging.getLogger("flask_ask").setLevel(logging.DEBUG)
@ask.launch
def welcome():
state.transition('capture_where_departing')
return question("Welcome to the flight booking service. What city are you leaving from?")
@ask.intent("AMAZON.StopIntent")
def stop():
return statement("The flight booking service has been stopped.")
# state - capture_where_departing
@ask.intent("CityIntent", state='capture_where_departing')
def where_departing(city):
session.attributes['where_departing'] = city
state.transition('capture_where_arriving')
return question("Where are you going?")
# state - capture_where_arriving
@ask.intent("CityIntent", state='capture_where_arriving')
def where_arriving(city):
session.attributes['where_arriving'] = city
state.transition('capture_when_departing')
return question("What date do you want to leave?")
# state - capture_when_departing
@ask.intent("DateIntent", state='capture_when_departing', convert={'date': 'date'})
def when_departing(date):
session.attributes['when_departing'] = date
session.attributes_encoder = _json_date_handler
state.transition('capture_one_way_option')
return question("Is it a one-way trip?")
# state - capture_one_way_option
@ask.intent("AMAZON.YesIntent", state='capture_one_way_option')
def one_way_yes():
state.transition('confirmation')
return _confirmation_question()
@ask.intent("AMAZON.NoIntent", state='capture_one_way_option')
def one_way_no():
state.transition('capture_when_returning')
return question("What date do you want to return?")
# state - capture_when_returning
@ask.intent("DateIntent", state='capture_when_returning', convert={'date': 'date'})
def when_returning(date):
session.attributes['when_returning'] = date
session.attributes_encoder = _json_date_handler
state.transition('confirmation')
return _confirmation_question()
# state - confirmation
@ask.intent("AMAZON.YesIntent", state='confirmation')
def confirm_yes():
return statement("Your flight has been booked. Thank you.")
@ask.intent("AMAZON.NoIntent", state='confirmation')
def confirm_no():
state.transition('capture_where_departing')
return question("What city are you leaving from?")
def _json_date_handler(obj):
if isinstance(obj, datetime.date):
return obj.isoformat()
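# Illustrative sketch (not part of the original sample): the encoder above
# serializes date objects to ISO strings for the session attributes; any other
# type falls through and returns None.
def _example_date_encoding():
    assert _json_date_handler(datetime.date(2020, 4, 20)) == '2020-04-20'
    assert _json_date_handler('not a date') is None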
def _confirmation_question():
where_departing = session.attributes.get('where_departing')
when_departing = session.attributes.get('when_departing')
where_arriving = session.attributes.get('where_arriving')
when_returning = session.attributes.get('when_returning')
speech_text = "I have you leaving from {} and going to {} on {}. " \
.format(where_departing, where_arriving, when_departing)
if when_returning is not None:
speech_text += "You are set to return on {}. ".format(when_returning)
else:
speech_text += "This is a one-way trip. "
speech_text += "Do you want to confirm?"
return question(speech_text)
if __name__ == '__main__':
app.run(debug=True)
| 26.967213
| 93
| 0.735562
|
f8069f4bc155684f08b4d3206e264bc6d6f6f21a
| 785
|
py
|
Python
|
mock_serial.py
|
TUhybridlab/vj-servo-controller
|
685a93d4b0daddc847ae0177528c527e0d840cc3
|
[
"MIT"
] | null | null | null |
mock_serial.py
|
TUhybridlab/vj-servo-controller
|
685a93d4b0daddc847ae0177528c527e0d840cc3
|
[
"MIT"
] | null | null | null |
mock_serial.py
|
TUhybridlab/vj-servo-controller
|
685a93d4b0daddc847ae0177528c527e0d840cc3
|
[
"MIT"
] | null | null | null |
import logging
from time import sleep
MESSAGE = '600 1'
class MockSerial(object):
def __init__(self):
self.log = logging.getLogger(MockSerial.__name__)
self.is_open = True
self.log.error("Serial running on mock!")
def read(self):
sleep(0.02)
return '#'
def readline(self):
if not self.is_open:
self.log.critical("Call to readline when Serial is already closed!")
raise Exception("Call to readline when Serial is already closed!")
#self.log.error("Mock serial returns " + MESSAGE)
return MESSAGE
def close(self):
self.log.error("Close mock serial")
self.is_open = False
def isOpen(self):
return self.is_open
def flushInput(self):
pass
| 24.53125
| 80
| 0.616561
|
5b34707c1b5292add3799a0c82dfb729afd88bae
| 7,441
|
py
|
Python
|
dsenum/converter.py
|
lan496/dsenum
|
24412c20737bdaf20179afabb49ff407cb92e880
|
[
"MIT"
] | null | null | null |
dsenum/converter.py
|
lan496/dsenum
|
24412c20737bdaf20179afabb49ff407cb92e880
|
[
"MIT"
] | 15
|
2019-01-21T07:30:36.000Z
|
2020-08-14T10:58:24.000Z
|
dsenum/converter.py
|
lan496/dsenum
|
24412c20737bdaf20179afabb49ff407cb92e880
|
[
"MIT"
] | null | null | null |
from itertools import product
from typing import List, Optional, Tuple, Union, cast
import numpy as np
from dsenum.site import CanonicalSite, DerivativeSite
from dsenum.smith_normal_form import smith_normal_form
from dsenum.utils import cast_integer_matrix
"""
hash_frac_coords ravel_canonical_site
frac_coords ---------------> CanonicalSite -------------------------> Int
| |^ <-------------------------
| || unravel_to_canonical_site
| ||
| embed_to_derivative_site||hash_derivative_site
| ||
| V|
|-----------------------> DerivativeSite
get_frac_coords
"""
class DerivativeMultiLatticeHash:
"""
Parameters
----------
hnf: array, (dim, dim)
Hermite normal form (lower triangular)
displacement_set: array, (num_site_parent, dim)
fractional coordinates in the primitive cell of the base structure
"""
def __init__(self, hnf: np.ndarray, displacement_set: np.ndarray):
self._hnf = hnf
self._index = np.around(np.abs(np.linalg.det(self.hnf))).astype(int)
assert self.index != 0
self.displacement_set = displacement_set
assert self.dim == self.displacement_set.shape[1]
self.num_site_base = len(displacement_set)
D, L, R = smith_normal_form(self.hnf)
self.snf = D
self.left = L
self.right = R
self.left_inv = cast_integer_matrix(np.linalg.inv(self.left))
self.invariant_factors = tuple(self.snf.diagonal())
self.shape = (self.num_site_base,) + self.invariant_factors
@property
def hnf(self) -> np.ndarray:
return self._hnf
@property
def dim(self) -> int:
return self.hnf.shape[0]
@property
def index(self) -> int:
return self._index
@property
def num_sites(self) -> int:
return self.num_site_base * self.index
@property
def num_base_sites(self) -> int:
return self.num_site_base
def hash_derivative_site(
self, dsite: DerivativeSite, return_image: bool = False
) -> Union[CanonicalSite, Tuple[CanonicalSite, np.ndarray]]:
"""
Returns
-------
csite: CanonicalSite
derivative_jimage: array, optional
self.get_frac_coords(dsite) == self.displacement_set[dsite.site_index]
+ np.dot(self.left_inv, csite.factor)
+ np.dot(self.hnf, derivative_jimage)
"""
site_index, jimage = dsite.site_index, dsite.jimage
factor_tmp = np.dot(self.left, np.array(jimage, dtype=int))
factor = np.mod(factor_tmp, np.array(self.invariant_factors))
csite = CanonicalSite(site_index, tuple(factor))
if return_image:
derivative_jimage = cast_integer_matrix(factor_tmp - factor) / np.array(
self.invariant_factors
)
derivative_jimage = cast_integer_matrix(derivative_jimage)
derivative_jimage = np.dot(self.right, derivative_jimage).astype(int)
return csite, derivative_jimage
else:
return csite
def hash_frac_coords(self, frac_coord: np.ndarray) -> Optional[CanonicalSite]:
for site_index, fc in enumerate(self.displacement_set):
jimage = cast_integer_matrix(frac_coord - fc)
if np.allclose(fc + jimage, frac_coord):
dsite = DerivativeSite(site_index, jimage)
csite = cast(CanonicalSite, self.hash_derivative_site(dsite))
return csite
return None
def get_canonical_sites_list(self) -> List[CanonicalSite]:
list_csites = []
for site_index in range(len(self.displacement_set)):
for factor in product(*[range(f) for f in self.invariant_factors]):
csite = CanonicalSite(site_index, tuple(factor))
list_csites.append(csite)
assert all([self.ravel_canonical_site(csite) == i for i, csite in enumerate(list_csites)])
return list_csites
def get_distinct_derivative_sites_list(self) -> List[DerivativeSite]:
list_csites = self.get_canonical_sites_list()
list_dsites = [self.embed_to_derivative_site(csite) for csite in list_csites]
return list_dsites
def get_canonical_and_derivative_sites_list(
self,
) -> List[Tuple[CanonicalSite, DerivativeSite]]:
list_csites_dsites = []
for site_index in range(len(self.displacement_set)):
for factor in product(*[range(f) for f in self.invariant_factors]):
csite = CanonicalSite(site_index, tuple(factor))
dsite = self.embed_to_derivative_site(csite)
list_csites_dsites.append((csite, dsite))
return list_csites_dsites
def get_lattice_points(self) -> List[np.ndarray]:
"""
Return the list of lattice points of the supercell.
Note that the returned lattice points are not guaranteed to be "within" the supercell
"""
lattice_points = []
for factor in self.get_all_factors():
lp = cast_integer_matrix(np.dot(self.left_inv, factor))
lattice_points.append(lp)
return lattice_points
def get_all_factors(self) -> List[Tuple[int, ...]]:
list_factors = list(product(*[range(f) for f in self.invariant_factors]))
return list_factors
def modulus_factor(self, factor: np.ndarray) -> np.ndarray:
modded_factor = np.mod(factor, np.array(self.invariant_factors))
return modded_factor
def get_frac_coords(self, dsite: DerivativeSite) -> np.ndarray:
return self.displacement_set[dsite.site_index] + np.array(dsite.jimage)
def ravel_canonical_site(self, csite: CanonicalSite) -> int:
# hash canonical site s.t. self.get_canonical_sites_list <=> identity
multi_index = (csite.site_index,) + tuple(csite.factor)
raveled = np.ravel_multi_index(multi_index, self.shape)
return raveled
def ravel_derivative_site(self, dsite: DerivativeSite) -> int:
csite = self.hash_derivative_site(dsite)
raveled = self.ravel_canonical_site(csite)
return raveled
def unravel_to_canonical_site(self, indices: int) -> CanonicalSite:
unraveled = np.unravel_index(indices, self.shape)
site_index, factor = unraveled[0], unraveled[1:]
csite = CanonicalSite(site_index, factor)
return csite
def embed_to_derivative_site(self, csite: CanonicalSite) -> DerivativeSite:
jimage_base = cast_integer_matrix(np.dot(self.left_inv, csite.factor))
dsite = DerivativeSite(csite.site_index, tuple(jimage_base.tolist()))
return dsite
def get_species_list(index: int, list_species: List[str]) -> List[str]:
"""
tile list of species for derivative structure
"""
species = []
for sp in list_species:
species.extend([sp] * index)
return species
def convert_site_constraints(
base_site_constraints: List[List[int]], index: int
) -> List[List[int]]:
site_constraints = []
for sc in base_site_constraints:
for _ in range(index):
site_constraints.append(sc)
return site_constraints
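# Illustrative sketch (not part of the original dsenum source): tiling species
# and site constraints over a hypothetical supercell of index 2, using the two
# helpers defined above.
def _example_tiling():
    assert get_species_list(2, ["Cu", "Au"]) == ["Cu", "Cu", "Au", "Au"]
    assert convert_site_constraints([[0], [0, 1]], 2) == [[0], [0], [0, 1], [0, 1]]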
| 36.836634
| 98
| 0.627335
|
4fe352dc5c9f3c9d9b5ca3a9ae3d2ba489a55e37
| 1,959
|
py
|
Python
|
python/baseline/train.py
|
pk1601cs33/baseline
|
3cbe990533dad15e67018b4e529d26845574fb67
|
[
"Apache-2.0"
] | 2
|
2018-07-06T02:01:12.000Z
|
2018-07-06T02:01:14.000Z
|
python/baseline/train.py
|
ZhenyueChin/baseline
|
3aad1addfedad3b2ce2c78bab95855c0d41a5c93
|
[
"Apache-2.0"
] | null | null | null |
python/baseline/train.py
|
ZhenyueChin/baseline
|
3aad1addfedad3b2ce2c78bab95855c0d41a5c93
|
[
"Apache-2.0"
] | null | null | null |
import time
from baseline.utils import create_user_trainer, export
__all__ = []
exporter = export(__all__)
@exporter
class Trainer(object):
def __init__(self):
self.train_epochs = 0
self.valid_epochs = 0
pass
def test(self, loader, reporting_fns):
pass
def train(self, loader, reporting_fns):
pass
@exporter
class EpochReportingTrainer(Trainer):
def __init__(self):
super(EpochReportingTrainer, self).__init__()
def train(self, ts, reporting_fns):
start_time = time.time()
metrics = self._train(ts)
duration = time.time() - start_time
print('Training time (%.3f sec)' % duration)
self.train_epochs += 1
for reporting in reporting_fns:
reporting(metrics, self.train_epochs * len(ts), 'Train')
return metrics
def test(self, vs, reporting_fns, phase='Valid', **kwargs):
start_time = time.time()
metrics = self._test(vs, **kwargs)
duration = time.time() - start_time
print('%s time (%.3f sec)' % (phase, duration))
epochs = 0
if phase == 'Valid':
self.valid_epochs += 1
epochs = self.valid_epochs
for reporting in reporting_fns:
reporting(metrics, epochs, phase)
return metrics
def _train(self, ts):
pass
def _test(self, vs, **kwargs):
pass
@exporter
def create_trainer(default_create_model_fn, model, **kwargs):
"""Create the default trainer, or a user-defined one if `trainer_type` is not `default`
:param default_create_model_fn: The constructor for the default trainer (defined in each platform/task)
:param model: The model to train
:param kwargs:
:return:
"""
model_type = kwargs.get('trainer_type', 'default')
if model_type == 'default':
return default_create_model_fn(model, **kwargs)
return create_user_trainer(model, **kwargs)
| 26.472973
| 107
| 0.632976
|
4d01c485351119a7687fba09aa1462c0a060f413
| 204
|
py
|
Python
|
FreeCodeCamp.org/IfStatementAndComparison.py
|
MizaN13/PythonAbc
|
db8aa05497a6a1bf3eafcfdd6c31880b57e509fb
|
[
"MIT"
] | null | null | null |
FreeCodeCamp.org/IfStatementAndComparison.py
|
MizaN13/PythonAbc
|
db8aa05497a6a1bf3eafcfdd6c31880b57e509fb
|
[
"MIT"
] | null | null | null |
FreeCodeCamp.org/IfStatementAndComparison.py
|
MizaN13/PythonAbc
|
db8aa05497a6a1bf3eafcfdd6c31880b57e509fb
|
[
"MIT"
] | null | null | null |
def max_num(num1, num2, num3):
if num1 >= num2 and num1 >= num3:
return num1
elif num2 >= num1 and num2 >= num3:
return num2
else:
return num3
print(max_num(3, 45, 5))
| 22.666667
| 39
| 0.568627
|
9f3087562e326eb4b0b0cc4627694d7b9845ddcb
| 70
|
py
|
Python
|
stores/__init__.py
|
galaddirie/django-cassiopeia
|
e3e75e6c815cfc96e3b7ef5991aa1265221a2122
|
[
"MIT"
] | 13
|
2020-07-08T17:23:18.000Z
|
2022-02-13T09:19:42.000Z
|
stores/__init__.py
|
galaddirie/django-cassiopeia
|
e3e75e6c815cfc96e3b7ef5991aa1265221a2122
|
[
"MIT"
] | 16
|
2020-07-19T22:14:20.000Z
|
2022-03-24T02:57:45.000Z
|
stores/__init__.py
|
galaddirie/django-cassiopeia
|
e3e75e6c815cfc96e3b7ef5991aa1265221a2122
|
[
"MIT"
] | 6
|
2020-07-21T01:37:54.000Z
|
2022-01-01T19:28:54.000Z
|
from .django_cache import DjangoCache
from .omnistone import Omnistone
| 35
| 37
| 0.871429
|
15e8ffddbace6d921ebaa3c384dcd8fa8b5d864a
| 130
|
py
|
Python
|
env/apolo/apps/Usuario/admin.py
|
XeresRed/Apolo
|
ab76f14abae89e59a8212cb9334134980719b989
|
[
"MIT"
] | null | null | null |
env/apolo/apps/Usuario/admin.py
|
XeresRed/Apolo
|
ab76f14abae89e59a8212cb9334134980719b989
|
[
"MIT"
] | null | null | null |
env/apolo/apps/Usuario/admin.py
|
XeresRed/Apolo
|
ab76f14abae89e59a8212cb9334134980719b989
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from apps.Usuario import models
# Register your models here.
admin.site.register(models.Persona)
| 26
| 35
| 0.823077
|
1978b7ccf23574069bfa1e8174c7dfbe2315ce78
| 575
|
py
|
Python
|
util/test/test_Constants.py
|
JohannSuarez/tokenspice
|
d791d8821078fe1156c62632c4bbe533c15b3d96
|
[
"Apache-2.0"
] | 26
|
2020-11-20T15:28:09.000Z
|
2021-07-25T20:24:55.000Z
|
util/test/test_Constants.py
|
JohannSuarez/tokenspice
|
d791d8821078fe1156c62632c4bbe533c15b3d96
|
[
"Apache-2.0"
] | 13
|
2021-06-16T14:06:08.000Z
|
2021-07-27T17:25:54.000Z
|
util/test/test_Constants.py
|
JohannSuarez/tokenspice
|
d791d8821078fe1156c62632c4bbe533c15b3d96
|
[
"Apache-2.0"
] | 7
|
2021-02-19T01:04:16.000Z
|
2021-07-22T11:50:18.000Z
|
from util.constants import *
def testINF():
assert (INF > 1.0)
def testSeconds():
assert (0 <
S_PER_MIN <
S_PER_HOUR <
S_PER_DAY <
S_PER_WEEK <
S_PER_MONTH <
S_PER_YEAR)
assert S_PER_HOUR == (60 * 60)
assert S_PER_WEEK == (60 * 60 * 24 * 7)
assert S_PER_YEAR == (60 * 60 * 24 * 365)
assert isinstance(S_PER_HOUR, int)
assert isinstance(S_PER_DAY, int)
assert isinstance(S_PER_WEEK, int)
assert isinstance(S_PER_MONTH, int)
assert isinstance(S_PER_YEAR, int)
| 23.958333
| 45
| 0.586087
|
add69553fb275373da8737cd58a1389ae7c84047
| 3,566
|
py
|
Python
|
twnews/common.py
|
zuxfoucault/twnews
|
ace7d81d3fdca4e04b0f7edf827a92a45830540d
|
[
"MIT"
] | 1
|
2019-09-07T06:46:47.000Z
|
2019-09-07T06:46:47.000Z
|
twnews/common.py
|
zuxfoucault/twnews
|
ace7d81d3fdca4e04b0f7edf827a92a45830540d
|
[
"MIT"
] | null | null | null |
twnews/common.py
|
zuxfoucault/twnews
|
ace7d81d3fdca4e04b0f7edf827a92a45830540d
|
[
"MIT"
] | null | null | null |
"""
Shared utilities for twnews.
"""
import json
import os
import os.path
import logging
import logging.config
import socket
import requests
# pylint: disable=global-statement
__LOGGER_LOADED = False
__ALLCONF = None
__SESSION = {
"direct": None,
"proxy": None
}
VERSION = '0.3.3'
def found_socks5():
"""
Check whether a SOCKS 5 proxy is available.
"""
found = False
with socket.socket(socket.AF_INET) as sock:
try:
sock.connect(('localhost', 9050))
found = True
except socket.error:
pass
finally:
sock.close()
return found
def get_package_dir():
"""
Get the package root directory, used to locate resources inside the package.
"""
return os.path.dirname(__file__)
def get_logger(name='news'):
"""
Get the logger; reuse the existing one if it has already been set up.
"""
global __LOGGER_LOADED
if not __LOGGER_LOADED:
logdir = os.path.expanduser('~/.twnews/log')
if not os.path.isdir(logdir):
os.makedirs(logdir)
logini = '{}/conf/logging.ini'.format(get_package_dir())
if os.path.isfile(logini):
logging.config.fileConfig(logini)
__LOGGER_LOADED = True
logger = None
if __LOGGER_LOADED:
logger = logging.getLogger(name)
return logger
def get_session(proxy_first):
"""
Get a requests session; reuse the existing one if it already exists.
"""
global __SESSION
logger = get_logger()
if proxy_first and found_socks5():
logger.debug('Connecting through the proxy')
session_type = 'proxy'
else:
session_type = 'direct'
if __SESSION[session_type] is None:
logger.debug('Creating a new session')
user_agent = 'Mozilla/5.0 (Linux; Android 4.0.4; Galaxy Nexus Build/IMM76B) ' \
+ 'AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.76 Mobile Safari/537.36'
__SESSION[session_type] = requests.Session()
__SESSION[session_type].headers.update({
"Accept": "text/html,application/xhtml+xml,application/xml",
"Accept-Encoding": "gzip, deflate",
"Accept-Language": "zh-TW,zh;q=0.9,en-US;q=0.8,en;q=0.7",
"Cache-Control": "max-age=0",
"Connection": "keep-alive",
"User-Agent": user_agent
})
if session_type == 'proxy':
__SESSION[session_type].proxies = {
'http': 'socks5h://localhost:9050',
'https': 'socks5h://localhost:9050'
}
else:
logger.debug('Reusing the existing session')
return __SESSION[session_type]
def get_all_conf():
"""
Get the full configuration.
"""
global __ALLCONF
if __ALLCONF is None:
soup_cfg = '{}/conf/news-soup.json'.format(get_package_dir())
with open(soup_cfg, 'r') as conf_file:
__ALLCONF = json.load(conf_file)
return __ALLCONF
def detect_channel(path):
"""
Detect the news channel that corresponds to the given path.
"""
all_conf = get_all_conf()
for channel in all_conf:
if channel in path:
return channel
return ''
def get_channel_conf(channel, action=None):
"""
Load the configuration for a news channel.
"""
all_conf = get_all_conf()
if channel in all_conf:
chconf = all_conf[channel]
if action is None:
return chconf
if action in chconf:
return chconf[action]
return None
def get_cache_dir(category):
"""
Get the cache directory.
"""
logger = get_logger()
cache_dir = os.path.expanduser('~/.twnews/cache/' + category)
if not os.path.isdir(cache_dir):
logger.debug('Creating cache directory: %s', cache_dir)
os.makedirs(cache_dir)
logger.debug('Using cache directory: %s', cache_dir)
return cache_dir
| 23.615894
| 95
| 0.593382
|
59f6953b6919d842412a7bcad2208bf5a4f884b0
| 7,644
|
py
|
Python
|
hackday/teams/views.py
|
mpirnat/hackday
|
82a13665e2d93c80d6dfb1a1d5602495d01a243e
|
[
"MIT"
] | 4
|
2015-05-21T19:44:09.000Z
|
2019-12-03T11:07:03.000Z
|
hackday/teams/views.py
|
mpirnat/hackday
|
82a13665e2d93c80d6dfb1a1d5602495d01a243e
|
[
"MIT"
] | 3
|
2020-02-11T21:47:49.000Z
|
2021-06-10T17:28:44.000Z
|
hackday/teams/views.py
|
mpirnat/hackday
|
82a13665e2d93c80d6dfb1a1d5602495d01a243e
|
[
"MIT"
] | 2
|
2016-01-05T13:39:09.000Z
|
2017-02-19T09:31:58.000Z
|
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.shortcuts import render
from django.utils.decorators import method_decorator
from django.views.generic import CreateView
from django.views.generic import DetailView
from django.views.generic import ListView
from django.views.generic import UpdateView
from hackday.teams.forms import CreateTeamForm
from hackday.teams.forms import UpdateTeamForm
from hackday.teams.models import STATUS
from hackday.teams.models import Team
from hackday.teams.models import TeamCreateStatus
from hackday.assets.models import Attachment
from hackday.assets.models import ImageAttachment
from hackday.assets.models import Link
from hackday.assets.forms import AttachmentForm
from hackday.assets.forms import ImageAttachmentForm
from hackday.assets.forms import LinkAttachmentForm
class TeamListView(ListView):
model = Team
queryset = Team.objects.filter(status=STATUS.ACTIVE)
def get_context_data(self, **kwargs):
context = super(TeamListView, self).get_context_data(**kwargs)
try:
online = TeamCreateStatus.objects.all()[0].online
except:
online = True
context["create_status"] = online
return context
class TeamDetailView(DetailView):
model = Team
queryset = Team.objects.filter(status=STATUS.ACTIVE)
def get_context_data(self, **kwargs):
context = super(TeamDetailView, self).get_context_data(**kwargs)
try:
online = TeamCreateStatus.objects.all()[0].online
except:
online = True
context["create_status"] = online
return context
class TeamUpdateView(UpdateView):
model = Team
form_class = UpdateTeamForm
queryset = Team.objects.filter(status=STATUS.ACTIVE)
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
""" If user isn't a team member (or team isn't active),
redirect to team listing.
"""
try:
response = super(TeamUpdateView, self).dispatch(*args, **kwargs)
except:
return HttpResponseRedirect(reverse('teams-list'))
if not self.object.is_member(self.request.user):
return HttpResponseRedirect(reverse('teams-list'))
return response
def form_valid(self, form):
""" In case the user forgets, add the captain as a team member.
"""
team = form.save()
team.add_captain_as_member()
return HttpResponseRedirect(reverse('teams-detail',
kwargs={'slug': team.slug}))
class TeamCreateView(CreateView):
model = Team
form_class = CreateTeamForm
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
""" If team creation is disabled (via admin), redirect.
"""
try:
online = TeamCreateStatus.objects.all()[0].online
except:
online = True
if online:
return super(CreateView, self).dispatch(*args, **kwargs)
else:
return render(args[0], 'teams/create_offline.html')
def form_valid(self, form):
""" Because team doesn't exist in DB yet, and creator is required,
we need to first save the form, add creator, and THEN save to DB
"""
team = form.save(commit=False)
team.status = STATUS.ACTIVE
team.creator = self.request.user
team.save()
form.save_m2m()
team.add_captain_as_member()
return HttpResponseRedirect(reverse('teams-detail',
kwargs={'slug': team.slug}))
def delete(request, slug):
try:
team = Team.objects.get(slug=slug)
if team.is_member(request.user):
team.status = STATUS.DELETED
team.save()
return HttpResponseRedirect(reverse('teams-list'))
else:
return HttpResponseRedirect(reverse('teams-detail',
kwargs={'slug': slug}))
except:
return HttpResponseRedirect(reverse('teams-detail',
kwargs={'slug': slug}))
def upload_attachment(request, slug):
env = {}
team = Team.objects.get(slug=slug)
if not team.is_member(request.user):
return HttpResponseRedirect(reverse('teams-detail',
kwargs={'slug': slug}))
if request.method == 'POST':
form = AttachmentForm(request.POST, request.FILES)
if form.is_valid():
attachment = Attachment(attached_file = form.cleaned_data['attached_file'],
title = form.cleaned_data['title'],
alt_text = form.cleaned_data['alt_text'])
attachment.save()
team.attachments.add(attachment)
return HttpResponseRedirect(reverse('teams-detail',
kwargs={'slug': slug}))
else:
form = AttachmentForm()
env['form'] = form
return render(request, 'teams/upload.html', env)
def remove_attachment(request, slug, attachment_id):
team = Team.objects.get(slug=slug)
if team.is_member(request.user):
attachment = Attachment.objects.get(id=attachment_id)
team.attachments.remove(attachment)
return HttpResponseRedirect(reverse('teams-detail',
kwargs={'slug': slug}))
def upload_image(request, slug):
env = {}
team = Team.objects.get(slug=slug)
if not team.is_member(request.user):
return HttpResponseRedirect(reverse('teams-detail',
kwargs={'slug': slug}))
if request.method == 'POST':
form = ImageAttachmentForm(request.POST, request.FILES)
if form.is_valid():
image = ImageAttachment(attached_file = form.cleaned_data['attached_file'],
title = form.cleaned_data['title'],
alt_text = form.cleaned_data['alt_text'])
image.save()
team.images.add(image)
return HttpResponseRedirect(reverse('teams-detail',
kwargs={'slug': slug}))
else:
form = ImageAttachmentForm()
env['form'] = form
return render(request, 'teams/upload.html', env)
def remove_image(request, slug, attachment_id):
team = Team.objects.get(slug=slug)
if team.is_member(request.user):
image = ImageAttachment.objects.get(id=attachment_id)
team.images.remove(image)
return HttpResponseRedirect(reverse('teams-detail',
kwargs={'slug': slug}))
def add_link(request, slug):
env = {}
team = Team.objects.get(slug=slug)
if not team.is_member(request.user):
return HttpResponseRedirect(reverse('teams-detail',
kwargs={'slug': slug}))
if request.method == 'POST':
form = LinkAttachmentForm(request.POST)
if form.is_valid():
link = Link(url = form.cleaned_data['url'],
title = form.cleaned_data['title'],
text = form.cleaned_data['text'])
link.save()
team.links.add(link)
return HttpResponseRedirect(reverse('teams-detail',
kwargs={'slug': slug}))
else:
form = LinkAttachmentForm()
env['form'] = form
return render(request, 'teams/upload.html', env)
def remove_link(request, slug, attachment_id):
team = Team.objects.get(slug=slug)
if team.is_member(request.user):
link = Link.objects.get(id=attachment_id)
team.links.remove(link)
return HttpResponseRedirect(reverse('teams-detail',
kwargs={'slug': slug}))
| 33.234783
| 87
| 0.637101
|
136a0383d17086626661b3873898ec6cc05f834d
| 121,224
|
py
|
Python
|
corporate/tests/test_stripe.py
|
preetmishra/zulip
|
cc9dc07b079503a206831bef91e1a5175b8fb64c
|
[
"Apache-2.0"
] | null | null | null |
corporate/tests/test_stripe.py
|
preetmishra/zulip
|
cc9dc07b079503a206831bef91e1a5175b8fb64c
|
[
"Apache-2.0"
] | null | null | null |
corporate/tests/test_stripe.py
|
preetmishra/zulip
|
cc9dc07b079503a206831bef91e1a5175b8fb64c
|
[
"Apache-2.0"
] | null | null | null |
import json
import operator
import os
import re
import sys
from datetime import datetime, timedelta, timezone
from decimal import Decimal
from functools import wraps
from typing import Any, Callable, Dict, List, Mapping, Optional, Sequence, TypeVar, cast
from unittest.mock import Mock, patch
import responses
import stripe
import ujson
from django.conf import settings
from django.core import signing
from django.http import HttpResponse
from django.urls.resolvers import get_resolver
from django.utils.timezone import now as timezone_now
from corporate.lib.stripe import (
MAX_INVOICED_LICENSES,
MIN_INVOICED_LICENSES,
BillingError,
StripeCardError,
add_months,
attach_discount_to_realm,
catch_stripe_errors,
compute_plan_parameters,
get_discount_for_realm,
get_latest_seat_count,
invoice_plan,
invoice_plans_as_needed,
make_end_of_cycle_updates_if_needed,
next_month,
process_initial_upgrade,
sign_string,
stripe_get_customer,
unsign_string,
update_license_ledger_for_automanaged_plan,
update_license_ledger_if_needed,
update_or_create_stripe_customer,
)
from corporate.models import (
Customer,
CustomerPlan,
LicenseLedger,
get_current_plan_by_customer,
get_current_plan_by_realm,
get_customer_by_realm,
)
from zerver.lib.actions import (
do_activate_user,
do_create_user,
do_deactivate_realm,
do_deactivate_user,
do_reactivate_realm,
do_reactivate_user,
)
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_helpers import reset_emails_in_zulip_realm
from zerver.lib.timestamp import datetime_to_timestamp, timestamp_to_datetime
from zerver.models import Realm, RealmAuditLog, UserProfile, get_realm
CallableT = TypeVar('CallableT', bound=Callable[..., Any])
STRIPE_FIXTURES_DIR = "corporate/tests/stripe_fixtures"
# TODO: check that this creates a token similar to what is created by our
# actual Stripe Checkout flows
def stripe_create_token(card_number: str="4242424242424242") -> stripe.Token:
return stripe.Token.create(
card={
"number": card_number,
"exp_month": 3,
"exp_year": 2033,
"cvc": "333",
"name": "Ada Starr",
"address_line1": "Under the sea,",
"address_city": "Pacific",
"address_zip": "33333",
"address_country": "United States",
})
def stripe_fixture_path(decorated_function_name: str, mocked_function_name: str, call_count: int) -> str:
# Make the eventual filename a bit shorter, and also we conventionally
# use test_* for the python test files
if decorated_function_name[:5] == 'test_':
decorated_function_name = decorated_function_name[5:]
return f"{STRIPE_FIXTURES_DIR}/{decorated_function_name}--{mocked_function_name[7:]}.{call_count}.json"
def fixture_files_for_function(decorated_function: CallableT) -> List[str]: # nocoverage
decorated_function_name = decorated_function.__name__
if decorated_function_name[:5] == 'test_':
decorated_function_name = decorated_function_name[5:]
return sorted([f'{STRIPE_FIXTURES_DIR}/{f}' for f in os.listdir(STRIPE_FIXTURES_DIR)
if f.startswith(decorated_function_name + '--')])
def generate_and_save_stripe_fixture(decorated_function_name: str, mocked_function_name: str,
mocked_function: CallableT) -> Callable[[Any, Any], Any]: # nocoverage
def _generate_and_save_stripe_fixture(*args: Any, **kwargs: Any) -> Any:
# Note that mock is not the same as mocked_function, even though their
# definitions look the same
mock = operator.attrgetter(mocked_function_name)(sys.modules[__name__])
fixture_path = stripe_fixture_path(decorated_function_name, mocked_function_name, mock.call_count)
try:
with responses.RequestsMock() as request_mock:
request_mock.add_passthru("https://api.stripe.com")
# Talk to Stripe
stripe_object = mocked_function(*args, **kwargs)
except stripe.error.StripeError as e:
with open(fixture_path, 'w') as f:
error_dict = e.__dict__
error_dict["headers"] = dict(error_dict["headers"])
f.write(json.dumps(error_dict, indent=2, separators=(',', ': '), sort_keys=True) + "\n")
raise e
with open(fixture_path, 'w') as f:
if stripe_object is not None:
f.write(str(stripe_object) + "\n")
else:
f.write("{}\n")
return stripe_object
return _generate_and_save_stripe_fixture
def read_stripe_fixture(decorated_function_name: str,
mocked_function_name: str) -> Callable[[Any, Any], Any]:
def _read_stripe_fixture(*args: Any, **kwargs: Any) -> Any:
mock = operator.attrgetter(mocked_function_name)(sys.modules[__name__])
fixture_path = stripe_fixture_path(decorated_function_name, mocked_function_name, mock.call_count)
fixture = ujson.load(open(fixture_path))
# Check for StripeError fixtures
if "json_body" in fixture:
requestor = stripe.api_requestor.APIRequestor()
# This function will raise the relevant StripeError according to the fixture
requestor.interpret_response(fixture["http_body"], fixture["http_status"], fixture["headers"])
return stripe.util.convert_to_stripe_object(fixture)
return _read_stripe_fixture
def delete_fixture_data(decorated_function: CallableT) -> None: # nocoverage
for fixture_file in fixture_files_for_function(decorated_function):
os.remove(fixture_file)
def normalize_fixture_data(decorated_function: CallableT,
tested_timestamp_fields: Sequence[str] = []) -> None: # nocoverage
# stripe ids are all of the form cus_D7OT2jf5YAtZQ2
id_lengths = [
('cus', 14), ('sub', 14), ('si', 14), ('sli', 14), ('req', 14), ('tok', 24), ('card', 24),
('txn', 24), ('ch', 24), ('in', 24), ('ii', 24), ('test', 12), ('src_client_secret', 24),
('src', 24), ('invst', 26), ('acct', 16), ('rcpt', 31)]
# We'll replace cus_D7OT2jf5YAtZQ2 with something like cus_NORMALIZED0001
pattern_translations = {
f"{prefix}_[A-Za-z0-9]{{{length}}}": f"{prefix}_NORMALIZED%0{length - 10}d"
for prefix, length in id_lengths
}
# We'll replace "invoice_prefix": "A35BC4Q" with something like "invoice_prefix": "NORMA01"
pattern_translations.update({
'"invoice_prefix": "([A-Za-z0-9]{7,8})"': 'NORMA%02d',
'"fingerprint": "([A-Za-z0-9]{16})"': 'NORMALIZED%06d',
'"number": "([A-Za-z0-9]{7,8}-[A-Za-z0-9]{4})"': 'NORMALI-%04d',
'"address": "([A-Za-z0-9]{9}-test_[A-Za-z0-9]{12})"': '000000000-test_NORMALIZED%02d',
# Don't use (..) notation, since the matched strings may be small integers that will also match
# elsewhere in the file
'"realm_id": "[0-9]+"': '"realm_id": "%d"',
})
# Normalizing across all timestamps still causes a lot of variance run to run, which is
# why we're doing something a bit more complicated
for i, timestamp_field in enumerate(tested_timestamp_fields):
# Don't use (..) notation, since the matched timestamp can easily appear in other fields
pattern_translations[
f'"{timestamp_field}": 1[5-9][0-9]{{8}}(?![0-9-])'
] = f'"{timestamp_field}": 1{i+1:02}%07d'
normalized_values: Dict[str, Dict[str, str]] = {
pattern: {} for pattern in pattern_translations.keys()
}
for fixture_file in fixture_files_for_function(decorated_function):
with open(fixture_file) as f:
file_content = f.read()
for pattern, translation in pattern_translations.items():
for match in re.findall(pattern, file_content):
if match not in normalized_values[pattern]:
normalized_values[pattern][match] = translation % (len(normalized_values[pattern]) + 1,)
file_content = file_content.replace(match, normalized_values[pattern][match])
file_content = re.sub(r'(?<="risk_score": )(\d+)', '0', file_content)
file_content = re.sub(r'(?<="times_redeemed": )(\d+)', '0', file_content)
file_content = re.sub(r'(?<="idempotency-key": )"([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f-]*)"',
'"00000000-0000-0000-0000-000000000000"', file_content)
# Dates
file_content = re.sub(r'(?<="Date": )"(.* GMT)"', '"NORMALIZED DATETIME"', file_content)
file_content = re.sub(r'[0-3]\d [A-Z][a-z]{2} 20[1-2]\d', 'NORMALIZED DATE', file_content)
# IP addresses
file_content = re.sub(r'"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}"', '"0.0.0.0"', file_content)
# All timestamps not in tested_timestamp_fields
file_content = re.sub(r': (1[5-9][0-9]{8})(?![0-9-])', ': 1000000000', file_content)
with open(fixture_file, "w") as f:
f.write(file_content)
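# The Stripe SDK entry points patched by mock_stripe; calls to any of these in
# a decorated test are either recorded to fixtures or replayed from them.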
MOCKED_STRIPE_FUNCTION_NAMES = [f"stripe.{name}" for name in [
"Charge.create", "Charge.list",
"Coupon.create",
"Customer.create", "Customer.retrieve", "Customer.save",
"Invoice.create", "Invoice.finalize_invoice", "Invoice.list", "Invoice.pay", "Invoice.upcoming",
"InvoiceItem.create", "InvoiceItem.list",
"Plan.create",
"Product.create",
"Subscription.create", "Subscription.delete", "Subscription.retrieve", "Subscription.save",
"Token.create",
]]
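# Decorator for tests that exercise Stripe: patches every function in
# MOCKED_STRIPE_FUNCTION_NAMES to record fresh fixtures (when
# settings.GENERATE_STRIPE_FIXTURES is set, or generate=True is passed) or to
# replay the checked-in fixtures; after a recording run the fixtures are
# normalized via normalize_fixture_data.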
def mock_stripe(tested_timestamp_fields: Sequence[str]=[],
generate: Optional[bool]=None) -> Callable[[CallableT], CallableT]:
def _mock_stripe(decorated_function: CallableT) -> CallableT:
generate_fixture = generate
if generate_fixture is None:
generate_fixture = settings.GENERATE_STRIPE_FIXTURES
for mocked_function_name in MOCKED_STRIPE_FUNCTION_NAMES:
mocked_function = operator.attrgetter(mocked_function_name)(sys.modules[__name__])
if generate_fixture:
side_effect = generate_and_save_stripe_fixture(
decorated_function.__name__, mocked_function_name, mocked_function) # nocoverage
else:
side_effect = read_stripe_fixture(decorated_function.__name__, mocked_function_name)
decorated_function = patch(mocked_function_name, side_effect=side_effect)(decorated_function)
@wraps(decorated_function)
def wrapped(*args: object, **kwargs: object) -> object:
if generate_fixture: # nocoverage
delete_fixture_data(decorated_function)
val = decorated_function(*args, **kwargs)
normalize_fixture_data(decorated_function, tested_timestamp_fields)
return val
else:
return decorated_function(*args, **kwargs)
return cast(CallableT, wrapped)
return _mock_stripe
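# Typical usage, as a sketch (the test name below is illustrative):
#
#     @mock_stripe(tested_timestamp_fields=["created"])
#     def test_example(self, *mocks: Mock) -> None:
#         ...  # Stripe calls in here are recorded or replayed as described above.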
# A Kandra is a fictional character that can become anything. Used as a
# wildcard when testing for equality.
class Kandra: # nocoverage: TODO
def __eq__(self, other: Any) -> bool:
return True
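# Shared setup for billing tests: pins the realm to a predictable seat count of
# 6 and provides helpers (upgrade, local_upgrade) for driving the upgrade flow
# with or without talking to Stripe.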
class StripeTestCase(ZulipTestCase):
def setUp(self, *mocks: Mock) -> None:
super().setUp()
reset_emails_in_zulip_realm()
realm = get_realm('zulip')
# Explicitly limit our active users to 6 regular users,
# to make seat_count less prone to changes in our test data.
# We also keep a guest user and a bot to make the data
# slightly realistic.
active_emails = [
self.example_email('AARON'),
self.example_email('cordelia'),
self.example_email('hamlet'),
self.example_email('iago'),
self.example_email('othello'),
self.example_email('desdemona'),
self.example_email('polonius'), # guest
self.example_email('default_bot'), # bot
]
# Deactivate all users in our realm that aren't in our whitelist.
UserProfile.objects.filter(realm_id=realm.id).exclude(email__in=active_emails).update(is_active=False)
        # Sanity-check that our 8 expected users are active.
self.assertEqual(
UserProfile.objects.filter(realm=realm, is_active=True).count(),
8,
)
# Make sure we have active users outside our realm (to make
# sure relevant queries restrict on realm).
self.assertEqual(
UserProfile.objects.exclude(realm=realm).filter(is_active=True).count(),
10,
)
# Our seat count excludes our guest user and bot, and
# we want this to be predictable for certain tests with
# arithmetic calculations.
self.assertEqual(get_latest_seat_count(realm), 6)
self.seat_count = 6
self.signed_seat_count, self.salt = sign_string(str(self.seat_count))
# Choosing dates with corresponding timestamps below 1500000000 so that they are
# not caught by our timestamp normalization regex in normalize_fixture_data
self.now = datetime(2012, 1, 2, 3, 4, 5, tzinfo=timezone.utc)
self.next_month = datetime(2012, 2, 2, 3, 4, 5, tzinfo=timezone.utc)
self.next_year = datetime(2013, 1, 2, 3, 4, 5, tzinfo=timezone.utc)
def get_signed_seat_count_from_response(self, response: HttpResponse) -> Optional[str]:
match = re.search(r'name=\"signed_seat_count\" value=\"(.+)\"', response.content.decode("utf-8"))
return match.group(1) if match else None
def get_salt_from_response(self, response: HttpResponse) -> Optional[str]:
match = re.search(r'name=\"salt\" value=\"(\w+)\"', response.content.decode("utf-8"))
return match.group(1) if match else None
def upgrade(self, invoice: bool=False, talk_to_stripe: bool=True,
realm: Optional[Realm]=None, del_args: Sequence[str]=[],
**kwargs: Any) -> HttpResponse:
host_args = {}
if realm is not None: # nocoverage: TODO
host_args['HTTP_HOST'] = realm.host
response = self.client_get("/upgrade/", {}, **host_args)
params: Dict[str, Any] = {
'schedule': 'annual',
'signed_seat_count': self.get_signed_seat_count_from_response(response),
'salt': self.get_salt_from_response(response)}
if invoice: # send_invoice
params.update({
'billing_modality': 'send_invoice',
'licenses': 123})
else: # charge_automatically
stripe_token = None
if not talk_to_stripe:
stripe_token = 'token'
stripe_token = kwargs.get('stripe_token', stripe_token)
if stripe_token is None:
stripe_token = stripe_create_token().id
params.update({
'billing_modality': 'charge_automatically',
'license_management': 'automatic',
'stripe_token': stripe_token,
})
params.update(kwargs)
for key in del_args:
if key in params:
del params[key]
for key, value in params.items():
params[key] = ujson.dumps(value)
return self.client_post("/json/billing/upgrade", params, **host_args)
# Upgrade without talking to Stripe
def local_upgrade(self, *args: Any) -> None:
class StripeMock(Mock):
            def __init__(self, depth: int=1) -> None:
super().__init__(spec=stripe.Card)
self.id = 'id'
self.created = '1000'
self.last4 = '4242'
if depth == 1:
self.source = StripeMock(depth=2)
def upgrade_func(*args: Any) -> Any:
return process_initial_upgrade(self.example_user('hamlet'), *args[:4])
for mocked_function_name in MOCKED_STRIPE_FUNCTION_NAMES:
upgrade_func = patch(mocked_function_name, return_value=StripeMock())(upgrade_func)
upgrade_func(*args)
class StripeTest(StripeTestCase):
@patch("corporate.lib.stripe.billing_logger.error")
def test_catch_stripe_errors(self, mock_billing_logger_error: Mock) -> None:
@catch_stripe_errors
def raise_invalid_request_error() -> None:
raise stripe.error.InvalidRequestError(
"message", "param", "code", json_body={})
with self.assertRaises(BillingError) as context:
raise_invalid_request_error()
self.assertEqual('other stripe error', context.exception.description)
mock_billing_logger_error.assert_called()
@catch_stripe_errors
def raise_card_error() -> None:
error_message = "The card number is not a valid credit card number."
json_body = {"error": {"message": error_message}}
raise stripe.error.CardError(error_message, "number", "invalid_number",
json_body=json_body)
with self.assertRaises(StripeCardError) as context:
raise_card_error()
self.assertIn('not a valid credit card', context.exception.message)
self.assertEqual('card error', context.exception.description)
mock_billing_logger_error.assert_called()
def test_billing_not_enabled(self) -> None:
iago = self.example_user('iago')
with self.settings(BILLING_ENABLED=False):
self.login_user(iago)
response = self.client_get("/upgrade/", follow=True)
self.assertEqual(response.status_code, 404)
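    # End-to-end card upgrade: drives the Stripe Checkout flow against recorded
    # fixtures, then checks the resulting Stripe objects, the Zulip billing
    # models, the RealmAuditLog entries, and the /billing page.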
@mock_stripe(tested_timestamp_fields=["created"])
def test_upgrade_by_card(self, *mocks: Mock) -> None:
user = self.example_user("hamlet")
self.login_user(user)
response = self.client_get("/upgrade/")
self.assert_in_success_response(['Pay annually'], response)
self.assertNotEqual(user.realm.plan_type, Realm.STANDARD)
self.assertFalse(Customer.objects.filter(realm=user.realm).exists())
# Click "Make payment" in Stripe Checkout
with patch('corporate.lib.stripe.timezone_now', return_value=self.now):
self.upgrade()
# Check that we correctly created a Customer object in Stripe
stripe_customer = stripe_get_customer(Customer.objects.get(realm=user.realm).stripe_customer_id)
self.assertEqual(stripe_customer.default_source.id[:5], 'card_')
self.assertEqual(stripe_customer.description, "zulip (Zulip Dev)")
self.assertEqual(stripe_customer.discount, None)
self.assertEqual(stripe_customer.email, user.email)
metadata_dict = dict(stripe_customer.metadata)
self.assertEqual(metadata_dict['realm_str'], 'zulip')
try:
int(metadata_dict['realm_id'])
except ValueError: # nocoverage
raise AssertionError("realm_id is not a number")
# Check Charges in Stripe
stripe_charges = [charge for charge in stripe.Charge.list(customer=stripe_customer.id)]
self.assertEqual(len(stripe_charges), 1)
self.assertEqual(stripe_charges[0].amount, 8000 * self.seat_count)
# TODO: fix Decimal
self.assertEqual(stripe_charges[0].description,
f"Upgrade to Zulip Standard, $80.0 x {self.seat_count}")
self.assertEqual(stripe_charges[0].receipt_email, user.email)
self.assertEqual(stripe_charges[0].statement_descriptor, "Zulip Standard")
# Check Invoices in Stripe
stripe_invoices = [invoice for invoice in stripe.Invoice.list(customer=stripe_customer.id)]
self.assertEqual(len(stripe_invoices), 1)
self.assertIsNotNone(stripe_invoices[0].status_transitions.finalized_at)
invoice_params = {
# auto_advance is False because the invoice has been paid
'amount_due': 0, 'amount_paid': 0, 'auto_advance': False, 'billing': 'charge_automatically',
'charge': None, 'status': 'paid', 'total': 0}
for key, value in invoice_params.items():
self.assertEqual(stripe_invoices[0].get(key), value)
# Check Line Items on Stripe Invoice
stripe_line_items = [item for item in stripe_invoices[0].lines]
self.assertEqual(len(stripe_line_items), 2)
line_item_params = {
'amount': 8000 * self.seat_count, 'description': 'Zulip Standard', 'discountable': False,
'period': {
'end': datetime_to_timestamp(self.next_year),
'start': datetime_to_timestamp(self.now)},
# There's no unit_amount on Line Items, probably because it doesn't show up on the
# user-facing invoice. We could pull the Invoice Item instead and test unit_amount there,
# but testing the amount and quantity seems sufficient.
'plan': None, 'proration': False, 'quantity': self.seat_count}
for key, value in line_item_params.items():
self.assertEqual(stripe_line_items[0].get(key), value)
line_item_params = {
'amount': -8000 * self.seat_count, 'description': 'Payment (Card ending in 4242)',
'discountable': False, 'plan': None, 'proration': False, 'quantity': 1}
for key, value in line_item_params.items():
self.assertEqual(stripe_line_items[1].get(key), value)
# Check that we correctly populated Customer, CustomerPlan, and LicenseLedger in Zulip
customer = Customer.objects.get(stripe_customer_id=stripe_customer.id, realm=user.realm)
plan = CustomerPlan.objects.get(
customer=customer, automanage_licenses=True,
price_per_license=8000, fixed_price=None, discount=None, billing_cycle_anchor=self.now,
billing_schedule=CustomerPlan.ANNUAL, invoiced_through=LicenseLedger.objects.first(),
next_invoice_date=self.next_month, tier=CustomerPlan.STANDARD,
status=CustomerPlan.ACTIVE)
LicenseLedger.objects.get(
plan=plan, is_renewal=True, event_time=self.now, licenses=self.seat_count,
licenses_at_next_renewal=self.seat_count)
# Check RealmAuditLog
audit_log_entries = list(RealmAuditLog.objects.filter(acting_user=user)
.values_list('event_type', 'event_time').order_by('id'))
self.assertEqual(audit_log_entries, [
(RealmAuditLog.STRIPE_CUSTOMER_CREATED, timestamp_to_datetime(stripe_customer.created)),
(RealmAuditLog.STRIPE_CARD_CHANGED, timestamp_to_datetime(stripe_customer.created)),
(RealmAuditLog.CUSTOMER_PLAN_CREATED, self.now),
# TODO: Check for REALM_PLAN_TYPE_CHANGED
# (RealmAuditLog.REALM_PLAN_TYPE_CHANGED, Kandra()),
])
self.assertEqual(ujson.loads(RealmAuditLog.objects.filter(
event_type=RealmAuditLog.CUSTOMER_PLAN_CREATED).values_list(
'extra_data', flat=True).first())['automanage_licenses'], True)
# Check that we correctly updated Realm
realm = get_realm("zulip")
self.assertEqual(realm.plan_type, Realm.STANDARD)
self.assertEqual(realm.max_invites, Realm.INVITES_STANDARD_REALM_DAILY_MAX)
# Check that we can no longer access /upgrade
response = self.client_get("/upgrade/")
self.assertEqual(response.status_code, 302)
self.assertEqual('/billing/', response.url)
# Check /billing has the correct information
with patch('corporate.views.timezone_now', return_value=self.now):
response = self.client_get("/billing/")
self.assert_not_in_success_response(['Pay annually'], response)
for substring in [
'Zulip Standard', str(self.seat_count),
'You are using', f'{self.seat_count} of {self.seat_count} licenses',
'Licenses are automatically managed by Zulip; when you add',
'Your plan will renew on', 'January 2, 2013', f'${80 * self.seat_count}.00',
'Visa ending in 4242',
'Update card']:
self.assert_in_response(substring, response)
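    # Invoiced upgrade with manually managed licenses: no card is charged;
    # instead a single open Stripe invoice for 123 licenses is created.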
@mock_stripe(tested_timestamp_fields=["created"])
def test_upgrade_by_invoice(self, *mocks: Mock) -> None:
user = self.example_user("hamlet")
self.login_user(user)
# Click "Make payment" in Stripe Checkout
with patch('corporate.lib.stripe.timezone_now', return_value=self.now):
self.upgrade(invoice=True)
# Check that we correctly created a Customer in Stripe
stripe_customer = stripe_get_customer(Customer.objects.get(realm=user.realm).stripe_customer_id)
        # It can take a second for Stripe to attach the source to the customer, and in
        # particular it may not yet be attached when stripe_get_customer is called above,
        # which causes test flakes. So the assertion below is commented out, but left
        # here so future readers know what is supposed to happen:
        # self.assertEqual(stripe_customer.default_source.type, 'ach_credit_transfer')
# Check Charges in Stripe
self.assertFalse(stripe.Charge.list(customer=stripe_customer.id))
# Check Invoices in Stripe
stripe_invoices = [invoice for invoice in stripe.Invoice.list(customer=stripe_customer.id)]
self.assertEqual(len(stripe_invoices), 1)
self.assertIsNotNone(stripe_invoices[0].due_date)
self.assertIsNotNone(stripe_invoices[0].status_transitions.finalized_at)
invoice_params = {
'amount_due': 8000 * 123, 'amount_paid': 0, 'attempt_count': 0,
'auto_advance': True, 'billing': 'send_invoice', 'statement_descriptor': 'Zulip Standard',
'status': 'open', 'total': 8000 * 123}
for key, value in invoice_params.items():
self.assertEqual(stripe_invoices[0].get(key), value)
# Check Line Items on Stripe Invoice
stripe_line_items = [item for item in stripe_invoices[0].lines]
self.assertEqual(len(stripe_line_items), 1)
line_item_params = {
'amount': 8000 * 123, 'description': 'Zulip Standard', 'discountable': False,
'period': {
'end': datetime_to_timestamp(self.next_year),
'start': datetime_to_timestamp(self.now)},
'plan': None, 'proration': False, 'quantity': 123}
for key, value in line_item_params.items():
self.assertEqual(stripe_line_items[0].get(key), value)
# Check that we correctly populated Customer, CustomerPlan and LicenseLedger in Zulip
customer = Customer.objects.get(stripe_customer_id=stripe_customer.id, realm=user.realm)
plan = CustomerPlan.objects.get(
customer=customer, automanage_licenses=False, charge_automatically=False,
price_per_license=8000, fixed_price=None, discount=None, billing_cycle_anchor=self.now,
billing_schedule=CustomerPlan.ANNUAL, invoiced_through=LicenseLedger.objects.first(),
next_invoice_date=self.next_year, tier=CustomerPlan.STANDARD,
status=CustomerPlan.ACTIVE)
LicenseLedger.objects.get(
plan=plan, is_renewal=True, event_time=self.now, licenses=123, licenses_at_next_renewal=123)
# Check RealmAuditLog
audit_log_entries = list(RealmAuditLog.objects.filter(acting_user=user)
.values_list('event_type', 'event_time').order_by('id'))
self.assertEqual(audit_log_entries, [
(RealmAuditLog.STRIPE_CUSTOMER_CREATED, timestamp_to_datetime(stripe_customer.created)),
(RealmAuditLog.CUSTOMER_PLAN_CREATED, self.now),
# TODO: Check for REALM_PLAN_TYPE_CHANGED
# (RealmAuditLog.REALM_PLAN_TYPE_CHANGED, Kandra()),
])
self.assertEqual(ujson.loads(RealmAuditLog.objects.filter(
event_type=RealmAuditLog.CUSTOMER_PLAN_CREATED).values_list(
'extra_data', flat=True).first())['automanage_licenses'], False)
# Check that we correctly updated Realm
realm = get_realm("zulip")
self.assertEqual(realm.plan_type, Realm.STANDARD)
self.assertEqual(realm.max_invites, Realm.INVITES_STANDARD_REALM_DAILY_MAX)
# Check that we can no longer access /upgrade
response = self.client_get("/upgrade/")
self.assertEqual(response.status_code, 302)
self.assertEqual('/billing/', response.url)
# Check /billing has the correct information
with patch('corporate.views.timezone_now', return_value=self.now):
response = self.client_get("/billing/")
self.assert_not_in_success_response(['Pay annually', 'Update card'], response)
for substring in [
'Zulip Standard', str(123),
'You are using', f'{self.seat_count} of {123} licenses',
'Licenses are manually managed. You will not be able to add ',
'Your plan will renew on', 'January 2, 2013', '$9,840.00', # 9840 = 80 * 123
'Billed by invoice']:
self.assert_in_response(substring, response)
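    # Free-trial upgrade by card: nothing is charged or invoiced at signup; the
    # plan stays in FREE_TRIAL until the trial end date, when it becomes ACTIVE
    # and the first renewal invoice is issued, with later license changes
    # invoiced on subsequent runs of invoice_plans_as_needed.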
@mock_stripe(tested_timestamp_fields=["created"])
def test_free_trial_upgrade_by_card(self, *mocks: Mock) -> None:
user = self.example_user("hamlet")
self.login_user(user)
with self.settings(FREE_TRIAL_DAYS=60):
response = self.client_get("/upgrade/")
free_trial_end_date = self.now + timedelta(days=60)
self.assert_in_success_response(['Pay annually', 'Free Trial', '60 day'], response)
self.assertNotEqual(user.realm.plan_type, Realm.STANDARD)
self.assertFalse(Customer.objects.filter(realm=user.realm).exists())
with patch('corporate.lib.stripe.timezone_now', return_value=self.now):
self.upgrade()
stripe_customer = stripe_get_customer(Customer.objects.get(realm=user.realm).stripe_customer_id)
self.assertEqual(stripe_customer.default_source.id[:5], 'card_')
self.assertEqual(stripe_customer.description, "zulip (Zulip Dev)")
self.assertEqual(stripe_customer.discount, None)
self.assertEqual(stripe_customer.email, user.email)
metadata_dict = dict(stripe_customer.metadata)
self.assertEqual(metadata_dict['realm_str'], 'zulip')
try:
int(metadata_dict['realm_id'])
except ValueError: # nocoverage
raise AssertionError("realm_id is not a number")
stripe_charges = [charge for charge in stripe.Charge.list(customer=stripe_customer.id)]
self.assertEqual(len(stripe_charges), 0)
stripe_invoices = [invoice for invoice in stripe.Invoice.list(customer=stripe_customer.id)]
self.assertEqual(len(stripe_invoices), 0)
customer = Customer.objects.get(stripe_customer_id=stripe_customer.id, realm=user.realm)
plan = CustomerPlan.objects.get(
customer=customer, automanage_licenses=True,
price_per_license=8000, fixed_price=None, discount=None, billing_cycle_anchor=self.now,
billing_schedule=CustomerPlan.ANNUAL, invoiced_through=LicenseLedger.objects.first(),
next_invoice_date=free_trial_end_date, tier=CustomerPlan.STANDARD,
status=CustomerPlan.FREE_TRIAL)
LicenseLedger.objects.get(
plan=plan, is_renewal=True, event_time=self.now, licenses=self.seat_count,
licenses_at_next_renewal=self.seat_count)
audit_log_entries = list(RealmAuditLog.objects.filter(acting_user=user)
.values_list('event_type', 'event_time').order_by('id'))
self.assertEqual(audit_log_entries, [
(RealmAuditLog.STRIPE_CUSTOMER_CREATED, timestamp_to_datetime(stripe_customer.created)),
(RealmAuditLog.STRIPE_CARD_CHANGED, timestamp_to_datetime(stripe_customer.created)),
(RealmAuditLog.CUSTOMER_PLAN_CREATED, self.now),
# TODO: Check for REALM_PLAN_TYPE_CHANGED
# (RealmAuditLog.REALM_PLAN_TYPE_CHANGED, Kandra()),
])
self.assertEqual(ujson.loads(RealmAuditLog.objects.filter(
event_type=RealmAuditLog.CUSTOMER_PLAN_CREATED).values_list(
'extra_data', flat=True).first())['automanage_licenses'], True)
realm = get_realm("zulip")
self.assertEqual(realm.plan_type, Realm.STANDARD)
self.assertEqual(realm.max_invites, Realm.INVITES_STANDARD_REALM_DAILY_MAX)
with patch('corporate.views.timezone_now', return_value=self.now):
response = self.client_get("/billing/")
self.assert_not_in_success_response(['Pay annually'], response)
for substring in [
'Zulip Standard', 'Free Trial', str(self.seat_count),
'You are using', f'{self.seat_count} of {self.seat_count} licenses',
'Your plan will be upgraded to', 'March 2, 2012', f'${80 * self.seat_count}.00',
'Visa ending in 4242',
'Update card']:
self.assert_in_response(substring, response)
self.assert_not_in_success_response(["Go to your Zulip organization"], response)
with patch('corporate.views.timezone_now', return_value=self.now):
response = self.client_get("/billing/?onboarding=true")
self.assert_in_success_response(["Go to your Zulip organization"], response)
with patch('corporate.lib.stripe.get_latest_seat_count', return_value=12):
update_license_ledger_if_needed(realm, self.now)
self.assertEqual(
LicenseLedger.objects.order_by('-id').values_list('licenses', 'licenses_at_next_renewal').first(),
(12, 12),
)
with patch('corporate.lib.stripe.get_latest_seat_count', return_value=15):
update_license_ledger_if_needed(realm, self.next_month)
self.assertEqual(
LicenseLedger.objects.order_by('-id').values_list('licenses', 'licenses_at_next_renewal').first(),
(15, 15),
)
invoice_plans_as_needed(self.next_month)
invoices = stripe.Invoice.list(customer=stripe_customer.id)
self.assertEqual(len(invoices), 0)
customer_plan = CustomerPlan.objects.get(customer=customer)
self.assertEqual(customer_plan.status, CustomerPlan.FREE_TRIAL)
self.assertEqual(customer_plan.next_invoice_date, free_trial_end_date)
invoice_plans_as_needed(free_trial_end_date)
customer_plan.refresh_from_db()
realm.refresh_from_db()
self.assertEqual(customer_plan.status, CustomerPlan.ACTIVE)
self.assertEqual(customer_plan.next_invoice_date, add_months(free_trial_end_date, 1))
self.assertEqual(realm.plan_type, Realm.STANDARD)
invoices = [invoice for invoice in stripe.Invoice.list(customer=stripe_customer.id)]
self.assertEqual(len(invoices), 1)
invoice_params = {
"amount_due": 15 * 80 * 100, "amount_paid": 0, "amount_remaining": 15 * 80 * 100,
"auto_advance": True, "billing": "charge_automatically", "collection_method": "charge_automatically",
"customer_email": self.example_email("hamlet"), "discount": None, "paid": False, "status": "open",
"total": 15 * 80 * 100,
}
for key, value in invoice_params.items():
self.assertEqual(invoices[0].get(key), value)
invoice_items = [invoice_item for invoice_item in invoices[0].get("lines")]
self.assertEqual(len(invoice_items), 1)
invoice_item_params = {
"amount": 15 * 80 * 100, "description": "Zulip Standard - renewal",
"plan": None, "quantity": 15, "subscription": None, "discountable": False,
"period": {
"start": datetime_to_timestamp(free_trial_end_date),
"end": datetime_to_timestamp(add_months(free_trial_end_date, 12)),
},
}
for key, value in invoice_item_params.items():
self.assertEqual(invoice_items[0][key], value)
invoice_plans_as_needed(add_months(free_trial_end_date, 1))
invoices = [invoice for invoice in stripe.Invoice.list(customer=stripe_customer.id)]
self.assertEqual(len(invoices), 1)
with patch('corporate.lib.stripe.get_latest_seat_count', return_value=19):
update_license_ledger_if_needed(realm, add_months(free_trial_end_date, 10))
self.assertEqual(
LicenseLedger.objects.order_by('-id').values_list('licenses', 'licenses_at_next_renewal').first(),
(19, 19),
)
invoice_plans_as_needed(add_months(free_trial_end_date, 10))
invoices = [invoice for invoice in stripe.Invoice.list(customer=stripe_customer.id)]
self.assertEqual(len(invoices), 2)
            invoice_params = {
                "amount_due": 5172, "auto_advance": True, "billing": "charge_automatically",
                "collection_method": "charge_automatically", "customer_email": "hamlet@zulip.com",
            }
            for key, value in invoice_params.items():
                self.assertEqual(invoices[0].get(key), value)
            invoice_items = [invoice_item for invoice_item in invoices[0].get("lines")]
            self.assertEqual(len(invoice_items), 1)
            invoice_item_params = {
                "amount": 5172, "description": "Additional license (Jan 2, 2013 - Mar 2, 2013)",
                "discountable": False, "quantity": 4,
                "period": {
                    "start": datetime_to_timestamp(add_months(free_trial_end_date, 10)),
                    "end": datetime_to_timestamp(add_months(free_trial_end_date, 12)),
                },
            }
            for key, value in invoice_item_params.items():
                self.assertEqual(invoice_items[0][key], value)
invoice_plans_as_needed(add_months(free_trial_end_date, 12))
invoices = [invoice for invoice in stripe.Invoice.list(customer=stripe_customer.id)]
self.assertEqual(len(invoices), 3)
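    # Free trial combined with invoiced billing: nothing is invoiced until the
    # trial ends, then a single annual invoice for the fixed 123 licenses is
    # issued, and the next invoice only appears at the following annual renewal.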
@mock_stripe(tested_timestamp_fields=["created"])
def test_free_trial_upgrade_by_invoice(self, *mocks: Mock) -> None:
user = self.example_user("hamlet")
self.login_user(user)
free_trial_end_date = self.now + timedelta(days=60)
with self.settings(FREE_TRIAL_DAYS=60):
response = self.client_get("/upgrade/")
self.assert_in_success_response(['Pay annually', 'Free Trial', '60 day'], response)
self.assertNotEqual(user.realm.plan_type, Realm.STANDARD)
self.assertFalse(Customer.objects.filter(realm=user.realm).exists())
with patch('corporate.lib.stripe.timezone_now', return_value=self.now):
self.upgrade(invoice=True)
stripe_customer = stripe_get_customer(Customer.objects.get(realm=user.realm).stripe_customer_id)
self.assertEqual(stripe_customer.discount, None)
self.assertEqual(stripe_customer.email, user.email)
metadata_dict = dict(stripe_customer.metadata)
self.assertEqual(metadata_dict['realm_str'], 'zulip')
try:
int(metadata_dict['realm_id'])
except ValueError: # nocoverage
raise AssertionError("realm_id is not a number")
stripe_invoices = [invoice for invoice in stripe.Invoice.list(customer=stripe_customer.id)]
self.assertEqual(len(stripe_invoices), 0)
customer = Customer.objects.get(stripe_customer_id=stripe_customer.id, realm=user.realm)
plan = CustomerPlan.objects.get(
customer=customer, automanage_licenses=False,
price_per_license=8000, fixed_price=None, discount=None, billing_cycle_anchor=self.now,
billing_schedule=CustomerPlan.ANNUAL, invoiced_through=LicenseLedger.objects.first(),
next_invoice_date=free_trial_end_date, tier=CustomerPlan.STANDARD,
status=CustomerPlan.FREE_TRIAL)
LicenseLedger.objects.get(
plan=plan, is_renewal=True, event_time=self.now, licenses=123,
licenses_at_next_renewal=123)
audit_log_entries = list(RealmAuditLog.objects.filter(acting_user=user)
.values_list('event_type', 'event_time').order_by('id'))
self.assertEqual(audit_log_entries, [
(RealmAuditLog.STRIPE_CUSTOMER_CREATED, timestamp_to_datetime(stripe_customer.created)),
(RealmAuditLog.CUSTOMER_PLAN_CREATED, self.now),
# TODO: Check for REALM_PLAN_TYPE_CHANGED
# (RealmAuditLog.REALM_PLAN_TYPE_CHANGED, Kandra()),
])
self.assertEqual(ujson.loads(RealmAuditLog.objects.filter(
event_type=RealmAuditLog.CUSTOMER_PLAN_CREATED).values_list(
'extra_data', flat=True).first())['automanage_licenses'], False)
realm = get_realm("zulip")
self.assertEqual(realm.plan_type, Realm.STANDARD)
self.assertEqual(realm.max_invites, Realm.INVITES_STANDARD_REALM_DAILY_MAX)
with patch('corporate.views.timezone_now', return_value=self.now):
response = self.client_get("/billing/")
self.assert_not_in_success_response(['Pay annually'], response)
for substring in [
'Zulip Standard', 'Free Trial', str(self.seat_count),
'You are using', f'{self.seat_count} of {123} licenses',
'Your plan will be upgraded to', 'March 2, 2012',
f'{80 * 123:,.2f}', 'Billed by invoice',
]:
self.assert_in_response(substring, response)
with patch('corporate.lib.stripe.invoice_plan') as mocked:
invoice_plans_as_needed(self.next_month)
mocked.assert_not_called()
mocked.reset_mock()
customer_plan = CustomerPlan.objects.get(customer=customer)
self.assertEqual(customer_plan.status, CustomerPlan.FREE_TRIAL)
self.assertEqual(customer_plan.next_invoice_date, free_trial_end_date)
invoice_plans_as_needed(free_trial_end_date)
customer_plan.refresh_from_db()
realm.refresh_from_db()
self.assertEqual(customer_plan.status, CustomerPlan.ACTIVE)
self.assertEqual(customer_plan.next_invoice_date, add_months(free_trial_end_date, 12))
self.assertEqual(realm.plan_type, Realm.STANDARD)
invoices = [invoice for invoice in stripe.Invoice.list(customer=stripe_customer.id)]
self.assertEqual(len(invoices), 1)
invoice_params = {
"amount_due": 123 * 80 * 100, "amount_paid": 0, "amount_remaining": 123 * 80 * 100,
"auto_advance": True, "billing": "send_invoice", "collection_method": "send_invoice",
"customer_email": self.example_email("hamlet"), "discount": None, "paid": False, "status": "open",
"total": 123 * 80 * 100,
}
for key, value in invoice_params.items():
self.assertEqual(invoices[0].get(key), value)
invoice_items = [invoice_item for invoice_item in invoices[0].get("lines")]
self.assertEqual(len(invoice_items), 1)
invoice_item_params = {
"amount": 123 * 80 * 100, "description": "Zulip Standard - renewal",
"plan": None, "quantity": 123, "subscription": None, "discountable": False,
"period": {
"start": datetime_to_timestamp(free_trial_end_date),
"end": datetime_to_timestamp(add_months(free_trial_end_date, 12)),
},
}
for key, value in invoice_item_params.items():
self.assertEqual(invoice_items[0][key], value)
invoice_plans_as_needed(add_months(free_trial_end_date, 1))
invoices = [invoice for invoice in stripe.Invoice.list(customer=stripe_customer.id)]
self.assertEqual(len(invoices), 1)
invoice_plans_as_needed(add_months(free_trial_end_date, 10))
invoices = [invoice for invoice in stripe.Invoice.list(customer=stripe_customer.id)]
self.assertEqual(len(invoices), 1)
invoice_plans_as_needed(add_months(free_trial_end_date, 12))
invoices = [invoice for invoice in stripe.Invoice.list(customer=stripe_customer.id)]
self.assertEqual(len(invoices), 2)
@mock_stripe()
def test_billing_page_permissions(self, *mocks: Mock) -> None:
# Guest users can't access /upgrade page
self.login_user(self.example_user('polonius'))
response = self.client_get("/upgrade/", follow=True)
self.assertEqual(response.status_code, 404)
# Check that non-admins can access /upgrade via /billing, when there is no Customer object
self.login_user(self.example_user('hamlet'))
response = self.client_get("/billing/")
self.assertEqual(response.status_code, 302)
self.assertEqual('/upgrade/', response.url)
# Check that non-admins can sign up and pay
self.upgrade()
# Check that the non-admin hamlet can still access /billing
response = self.client_get("/billing/")
self.assert_in_success_response(["Your current plan is"], response)
# Check realm owners can access billing, even though they are not a billing admin
desdemona = self.example_user('desdemona')
desdemona.role = UserProfile.ROLE_REALM_OWNER
desdemona.save(update_fields=["role"])
self.login_user(self.example_user('desdemona'))
response = self.client_get("/billing/")
self.assert_in_success_response(["Your current plan is"], response)
# Check that member who is not a billing admin does not have access
self.login_user(self.example_user('cordelia'))
response = self.client_get("/billing/")
self.assert_in_success_response(["You must be an organization owner or a billing administrator"], response)
@mock_stripe(tested_timestamp_fields=["created"])
def test_upgrade_by_card_with_outdated_seat_count(self, *mocks: Mock) -> None:
hamlet = self.example_user('hamlet')
self.login_user(hamlet)
new_seat_count = 23
# Change the seat count while the user is going through the upgrade flow
with patch('corporate.lib.stripe.get_latest_seat_count', return_value=new_seat_count):
self.upgrade()
stripe_customer_id = Customer.objects.first().stripe_customer_id
# Check that the Charge used the old quantity, not new_seat_count
self.assertEqual(8000 * self.seat_count,
[charge for charge in stripe.Charge.list(customer=stripe_customer_id)][0].amount)
# Check that the invoice has a credit for the old amount and a charge for the new one
stripe_invoice = [invoice for invoice in stripe.Invoice.list(customer=stripe_customer_id)][0]
self.assertEqual([8000 * new_seat_count, -8000 * self.seat_count],
[item.amount for item in stripe_invoice.lines])
# Check LicenseLedger has the new amount
self.assertEqual(LicenseLedger.objects.first().licenses, new_seat_count)
self.assertEqual(LicenseLedger.objects.first().licenses_at_next_renewal, new_seat_count)
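    # A declined card leaves a Customer (but no CustomerPlan) behind; retrying
    # with a valid card reuses the same Stripe customer and completes the
    # upgrade normally.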
@mock_stripe()
def test_upgrade_where_first_card_fails(self, *mocks: Mock) -> None:
user = self.example_user("hamlet")
self.login_user(user)
# From https://stripe.com/docs/testing#cards: Attaching this card to
# a Customer object succeeds, but attempts to charge the customer fail.
with patch("corporate.lib.stripe.billing_logger.error") as mock_billing_logger:
self.upgrade(stripe_token=stripe_create_token('4000000000000341').id)
mock_billing_logger.assert_called()
# Check that we created a Customer object but no CustomerPlan
stripe_customer_id = Customer.objects.get(realm=get_realm('zulip')).stripe_customer_id
self.assertFalse(CustomerPlan.objects.exists())
# Check that we created a Customer in stripe, a failed Charge, and no Invoices or Invoice Items
self.assertTrue(stripe_get_customer(stripe_customer_id))
stripe_charges = [charge for charge in stripe.Charge.list(customer=stripe_customer_id)]
self.assertEqual(len(stripe_charges), 1)
self.assertEqual(stripe_charges[0].failure_code, 'card_declined')
# TODO: figure out what these actually are
self.assertFalse(stripe.Invoice.list(customer=stripe_customer_id))
self.assertFalse(stripe.InvoiceItem.list(customer=stripe_customer_id))
# Check that we correctly populated RealmAuditLog
audit_log_entries = list(RealmAuditLog.objects.filter(acting_user=user)
.values_list('event_type', flat=True).order_by('id'))
self.assertEqual(audit_log_entries, [RealmAuditLog.STRIPE_CUSTOMER_CREATED,
RealmAuditLog.STRIPE_CARD_CHANGED])
# Check that we did not update Realm
realm = get_realm("zulip")
self.assertNotEqual(realm.plan_type, Realm.STANDARD)
# Check that we still get redirected to /upgrade
response = self.client_get("/billing/")
self.assertEqual(response.status_code, 302)
self.assertEqual('/upgrade/', response.url)
# Try again, with a valid card, after they added a few users
with patch('corporate.lib.stripe.get_latest_seat_count', return_value=23):
with patch('corporate.views.get_latest_seat_count', return_value=23):
self.upgrade()
customer = Customer.objects.get(realm=get_realm('zulip'))
        # We cannot end up with two Customer objects for the realm, but check
        # that we also didn't change the existing stripe_customer_id.
self.assertEqual(customer.stripe_customer_id, stripe_customer_id)
# Check that we successfully added a CustomerPlan, and have the right number of licenses
plan = CustomerPlan.objects.get(customer=customer)
ledger_entry = LicenseLedger.objects.get(plan=plan)
self.assertEqual(ledger_entry.licenses, 23)
self.assertEqual(ledger_entry.licenses_at_next_renewal, 23)
# Check the Charges and Invoices in Stripe
self.assertEqual(8000 * 23, [charge for charge in
stripe.Charge.list(customer=stripe_customer_id)][0].amount)
stripe_invoice = [invoice for invoice in stripe.Invoice.list(customer=stripe_customer_id)][0]
self.assertEqual([8000 * 23, -8000 * 23],
[item.amount for item in stripe_invoice.lines])
# Check that we correctly populated RealmAuditLog
audit_log_entries = list(RealmAuditLog.objects.filter(acting_user=user)
.values_list('event_type', flat=True).order_by('id'))
# TODO: Test for REALM_PLAN_TYPE_CHANGED as the last entry
self.assertEqual(audit_log_entries, [RealmAuditLog.STRIPE_CUSTOMER_CREATED,
RealmAuditLog.STRIPE_CARD_CHANGED,
RealmAuditLog.STRIPE_CARD_CHANGED,
RealmAuditLog.CUSTOMER_PLAN_CREATED])
# Check that we correctly updated Realm
realm = get_realm("zulip")
self.assertEqual(realm.plan_type, Realm.STANDARD)
# Check that we can no longer access /upgrade
response = self.client_get("/upgrade/")
self.assertEqual(response.status_code, 302)
self.assertEqual('/billing/', response.url)
def test_upgrade_with_tampered_seat_count(self) -> None:
hamlet = self.example_user('hamlet')
self.login_user(hamlet)
response = self.upgrade(talk_to_stripe=False, salt='badsalt')
self.assert_json_error_contains(response, "Something went wrong. Please contact")
self.assertEqual(ujson.loads(response.content)['error_description'], 'tampered seat count')
def test_upgrade_race_condition(self) -> None:
hamlet = self.example_user('hamlet')
self.login_user(hamlet)
self.local_upgrade(self.seat_count, True, CustomerPlan.ANNUAL, 'token')
with patch("corporate.lib.stripe.billing_logger.warning") as mock_billing_logger:
with self.assertRaises(BillingError) as context:
self.local_upgrade(self.seat_count, True, CustomerPlan.ANNUAL, 'token')
self.assertEqual('subscribing with existing subscription', context.exception.description)
mock_billing_logger.assert_called()
def test_check_upgrade_parameters(self) -> None:
# Tests all the error paths except 'not enough licenses'
def check_error(error_description: str, upgrade_params: Mapping[str, Any],
del_args: Sequence[str] = []) -> None:
response = self.upgrade(talk_to_stripe=False, del_args=del_args, **upgrade_params)
self.assert_json_error_contains(response, "Something went wrong. Please contact")
self.assertEqual(ujson.loads(response.content)['error_description'], error_description)
hamlet = self.example_user('hamlet')
self.login_user(hamlet)
check_error('unknown billing_modality', {'billing_modality': 'invalid'})
check_error('unknown schedule', {'schedule': 'invalid'})
check_error('unknown license_management', {'license_management': 'invalid'})
check_error('autopay with no card', {}, del_args=['stripe_token'])
def test_upgrade_license_counts(self) -> None:
def check_min_licenses_error(invoice: bool, licenses: Optional[int], min_licenses_in_response: int,
upgrade_params: Dict[str, Any]={}) -> None:
if licenses is None:
del_args = ['licenses']
else:
del_args = []
upgrade_params['licenses'] = licenses
response = self.upgrade(invoice=invoice, talk_to_stripe=False,
del_args=del_args, **upgrade_params)
self.assert_json_error_contains(response, f"at least {min_licenses_in_response} users")
self.assertEqual(ujson.loads(response.content)['error_description'], 'not enough licenses')
def check_max_licenses_error(licenses: int) -> None:
response = self.upgrade(invoice=True, talk_to_stripe=False,
licenses=licenses)
self.assert_json_error_contains(response, f"with more than {MAX_INVOICED_LICENSES} licenses")
self.assertEqual(ujson.loads(response.content)['error_description'], 'too many licenses')
def check_success(invoice: bool, licenses: Optional[int], upgrade_params: Dict[str, Any]={}) -> None:
if licenses is None:
del_args = ['licenses']
else:
del_args = []
upgrade_params['licenses'] = licenses
with patch('corporate.views.process_initial_upgrade'):
response = self.upgrade(invoice=invoice, talk_to_stripe=False,
del_args=del_args, **upgrade_params)
self.assert_json_success(response)
hamlet = self.example_user('hamlet')
self.login_user(hamlet)
# Autopay with licenses < seat count
check_min_licenses_error(False, self.seat_count - 1, self.seat_count, {'license_management': 'manual'})
# Autopay with not setting licenses
check_min_licenses_error(False, None, self.seat_count, {'license_management': 'manual'})
# Invoice with licenses < MIN_INVOICED_LICENSES
check_min_licenses_error(True, MIN_INVOICED_LICENSES - 1, MIN_INVOICED_LICENSES)
# Invoice with licenses < seat count
with patch("corporate.views.MIN_INVOICED_LICENSES", 3):
check_min_licenses_error(True, 4, self.seat_count)
# Invoice with not setting licenses
check_min_licenses_error(True, None, MIN_INVOICED_LICENSES)
# Invoice exceeding max licenses
check_max_licenses_error(MAX_INVOICED_LICENSES + 1)
with patch("corporate.lib.stripe.get_latest_seat_count", return_value=MAX_INVOICED_LICENSES + 5):
check_max_licenses_error(MAX_INVOICED_LICENSES + 5)
# Autopay with automatic license_management
check_success(False, None)
# Autopay with automatic license_management, should just ignore the licenses entry
check_success(False, self.seat_count)
# Autopay
check_success(False, self.seat_count, {'license_management': 'manual'})
# Autopay has no limit on max licenses
check_success(False, MAX_INVOICED_LICENSES + 1, {'license_management': 'manual'})
# Invoice
check_success(True, self.seat_count + MIN_INVOICED_LICENSES)
# Invoice
check_success(True, MAX_INVOICED_LICENSES)
@patch("corporate.lib.stripe.billing_logger.error")
def test_upgrade_with_uncaught_exception(self, mock_: Mock) -> None:
hamlet = self.example_user('hamlet')
self.login_user(hamlet)
with patch("corporate.views.process_initial_upgrade", side_effect=Exception):
response = self.upgrade(talk_to_stripe=False)
self.assert_json_error_contains(response, "Something went wrong. Please contact desdemona+admin@zulip.com.")
self.assertEqual(ujson.loads(response.content)['error_description'], 'uncaught exception during upgrade')
def test_request_sponsorship(self) -> None:
user = self.example_user("hamlet")
self.assertIsNone(get_customer_by_realm(user.realm))
self.login_user(user)
data = {
"organization-type": ujson.dumps("Open-source"),
"website": ujson.dumps("https://infinispan.org/"),
"description": ujson.dumps("Infinispan is a distributed in-memory key/value data store with optional schema."),
}
response = self.client_post("/json/billing/sponsorship", data)
self.assert_json_success(response)
customer = get_customer_by_realm(user.realm)
        assert customer is not None
self.assertEqual(customer.sponsorship_pending, True)
from django.core.mail import outbox
self.assertEqual(len(outbox), 1)
for message in outbox:
self.assertEqual(len(message.to), 1)
self.assertEqual(message.to[0], "desdemona+admin@zulip.com")
self.assertEqual(message.subject, "Sponsorship request (Open-source) for zulip")
self.assertEqual(message.reply_to, ['hamlet@zulip.com'])
self.assertIn('Zulip sponsorship <noreply-', message.from_email)
self.assertIn("Requested by: King Hamlet (Member)", message.body)
self.assertIn("Support URL: http://zulip.testserver/activity/support?q=zulip", message.body)
self.assertIn("Website: https://infinispan.org", message.body)
self.assertIn("Organization type: Open-source", message.body)
self.assertIn("Description:\nInfinispan is a distributed in-memory", message.body)
response = self.client_get("/upgrade/")
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/billing/")
response = self.client_get("/billing/")
self.assert_in_success_response(["Your organization has requested sponsored or discounted hosting."], response)
self.login_user(self.example_user("othello"))
response = self.client_get("/billing/")
self.assert_in_success_response(["You must be an organization owner or a billing administrator to view this page."], response)
def test_redirect_for_billing_home(self) -> None:
user = self.example_user("iago")
self.login_user(user)
# No Customer yet; check that we are redirected to /upgrade
response = self.client_get("/billing/")
self.assertEqual(response.status_code, 302)
self.assertEqual('/upgrade/', response.url)
# Customer, but no CustomerPlan; check that we are still redirected to /upgrade
Customer.objects.create(realm=user.realm, stripe_customer_id='cus_123')
response = self.client_get("/billing/")
self.assertEqual(response.status_code, 302)
self.assertEqual('/upgrade/', response.url)
def test_redirect_for_upgrade_page(self) -> None:
user = self.example_user("iago")
self.login_user(user)
# No Customer yet;
response = self.client_get("/upgrade/")
self.assertEqual(response.status_code, 200)
# Customer, but no CustomerPlan;
customer = Customer.objects.create(realm=user.realm, stripe_customer_id='cus_123')
response = self.client_get("/upgrade/")
self.assertEqual(response.status_code, 200)
CustomerPlan.objects.create(customer=customer, billing_cycle_anchor=timezone_now(),
billing_schedule=CustomerPlan.ANNUAL, tier=CustomerPlan.STANDARD)
response = self.client_get("/upgrade/")
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/billing/")
with self.settings(FREE_TRIAL_DAYS=30):
response = self.client_get("/upgrade/")
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/billing/")
response = self.client_get("/upgrade/?onboarding=true")
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/billing/?onboarding=true")
def test_get_latest_seat_count(self) -> None:
realm = get_realm("zulip")
initial_count = get_latest_seat_count(realm)
user1 = UserProfile.objects.create(realm=realm, email='user1@zulip.com',
delivery_email='user1@zulip.com')
user2 = UserProfile.objects.create(realm=realm, email='user2@zulip.com',
delivery_email='user2@zulip.com')
self.assertEqual(get_latest_seat_count(realm), initial_count + 2)
# Test that bots aren't counted
user1.is_bot = True
user1.save(update_fields=['is_bot'])
self.assertEqual(get_latest_seat_count(realm), initial_count + 1)
# Test that inactive users aren't counted
do_deactivate_user(user2)
self.assertEqual(get_latest_seat_count(realm), initial_count)
# Test guests
# Adding a guest to a realm with a lot of members shouldn't change anything
UserProfile.objects.create(realm=realm, email='user3@zulip.com', delivery_email='user3@zulip.com',
role=UserProfile.ROLE_GUEST)
self.assertEqual(get_latest_seat_count(realm), initial_count)
# Test 1 member and 5 guests
realm = Realm.objects.create(string_id='second', name='second')
UserProfile.objects.create(realm=realm, email='member@second.com',
delivery_email='member@second.com')
for i in range(5):
UserProfile.objects.create(realm=realm, email=f'guest{i}@second.com',
delivery_email=f'guest{i}@second.com',
role=UserProfile.ROLE_GUEST)
self.assertEqual(get_latest_seat_count(realm), 1)
# Test 1 member and 6 guests
UserProfile.objects.create(realm=realm, email='guest5@second.com',
delivery_email='guest5@second.com',
role=UserProfile.ROLE_GUEST)
self.assertEqual(get_latest_seat_count(realm), 2)
def test_sign_string(self) -> None:
string = "abc"
signed_string, salt = sign_string(string)
self.assertEqual(string, unsign_string(signed_string, salt))
with self.assertRaises(signing.BadSignature):
unsign_string(signed_string, "randomsalt")
# This tests both the payment method string, and also is a very basic
# test that the various upgrade paths involving non-standard payment
# histories don't throw errors
@mock_stripe()
def test_payment_method_string(self, *mocks: Mock) -> None:
pass
# If you signup with a card, we should show your card as the payment method
# Already tested in test_initial_upgrade
# If you pay by invoice, your payment method should be
# "Billed by invoice", even if you have a card on file
# user = self.example_user("hamlet")
# do_create_stripe_customer(user, stripe_create_token().id)
# self.login_user(user)
# self.upgrade(invoice=True)
# stripe_customer = stripe_get_customer(Customer.objects.get(realm=user.realm).stripe_customer_id)
# self.assertEqual('Billed by invoice', payment_method_string(stripe_customer))
# If you signup with a card and then downgrade, we still have your
# card on file, and should show it
# TODO
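    # attach_discount_to_realm both before and after a Stripe customer exists:
    # an 85% discount drops the per-license price from $80 to $12, a later 25%
    # discount to $60, and CustomerPlan records the discounted price.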
@mock_stripe()
def test_attach_discount_to_realm(self, *mocks: Mock) -> None:
# Attach discount before Stripe customer exists
user = self.example_user('hamlet')
attach_discount_to_realm(user.realm, Decimal(85))
self.login_user(user)
# Check that the discount appears in page_params
self.assert_in_success_response(['85'], self.client_get("/upgrade/"))
# Check that the customer was charged the discounted amount
self.upgrade()
stripe_customer_id = Customer.objects.values_list('stripe_customer_id', flat=True).first()
self.assertEqual(1200 * self.seat_count,
[charge for charge in stripe.Charge.list(customer=stripe_customer_id)][0].amount)
stripe_invoice = [invoice for invoice in stripe.Invoice.list(customer=stripe_customer_id)][0]
self.assertEqual([1200 * self.seat_count, -1200 * self.seat_count],
[item.amount for item in stripe_invoice.lines])
# Check CustomerPlan reflects the discount
plan = CustomerPlan.objects.get(price_per_license=1200, discount=Decimal(85))
# Attach discount to existing Stripe customer
plan.status = CustomerPlan.ENDED
plan.save(update_fields=['status'])
attach_discount_to_realm(user.realm, Decimal(25))
process_initial_upgrade(user, self.seat_count, True, CustomerPlan.ANNUAL, stripe_create_token().id)
self.assertEqual(6000 * self.seat_count,
[charge for charge in stripe.Charge.list(customer=stripe_customer_id)][0].amount)
stripe_invoice = [invoice for invoice in stripe.Invoice.list(customer=stripe_customer_id)][0]
self.assertEqual([6000 * self.seat_count, -6000 * self.seat_count],
[item.amount for item in stripe_invoice.lines])
plan = CustomerPlan.objects.get(price_per_license=6000, discount=Decimal(25))
def test_get_discount_for_realm(self) -> None:
user = self.example_user('hamlet')
self.assertEqual(get_discount_for_realm(user.realm), None)
attach_discount_to_realm(user.realm, Decimal(85))
self.assertEqual(get_discount_for_realm(user.realm), 85)
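    # Replacing the payment source: a card that cannot be attached is rejected
    # outright, a card that attaches but cannot be charged still becomes the
    # source (leaving the open invoice unpaid), and a fully valid card replaces
    # the previous one and pays off the open invoices.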
@mock_stripe()
def test_replace_payment_source(self, *mocks: Mock) -> None:
user = self.example_user("hamlet")
self.login_user(user)
self.upgrade()
# Create an open invoice
stripe_customer_id = Customer.objects.first().stripe_customer_id
stripe.InvoiceItem.create(amount=5000, currency='usd', customer=stripe_customer_id)
stripe_invoice = stripe.Invoice.create(customer=stripe_customer_id)
stripe.Invoice.finalize_invoice(stripe_invoice)
RealmAuditLog.objects.filter(event_type=RealmAuditLog.STRIPE_CARD_CHANGED).delete()
# Replace with an invalid card
stripe_token = stripe_create_token(card_number='4000000000009987').id
with patch("corporate.lib.stripe.billing_logger.error") as mock_billing_logger:
with patch("stripe.Invoice.list") as mock_invoice_list:
response = self.client_post("/json/billing/sources/change",
{'stripe_token': ujson.dumps(stripe_token)})
mock_billing_logger.assert_called()
mock_invoice_list.assert_not_called()
self.assertEqual(ujson.loads(response.content)['error_description'], 'card error')
self.assert_json_error_contains(response, 'Your card was declined')
for stripe_source in stripe_get_customer(stripe_customer_id).sources:
assert isinstance(stripe_source, stripe.Card)
self.assertEqual(stripe_source.last4, '4242')
self.assertFalse(RealmAuditLog.objects.filter(event_type=RealmAuditLog.STRIPE_CARD_CHANGED).exists())
# Replace with a card that's valid, but charging the card fails
stripe_token = stripe_create_token(card_number='4000000000000341').id
with patch("corporate.lib.stripe.billing_logger.error") as mock_billing_logger:
response = self.client_post("/json/billing/sources/change",
{'stripe_token': ujson.dumps(stripe_token)})
mock_billing_logger.assert_called()
self.assertEqual(ujson.loads(response.content)['error_description'], 'card error')
self.assert_json_error_contains(response, 'Your card was declined')
for stripe_source in stripe_get_customer(stripe_customer_id).sources:
assert isinstance(stripe_source, stripe.Card)
self.assertEqual(stripe_source.last4, '0341')
self.assertEqual(len(list(stripe.Invoice.list(customer=stripe_customer_id, status='open'))), 1)
self.assertEqual(1, RealmAuditLog.objects.filter(
event_type=RealmAuditLog.STRIPE_CARD_CHANGED).count())
# Replace with a valid card
stripe_token = stripe_create_token(card_number='5555555555554444').id
response = self.client_post("/json/billing/sources/change",
{'stripe_token': ujson.dumps(stripe_token)})
self.assert_json_success(response)
number_of_sources = 0
for stripe_source in stripe_get_customer(stripe_customer_id).sources:
assert isinstance(stripe_source, stripe.Card)
self.assertEqual(stripe_source.last4, '4444')
number_of_sources += 1
# Verify that we replaced the previous card, rather than adding a new one
self.assertEqual(number_of_sources, 1)
# Ideally we'd also test that we don't pay invoices with billing=='send_invoice'
for stripe_invoice in stripe.Invoice.list(customer=stripe_customer_id):
self.assertEqual(stripe_invoice.status, 'paid')
self.assertEqual(2, RealmAuditLog.objects.filter(
event_type=RealmAuditLog.STRIPE_CARD_CHANGED).count())
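    # Downgrade at end of cycle: LicenseLedger rows keep accruing for the rest
    # of the cycle and the extra users are invoiced once; after the cycle ends
    # the plan is ENDED, the realm drops to LIMITED, and no further ledger rows
    # or invoices are produced.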
@patch("corporate.lib.stripe.billing_logger.info")
def test_downgrade(self, mock_: Mock) -> None:
user = self.example_user("hamlet")
self.login_user(user)
with patch("corporate.lib.stripe.timezone_now", return_value=self.now):
self.local_upgrade(self.seat_count, True, CustomerPlan.ANNUAL, 'token')
response = self.client_post("/json/billing/plan/change",
{'status': CustomerPlan.DOWNGRADE_AT_END_OF_CYCLE})
self.assert_json_success(response)
# Verify that we still write LicenseLedger rows during the remaining
# part of the cycle
with patch("corporate.lib.stripe.get_latest_seat_count", return_value=20):
update_license_ledger_if_needed(user.realm, self.now)
self.assertEqual(LicenseLedger.objects.order_by('-id').values_list(
'licenses', 'licenses_at_next_renewal').first(), (20, 20))
# Verify that we invoice them for the additional users
from stripe import Invoice
Invoice.create = lambda **args: None # type: ignore[assignment] # cleaner than mocking
Invoice.finalize_invoice = lambda *args: None # type: ignore[assignment] # cleaner than mocking
with patch("stripe.InvoiceItem.create") as mocked:
invoice_plans_as_needed(self.next_month)
mocked.assert_called_once()
mocked.reset_mock()
# Check that we downgrade properly if the cycle is over
with patch("corporate.lib.stripe.get_latest_seat_count", return_value=30):
update_license_ledger_if_needed(user.realm, self.next_year)
self.assertEqual(get_realm('zulip').plan_type, Realm.LIMITED)
self.assertEqual(CustomerPlan.objects.first().status, CustomerPlan.ENDED)
self.assertEqual(LicenseLedger.objects.order_by('-id').values_list(
'licenses', 'licenses_at_next_renewal').first(), (20, 20))
# Verify that we don't write LicenseLedger rows once we've downgraded
with patch("corporate.lib.stripe.get_latest_seat_count", return_value=40):
update_license_ledger_if_needed(user.realm, self.next_year)
self.assertEqual(LicenseLedger.objects.order_by('-id').values_list(
'licenses', 'licenses_at_next_renewal').first(), (20, 20))
# Verify that we call invoice_plan once more after cycle end but
# don't invoice them for users added after the cycle end
self.assertIsNotNone(CustomerPlan.objects.first().next_invoice_date)
with patch("stripe.InvoiceItem.create") as mocked:
invoice_plans_as_needed(self.next_year + timedelta(days=32))
mocked.assert_not_called()
mocked.reset_mock()
# Check that we updated next_invoice_date in invoice_plan
self.assertIsNone(CustomerPlan.objects.first().next_invoice_date)
# Check that we don't call invoice_plan after that final call
with patch("corporate.lib.stripe.get_latest_seat_count", return_value=50):
update_license_ledger_if_needed(user.realm, self.next_year + timedelta(days=80))
with patch("corporate.lib.stripe.invoice_plan") as mocked:
invoice_plans_as_needed(self.next_year + timedelta(days=400))
mocked.assert_not_called()
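    # Switching from monthly to annual billing with automatic license
    # management: at the end of the monthly cycle the monthly plan is ENDED and
    # a new annual plan takes over, carrying the license counts forward and
    # invoicing the annual renewal plus any additional licenses.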
@mock_stripe()
@patch("corporate.lib.stripe.billing_logger.info")
def test_switch_from_monthly_plan_to_annual_plan_for_automatic_license_management(self, *mocks: Mock) -> None:
user = self.example_user("hamlet")
self.login_user(user)
with patch('corporate.lib.stripe.timezone_now', return_value=self.now):
self.upgrade(schedule='monthly')
monthly_plan = get_current_plan_by_realm(user.realm)
        assert monthly_plan is not None
self.assertEqual(monthly_plan.automanage_licenses, True)
self.assertEqual(monthly_plan.billing_schedule, CustomerPlan.MONTHLY)
response = self.client_post("/json/billing/plan/change",
{'status': CustomerPlan.SWITCH_TO_ANNUAL_AT_END_OF_CYCLE})
self.assert_json_success(response)
monthly_plan.refresh_from_db()
self.assertEqual(monthly_plan.status, CustomerPlan.SWITCH_TO_ANNUAL_AT_END_OF_CYCLE)
with patch('corporate.views.timezone_now', return_value=self.now):
response = self.client_get("/billing/")
self.assert_in_success_response(["be switched from monthly to annual billing on <strong>February 2, 2012"], response)
with patch("corporate.lib.stripe.get_latest_seat_count", return_value=20):
update_license_ledger_if_needed(user.realm, self.now)
self.assertEqual(LicenseLedger.objects.filter(plan=monthly_plan).count(), 2)
self.assertEqual(LicenseLedger.objects.order_by('-id').values_list(
'licenses', 'licenses_at_next_renewal').first(), (20, 20))
with patch('corporate.lib.stripe.timezone_now', return_value=self.next_month):
with patch("corporate.lib.stripe.get_latest_seat_count", return_value=25):
update_license_ledger_if_needed(user.realm, self.next_month)
self.assertEqual(LicenseLedger.objects.filter(plan=monthly_plan).count(), 2)
customer = get_customer_by_realm(user.realm)
assert(customer is not None)
self.assertEqual(CustomerPlan.objects.filter(customer=customer).count(), 2)
monthly_plan.refresh_from_db()
self.assertEqual(monthly_plan.status, CustomerPlan.ENDED)
self.assertEqual(monthly_plan.next_invoice_date, self.next_month)
annual_plan = get_current_plan_by_realm(user.realm)
assert(annual_plan is not None)
self.assertEqual(annual_plan.status, CustomerPlan.ACTIVE)
self.assertEqual(annual_plan.billing_schedule, CustomerPlan.ANNUAL)
self.assertEqual(annual_plan.invoicing_status, CustomerPlan.INITIAL_INVOICE_TO_BE_SENT)
self.assertEqual(annual_plan.billing_cycle_anchor, self.next_month)
self.assertEqual(annual_plan.next_invoice_date, self.next_month)
self.assertEqual(annual_plan.invoiced_through, None)
annual_ledger_entries = LicenseLedger.objects.filter(plan=annual_plan).order_by('id')
self.assertEqual(len(annual_ledger_entries), 2)
self.assertEqual(annual_ledger_entries[0].is_renewal, True)
self.assertEqual(annual_ledger_entries.values_list('licenses', 'licenses_at_next_renewal')[0], (20, 20))
self.assertEqual(annual_ledger_entries[1].is_renewal, False)
self.assertEqual(annual_ledger_entries.values_list('licenses', 'licenses_at_next_renewal')[1], (25, 25))
audit_log = RealmAuditLog.objects.get(event_type=RealmAuditLog.CUSTOMER_SWITCHED_FROM_MONTHLY_TO_ANNUAL_PLAN)
self.assertEqual(audit_log.realm, user.realm)
self.assertEqual(ujson.loads(audit_log.extra_data)["monthly_plan_id"], monthly_plan.id)
self.assertEqual(ujson.loads(audit_log.extra_data)["annual_plan_id"], annual_plan.id)
invoice_plans_as_needed(self.next_month)
annual_ledger_entries = LicenseLedger.objects.filter(plan=annual_plan).order_by('id')
self.assertEqual(len(annual_ledger_entries), 2)
annual_plan.refresh_from_db()
self.assertEqual(annual_plan.invoicing_status, CustomerPlan.DONE)
self.assertEqual(annual_plan.invoiced_through, annual_ledger_entries[1])
self.assertEqual(annual_plan.billing_cycle_anchor, self.next_month)
self.assertEqual(annual_plan.next_invoice_date, add_months(self.next_month, 1))
monthly_plan.refresh_from_db()
self.assertEqual(monthly_plan.next_invoice_date, None)
invoices = [invoice for invoice in stripe.Invoice.list(customer=customer.stripe_customer_id)]
self.assertEqual(len(invoices), 3)
annual_plan_invoice_items = [invoice_item for invoice_item in invoices[0].get("lines")]
self.assertEqual(len(annual_plan_invoice_items), 2)
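# First line item: the 5 licenses added at the annual anchor (20 -> 25), each billed the full annual rate of $80; Stripe amounts are in cents.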
annual_plan_invoice_item_params = {
"amount": 5 * 80 * 100,
"description": "Additional license (Feb 2, 2012 - Feb 2, 2013)",
"plan": None, "quantity": 5, "subscription": None, "discountable": False,
"period": {
"start": datetime_to_timestamp(self.next_month),
"end": datetime_to_timestamp(add_months(self.next_month, 12))
},
}
for key, value in annual_plan_invoice_item_params.items():
self.assertEqual(annual_plan_invoice_items[0][key], value)
annual_plan_invoice_item_params = {
"amount": 20 * 80 * 100, "description": "Zulip Standard - renewal",
"plan": None, "quantity": 20, "subscription": None, "discountable": False,
"period": {
"start": datetime_to_timestamp(self.next_month),
"end": datetime_to_timestamp(add_months(self.next_month, 12))
},
}
for key, value in annual_plan_invoice_item_params.items():
self.assertEqual(annual_plan_invoice_items[1][key], value)
monthly_plan_invoice_items = [invoice_item for invoice_item in invoices[1].get("lines")]
self.assertEqual(len(monthly_plan_invoice_items), 1)
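# One line item on the monthly plan's final invoice: the 14 licenses added during its last cycle, at the monthly rate of $8/license (in cents).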
monthly_plan_invoice_item_params = {
"amount": 14 * 8 * 100,
"description": "Additional license (Jan 2, 2012 - Feb 2, 2012)",
"plan": None, "quantity": 14, "subscription": None, "discountable": False,
"period": {
"start": datetime_to_timestamp(self.now),
"end": datetime_to_timestamp(self.next_month)
},
}
for key, value in monthly_plan_invoice_item_params.items():
self.assertEqual(monthly_plan_invoice_items[0][key], value)
with patch("corporate.lib.stripe.get_latest_seat_count", return_value=30):
update_license_ledger_if_needed(user.realm, add_months(self.next_month, 1))
invoice_plans_as_needed(add_months(self.next_month, 1))
invoices = [invoice for invoice in stripe.Invoice.list(customer=customer.stripe_customer_id)]
self.assertEqual(len(invoices), 4)
monthly_plan_invoice_items = [invoice_item for invoice_item in invoices[0].get("lines")]
self.assertEqual(len(monthly_plan_invoice_items), 1)
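# 5 more licenses (25 -> 30), prorated over the remaining 337 of 366 days of the annual cycle: int(8000 * 337 / 366 + .5) == 7366 cents per license.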
monthly_plan_invoice_item_params = {
"amount": 5 * 7366,
"description": "Additional license (Mar 2, 2012 - Feb 2, 2013)",
"plan": None, "quantity": 5, "subscription": None, "discountable": False,
"period": {
"start": datetime_to_timestamp(add_months(self.next_month, 1)),
"end": datetime_to_timestamp(add_months(self.next_month, 12))
},
}
for key, value in monthly_plan_invoice_item_params.items():
self.assertEqual(monthly_plan_invoice_items[0][key], value)
invoice_plans_as_needed(add_months(self.now, 13))
invoices = [invoice for invoice in stripe.Invoice.list(customer=customer.stripe_customer_id)]
self.assertEqual(len(invoices), 5)
annual_plan_invoice_items = [invoice_item for invoice_item in invoices[0].get("lines")]
self.assertEqual(len(annual_plan_invoice_items), 1)
annual_plan_invoice_item_params = {
"amount": 30 * 80 * 100,
"description": "Zulip Standard - renewal",
"plan": None, "quantity": 30, "subscription": None, "discountable": False,
"period": {
"start": datetime_to_timestamp(add_months(self.next_month, 12)),
"end": datetime_to_timestamp(add_months(self.next_month, 24))
},
}
for key, value in annual_plan_invoice_item_params.items():
self.assertEqual(annual_plan_invoice_items[0][key], value)
@mock_stripe()
@patch("corporate.lib.stripe.billing_logger.info")
def test_switch_from_monthly_plan_to_annual_plan_for_manual_license_management(self, *mocks: Mock) -> None:
user = self.example_user("hamlet")
num_licenses = 35
self.login_user(user)
with patch('corporate.lib.stripe.timezone_now', return_value=self.now):
self.upgrade(schedule='monthly', license_management='manual', licenses=num_licenses)
monthly_plan = get_current_plan_by_realm(user.realm)
assert(monthly_plan is not None)
self.assertEqual(monthly_plan.automanage_licenses, False)
self.assertEqual(monthly_plan.billing_schedule, CustomerPlan.MONTHLY)
response = self.client_post("/json/billing/plan/change",
{'status': CustomerPlan.SWITCH_TO_ANNUAL_AT_END_OF_CYCLE})
self.assert_json_success(response)
monthly_plan.refresh_from_db()
self.assertEqual(monthly_plan.status, CustomerPlan.SWITCH_TO_ANNUAL_AT_END_OF_CYCLE)
with patch('corporate.views.timezone_now', return_value=self.now):
response = self.client_get("/billing/")
self.assert_in_success_response(["be switched from monthly to annual billing on <strong>February 2, 2012"], response)
invoice_plans_as_needed(self.next_month)
self.assertEqual(LicenseLedger.objects.filter(plan=monthly_plan).count(), 1)
customer = get_customer_by_realm(user.realm)
assert(customer is not None)
self.assertEqual(CustomerPlan.objects.filter(customer=customer).count(), 2)
monthly_plan.refresh_from_db()
self.assertEqual(monthly_plan.status, CustomerPlan.ENDED)
self.assertEqual(monthly_plan.next_invoice_date, None)
annual_plan = get_current_plan_by_realm(user.realm)
assert(annual_plan is not None)
self.assertEqual(annual_plan.status, CustomerPlan.ACTIVE)
self.assertEqual(annual_plan.billing_schedule, CustomerPlan.ANNUAL)
self.assertEqual(annual_plan.invoicing_status, CustomerPlan.INITIAL_INVOICE_TO_BE_SENT)
self.assertEqual(annual_plan.billing_cycle_anchor, self.next_month)
self.assertEqual(annual_plan.next_invoice_date, self.next_month)
annual_ledger_entries = LicenseLedger.objects.filter(plan=annual_plan).order_by('id')
self.assertEqual(len(annual_ledger_entries), 1)
self.assertEqual(annual_ledger_entries[0].is_renewal, True)
self.assertEqual(annual_ledger_entries.values_list('licenses', 'licenses_at_next_renewal')[0], (num_licenses, num_licenses))
self.assertEqual(annual_plan.invoiced_through, None)
# First call of invoice_plans_as_needed creates the new plan. Second call
# calls invoice_plan on the newly created plan.
invoice_plans_as_needed(self.next_month + timedelta(days=1))
annual_plan.refresh_from_db()
self.assertEqual(annual_plan.invoiced_through, annual_ledger_entries[0])
self.assertEqual(annual_plan.next_invoice_date, add_months(self.next_month, 12))
self.assertEqual(annual_plan.invoicing_status, CustomerPlan.DONE)
invoices = [invoice for invoice in stripe.Invoice.list(customer=customer.stripe_customer_id)]
self.assertEqual(len(invoices), 2)
annual_plan_invoice_items = [invoice_item for invoice_item in invoices[0].get("lines")]
self.assertEqual(len(annual_plan_invoice_items), 1)
annual_plan_invoice_item_params = {
"amount": num_licenses * 80 * 100, "description": "Zulip Standard - renewal",
"plan": None, "quantity": num_licenses, "subscription": None, "discountable": False,
"period": {
"start": datetime_to_timestamp(self.next_month),
"end": datetime_to_timestamp(add_months(self.next_month, 12))
},
}
for key, value in annual_plan_invoice_item_params.items():
self.assertEqual(annual_plan_invoice_items[0][key], value)
with patch('corporate.lib.stripe.invoice_plan') as m:
invoice_plans_as_needed(add_months(self.now, 2))
m.assert_not_called()
invoice_plans_as_needed(add_months(self.now, 13))
invoices = [invoice for invoice in stripe.Invoice.list(customer=customer.stripe_customer_id)]
self.assertEqual(len(invoices), 3)
annual_plan_invoice_items = [invoice_item for invoice_item in invoices[0].get("lines")]
self.assertEqual(len(annual_plan_invoice_items), 1)
annual_plan_invoice_item_params = {
"amount": num_licenses * 80 * 100,
"description": "Zulip Standard - renewal",
"plan": None, "quantity": num_licenses, "subscription": None, "discountable": False,
"period": {
"start": datetime_to_timestamp(add_months(self.next_month, 12)),
"end": datetime_to_timestamp(add_months(self.next_month, 24))
},
}
for key, value in annual_plan_invoice_item_params.items():
self.assertEqual(annual_plan_invoice_items[0][key], value)
@patch("corporate.lib.stripe.billing_logger.info")
def test_reupgrade_after_plan_status_changed_to_downgrade_at_end_of_cycle(self, mock_: Mock) -> None:
user = self.example_user("hamlet")
self.login_user(user)
with patch("corporate.lib.stripe.timezone_now", return_value=self.now):
self.local_upgrade(self.seat_count, True, CustomerPlan.ANNUAL, 'token')
response = self.client_post("/json/billing/plan/change",
{'status': CustomerPlan.DOWNGRADE_AT_END_OF_CYCLE})
self.assert_json_success(response)
self.assertEqual(CustomerPlan.objects.first().status, CustomerPlan.DOWNGRADE_AT_END_OF_CYCLE)
response = self.client_post("/json/billing/plan/change",
{'status': CustomerPlan.ACTIVE})
self.assert_json_success(response)
self.assertEqual(CustomerPlan.objects.first().status, CustomerPlan.ACTIVE)
@patch("corporate.lib.stripe.billing_logger.info")
@patch("stripe.Invoice.create")
@patch("stripe.Invoice.finalize_invoice")
@patch("stripe.InvoiceItem.create")
def test_downgrade_during_invoicing(self, *mocks: Mock) -> None:
# The difference between this test and test_downgrade is that
# CustomerPlan.status is DOWNGRADE_AT_END_OF_CYCLE rather than ENDED
# when we call invoice_plans_as_needed
# This test is essentially checking that we call make_end_of_cycle_updates_if_needed
# during the invoicing process.
user = self.example_user("hamlet")
self.login_user(user)
with patch("corporate.lib.stripe.timezone_now", return_value=self.now):
self.local_upgrade(self.seat_count, True, CustomerPlan.ANNUAL, 'token')
self.client_post("/json/billing/plan/change",
{'status': CustomerPlan.DOWNGRADE_AT_END_OF_CYCLE})
plan = CustomerPlan.objects.first()
self.assertIsNotNone(plan.next_invoice_date)
self.assertEqual(plan.status, CustomerPlan.DOWNGRADE_AT_END_OF_CYCLE)
invoice_plans_as_needed(self.next_year)
plan = CustomerPlan.objects.first()
self.assertIsNone(plan.next_invoice_date)
self.assertEqual(plan.status, CustomerPlan.ENDED)
@patch("corporate.lib.stripe.billing_logger.info")
def test_downgrade_free_trial(self, mock_: Mock) -> None:
user = self.example_user("hamlet")
free_trial_end_date = self.now + timedelta(days=60)
with self.settings(FREE_TRIAL_DAYS=60):
with patch("corporate.lib.stripe.timezone_now", return_value=self.now):
self.local_upgrade(self.seat_count, True, CustomerPlan.ANNUAL, 'token')
plan = CustomerPlan.objects.get()
self.assertEqual(plan.next_invoice_date, free_trial_end_date)
self.assertEqual(get_realm('zulip').plan_type, Realm.STANDARD)
self.assertEqual(plan.status, CustomerPlan.FREE_TRIAL)
# Add some extra users before the realm is deactivated
with patch("corporate.lib.stripe.get_latest_seat_count", return_value=21):
update_license_ledger_if_needed(user.realm, self.now)
last_ledger_entry = LicenseLedger.objects.order_by('id').last()
self.assertEqual(last_ledger_entry.licenses, 21)
self.assertEqual(last_ledger_entry.licenses_at_next_renewal, 21)
self.login_user(user)
self.client_post("/json/billing/plan/change", {'status': CustomerPlan.ENDED})
plan.refresh_from_db()
self.assertEqual(get_realm('zulip').plan_type, Realm.LIMITED)
self.assertEqual(plan.status, CustomerPlan.ENDED)
self.assertEqual(plan.invoiced_through, last_ledger_entry)
self.assertIsNone(plan.next_invoice_date)
self.login_user(user)
response = self.client_get("/billing/")
self.assert_in_success_response(["Your organization is on the <b>Zulip Free</b>"], response)
# The extra users added in the final month are not charged
with patch("corporate.lib.stripe.invoice_plan") as mocked:
invoice_plans_as_needed(self.next_month)
mocked.assert_not_called()
# The plan is not renewed after a year
with patch("corporate.lib.stripe.invoice_plan") as mocked:
invoice_plans_as_needed(self.next_year)
mocked.assert_not_called()
@patch("corporate.lib.stripe.billing_logger.warning")
@patch("corporate.lib.stripe.billing_logger.info")
def test_reupgrade_by_billing_admin_after_downgrade(self, *mocks: Mock) -> None:
user = self.example_user("hamlet")
with patch("corporate.lib.stripe.timezone_now", return_value=self.now):
self.local_upgrade(self.seat_count, True, CustomerPlan.ANNUAL, 'token')
self.login_user(user)
self.client_post("/json/billing/plan/change",
{'status': CustomerPlan.DOWNGRADE_AT_END_OF_CYCLE})
with self.assertRaises(BillingError) as context:
with patch("corporate.lib.stripe.timezone_now", return_value=self.now):
self.local_upgrade(self.seat_count, True, CustomerPlan.ANNUAL, 'token')
self.assertEqual(context.exception.description, "subscribing with existing subscription")
invoice_plans_as_needed(self.next_year)
response = self.client_get("/billing/")
self.assert_in_success_response(["Your organization is on the <b>Zulip Free</b>"], response)
with patch("corporate.lib.stripe.timezone_now", return_value=self.next_year):
self.local_upgrade(self.seat_count, True, CustomerPlan.ANNUAL, 'token')
self.assertEqual(Customer.objects.count(), 1)
self.assertEqual(CustomerPlan.objects.count(), 2)
current_plan = CustomerPlan.objects.all().order_by("id").last()
next_invoice_date = add_months(self.next_year, 1)
self.assertEqual(current_plan.next_invoice_date, next_invoice_date)
self.assertEqual(get_realm('zulip').plan_type, Realm.STANDARD)
self.assertEqual(current_plan.status, CustomerPlan.ACTIVE)
old_plan = CustomerPlan.objects.all().order_by("id").first()
self.assertEqual(old_plan.next_invoice_date, None)
self.assertEqual(old_plan.status, CustomerPlan.ENDED)
@patch("corporate.lib.stripe.billing_logger.info")
def test_deactivate_realm(self, mock_: Mock) -> None:
user = self.example_user("hamlet")
with patch("corporate.lib.stripe.timezone_now", return_value=self.now):
self.local_upgrade(self.seat_count, True, CustomerPlan.ANNUAL, 'token')
plan = CustomerPlan.objects.get()
self.assertEqual(plan.next_invoice_date, self.next_month)
self.assertEqual(get_realm('zulip').plan_type, Realm.STANDARD)
self.assertEqual(plan.status, CustomerPlan.ACTIVE)
# Add some extra users before the realm is deactivated
with patch("corporate.lib.stripe.get_latest_seat_count", return_value=20):
update_license_ledger_if_needed(user.realm, self.now)
last_ledger_entry = LicenseLedger.objects.order_by('id').last()
self.assertEqual(last_ledger_entry.licenses, 20)
self.assertEqual(last_ledger_entry.licenses_at_next_renewal, 20)
do_deactivate_realm(get_realm("zulip"))
plan.refresh_from_db()
self.assertTrue(get_realm('zulip').deactivated)
self.assertEqual(get_realm('zulip').plan_type, Realm.LIMITED)
self.assertEqual(plan.status, CustomerPlan.ENDED)
self.assertEqual(plan.invoiced_through, last_ledger_entry)
self.assertIsNone(plan.next_invoice_date)
do_reactivate_realm(get_realm('zulip'))
self.login_user(user)
response = self.client_get("/billing/")
self.assert_in_success_response(["Your organization is on the <b>Zulip Free</b>"], response)
# The extra users added in the final month are not charged
with patch("corporate.lib.stripe.invoice_plan") as mocked:
invoice_plans_as_needed(self.next_month)
mocked.assert_not_called()
# The plan is not renewed after a year
with patch("corporate.lib.stripe.invoice_plan") as mocked:
invoice_plans_as_needed(self.next_year)
mocked.assert_not_called()
@patch("corporate.lib.stripe.billing_logger.info")
def test_reupgrade_by_billing_admin_after_realm_deactivation(self, mock_: Mock) -> None:
user = self.example_user("hamlet")
with patch("corporate.lib.stripe.timezone_now", return_value=self.now):
self.local_upgrade(self.seat_count, True, CustomerPlan.ANNUAL, 'token')
do_deactivate_realm(get_realm("zulip"))
self.assertTrue(get_realm('zulip').deactivated)
do_reactivate_realm(get_realm('zulip'))
self.login_user(user)
response = self.client_get("/billing/")
self.assert_in_success_response(["Your organization is on the <b>Zulip Free</b>"], response)
with patch("corporate.lib.stripe.timezone_now", return_value=self.now):
self.local_upgrade(self.seat_count, True, CustomerPlan.ANNUAL, 'token')
self.assertEqual(Customer.objects.count(), 1)
self.assertEqual(CustomerPlan.objects.count(), 2)
current_plan = CustomerPlan.objects.all().order_by("id").last()
self.assertEqual(current_plan.next_invoice_date, self.next_month)
self.assertEqual(get_realm('zulip').plan_type, Realm.STANDARD)
self.assertEqual(current_plan.status, CustomerPlan.ACTIVE)
old_plan = CustomerPlan.objects.all().order_by("id").first()
self.assertEqual(old_plan.next_invoice_date, None)
self.assertEqual(old_plan.status, CustomerPlan.ENDED)
class RequiresBillingAccessTest(ZulipTestCase):
def setUp(self) -> None:
super().setUp()
hamlet = self.example_user("hamlet")
hamlet.is_billing_admin = True
hamlet.save(update_fields=["is_billing_admin"])
desdemona = self.example_user('desdemona')
desdemona.role = UserProfile.ROLE_REALM_OWNER
desdemona.save(update_fields=["role"])
def test_who_can_access_json_endpoints(self) -> None:
# Billing admins have access
self.login_user(self.example_user('hamlet'))
with patch("corporate.views.do_replace_payment_source") as mocked1:
response = self.client_post("/json/billing/sources/change",
{'stripe_token': ujson.dumps('token')})
self.assert_json_success(response)
mocked1.assert_called()
# Realm owners have access, even if they are not billing admins
self.login_user(self.example_user('desdemona'))
with patch("corporate.views.do_replace_payment_source") as mocked2:
response = self.client_post("/json/billing/sources/change",
{'stripe_token': ujson.dumps('token')})
self.assert_json_success(response)
mocked2.assert_called()
def test_who_cant_access_json_endpoints(self) -> None:
def verify_user_cant_access_endpoint(username: str, endpoint: str, request_data: Dict[str, str], error_message: str) -> None:
self.login_user(self.example_user(username))
response = self.client_post(endpoint, request_data)
self.assert_json_error_contains(response, error_message)
verify_user_cant_access_endpoint("polonius", "/json/billing/upgrade",
{'billing_modality': ujson.dumps("charge_automatically"), 'schedule': ujson.dumps("annual"),
'signed_seat_count': ujson.dumps('signed count'), 'salt': ujson.dumps('salt')},
"Must be an organization member")
verify_user_cant_access_endpoint("polonius", "/json/billing/sponsorship",
{'organization-type': ujson.dumps("event"), 'description': ujson.dumps("event description"),
'website': ujson.dumps("example.com")},
"Must be an organization member")
for username in ["cordelia", "iago"]:
self.login_user(self.example_user(username))
verify_user_cant_access_endpoint(username, "/json/billing/sources/change", {'stripe_token': ujson.dumps('token')},
"Must be a billing administrator or an organization owner")
verify_user_cant_access_endpoint(username, "/json/billing/plan/change", {'status': ujson.dumps(1)},
"Must be a billing administrator or an organization owner")
# Make sure that we are testing all the JSON endpoints
# Quite a hack, but probably fine for now
string_with_all_endpoints = str(get_resolver('corporate.urls').reverse_dict)
json_endpoints = {word.strip("\"'()[],$") for word in string_with_all_endpoints.split()
if 'json/' in word}
self.assertEqual(len(json_endpoints), 4)
class BillingHelpersTest(ZulipTestCase):
def test_next_month(self) -> None:
anchor = datetime(2019, 12, 31, 1, 2, 3, tzinfo=timezone.utc)
period_boundaries = [
anchor,
datetime(2020, 1, 31, 1, 2, 3, tzinfo=timezone.utc),
# Test that this is the 28th even during leap years
datetime(2020, 2, 28, 1, 2, 3, tzinfo=timezone.utc),
datetime(2020, 3, 31, 1, 2, 3, tzinfo=timezone.utc),
datetime(2020, 4, 30, 1, 2, 3, tzinfo=timezone.utc),
datetime(2020, 5, 31, 1, 2, 3, tzinfo=timezone.utc),
datetime(2020, 6, 30, 1, 2, 3, tzinfo=timezone.utc),
datetime(2020, 7, 31, 1, 2, 3, tzinfo=timezone.utc),
datetime(2020, 8, 31, 1, 2, 3, tzinfo=timezone.utc),
datetime(2020, 9, 30, 1, 2, 3, tzinfo=timezone.utc),
datetime(2020, 10, 31, 1, 2, 3, tzinfo=timezone.utc),
datetime(2020, 11, 30, 1, 2, 3, tzinfo=timezone.utc),
datetime(2020, 12, 31, 1, 2, 3, tzinfo=timezone.utc),
datetime(2021, 1, 31, 1, 2, 3, tzinfo=timezone.utc),
datetime(2021, 2, 28, 1, 2, 3, tzinfo=timezone.utc)]
with self.assertRaises(AssertionError):
add_months(anchor, -1)
# Explicitly test add_months for each value of MAX_DAY_FOR_MONTH and
# for crossing a year boundary
for i, boundary in enumerate(period_boundaries):
self.assertEqual(add_months(anchor, i), boundary)
# Test next_month for small values
for last, next_ in zip(period_boundaries[:-1], period_boundaries[1:]):
self.assertEqual(next_month(anchor, last), next_)
# Test next_month for large values
period_boundaries = [dt.replace(year=dt.year+100) for dt in period_boundaries]
for last, next_ in zip(period_boundaries[:-1], period_boundaries[1:]):
self.assertEqual(next_month(anchor, last), next_)
def test_compute_plan_parameters(self) -> None:
# TODO: test rounding down microseconds
anchor = datetime(2019, 12, 31, 1, 2, 3, tzinfo=timezone.utc)
month_later = datetime(2020, 1, 31, 1, 2, 3, tzinfo=timezone.utc)
year_later = datetime(2020, 12, 31, 1, 2, 3, tzinfo=timezone.utc)
test_cases = [
# test all possibilities, since there aren't that many
((True, CustomerPlan.ANNUAL, None), (anchor, month_later, year_later, 8000)),
((True, CustomerPlan.ANNUAL, 85), (anchor, month_later, year_later, 1200)),
((True, CustomerPlan.MONTHLY, None), (anchor, month_later, month_later, 800)),
((True, CustomerPlan.MONTHLY, 85), (anchor, month_later, month_later, 120)),
((False, CustomerPlan.ANNUAL, None), (anchor, year_later, year_later, 8000)),
((False, CustomerPlan.ANNUAL, 85), (anchor, year_later, year_later, 1200)),
((False, CustomerPlan.MONTHLY, None), (anchor, month_later, month_later, 800)),
((False, CustomerPlan.MONTHLY, 85), (anchor, month_later, month_later, 120)),
# test exact math of Decimals; 800 * (1 - 87.25 / 100) = 101.9999999..
((False, CustomerPlan.MONTHLY, 87.25), (anchor, month_later, month_later, 102)),
# test dropping of fractional cents; without the int it's 102.8
((False, CustomerPlan.MONTHLY, 87.15), (anchor, month_later, month_later, 102)),
]
with patch('corporate.lib.stripe.timezone_now', return_value=anchor):
for (automanage_licenses, billing_schedule, discount), output in test_cases:
output_ = compute_plan_parameters(
automanage_licenses,
billing_schedule,
None if discount is None else Decimal(discount),
)
self.assertEqual(output_, output)
def test_update_or_create_stripe_customer_logic(self) -> None:
user = self.example_user('hamlet')
# No existing Customer object
with patch('corporate.lib.stripe.do_create_stripe_customer', return_value='returned') as mocked1:
returned = update_or_create_stripe_customer(user, stripe_token='token')
mocked1.assert_called()
self.assertEqual(returned, 'returned')
customer = Customer.objects.create(realm=get_realm('zulip'))
# Customer exists but stripe_customer_id is None
with patch('corporate.lib.stripe.do_create_stripe_customer', return_value='returned') as mocked2:
returned = update_or_create_stripe_customer(user, stripe_token='token')
mocked2.assert_called()
self.assertEqual(returned, 'returned')
customer.stripe_customer_id = 'cus_12345'
customer.save()
# Customer exists, replace payment source
with patch('corporate.lib.stripe.do_replace_payment_source') as mocked3:
returned_customer = update_or_create_stripe_customer(self.example_user('hamlet'), 'token')
mocked3.assert_called()
self.assertEqual(returned_customer, customer)
# Customer exists, do nothing
with patch('corporate.lib.stripe.do_replace_payment_source') as mocked4:
returned_customer = update_or_create_stripe_customer(self.example_user('hamlet'), None)
mocked4.assert_not_called()
self.assertEqual(returned_customer, customer)
def test_get_customer_by_realm(self) -> None:
realm = get_realm('zulip')
self.assertEqual(get_customer_by_realm(realm), None)
customer = Customer.objects.create(realm=realm, stripe_customer_id='cus_12345')
self.assertEqual(get_customer_by_realm(realm), customer)
def test_get_current_plan_by_customer(self) -> None:
realm = get_realm("zulip")
customer = Customer.objects.create(realm=realm, stripe_customer_id='cus_12345')
self.assertEqual(get_current_plan_by_customer(customer), None)
plan = CustomerPlan.objects.create(customer=customer, status=CustomerPlan.ACTIVE,
billing_cycle_anchor=timezone_now(),
billing_schedule=CustomerPlan.ANNUAL,
tier=CustomerPlan.STANDARD)
self.assertEqual(get_current_plan_by_customer(customer), plan)
plan.status = CustomerPlan.DOWNGRADE_AT_END_OF_CYCLE
plan.save(update_fields=["status"])
self.assertEqual(get_current_plan_by_customer(customer), plan)
plan.status = CustomerPlan.ENDED
plan.save(update_fields=["status"])
self.assertEqual(get_current_plan_by_customer(customer), None)
plan.status = CustomerPlan.NEVER_STARTED
plan.save(update_fields=["status"])
self.assertEqual(get_current_plan_by_customer(customer), None)
def test_get_current_plan_by_realm(self) -> None:
realm = get_realm("zulip")
self.assertEqual(get_current_plan_by_realm(realm), None)
customer = Customer.objects.create(realm=realm, stripe_customer_id='cus_12345')
self.assertEqual(get_current_plan_by_realm(realm), None)
plan = CustomerPlan.objects.create(customer=customer, status=CustomerPlan.ACTIVE,
billing_cycle_anchor=timezone_now(),
billing_schedule=CustomerPlan.ANNUAL,
tier=CustomerPlan.STANDARD)
self.assertEqual(get_current_plan_by_realm(realm), plan)
class LicenseLedgerTest(StripeTestCase):
def test_add_plan_renewal_if_needed(self) -> None:
with patch('corporate.lib.stripe.timezone_now', return_value=self.now):
self.local_upgrade(self.seat_count, True, CustomerPlan.ANNUAL, 'token')
self.assertEqual(LicenseLedger.objects.count(), 1)
plan = CustomerPlan.objects.get()
# Plan hasn't renewed yet
make_end_of_cycle_updates_if_needed(plan, self.next_year - timedelta(days=1))
self.assertEqual(LicenseLedger.objects.count(), 1)
# Plan needs to renew
# TODO: do_deactivate_user for a user, so that licenses_at_next_renewal != licenses
new_plan, ledger_entry = make_end_of_cycle_updates_if_needed(plan, self.next_year)
self.assertIsNone(new_plan)
self.assertEqual(LicenseLedger.objects.count(), 2)
ledger_params = {
'plan': plan, 'is_renewal': True, 'event_time': self.next_year,
'licenses': self.seat_count, 'licenses_at_next_renewal': self.seat_count}
for key, value in ledger_params.items():
self.assertEqual(getattr(ledger_entry, key), value)
# Plan needs to renew, but we already added the plan_renewal ledger entry
make_end_of_cycle_updates_if_needed(plan, self.next_year + timedelta(days=1))
self.assertEqual(LicenseLedger.objects.count(), 2)
def test_update_license_ledger_if_needed(self) -> None:
realm = get_realm('zulip')
# Test no Customer
update_license_ledger_if_needed(realm, self.now)
self.assertFalse(LicenseLedger.objects.exists())
# Test plan not automanaged
self.local_upgrade(self.seat_count + 1, False, CustomerPlan.ANNUAL, 'token')
self.assertEqual(LicenseLedger.objects.count(), 1)
update_license_ledger_if_needed(realm, self.now)
self.assertEqual(LicenseLedger.objects.count(), 1)
# Test no active plan
plan = CustomerPlan.objects.get()
plan.automanage_licenses = True
plan.status = CustomerPlan.ENDED
plan.save(update_fields=['automanage_licenses', 'status'])
update_license_ledger_if_needed(realm, self.now)
self.assertEqual(LicenseLedger.objects.count(), 1)
# Test update needed
plan.status = CustomerPlan.ACTIVE
plan.save(update_fields=['status'])
update_license_ledger_if_needed(realm, self.now)
self.assertEqual(LicenseLedger.objects.count(), 2)
def test_update_license_ledger_for_automanaged_plan(self) -> None:
realm = get_realm('zulip')
with patch('corporate.lib.stripe.timezone_now', return_value=self.now):
self.local_upgrade(self.seat_count, True, CustomerPlan.ANNUAL, 'token')
plan = CustomerPlan.objects.first()
# Simple increase
with patch('corporate.lib.stripe.get_latest_seat_count', return_value=23):
update_license_ledger_for_automanaged_plan(realm, plan, self.now)
# Decrease
with patch('corporate.lib.stripe.get_latest_seat_count', return_value=20):
update_license_ledger_for_automanaged_plan(realm, plan, self.now)
# Increase, but not past high watermark
with patch('corporate.lib.stripe.get_latest_seat_count', return_value=21):
update_license_ledger_for_automanaged_plan(realm, plan, self.now)
# Increase, but after renewal date, and below last year's high watermark
with patch('corporate.lib.stripe.get_latest_seat_count', return_value=22):
update_license_ledger_for_automanaged_plan(realm, plan, self.next_year + timedelta(seconds=1))
ledger_entries = list(LicenseLedger.objects.values_list(
'is_renewal', 'event_time', 'licenses', 'licenses_at_next_renewal').order_by('id'))
self.assertEqual(ledger_entries,
[(True, self.now, self.seat_count, self.seat_count),
(False, self.now, 23, 23),
(False, self.now, 23, 20),
(False, self.now, 23, 21),
(True, self.next_year, 21, 21),
(False, self.next_year + timedelta(seconds=1), 22, 22)])
def test_user_changes(self) -> None:
self.local_upgrade(self.seat_count, True, CustomerPlan.ANNUAL, 'token')
user = do_create_user('email', 'password', get_realm('zulip'), 'name')
do_deactivate_user(user)
do_reactivate_user(user)
# Not a proper use of do_activate_user, but fine for this test
do_activate_user(user)
ledger_entries = list(LicenseLedger.objects.values_list(
'is_renewal', 'licenses', 'licenses_at_next_renewal').order_by('id'))
self.assertEqual(ledger_entries,
[(True, self.seat_count, self.seat_count),
(False, self.seat_count + 1, self.seat_count + 1),
(False, self.seat_count + 1, self.seat_count),
(False, self.seat_count + 1, self.seat_count + 1),
(False, self.seat_count + 1, self.seat_count + 1)])
class InvoiceTest(StripeTestCase):
def test_invoicing_status_is_started(self) -> None:
self.local_upgrade(self.seat_count, True, CustomerPlan.ANNUAL, 'token')
plan = CustomerPlan.objects.first()
plan.invoicing_status = CustomerPlan.STARTED
plan.save(update_fields=['invoicing_status'])
with self.assertRaises(NotImplementedError):
invoice_plan(CustomerPlan.objects.first(), self.now)
@mock_stripe()
def test_invoice_plan(self, *mocks: Mock) -> None:
user = self.example_user("hamlet")
self.login_user(user)
with patch('corporate.lib.stripe.timezone_now', return_value=self.now):
self.upgrade()
# Increase
with patch('corporate.lib.stripe.get_latest_seat_count', return_value=self.seat_count + 3):
update_license_ledger_if_needed(get_realm('zulip'), self.now + timedelta(days=100))
# Decrease
with patch('corporate.lib.stripe.get_latest_seat_count', return_value=self.seat_count):
update_license_ledger_if_needed(get_realm('zulip'), self.now + timedelta(days=200))
# Increase, but not past high watermark
with patch('corporate.lib.stripe.get_latest_seat_count', return_value=self.seat_count + 1):
update_license_ledger_if_needed(get_realm('zulip'), self.now + timedelta(days=300))
# Increase, but after renewal date, and below last year's high watermark
with patch('corporate.lib.stripe.get_latest_seat_count', return_value=self.seat_count + 2):
update_license_ledger_if_needed(get_realm('zulip'), self.now + timedelta(days=400))
# Increase, but after event_time
with patch('corporate.lib.stripe.get_latest_seat_count', return_value=self.seat_count + 3):
update_license_ledger_if_needed(get_realm('zulip'), self.now + timedelta(days=500))
plan = CustomerPlan.objects.first()
invoice_plan(plan, self.now + timedelta(days=400))
stripe_invoices = [invoice for invoice in stripe.Invoice.list(
customer=plan.customer.stripe_customer_id)]
self.assertEqual(len(stripe_invoices), 2)
self.assertIsNotNone(stripe_invoices[0].status_transitions.finalized_at)
stripe_line_items = [item for item in stripe_invoices[0].lines]
self.assertEqual(len(stripe_line_items), 3)
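# Line item 0: the license added on day 400 (34 days into the second year), prorated over the remaining 331 of 365 days of that cycle.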
line_item_params = {
'amount': int(8000 * (1 - ((400-366) / 365)) + .5),
'description': 'Additional license (Feb 5, 2013 - Jan 2, 2014)',
'discountable': False,
'period': {
'start': datetime_to_timestamp(self.now + timedelta(days=400)),
'end': datetime_to_timestamp(self.now + timedelta(days=2*365 + 1))},
'quantity': 1}
for key, value in line_item_params.items():
self.assertEqual(stripe_line_items[0].get(key), value)
line_item_params = {
'amount': 8000 * (self.seat_count + 1),
'description': 'Zulip Standard - renewal',
'discountable': False,
'period': {
'start': datetime_to_timestamp(self.now + timedelta(days=366)),
'end': datetime_to_timestamp(self.now + timedelta(days=2*365 + 1))},
'quantity': (self.seat_count + 1)}
for key, value in line_item_params.items():
self.assertEqual(stripe_line_items[1].get(key), value)
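# Line item 2: the 3 licenses added on day 100, prorated over the remaining 266 of 366 days of the first (leap-year) cycle.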
line_item_params = {
'amount': 3 * int(8000 * (366-100) / 366 + .5),
'description': 'Additional license (Apr 11, 2012 - Jan 2, 2013)',
'discountable': False,
'period': {
'start': datetime_to_timestamp(self.now + timedelta(days=100)),
'end': datetime_to_timestamp(self.now + timedelta(days=366))},
'quantity': 3}
for key, value in line_item_params.items():
self.assertEqual(stripe_line_items[2].get(key), value)
@mock_stripe()
def test_fixed_price_plans(self, *mocks: Mock) -> None:
# Also tests charge_automatically=False
user = self.example_user("hamlet")
self.login_user(user)
with patch('corporate.lib.stripe.timezone_now', return_value=self.now):
self.upgrade(invoice=True)
plan = CustomerPlan.objects.first()
plan.fixed_price = 100
plan.price_per_license = 0
plan.save(update_fields=['fixed_price', 'price_per_license'])
invoice_plan(plan, self.next_year)
stripe_invoices = [invoice for invoice in stripe.Invoice.list(
customer=plan.customer.stripe_customer_id)]
self.assertEqual(len(stripe_invoices), 2)
self.assertEqual(stripe_invoices[0].billing, 'send_invoice')
stripe_line_items = [item for item in stripe_invoices[0].lines]
self.assertEqual(len(stripe_line_items), 1)
line_item_params = {
'amount': 100,
'description': 'Zulip Standard - renewal',
'discountable': False,
'period': {
'start': datetime_to_timestamp(self.next_year),
'end': datetime_to_timestamp(self.next_year + timedelta(days=365))},
'quantity': 1}
for key, value in line_item_params.items():
self.assertEqual(stripe_line_items[0].get(key), value)
def test_no_invoice_needed(self) -> None:
with patch('corporate.lib.stripe.timezone_now', return_value=self.now):
self.local_upgrade(self.seat_count, True, CustomerPlan.ANNUAL, 'token')
plan = CustomerPlan.objects.first()
self.assertEqual(plan.next_invoice_date, self.next_month)
# Test this doesn't make any calls to stripe.Invoice or stripe.InvoiceItem
invoice_plan(plan, self.next_month)
plan = CustomerPlan.objects.first()
# Test that we still update next_invoice_date
self.assertEqual(plan.next_invoice_date, self.next_month + timedelta(days=29))
def test_invoice_plans_as_needed(self) -> None:
with patch('corporate.lib.stripe.timezone_now', return_value=self.now):
self.local_upgrade(self.seat_count, True, CustomerPlan.ANNUAL, 'token')
plan = CustomerPlan.objects.first()
self.assertEqual(plan.next_invoice_date, self.next_month)
# Test nothing needed to be done
with patch('corporate.lib.stripe.invoice_plan') as mocked:
invoice_plans_as_needed(self.next_month - timedelta(days=1))
mocked.assert_not_called()
# Test something needing to be done
invoice_plans_as_needed(self.next_month)
plan = CustomerPlan.objects.first()
self.assertEqual(plan.next_invoice_date, self.next_month + timedelta(days=29))
avg_line_length: 54.214669 | max_line_length: 134 | alphanum_fraction: 0.661659

hexsha: 175f008b2c845563bfee0de9b01f1379b9753ee1 | size: 17,501 | ext: py | lang: Python
max_stars_repo_path: tempest/api/compute/test_authorization.py | max_stars_repo_name: ePlusPS/tempest | max_stars_repo_head_hexsha: c32dac02bf237468ecda1f093333747bc9c992b1 | max_stars_repo_licenses: ["Apache-2.0"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: tempest/api/compute/test_authorization.py | max_issues_repo_name: ePlusPS/tempest | max_issues_repo_head_hexsha: c32dac02bf237468ecda1f093333747bc9c992b1 | max_issues_repo_licenses: ["Apache-2.0"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: tempest/api/compute/test_authorization.py | max_forks_repo_name: ePlusPS/tempest | max_forks_repo_head_hexsha: c32dac02bf237468ecda1f093333747bc9c992b1 | max_forks_repo_licenses: ["Apache-2.0"] | max_forks_count: 1 | max_forks_repo_forks_event_min_datetime: 2018-10-09T06:32:04.000Z | max_forks_repo_forks_event_max_datetime: 2018-10-09T06:32:04.000Z
content:
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import StringIO
from tempest.api.compute import base
from tempest import clients
from tempest.common.utils import data_utils
from tempest import config
from tempest import exceptions
from tempest.openstack.common import log as logging
from tempest import test
CONF = config.CONF
LOG = logging.getLogger(__name__)
class AuthorizationTestJSON(base.BaseV2ComputeTest):
@classmethod
def resource_setup(cls):
if not CONF.service_available.glance:
raise cls.skipException('Glance is not available.')
# No network resources required for this test
cls.set_network_resources()
super(AuthorizationTestJSON, cls).resource_setup()
if not cls.multi_user:
msg = "Need >1 user"
raise cls.skipException(msg)
cls.client = cls.os.servers_client
cls.images_client = cls.os.images_client
cls.glance_client = cls.os.image_client
cls.keypairs_client = cls.os.keypairs_client
cls.security_client = cls.os.security_groups_client
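# A second, isolated set of credentials: the alt_* clients act as the other tenant that must be denied access in the tests below.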
creds = cls.isolated_creds.get_alt_creds()
cls.alt_manager = clients.Manager(credentials=creds)
cls.alt_client = cls.alt_manager.servers_client
cls.alt_images_client = cls.alt_manager.images_client
cls.alt_keypairs_client = cls.alt_manager.keypairs_client
cls.alt_security_client = cls.alt_manager.security_groups_client
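# Resources owned by the primary tenant: an active server, a private glance image, a keypair, and a security group with one TCP/22 rule.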
resp, server = cls.create_test_server(wait_until='ACTIVE')
resp, cls.server = cls.client.get_server(server['id'])
name = data_utils.rand_name('image')
resp, body = cls.glance_client.create_image(name=name,
container_format='bare',
disk_format='raw',
is_public=False)
image_id = body['id']
image_file = StringIO.StringIO(('*' * 1024))
resp, body = cls.glance_client.update_image(image_id, data=image_file)
cls.glance_client.wait_for_image_status(image_id, 'active')
resp, cls.image = cls.images_client.get_image(image_id)
cls.keypairname = data_utils.rand_name('keypair')
resp, keypair = \
cls.keypairs_client.create_keypair(cls.keypairname)
name = data_utils.rand_name('security')
description = data_utils.rand_name('description')
resp, cls.security_group = cls.security_client.create_security_group(
name, description)
parent_group_id = cls.security_group['id']
ip_protocol = 'tcp'
from_port = 22
to_port = 22
resp, cls.rule = cls.security_client.create_security_group_rule(
parent_group_id, ip_protocol, from_port, to_port)
@classmethod
def resource_cleanup(cls):
if cls.multi_user:
cls.images_client.delete_image(cls.image['id'])
cls.keypairs_client.delete_keypair(cls.keypairname)
cls.security_client.delete_security_group(cls.security_group['id'])
super(AuthorizationTestJSON, cls).resource_cleanup()
@test.attr(type='gate')
def test_get_server_for_alt_account_fails(self):
# A GET request for a server on another user's account should fail
self.assertRaises(exceptions.NotFound, self.alt_client.get_server,
self.server['id'])
@test.attr(type='gate')
def test_delete_server_for_alt_account_fails(self):
# A DELETE request for another user's server should fail
self.assertRaises(exceptions.NotFound, self.alt_client.delete_server,
self.server['id'])
@test.attr(type='gate')
def test_update_server_for_alt_account_fails(self):
# An update server request for another user's server should fail
self.assertRaises(exceptions.NotFound, self.alt_client.update_server,
self.server['id'], name='test')
@test.attr(type='gate')
def test_list_server_addresses_for_alt_account_fails(self):
# A list addresses request for another user's server should fail
self.assertRaises(exceptions.NotFound, self.alt_client.list_addresses,
self.server['id'])
@test.attr(type='gate')
def test_list_server_addresses_by_network_for_alt_account_fails(self):
# A list address/network request for another user's server should fail
server_id = self.server['id']
self.assertRaises(exceptions.NotFound,
self.alt_client.list_addresses_by_network, server_id,
'public')
@test.attr(type='gate')
def test_list_servers_with_alternate_tenant(self):
# A server list requested by the alternate tenant should not
# include servers belonging to this tenant
# Listing servers from alternate tenant
alt_server_ids = []
resp, body = self.alt_client.list_servers()
alt_server_ids = [s['id'] for s in body['servers']]
self.assertNotIn(self.server['id'], alt_server_ids)
@test.attr(type='gate')
def test_change_password_for_alt_account_fails(self):
# A change password request for another user's server should fail
self.assertRaises(exceptions.NotFound, self.alt_client.change_password,
self.server['id'], 'newpass')
@test.attr(type='gate')
def test_reboot_server_for_alt_account_fails(self):
# A reboot request for another user's server should fail
self.assertRaises(exceptions.NotFound, self.alt_client.reboot,
self.server['id'], 'HARD')
@test.attr(type='gate')
def test_rebuild_server_for_alt_account_fails(self):
# A rebuild request for another user's server should fail
self.assertRaises(exceptions.NotFound, self.alt_client.rebuild,
self.server['id'], self.image_ref_alt)
@test.attr(type='gate')
def test_resize_server_for_alt_account_fails(self):
# A resize request for another user's server should fail
self.assertRaises(exceptions.NotFound, self.alt_client.resize,
self.server['id'], self.flavor_ref_alt)
@test.attr(type='gate')
def test_create_image_for_alt_account_fails(self):
# A create image request for another user's server should fail
self.assertRaises(exceptions.NotFound,
self.alt_images_client.create_image,
self.server['id'], 'testImage')
@test.attr(type='gate')
def test_create_server_with_unauthorized_image(self):
# Server creation with another user's image should fail
self.assertRaises(exceptions.BadRequest, self.alt_client.create_server,
'test', self.image['id'], self.flavor_ref)
@test.attr(type='gate')
def test_create_server_fails_when_tenant_incorrect(self):
# A create server request should fail if the tenant id does not match
# the current user
# Change the base URL to impersonate another user
self.alt_client.auth_provider.set_alt_auth_data(
request_part='url',
auth_data=self.client.auth_provider.auth_data
)
self.assertRaises(exceptions.BadRequest,
self.alt_client.create_server, 'test',
self.image['id'], self.flavor_ref)
@test.attr(type='gate')
def test_create_keypair_in_analt_user_tenant(self):
# A create keypair request should fail if the tenant id does not match
# the current user
# POST keypair with other user tenant
k_name = data_utils.rand_name('keypair-')
try:
# Change the base URL to impersonate another user
self.alt_keypairs_client.auth_provider.set_alt_auth_data(
request_part='url',
auth_data=self.keypairs_client.auth_provider.auth_data
)
resp = {}
resp['status'] = None
self.assertRaises(exceptions.BadRequest,
self.alt_keypairs_client.create_keypair, k_name)
finally:
# Next request the base_url is back to normal
if (resp['status'] is not None):
resp, _ = self.alt_keypairs_client.delete_keypair(k_name)
LOG.error("Create keypair request should not happen "
"if the tenant id does not match the current user")
@test.attr(type='gate')
def test_get_keypair_of_alt_account_fails(self):
# A GET request for another user's keypair should fail
self.assertRaises(exceptions.NotFound,
self.alt_keypairs_client.get_keypair,
self.keypairname)
@test.attr(type='gate')
def test_delete_keypair_of_alt_account_fails(self):
# A DELETE request for another user's keypair should fail
self.assertRaises(exceptions.NotFound,
self.alt_keypairs_client.delete_keypair,
self.keypairname)
@test.attr(type='gate')
def test_get_image_for_alt_account_fails(self):
# A GET request for an image on another user's account should fail
self.assertRaises(exceptions.NotFound,
self.alt_images_client.get_image, self.image['id'])
@test.attr(type='gate')
def test_delete_image_for_alt_account_fails(self):
# A DELETE request for another user's image should fail
self.assertRaises(exceptions.NotFound,
self.alt_images_client.delete_image,
self.image['id'])
@test.attr(type='gate')
def test_create_security_group_in_analt_user_tenant(self):
# A create security group request should fail if the tenant id does not
# match the current user
# POST security group with other user tenant
s_name = data_utils.rand_name('security-')
s_description = data_utils.rand_name('security')
try:
# Change the base URL to impersonate another user
self.alt_security_client.auth_provider.set_alt_auth_data(
request_part='url',
auth_data=self.security_client.auth_provider.auth_data
)
resp = {}
resp['status'] = None
self.assertRaises(exceptions.BadRequest,
self.alt_security_client.create_security_group,
s_name, s_description)
finally:
# Next request the base_url is back to normal
if resp['status'] is not None:
self.alt_security_client.delete_security_group(resp['id'])
LOG.error("Create Security Group request should not happen if "
"the tenant id does not match the current user")
@test.attr(type='gate')
def test_get_security_group_of_alt_account_fails(self):
# A GET request for another user's security group should fail
self.assertRaises(exceptions.NotFound,
self.alt_security_client.get_security_group,
self.security_group['id'])
@test.attr(type='gate')
def test_delete_security_group_of_alt_account_fails(self):
# A DELETE request for another user's security group should fail
self.assertRaises(exceptions.NotFound,
self.alt_security_client.delete_security_group,
self.security_group['id'])
@test.attr(type='gate')
def test_create_security_group_rule_in_analt_user_tenant(self):
# A create security group rule request should fail if the tenant id
# does not match the current user
# POST security group rule with other user tenant
parent_group_id = self.security_group['id']
ip_protocol = 'icmp'
from_port = -1
to_port = -1
try:
# Change the base URL to impersonate another user
self.alt_security_client.auth_provider.set_alt_auth_data(
request_part='url',
auth_data=self.security_client.auth_provider.auth_data
)
resp = {}
resp['status'] = None
self.assertRaises(exceptions.BadRequest,
self.alt_security_client.
create_security_group_rule,
parent_group_id, ip_protocol, from_port,
to_port)
finally:
# Next request the base_url is back to normal
if resp['status'] is not None:
self.alt_security_client.delete_security_group_rule(resp['id'])
LOG.error("Create security group rule request should not "
"happen if the tenant id does not match the"
" current user")
@test.attr(type='gate')
def test_delete_security_group_rule_of_alt_account_fails(self):
# A DELETE request for another user's security group rule
# should fail
self.assertRaises(exceptions.NotFound,
self.alt_security_client.delete_security_group_rule,
self.rule['id'])
@test.attr(type='gate')
def test_set_metadata_of_alt_account_server_fails(self):
# A set metadata for another user's server should fail
req_metadata = {'meta1': 'data1', 'meta2': 'data2'}
self.assertRaises(exceptions.NotFound,
self.alt_client.set_server_metadata,
self.server['id'],
req_metadata)
@test.attr(type='gate')
def test_set_metadata_of_alt_account_image_fails(self):
# A set metadata for another user's image should fail
req_metadata = {'meta1': 'value1', 'meta2': 'value2'}
self.assertRaises(exceptions.NotFound,
self.alt_images_client.set_image_metadata,
self.image['id'], req_metadata)
@test.attr(type='gate')
def test_get_metadata_of_alt_account_server_fails(self):
# A get metadata for another user's server should fail
req_metadata = {'meta1': 'data1'}
self.client.set_server_metadata(self.server['id'], req_metadata)
self.addCleanup(self.client.delete_server_metadata_item,
self.server['id'], 'meta1')
self.assertRaises(exceptions.NotFound,
self.alt_client.get_server_metadata_item,
self.server['id'], 'meta1')
@test.attr(type='gate')
def test_get_metadata_of_alt_account_image_fails(self):
# A get metadata for another user's image should fail
req_metadata = {'meta1': 'value1'}
self.addCleanup(self.images_client.delete_image_metadata_item,
self.image['id'], 'meta1')
self.images_client.set_image_metadata(self.image['id'],
req_metadata)
self.assertRaises(exceptions.NotFound,
self.alt_images_client.get_image_metadata_item,
self.image['id'], 'meta1')
@test.attr(type='gate')
def test_delete_metadata_of_alt_account_server_fails(self):
# A delete metadata for another user's server should fail
req_metadata = {'meta1': 'data1'}
self.addCleanup(self.client.delete_server_metadata_item,
self.server['id'], 'meta1')
self.client.set_server_metadata(self.server['id'], req_metadata)
self.assertRaises(exceptions.NotFound,
self.alt_client.delete_server_metadata_item,
self.server['id'], 'meta1')
@test.attr(type='gate')
def test_delete_metadata_of_alt_account_image_fails(self):
# A delete metadata for another user's image should fail
req_metadata = {'meta1': 'data1'}
self.addCleanup(self.images_client.delete_image_metadata_item,
self.image['id'], 'meta1')
self.images_client.set_image_metadata(self.image['id'],
req_metadata)
self.assertRaises(exceptions.NotFound,
self.alt_images_client.delete_image_metadata_item,
self.image['id'], 'meta1')
@test.attr(type='gate')
def test_get_console_output_of_alt_account_server_fails(self):
# A Get Console Output for another user's server should fail
self.assertRaises(exceptions.NotFound,
self.alt_client.get_console_output,
self.server['id'], 10)
class AuthorizationTestXML(AuthorizationTestJSON):
_interface = 'xml'
avg_line_length: 45.339378 | max_line_length: 79 | alphanum_fraction: 0.632764

hexsha: 21d9ee208648e2764d82c2416cf0538d1b4f472b | size: 2,435 | ext: py | lang: Python
max_stars_repo_path: docs/source/conf.py | max_stars_repo_name: rxavier/poniard | max_stars_repo_head_hexsha: 35f9e9cc1adbb58d011deb11e12a2581a647d0ff | max_stars_repo_licenses: ["MIT"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: docs/source/conf.py | max_issues_repo_name: rxavier/poniard | max_issues_repo_head_hexsha: 35f9e9cc1adbb58d011deb11e12a2581a647d0ff | max_issues_repo_licenses: ["MIT"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: docs/source/conf.py | max_forks_repo_name: rxavier/poniard | max_forks_repo_head_hexsha: 35f9e9cc1adbb58d011deb11e12a2581a647d0ff | max_forks_repo_licenses: ["MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
import sphinx_bootstrap_theme
import sphinx_autodoc_typehints
from shutil import copyfile
sys.path.insert(0, os.path.abspath("../../"))
from poniard._version import __version__
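# Copy the repository README into the docs source tree so it can be pulled into the built documentation (Markdown sources are handled by recommonmark).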
copyfile("../../README.md", "README.md")
# -- Project information -----------------------------------------------------
project = "Poniard"
copyright = "2022, Rafael Xavier"
author = "Rafael Xavier"
# The full version, including alpha/beta/rc tags
release = __version__
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.napoleon",
"sphinx.ext.viewcode",
"recommonmark",
"sphinx_toolbox.more_autodoc.typehints",
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = []
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "bootstrap"
html_theme_path = sphinx_bootstrap_theme.get_html_theme_path()
html_logo = "_static/logo.png"
html_theme_options = {
"navbar_title": "Poniard",
"navbar_site_name": "Sections",
"bootswatch_theme": "lumen",
}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
avg_line_length: 32.039474 | max_line_length: 79 | alphanum_fraction: 0.682957

hexsha: 140b42f1f05b042b023d1c9e3dd121a6f00bcb5b | size: 2,076 | ext: py | lang: Python
max_stars_repo_path: scons-local-0.96.90/SCons/Tool/386asm.py | max_stars_repo_name: luaman/twilight | max_stars_repo_head_hexsha: b4853f3e0c828a5aaff23e44c17d70410f7781c1 | max_stars_repo_licenses: ["MIT"] | max_stars_count: 3 | max_stars_repo_stars_event_min_datetime: 2018-01-14T10:43:27.000Z | max_stars_repo_stars_event_max_datetime: 2020-03-02T08:38:23.000Z
max_issues_repo_path: scons-local-0.96.90/SCons/Tool/386asm.py | max_issues_repo_name: luaman/twilight | max_issues_repo_head_hexsha: b4853f3e0c828a5aaff23e44c17d70410f7781c1 | max_issues_repo_licenses: ["MIT"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: scons-local-0.96.90/SCons/Tool/386asm.py | max_forks_repo_name: luaman/twilight | max_forks_repo_head_hexsha: b4853f3e0c828a5aaff23e44c17d70410f7781c1 | max_forks_repo_licenses: ["MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
"""SCons.Tool.386asm
Tool specification for the 386ASM assembler for the Phar Lap ETS embedded
operating system.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "/home/scons/scons/branch.0/branch.96/baseline/src/engine/SCons/Tool/386asm.py 0.96.90.D001 2005/02/15 20:11:37 knight"
from SCons.Tool.PharLapCommon import addPharLapPaths
import SCons.Util
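# The generic assembler support lives in the SCons "as" tool module; "as" cannot appear in a plain import statement, so it is loaded via __import__ below.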
as_module = __import__('as', globals(), locals(), [])
def generate(env):
"""Add Builders and construction variables for ar to an Environment."""
as_module.generate(env)
env['AS'] = '386asm'
env['ASFLAGS'] = SCons.Util.CLVar('')
env['ASPPFLAGS'] = '$ASFLAGS'
env['ASCOM'] = '$AS $ASFLAGS $SOURCES -o $TARGET'
env['ASPPCOM'] = '$CC $ASPPFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS $SOURCES -o $TARGET'
addPharLapPaths(env)
def exists(env):
return env.Detect('386asm')
| 37.071429
| 134
| 0.74422
|
333653eae28a988432cd2df3998d821003564ec4
| 662
|
py
|
Python
|
201802Socket/server.py
|
richard-ma/weekendProject
|
43379bc8a2854132f52e5fc007ef7c721b0e7f4e
|
[
"MIT"
] | null | null | null |
201802Socket/server.py
|
richard-ma/weekendProject
|
43379bc8a2854132f52e5fc007ef7c721b0e7f4e
|
[
"MIT"
] | null | null | null |
201802Socket/server.py
|
richard-ma/weekendProject
|
43379bc8a2854132f52e5fc007ef7c721b0e7f4e
|
[
"MIT"
] | null | null | null |
from socket import *
from fib import fib
from threading import Thread
def fibServer(address):
sock = socket(AF_INET, SOCK_STREAM)
sock.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1)
sock.bind(address)
sock.listen(5)
while True:
client, addr = sock.accept()
print("Connection", addr)
Thread(target=fib_handler, args=(client,), daemon=True).start()
def fib_handler(client):
while True:
req = client.recv(100)
if not req:
break
n = int(req)
result = fib(n)
resp = str(result).encode('ascii') + b'\n'
client.send(resp)
print("Closed")
fibServer(('', 25000))
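A hedged client sketch for exercising the server above; it assumes the server is already running locally on port 25000, and `fib` itself lives in the separate `fib` module imported above, which is not shown here.
from socket import socket, AF_INET, SOCK_STREAM
def request_fib(n, address=('localhost', 25000)):
    """Send one request to the threaded Fibonacci server and return the reply."""
    sock = socket(AF_INET, SOCK_STREAM)
    sock.connect(address)
    try:
        sock.send(str(n).encode('ascii'))
        return int(sock.recv(100).strip())
    finally:
        sock.close()
if __name__ == '__main__':
    print(request_fib(20))  # prints fib(20) as computed by the server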
| 24.518519
| 71
| 0.608761
|
fad5d21fb16ef0bf2f1b00de9dd7cae4c2858029
| 1,902
|
py
|
Python
|
python/tensorflow_sdk.py
|
lanking520/djlsdk
|
03b9020fbef35740aca0b41628e5675d484a2fb7
|
[
"Apache-2.0"
] | null | null | null |
python/tensorflow_sdk.py
|
lanking520/djlsdk
|
03b9020fbef35740aca0b41628e5675d484a2fb7
|
[
"Apache-2.0"
] | null | null | null |
python/tensorflow_sdk.py
|
lanking520/djlsdk
|
03b9020fbef35740aca0b41628e5675d484a2fb7
|
[
"Apache-2.0"
] | null | null | null |
import os
import shutil
from typing import Dict
import tensorflow as tf
from base import get_djl_template, zip_dir
def export_tensorflow(model: tf.Module, sample_input: Dict[str, tf.Tensor]):
directory = "build"
if os.path.exists(directory) and os.path.isdir(directory):
shutil.rmtree(directory)
os.mkdir(directory)
model_path = os.path.join(directory, "tfmodel")
# Create and stored the model
tf.saved_model.save(model, model_path + "/")
zip_dir(model_path, model_path)
# verify
saved_model = tf.saved_model.load(model_path + "/")
print("Current supported signature: " + str(list(saved_model.signatures.keys())))
print("Try to apply serving_default...")
infer = saved_model.signatures["serving_default"]
input_names = []
for element in infer.structured_input_signature:
if isinstance(element, Dict):
input_names += list(element.keys())
print("Input names: " + str(input_names))
infer(*sample_input.values())
# Start creating template
numpy_dict = {}
for name, tensor in sample_input.items():
numpy_dict[name] = tensor_to_numpy(tensor)
dest = get_djl_template("tfmodel.zip", numpy_dict, add_deps())
shutil.copy(model_path + ".zip", dest)
def tensor_to_numpy(tensor: tf.Tensor):
return tensor.numpy()
def add_deps():
deps = ["dependencies {",
" runtimeOnly \"org.apache.logging.log4j:log4j-slf4j-impl:2.12.1\"",
" implementation \"ai.djl:api:0.10.0\"",
" runtimeOnly \"ai.djl.tensorflow:tensorflow-model-zoo:0.10.0\"",
" runtimeOnly \"ai.djl.tensorflow:tensorflow-native-auto:2.3.1\"", "}"]
return "\n".join(deps)
if __name__ == '__main__':
tensors = [tf.ones((1, 3, 224)), tf.ones((1, 3, 2))]
nd = [tensor_to_numpy(x) for x in tensors]
get_djl_template("model/tensorflow.zip", nd, add_deps())
| 35.222222
| 85
| 0.662986
|
8431116d6b24e93ae39c2bfc4a9449533f43be2e
| 2,606
|
py
|
Python
|
data_structures/heap/heap.py
|
Shuai-Xie/Python
|
b9d28fd5a00719e002de37f592406d89ff037082
|
[
"MIT"
] | 1
|
2019-12-14T03:08:29.000Z
|
2019-12-14T03:08:29.000Z
|
data_structures/heap/heap.py
|
caofanCPU/Python
|
57d290918fd3f6af1e5c0940aed95642f931d32e
|
[
"MIT"
] | null | null | null |
data_structures/heap/heap.py
|
caofanCPU/Python
|
57d290918fd3f6af1e5c0940aed95642f931d32e
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
from __future__ import print_function, division
try:
raw_input # Python 2
except NameError:
raw_input = input # Python 3
# The Heap class starts here.
class Heap:
def __init__(self): # Default constructor of heap class.
self.h = []
self.currsize = 0
def leftChild(self, i):
if 2 * i + 1 < self.currsize:
return 2 * i + 1
return None
def rightChild(self, i):
if 2 * i + 2 < self.currsize:
return 2 * i + 2
return None
def maxHeapify(self, node):
if node < self.currsize:
m = node
lc = self.leftChild(node)
rc = self.rightChild(node)
if lc is not None and self.h[lc] > self.h[m]:
m = lc
if rc is not None and self.h[rc] > self.h[m]:
m = rc
if m != node:
temp = self.h[node]
self.h[node] = self.h[m]
self.h[m] = temp
self.maxHeapify(m)
def buildHeap(self, a): # This function is used to build the heap from the data container 'a'.
self.currsize = len(a)
self.h = list(a)
for i in range(self.currsize // 2, -1, -1):
self.maxHeapify(i)
def getMax(self): # This function is used to get maximum value from the heap.
if self.currsize >= 1:
me = self.h[0]
temp = self.h[0]
self.h[0] = self.h[self.currsize - 1]
self.h[self.currsize - 1] = temp
self.currsize -= 1
self.maxHeapify(0)
return me
return None
def heapSort(self): # This function is used to sort the heap.
size = self.currsize
while self.currsize - 1 >= 0:
temp = self.h[0]
self.h[0] = self.h[self.currsize - 1]
self.h[self.currsize - 1] = temp
self.currsize -= 1
self.maxHeapify(0)
self.currsize = size
def insert(self, data): # This function is used to insert data in the heap.
self.h.append(data)
curr = self.currsize
self.currsize += 1
        # integer division is required here: with `from __future__ import division`,
        # `curr / 2` would be a float and cannot index a list
        while self.h[curr] > self.h[curr // 2]:
            temp = self.h[curr // 2]
            self.h[curr // 2] = self.h[curr]
            self.h[curr] = temp
            curr = curr // 2
def display(self): # This function is used to print the heap.
print(self.h)
def main():
l = list(map(int, raw_input().split()))
h = Heap()
h.buildHeap(l)
h.heapSort()
h.display()
if __name__ == '__main__':
main()
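A short usage sketch of the Heap class above (values are illustrative; it bypasses the stdin-reading main()).
h = Heap()
h.buildHeap([4, 1, 9, 7, 3])
h.insert(12)
print(h.getMax())   # 12 -- the current maximum is swapped to the back and returned
h.heapSort()
h.display()         # the backing list is now in ascending order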
| 28.021505
| 99
| 0.515733
|
6fd101b24a2a78b8359954b6f69d7515302f1ffe
| 384
|
py
|
Python
|
main.py
|
Nakxxgit/riss_crawler
|
38c259387c843fda7a9c22974bc1b87ccab9edc8
|
[
"MIT"
] | null | null | null |
main.py
|
Nakxxgit/riss_crawler
|
38c259387c843fda7a9c22974bc1b87ccab9edc8
|
[
"MIT"
] | null | null | null |
main.py
|
Nakxxgit/riss_crawler
|
38c259387c843fda7a9c22974bc1b87ccab9edc8
|
[
"MIT"
] | null | null | null |
import journal_csv
import book_csv
import degree_csv
import etc_csv
import pandas as pd
df1 = journal_csv.convert_to_csv()
df2 = book_csv.convert_to_csv()
df3 = degree_csv.convert_to_csv()
df4 = etc_csv.convert_to_csv()
df = pd.concat([df1, df2, df3, df4])
cols = ['제목', '저자', '학술지명', '권호사항', '발행처', '자료유형', '수록면', '발행년도', 'KDC', '초록']
df = df[cols]
df.to_csv('output.csv', sep=',')
| 25.6
| 78
| 0.690104
|
45797680c7a890142c0ffe6643a8a445d3315d51
| 726
|
py
|
Python
|
graphgallery/functional/sparse/to_edge.py
|
dongzizhu/GraphGallery
|
c65eab42daeb52de5019609fe7b368e30863b4ae
|
[
"MIT"
] | 1
|
2020-07-29T08:00:32.000Z
|
2020-07-29T08:00:32.000Z
|
graphgallery/functional/sparse/to_edge.py
|
dongzizhu/GraphGallery
|
c65eab42daeb52de5019609fe7b368e30863b4ae
|
[
"MIT"
] | null | null | null |
graphgallery/functional/sparse/to_edge.py
|
dongzizhu/GraphGallery
|
c65eab42daeb52de5019609fe7b368e30863b4ae
|
[
"MIT"
] | null | null | null |
import numpy as np
import scipy.sparse as sp
from ..transform import SparseTransform
from ..transform import Transform
from ..decorators import multiple
__all__ = ['SparseAdjToEdge', 'sparse_adj_to_edge']
@Transform.register()
class SparseAdjToEdge(SparseTransform):
def __call__(self, adj_matrix: sp.csr_matrix):
return sparse_adj_to_edge(adj_matrix)
@multiple()
def sparse_adj_to_edge(adj_matrix: sp.csr_matrix):
"""Convert a Scipy sparse matrix to (edge_index, edge_weight) representation"""
adj_matrix = adj_matrix.tocoo(copy=False)
edge_index = np.asarray((adj_matrix.row, adj_matrix.col))
edge_weight = adj_matrix.data.copy()
return edge_index, edge_weight
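For illustration, the snippet below mirrors the body of sparse_adj_to_edge on a tiny graph; it avoids calling the decorated function directly because the calling convention added by the @multiple decorator is not shown here.
if __name__ == '__main__':
    # 3-node path graph 0-1-2, stored as a symmetric CSR matrix
    adj = sp.csr_matrix(np.array([[0, 1, 0],
                                  [1, 0, 1],
                                  [0, 1, 0]], dtype=np.float64))
    coo = adj.tocoo()
    edge_index = np.asarray((coo.row, coo.col))  # shape (2, 4): both directions of each edge
    edge_weight = coo.data.copy()                # shape (4,)
    print(edge_index)
    print(edge_weight)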
| 29.04
| 84
| 0.739669
|
a105a13aa1fee510551f9bf3d47c9c76323fadf0
| 2,054
|
py
|
Python
|
process/score.py
|
eschnou/unicrawl
|
ec39d4183c448a24ebeaa991d291d0da8aa0eb75
|
[
"MIT"
] | null | null | null |
process/score.py
|
eschnou/unicrawl
|
ec39d4183c448a24ebeaa991d291d0da8aa0eb75
|
[
"MIT"
] | 2
|
2020-06-30T22:31:04.000Z
|
2020-11-09T17:05:41.000Z
|
process/score.py
|
eschnou/unicrawl
|
ec39d4183c448a24ebeaa991d291d0da8aa0eb75
|
[
"MIT"
] | 4
|
2020-05-27T08:02:44.000Z
|
2021-06-09T14:53:11.000Z
|
import json
import re
import csv
import argparse
from functools import reduce
class Scoring:
dictionary = ('environnement',
'climat', 'climatique',
'énergie',
'durable', 'développement',
'écosystème',
'nature', 'naturelle', 'naturelles',
'co2', 'gaz', 'serre',
'écologie', 'ecolo', 'écologique',
'resources'
)
results = list()
def load(self, data, key_field, score_field):
for item in data:
self.process(item[key_field], item[score_field])
def process(self, id, text):
occurences = dict()
words = re.findall(r'\w+', text)
for word in words:
word = word.lower()
if word in self.dictionary:
if not word in occurences:
occurences[word] = 1
else:
occurences[word] = occurences[word] + 1
if len(occurences) > 0:
score = reduce(lambda x, value: x * value, occurences.values(), 1)
if score > 1:
self.results.append({'id': id, 'score': score, 'words': occurences})
def dump(self, output):
with open(output,'w',newline='',encoding='utf-8-sig') as f:
w = csv.writer(f)
for result in self.results:
w.writerow([result['id'], result['score'], result['words']])
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Filter entries with scoring rules.')
parser.add_argument("--input", type=str, help="Input file")
parser.add_argument("--output", type=str, help="Output file")
parser.add_argument("--key", type=str, help="Field used as unique key")
parser.add_argument("--field", type=str, help="Field used to run the scoring")
args = parser.parse_args()
with open(args.input, 'r') as fd:
scoring = Scoring()
scoring.load(json.load(fd), args.key, args.field)
scoring.dump(args.output)
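A small in-memory example of the Scoring class above, separate from the CLI entry point (texts are illustrative; note that `results` is a class-level list, so it is shared across Scoring instances).
scoring = Scoring()
sample = [
    {"id": "doc-1", "text": "Le climat change, le climat urbain aussi, vers une énergie durable."},
    {"id": "doc-2", "text": "Rien à voir avec le sujet."},
]
scoring.load(sample, "id", "text")
for row in scoring.results:
    print(row["id"], row["score"], row["words"])  # doc-1 scores 2 (climat x2, énergie x1, durable x1)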
| 33.672131
| 86
| 0.55258
|
29a6975329ab47ff87894b4e760ffd5b6cff7683
| 4,270
|
py
|
Python
|
bitmovin_api_sdk/models/dash_mp4_drm_representation.py
|
jaythecaesarean/bitmovin-api-sdk-python
|
48166511fcb9082041c552ace55a9b66cc59b794
|
[
"MIT"
] | 11
|
2019-07-03T10:41:16.000Z
|
2022-02-25T21:48:06.000Z
|
bitmovin_api_sdk/models/dash_mp4_drm_representation.py
|
jaythecaesarean/bitmovin-api-sdk-python
|
48166511fcb9082041c552ace55a9b66cc59b794
|
[
"MIT"
] | 8
|
2019-11-23T00:01:25.000Z
|
2021-04-29T12:30:31.000Z
|
bitmovin_api_sdk/models/dash_mp4_drm_representation.py
|
jaythecaesarean/bitmovin-api-sdk-python
|
48166511fcb9082041c552ace55a9b66cc59b794
|
[
"MIT"
] | 13
|
2020-01-02T14:58:18.000Z
|
2022-03-26T12:10:30.000Z
|
# coding: utf-8
from enum import Enum
from six import string_types, iteritems
from bitmovin_api_sdk.common.poscheck import poscheck_model
from bitmovin_api_sdk.models.dash_mp4_representation import DashMp4Representation
from bitmovin_api_sdk.models.dash_on_demand_representation_type import DashOnDemandRepresentationType
import pprint
import six
class DashMp4DrmRepresentation(DashMp4Representation):
@poscheck_model
def __init__(self,
id_=None,
encoding_id=None,
muxing_id=None,
dependency_id=None,
file_path=None,
type_=None,
drm_id=None):
# type: (string_types, string_types, string_types, string_types, string_types, DashOnDemandRepresentationType, string_types) -> None
super(DashMp4DrmRepresentation, self).__init__(id_=id_, encoding_id=encoding_id, muxing_id=muxing_id, dependency_id=dependency_id, file_path=file_path, type_=type_)
self._drm_id = None
self.discriminator = None
if drm_id is not None:
self.drm_id = drm_id
@property
def openapi_types(self):
types = {}
if hasattr(super(DashMp4DrmRepresentation, self), 'openapi_types'):
types = getattr(super(DashMp4DrmRepresentation, self), 'openapi_types')
types.update({
'drm_id': 'string_types'
})
return types
@property
def attribute_map(self):
attributes = {}
if hasattr(super(DashMp4DrmRepresentation, self), 'attribute_map'):
attributes = getattr(super(DashMp4DrmRepresentation, self), 'attribute_map')
attributes.update({
'drm_id': 'drmId'
})
return attributes
@property
def drm_id(self):
# type: () -> string_types
"""Gets the drm_id of this DashMp4DrmRepresentation.
DRM Id (required)
:return: The drm_id of this DashMp4DrmRepresentation.
:rtype: string_types
"""
return self._drm_id
@drm_id.setter
def drm_id(self, drm_id):
# type: (string_types) -> None
"""Sets the drm_id of this DashMp4DrmRepresentation.
DRM Id (required)
:param drm_id: The drm_id of this DashMp4DrmRepresentation.
:type: string_types
"""
if drm_id is not None:
if not isinstance(drm_id, string_types):
raise TypeError("Invalid type for `drm_id`, type has to be `string_types`")
self._drm_id = drm_id
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
if hasattr(super(DashMp4DrmRepresentation, self), "to_dict"):
result = super(DashMp4DrmRepresentation, self).to_dict()
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if value is None:
continue
if isinstance(value, list):
if len(value) == 0:
continue
result[self.attribute_map.get(attr)] = [y.value if isinstance(y, Enum) else y for y in [x.to_dict() if hasattr(x, "to_dict") else x for x in value]]
elif hasattr(value, "to_dict"):
result[self.attribute_map.get(attr)] = value.to_dict()
elif isinstance(value, Enum):
result[self.attribute_map.get(attr)] = value.value
elif isinstance(value, dict):
result[self.attribute_map.get(attr)] = {k: (v.to_dict() if hasattr(v, "to_dict") else v) for (k, v) in value.items()}
else:
result[self.attribute_map.get(attr)] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, DashMp4DrmRepresentation):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
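An illustrative construction of the model above (all identifiers are placeholders, not real Bitmovin resources).
rep = DashMp4DrmRepresentation(
    encoding_id="<encoding-id>",
    muxing_id="<muxing-id>",
    file_path="output/video.mp4",
    drm_id="<drm-id>",
)
print(rep)            # pretty-printed via to_str()/to_dict()
print(rep.to_dict())  # keys follow the camelCase attribute_map, e.g. 'drmId'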
| 33.359375
| 172
| 0.61733
|
49cb9e673b8867dd67d6e6f24f2e32767a75594a
| 578
|
py
|
Python
|
forumsApp/migrations/0004_auto_20210307_1022.py
|
glen-s-abraham/Elearning-platform
|
3122cfcef9dd920fffb474da2a63c475fe08c27b
|
[
"MIT"
] | null | null | null |
forumsApp/migrations/0004_auto_20210307_1022.py
|
glen-s-abraham/Elearning-platform
|
3122cfcef9dd920fffb474da2a63c475fe08c27b
|
[
"MIT"
] | null | null | null |
forumsApp/migrations/0004_auto_20210307_1022.py
|
glen-s-abraham/Elearning-platform
|
3122cfcef9dd920fffb474da2a63c475fe08c27b
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.7 on 2021-03-07 04:52
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('forumsApp', '0003_replies_author'),
]
operations = [
migrations.AlterField(
model_name='replies',
name='author',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
]
| 26.272727
| 110
| 0.676471
|
4548ddd901b611f410ca83773b6937fe0eb90fda
| 8,363
|
py
|
Python
|
pytype/datatypes_test.py
|
ashwinprasadme/pytype
|
fed209c73aacfcab15efc33deef3b4016a67cfe5
|
[
"Apache-2.0"
] | 3,882
|
2015-03-22T12:17:15.000Z
|
2022-03-31T17:13:20.000Z
|
pytype/datatypes_test.py
|
ashwinprasadme/pytype
|
fed209c73aacfcab15efc33deef3b4016a67cfe5
|
[
"Apache-2.0"
] | 638
|
2015-11-03T06:34:44.000Z
|
2022-03-31T23:41:48.000Z
|
pytype/datatypes_test.py
|
ashwinprasadme/pytype
|
fed209c73aacfcab15efc33deef3b4016a67cfe5
|
[
"Apache-2.0"
] | 301
|
2015-08-14T10:21:17.000Z
|
2022-03-08T11:03:40.000Z
|
"""Tests for datatypes.py."""
import argparse
from pytype import datatypes
from pytype.typegraph import cfg
import unittest
class AccessTrackingDictTest(unittest.TestCase):
"""Test AccessTrackingDict."""
def setUp(self):
super().setUp()
self.d = datatypes.AccessTrackingDict({"a": 1, "b": 2})
def test_get(self):
v = self.d["a"]
item, = self.d.accessed_subset.items()
self.assertEqual(item, ("a", 1))
self.assertEqual(v, 1)
def test_set(self):
self.d["a"] = 3
item, = self.d.accessed_subset.items()
self.assertEqual(item, ("a", 1))
self.assertEqual(self.d["a"], 3)
def test_set_new(self):
self.d["c"] = 3
self.assertFalse(self.d.accessed_subset)
def test_del(self):
del self.d["a"]
item, = self.d.accessed_subset.items()
self.assertEqual(item, ("a", 1))
with self.assertRaises(KeyError):
_ = self.d["a"]
def test_repeat_access(self):
self.d["a"] = 3
v = self.d["a"]
item, = self.d.accessed_subset.items()
self.assertEqual(item, ("a", 1))
self.assertEqual(v, 3)
class DatatypesTest(unittest.TestCase):
"""Test datatypes."""
def setUp(self):
super().setUp()
self.prog = cfg.Program()
def test_monitor_dict(self):
d = datatypes.MonitorDict()
changestamp = d.changestamp
var = self.prog.NewVariable()
d["key"] = var
self.assertGreater(d.changestamp, changestamp)
changestamp = d.changestamp
var.AddBinding("data")
self.assertGreater(d.changestamp, changestamp)
changestamp = d.changestamp
var.AddBinding("data") # No change because this is duplicate data
self.assertEqual(d.changestamp, changestamp)
changestamp = d.changestamp
def test_aliasing_dict(self):
d = datatypes.AliasingDict()
# To avoid surprising behavior, we require desired dict functionality to be
# explicitly overridden
with self.assertRaises(NotImplementedError):
d.viewitems()
d.add_alias("alias", "name")
self.assertNotIn("alias", d)
self.assertNotIn("name", d)
var1 = self.prog.NewVariable()
d["alias"] = var1
self.assertIn("name", d)
self.assertIn("alias", d)
self.assertEqual(var1, d["name"])
self.assertEqual(d["name"], d["alias"])
self.assertEqual(d["alias"], d.get("alias"))
self.assertEqual(d["name"], d.get("name"))
self.assertIsNone(d.get("other_name"))
var2 = self.prog.NewVariable()
d["name"] = var2
self.assertEqual(var2, d["name"])
self.assertEqual(d["name"], d["alias"])
def test_aliasing_dict_realiasing(self):
d = datatypes.AliasingDict()
d.add_alias("alias1", "name")
d.add_alias("alias2", "name")
d.add_alias("alias1", "name")
d.add_alias("alias2", "alias1")
d.add_alias("alias1", "alias2")
# Check that the name, alias1, and alias2 still all refer to the same key
var = self.prog.NewVariable()
d["alias1"] = var
self.assertEqual(len(d), 1)
self.assertEqual(var, d["name"])
self.assertEqual(var, d["alias1"])
self.assertEqual(var, d["alias2"])
def test_nonempty_aliasing_dict_realiasing(self):
d = datatypes.AliasingDict()
d.add_alias("alias", "name")
d["name"] = "hello"
d["name2"] = "world"
self.assertEqual("hello", d["alias"])
self.assertEqual("hello", d["name"])
self.assertEqual("world", d["name2"])
d.add_alias("name", "name2", op=lambda x, y: x + " " + y)
self.assertEqual("hello world", d["name"])
self.assertEqual("hello world", d["name2"])
self.assertEqual("hello world", d["alias"])
def test_aliasing_dict_transitive(self):
d = datatypes.AliasingDict()
d.add_alias("alias1", "name")
d.add_alias("alias2", "alias1")
d["name"] = self.prog.NewVariable()
self.assertEqual(len(d), 1)
self.assertEqual(d["name"], d["alias1"])
self.assertEqual(d["alias1"], d["alias2"])
def test_aliasing_dict_value_move(self):
d = datatypes.AliasingDict()
v = self.prog.NewVariable()
d["alias"] = v
d.add_alias("alias", "name")
self.assertEqual(d["name"], v)
self.assertEqual(d["alias"], d["name"])
def test_aliasing_dict_transitive_value_move(self):
d = datatypes.AliasingDict()
d.add_alias("alias2", "name")
v = self.prog.NewVariable()
d["alias1"] = v
d.add_alias("alias1", "alias2")
self.assertEqual(d["name"], v)
self.assertEqual(d["alias2"], d["name"])
self.assertEqual(d["alias1"], d["alias2"])
def test_aliasing_dict_with_union_find(self):
d = datatypes.AliasingDict()
d["alias1"] = "1"
d["alias3"] = "2"
d.add_alias("alias1", "alias2")
d.add_alias("alias4", "alias3")
self.assertEqual(d["alias1"], "1")
self.assertEqual(d["alias2"], "1")
self.assertEqual(d["alias3"], "2")
self.assertEqual(d["alias4"], "2")
d.add_alias("alias1", "alias3", str.__add__)
self.assertEqual(d["alias1"], "12")
self.assertEqual(d["alias2"], "12")
self.assertEqual(d["alias3"], "12")
self.assertEqual(d["alias4"], "12")
d["alias5"] = "34"
d.add_alias("alias5", "alias6")
d.add_alias("alias6", "alias7")
d.add_alias("alias7", "alias8")
self.assertEqual(d["alias5"], "34")
self.assertEqual(d["alias6"], "34")
self.assertEqual(d["alias7"], "34")
self.assertEqual(d["alias8"], "34")
d.add_alias("alias1", "alias8", str.__add__)
for i in range(1, 9):
self.assertEqual(d["alias" + str(i)], "1234")
def test_aliasing_dict_get(self):
d = datatypes.AliasingDict()
d["alias1"] = "1"
d.add_alias("alias1", "alias2")
self.assertEqual(d.get("alias1"), "1")
self.assertEqual(d.get("alias2"), "1")
self.assertEqual(d.get("alias3", "2"), "2")
self.assertIsNone(d.get("alias3"))
def test_add_alias_for_aliasing_monitor_dict(self):
def merge_value(v0, v1, name):
if v0 == v1:
return v0
raise datatypes.AliasingDictConflictError(name)
d = datatypes.AliasingMonitorDict()
d["alias1"] = "1"
d["alias2"] = "1"
self.assertEqual(len(d), 2)
# Merge with same values
d.add_alias("alias1", "alias2", merge_value)
self.assertEqual(len(d), 1)
self.assertEqual(d["alias1"], "1")
self.assertEqual(d["alias2"], "1")
# Merge with different values
d["alias3"] = "2"
with self.assertRaises(datatypes.AliasingDictConflictError):
d.add_alias("alias1", "alias3", merge_value)
# Neither of names has value
d.add_alias("alias5", "alias6", merge_value)
# The first name is in dict
d.add_alias("alias3", "alias4", merge_value)
# The second name is in dict
d.add_alias("alias5", "alias3", merge_value)
self.assertEqual(d["alias3"], "2")
self.assertEqual(d["alias4"], "2")
self.assertEqual(d["alias5"], "2")
self.assertEqual(d["alias6"], "2")
def test_aliasing_monitor_dict_merge(self):
def merge_value(v0, v1, name):
if v0 == v1:
return v0
raise datatypes.AliasingDictConflictError(name)
d1 = datatypes.AliasingMonitorDict()
d1["alias1"] = "1"
d1.add_alias("alias1", "alias2", merge_value)
d2 = datatypes.AliasingMonitorDict()
d2["alias3"] = "1"
d2.add_alias("alias3", "alias4", merge_value)
d1.merge_from(d2, merge_value)
self.assertEqual(d1["alias1"], "1")
self.assertEqual(d1["alias2"], "1")
self.assertEqual(d1["alias3"], "1")
self.assertEqual(d1["alias4"], "1")
self.assertEqual(d1.same_name("alias3", "alias4"), True)
d4 = datatypes.AliasingMonitorDict()
d4["alias2"] = 3
with self.assertRaises(datatypes.AliasingDictConflictError):
d1.merge_from(d4, merge_value)
d3 = datatypes.AliasingMonitorDict()
d3.add_alias("alias2", "alias5", merge_value)
d3["alias5"] = 3
with self.assertRaises(datatypes.AliasingDictConflictError):
d1.merge_from(d3, merge_value)
class ParserWrapperTest(unittest.TestCase):
"""Test parser wrapper."""
def test_group(self):
parser = argparse.ArgumentParser()
wrapper = datatypes.ParserWrapper(parser)
wrapper.add_argument("--foo", dest="foo")
group = wrapper.add_argument_group("test1")
group.add_argument("--bar", dest="bar")
subgroup = wrapper.add_argument_group("test2")
subgroup.add_argument("--baz", dest="baz")
self.assertSetEqual(set(wrapper.actions), {"foo", "bar", "baz"})
if __name__ == "__main__":
unittest.main()
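For reference, a condensed sketch of the AliasingDict behaviour these tests exercise (requires pytype to be installed).
from pytype import datatypes
d = datatypes.AliasingDict()
d.add_alias("alias", "name")   # both keys now resolve to the same underlying slot
d["alias"] = 1
print(d["name"])               # 1
print(len(d))                  # 1 -- a single entry backs both names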
| 31.205224
| 79
| 0.643908
|
94f7d49be725a3cda790db8a2c177c25cac492e0
| 24,745
|
py
|
Python
|
server/paramiko/client.py
|
ludojmj/treelud
|
37dda35000786f359c8ddb3674572f49b8d78c48
|
[
"MIT"
] | 3
|
2015-03-17T09:30:30.000Z
|
2018-05-05T06:27:33.000Z
|
server/paramiko/client.py
|
ludojmj/treelud
|
37dda35000786f359c8ddb3674572f49b8d78c48
|
[
"MIT"
] | null | null | null |
server/paramiko/client.py
|
ludojmj/treelud
|
37dda35000786f359c8ddb3674572f49b8d78c48
|
[
"MIT"
] | null | null | null |
# Copyright (C) 2006-2007 Robey Pointer <robeypointer@gmail.com>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
"""
SSH client & key policies
"""
from binascii import hexlify
import getpass
import os
import socket
import warnings
from paramiko.agent import Agent
from paramiko.common import DEBUG
from paramiko.config import SSH_PORT
from paramiko.dsskey import DSSKey
from paramiko.ecdsakey import ECDSAKey
from paramiko.hostkeys import HostKeys
from paramiko.py3compat import string_types
from paramiko.resource import ResourceManager
from paramiko.rsakey import RSAKey
from paramiko.ssh_exception import SSHException, BadHostKeyException
from paramiko.transport import Transport
from paramiko.util import retry_on_signal, ClosingContextManager
class SSHClient (ClosingContextManager):
"""
A high-level representation of a session with an SSH server. This class
wraps `.Transport`, `.Channel`, and `.SFTPClient` to take care of most
aspects of authenticating and opening channels. A typical use case is::
client = SSHClient()
client.load_system_host_keys()
client.connect('ssh.example.com')
stdin, stdout, stderr = client.exec_command('ls -l')
You may pass in explicit overrides for authentication and server host key
checking. The default mechanism is to try to use local key files or an
SSH agent (if one is running).
Instances of this class may be used as context managers.
.. versionadded:: 1.6
"""
def __init__(self):
"""
Create a new SSHClient.
"""
self._system_host_keys = HostKeys()
self._host_keys = HostKeys()
self._host_keys_filename = None
self._log_channel = None
self._policy = RejectPolicy()
self._transport = None
self._agent = None
def load_system_host_keys(self, filename=None):
"""
Load host keys from a system (read-only) file. Host keys read with
this method will not be saved back by `save_host_keys`.
This method can be called multiple times. Each new set of host keys
will be merged with the existing set (new replacing old if there are
conflicts).
If ``filename`` is left as ``None``, an attempt will be made to read
keys from the user's local "known hosts" file, as used by OpenSSH,
and no exception will be raised if the file can't be read. This is
probably only useful on posix.
:param str filename: the filename to read, or ``None``
:raises IOError:
if a filename was provided and the file could not be read
"""
if filename is None:
# try the user's .ssh key file, and mask exceptions
filename = os.path.expanduser('~/.ssh/known_hosts')
try:
self._system_host_keys.load(filename)
except IOError:
pass
return
self._system_host_keys.load(filename)
def load_host_keys(self, filename):
"""
Load host keys from a local host-key file. Host keys read with this
method will be checked after keys loaded via `load_system_host_keys`,
but will be saved back by `save_host_keys` (so they can be modified).
The missing host key policy `.AutoAddPolicy` adds keys to this set and
saves them, when connecting to a previously-unknown server.
This method can be called multiple times. Each new set of host keys
will be merged with the existing set (new replacing old if there are
conflicts). When automatically saving, the last hostname is used.
:param str filename: the filename to read
:raises IOError: if the filename could not be read
"""
self._host_keys_filename = filename
self._host_keys.load(filename)
def save_host_keys(self, filename):
"""
Save the host keys back to a file. Only the host keys loaded with
`load_host_keys` (plus any added directly) will be saved -- not any
host keys loaded with `load_system_host_keys`.
:param str filename: the filename to save to
:raises IOError: if the file could not be written
"""
# update local host keys from file (in case other SSH clients
        # have written to the known_hosts file meanwhile).
if self._host_keys_filename is not None:
self.load_host_keys(self._host_keys_filename)
with open(filename, 'w') as f:
for hostname, keys in self._host_keys.items():
for keytype, key in keys.items():
f.write('%s %s %s\n' % (hostname, keytype, key.get_base64()))
def get_host_keys(self):
"""
Get the local `.HostKeys` object. This can be used to examine the
local host keys or change them.
:return: the local host keys as a `.HostKeys` object.
"""
return self._host_keys
def set_log_channel(self, name):
"""
Set the channel for logging. The default is ``"paramiko.transport"``
but it can be set to anything you want.
:param str name: new channel name for logging
"""
self._log_channel = name
def set_missing_host_key_policy(self, policy):
"""
Set the policy to use when connecting to a server that doesn't have a
host key in either the system or local `.HostKeys` objects. The
default policy is to reject all unknown servers (using `.RejectPolicy`).
You may substitute `.AutoAddPolicy` or write your own policy class.
:param .MissingHostKeyPolicy policy:
the policy to use when receiving a host key from a
previously-unknown server
"""
self._policy = policy
def connect(self, hostname, port=SSH_PORT, username=None, password=None, pkey=None,
key_filename=None, timeout=None, allow_agent=True, look_for_keys=True,
compress=False, sock=None, gss_auth=False, gss_kex=False,
gss_deleg_creds=True, gss_host=None, banner_timeout=None):
"""
Connect to an SSH server and authenticate to it. The server's host key
is checked against the system host keys (see `load_system_host_keys`)
and any local host keys (`load_host_keys`). If the server's hostname
is not found in either set of host keys, the missing host key policy
is used (see `set_missing_host_key_policy`). The default policy is
to reject the key and raise an `.SSHException`.
Authentication is attempted in the following order of priority:
- The ``pkey`` or ``key_filename`` passed in (if any)
- Any key we can find through an SSH agent
- Any "id_rsa", "id_dsa" or "id_ecdsa" key discoverable in
``~/.ssh/``
- Plain username/password auth, if a password was given
If a private key requires a password to unlock it, and a password is
passed in, that password will be used to attempt to unlock the key.
:param str hostname: the server to connect to
:param int port: the server port to connect to
:param str username:
the username to authenticate as (defaults to the current local
username)
:param str password:
a password to use for authentication or for unlocking a private key
:param .PKey pkey: an optional private key to use for authentication
:param str key_filename:
the filename, or list of filenames, of optional private key(s) to
try for authentication
:param float timeout: an optional timeout (in seconds) for the TCP connect
:param bool allow_agent: set to False to disable connecting to the SSH agent
:param bool look_for_keys:
set to False to disable searching for discoverable private key
files in ``~/.ssh/``
:param bool compress: set to True to turn on compression
:param socket sock:
an open socket or socket-like object (such as a `.Channel`) to use
for communication to the target host
:param bool gss_auth: ``True`` if you want to use GSS-API authentication
:param bool gss_kex: Perform GSS-API Key Exchange and user authentication
:param bool gss_deleg_creds: Delegate GSS-API client credentials or not
        :param str gss_host: The target's name in the Kerberos database. default: hostname
:param float banner_timeout: an optional timeout (in seconds) to wait
for the SSH banner to be presented.
:raises BadHostKeyException: if the server's host key could not be
verified
:raises AuthenticationException: if authentication failed
:raises SSHException: if there was any other error connecting or
establishing an SSH session
:raises socket.error: if a socket error occurred while connecting
.. versionchanged:: 1.15
Added the ``banner_timeout``, ``gss_auth``, ``gss_kex``,
``gss_deleg_creds`` and ``gss_host`` arguments.
"""
if not sock:
for (family, socktype, proto, canonname, sockaddr) in socket.getaddrinfo(hostname, port, socket.AF_UNSPEC, socket.SOCK_STREAM):
if socktype == socket.SOCK_STREAM:
af = family
addr = sockaddr
break
else:
# some OS like AIX don't indicate SOCK_STREAM support, so just guess. :(
                # take the first result; unpacking the whole list would fail
                af, _, _, _, addr = socket.getaddrinfo(hostname, port, socket.AF_UNSPEC, socket.SOCK_STREAM)[0]
sock = socket.socket(af, socket.SOCK_STREAM)
if timeout is not None:
try:
sock.settimeout(timeout)
except:
pass
retry_on_signal(lambda: sock.connect(addr))
t = self._transport = Transport(sock, gss_kex=gss_kex, gss_deleg_creds=gss_deleg_creds)
t.use_compression(compress=compress)
if gss_kex and gss_host is None:
t.set_gss_host(hostname)
elif gss_kex and gss_host is not None:
t.set_gss_host(gss_host)
else:
pass
if self._log_channel is not None:
t.set_log_channel(self._log_channel)
if banner_timeout is not None:
t.banner_timeout = banner_timeout
t.start_client()
ResourceManager.register(self, t)
server_key = t.get_remote_server_key()
keytype = server_key.get_name()
if port == SSH_PORT:
server_hostkey_name = hostname
else:
server_hostkey_name = "[%s]:%d" % (hostname, port)
# If GSS-API Key Exchange is performed we are not required to check the
# host key, because the host is authenticated via GSS-API / SSPI as
# well as our client.
if not self._transport.use_gss_kex:
our_server_key = self._system_host_keys.get(server_hostkey_name,
{}).get(keytype, None)
if our_server_key is None:
our_server_key = self._host_keys.get(server_hostkey_name,
{}).get(keytype, None)
if our_server_key is None:
# will raise exception if the key is rejected; let that fall out
self._policy.missing_host_key(self, server_hostkey_name,
server_key)
# if the callback returns, assume the key is ok
our_server_key = server_key
if server_key != our_server_key:
raise BadHostKeyException(hostname, server_key, our_server_key)
if username is None:
username = getpass.getuser()
if key_filename is None:
key_filenames = []
elif isinstance(key_filename, string_types):
key_filenames = [key_filename]
else:
key_filenames = key_filename
if gss_host is None:
gss_host = hostname
self._auth(username, password, pkey, key_filenames, allow_agent,
look_for_keys, gss_auth, gss_kex, gss_deleg_creds, gss_host)
def close(self):
"""
Close this SSHClient and its underlying `.Transport`.
"""
if self._transport is None:
return
self._transport.close()
self._transport = None
if self._agent is not None:
self._agent.close()
self._agent = None
def exec_command(self, command, bufsize=-1, timeout=None, get_pty=False):
"""
Execute a command on the SSH server. A new `.Channel` is opened and
the requested command is executed. The command's input and output
streams are returned as Python ``file``-like objects representing
stdin, stdout, and stderr.
:param str command: the command to execute
:param int bufsize:
interpreted the same way as by the built-in ``file()`` function in
Python
:param int timeout:
            set the command's channel timeout. See `Channel.settimeout`.
:return:
the stdin, stdout, and stderr of the executing command, as a
3-tuple
:raises SSHException: if the server fails to execute the command
"""
chan = self._transport.open_session()
if get_pty:
chan.get_pty()
chan.settimeout(timeout)
chan.exec_command(command)
stdin = chan.makefile('wb', bufsize)
stdout = chan.makefile('r', bufsize)
stderr = chan.makefile_stderr('r', bufsize)
return stdin, stdout, stderr
def invoke_shell(self, term='vt100', width=80, height=24, width_pixels=0,
height_pixels=0):
"""
Start an interactive shell session on the SSH server. A new `.Channel`
is opened and connected to a pseudo-terminal using the requested
terminal type and size.
:param str term:
the terminal type to emulate (for example, ``"vt100"``)
:param int width: the width (in characters) of the terminal window
:param int height: the height (in characters) of the terminal window
:param int width_pixels: the width (in pixels) of the terminal window
:param int height_pixels: the height (in pixels) of the terminal window
:return: a new `.Channel` connected to the remote shell
:raises SSHException: if the server fails to invoke a shell
"""
chan = self._transport.open_session()
chan.get_pty(term, width, height, width_pixels, height_pixels)
chan.invoke_shell()
return chan
def open_sftp(self):
"""
Open an SFTP session on the SSH server.
:return: a new `.SFTPClient` session object
"""
return self._transport.open_sftp_client()
def get_transport(self):
"""
Return the underlying `.Transport` object for this SSH connection.
This can be used to perform lower-level tasks, like opening specific
kinds of channels.
:return: the `.Transport` for this connection
"""
return self._transport
def _auth(self, username, password, pkey, key_filenames, allow_agent,
look_for_keys, gss_auth, gss_kex, gss_deleg_creds, gss_host):
"""
Try, in order:
- The key passed in, if one was passed in.
- Any key we can find through an SSH agent (if allowed).
- Any "id_rsa", "id_dsa" or "id_ecdsa" key discoverable in ~/.ssh/
(if allowed).
- Plain username/password auth, if a password was given.
(The password might be needed to unlock a private key, or for
two-factor authentication [for which it is required].)
"""
saved_exception = None
two_factor = False
allowed_types = []
# If GSS-API support and GSS-PI Key Exchange was performed, we attempt
# authentication with gssapi-keyex.
if gss_kex and self._transport.gss_kex_used:
try:
self._transport.auth_gssapi_keyex(username)
return
except Exception as e:
saved_exception = e
# Try GSS-API authentication (gssapi-with-mic) only if GSS-API Key
# Exchange is not performed, because if we use GSS-API for the key
# exchange, there is already a fully established GSS-API context, so
# why should we do that again?
if gss_auth:
try:
self._transport.auth_gssapi_with_mic(username, gss_host,
gss_deleg_creds)
return
except Exception as e:
saved_exception = e
if pkey is not None:
try:
self._log(DEBUG, 'Trying SSH key %s' % hexlify(pkey.get_fingerprint()))
allowed_types = self._transport.auth_publickey(username, pkey)
two_factor = (allowed_types == ['password'])
if not two_factor:
return
except SSHException as e:
saved_exception = e
if not two_factor:
for key_filename in key_filenames:
for pkey_class in (RSAKey, DSSKey, ECDSAKey):
try:
key = pkey_class.from_private_key_file(key_filename, password)
self._log(DEBUG, 'Trying key %s from %s' % (hexlify(key.get_fingerprint()), key_filename))
                    allowed_types = self._transport.auth_publickey(username, key)
two_factor = (allowed_types == ['password'])
if not two_factor:
return
break
except SSHException as e:
saved_exception = e
if not two_factor and allow_agent:
if self._agent is None:
self._agent = Agent()
for key in self._agent.get_keys():
try:
self._log(DEBUG, 'Trying SSH agent key %s' % hexlify(key.get_fingerprint()))
# for 2-factor auth a successfully auth'd key will result in ['password']
allowed_types = self._transport.auth_publickey(username, key)
two_factor = (allowed_types == ['password'])
if not two_factor:
return
break
except SSHException as e:
saved_exception = e
if not two_factor:
keyfiles = []
rsa_key = os.path.expanduser('~/.ssh/id_rsa')
dsa_key = os.path.expanduser('~/.ssh/id_dsa')
ecdsa_key = os.path.expanduser('~/.ssh/id_ecdsa')
if os.path.isfile(rsa_key):
keyfiles.append((RSAKey, rsa_key))
if os.path.isfile(dsa_key):
keyfiles.append((DSSKey, dsa_key))
if os.path.isfile(ecdsa_key):
keyfiles.append((ECDSAKey, ecdsa_key))
# look in ~/ssh/ for windows users:
rsa_key = os.path.expanduser('~/ssh/id_rsa')
dsa_key = os.path.expanduser('~/ssh/id_dsa')
ecdsa_key = os.path.expanduser('~/ssh/id_ecdsa')
if os.path.isfile(rsa_key):
keyfiles.append((RSAKey, rsa_key))
if os.path.isfile(dsa_key):
keyfiles.append((DSSKey, dsa_key))
if os.path.isfile(ecdsa_key):
keyfiles.append((ECDSAKey, ecdsa_key))
if not look_for_keys:
keyfiles = []
for pkey_class, filename in keyfiles:
try:
key = pkey_class.from_private_key_file(filename, password)
self._log(DEBUG, 'Trying discovered key %s in %s' % (hexlify(key.get_fingerprint()), filename))
# for 2-factor auth a successfully auth'd key will result in ['password']
allowed_types = self._transport.auth_publickey(username, key)
two_factor = (allowed_types == ['password'])
if not two_factor:
return
break
except (SSHException, IOError) as e:
saved_exception = e
if password is not None:
try:
self._transport.auth_password(username, password)
return
except SSHException as e:
saved_exception = e
elif two_factor:
raise SSHException('Two-factor authentication requires a password')
# if we got an auth-failed exception earlier, re-raise it
if saved_exception is not None:
raise saved_exception
raise SSHException('No authentication methods available')
def _log(self, level, msg):
self._transport._log(level, msg)
class MissingHostKeyPolicy (object):
"""
Interface for defining the policy that `.SSHClient` should use when the
SSH server's hostname is not in either the system host keys or the
application's keys. Pre-made classes implement policies for automatically
adding the key to the application's `.HostKeys` object (`.AutoAddPolicy`),
and for automatically rejecting the key (`.RejectPolicy`).
This function may be used to ask the user to verify the key, for example.
"""
def missing_host_key(self, client, hostname, key):
"""
Called when an `.SSHClient` receives a server key for a server that
isn't in either the system or local `.HostKeys` object. To accept
        the key, simply return. To reject, raise an exception (which will
be passed to the calling application).
"""
pass
class AutoAddPolicy (MissingHostKeyPolicy):
"""
Policy for automatically adding the hostname and new host key to the
local `.HostKeys` object, and saving it. This is used by `.SSHClient`.
"""
def missing_host_key(self, client, hostname, key):
client._host_keys.add(hostname, key.get_name(), key)
if client._host_keys_filename is not None:
client.save_host_keys(client._host_keys_filename)
client._log(DEBUG, 'Adding %s host key for %s: %s' %
(key.get_name(), hostname, hexlify(key.get_fingerprint())))
class RejectPolicy (MissingHostKeyPolicy):
"""
Policy for automatically rejecting the unknown hostname & key. This is
used by `.SSHClient`.
"""
def missing_host_key(self, client, hostname, key):
client._log(DEBUG, 'Rejecting %s host key for %s: %s' %
(key.get_name(), hostname, hexlify(key.get_fingerprint())))
raise SSHException('Server %r not found in known_hosts' % hostname)
class WarningPolicy (MissingHostKeyPolicy):
"""
Policy for logging a Python-style warning for an unknown host key, but
accepting it. This is used by `.SSHClient`.
"""
def missing_host_key(self, client, hostname, key):
warnings.warn('Unknown %s host key for %s: %s' %
(key.get_name(), hostname, hexlify(key.get_fingerprint())))
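A hedged end-to-end sketch of the client above (hostname, username and password are placeholders).
if __name__ == '__main__':
    client = SSHClient()
    client.load_system_host_keys()
    client.set_missing_host_key_policy(AutoAddPolicy())
    client.connect('ssh.example.com', username='alice', password='<password>')
    stdin, stdout, stderr = client.exec_command('ls -l')
    print(stdout.read().decode())
    client.close()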
| 42.590361
| 140
| 0.601212
|
8f6eb289c976aec2e338110b7a6421beb4807d89
| 15,133
|
py
|
Python
|
tests/test_instrumentation.py
|
trallnag/prometheus-fastapi-exporter
|
673b922e97a4e943a0e4d23db7767015e4c751dd
|
[
"MIT"
] | 3
|
2020-07-13T15:29:38.000Z
|
2020-07-15T20:55:41.000Z
|
tests/test_instrumentation.py
|
trallnag/prometheus-fastapi-exporter
|
673b922e97a4e943a0e4d23db7767015e4c751dd
|
[
"MIT"
] | null | null | null |
tests/test_instrumentation.py
|
trallnag/prometheus-fastapi-exporter
|
673b922e97a4e943a0e4d23db7767015e4c751dd
|
[
"MIT"
] | null | null | null |
import asyncio
import os
from typing import Any, Dict, Optional
from fastapi import FastAPI, HTTPException
from prometheus_client import CONTENT_TYPE_LATEST, REGISTRY, generate_latest
from requests import Response as TestClientResponse
from starlette.responses import Response
from starlette.testclient import TestClient
from prometheus_fastapi_instrumentator import Instrumentator, metrics
setattr(TestClientResponse, "__test__", False)
# ------------------------------------------------------------------------------
# Setup
CUSTOM_METRICS = ["http_request_duration_seconds"]
def create_app() -> FastAPI:
app = FastAPI()
# Unregister all collectors.
collectors = list(REGISTRY._collector_to_names.keys())
print(f"before unregister collectors={collectors}")
for collector in collectors:
REGISTRY.unregister(collector)
print(f"after unregister collectors={list(REGISTRY._collector_to_names.keys())}")
# Import default collectors.
from prometheus_client import gc_collector, platform_collector, process_collector
# Re-register default collectors.
process_collector.ProcessCollector()
platform_collector.PlatformCollector()
gc_collector.GCCollector()
print(f"after re-register collectors={list(REGISTRY._collector_to_names.keys())}")
@app.get("/")
def read_root():
return "Hello World!"
@app.get("/sleep")
async def sleep(seconds: float):
await asyncio.sleep(seconds)
return f"I have slept for {seconds}s"
@app.get("/always_error")
def read_always_error():
raise HTTPException(status_code=404, detail="Not really error")
@app.get("/ignore")
def read_ignore():
return "Should be ignored"
@app.get("/items/{item_id}")
def read_item(item_id: int, q: Optional[str] = None):
return {"item_id": item_id, "q": q}
@app.get("/just_another_endpoint")
def read_just_another_endpoint():
return "Green is my pepper"
@app.post("/items")
def create_item(item: Dict[Any, Any]):
return None
return app
def expose_metrics(app: FastAPI):
if "prometheus_multiproc_dir" in os.environ:
pmd = os.environ["prometheus_multiproc_dir"]
print(f"Env var prometheus_multiproc_dir='{pmd}' detected.")
if os.path.isdir(pmd):
print(f"Env var prometheus_multiproc_dir='{pmd}' is a dir.")
from prometheus_client import CollectorRegistry, multiprocess
registry = CollectorRegistry()
multiprocess.MultiProcessCollector(registry)
else:
raise ValueError(f"Env var prometheus_multiproc_dir='{pmd}' not a directory.")
else:
registry = REGISTRY
@app.get("/metrics")
def metrics():
return Response(generate_latest(registry), media_type=CONTENT_TYPE_LATEST)
return registry
def get_response(client: TestClient, path: str) -> TestClientResponse:
response = client.get(path)
print(f"\nResponse path='{path}' status='{response.status_code}':\n")
for line in response.content.split(b"\n"):
print(line.decode())
return response
def assert_is_not_multiprocess(response: TestClientResponse) -> None:
assert response.status_code == 200
assert b"Multiprocess" not in response.content
assert b"# HELP process_cpu_seconds_total" in response.content
def assert_request_count(
expected: float,
name: str = "http_request_duration_seconds_count",
handler: str = "/",
method: str = "GET",
status: str = "2xx",
) -> None:
result = REGISTRY.get_sample_value(
name, {"handler": handler, "method": method, "status": status}
)
print(
(
f"{name} handler={handler} method={method} status={status} "
f"result={result} expected={expected}"
)
)
assert result == expected
assert result + 1.0 != expected
# ------------------------------------------------------------------------------
# Tests
def test_app():
app = create_app()
client = TestClient(app)
response = get_response(client, "/")
assert response.status_code == 200
assert b"Hello World!" in response.content
response = get_response(client, "/always_error")
assert response.status_code == 404
assert b"Not really error" in response.content
response = get_response(client, "/items/678?q=43243")
assert response.status_code == 200
assert b"678" in response.content
response = get_response(client, "/items/hallo")
assert response.status_code == 422
assert b"value is not a valid integer" in response.content
response = get_response(client, "/just_another_endpoint")
assert response.status_code == 200
assert b"Green" in response.content
def test_metrics_endpoint_availability():
app = create_app()
Instrumentator(excluded_handlers=["/metrics"]).add(metrics.latency()).instrument(app)
expose_metrics(app)
client = TestClient(app)
get_response(client, "/")
get_response(client, "/")
response = get_response(client, "/metrics")
assert_is_not_multiprocess(response)
assert_request_count(2)
# ------------------------------------------------------------------------------
# Test gzip
def test_gzip_accepted():
app = create_app()
Instrumentator().instrument(app).expose(app, should_gzip=True)
client = TestClient(app)
get_response(client, "/")
get_response(client, "/")
response = get_response(client, "/metrics")
assert response.headers["Content-Encoding"] == "gzip"
assert int(response.headers["Content-Length"]) <= 2000
def test_gzip_not_accepted():
app = create_app()
Instrumentator().instrument(app).expose(app, should_gzip=False)
client = TestClient(app)
get_response(client, "/")
get_response(client, "/")
response = get_response(client, "/metrics")
assert response.headers.get("Content-Encoding") is None
assert int(response.headers["Content-Length"]) >= 2000
# ------------------------------------------------------------------------------
# Test metric name
def test_default_metric_name():
app = create_app()
Instrumentator(excluded_handlers=["/metrics"]).add(metrics.latency()).instrument(app)
expose_metrics(app)
client = TestClient(app)
get_response(client, "/")
response = get_response(client, "/metrics")
assert_is_not_multiprocess(response)
assert_request_count(1)
assert b"http_request_duration_seconds" in response.content
def test_default_without_add():
app = create_app()
Instrumentator(excluded_handlers=["/metrics"]).add(metrics.latency()).instrument(app)
expose_metrics(app)
client = TestClient(app)
get_response(client, "/")
response = get_response(client, "/metrics")
assert_is_not_multiprocess(response)
assert_request_count(1)
assert b"http_request_duration_seconds" in response.content
def test_custom_metric_name():
app = create_app()
Instrumentator(excluded_handlers=["/metrics"]).add(
metrics.latency(metric_name="fastapi_latency")
).instrument(app)
expose_metrics(app)
client = TestClient(app)
get_response(client, "/")
response = get_response(client, "/metrics")
assert_is_not_multiprocess(response)
assert_request_count(1, name="fastapi_latency_count")
assert b"fastapi_latency" in response.content
assert b"http_request_duration_seconds" not in response.content
# ------------------------------------------------------------------------------
# Test grouping of status codes.
def test_grouped_status_codes():
app = create_app()
Instrumentator(excluded_handlers=["/metrics"]).add(metrics.latency()).instrument(app)
expose_metrics(app)
client = TestClient(app)
get_response(client, "/")
response = get_response(client, "/metrics")
assert_is_not_multiprocess(response)
assert_request_count(1)
assert b'status="2xx"' in response.content
assert b'status="200"' not in response.content
def test_ungrouped_status_codes():
app = create_app()
Instrumentator(should_group_status_codes=False).add(metrics.latency()).instrument(app)
expose_metrics(app)
client = TestClient(app)
get_response(client, "/")
response = get_response(client, "/metrics")
assert_is_not_multiprocess(response)
assert_request_count(1, status="200")
assert b'status="2xx"' not in response.content
assert b'status="200"' in response.content
# ------------------------------------------------------------------------------
# Test handling of templates / untemplated.
def test_ignore_untemplated():
app = create_app()
Instrumentator(should_ignore_untemplated=True).add(metrics.latency()).instrument(app)
expose_metrics(app)
client = TestClient(app)
get_response(client, "/")
get_response(client, "/items/678?q=43243")
get_response(client, "/does_not_exist")
response = get_response(client, "/metrics")
assert_is_not_multiprocess(response)
assert_request_count(1)
assert b'handler="/does_not_exist"' not in response.content
assert b'handler="none"' not in response.content
def test_dont_ignore_untemplated_ungrouped():
app = create_app()
Instrumentator(should_ignore_untemplated=False, should_group_untemplated=False).add(
metrics.latency()
).instrument(app)
expose_metrics(app)
client = TestClient(app)
get_response(client, "/")
get_response(client, "/")
get_response(client, "/items/678?q=43243")
get_response(client, "/does_not_exist")
response = get_response(client, "/metrics")
assert_is_not_multiprocess(response)
assert_request_count(2)
assert b'handler="/does_not_exist"' in response.content
assert b'handler="none"' not in response.content
def test_grouping_untemplated():
app = create_app()
Instrumentator(excluded_handlers=["/metrics"]).add(metrics.latency()).instrument(app)
expose_metrics(app)
client = TestClient(app)
get_response(client, "/")
get_response(client, "/items/678?q=43243")
get_response(client, "/does_not_exist")
response = get_response(client, "/metrics")
assert_is_not_multiprocess(response)
assert_request_count(1)
assert b'handler="/does_not_exist"' not in response.content
assert b'handler="none"' in response.content
def test_excluding_handlers():
app = create_app()
Instrumentator(excluded_handlers=["fefefwefwe"]).add(metrics.latency()).instrument(
app
)
expose_metrics(app)
client = TestClient(app)
get_response(client, "/")
get_response(client, "/metrics")
get_response(client, "/fefefwefwe")
response = get_response(client, "/metrics")
assert_is_not_multiprocess(response)
assert_request_count(1)
assert b'handler="/metrics"' in response.content
assert b'handler="/fefefwefwe"' not in response.content
assert b'handler="none"' not in response.content
def test_excluding_handlers_regex():
app = create_app()
Instrumentator(excluded_handlers=["^/$"]).add(metrics.latency()).instrument(app)
expose_metrics(app)
client = TestClient(app)
get_response(client, "/ignore")
get_response(client, "/ignore")
get_response(client, "/")
response = get_response(client, "/metrics")
assert b'handler="/"' not in response.content
assert b'handler="none"' not in response.content
assert b'handler="/ignore"' in response.content
def test_excluded_handlers_none():
app = create_app()
exporter = (
Instrumentator(excluded_handlers=None).add(metrics.latency()).instrument(app)
)
expose_metrics(app)
assert len(exporter.excluded_handlers) == 0
assert isinstance(exporter.excluded_handlers, list)
assert exporter.excluded_handlers is not None
# ------------------------------------------------------------------------------
# Test bucket without infinity.
def test_bucket_without_inf():
app = create_app()
Instrumentator(excluded_handlers=["/metrics"]).add(
metrics.latency(
buckets=(
1,
2,
3,
)
)
).instrument(app).expose(app)
client = TestClient(app)
get_response(client, "/")
response = get_response(client, "/metrics")
assert_is_not_multiprocess(response)
assert b"http_request_duration_seconds" in response.content
# ------------------------------------------------------------------------------
# Test env var option.
def test_should_respect_env_var_existence_exists():
app = create_app()
Instrumentator(should_respect_env_var=True, env_var_name="eoioerwjioGFIUONEIO").add(
metrics.latency()
).instrument(app).expose(app)
client = TestClient(app)
get_response(client, "/")
response = get_response(client, "/metrics")
assert response.status_code == 404
def test_should_respect_env_var_existence_not_exists():
app = create_app()
os.environ["eoioerwjioGFIUONEIO"] = "true"
Instrumentator(should_respect_env_var=True, env_var_name="eoioerwjioGFIUONEIO").add(
metrics.latency()
).instrument(app).expose(app)
client = TestClient(app)
get_response(client, "/")
response = get_response(client, "/metrics")
assert response.status_code == 200
# ------------------------------------------------------------------------------
# Test decimal rounding.
def calc_entropy(decimal_str: str):
decimals = [int(x) for x in decimal_str]
print(decimals)
entropy = 0
for i in range(len(decimals)):
if i != 0:
entropy += abs(decimals[i] - decimals[i - 1])
return entropy
def test_entropy():
assert calc_entropy([1, 0, 0, 4, 0]) == 9
def test_default_no_rounding():
app = create_app()
Instrumentator(excluded_handlers=["/metrics"]).add(
metrics.latency(
buckets=(
1,
2,
3,
)
)
).instrument(app).expose(app)
client = TestClient(app)
get_response(client, "/")
get_response(client, "/")
get_response(client, "/")
_ = get_response(client, "/metrics")
result = REGISTRY.get_sample_value(
"http_request_duration_seconds_sum",
{"handler": "/", "method": "GET", "status": "2xx"},
)
entropy = calc_entropy(str(result).split(".")[1][4:])
assert entropy > 15
def test_rounding():
app = create_app()
Instrumentator(should_round_latency_decimals=True).add(
metrics.latency(
buckets=(
1,
2,
3,
)
)
).instrument(app).expose(app)
client = TestClient(app)
get_response(client, "/")
get_response(client, "/")
get_response(client, "/")
_ = get_response(client, "/metrics")
result = REGISTRY.get_sample_value(
"http_request_duration_seconds_sum",
{"handler": "/", "method": "GET", "status": "2xx"},
)
entropy = calc_entropy(str(result).split(".")[1][4:])
assert entropy < 10
| 28.769962
| 90
| 0.650301
|
d3fac9a5c504edb3a2f4832158ea20b5ce5a4da4
| 2,414
|
py
|
Python
|
airmozilla/suggest/tests/test_helpers.py
|
anurag90x/airmozilla
|
c758fb86695e16891e45dc24a2cf1406c54f2686
|
[
"BSD-3-Clause"
] | null | null | null |
airmozilla/suggest/tests/test_helpers.py
|
anurag90x/airmozilla
|
c758fb86695e16891e45dc24a2cf1406c54f2686
|
[
"BSD-3-Clause"
] | null | null | null |
airmozilla/suggest/tests/test_helpers.py
|
anurag90x/airmozilla
|
c758fb86695e16891e45dc24a2cf1406c54f2686
|
[
"BSD-3-Clause"
] | null | null | null |
from nose.tools import eq_
from django.test import TestCase
from django.contrib.auth.models import User
from django.utils import timezone
from funfactory.urlresolvers import reverse
from airmozilla.suggest.helpers import (
next_url,
state_description,
truncate_url
)
from airmozilla.main.models import SuggestedEvent, Event, Location
class TestStateHelpers(TestCase):
def setUp(self):
self.user = User.objects.create_user('nigel', 'n@live.in', 'secret')
def test_get_getting_state(self):
event = SuggestedEvent.objects.create(
user=self.user,
title='Cheese!',
slug='cheese'
)
url = next_url(event)
eq_(url, reverse('suggest:description', args=(event.pk,)))
description = state_description(event)
eq_(description, 'Description not entered')
event.description = 'Some description'
event.save()
url = next_url(event)
eq_(url, reverse('suggest:details', args=(event.pk,)))
description = state_description(event)
eq_(description, 'Details missing')
event.start_time = timezone.now()
event.location = Location.objects.create(
name='Mountain View', timezone='US/Pacific',
)
event.privacy = Event.PRIVACY_PUBLIC
event.save()
url = next_url(event)
eq_(url, reverse('suggest:placeholder', args=(event.pk,)))
description = state_description(event)
eq_(description, 'No image')
event.placeholder_img = 'some/path.png'
event.save()
url = next_url(event)
eq_(url, reverse('suggest:summary', args=(event.pk,)))
description = state_description(event)
eq_(description, 'Not yet submitted')
event.submitted = timezone.now()
event.save()
url = next_url(event)
eq_(url, reverse('suggest:summary', args=(event.pk,)))
description = state_description(event)
eq_(description, 'Submitted')
class TestTruncateURL(TestCase):
def test_truncate_short(self):
url = 'http://www.peterbe.com'
result = truncate_url(url, 30)
eq_(result, url)
assert len(result) <= 30
def test_truncate_long(self):
url = 'http://www.peterbe.com'
result = truncate_url(url, 20)
expect = url[:10] + u'\u2026' + url[-10:]
eq_(result, expect)
| 30.556962
| 76
| 0.630903
|
ee36286bdfaf7b9b18d96dd34ef491a9c0809d53
| 2,977
|
py
|
Python
|
src/mercury/common/task_managers/base/manager.py
|
shaneutt/mercury
|
d8d3c9a86ab3235d4e36583fcee6f656e5209b7e
|
[
"Apache-2.0"
] | 4
|
2017-07-21T20:56:46.000Z
|
2018-04-30T13:37:37.000Z
|
src/mercury/common/task_managers/base/manager.py
|
jr0d/mercury
|
d8d3c9a86ab3235d4e36583fcee6f656e5209b7e
|
[
"Apache-2.0"
] | 11
|
2017-08-24T04:55:58.000Z
|
2021-12-13T19:36:21.000Z
|
src/mercury/common/task_managers/base/manager.py
|
shaneutt/mercury
|
d8d3c9a86ab3235d4e36583fcee6f656e5209b7e
|
[
"Apache-2.0"
] | 6
|
2017-08-18T15:59:15.000Z
|
2018-08-21T19:54:34.000Z
|
import logging
import signal
import threading
import time
from .worker import Worker
log = logging.getLogger(__name__)
class Manager(object):
"""Class managing workers"""
def __init__(self,
task_handler,
number_of_workers=10,
maximum_requests_per_thread=5000,
maximum_age=3600,
handler_args=None,
handler_kwargs=None):
"""Create a new Manager.
:param task_handler: The Task object that will execute tasks.
:param number_of_workers: Maximum number of workers for this manager.
:param maximum_requests_per_thread: Maximum number of tasks per worker.
:param maximum_age: Maximum age of workers.
"""
self.number_of_workers = number_of_workers
self.max_requests = maximum_requests_per_thread
self.max_age = maximum_age
self.task_handler = task_handler
self.handler_args = handler_args or ()
self.handler_kwargs = handler_kwargs or {}
self.workers = []
def spawn(self):
"""Add and start a new worker to handle tasks."""
worker_dict = {}
w = Worker(self.task_handler, self.max_requests, self.max_age,
self.handler_args, self.handler_kwargs)
worker_dict['worker_class'] = w
t = threading.Thread(target=w.start)
t.start()
log.info('Spawned thread: %s' % t.ident)
worker_dict['thread'] = t
self.workers.append(worker_dict)
def kill_all(self):
"""Kill all active workers."""
for worker in self.active_workers:
log.info('Sending kill signal to worker %s'
% worker['thread'].ident)
worker['worker_class'].kill_signal = True
@property
def active_workers(self):
"""Find all active workers and update current list."""
active = []
for idx in range(len(self.workers)):
if self.workers[idx]['thread'].is_alive():
active.append(self.workers[idx])
# An optimization that usually should be present in a property
# "I'm sure you can find a better way to implement this" - Hussam
self.workers = active
return active
def spawn_threads(self):
"""Create new workers until the maximum number is reached."""
count = len(self.active_workers)
while count < self.number_of_workers:
self.spawn()
count += 1
def manage(self):
"""Maintain the maximum number of workers running."""
# noinspection PyUnusedLocal
def term_handler(*args, **kwargs):
log.info('Caught TERM signal, sending kill all')
self.kill_all()
signal.signal(signal.SIGTERM, term_handler)
try:
while True:
self.spawn_threads()
time.sleep(.01)
except KeyboardInterrupt:
term_handler()
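# Minimal usage sketch (illustrative only; "MyTask" is a hypothetical task handler
# class, not something defined in this module):
#
#   manager = Manager(task_handler=MyTask, number_of_workers=4)
#   manager.manage()  # blocks, respawning workers until SIGTERM or Ctrl-C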
| 33.449438
| 79
| 0.603292
|
49e8d7ec228216237faa9f5cff43b3f0ce5419c3
| 751
|
py
|
Python
|
example.py
|
qqaatw/LibJciHitachi
|
b1ff14a5d8e9e64c23085ee9def5df202f1aff86
|
[
"Apache-2.0"
] | 14
|
2021-07-05T18:24:11.000Z
|
2022-03-29T18:15:53.000Z
|
example.py
|
qqaatw/LibJciHitachi
|
b1ff14a5d8e9e64c23085ee9def5df202f1aff86
|
[
"Apache-2.0"
] | 3
|
2022-01-12T15:13:10.000Z
|
2022-01-18T04:42:59.000Z
|
example.py
|
qqaatw/LibJciHitachi
|
b1ff14a5d8e9e64c23085ee9def5df202f1aff86
|
[
"Apache-2.0"
] | 2
|
2021-07-05T15:45:23.000Z
|
2021-09-11T08:53:13.000Z
|
from JciHitachi.api import JciHitachiAPI
# Fill out your Jci Hitachi email address, password, and device name.
EMAIL = "yourname@yourdomain.com"
PASSWORD = "password"
DEVICENAME = "living room"
# Login
api = JciHitachiAPI(EMAIL, PASSWORD, DEVICENAME)
api.login()
# Check device status
device_status = api.get_status()
print(device_status[DEVICENAME].status)
# Set device status
# For available command names and values, please refer to
# Air conditioner: status.py->JciHitachiAC
# Dehumidifier: status.py->JciHitachiDH
if api.set_status('target_temp', 27, DEVICENAME):
print('Success')
else:
print('Failed')
# Check the updated device status
api.refresh_status()
device_status = api.get_status()
print(device_status[DEVICENAME].status)
| 25.896552
| 69
| 0.768309
|
d0e1f0114f6abd1a0c34a3205bab245f4f2f6961
| 2,726
|
py
|
Python
|
tensorflow/compiler/mlir/hlo/tests/lit.cfg.py
|
kim-com/tensorflow
|
4301e3f34b8da528c58bdafe05cd66c8a55fce9e
|
[
"Apache-2.0"
] | null | null | null |
tensorflow/compiler/mlir/hlo/tests/lit.cfg.py
|
kim-com/tensorflow
|
4301e3f34b8da528c58bdafe05cd66c8a55fce9e
|
[
"Apache-2.0"
] | null | null | null |
tensorflow/compiler/mlir/hlo/tests/lit.cfg.py
|
kim-com/tensorflow
|
4301e3f34b8da528c58bdafe05cd66c8a55fce9e
|
[
"Apache-2.0"
] | null | null | null |
"""Lit configuration to drive test in this repo."""
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -*- Python -*-
# pylint: disable=undefined-variable
import os
import lit.formats
from lit.llvm import llvm_config
from lit.llvm.subst import ToolSubst
import lit.util
# Configuration file for the 'lit' test runner.
# name: The name of this test suite.
config.name = 'MLIR_HLO_OPT'
config.test_format = lit.formats.ShTest(not llvm_config.use_lit_shell)
# suffixes: A list of file extensions to treat as test files.
config.suffixes = ['.mlir', '.mlir.py']
# test_source_root: The root path where tests are located.
config.test_source_root = os.path.dirname(__file__)
# test_exec_root: The root path where tests should be run.
config.test_exec_root = os.path.join(config.mlir_hlo_obj_root, 'test')
config.substitutions.append(('%PATH%', config.environment['PATH']))
config.substitutions.append(('%shlibext', config.llvm_shlib_ext))
llvm_config.with_system_environment(['HOME', 'INCLUDE', 'LIB', 'TMP', 'TEMP'])
llvm_config.use_default_substitutions()
# excludes: A list of directories to exclude from the testsuite. The 'Inputs'
# subdirectories contain auxiliary inputs for various tests in their parent
# directories.
config.excludes = [
'Inputs', 'Examples', 'CMakeLists.txt', 'README.txt', 'LICENSE.txt'
]
# Disabled for now, see b/223274383.
config.excludes.append('lhlo-legalize-select-and-scatter.mlir')
# test_source_root: The root path where tests are located.
config.test_source_root = os.path.dirname(__file__)
# test_exec_root: The root path where tests should be run.
config.test_exec_root = os.path.join(config.mlir_hlo_obj_root, 'tests')
config.mlir_hlo_tools_dir = os.path.join(config.mlir_hlo_obj_root, 'bin')
# Tweak the PATH to include the tools dir.
llvm_config.with_environment('PATH', config.llvm_tools_dir, append_path=True)
tool_dirs = [
config.mlir_hlo_tools_dir,
config.llvm_tools_dir,
]
tools = [
'mlir-hlo-opt',
'mlir-cpu-runner',
ToolSubst(
'%mlir_runner_utils_dir',
config.mlir_runner_utils_dir,
unresolved='ignore'),
]
llvm_config.add_tool_substitutions(tools, tool_dirs)
| 33.654321
| 78
| 0.757153
|
96b1aeb355bd26712edf903b18e001f844857925
| 1,269
|
pyw
|
Python
|
build/nsis/HH.launch.pyw
|
armohamm/HH---POS-Accounting-and-ERP-Software
|
4bcb7be39e24ed648451d5ad2447fd0840820856
|
[
"MIT"
] | 27
|
2020-04-13T09:05:51.000Z
|
2022-02-02T22:43:30.000Z
|
build/nsis/HH.launch.pyw
|
armohamm/HH---POS-Accounting-and-ERP-Software
|
4bcb7be39e24ed648451d5ad2447fd0840820856
|
[
"MIT"
] | 5
|
2018-07-20T13:17:23.000Z
|
2019-10-23T12:42:39.000Z
|
build/nsis/HH.launch.pyw
|
armohamm/HH---POS-Accounting-and-ERP-Software
|
4bcb7be39e24ed648451d5ad2447fd0840820856
|
[
"MIT"
] | 19
|
2018-07-20T18:21:17.000Z
|
2020-03-03T22:43:29.000Z
|
#!python3.6-32
import sys, os
scriptdir, script = os.path.split(__file__)
pkgdir = os.path.join(scriptdir, 'pkgs')
sys.path.insert(0, pkgdir)
os.environ['PYTHONPATH'] = pkgdir + os.pathsep + os.environ.get('PYTHONPATH', '')
# APPDATA should always be set, but in case it isn't, try user home
# If none of APPDATA, HOME, USERPROFILE or HOMEPATH are set, this will fail.
appdata = os.environ.get('APPDATA', None) or os.path.expanduser('~')
if 'pythonw' in sys.executable:
# Running with no console - send all stdstream output to a file.
kw = {'errors': 'replace'} if (sys.version_info[0] >= 3) else {}
sys.stdout = sys.stderr = open(os.path.join(appdata, script+'.log'), 'w', **kw)
else:
# In a console. But if the console was started just for this program, it
# will close as soon as we exit, so write the traceback to a file as well.
def excepthook(etype, value, tb):
"Write unhandled exceptions to a file and to stderr."
import traceback
traceback.print_exception(etype, value, tb)
with open(os.path.join(appdata, script+'.log'), 'w') as f:
traceback.print_exception(etype, value, tb, file=f)
sys.excepthook = excepthook
if __name__ == '__main__':
from hh import __main__
__main__()
| 39.65625
| 83
| 0.676123
|
54c7709064b573d1340acb2d735e62fe010ff368
| 4,458
|
py
|
Python
|
match.py
|
dykuang/Raman-Spectral-Matching
|
a0a30955bc29c9c124513417b91d68de3c28c1ef
|
[
"MIT"
] | 1
|
2019-09-10T03:14:49.000Z
|
2019-09-10T03:14:49.000Z
|
match.py
|
dykuang/Raman-Spectral-Matching
|
a0a30955bc29c9c124513417b91d68de3c28c1ef
|
[
"MIT"
] | null | null | null |
match.py
|
dykuang/Raman-Spectral-Matching
|
a0a30955bc29c9c124513417b91d68de3c28c1ef
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Fri Jun 22 16:05:16 2018
@author: dykua
This script does the actual matching
"""
import numpy as np
from DataExtraction import extract_data
from Util import preprocess, smooth1D, find_top_peak_loc, multi_gaussian
lib_data = extract_data('../Li-Lin - data/Spectral Data')
test_data = extract_data('../Li-Lin - data/Test Data/Caffeine Labspec Run Used')
#test_data = extract_data('../Li-Lin - data/Test Data/Ephedrine HCL Labspec Run Used')
#test_data = extract_data('../Li-Lin - data/Test Data/Acetaminophen Labspec Run Used')
#test_data = extract_data('../Li-Lin - data/Test Data/Oxycocet Labspec Run Used')
#test_data = extract_data('../Li-Lin - data/Test Data/Benzene derivatives (baseline corrected)')
#test_data = extract_data('../Li-Lin - data/Test Data/Benzene Chloroform')
lb = 200
up = 1800
resolution = 1
sd_prior = 5
filter_width = np.arange(1,20)
peak_num = 6
#------------------------------------------------------------------------------
# Preprocess
#------------------------------------------------------------------------------
test_data = preprocess(test_data, lb, up, resolution)
lib_data = preprocess(lib_data, lb, up, resolution)
name_in_test = list(test_data.keys())
name_in_lib = list(lib_data.keys())
test_ind = 0
test_sample = test_data[name_in_test[test_ind]]
print('test_sample name: {}'.format(name_in_test[test_ind]))
#------------------------------------------------------------------------------
# Get signature from test_sample
#------------------------------------------------------------------------------
test_sample[1,:] = smooth1D(test_sample[1,:])
loc, height = find_top_peak_loc(test_sample, peak_num, filter_width)
sd = sd_prior*np.ones_like(loc)
mg = multi_gaussian(test_sample[0,:], loc, height, sd, use_height = False)
#------------------------------------------------------------------------------
# Get signature also for lib data?
#------------------------------------------------------------------------------
#for key, value in lib_data.items():
# value[1,:] = smooth1D(value[1,:])
# loc, height = find_top_peak_loc(value, peak_num, filter_width)
# lib_data[key]= np.vstack([value[0,:], multi_gaussian(value[0,:], loc, height,
# sd, use_height = False)])
#------------------------------------------------------------------------------
# Calculate matching score
#------------------------------------------------------------------------------
match_score = np.zeros(len(name_in_lib))
from metrics import Dot
for i, name in enumerate(name_in_lib):
match_score[i] = Dot(mg, lib_data[name][1,:])
candidates_order = np.argsort(match_score)[::-1]
for i, c in enumerate(candidates_order[:10]):
print("The top {0} selection are {1} with score: {2:4f}.".
format(i+1, name_in_lib[c], match_score[c]))
#==============================================================================
# The above match works pretty good and fast, tries the histogram method below
# The result is not very good...
#==============================================================================
#from Util import to_hist
#peak_num_hist = 120
#bins = 40
#window_len = 11
#half_window_len = int(window_len/2)
#
#for key, value in test_data.items():
# mask = (value[0,:]>lb) & (value[0,:] < up)
# test_data[key] = np.stack((value[0, mask], smooth1D(value[1,mask]/np.linalg.norm(value[1, mask]), window_len)))
#
#for key, value in lib_data.items():
# mask = (value[0,:]>lb) & (value[0,:] < up)
# lib_data[key] = np.stack((value[0, mask], smooth1D(value[1,mask]/np.linalg.norm(value[1, mask]), window_len)))
#
#test_ind = 6
#test_sample = test_data[name_in_test[test_ind]]
#print('test_sample name: {}'.format(name_in_test[test_ind]))
#test_hist = to_hist(test_sample, peak_num_hist, filter_width, bins)
#
#for key, value in lib_data.items():
## value[1,:] = smooth1D(value[1,:])
# lib_data[key]= to_hist(value, peak_num_hist, filter_width, bins)
#
#match_score = np.zeros(len(name_in_lib))
#from metrics import JSD
#for i, name in enumerate(name_in_lib):
# match_score[i] = JSD(test_hist, lib_data[name])
#
#candidates_order = np.argsort(match_score)
#for i, c in enumerate(candidates_order[:]):
# print("The top {0} selection are {1} with distance: {2:4f}.".
# format(i+1, name_in_lib[c], match_score[c]))
| 35.664
| 120
| 0.561463
|
29f0e94e584bc234e191f2a31828f2915cc1bfbd
| 45
|
py
|
Python
|
solutions/12/src/cormorant/tests/__init__.py
|
larsbratholm/champs_kaggle
|
fda4f213d02fd5e0138a86c52b4140c9f94fec6e
|
[
"MIT"
] | 9
|
2020-08-14T23:11:16.000Z
|
2021-08-09T16:23:43.000Z
|
solutions/12/src/cormorant/tests/__init__.py
|
larsbratholm/champs_kaggle
|
fda4f213d02fd5e0138a86c52b4140c9f94fec6e
|
[
"MIT"
] | 1
|
2020-11-19T09:29:14.000Z
|
2020-11-19T09:29:14.000Z
|
solutions/12/src/cormorant/tests/__init__.py
|
larsbratholm/champs_kaggle
|
fda4f213d02fd5e0138a86c52b4140c9f94fec6e
|
[
"MIT"
] | 2
|
2020-09-09T02:53:57.000Z
|
2020-12-06T08:20:52.000Z
|
from .cormorant_tests import cormorant_tests
| 22.5
| 44
| 0.888889
|
682dcb0e3796fb539739294e9ac15d9f1c2a43f6
| 7,278
|
py
|
Python
|
hoki/tests/test_spec.py
|
HeloiseS/hoki
|
31cecc4c4ee2bbb617e1451df1cab896f485d801
|
[
"BSD-3-Clause"
] | 40
|
2019-09-18T13:04:09.000Z
|
2022-03-08T12:45:35.000Z
|
hoki/tests/test_spec.py
|
HeloiseS/hoki
|
31cecc4c4ee2bbb617e1451df1cab896f485d801
|
[
"BSD-3-Clause"
] | 60
|
2019-10-26T01:11:52.000Z
|
2022-02-10T06:52:08.000Z
|
hoki/tests/test_spec.py
|
HeloiseS/hoki
|
31cecc4c4ee2bbb617e1451df1cab896f485d801
|
[
"BSD-3-Clause"
] | 8
|
2020-03-04T23:35:01.000Z
|
2021-05-23T16:32:54.000Z
|
"""
Tests for the `hoki.spec` module.
Authors: Martin Glatzle
"""
from unittest import TestCase
from hoki import spec
import numpy as np
class TestBinwiseTrapz(TestCase):
def test_std(self):
x = np.linspace(1, 10)
y = np.ones_like(x).reshape((1, -1))
bin_edges = np.array([4, 5])
y_new = spec._binwise_trapz_sorted(x, y, bin_edges)
self.assertAlmostEqual(
1.0, y_new[0, 0]
)
return
def test_edge_cases(self):
lam = np.linspace(1, 10)
L_lam = np.ones_like(lam).reshape(1, -1)
# bin edges at ends of lam
bin_edges = np.array([lam[0], (lam[0]+lam[-1])/2, lam[-1]])
L_bin_int = spec._binwise_trapz_sorted(lam, L_lam, bin_edges)
self.assertAlmostEqual(
1.0, L_bin_int[0, 0]/np.diff(bin_edges)[0]
)
self.assertAlmostEqual(
1.0, L_bin_int[0, 1]/np.diff(bin_edges)[1]
)
# only one bin
bin_edges = np.array([lam[0], lam[-1]])
L_bin_int = spec._binwise_trapz_sorted(lam, L_lam, bin_edges)
self.assertAlmostEqual(
1.0, L_bin_int[0, 0]/np.diff(bin_edges)[0]
)
# one bin smaller than resolution
bin_edges = np.array([lam[0], (lam[0]+lam[1])/2])
L_bin_int = spec._binwise_trapz_sorted(lam, L_lam, bin_edges)
self.assertAlmostEqual(
1.0, L_bin_int[0, 0]/np.diff(bin_edges)[0]
)
# one bin covering neighbouring resolution elements
bin_edges = np.array([lam[0], (lam[1]+lam[2])/2])
L_bin_int = spec._binwise_trapz_sorted(lam, L_lam, bin_edges)
self.assertAlmostEqual(
1.0, L_bin_int[0, 0]/np.diff(bin_edges)[0]
)
return
def test_L_conservation(self):
lam = np.linspace(1, 5, num=50)
L_lam = np.random.random((4, len(lam)))
bin_edges = np.array([lam[0], lam[-1]])
L_bin_int = spec._binwise_trapz_sorted(lam, L_lam, bin_edges)
for j, row in enumerate(L_lam):
self.assertAlmostEqual(
np.trapz(row, x=lam), L_bin_int[j, 0]
)
return
def test_L_conservation_2(self):
lam = np.linspace(1, 5, num=50)
L_lam = np.random.random((1, len(lam)))
bin_edges = np.array([lam[0], (lam[0]+lam[-1])/2, lam[-1]])
L_bin_int = spec._binwise_trapz_sorted(lam, L_lam, bin_edges)
self.assertAlmostEqual(
np.trapz(L_lam[0, :], x=lam),
np.sum(L_bin_int)
)
return
def test_L_conservation_3(self):
lam = np.linspace(1, 5, num=50)
L_lam = np.random.random((1, len(lam)))
bin_edges = np.array([lam[4], lam[20]])
L_bin_int = spec._binwise_trapz_sorted(lam, L_lam, bin_edges)
self.assertAlmostEqual(
np.trapz(L_lam[0, 4:21], x=lam[4:21]),
L_bin_int[0, 0]
)
return
class TestBinSpectra(TestCase):
def test_std(self):
wl = np.linspace(1, 1000, num=2000)
spectra = np.random.random((10, len(wl)))
wl_new, spectra_new = spec.bin_luminosity(
wl, spectra
)
self.assertEqual(
(spectra.shape[0], 10), spectra_new.shape
)
self.assertTrue(
np.all(spectra_new >= 0)
)
bin_edges = np.linspace(100, 500, num=500)
wl_new, spectra_new = spec.bin_luminosity(
wl, spectra, bin_edges
)
self.assertTrue(
np.allclose(0.5*(bin_edges[1:]+bin_edges[:-1]), wl_new)
)
self.assertEqual(
(spectra.shape[0], len(bin_edges)-1), spectra_new.shape
)
self.assertTrue(
np.all(spectra_new >= 0)
)
return
def test_L_conservation(self):
wl = np.linspace(1, 1000, num=20000, endpoint=True)
spectra = np.random.random((20, len(wl)))
bin_edges = np.linspace(1, 1000, num=10, endpoint=True)
wl_new, spectra_new = spec.bin_luminosity(
wl, spectra, bin_edges
)
self.assertTrue(
np.allclose(
np.trapz(spectra, x=wl, axis=1),
np.sum(spectra_new*np.diff(bin_edges), axis=1)
)
)
wl_new, spectra_new = spec.bin_luminosity(
wl, spectra
)
self.assertTrue(
np.allclose(
np.trapz(spectra, x=wl, axis=1),
np.sum(spectra_new, axis=1)*(wl[-1]-wl[0])/10
)
)
wl_new, spectra_new = spec.bin_luminosity(
wl, spectra, 1
)
self.assertTrue(
np.allclose(
np.trapz(spectra, x=wl, axis=1),
np.sum(spectra_new, axis=1)*(wl[-1]-wl[0])
)
)
return
def test_L_conservation_desc(self):
wl = np.linspace(1000, 1, num=20000, endpoint=True)
spectra = np.random.random((20, len(wl)))
bin_edges = np.linspace(1000, 1, num=10, endpoint=True)
wl_new, spectra_new = spec.bin_luminosity(
wl, spectra, bin_edges
)
self.assertTrue(
np.allclose(
np.trapz(spectra, x=wl, axis=1),
np.sum(spectra_new*np.diff(bin_edges), axis=1)
)
)
wl_new, spectra_new = spec.bin_luminosity(
wl, spectra
)
self.assertTrue(
np.allclose(
np.trapz(spectra, x=wl, axis=1),
np.sum(spectra_new, axis=1)*(wl[-1]-wl[0])/10
)
)
wl_new, spectra_new = spec.bin_luminosity(
wl, spectra, 1
)
self.assertTrue(
np.allclose(
np.trapz(spectra, x=wl, axis=1),
np.sum(spectra_new, axis=1)*(wl[-1]-wl[0])
)
)
return
def test_exceptions(self):
# wrong dimensionality
wl = np.linspace(1, 100)
sed = np.empty((len(wl)))
bin_edges = np.array([0.5, 20])
with self.assertRaises(ValueError):
spec.bin_luminosity(wl, sed, bin_edges)
# incompatible shapes
wl = np.linspace(1, 100)
sed = np.empty((1, len(wl)-2))
bin_edges = np.array([0.5, 20])
with self.assertRaises(ValueError):
spec.bin_luminosity(wl, sed, bin_edges)
# identical values in wl
wl = np.hstack((
np.linspace(1, 100),
np.linspace(1, 100)
))
sed = np.empty((1, len(wl)))
bin_edges = np.array([0.5, 20])
with self.assertRaises(ValueError):
spec.bin_luminosity(wl, sed, bin_edges)
# bins outside range
wl = np.linspace(1, 100)
sed = np.empty((1, len(wl)))
bin_edges = np.array([0.5, 20])
with self.assertRaises(ValueError):
spec.bin_luminosity(wl, sed, bin_edges)
bin_edges = np.array([2, 200])
with self.assertRaises(ValueError):
spec.bin_luminosity(wl, sed, bin_edges)
wl = np.linspace(100, 1)
sed = np.empty((1, len(wl)))
bin_edges = np.array([0.5, 20])
with self.assertRaises(ValueError):
spec.bin_luminosity(wl, sed, bin_edges)
return
| 30.451883
| 69
| 0.537648
|
b51eea7ae0503c32740a8a2d70dfa15273cae35c
| 14,805
|
py
|
Python
|
SNCPython/EnvSensorv2/EnvSensorv2.py
|
rt-ispace/SNC
|
890acbbe336bb0429f47fa7bed499a46215aac64
|
[
"MIT"
] | 1
|
2021-08-08T05:40:52.000Z
|
2021-08-08T05:40:52.000Z
|
SNCPython/EnvSensorv2/EnvSensorv2.py
|
rt-ispace/SNC
|
890acbbe336bb0429f47fa7bed499a46215aac64
|
[
"MIT"
] | null | null | null |
SNCPython/EnvSensorv2/EnvSensorv2.py
|
rt-ispace/SNC
|
890acbbe336bb0429f47fa7bed499a46215aac64
|
[
"MIT"
] | 1
|
2021-08-08T05:40:56.000Z
|
2021-08-08T05:40:56.000Z
|
#!/usr/bin/python
'''
////////////////////////////////////////////////////////////////////////////
//
// This file is part of SNC
//
// Copyright (c) 2014-2021, Richard Barnett
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of
// this software and associated documentation files (the "Software"), to deal in
// the Software without restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the
// Software, and to permit persons to whom the Software is furnished to do so,
// subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
// INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
// SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
BME680 code via Pimoroni:
MIT License
Copyright (c) 2018 Pimoroni Ltd
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
# camera parameters - change as required
CAMERA_ENABLE = False
# If CAMERA_USB is true, expects a webcam
# If False, expects a Pi camera
CAMERA_USB = True
CAMERA_INDEX = 0
CAMERA_WIDTH = 1280
CAMERA_HEIGHT = 720
CAMERA_RATE = 10
import sys
import SNCPython
import SNCPythonPy
import time
import io
import picamera
import json
# import the sensor drivers
sys.path.append('../SensorDrivers')
import SensorJSON
import bme680
import RT_TSL2561
import RT_TSL2591
from RT_I2C import RT_I2C
# the sensor update interval (in seconds). Change as required.
SENSOR_UPDATE_INTERVAL = 0.5
# The set of sensors. Choose which ones are required or use NullSensor if no physical sensor is present
# Multi sensor objects (such as BMP180 for temp and pressure) can be reused
light = None
if RT_I2C.checkI2CAddress(RT_TSL2591.TSL2591_ADDR):
light = RT_TSL2591.RT_TSL2591()
if RT_I2C.checkI2CAddress(RT_TSL2561.TSL2561_ADDRESS_ADDR_FLOAT):
light = RT_TSL2561.RT_TSL2561()
if light != None:
light.enable()
bmeSensor = bme680.BME680(0x77)
bmeSensor.set_humidity_oversample(bme680.OS_2X)
bmeSensor.set_pressure_oversample(bme680.OS_4X)
bmeSensor.set_temperature_oversample(bme680.OS_8X)
bmeSensor.set_filter(bme680.FILTER_SIZE_3)
bmeSensor.set_gas_status(bme680.ENABLE_GAS_MEAS)
bmeSensor.set_gas_heater_temperature(320)
bmeSensor.set_gas_heater_duration(150)
bmeSensor.select_gas_heater_profile(0)
start_time = time.time()
curr_time = time.time()
burn_in_time = 300
burningIn = True
gas_baseline = 0
hum_baseline = 0
burn_in_data = []
air_quality_score = -1
'''
------------------------------------------------------------
User interface function
'''
def processUserInput():
''' Process any user console input that might have arrived.
    Returns False if we need to exit. '''
# see if in daemon mode. if so, just return
if SNCPython.checkDaemonMode():
return True
c = SNCPython.getConsoleInput()
if (c != None):
print("\n")
sc = chr(c)
if (sc == "x"):
return False
elif (sc == "s"):
print("Status:")
if (SNCPython.isConnected()):
print("SNCLink connected")
else:
print("SNCLink closed")
print("Temperature: %.2fC" % bmeSensor.data.temperature)
print("Pressure: %.2fhPa" % bmeSensor.data.pressure)
print("Humidity: %.2f%%RH" % bmeSensor.data.humidity)
if light != None:
print("Light: %.2flux" % light.readLight())
if burningIn:
print("Burning in gas: {0} Ohms, {1} seconds left".format(bmeSensor.data.gas_resistance, burn_in_time - (curr_time - start_time)))
else:
print("Gas baseline: {0} Ohms, humidity baseline: {1:.2f} %RH".format(gas_baseline, hum_baseline))
print("Gas: {0:.2f} Ohms, humidity: {1:.2f} %RH,air quality: {2:.2f}".format(bmeSensor.data.gas_resistance, bmeSensor.data.humidity, air_quality_score))
elif (sc == 'h'):
print("Available commands are:")
print(" s - display status")
print(" x - exit")
print("\nEnter command: "),
sys.stdout.flush()
return True
'''
------------------------------------------------------------
Sensor functions
'''
# global to maintain last sensor read time
lastSensorReadTime = time.time()
# global to maintain the sensor service port
sensorPort = -1
def readSensors():
global lastSensorReadTime
global sensorPort
global bmeSensor, burningIn, burn_in_data, curr_time, burn_in_time, start_time, air_quality_score
global gas_baseline, hum_baseline, hum_offset, hum_weighting, hum_score
if ((time.time() - lastSensorReadTime) < SENSOR_UPDATE_INTERVAL):
return
    # time to send the sensor readings
lastSensorReadTime = time.time()
if bmeSensor.get_sensor_data():
if burningIn:
if curr_time - start_time < burn_in_time:
curr_time = time.time()
if bmeSensor.data.heat_stable:
gas = bmeSensor.data.gas_resistance
burn_in_data.append(gas)
else:
burningIn = False
gas_baseline = sum(burn_in_data[-50:]) / 50.0
# Set the humidity baseline to 40%, an optimal indoor humidity.
hum_baseline = 40.0
# This sets the balance between humidity and gas reading in the
# calculation of air_quality_score (25:75, humidity:gas)
hum_weighting = 0.25
else:
if bmeSensor.data.heat_stable:
gas = bmeSensor.data.gas_resistance
gas_offset = gas_baseline - gas
hum = bmeSensor.data.humidity
hum_offset = hum - hum_baseline
# Calculate hum_score as the distance from the hum_baseline.
if hum_offset > 0:
hum_score = (100 - hum_baseline - hum_offset) / (100 - hum_baseline) * (hum_weighting * 100)
else:
hum_score = (hum_baseline + hum_offset) / hum_baseline * (hum_weighting * 100)
# Calculate gas_score as the distance from the gas_baseline.
if gas_offset > 0:
gas_score = (gas / gas_baseline) * (100 - (hum_weighting * 100))
else:
gas_score = 100 - (hum_weighting * 100)
# Calculate air_quality_score.
air_quality_score = hum_score + gas_score
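                # Worked example with illustrative numbers (not from a real run):
                # gas_baseline = 200000 Ohms, hum_baseline = 40 %RH, and a reading
                # of gas = 150000 Ohms, hum = 45 %RH gives
                #   hum_score = (100 - 40 - 5) / (100 - 40) * 25 ~= 22.9
                #   gas_score = (150000 / 200000) * 75 = 56.25
                #   air_quality_score ~= 79.2 (out of a possible 100)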
sensorDict = {}
sensorDict[SensorJSON.TIMESTAMP] = time.time()
sensorDict[SensorJSON.TEMPERATURE_DATA] = bmeSensor.data.temperature
sensorDict[SensorJSON.PRESSURE_DATA] = bmeSensor.data.pressure
sensorDict[SensorJSON.HUMIDITY_DATA] = bmeSensor.data.humidity
sensorDict[SensorJSON.AIRQUALITY_DATA] = air_quality_score
if light != None:
sensorDict[SensorJSON.LIGHT_DATA] = light.readLight()
if (SNCPython.isServiceActive(sensorPort) and SNCPython.isClearToSend(sensorPort)):
SNCPython.sendSensorData(sensorPort, json.dumps(sensorDict))
'''
------------------------------------------------------------
USB camera functions and main loop
'''
def USBCameraLoop():
''' This is the main loop when the USB camera is enabled. '''
# Open the camera device
if (not SNCPython.vidCapOpen(CAMERA_INDEX, CAMERA_WIDTH, CAMERA_HEIGHT, CAMERA_RATE)):
print("Failed to open USB camera")
SNCPython.stop()
sys.exit()
# Activate the video stream
videoPort = SNCPython.addMulticastSource(SNCPythonPy.SNC_STREAMNAME_AVMUX)
if (videoPort == -1):
print("Failed to activate video service")
SNCPython.stop()
sys.exit()
while True:
# try to get a frame from the USB camera
ret, frame, jpeg, width, height, rate = SNCPython.vidCapGetFrame(CAMERA_INDEX)
if (ret):
# check if it can be sent on the SNCLink
if (SNCPython.isServiceActive(videoPort) and SNCPython.isClearToSend(videoPort)):
# send the frame
SNCPython.setVideoParams(width, height, rate)
pcm = str("")
if (jpeg):
SNCPython.sendJpegAVData(videoPort, frame, pcm)
else:
SNCPython.sendAVData(videoPort, frame, pcm)
# check if anything from the sensors
readSensors()
# handle user input
if (not processUserInput()):
break;
#give other things a chance
time.sleep(0.02)
# exiting
SNCPython.removeService(videoPort)
'''
------------------------------------------------------------
pi camera functions and main loop
'''
# this is used to track when we have a new frame
piCameraLastFrameIndex = -1
def piCameraSendFrameHelper(stream, frame, videoPort):
''' do the actual frame processing '''
global piCameraLastFrameIndex
# record index of new frame
piCameraLastFrameIndex = frame.index
# get the frame data and display it
stream.seek(frame.position)
image = stream.read(frame.frame_size)
# now check if it can be sent on the SNCLink
if (SNCPython.isServiceActive(videoPort) and SNCPython.isClearToSend(videoPort)):
# send the frame
pcm = str("")
SNCPython.sendJpegAVData(videoPort, image, pcm)
def piCameraSendFrame(stream, videoPort):
''' sendFrame checks the circular buffer to see if there is a new frame
    and sends it on the SNCLink if that is possible '''
global piCameraLastFrameIndex
with stream.lock:
if (CAMERA_RATE > 10):
for frame in stream.frames:
if (frame.index > piCameraLastFrameIndex):
piCameraSendFrameHelper(stream, frame, videoPort)
else:
# skip to last frame in iteration
frame = None
for frame in stream.frames:
pass
if (frame is None):
return
# check to see if this is new
if (frame.index == piCameraLastFrameIndex):
return
piCameraSendFrameHelper(stream, frame, videoPort)
def piCameraLoop():
''' This is the main loop when the pi camera is enabled. '''
# Activate the video stream
videoPort = SNCPython.addMulticastSource(SNCPythonPy.SNC_STREAMNAME_AVMUX)
if (videoPort == -1):
print("Failed to activate video service")
SNCPython.stop()
sys.exit()
# Tell the library what video params we are using
SNCPython.setVideoParams(CAMERA_WIDTH, CAMERA_HEIGHT, CAMERA_RATE)
with picamera.PiCamera(CAMERA_INDEX) as camera:
camera.resolution = (CAMERA_WIDTH, CAMERA_HEIGHT)
camera.framerate = (CAMERA_RATE, 1)
# need enough buffering to overcome any latency
stream = picamera.PiCameraCircularIO(camera, seconds = 1)
        # start recording in mjpeg mode
camera.start_recording(stream, format = 'mjpeg')
try:
while(True):
# process any new frames
camera.wait_recording(0)
piCameraSendFrame(stream, videoPort)
                # see if anything new from the sensors
readSensors()
# handle user input
if (not processUserInput()):
break;
#give other things a chance
time.sleep(0.02)
finally:
camera.stop_recording()
SNCPython.removeService(videoPort)
'''
------------------------------------------------------------
No camera main loop
'''
def noCameraLoop():
''' This is the main loop when no camera is running. '''
while True:
# see if anything from the sensors
readSensors()
# handle any user input
if (not processUserInput()):
break;
# give other things a chance
time.sleep(0.02)
'''
------------------------------------------------------------
Main code
'''
time.sleep(30)
# start SNCPython running
SNCPython.start("EnvSensorv2", sys.argv, False)
# this delay is necessary to allow Qt startup to complete
time.sleep(2)
if not SNCPython.checkDaemonMode():
# put console into single character mode
SNCPython.startConsoleInput()
print("EnvSensorv2 starting...")
print("Enter command: "),
sys.stdout.flush()
# set the title if in GUI mode
SNCPython.setWindowTitle(SNCPython.getAppName() + " camera stream")
sensorPort = SNCPython.addMulticastSource(SNCPythonPy.SNC_STREAMNAME_SENSOR)
if (sensorPort == -1):
print("Failed to activate sensor service")
SNCPython.stop()
sys.exit()
if CAMERA_ENABLE:
if CAMERA_USB:
USBCameraLoop()
else:
piCameraLoop()
else:
noCameraLoop()
# Exiting so clean everything up.
SNCPython.stop()
if not SNCPython.checkDaemonMode():
print("EnvSensorv2 exiting")
| 32.754425
| 168
| 0.620196
|
caec23ae5d577f27694a1c73f9e0c6082f6864a3
| 5,383
|
py
|
Python
|
envs/fetch/custom.py
|
timofriedl/multicam-ihgg
|
f52659a1bb38369b6e2ac62261cc9c65fbc3ca80
|
[
"MIT"
] | null | null | null |
envs/fetch/custom.py
|
timofriedl/multicam-ihgg
|
f52659a1bb38369b6e2ac62261cc9c65fbc3ca80
|
[
"MIT"
] | null | null | null |
envs/fetch/custom.py
|
timofriedl/multicam-ihgg
|
f52659a1bb38369b6e2ac62261cc9c65fbc3ca80
|
[
"MIT"
] | null | null | null |
import numpy as np
import gym
from utils.os_utils import remove_color
class CustomGoalEnv():
def __init__(self, args):
self.args = args
self.env = gym.make(args.env)
self.np_random = self.env.env.np_random
self.distance_threshold = self.env.env.distance_threshold
self.action_space = self.env.action_space
self.observation_space = self.env.observation_space
self.max_episode_steps = self.env._max_episode_steps
self.fixed_obj = False
self.has_object = self.env.env.has_object
self.obj_range = self.env.env.obj_range
# self.target_range = self.env.env.target_range
self.target_offset = self.env.env.target_offset
self.target_in_the_air = self.env.env.target_in_the_air
if self.has_object: self.height_offset = self.env.env.height_offset
self.render = self.env.render
self.get_obs = self.env.env._get_obs
self.reset_sim = self.env.env._reset_sim
self.reset_ep()
self.env_info = {
'Rewards': self.process_info_rewards, # episode cumulative rewards
'Distance': self.process_info_distance, # distance in the last step
'Success@green': self.process_info_success # is_success in the last step
}
self.env.reset()
self.fixed_obj = True
def compute_reward(self, achieved, goal):
# achieved is a tuple of two goals
return self.env.env.compute_reward(achieved[0], goal, None)
# Original
# dis = goal_distance(achieved[0], goal)
# return -1.0 if dis > self.distance_threshold else 0.0
def compute_distance(self, achieved, goal):
return np.sqrt(np.sum(np.square(achieved - goal)))
def process_info_rewards(self, obs, reward, info):
self.rewards += reward
return self.rewards
def process_info_distance(self, obs, reward, info):
return self.compute_distance(obs['achieved_goal'], obs['desired_goal'])
def process_info_success(self, obs, reward, info):
return info['is_success']
def process_info(self, obs, reward, info):
return {
remove_color(key): value_func(obs, reward, info)
for key, value_func in self.env_info.items()
}
def step(self, action):
        # imaginary infinite horizon (without done signal)
obs, reward, done, info = self.env.step(action)
info = self.process_info(obs, reward, info)
reward = self.compute_reward((obs['achieved_goal'], self.last_obs['achieved_goal']),
obs['desired_goal']) # TODO: why the heck second argument if it is then ignored??
self.last_obs = obs.copy()
return obs, reward, False, info
def reset_ep(self):
self.rewards = 0.0
def reset(self):
self.reset_ep()
self.last_obs = (self.env.reset()).copy()
return self.last_obs.copy()
@property
def sim(self):
return self.env.env.sim
@sim.setter
def sim(self, new_sim):
self.env.env.sim = new_sim
@property
def initial_state(self):
return self.env.env.initial_state
@property
def initial_gripper_xpos(self):
return self.env.env.initial_gripper_xpos.copy()
@property
def goal(self):
return self.env.env.goal.copy()
@goal.setter
def goal(self, value):
self.env.env.goal = value.copy()
def generate_goal(self):
"""
if self.has_object:
goal = self.initial_gripper_xpos[:3] + self.target_offset
if self.args.env == 'FetchSlide-v1':
goal[0] += self.target_range * 0.5
goal[1] += np.random.uniform(-self.target_range, self.target_range) * 0.5
else:
goal[0] += np.random.uniform(-self.target_range, self.target_range)
goal[1] += self.target_range
# goal[1] += np.random.uniform(-self.target_range, self.target_range) # TODO: changed
goal[2] = self.height_offset + int(self.target_in_the_air) * 0.45
else:
goal = self.initial_gripper_xpos[:3] + np.array(
[np.random.uniform(-self.target_range, self.target_range), self.target_range, self.target_range])
return goal.copy()
"""
return self.env.env._sample_goal()
def reset(self):
self.reset_ep()
self.env.env._reset_sim()
"""
self.sim.set_state(self.initial_state)
if self.has_object:
object_xpos = self.initial_gripper_xpos[:2].copy()
random_offset = np.random.uniform(-1, 1) * self.obj_range * self.args.init_offset
if self.args.env == 'FetchSlide-v1':
object_xpos -= np.array([self.obj_range * 0.5, random_offset])
else:
object_xpos -= np.array([random_offset, self.obj_range])
object_qpos = self.sim.data.get_joint_qpos('object0:joint')
assert object_qpos.shape == (7,)
object_qpos[:2] = object_xpos
self.sim.data.set_joint_qpos('object0:joint', object_qpos)
self.sim.forward()
"""
self.goal = self.generate_goal()
self.last_obs = (self.get_obs()).copy()
return self.get_obs()
def generate_goal(self):
return self.env.env._sample_goal()
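# Minimal usage sketch (illustrative; `args` is assumed to be a namespace that
# provides at least `args.env`, e.g. args.env = "FetchReach-v1"):
#
#   env = CustomGoalEnv(args)
#   obs = env.reset()
#   obs, reward, done, info = env.step(env.action_space.sample())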
| 35.886667
| 119
| 0.6188
|
d4818253078e2c27a917dd5a122d8306533522e1
| 1,416
|
py
|
Python
|
public/index.py
|
DudhaneShrey86/Shramik
|
47a0f6071775909287630d0562ff57e54ede2b43
|
[
"MIT"
] | null | null | null |
public/index.py
|
DudhaneShrey86/Shramik
|
47a0f6071775909287630d0562ff57e54ede2b43
|
[
"MIT"
] | null | null | null |
public/index.py
|
DudhaneShrey86/Shramik
|
47a0f6071775909287630d0562ff57e54ede2b43
|
[
"MIT"
] | null | null | null |
import sys
import pandas as pd
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.ensemble import RandomForestRegressor
from sklearn.tree import export_graphviz
import pydot
import json
features = pd.read_csv("dataset2.csv")
labels = np.array(features['hired'])
features= features.drop('hired', axis = 1)
feature_list = list(features.columns)
features = np.array(features)
train_features, test_features, train_labels, test_labels = train_test_split(features, labels, test_size = 0.25, random_state = 42)
rf = RandomForestRegressor(n_estimators = 1000, random_state = 42)
rf.fit(train_features, train_labels);
arr = json.loads(sys.argv[1])
predictions = rf.predict(arr)
print(predictions.tolist())
# errors = abs(predictions - test_labels)
# print('Mean Absolute Error:', round(np.mean(errors), 2), 'degrees.')
#
# tree = rf.estimators_[5]
# export_graphviz(tree, out_file = 'tree.dot', feature_names = feature_list, rounded = True, precision = 1)
# (graph, ) = pydot.graph_from_dot_file('tree.dot')
# graph.write_png('tree.png')
#
# importances = list(rf.feature_importances_)
# feature_importances = [(feature, round(importance, 2)) for feature, importance in zip(feature_list, importances)]
# feature_importances = sorted(feature_importances, key = lambda x: x[1], reverse = True)
# [print('Variable: {:20} Importance: {}'.format(*pair)) for pair in feature_importances];
| 35.4
| 130
| 0.759181
|
d382cd629d3cb6a44ca1d303d5ce0c525618641a
| 11,966
|
py
|
Python
|
python/ray/tune/automlboard/backend/collector.py
|
cumttang/ray
|
eb1e5fa2cf26233701ccbda3eb8a301ecd418d8c
|
[
"Apache-2.0"
] | 29
|
2019-05-18T12:18:34.000Z
|
2022-03-30T01:46:48.000Z
|
python/ray/tune/automlboard/backend/collector.py
|
collinswei/ray
|
2e30f7ba386e716bf80f019dcd473b67d83abb95
|
[
"Apache-2.0"
] | 8
|
2019-08-15T05:42:10.000Z
|
2021-05-21T09:41:15.000Z
|
python/ray/tune/automlboard/backend/collector.py
|
collinswei/ray
|
2e30f7ba386e716bf80f019dcd473b67d83abb95
|
[
"Apache-2.0"
] | 8
|
2019-07-15T22:36:20.000Z
|
2020-08-09T07:03:26.000Z
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import logging
import os
import time
from threading import Thread
from ray.tune.automlboard.common.exception import CollectorError
from ray.tune.automlboard.common.utils import parse_json, \
parse_multiple_json, timestamp2date
from ray.tune.automlboard.models.models import JobRecord, \
TrialRecord, ResultRecord
from ray.tune.result import DEFAULT_RESULTS_DIR, JOB_META_FILE, \
EXPR_PARARM_FILE, EXPR_RESULT_FILE, EXPR_META_FILE
class CollectorService(object):
"""Server implementation to monitor the log directory.
    The service saves job and trial information in the db.
"""
def __init__(self,
log_dir=DEFAULT_RESULTS_DIR,
reload_interval=30,
standalone=True,
log_level="INFO"):
"""Initialize the collector service.
Args:
log_dir (str): Directory of the logs about trials' information.
reload_interval (int): Sleep time period after each polling round.
            standalone (boolean): If True, run() blocks until the collector thread ends.
log_level (str): Level of logging.
"""
self.logger = self.init_logger(log_level)
self.standalone = standalone
self.collector = Collector(
reload_interval=reload_interval,
logdir=log_dir,
logger=self.logger)
def run(self):
"""Start the collector worker thread.
If running in standalone mode, the current thread will wait
until the collector thread ends.
"""
self.collector.start()
if self.standalone:
self.collector.join()
def stop(self):
"""Stop the collector worker thread."""
self.collector.stop()
@classmethod
def init_logger(cls, log_level):
"""Initialize logger settings."""
logger = logging.getLogger("AutoMLBoard")
handler = logging.StreamHandler()
formatter = logging.Formatter('[%(levelname)s %(asctime)s] '
'%(filename)s: %(lineno)d '
'%(message)s')
handler.setFormatter(formatter)
logger.setLevel(log_level)
logger.addHandler(handler)
return logger
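# Minimal usage sketch (illustrative; the log directory is a placeholder):
#
#   service = CollectorService(log_dir="/tmp/ray_results", reload_interval=30,
#                              standalone=True, log_level="INFO")
#   service.run()   # blocks in standalone mode until the collector thread ends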
class Collector(Thread):
"""Worker thread for collector service."""
def __init__(self, reload_interval, logdir, logger):
"""Initialize collector worker thread.
        Args:
reload_interval (int): Time period to sleep after each round
of polling.
logdir (str): Directory path to save the status information of
jobs and trials.
logger (Logger): Logger for collector thread.
"""
super(Collector, self).__init__()
self._is_finished = False
self._reload_interval = reload_interval
self._logdir = logdir
self._monitored_jobs = set()
self._monitored_trials = set()
self._result_offsets = {}
self.logger = logger
self.daemon = True
def run(self):
"""Run the main event loop for collector thread.
        In each round the collector traverses the results log directory
        and reloads trial information from the status files.
"""
self._initialize()
self._do_collect()
while not self._is_finished:
time.sleep(self._reload_interval)
self._do_collect()
self.logger.info("Collector stopped.")
def stop(self):
"""Stop the main polling loop."""
self._is_finished = True
def _initialize(self):
"""Initialize collector worker thread, Log path will be checked first.
Records in DB backend will be cleared.
"""
if not os.path.exists(self._logdir):
raise CollectorError("Log directory %s not exists" % self._logdir)
self.logger.info("Collector started, taking %s as parent directory"
"for all job logs." % self._logdir)
# clear old records
JobRecord.objects.filter().delete()
TrialRecord.objects.filter().delete()
ResultRecord.objects.filter().delete()
def _do_collect(self):
sub_dirs = os.listdir(self._logdir)
job_names = filter(
lambda d: os.path.isdir(os.path.join(self._logdir, d)), sub_dirs)
for job_name in job_names:
self.sync_job_info(job_name)
def sync_job_info(self, job_name):
"""Load information of the job with the given job name.
1. Traverse each experiment sub-directory and sync information
for each trial.
2. Create or update the job information, together with the job
meta file.
Args:
            job_name (str): Name of the Tune experiment.
"""
job_path = os.path.join(self._logdir, job_name)
if job_name not in self._monitored_jobs:
self._create_job_info(job_path)
self._monitored_jobs.add(job_name)
else:
self._update_job_info(job_path)
expr_dirs = filter(lambda d: os.path.isdir(os.path.join(job_path, d)),
os.listdir(job_path))
for expr_dir_name in expr_dirs:
self.sync_trial_info(job_path, expr_dir_name)
self._update_job_info(job_path)
def sync_trial_info(self, job_path, expr_dir_name):
"""Load information of the trial from the given experiment directory.
Create or update the trial information, together with the trial
meta file.
Args:
            job_path (str): Directory path of the job.
            expr_dir_name (str): Name of the experiment sub-directory.
"""
expr_name = expr_dir_name[-8:]
expr_path = os.path.join(job_path, expr_dir_name)
if expr_name not in self._monitored_trials:
self._create_trial_info(expr_path)
self._monitored_trials.add(expr_name)
else:
self._update_trial_info(expr_path)
def _create_job_info(self, job_dir):
"""Create information for given job.
        Meta file will be loaded if it exists, and the job information will
be saved in db backend.
Args:
job_dir (str): Directory path of the job.
"""
meta = self._build_job_meta(job_dir)
self.logger.debug("Create job: %s" % meta)
job_record = JobRecord.from_json(meta)
job_record.save()
@classmethod
def _update_job_info(cls, job_dir):
"""Update information for given job.
        Meta file will be loaded if it exists, and the job information
        in the db backend will be updated.
Args:
job_dir (str): Directory path of the job.
Return:
Updated dict of job meta info
"""
meta_file = os.path.join(job_dir, JOB_META_FILE)
meta = parse_json(meta_file)
if meta:
logging.debug("Update job info for %s" % meta["job_id"])
JobRecord.objects \
.filter(job_id=meta["job_id"]) \
.update(end_time=timestamp2date(meta["end_time"]))
def _create_trial_info(self, expr_dir):
"""Create information for given trial.
        Meta file will be loaded if it exists, and the trial information
will be saved in db backend.
Args:
expr_dir (str): Directory path of the experiment.
"""
meta = self._build_trial_meta(expr_dir)
self.logger.debug("Create trial for %s" % meta)
trial_record = TrialRecord.from_json(meta)
trial_record.save()
def _update_trial_info(self, expr_dir):
"""Update information for given trial.
        Meta file will be loaded if it exists, and the trial information
in db backend will be updated.
Args:
            expr_dir (str): Directory path of the experiment.
"""
trial_id = expr_dir[-8:]
meta_file = os.path.join(expr_dir, EXPR_META_FILE)
meta = parse_json(meta_file)
result_file = os.path.join(expr_dir, EXPR_RESULT_FILE)
offset = self._result_offsets.get(trial_id, 0)
results, new_offset = parse_multiple_json(result_file, offset)
self._add_results(results, trial_id)
self._result_offsets[trial_id] = new_offset
if meta:
TrialRecord.objects \
.filter(trial_id=trial_id) \
.update(trial_status=meta["status"],
end_time=timestamp2date(meta.get("end_time", None)))
elif len(results) > 0:
metrics = {
"episode_reward": results[-1].get("episode_reward_mean", None),
"accuracy": results[-1].get("mean_accuracy", None),
"loss": results[-1].get("loss", None)
}
if results[-1].get("done"):
TrialRecord.objects \
.filter(trial_id=trial_id) \
.update(trial_status="TERMINATED",
end_time=results[-1].get("date", None),
metrics=str(metrics))
else:
TrialRecord.objects \
.filter(trial_id=trial_id) \
.update(metrics=str(metrics))
@classmethod
def _build_job_meta(cls, job_dir):
"""Build meta file for job.
Args:
job_dir (str): Directory path of the job.
Return:
A dict of job meta info.
"""
meta_file = os.path.join(job_dir, JOB_META_FILE)
meta = parse_json(meta_file)
if not meta:
job_name = job_dir.split('/')[-1]
user = os.environ.get("USER", None)
meta = {
"job_id": job_name,
"job_name": job_name,
"user": user,
"type": "ray",
"start_time": os.path.getctime(job_dir),
"end_time": None,
"best_trial_id": None,
}
if meta.get("start_time", None):
meta["start_time"] = timestamp2date(meta["start_time"])
return meta
@classmethod
def _build_trial_meta(cls, expr_dir):
"""Build meta file for trial.
Args:
expr_dir (str): Directory path of the experiment.
Return:
A dict of trial meta info.
"""
meta_file = os.path.join(expr_dir, EXPR_META_FILE)
meta = parse_json(meta_file)
if not meta:
job_id = expr_dir.split('/')[-2]
trial_id = expr_dir[-8:]
params = parse_json(os.path.join(expr_dir, EXPR_PARARM_FILE))
meta = {
"trial_id": trial_id,
"job_id": job_id,
"status": "RUNNING",
"type": "TUNE",
"start_time": os.path.getctime(expr_dir),
"end_time": None,
"progress_offset": 0,
"result_offset": 0,
"params": params
}
if not meta.get("start_time", None):
meta["start_time"] = os.path.getctime(expr_dir)
if isinstance(meta["start_time"], float):
meta["start_time"] = timestamp2date(meta["start_time"])
if meta.get("end_time", None):
meta["end_time"] = timestamp2date(meta["end_time"])
meta["params"] = parse_json(os.path.join(expr_dir, EXPR_PARARM_FILE))
return meta
def _add_results(self, results, trial_id):
"""Add a list of results into db.
Args:
results (list): A list of json results.
trial_id (str): Id of the trial.
"""
for result in results:
self.logger.debug("Appending result: %s" % result)
result["trial_id"] = trial_id
result_record = ResultRecord.from_json(result)
result_record.save()
| 32.516304
| 79
| 0.583737
|
6d189e56d1f511251988cb019320635522adae37
| 1,352
|
py
|
Python
|
Country cleaning/Peru/Peru14.py
|
Demonliquid/cars-python-cleaning
|
91c516a33c4522114dc024cfaf04f1c1d594f973
|
[
"MIT"
] | null | null | null |
Country cleaning/Peru/Peru14.py
|
Demonliquid/cars-python-cleaning
|
91c516a33c4522114dc024cfaf04f1c1d594f973
|
[
"MIT"
] | null | null | null |
Country cleaning/Peru/Peru14.py
|
Demonliquid/cars-python-cleaning
|
91c516a33c4522114dc024cfaf04f1c1d594f973
|
[
"MIT"
] | null | null | null |
# %%
import os
import pandas as pd
import numpy as np
import datetime
import re
# %%
pd.set_option('display.max_colwidth', None)
# %% CARGA DE DATOS
path = r'D:\Basededatos\Origen\Peru\2012-2015\2014'
os.chdir(path)
files = os.listdir(path)
files
# %%
files_xls = [f for f in files if f[-4:] == 'xlsx']
files_xls
# %%
columnas = []
peru = pd.DataFrame(columns=columnas)
# %%
for f in files_xls:
data = pd.read_excel(f, engine='openpyxl')
peru = pd.concat([peru , data], ignore_index=True, join='outer')
# %%
peru.info()
# %%
peru["AÑO DE IMPORTACION"] = 2014
# %%
columnas = ["DESCRIPCION COMERCIAL 1",
"DESCRIPCION COMERCIAL 2",
"DESCRIPCION COMERCIAL 3",
"DESCRIPCION COMERCIAL 4",
"DESCRIPCION COMERCIAL 5",
"PAIS DE ORIGEN",
"POSICION ARANCELARIA",
"AÑO DE IMPORTACION",
"CANTIDAD",
"A�O FABRICACION"]
peru = peru[columnas]
# %%
peru = peru.rename(columns={
"PAIS DE ORIGEN":"ORIGEN",
"POSICION ARANCELARIA": "DATOS PERSONALES",
"A�O FABRICACION": "AÑO DE FABRICACION",
"CANTIDAD": "CANTIDAD"})
# %%
peru.to_csv(r"D:\Basededatos\Origen\Peru\2012-2015\2014.csv", index=False)
# %%
peru.info()
# %%
| 19.314286
| 74
| 0.565828
|
b199ca5a1c18878375234dec5749df3b5d2aee86
| 810
|
py
|
Python
|
tests/programs/package_module_collision/Something/__init__.py
|
augustand/Nuitka
|
b7b9dd50b60505a309f430ce17cad36fb7d75048
|
[
"Apache-2.0"
] | null | null | null |
tests/programs/package_module_collision/Something/__init__.py
|
augustand/Nuitka
|
b7b9dd50b60505a309f430ce17cad36fb7d75048
|
[
"Apache-2.0"
] | null | null | null |
tests/programs/package_module_collision/Something/__init__.py
|
augustand/Nuitka
|
b7b9dd50b60505a309f430ce17cad36fb7d75048
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2016, Kay Hayen, mailto:kay.hayen@gmail.com
#
# Python tests originally created or extracted from other peoples work. The
# parts were too small to be protected.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
print("Importing Something/__init__.py")
| 42.631579
| 79
| 0.720988
|
c53b13a0a795da02b8e79c41bdc67e5d63fe3c4e
| 4,398
|
py
|
Python
|
src/dietpdf/token/TokenStack.py
|
Zigazou/dietpdf
|
4608ea2d9d54861d4d894b725a60895e0bdc131a
|
[
"MIT"
] | 6
|
2022-02-07T06:17:41.000Z
|
2022-02-15T17:29:16.000Z
|
src/dietpdf/token/TokenStack.py
|
Zigazou/dietpdf
|
4608ea2d9d54861d4d894b725a60895e0bdc131a
|
[
"MIT"
] | 2
|
2022-02-08T10:00:08.000Z
|
2022-02-08T12:59:42.000Z
|
src/dietpdf/token/TokenStack.py
|
Zigazou/dietpdf
|
4608ea2d9d54861d4d894b725a60895e0bdc131a
|
[
"MIT"
] | null | null | null |
__author__ = "Frédéric BISSON"
__copyright__ = "Copyright 2022, Frédéric BISSON"
__credits__ = ["Frédéric BISSON"]
__license__ = "mit"
__maintainer__ = "Frédéric BISSON"
__email__ = "zigazou@protonmail.com"
from .PDFToken import PDFToken
class TokenStack:
"""A token stack
A token stack which can be read or written.
"""
def __init__(self):
self.stack = []
def insert(self, index: int, item: PDFToken):
"""Insert a PDFToken at specified index.
:param item: The item to insert
:type item: PDFToken or any subclass of PDFToken
:param index: The index where to insert the item
:type index: int
:raise TypeError: If `item` is not a PDFToken or any subclass of PDFToken
"""
if not isinstance(item, PDFToken):
raise TypeError("expected PDFToken or any subclass")
self.stack.insert(index, item)
def push(self, item: PDFToken):
"""Push a PDFToken.
Any number of PDFToken may be pushed but PDFObject may only be pushed
once.
:param item:
:type item: PDFToken or any subclass of PDFToken
:raise TypeError: If `item` is not a PDFToken or any subclass of PDFToken
"""
if not isinstance(item, PDFToken):
raise TypeError("expected PDFToken or any subclass")
self.stack.append(item)
def pop(self, index=-1) -> PDFToken:
"""Pop the last pushed item.
:param index: The index of the item to pop, the last one if None
:type index: int
:return: The last pushed item
:rtype: PDFToken or any subclass of PDFToken
:raise IndexError: If there is no more PDFToken to pop
"""
try:
item = self.stack.pop(index)
except IndexError:
raise IndexError("pop from empty PDF")
return item
def stack_size(self) -> int:
"""Returns the stack size of the PDF
:return: Stack size of the PDF
:rtype: int
"""
return len(self.stack)
def stack_at(self, index: int) -> PDFToken:
"""Get the element on the stack at the specified index.
:param index: The index
:type index: int
"""
return self.stack[index]
def find(self, select: callable, start: int = 0):
"""Find an item in the stack according to a predicate.
The predicate should have the following signature:
predicate(index: int, item: PDFToken) -> bool
Where:
- index is the index of the item in the stack, not its object number
- item is a PDFToken or any subclass
- the predicate must return True if the item meets the criteria
:param select: The predicate
:type select: function
:param start: the starting index
:type start: int
:raise TypeError: If the predicate is not a function
"""
if not callable(select):
raise TypeError("select must be a function which returns a boolean")
index = start
while index < len(self.stack):
item = self.stack[index]
if select(index, item):
yield (index, item)
index += 1
def find_all(self, select: callable, start: int = 0) -> list:
"""Find an item in the stack according to a predicate.
See the `find` method for information about the predicate.
:param select: The predicate
:type select: function
:param start: the starting index
:type start: int
:raise TypeError: If the predicate is not a function
:return: A list of PDFToken satisfying the predicate
:rtype: list
"""
return [item for _, item in self.find(select, start)]
def find_first(self, select: callable, start: int = 0) -> PDFToken:
"""Find an item in the stack according to a predicate.
See the `find` method for information about the predicate.
:param select: The predicate
:type select: function
:param start: the starting index
:type start: int
:raise TypeError: If the predicate is not a function
:return: The first item satisfying the predicate
:rtype: PDFToken or None
"""
for index, item in self.find(select, start):
return item
return None
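# A minimal usage sketch (hedged: PDFToken construction is defined elsewhere in
# the package, so the predicate below only matches on the class name):
#
#     def is_name_token(index, item):
#         return type(item).__name__ == "PDFName"
#
#     stack = TokenStack()          # ... populated with tokens elsewhere ...
#     first_name = stack.find_first(is_name_token)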
| 30.331034
| 81
| 0.608004
|
1b51114554e62f53dee8a425a8932728acc8d17e
| 519
|
py
|
Python
|
setup.py
|
biosckon/petpy
|
dceef40c416eb3d3fdcc8bd5704671ba670580c5
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
biosckon/petpy
|
dceef40c416eb3d3fdcc8bd5704671ba670580c5
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
biosckon/petpy
|
dceef40c416eb3d3fdcc8bd5704671ba670580c5
|
[
"Apache-2.0"
] | null | null | null |
from setuptools import setup
VERSION = ""
with open("VERSION", "r+") as f:
content = f.read()
ver = str(content).strip().split(".")
ver[2] = str(int(ver[2]) + 1)
VERSION = ".".join(ver)
f.seek(0)
    f.write(VERSION)
    f.truncate()  # drop leftover characters if the new version string is shorter
setup(
name="petpy",
version=VERSION,
description="Petrophysics utilities",
url="https://example.com/",
author="Agile Scientific",
author_email="matt@agilescientific.com",
license="Apache 2",
packages=["petpy"],
install_requires=["numpy"],
)
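# Sketch of the assumed VERSION file layout: a single semver-style line such as
# "0.1.7". Running this setup.py rewrites that file in place with the patch
# component incremented (e.g. "0.1.8") before packaging.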
| 20.76
| 44
| 0.606936
|
7d92f6500e58ba36f663975996ce327a3d2b1e08
| 25,883
|
py
|
Python
|
rllib/policy/eager_tf_policy.py
|
another-green/ray
|
69dc1e1386a53dd96593b2e6da3b2bdf8fba874f
|
[
"Apache-2.0"
] | null | null | null |
rllib/policy/eager_tf_policy.py
|
another-green/ray
|
69dc1e1386a53dd96593b2e6da3b2bdf8fba874f
|
[
"Apache-2.0"
] | null | null | null |
rllib/policy/eager_tf_policy.py
|
another-green/ray
|
69dc1e1386a53dd96593b2e6da3b2bdf8fba874f
|
[
"Apache-2.0"
] | null | null | null |
"""Eager mode TF policy built using build_tf_policy().
It supports both traced and non-traced eager execution modes."""
import functools
import logging
from ray.util.debug import log_once
from ray.rllib.models.catalog import ModelCatalog
from ray.rllib.models.repeated_values import RepeatedValues
from ray.rllib.policy.policy import Policy, LEARNER_STATS_KEY
from ray.rllib.policy.rnn_sequencing import pad_batch_to_sequences_of_same_size
from ray.rllib.policy.sample_batch import SampleBatch
from ray.rllib.utils import add_mixins
from ray.rllib.utils.annotations import override
from ray.rllib.utils.framework import try_import_tf
from ray.rllib.utils.tf_ops import convert_to_non_tf_type
from ray.rllib.utils.tracking_dict import UsageTrackingDict
tf1, tf, tfv = try_import_tf()
logger = logging.getLogger(__name__)
def _convert_to_tf(x, dtype=None):
if isinstance(x, SampleBatch):
x = {k: v for k, v in x.items() if k != SampleBatch.INFOS}
return tf.nest.map_structure(_convert_to_tf, x)
elif isinstance(x, Policy):
return x
# Special handling of "Repeated" values.
elif isinstance(x, RepeatedValues):
return RepeatedValues(
tf.nest.map_structure(_convert_to_tf, x.values), x.lengths,
x.max_len)
if x is not None:
d = dtype
x = tf.nest.map_structure(
lambda f: tf.convert_to_tensor(f, d) if f is not None else None, x)
return x
def _convert_to_numpy(x):
def _map(x):
if isinstance(x, tf.Tensor):
return x.numpy()
return x
try:
return tf.nest.map_structure(_map, x)
except AttributeError:
raise TypeError(
("Object of type {} has no method to convert to numpy.").format(
type(x)))
def convert_eager_inputs(func):
@functools.wraps(func)
def _func(*args, **kwargs):
if tf.executing_eagerly():
args = [_convert_to_tf(x) for x in args]
# TODO: (sven) find a way to remove key-specific hacks.
kwargs = {
k: _convert_to_tf(
v, dtype=tf.int64 if k == "timestep" else None)
for k, v in kwargs.items()
if k not in {"info_batch", "episodes"}
}
return func(*args, **kwargs)
return _func
def convert_eager_outputs(func):
@functools.wraps(func)
def _func(*args, **kwargs):
out = func(*args, **kwargs)
if tf.executing_eagerly():
out = tf.nest.map_structure(_convert_to_numpy, out)
return out
return _func
def _disallow_var_creation(next_creator, **kw):
v = next_creator(**kw)
raise ValueError("Detected a variable being created during an eager "
"forward pass. Variables should only be created during "
"model initialization: {}".format(v.name))
def traced_eager_policy(eager_policy_cls):
"""Wrapper that enables tracing for all eager policy methods.
This is enabled by the --trace / "eager_tracing" config."""
class TracedEagerPolicy(eager_policy_cls):
def __init__(self, *args, **kwargs):
self._traced_learn_on_batch = None
self._traced_compute_actions = None
self._traced_compute_gradients = None
self._traced_apply_gradients = None
super(TracedEagerPolicy, self).__init__(*args, **kwargs)
@override(eager_policy_cls)
@convert_eager_inputs
@convert_eager_outputs
def _learn_on_batch_eager(self, samples):
if self._traced_learn_on_batch is None:
self._traced_learn_on_batch = tf.function(
super(TracedEagerPolicy, self)._learn_on_batch_eager,
autograph=False,
experimental_relax_shapes=True)
return self._traced_learn_on_batch(samples)
@override(Policy)
@convert_eager_inputs
@convert_eager_outputs
def compute_actions(self,
obs_batch,
state_batches=None,
prev_action_batch=None,
prev_reward_batch=None,
info_batch=None,
episodes=None,
explore=None,
timestep=None,
**kwargs):
obs_batch = tf.convert_to_tensor(obs_batch)
state_batches = _convert_to_tf(state_batches)
prev_action_batch = _convert_to_tf(prev_action_batch)
prev_reward_batch = _convert_to_tf(prev_reward_batch)
if self._traced_compute_actions is None:
self._traced_compute_actions = tf.function(
super(TracedEagerPolicy, self).compute_actions,
autograph=False,
experimental_relax_shapes=True)
return self._traced_compute_actions(
obs_batch, state_batches, prev_action_batch, prev_reward_batch,
info_batch, episodes, explore, timestep, **kwargs)
@override(eager_policy_cls)
@convert_eager_inputs
@convert_eager_outputs
def _compute_gradients_eager(self, samples):
if self._traced_compute_gradients is None:
self._traced_compute_gradients = tf.function(
super(TracedEagerPolicy, self).compute_gradients,
autograph=False,
experimental_relax_shapes=True)
return self._traced_compute_gradients(samples)
@override(Policy)
@convert_eager_inputs
@convert_eager_outputs
def apply_gradients(self, grads):
if self._traced_apply_gradients is None:
self._traced_apply_gradients = tf.function(
super(TracedEagerPolicy, self).apply_gradients,
autograph=False,
experimental_relax_shapes=True)
return self._traced_apply_gradients(grads)
TracedEagerPolicy.__name__ = eager_policy_cls.__name__
TracedEagerPolicy.__qualname__ = eager_policy_cls.__qualname__
return TracedEagerPolicy
def build_eager_tf_policy(name,
loss_fn,
get_default_config=None,
postprocess_fn=None,
stats_fn=None,
optimizer_fn=None,
gradients_fn=None,
apply_gradients_fn=None,
grad_stats_fn=None,
extra_learn_fetches_fn=None,
extra_action_fetches_fn=None,
validate_spaces=None,
before_init=None,
before_loss_init=None,
after_init=None,
make_model=None,
action_sampler_fn=None,
action_distribution_fn=None,
mixins=None,
obs_include_prev_action_reward=True,
get_batch_divisibility_req=None):
"""Build an eager TF policy.
An eager policy runs all operations in eager mode, which makes debugging
much simpler, but has lower performance.
You shouldn't need to call this directly. Rather, prefer to build a TF
    graph policy and set {"framework": "tfe"} in the trainer config to have
it automatically be converted to an eager policy.
This has the same signature as build_tf_policy()."""
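    # A hedged usage sketch (trainer class and env names below are illustrative
    # only, not taken from this file): eager policies are normally requested
    # through the trainer config rather than by calling this builder directly,
    # e.g.
    #   config = {"framework": "tfe", "eager_tracing": True}
    #   trainer = SomeTrainer(config=config, env="CartPole-v0")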
base = add_mixins(Policy, mixins)
class eager_policy_cls(base):
def __init__(self, observation_space, action_space, config):
assert tf.executing_eagerly()
self.framework = config.get("framework", "tfe")
Policy.__init__(self, observation_space, action_space, config)
self._is_training = False
self._loss_initialized = False
self._sess = None
self._loss = loss_fn
self.batch_divisibility_req = get_batch_divisibility_req(self) if \
callable(get_batch_divisibility_req) else \
(get_batch_divisibility_req or 1)
self._max_seq_len = config["model"]["max_seq_len"]
if get_default_config:
config = dict(get_default_config(), **config)
if validate_spaces:
validate_spaces(self, observation_space, action_space, config)
if before_init:
before_init(self, observation_space, action_space, config)
self.config = config
self.dist_class = None
if action_sampler_fn or action_distribution_fn:
if not make_model:
raise ValueError(
"`make_model` is required if `action_sampler_fn` OR "
"`action_distribution_fn` is given")
else:
self.dist_class, logit_dim = ModelCatalog.get_action_dist(
action_space, self.config["model"])
if make_model:
self.model = make_model(self, observation_space, action_space,
config)
else:
self.model = ModelCatalog.get_model_v2(
observation_space,
action_space,
logit_dim,
config["model"],
framework=self.framework,
)
# Auto-update model's inference view requirements, if recurrent.
self._update_model_inference_view_requirements_from_init_state()
self.exploration = self._create_exploration()
self._state_in = [
tf.convert_to_tensor([s])
for s in self.model.get_initial_state()
]
# Combine view_requirements for Model and Policy.
self.view_requirements.update(
self.model.inference_view_requirements)
if before_loss_init:
before_loss_init(self, observation_space, action_space, config)
self._initialize_loss_from_dummy_batch(
auto_remove_unneeded_view_reqs=True,
stats_fn=stats_fn,
)
self._loss_initialized = True
if optimizer_fn:
self._optimizer = optimizer_fn(self, config)
else:
self._optimizer = tf.keras.optimizers.Adam(config["lr"])
if after_init:
after_init(self, observation_space, action_space, config)
# Got to reset global_timestep again after this fake run-through.
self.global_timestep = 0
@override(Policy)
def postprocess_trajectory(self,
sample_batch,
other_agent_batches=None,
episode=None):
assert tf.executing_eagerly()
# Call super's postprocess_trajectory first.
sample_batch = Policy.postprocess_trajectory(self, sample_batch)
if postprocess_fn:
return postprocess_fn(self, sample_batch, other_agent_batches,
episode)
return sample_batch
@override(Policy)
def learn_on_batch(self, postprocessed_batch):
# Callback handling.
self.callbacks.on_learn_on_batch(
policy=self, train_batch=postprocessed_batch)
# Get batch ready for RNNs, if applicable.
pad_batch_to_sequences_of_same_size(
postprocessed_batch,
shuffle=False,
max_seq_len=self._max_seq_len,
batch_divisibility_req=self.batch_divisibility_req)
return self._learn_on_batch_eager(postprocessed_batch)
@convert_eager_inputs
@convert_eager_outputs
def _learn_on_batch_eager(self, samples):
with tf.variable_creator_scope(_disallow_var_creation):
grads_and_vars, stats = self._compute_gradients(samples)
self._apply_gradients(grads_and_vars)
return stats
@override(Policy)
def compute_gradients(self, samples):
# Get batch ready for RNNs, if applicable.
pad_batch_to_sequences_of_same_size(
samples,
shuffle=False,
max_seq_len=self._max_seq_len,
batch_divisibility_req=self.batch_divisibility_req)
return self._compute_gradients_eager(samples)
@convert_eager_inputs
@convert_eager_outputs
def _compute_gradients_eager(self, samples):
with tf.variable_creator_scope(_disallow_var_creation):
grads_and_vars, stats = self._compute_gradients(samples)
grads = [g for g, v in grads_and_vars]
return grads, stats
@override(Policy)
@convert_eager_inputs
@convert_eager_outputs
def compute_actions(self,
obs_batch,
state_batches=None,
prev_action_batch=None,
prev_reward_batch=None,
info_batch=None,
episodes=None,
explore=None,
timestep=None,
**kwargs):
explore = explore if explore is not None else \
self.config["explore"]
timestep = timestep if timestep is not None else \
self.global_timestep
# TODO: remove python side effect to cull sources of bugs.
self._is_training = False
self._state_in = state_batches
if not tf1.executing_eagerly():
tf1.enable_eager_execution()
input_dict = {
SampleBatch.CUR_OBS: tf.convert_to_tensor(obs_batch),
"is_training": tf.constant(False),
}
batch_size = input_dict[SampleBatch.CUR_OBS].shape[0]
seq_lens = tf.ones(batch_size, dtype=tf.int32)
if obs_include_prev_action_reward:
if prev_action_batch is not None:
input_dict[SampleBatch.PREV_ACTIONS] = \
tf.convert_to_tensor(prev_action_batch)
if prev_reward_batch is not None:
input_dict[SampleBatch.PREV_REWARDS] = \
tf.convert_to_tensor(prev_reward_batch)
# Use Exploration object.
with tf.variable_creator_scope(_disallow_var_creation):
if action_sampler_fn:
dist_inputs = None
state_out = []
actions, logp = self.action_sampler_fn(
self,
self.model,
input_dict[SampleBatch.CUR_OBS],
explore=explore,
timestep=timestep,
episodes=episodes)
else:
# Exploration hook before each forward pass.
self.exploration.before_compute_actions(
timestep=timestep, explore=explore)
if action_distribution_fn:
dist_inputs, dist_class, state_out = \
action_distribution_fn(
self, self.model,
input_dict[SampleBatch.CUR_OBS],
explore=explore,
timestep=timestep,
is_training=False)
else:
dist_class = self.dist_class
dist_inputs, state_out = self.model(
input_dict, state_batches, seq_lens)
action_dist = dist_class(dist_inputs, self.model)
# Get the exploration action from the forward results.
actions, logp = self.exploration.get_exploration_action(
action_distribution=action_dist,
timestep=timestep,
explore=explore)
# Add default and custom fetches.
extra_fetches = {}
# Action-logp and action-prob.
if logp is not None:
extra_fetches[SampleBatch.ACTION_PROB] = tf.exp(logp)
extra_fetches[SampleBatch.ACTION_LOGP] = logp
# Action-dist inputs.
if dist_inputs is not None:
extra_fetches[SampleBatch.ACTION_DIST_INPUTS] = dist_inputs
# Custom extra fetches.
if extra_action_fetches_fn:
extra_fetches.update(extra_action_fetches_fn(self))
# Update our global timestep by the batch size.
self.global_timestep += int(batch_size)
return actions, state_out, extra_fetches
@override(Policy)
def compute_log_likelihoods(self,
actions,
obs_batch,
state_batches=None,
prev_action_batch=None,
prev_reward_batch=None):
if action_sampler_fn and action_distribution_fn is None:
raise ValueError("Cannot compute log-prob/likelihood w/o an "
"`action_distribution_fn` and a provided "
"`action_sampler_fn`!")
seq_lens = tf.ones(len(obs_batch), dtype=tf.int32)
input_dict = {
SampleBatch.CUR_OBS: tf.convert_to_tensor(obs_batch),
"is_training": tf.constant(False),
}
if obs_include_prev_action_reward:
input_dict.update({
SampleBatch.PREV_ACTIONS: tf.convert_to_tensor(
prev_action_batch),
SampleBatch.PREV_REWARDS: tf.convert_to_tensor(
prev_reward_batch),
})
# Exploration hook before each forward pass.
self.exploration.before_compute_actions(explore=False)
# Action dist class and inputs are generated via custom function.
if action_distribution_fn:
dist_inputs, dist_class, _ = action_distribution_fn(
self,
self.model,
input_dict[SampleBatch.CUR_OBS],
explore=False,
is_training=False)
action_dist = dist_class(dist_inputs, self.model)
log_likelihoods = action_dist.logp(actions)
# Default log-likelihood calculation.
else:
dist_inputs, _ = self.model(input_dict, state_batches,
seq_lens)
dist_class = self.dist_class
action_dist = dist_class(dist_inputs, self.model)
log_likelihoods = action_dist.logp(actions)
return log_likelihoods
@override(Policy)
def apply_gradients(self, gradients):
self._apply_gradients(
zip([(tf.convert_to_tensor(g) if g is not None else None)
for g in gradients], self.model.trainable_variables()))
@override(Policy)
def get_exploration_info(self):
return _convert_to_numpy(self.exploration.get_info())
@override(Policy)
def get_weights(self, as_dict=False):
variables = self.variables()
if as_dict:
return {v.name: v.numpy() for v in variables}
return [v.numpy() for v in variables]
@override(Policy)
def set_weights(self, weights):
variables = self.variables()
assert len(weights) == len(variables), (len(weights),
len(variables))
for v, w in zip(variables, weights):
v.assign(w)
@override(Policy)
def get_state(self):
state = {"_state": super().get_state()}
state["_optimizer_variables"] = self._optimizer.variables()
return state
@override(Policy)
def set_state(self, state):
state = state.copy() # shallow copy
# Set optimizer vars first.
optimizer_vars = state.pop("_optimizer_variables", None)
if optimizer_vars and self._optimizer.variables():
logger.warning(
"Cannot restore an optimizer's state for tf eager! Keras "
"is not able to save the v1.x optimizers (from "
"tf.compat.v1.train) since they aren't compatible with "
"checkpoints.")
for opt_var, value in zip(self._optimizer.variables(),
optimizer_vars):
opt_var.assign(value)
# Then the Policy's (NN) weights.
super().set_state(state["_state"])
def variables(self):
"""Return the list of all savable variables for this policy."""
return self.model.variables()
@override(Policy)
def is_recurrent(self):
return len(self._state_in) > 0
@override(Policy)
def num_state_tensors(self):
return len(self._state_in)
@override(Policy)
def get_initial_state(self):
return self.model.get_initial_state()
def get_session(self):
return None # None implies eager
def get_placeholder(self, ph):
raise ValueError(
"get_placeholder() is not allowed in eager mode. Try using "
"rllib.utils.tf_ops.make_tf_callable() to write "
"functions that work in both graph and eager mode.")
def loss_initialized(self):
return self._loss_initialized
@override(Policy)
def export_model(self, export_dir):
pass
@override(Policy)
def export_checkpoint(self, export_dir):
pass
def _get_is_training_placeholder(self):
return tf.convert_to_tensor(self._is_training)
def _apply_gradients(self, grads_and_vars):
if apply_gradients_fn:
apply_gradients_fn(self, self._optimizer, grads_and_vars)
else:
self._optimizer.apply_gradients(grads_and_vars)
def _compute_gradients(self, samples):
"""Computes and returns grads as eager tensors."""
self._is_training = True
with tf.GradientTape(persistent=gradients_fn is not None) as tape:
# TODO: set seq len and state-in properly
state_in = []
for i in range(self.num_state_tensors()):
state_in.append(samples["state_in_{}".format(i)])
self._state_in = state_in
model_out, _ = self.model(samples, self._state_in,
samples.get("seq_lens"))
loss = loss_fn(self, self.model, self.dist_class, samples)
variables = self.model.trainable_variables()
if gradients_fn:
class OptimizerWrapper:
def __init__(self, tape):
self.tape = tape
def compute_gradients(self, loss, var_list):
return list(
zip(self.tape.gradient(loss, var_list), var_list))
grads_and_vars = gradients_fn(self, OptimizerWrapper(tape),
loss)
else:
grads_and_vars = list(
zip(tape.gradient(loss, variables), variables))
if log_once("grad_vars"):
for _, v in grads_and_vars:
logger.info("Optimizing variable {}".format(v.name))
grads = [g for g, v in grads_and_vars]
stats = self._stats(self, samples, grads)
return grads_and_vars, stats
def _stats(self, outputs, samples, grads):
fetches = {}
if stats_fn:
fetches[LEARNER_STATS_KEY] = {
k: v
for k, v in stats_fn(outputs, samples).items()
}
else:
fetches[LEARNER_STATS_KEY] = {}
if extra_learn_fetches_fn:
fetches.update(
{k: v
for k, v in extra_learn_fetches_fn(self).items()})
if grad_stats_fn:
fetches.update({
k: v
for k, v in grad_stats_fn(self, samples, grads).items()
})
return fetches
def _lazy_tensor_dict(self, postprocessed_batch):
train_batch = UsageTrackingDict(postprocessed_batch)
train_batch.set_get_interceptor(_convert_to_tf)
return train_batch
def _lazy_numpy_dict(self, postprocessed_batch):
train_batch = UsageTrackingDict(postprocessed_batch)
train_batch.set_get_interceptor(convert_to_non_tf_type)
return train_batch
@classmethod
def with_tracing(cls):
return traced_eager_policy(cls)
eager_policy_cls.__name__ = name + "_eager"
eager_policy_cls.__qualname__ = name + "_eager"
return eager_policy_cls
| 39.039216
| 79
| 0.560986
|
69c1207aee2d7c97a6e05c9488b060594593e37b
| 347
|
py
|
Python
|
manage.py
|
kfolkes/MicrosoftLab_NSBE46
|
5e66cf3378aa27ee36fba5297b7f917e4890b1f8
|
[
"MIT"
] | null | null | null |
manage.py
|
kfolkes/MicrosoftLab_NSBE46
|
5e66cf3378aa27ee36fba5297b7f917e4890b1f8
|
[
"MIT"
] | 3
|
2020-06-05T20:14:46.000Z
|
2021-06-10T21:20:45.000Z
|
manage.py
|
kfolkes/MicrosoftLab_NSBE46
|
5e66cf3378aa27ee36fba5297b7f917e4890b1f8
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
"""
Command-line utility for administrative tasks.
"""
import os
import sys
if __name__ == "__main__":
os.environ.setdefault(
"DJANGO_SETTINGS_MODULE",
"djangoapp.settings"
)
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| 19.277778
| 65
| 0.677233
|
79768aedce63216e0b361ac106e613d7225502a7
| 2,663
|
py
|
Python
|
tests/main/test_specify_output_dir.py
|
insight-infrastructure/cookiecutter
|
5e77a6f59786759cf1b469e7bd827b6d340a31c6
|
[
"BSD-3-Clause"
] | 8
|
2020-06-15T18:49:24.000Z
|
2021-04-15T10:34:24.000Z
|
tests/main/test_specify_output_dir.py
|
insight-infrastructure/cookiecutter
|
5e77a6f59786759cf1b469e7bd827b6d340a31c6
|
[
"BSD-3-Clause"
] | 19
|
2020-06-28T16:03:56.000Z
|
2020-10-07T15:52:06.000Z
|
tests/main/test_specify_output_dir.py
|
insight-infrastructure/nukikata
|
5e77a6f59786759cf1b469e7bd827b6d340a31c6
|
[
"BSD-3-Clause"
] | null | null | null |
"""Tests for cookiecutter's output directory customization feature."""
import pytest
from cookiecutter import main
@pytest.fixture
def context():
"""Fixture to return a valid context as known from a cookiecutter.json."""
return {
'cookiecutter': {
'email': 'raphael@hackebrot.de',
'full_name': 'Raphael Pierzina',
'github_username': 'hackebrot',
'version': '0.1.0',
}
}
@pytest.fixture
def output_dir(tmpdir):
"""Fixture to prepare test output directory."""
return str(tmpdir.mkdir('output'))
@pytest.fixture
def template(tmpdir):
"""Fixture to prepare test template directory."""
template_dir = tmpdir.mkdir('template')
template_dir.join('cookiecutter.json').ensure(file=True)
return str(template_dir)
@pytest.fixture
def template_renderable(tmpdir):
"""Fixture to prepare test template directory."""
template_dir = tmpdir.mkdir('template')
template_dir.join('cookiecutter.json').ensure(file=True)
return str(template_dir)
@pytest.fixture(autouse=True)
def mock_gen_context(mocker, context):
"""Fixture. Automatically mock cookiecutter's function with expected output."""
mocker.patch('cookiecutter.main.generate_context', return_value=context)
@pytest.fixture(autouse=True)
def mock_prompt(mocker):
"""Fixture. Automatically mock cookiecutter's function with expected output."""
mocker.patch('cookiecutter.main.prompt_for_config')
@pytest.fixture(autouse=True)
def mock_replay(mocker):
"""Fixture. Automatically mock cookiecutter's function with expected output."""
mocker.patch('cookiecutter.main.dump')
def test_api_invocation(mocker, template, output_dir, context):
"""Verify output dir location is correctly passed."""
mock_gen_files = mocker.patch('cookiecutter.main.generate_files')
main.cookiecutter(template, output_dir=output_dir)
mock_gen_files.assert_called_once_with(
repo_dir=template,
context=context,
overwrite_if_exists=False,
skip_if_file_exists=False,
output_dir=output_dir,
context_key='cookiecutter',
accept_hooks=True,
)
def test_default_output_dir(mocker, template, context):
"""Verify default output dir is current working folder."""
mock_gen_files = mocker.patch('cookiecutter.main.generate_files')
main.cookiecutter(template)
mock_gen_files.assert_called_once_with(
repo_dir=template,
context=context,
overwrite_if_exists=False,
skip_if_file_exists=False,
output_dir='.',
context_key='cookiecutter',
accept_hooks=True,
)
| 28.945652
| 83
| 0.704469
|
3c95d778f759aed2bae2dd57ec5457835aeb3afa
| 3,992
|
py
|
Python
|
tensorflow_graphics/geometry/representation/point.py
|
prafael18/graphics
|
2f250a53431697cfb43fd1edf61a2d965b20c596
|
[
"Apache-2.0"
] | 2
|
2021-01-06T03:24:47.000Z
|
2021-01-07T06:39:54.000Z
|
tensorflow_graphics/geometry/representation/point.py
|
prafael18/graphics
|
2f250a53431697cfb43fd1edf61a2d965b20c596
|
[
"Apache-2.0"
] | 1
|
2021-02-24T10:36:11.000Z
|
2021-02-24T10:36:11.000Z
|
tensorflow_graphics/geometry/representation/point.py
|
isabella232/graphics-1
|
d5c26cf05125e5c096f5b2cde6c85f88c7df2d59
|
[
"Apache-2.0"
] | 1
|
2020-08-03T20:22:59.000Z
|
2020-08-03T20:22:59.000Z
|
# Copyright 2020 The TensorFlow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tensorflow point utility functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow_graphics.math import vector
from tensorflow_graphics.util import asserts
from tensorflow_graphics.util import export_api
from tensorflow_graphics.util import shape
def distance_to_ray(point, origin, direction, keepdims=True, name=None):
"""Computes the distance from a M-d point to a M-d ray.
Note:
In the following, A1 to An are optional batch dimensions, which must be
broadcast compatible.
Args:
point: A tensor of shape `[A1, ..., An, M]`.
origin: A tensor of shape `[A1, ..., An, M]`.
direction: A tensor of shape `[A1, ..., An, M]`. The last dimension must be
normalized.
keepdims: A `bool`, whether to keep the last dimension with length 1 or to
remove it.
name: A name for this op. Defaults to "point_distance_to_ray".
Returns:
A tensor of shape `[A1, ..., An, 1]` containing the distance from each point
to the corresponding ray.
Raises:
    ValueError: If the shape of `point`, `origin`, or `direction` is not
supported.
"""
with tf.compat.v1.name_scope(name, "point_distance_to_ray",
[point, origin, direction]):
point = tf.convert_to_tensor(value=point)
origin = tf.convert_to_tensor(value=origin)
direction = tf.convert_to_tensor(value=direction)
shape.compare_dimensions((point, origin, direction), -1,
("point", "origin", "direction"))
shape.compare_batch_dimensions(
tensors=(point, origin, direction),
last_axes=-2,
broadcast_compatible=True)
direction = asserts.assert_normalized(direction)
vec = point - origin
dot = vector.dot(vec, direction)
vec -= dot * direction
return tf.norm(tensor=vec, axis=-1, keepdims=keepdims)
def project_to_ray(point, origin, direction, name=None):
"""Computes the projection of a M-d point on a M-d ray.
Note:
In the following, A1 to An are optional batch dimensions, which must be
broadcast compatible.
Args:
point: A tensor of shape `[A1, ..., An, M]`.
origin: A tensor of shape `[A1, ..., An, M]`.
direction: A tensor of shape `[A1, ..., An, M]`. The last dimension must be
normalized.
name: A name for this op. Defaults to "point_project_to_ray".
Returns:
A tensor of shape `[A1, ..., An, M]` containing the projected point.
Raises:
    ValueError: If the shape of `point`, `origin`, or `direction` is not
supported.
"""
with tf.compat.v1.name_scope(name, "point_project_to_ray",
[point, origin, direction]):
point = tf.convert_to_tensor(value=point)
origin = tf.convert_to_tensor(value=origin)
direction = tf.convert_to_tensor(value=direction)
shape.compare_dimensions((point, origin, direction), -1,
("point", "origin", "direction"))
shape.compare_batch_dimensions(
tensors=(point, origin, direction),
last_axes=-2,
broadcast_compatible=True)
direction = asserts.assert_normalized(direction)
vec = point - origin
dot = vector.dot(vec, direction)
return origin + dot * direction
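# Worked example (for intuition only): with point = [1., 1., 0.],
# origin = [0., 0., 0.] and direction = [1., 0., 0.] (already unit length),
# distance_to_ray returns 1.0 and project_to_ray returns [1., 0., 0.]:
# the component along the ray is kept, the orthogonal component is either
# measured (distance) or discarded (projection).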
# API contains all public functions and classes.
__all__ = export_api.get_functions_and_classes()
| 35.017544
| 80
| 0.686373
|
5c97aba7d6e145347358cc7c55b5cd8ef4dc22ef
| 1,455
|
py
|
Python
|
tests/fixtures/input_gdf.py
|
geodatascience/geo_snippets
|
d7f7a3566a82c4261ad66482d3f58871cfbd7949
|
[
"MIT"
] | null | null | null |
tests/fixtures/input_gdf.py
|
geodatascience/geo_snippets
|
d7f7a3566a82c4261ad66482d3f58871cfbd7949
|
[
"MIT"
] | null | null | null |
tests/fixtures/input_gdf.py
|
geodatascience/geo_snippets
|
d7f7a3566a82c4261ad66482d3f58871cfbd7949
|
[
"MIT"
] | null | null | null |
import pytest
from shapely.geometry import LineString
import geojson
import geopandas as gpd
def build_gdf(features):
all_geojson_features = []
for feature in features:
feature["properties"]["bounds"] = ", ".join(
map(str, feature["geometry"].bounds)
)
feature = geojson.Feature(
geometry=feature["geometry"], properties=feature["properties"]
)
all_geojson_features.append(feature)
return gpd.GeoDataFrame.from_features(all_geojson_features)
@pytest.fixture
def gdf_linestrings():
all_features = [
{
"geometry": LineString(
[
(4.07114907206, 46.0376034527),
(4.07091681769, 46.03699538217),
(4.07079583285, 46.03660928470),
]
),
"properties": {"a": 42, "b": "hello"},
},
{
"geometry": LineString(
[
(4.07079583285, 46.036609284706),
(4.07085925751, 46.036602948616),
(4.07086909165, 46.036677933937),
(4.07093731600, 46.036749231456),
(4.07103135497, 46.036703133922),
(4.07098587207, 46.036623231531),
]
),
"properties": {"a": 52, "b": "coucou"},
},
]
output_gdf = build_gdf(all_features)
return output_gdf
| 27.980769
| 74
| 0.513402
|
19713d52ca42b579c8ada777ec4a4227f0f06ac2
| 2,293
|
py
|
Python
|
tableauxml/workbook.py
|
ErikKBethke/tableau-workbook-xml
|
8c728b7a415c99cda5234c6dc70eb51ac65c5d0a
|
[
"MIT"
] | null | null | null |
tableauxml/workbook.py
|
ErikKBethke/tableau-workbook-xml
|
8c728b7a415c99cda5234c6dc70eb51ac65c5d0a
|
[
"MIT"
] | null | null | null |
tableauxml/workbook.py
|
ErikKBethke/tableau-workbook-xml
|
8c728b7a415c99cda5234c6dc70eb51ac65c5d0a
|
[
"MIT"
] | null | null | null |
import lxml.etree as etree
from tableauxml import Datasource
# %%
class Workbook:
"""
A class for the Tableau Workbook, which can be read due to its xml formatting.
Sub classes:
datasource
"""
def __init__(self, filepath, encoding='utf-8'):
"""
Constructor. Builds workbook from filepath. Will build datasources, XXXXXXXX (list additional construction) automatically.
"""
self._filepath = filepath
self.encoding = encoding
self._xml = self._load_xml(self._filepath)
self._datasources = self._load_datasources(self._xml)
@property
def filepath(self):
"""
Returns the filepath.
"""
return self._filepath
@property
def datasources(self):
"""
Datasources, as a list. Can use indices to select data sources in question.
"""
return self._datasources
def get_docinfo(self):
"""
Returns all doc info values for the XML workbook object.
"""
return {
'URL' : self._xml.docinfo.URL,
'doctype' : self._xml.docinfo.doctype,
'encoding' : self._xml.docinfo.encoding,
'externalDTD' : self._xml.docinfo.externalDTD,
            'internalDTD' : self._xml.docinfo.internalDTD,
'public_id' : self._xml.docinfo.public_id,
'root_name' : self._xml.docinfo.root_name,
'standalone' : self._xml.docinfo.standalone,
'system_url' : self._xml.docinfo.system_url,
'xml_version' : self._xml.docinfo.xml_version,
}
@staticmethod
def _load_xml(filepath):
"""
Generates workbook xml tree on instantiating a workbook
To-Do: enable archived for tbwx
"""
return etree.parse(filepath)
@staticmethod
def _load_datasources(xml):
"""
Generates datasources elements and parses through them on instantiating a workbook
"""
# for workbooks with more than one datasource, iterate
sources = []
for source in xml.xpath('/workbook/datasources/datasource'):
ds = Datasource(source)
sources.append(ds)
return sources
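# A hedged usage sketch ("report.twb" is a placeholder path; a real, unpackaged
# Tableau workbook in XML form is required):
#
#     wb = Workbook("report.twb")
#     print(wb.get_docinfo()["root_name"])
#     for ds in wb.datasources:
#         ...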
| 31.410959
| 130
| 0.591801
|
cd1977bc4b9a0c25cbee4eff0f9ecf19cafb9fdd
| 3,035
|
py
|
Python
|
adanet/experimental/phases/repeat_phase.py
|
Mario-Kart-Felix/adanet
|
b0f7fda2e7e5fb31790965b3dcb3fc6b431c05c6
|
[
"Apache-2.0"
] | 3,323
|
2018-10-30T15:24:26.000Z
|
2022-03-26T02:39:29.000Z
|
adanet/experimental/phases/repeat_phase.py
|
yeqingcheng368/adanet
|
5e08fad3f501836bb3e2e22316704a10b5f4507f
|
[
"Apache-2.0"
] | 136
|
2018-10-30T18:22:45.000Z
|
2022-02-02T20:39:26.000Z
|
adanet/experimental/phases/repeat_phase.py
|
yeqingcheng368/adanet
|
5e08fad3f501836bb3e2e22316704a10b5f4507f
|
[
"Apache-2.0"
] | 542
|
2018-10-30T18:18:04.000Z
|
2022-03-31T12:41:50.000Z
|
# Lint as: python3
# Copyright 2020 The AdaNet Authors. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# https://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A phase that repeats its inner phases."""
from typing import Callable, Iterable, Iterator, List
from adanet.experimental.phases.phase import DatasetProvider
from adanet.experimental.phases.phase import ModelProvider
from adanet.experimental.phases.phase import Phase
from adanet.experimental.work_units.work_unit import WorkUnit
import tensorflow.compat.v2 as tf
class RepeatPhase(DatasetProvider, ModelProvider):
"""A phase that repeats its inner phases."""
def __init__(self,
phase_factory: List[Callable[..., Phase]],
repetitions: int):
    """Initializes a RepeatPhase.
    Args:
      phase_factory: A list of callables that return `Phase` instances.
      repetitions: Number of times to repeat the phases in the phase factory.
    """
    self._phase_factory = phase_factory
    self._repetitions = repetitions
    self._final_phase = None
def work_units(self, previous_phase: DatasetProvider) -> Iterator[WorkUnit]:
for _ in range(self._repetitions):
# Each repetition, the "first" previous phase is the one preceeding the
# repeat phase itself.
prev_phase = previous_phase
for phase in self._phase_factory:
phase = phase()
for work_unit in phase.work_units(prev_phase):
yield work_unit
prev_phase = phase
self._final_phase = prev_phase
def get_train_dataset(self) -> tf.data.Dataset:
if not isinstance(self._final_phase, DatasetProvider):
raise NotImplementedError(
'The last phase in repetition does not provide datasets.')
return self._final_phase.get_train_dataset()
def get_eval_dataset(self) -> tf.data.Dataset:
if not isinstance(self._final_phase, DatasetProvider):
raise NotImplementedError(
'The last phase in repetition does not provide datasets.')
return self._final_phase.get_eval_dataset()
def get_models(self) -> Iterable[tf.keras.Model]:
if not isinstance(self._final_phase, ModelProvider):
raise NotImplementedError(
'The last phase in repetition does not provide models.')
return self._final_phase.get_models()
def get_best_models(self, num_models=1) -> Iterable[tf.keras.Model]:
if not isinstance(self._final_phase, ModelProvider):
raise NotImplementedError(
'The last phase in repetition does not provide models.')
return self._final_phase.get_best_models(num_models)
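# A hedged construction sketch (TrainPhase and EvalPhase are hypothetical Phase
# subclasses, shown only to illustrate the factory-callable pattern expected by
# `phase_factory`):
#
#     repeat = RepeatPhase(
#         phase_factory=[lambda: TrainPhase(...), lambda: EvalPhase(...)],
#         repetitions=3)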
| 39.934211
| 78
| 0.733773
|
11cfeee38e76f33d68d36a0e2fa32d9a1048f864
| 2,510
|
py
|
Python
|
Set3/Implement the MT19937 Mersenne Twister RNG/MT19937.py
|
BadMonkey7/Cryptopals
|
53bbef43d0d0dc62286514122c2651b1ce8e7471
|
[
"MIT"
] | 1
|
2021-05-07T16:35:24.000Z
|
2021-05-07T16:35:24.000Z
|
Set3/Implement the MT19937 Mersenne Twister RNG/MT19937.py
|
BadMonkey7/Cryptopals
|
53bbef43d0d0dc62286514122c2651b1ce8e7471
|
[
"MIT"
] | null | null | null |
Set3/Implement the MT19937 Mersenne Twister RNG/MT19937.py
|
BadMonkey7/Cryptopals
|
53bbef43d0d0dc62286514122c2651b1ce8e7471
|
[
"MIT"
] | null | null | null |
# coding=utf-8
# Reference wikis:
# https://en.m.wikipedia.org/wiki/Mersenne_Twister
# https://cedricvanrompay.gitlab.io/cryptopals/challenges/21.html
# https://zh.wikipedia.org/wiki/%E6%A2%85%E6%A3%AE%E6%97%8B%E8%BD%AC%E7%AE%97%E6%B3%95
class MT19937():
def __init__(self,seed):
self.pos = 0
self.x = list()
self.x.append(seed)
    # 32-bit variant (standard MT19937)
def random_32(self):
        # Parameters for the 32-bit variant
(w, n, m, r, a) = (32, 624, 397, 31, 0x9908B0DF)
(u, d) = (11, 0xFFFFFFFF)
(s, b) = (7, 0x9D2C5680)
(t, c) = (15, 0xEFC60000)
l = 18
f = 1812433253
        # Initialization (seed the state array)
for i in range(1, n):
tmp = (f * (self.x[i - 1] ^ (self.x[i - 1] >> (w - 2))) + i) & d
self.x.append(tmp)
upper_mask = d << r & d
lower_mask = d >> (w - r)
        # Twist (regenerate the state array)
for i in range(n):
tmp = (self.x[i] & upper_mask) + (self.x[(i + 1) % n] & lower_mask)
if tmp & 1:
tmp = tmp >> 1 ^ a
else:
tmp >>= 1
tmp ^= self.x[(i + m) % n]
self.x[i] = tmp
        # Tempering / extraction
while True:
y = self.x[self.pos]
y = y ^ y >> u
y = y ^ y << s & b
y = y ^ y << t & c
y = y ^ y >> l
self.pos = (self.pos + 1) % n
            # yield values lazily as a generator
yield y & d
    # 64-bit variant (MT19937-64)
    def random_64(self):
        # Parameters for the 64-bit variant
        (w, n, m, r, a) = (64, 312, 156, 31, 0xB5026F5AA96619E9)
        (u, d) = (29, 0x5555555555555555)
        (s, b) = (17, 0x71D67FFFEDA60000)
        (t, c) = (37, 0xFFF7EEE000000000)
        l = 43
        f = 6364136223846793005
        # here d is only the tempering mask; keep a separate full-word mask
        word_mask = (1 << w) - 1
        # Initialization (seed the state array)
        for i in range(1, n):
            tmp = (f * (self.x[i - 1] ^ (self.x[i - 1] >> (w - 2))) + i) & word_mask
            self.x.append(tmp)
        upper_mask = word_mask << r & word_mask
        lower_mask = word_mask >> (w - r)
        # Twist (regenerate the state array)
        for i in range(n):
            tmp = (self.x[i] & upper_mask) + (self.x[(i + 1) % n] & lower_mask)
            if tmp & 1:
                tmp = tmp >> 1 ^ a
            else:
                tmp >>= 1
            tmp ^= self.x[(i + m) % n]
            self.x[i] = tmp
        # Tempering / extraction
        while True:
            y = self.x[self.pos]
            y = y ^ y >> u & d
            y = y ^ y << s & b
            y = y ^ y << t & c
            y = y ^ y >> l
            self.pos = (self.pos + 1) % n
            # yield values lazily as a generator
            yield y & word_mask
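# A minimal usage sketch: each next() on the generator yields one tempered
# pseudo-random word (5489 is the conventional MT19937 default seed, used here
# only as an example value).
if __name__ == '__main__':
    rng = MT19937(5489)
    stream = rng.random_32()
    print([next(stream) for _ in range(3)])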
| 29.186047
| 86
| 0.393625
|
ab9d617ad3320485eada23ac9ff29298160014b4
| 3,171
|
py
|
Python
|
lsp_shiloh/common/devices/pip/tools/gen_1d_ramp.py
|
internaru/Pinetree_P
|
1f1525454c8b20c6c589529ff4bc159404611297
|
[
"FSFAP"
] | null | null | null |
lsp_shiloh/common/devices/pip/tools/gen_1d_ramp.py
|
internaru/Pinetree_P
|
1f1525454c8b20c6c589529ff4bc159404611297
|
[
"FSFAP"
] | null | null | null |
lsp_shiloh/common/devices/pip/tools/gen_1d_ramp.py
|
internaru/Pinetree_P
|
1f1525454c8b20c6c589529ff4bc159404611297
|
[
"FSFAP"
] | null | null | null |
#!/usr/bin/python
#******************************************************************************
# * Copyright (c) 2011 Marvell International, Ltd. All Rights Reserved
# *
# * Marvell Confidential
#******************************************************************************
import sys
# Array storing input (x-axis) sample points and corresponding output (y-axis) values. Linear interpolation
# is performed between sample points.
xsamp = [ 0, 63, 127, 191, 255, 319, 383, 447, 511, 575, 639, 703, 767, 831, 895, 959, 1023, 1024]
ysamp = [560, 526, 490, 455, 420, 385, 350, 315, 280, 245, 210, 175, 140, 105, 70, 35, 0, 0]
## 16 15 14 13 12 11 10 9 8 7 6 5 4 3 2 1 0
#xsamp = [ 0, 20, 68, 100, 148, 192, 232, 296, 356, 424, 484, 552, 616, 712, 812, 936, 1023, 1024]
##ysamp = [560, 549, 523, 505, 479, 455, 433, 397, 365, 328, 295, 258, 223, 170, 116, 48, 0, 0]
##ysamp = [560, 544, 518, 500, 474, 450, 428, 391, 358, 310, 288, 215, 190, 180, 140, 78, 0, 0]
#ysamp = [560, 544, 518, 500, 474, 450, 428, 391, 358, 310, 288, 215, 190, 175, 150, 80, 0, 0] # -- Good
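# Hedged reading of the active xsamp/ysamp tables above: an input that falls on
# a listed sample point maps straight to its paired output (e.g. 511 -> 280);
# inputs between two sample points are mapped along the straight line joining
# the neighbouring (x, y) pairs.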
# Write out the file header
def write_header(f):
f.write("/******************************************************************************\n")
f.write(" * Copyright (c) 2011 Marvell International, Ltd. All Rights Reserved\n")
f.write(" *\n")
f.write(" * Marvell Confidential\n")
f.write(" ******************************************************************************/\n")
f.write("\n")
f.write("#ifndef INC_INV1D_RAMP_CLIPPED_H\n")
f.write("#define INC_INV1D_RAMP_CLIPPED_H\n")
#f.write("#ifndef INC_1D_RAMP_CLIPPED_H\n")
#f.write("#define INC_1D_RAMP_CLIPPED_H\n")
f.write("\n")
f.write("uint32_t PA_inv1d_ramp_clipped[] =\n")
#f.write("uint32_t PA_1d_ramp_clipped[] =\n")
f.write("{\n")
return
def write_footer(f):
f.write("};\n")
f.write("\n")
f.write("#endif\n")
# Calculate the rise for the given section
def calc_rise(section):
rise = ysamp[section+1] - ysamp[section]
return rise
# Calculate the run for the given section
def calc_run(section):
run = xsamp[section+1] - xsamp[section]
return run
# Calculate the yint for the given section
def calc_yint(section, rise, run):
yint = ysamp[section] - (rise*xsamp[section])/run
return yint
# Main function
def main():
# Open the file to write to
fout = open(sys.argv[1], "wb")
write_header(fout)
section = 0
rise = calc_rise(section)
run = calc_run(section)
yint = ysamp[0]
for in_val in range(1024):
if in_val > xsamp[section+1]:
section += 1
rise = calc_rise(section)
run = calc_run(section)
yint = calc_yint(section, rise, run)
out_val = ((rise*in_val)/run) + yint
if out_val < 0:
out_val = 0
# out_val = 1023 - out_val
print out_val
str_out = "0x%08x,\n" % out_val
fout.write( str_out )
write_footer(fout)
if __name__ == '__main__':
main()
| 33.734043
| 113
| 0.525702
|
1c8ca52a77091627482b86592b4609eb48ad1157
| 45,099
|
py
|
Python
|
snakemake/jobs.py
|
r-mllr/snakemake
|
ee0d5b17311c9126d89b49eec70045a9fdd6bd9c
|
[
"MIT"
] | 1
|
2021-02-05T13:42:24.000Z
|
2021-02-05T13:42:24.000Z
|
snakemake/jobs.py
|
r-mllr/snakemake
|
ee0d5b17311c9126d89b49eec70045a9fdd6bd9c
|
[
"MIT"
] | null | null | null |
snakemake/jobs.py
|
r-mllr/snakemake
|
ee0d5b17311c9126d89b49eec70045a9fdd6bd9c
|
[
"MIT"
] | null | null | null |
__author__ = "Johannes Köster"
__copyright__ = "Copyright 2015-2019, Johannes Köster"
__email__ = "koester@jimmy.harvard.edu"
__license__ = "MIT"
import hashlib
import os
import sys
import base64
import tempfile
import subprocess
import json
from collections import defaultdict
from itertools import chain, filterfalse
from functools import partial
from operator import attrgetter
from urllib.request import urlopen
from urllib.parse import urlparse
from snakemake.io import (
IOFile,
Wildcards,
Resources,
_IOFile,
is_flagged,
get_flag_value,
contains_wildcard,
)
from snakemake.utils import format, listfiles
from snakemake.exceptions import RuleException, ProtectedOutputException, WorkflowError
from snakemake.exceptions import (
UnexpectedOutputException,
CreateCondaEnvironmentException,
)
from snakemake.logging import logger
from snakemake.common import DYNAMIC_FILL, lazy_property, get_uuid
from snakemake import conda, wrapper
def format_files(job, io, dynamicio):
for f in io:
if f in dynamicio:
yield "{} (dynamic)".format(f.format_dynamic())
elif is_flagged(f, "pipe"):
yield "{} (pipe)".format(f)
elif is_flagged(f, "checkpoint_target"):
yield "<TBD>"
else:
yield f
def jobfiles(jobs, type):
return chain(*map(attrgetter(type), jobs))
class AbstractJob:
def is_group(self):
raise NotImplementedError()
def log_info(self, skip_dynamic=False):
raise NotImplementedError()
def log_error(self, msg=None, **kwargs):
raise NotImplementedError()
def remove_existing_output(self):
raise NotImplementedError()
def download_remote_input(self):
raise NotImplementedError()
def properties(self, omit_resources=["_cores", "_nodes"], **aux_properties):
raise NotImplementedError()
class Job(AbstractJob):
HIGHEST_PRIORITY = sys.maxsize
__slots__ = [
"rule",
"dag",
"wildcards_dict",
"wildcards",
"_format_wildcards",
"input",
"dependencies",
"output",
"_params",
"_log",
"_benchmark",
"_resources",
"_conda_env_file",
"_conda_env",
"shadow_dir",
"_inputsize",
"dynamic_output",
"dynamic_input",
"temp_output",
"protected_output",
"touch_output",
"subworkflow_input",
"_hash",
"_attempt",
"_group",
"targetfile",
]
def __init__(
self, rule, dag, wildcards_dict=None, format_wildcards=None, targetfile=None
):
self.rule = rule
self.dag = dag
# the targetfile that led to the job
# it is important to record this, since we need it to submit the
# job on a cluster. In contrast, an arbitrary targetfile could
# lead to a different composition of wildcard values (in case of
# ambiguity in matching).
self.targetfile = targetfile
self.wildcards_dict = wildcards_dict
self.wildcards = Wildcards(fromdict=self.wildcards_dict)
self._format_wildcards = (
self.wildcards
if format_wildcards is None
else Wildcards(fromdict=format_wildcards)
)
self.input, input_mapping, self.dependencies = self.rule.expand_input(
self.wildcards_dict
)
self.output, output_mapping = self.rule.expand_output(self.wildcards_dict)
# other properties are lazy to be able to use additional parameters and check already existing files
self._params = None
self._log = None
self._benchmark = None
self._resources = None
self._conda_env_file = None
self._conda_env = None
self._group = None
self.shadow_dir = None
self._inputsize = None
self.is_updated = False
self._attempt = self.dag.workflow.attempt
# TODO get rid of these
self.dynamic_output, self.dynamic_input = set(), set()
self.temp_output, self.protected_output = set(), set()
self.touch_output = set()
self.subworkflow_input = dict()
for f in self.output:
f_ = output_mapping[f]
if f_ in self.rule.dynamic_output:
self.dynamic_output.add(f)
if f_ in self.rule.temp_output:
self.temp_output.add(f)
if f_ in self.rule.protected_output:
self.protected_output.add(f)
if f_ in self.rule.touch_output:
self.touch_output.add(f)
for f in self.input:
f_ = input_mapping[f]
if f_ in self.rule.dynamic_input:
self.dynamic_input.add(f)
if f_ in self.rule.subworkflow_input:
self.subworkflow_input[f] = self.rule.subworkflow_input[f_]
elif "subworkflow" in f.flags:
sub = f.flags["subworkflow"]
if f in self.subworkflow_input:
other = self.subworkflow_input[f]
if sub != other:
raise WorkflowError(
"The input file {} is ambiguously "
"associated with two subworkflows {} "
"and {}.".format(f, sub, other),
rule=self.rule,
)
self.subworkflow_input[f] = sub
self._hash = self.rule.__hash__()
for wildcard_value in self.wildcards_dict.values():
self._hash ^= wildcard_value.__hash__()
def updated(self):
job = Job(
self.rule,
self.dag,
wildcards_dict=self.wildcards_dict,
targetfile=self.targetfile,
)
job.is_updated = True
return job
def is_valid(self):
"""Check if job is valid"""
# these properties have to work in dry-run as well. Hence we check them here:
self.rule.expand_benchmark(self.wildcards_dict)
self.rule.expand_log(self.wildcards_dict)
def outputs_older_than_script(self):
"""return output that's older than script, i.e. script has changed"""
if not self.is_script:
return
assert os.path.exists(self.rule.script) # to make sure lstat works
script_mtime = os.lstat(self.rule.script).st_mtime
for f in self.expanded_output:
if f.exists:
if not f.is_newer(script_mtime):
yield f
@property
def threads(self):
return self.resources._cores
@property
def params(self):
if self._params is None:
self._params = self.rule.expand_params(
self.wildcards_dict, self.input, self.output, self.resources
)
return self._params
@property
def log(self):
if self._log is None:
self._log = self.rule.expand_log(self.wildcards_dict)
return self._log
@property
def benchmark(self):
if self._benchmark is None:
self._benchmark = self.rule.expand_benchmark(self.wildcards_dict)
return self._benchmark
@property
def benchmark_repeats(self):
if self.benchmark is not None:
return get_flag_value(self.benchmark, "repeat") or 1
@property
def group(self):
if self._group is None:
self._group = self.rule.expand_group(self.wildcards_dict)
return self._group
@group.setter
def group(self, group):
self._group = group
@property
def attempt(self):
return self._attempt
@attempt.setter
def attempt(self, attempt):
# reset resources
self._resources = None
self._attempt = attempt
@property
def resources(self):
if self._resources is None:
self._resources = self.rule.expand_resources(
self.wildcards_dict, self.input, self.attempt
)
return self._resources
@property
def conda_env_file(self):
if self._conda_env_file is None:
expanded_env = self.rule.expand_conda_env(self.wildcards_dict)
if expanded_env is not None:
scheme, _, path, *_ = urlparse(expanded_env)
# Normalize 'file:///my/path.yml' to '/my/path.yml'
if scheme == "file" or not scheme:
self._conda_env_file = path
else:
self._conda_env_file = expanded_env
return self._conda_env_file
@property
def conda_env(self):
if self.conda_env_file:
if self._conda_env is None:
self._conda_env = self.dag.conda_envs.get(
(self.conda_env_file, self.singularity_img_url)
)
return self._conda_env
return None
@property
def conda_env_path(self):
return self.conda_env.path if self.conda_env else None
def archive_conda_env(self):
"""Archive a conda environment into a custom local channel."""
if self.conda_env_file:
return self.conda_env.create_archive()
return None
@property
def needs_singularity(self):
return self.singularity_img is not None
@property
def singularity_img_url(self):
return self.rule.singularity_img
@property
def singularity_img(self):
if self.singularity_img_url:
return self.dag.singularity_imgs[self.singularity_img_url]
return None
@property
def singularity_img_path(self):
return self.singularity_img.path if self.singularity_img else None
@property
def is_shadow(self):
return self.rule.shadow_depth is not None
@property
def priority(self):
return self.dag.priority(self)
@property
def b64id(self):
return base64.b64encode(
(self.rule.name + "".join(self.output)).encode("utf-8")
).decode("utf-8")
@property
def inputsize(self):
"""
Return the size of the input files.
Input files need to be present.
"""
if self._inputsize is None:
self._inputsize = sum(f.size for f in self.input)
return self._inputsize
@property
def message(self):
""" Return the message for this job. """
try:
return (
self.format_wildcards(self.rule.message) if self.rule.message else None
)
except AttributeError as ex:
raise RuleException(str(ex), rule=self.rule)
except KeyError as ex:
raise RuleException(
"Unknown variable in message " "of shell command: {}".format(str(ex)),
rule=self.rule,
)
@property
def shellcmd(self):
""" Return the shell command. """
try:
return (
self.format_wildcards(self.rule.shellcmd)
if self.rule.shellcmd
else None
)
except AttributeError as ex:
raise RuleException(str(ex), rule=self.rule)
except KeyError as ex:
raise RuleException(
"Unknown variable when printing " "shell command: {}".format(str(ex)),
rule=self.rule,
)
@property
def is_shell(self):
return self.rule.shellcmd is not None
@property
def is_norun(self):
return self.rule.norun
@property
def is_script(self):
return self.rule.script is not None
@property
def is_wrapper(self):
return self.rule.wrapper is not None
@property
def is_cwl(self):
return self.rule.cwl is not None
@property
def is_run(self):
return not (
self.is_shell
or self.is_norun
or self.is_script
or self.is_wrapper
or self.is_cwl
)
@property
def expanded_output(self):
""" Iterate over output files while dynamic output is expanded. """
for f, f_ in zip(self.output, self.rule.output):
if f in self.dynamic_output:
expansion = self.expand_dynamic(f_)
if not expansion:
yield f_
for f, _ in expansion:
file_to_yield = IOFile(f, self.rule)
file_to_yield.clone_flags(f_)
yield file_to_yield
else:
yield f
def shadowed_path(self, f):
""" Get the shadowed path of IOFile f. """
if not self.shadow_dir:
return f
f_ = IOFile(os.path.join(self.shadow_dir, f), self.rule)
f_.clone_flags(f)
return f_
@property
def dynamic_wildcards(self):
""" Return all wildcard values determined from dynamic output. """
combinations = set()
for f, f_ in zip(self.output, self.rule.output):
if f in self.dynamic_output:
for f, w in self.expand_dynamic(f_):
combinations.add(tuple(w.items()))
wildcards = defaultdict(list)
for combination in combinations:
for name, value in combination:
wildcards[name].append(value)
return wildcards
@property
def missing_input(self):
""" Return missing input files. """
# omit file if it comes from a subworkflow
return set(
f for f in self.input if not f.exists and not f in self.subworkflow_input
)
@property
def existing_remote_input(self):
files = set()
for f in self.input:
if f.is_remote:
if f.exists_remote:
files.add(f)
return files
@property
def existing_remote_output(self):
files = set()
for f in self.remote_output:
if f.exists_remote:
files.add(f)
return files
@property
def missing_remote_input(self):
return self.remote_input - self.existing_remote_input
@property
def missing_remote_output(self):
return self.remote_output - self.existing_remote_output
@property
def output_mintime(self):
""" Return oldest output file. """
existing = [f.mtime for f in self.expanded_output if f.exists]
if self.benchmark and self.benchmark.exists:
existing.append(self.benchmark.mtime)
if existing:
return min(existing)
return None
@property
def output_mintime_local(self):
existing = [f.mtime_local for f in self.expanded_output if f.exists]
if self.benchmark and self.benchmark.exists:
existing.append(self.benchmark.mtime_local)
if existing:
return min(existing)
return None
@property
def input_maxtime(self):
""" Return newest input file. """
existing = [f.mtime for f in self.input if f.exists]
if existing:
return max(existing)
return None
def missing_output(self, requested=None):
""" Return missing output files. """
files = set()
if self.benchmark and (requested is None or self.benchmark in requested):
if not self.benchmark.exists:
files.add(self.benchmark)
for f, f_ in zip(self.output, self.rule.output):
if requested is None or f in requested:
if f in self.dynamic_output:
if not self.expand_dynamic(f_):
files.add("{} (dynamic)".format(f_))
elif not f.exists:
files.add(f)
for f in self.log:
if requested and f in requested and not f.exists:
files.add(f)
return files
@property
def local_input(self):
for f in self.input:
if not f.is_remote:
yield f
@property
def unique_input(self):
seen = set()
for element in filterfalse(seen.__contains__, self.input):
seen.add(element)
yield element
@property
def local_output(self):
for f in self.output:
if not f.is_remote:
yield f
@property
def remote_input(self):
for f in self.input:
if f.is_remote:
yield f
@property
def remote_output(self):
for f in self.output:
if f.is_remote:
yield f
@property
def remote_input_newer_than_local(self):
files = set()
for f in self.remote_input:
if (f.exists_remote and f.exists_local) and (f.mtime > f.mtime_local):
files.add(f)
return files
@property
def remote_input_older_than_local(self):
files = set()
for f in self.remote_input:
if (f.exists_remote and f.exists_local) and (f.mtime < f.mtime_local):
files.add(f)
return files
@property
def remote_output_newer_than_local(self):
files = set()
for f in self.remote_output:
if (f.exists_remote and f.exists_local) and (f.mtime > f.mtime_local):
files.add(f)
return files
@property
def remote_output_older_than_local(self):
files = set()
for f in self.remote_output:
if (f.exists_remote and f.exists_local) and (f.mtime < f.mtime_local):
files.add(f)
return files
@property
def files_to_download(self):
toDownload = set()
for f in self.input:
if f.is_remote:
if (not f.exists_local and f.exists_remote) and (
not self.rule.norun or f.remote_object.keep_local
):
toDownload.add(f)
toDownload = toDownload | self.remote_input_newer_than_local
return toDownload
@property
def files_to_upload(self):
return self.missing_remote_input & self.remote_input_older_than_local
@property
def existing_output(self):
return filter(lambda f: f.exists, self.expanded_output)
def check_protected_output(self):
protected = list(filter(lambda f: f.protected, self.expanded_output))
if protected:
raise ProtectedOutputException(self.rule, protected)
def remove_existing_output(self):
"""Clean up both dynamic and regular output before rules actually run
"""
if self.dynamic_output:
for f, _ in chain(*map(self.expand_dynamic, self.rule.dynamic_output)):
os.remove(f)
for f, f_ in zip(self.output, self.rule.output):
try:
# remove_non_empty_dir only applies to directories which aren't
# flagged with directory().
f.remove(remove_non_empty_dir=False)
except FileNotFoundError:
# No file == no problem
pass
for f in self.log:
f.remove(remove_non_empty_dir=False)
def download_remote_input(self):
for f in self.files_to_download:
f.download_from_remote()
def prepare(self):
"""
Prepare execution of job.
This includes creation of directories and deletion of previously
created dynamic files.
Creates a shadow directory for the job if specified.
"""
self.check_protected_output()
unexpected_output = self.dag.reason(self).missing_output.intersection(
self.existing_output
)
if unexpected_output:
logger.warning(
"Warning: the following output files of rule {} were not "
"present when the DAG was created:\n{}".format(
self.rule, unexpected_output
)
)
self.remove_existing_output()
for f, f_ in zip(self.output, self.rule.output):
f.prepare()
self.download_remote_input()
for f in self.log:
f.prepare()
if self.benchmark:
self.benchmark.prepare()
if not self.is_shadow:
return
# Create shadow directory structure
self.shadow_dir = tempfile.mkdtemp(
dir=self.rule.workflow.persistence.shadow_path
)
cwd = os.getcwd()
if self.rule.shadow_depth == "minimal":
# Re-create the directory structure in the shadow directory
for (f, d) in set(
[
(item, os.path.dirname(item))
for sublist in [self.input, self.output, self.log]
if sublist is not None
for item in sublist
]
):
if d and not os.path.isabs(d):
rel_path = os.path.relpath(d)
# Only create subdirectories
if not rel_path.split(os.path.sep)[0] == "..":
os.makedirs(
os.path.join(self.shadow_dir, rel_path), exist_ok=True
)
else:
raise RuleException(
"The following file name references a parent directory relative to your workdir.\n"
'This isn\'t supported for shadow: "minimal". Consider using an absolute path instead.\n{}'.format(
f
),
rule=self.rule,
)
# Symlink the input files
for rel_path in set(
[os.path.relpath(f) for f in self.input if not os.path.isabs(f)]
):
link = os.path.join(self.shadow_dir, rel_path)
original = os.path.relpath(rel_path, os.path.dirname(link))
os.symlink(original, link)
# Shallow simply symlink everything in the working directory.
elif self.rule.shadow_depth == "shallow":
for source in os.listdir(cwd):
link = os.path.join(self.shadow_dir, source)
os.symlink(os.path.abspath(source), link)
elif self.rule.shadow_depth == "full":
snakemake_dir = os.path.join(cwd, ".snakemake")
for dirpath, dirnames, filenames in os.walk(cwd):
# Must exclude .snakemake and its children to avoid infinite
# loop of symlinks.
if os.path.commonprefix([snakemake_dir, dirpath]) == snakemake_dir:
continue
for dirname in dirnames:
if dirname == ".snakemake":
continue
relative_source = os.path.relpath(os.path.join(dirpath, dirname))
shadow = os.path.join(self.shadow_dir, relative_source)
os.mkdir(shadow)
for filename in filenames:
source = os.path.join(dirpath, filename)
relative_source = os.path.relpath(source)
link = os.path.join(self.shadow_dir, relative_source)
os.symlink(source, link)
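    # Illustrative note (not part of the original method): shadow: "minimal" mirrors only
    # the parent directories of the job's files and symlinks its inputs, "shallow"
    # symlinks every top-level entry of the working directory, and "full" recreates the
    # whole tree (excluding .snakemake) as symlinks inside the shadow directory.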
def close_remote(self):
for f in self.input + self.output:
if f.is_remote:
f.remote_object.close()
def cleanup(self):
""" Cleanup output files. """
to_remove = [f for f in self.expanded_output if f.exists]
to_remove.extend([f for f in self.remote_input if f.exists_local])
to_remove.extend(
[
f
for f in self.remote_output
if (
f.exists_remote
if (f.is_remote and f.should_stay_on_remote)
else f.exists_local
)
]
)
if to_remove:
logger.info(
"Removing output files of failed job {}"
" since they might be corrupted:\n{}".format(self, ", ".join(to_remove))
)
for f in to_remove:
f.remove()
self.rmdir_empty_remote_dirs()
@property
def empty_remote_dirs(self):
for f in set(self.output) | set(self.input):
if f.is_remote and not f.should_stay_on_remote:
if os.path.exists(os.path.dirname(f)) and not len(
os.listdir(os.path.dirname(f))
):
yield os.path.dirname(f)
def rmdir_empty_remote_dirs(self):
for d in self.empty_remote_dirs:
try:
os.removedirs(d)
except:
pass # it's ok if we can't remove the leaf
def format_wildcards(self, string, **variables):
""" Format a string with variables from the job. """
_variables = dict()
_variables.update(self.rule.workflow.globals)
_variables.update(
dict(
input=self.input,
output=self.output,
params=self.params,
wildcards=self._format_wildcards,
threads=self.threads,
resources=self.resources,
log=self.log,
jobid=self.jobid,
version=self.rule.version,
name=self.name,
rule=self.rule.name,
rulename=self.rule.name,
bench_iteration=None,
)
)
_variables.update(variables)
try:
return format(string, **_variables)
except NameError as ex:
raise RuleException("NameError: " + str(ex), rule=self.rule)
except IndexError as ex:
raise RuleException("IndexError: " + str(ex), rule=self.rule)
def properties(self, omit_resources=["_cores", "_nodes"], **aux_properties):
resources = {
name: res
for name, res in self.resources.items()
if name not in omit_resources
}
params = {name: value for name, value in self.params.items()}
properties = {
"type": "single",
"rule": self.rule.name,
"local": self.is_local,
"input": self.input,
"output": self.output,
"wildcards": self.wildcards_dict,
"params": params,
"log": self.log,
"threads": self.threads,
"resources": resources,
"jobid": self.dag.jobid(self),
}
properties.update(aux_properties)
try:
return json.dumps(properties)
except TypeError:
del properties["params"]
return json.dumps(properties)
@property
def is_local(self):
return self.dag.workflow.is_local(self.rule)
def __repr__(self):
return self.rule.name
def __eq__(self, other):
if other is None:
return False
return (
self.rule == other.rule
and (self.wildcards_dict == other.wildcards_dict)
and (self.input == other.input)
)
def __lt__(self, other):
return self.rule.__lt__(other.rule)
def __gt__(self, other):
return self.rule.__gt__(other.rule)
def __hash__(self):
return self._hash
def expand_dynamic(self, pattern):
""" Expand dynamic files. """
return list(
listfiles(pattern, restriction=self.wildcards, omit_value=DYNAMIC_FILL)
)
def is_group(self):
return False
def log_info(self, skip_dynamic=False, indent=False, printshellcmd=True):
# skip dynamic jobs that will be "executed" only in dryrun mode
if skip_dynamic and self.dag.dynamic(self):
return
priority = self.priority
logger.job_info(
jobid=self.dag.jobid(self),
msg=self.message,
name=self.rule.name,
local=self.dag.workflow.is_local(self.rule),
input=list(format_files(self, self.input, self.dynamic_input)),
output=list(format_files(self, self.output, self.dynamic_output)),
log=list(self.log),
benchmark=self.benchmark,
wildcards=self.wildcards_dict,
reason=str(self.dag.reason(self)),
resources=self.resources,
priority="highest" if priority == Job.HIGHEST_PRIORITY else priority,
threads=self.threads,
indent=indent,
is_checkpoint=self.rule.is_checkpoint,
printshellcmd=printshellcmd,
)
logger.shellcmd(self.shellcmd, indent=indent)
if self.dynamic_output:
logger.info(
"Subsequent jobs will be added dynamically "
"depending on the output of this job",
indent=True,
)
def log_error(self, msg=None, indent=False, **kwargs):
logger.job_error(
name=self.rule.name,
jobid=self.dag.jobid(self),
output=list(format_files(self, self.output, self.dynamic_output)),
log=list(self.log),
conda_env=self.conda_env.path if self.conda_env else None,
aux=kwargs,
indent=indent,
shellcmd=self.shellcmd,
)
if msg is not None:
logger.error(msg)
def register(self):
self.dag.workflow.persistence.started(self)
def get_wait_for_files(self):
wait_for_files = []
wait_for_files.extend(self.local_input)
wait_for_files.extend(
f for f in self.remote_input if not f.should_stay_on_remote
)
if self.shadow_dir:
wait_for_files.append(self.shadow_dir)
if self.dag.workflow.use_conda and self.conda_env:
wait_for_files.append(self.conda_env_path)
return wait_for_files
@property
def jobid(self):
return self.dag.jobid(self)
def postprocess(
self,
upload_remote=True,
handle_log=True,
handle_touch=True,
handle_temp=True,
error=False,
ignore_missing_output=False,
assume_shared_fs=True,
latency_wait=None,
):
if assume_shared_fs:
if not error and handle_touch:
self.dag.handle_touch(self)
if handle_log:
self.dag.handle_log(self)
if not error:
self.dag.check_and_touch_output(
self, wait=latency_wait, ignore_missing_output=ignore_missing_output
)
self.dag.unshadow_output(self, only_log=error)
if not error:
self.dag.handle_remote(self, upload=upload_remote)
self.dag.handle_protected(self)
self.close_remote()
else:
if not error:
self.dag.check_and_touch_output(
self, wait=latency_wait, no_touch=True, force_stay_on_remote=True
)
if not error:
try:
self.dag.workflow.persistence.finished(self)
except IOError as e:
logger.warning(
"Error recording metadata for finished job "
"({}). Please ensure write permissions for the "
"directory {}".format(e, self.dag.workflow.persistence.path)
)
if handle_temp:
# temp handling has to happen after calling finished(),
# because we need to access temp output files to record
# start and end times.
self.dag.handle_temp(self)
@property
def name(self):
return self.rule.name
@property
def priority(self):
return self.dag.priority(self)
@property
def products(self):
products = list(self.output)
if self.benchmark:
products.append(self.benchmark)
products.extend(self.log)
return products
def get_targets(self):
return self.targetfile or [self.rule.name]
@property
def is_branched(self):
return self.rule.is_branched
@property
def rules(self):
return [self.rule.name]
@property
def restart_times(self):
return self.rule.restart_times
@property
def is_checkpoint(self):
return self.rule.is_checkpoint
def __len__(self):
return 1
class GroupJob(AbstractJob):
__slots__ = [
"groupid",
"jobs",
"_resources",
"_input",
"_output",
"_log",
"_inputsize",
"_all_products",
"_attempt",
]
def __init__(self, id, jobs):
self.groupid = id
self.jobs = frozenset(jobs)
self._resources = None
self._input = None
self._output = None
self._log = None
self._inputsize = None
self._all_products = None
self._attempt = self.dag.workflow.attempt
@property
def dag(self):
return next(iter(self.jobs)).dag
def merge(self, other):
assert other.groupid == self.groupid
self.jobs = self.jobs | other.jobs
def finalize(self):
# TODO determine resources based on toposort
dag = {
job: {dep for dep in self.dag.dependencies[job] if dep in self.jobs}
for job in self.jobs
}
from toposort import toposort
t = toposort(dag)
print(t)
@property
def all_products(self):
if self._all_products is None:
self._all_products = set(f for job in self.jobs for f in job.products)
return self._all_products
def __iter__(self):
return iter(self.jobs)
def __repr__(self):
return "JobGroup({},{})".format(self.groupid, repr(self.jobs))
def __contains__(self, job):
return job in self.jobs
def is_group(self):
return True
@property
def is_checkpoint(self):
return any(job.is_checkpoint for job in self.jobs)
@property
def is_updated(self):
return any(job.is_updated for job in self.jobs)
def log_info(self, skip_dynamic=False):
logger.group_info(groupid=self.groupid)
for job in sorted(self.jobs, key=lambda j: j.rule.name):
job.log_info(skip_dynamic, indent=True)
def log_error(self, msg=None, **kwargs):
logger.group_error(groupid=self.groupid)
for job in self.jobs:
job.log_error(msg=msg, indent=True, **kwargs)
def register(self):
for job in self.jobs:
job.register()
def remove_existing_output(self):
for job in self.jobs:
job.remove_existing_output()
def download_remote_input(self):
for job in self.jobs:
job.download_remote_input()
def get_wait_for_files(self):
local_input = [
f
for job in self.jobs
for f in job.local_input
if f not in self.all_products
]
remote_input = [
f
for job in self.jobs
for f in job.remote_input
if f not in self.all_products
]
wait_for_files = []
wait_for_files.extend(local_input)
wait_for_files.extend(f for f in remote_input if not f.should_stay_on_remote)
for job in self.jobs:
if job.shadow_dir:
wait_for_files.append(job.shadow_dir)
if self.dag.workflow.use_conda and job.conda_env:
wait_for_files.append(job.conda_env_path)
return wait_for_files
@property
def resources(self):
if self._resources is None:
self._resources = defaultdict(int)
pipe_group = any(
[any([is_flagged(o, "pipe") for o in job.output]) for job in self.jobs]
)
for job in self.jobs:
try:
job_resources = job.resources
except FileNotFoundError:
# Skip job if resource evaluation leads to a file not found error.
# This will be caused by an inner job, which needs files created by the same group.
# All we can do is to ignore such jobs for now.
continue
for res, value in job_resources.items():
if self.dag.workflow.run_local or pipe_group:
# in case of local execution, this must be a
# group of jobs that are connected with pipes
# and have to run simultaneously
self._resources[res] += value
else:
# take the maximum over all jobs
self._resources[res] = max(
self._resources.get(res, value), value
)
return Resources(fromdict=self._resources)
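    # Illustrative note (not part of the original class): if two grouped jobs request
    # mem_mb=4000 and mem_mb=6000, a local or pipe-connected group reserves their sum
    # (10000, since the jobs run simultaneously), while a regular group reserves the
    # maximum over its jobs (6000).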
@property
def input(self):
if self._input is None:
self._input = [
f for job in self.jobs for f in job.input if f not in self.all_products
]
return self._input
@property
def output(self):
all_input = set(f for job in self.jobs for f in job.input)
if self._output is None:
self._output = [
f for job in self.jobs for f in job.output if f not in all_input
]
return self._output
@property
def log(self):
if self._log is None:
self._log = [f for job in self.jobs for f in job.log]
return self._log
@property
def products(self):
all_input = set(f for job in self.jobs for f in job.input)
return [f for job in self.jobs for f in job.products if f not in all_input]
def properties(self, omit_resources=["_cores", "_nodes"], **aux_properties):
resources = {
name: res
for name, res in self.resources.items()
if name not in omit_resources
}
properties = {
"type": "group",
"groupid": self.groupid,
"local": self.is_local,
"input": self.input,
"output": self.output,
"threads": self.threads,
"resources": resources,
"jobid": self.jobid,
}
properties.update(aux_properties)
return json.dumps(properties)
@property
def jobid(self):
return str(get_uuid(",".join(str(job.jobid) for job in self.jobs)))
def cleanup(self):
for job in self.jobs:
job.cleanup()
def postprocess(self, error=False, **kwargs):
for job in self.jobs:
job.postprocess(handle_temp=False, error=error, **kwargs)
# Handle temp after per-job postprocess.
# This is necessary because group jobs are not topologically sorted,
# and we might otherwise delete a temp input file before it has been
# postprocessed by the outputting job in the same group.
if not error:
for job in self.jobs:
self.dag.handle_temp(job)
# remove all pipe outputs since all jobs of this group are done and the
# pipes are no longer needed
for job in self.jobs:
for f in job.output:
if is_flagged(f, "pipe"):
f.remove()
@property
def name(self):
return str(self.groupid)
def check_protected_output(self):
for job in self.jobs:
job.check_protected_output()
@property
def dynamic_input(self):
return [
f
for job in self.jobs
for f in job.dynamic_input
if f not in self.all_products
]
@property
def inputsize(self):
if self._inputsize is None:
self._inputsize = sum(f.size for f in self.input)
return self._inputsize
@property
def priority(self):
return max(self.dag.priority(job) for job in self.jobs)
@property
def is_local(self):
return all(job.is_local for job in self.jobs)
def format_wildcards(self, string, **variables):
""" Format a string with variables from the job. """
_variables = dict()
_variables.update(self.dag.workflow.globals)
_variables.update(
dict(
input=self.input,
output=self.output,
threads=self.threads,
jobid=self.jobid,
name=self.name,
rule="GROUP",
rulename="GROUP",
resources=self.resources,
)
)
_variables.update(variables)
try:
return format(string, **_variables)
except NameError as ex:
raise WorkflowError(
"NameError with group job {}: {}".format(self.jobid, str(ex))
)
except IndexError as ex:
raise WorkflowError(
"IndexError with group job {}: {}".format(self.jobid, str(ex))
)
@property
def threads(self):
return self.resources["_cores"]
def get_targets(self):
# jobs without output are targeted by rule name
targets = [job.rule.name for job in self.jobs if not job.products]
targets.extend(self.products)
return targets
@property
def attempt(self):
return self._attempt
@attempt.setter
def attempt(self, attempt):
# reset resources
self._resources = None
self._attempt = attempt
@property
def is_branched(self):
return any(job.is_branched for job in self.jobs)
@property
def needs_singularity(self):
return any(job.needs_singularity for job in self.jobs)
@property
def rules(self):
return [job.rule.name for job in self.jobs]
@property
def expanded_output(self):
"""Yields the entire expanded output of all jobs"""
for job in self.jobs:
yield from job.expanded_output
@property
def restart_times(self):
return max(job.restart_times for job in self.jobs)
def __len__(self):
return len(self.jobs)
def __hash__(self):
return hash(self.jobs)
def __eq__(self, other):
if other.is_group():
return self.jobs == other.jobs
else:
return False
class Reason:
__slots__ = [
"_updated_input",
"_updated_input_run",
"_missing_output",
"_incomplete_output",
"forced",
"noio",
"nooutput",
"derived",
]
def __init__(self):
self._updated_input = None
self._updated_input_run = None
self._missing_output = None
self._incomplete_output = None
self.forced = False
self.noio = False
self.nooutput = False
self.derived = True
@lazy_property
def updated_input(self):
return set()
@lazy_property
def updated_input_run(self):
return set()
@lazy_property
def missing_output(self):
return set()
@lazy_property
def incomplete_output(self):
return set()
def __str__(self):
s = list()
if self.forced:
s.append("Forced execution")
else:
if self.noio:
s.append(
"Rules with neither input nor " "output files are always executed."
)
elif self.nooutput:
s.append(
"Rules with a run or shell declaration but no output "
"are always executed."
)
else:
if self._missing_output:
s.append(
"Missing output files: {}".format(
", ".join(self.missing_output)
)
)
if self._incomplete_output:
s.append(
"Incomplete output files: {}".format(
", ".join(self.incomplete_output)
)
)
if self._updated_input:
updated_input = self.updated_input - self.updated_input_run
s.append("Updated input files: {}".format(", ".join(updated_input)))
if self._updated_input_run:
s.append(
"Input files updated by another job: {}".format(
", ".join(self.updated_input_run)
)
)
s = "; ".join(s)
return s
def __bool__(self):
return bool(
self.updated_input
or self.missing_output
or self.forced
or self.updated_input_run
or self.noio
or self.nooutput
)
| 31.102759
| 127
| 0.562363
|
e4c7a66de3ee4ac032625182902a2d3623544e35
| 4,246
|
py
|
Python
|
flask_app.py
|
gbrixey/decay
|
ff5eb7f5cf49892dad2ffe28aee1eb0d5797c767
|
[
"MIT"
] | null | null | null |
flask_app.py
|
gbrixey/decay
|
ff5eb7f5cf49892dad2ffe28aee1eb0d5797c767
|
[
"MIT"
] | 1
|
2020-02-01T04:01:05.000Z
|
2020-02-01T04:01:05.000Z
|
flask_app.py
|
gbrixey/decay
|
ff5eb7f5cf49892dad2ffe28aee1eb0d5797c767
|
[
"MIT"
] | null | null | null |
from flask import Flask, flash, g, redirect, render_template, request
from degrade import degrade_text, fake_degrade_jpeg
from constants import MAX_FILES
from util import *
from PIL import Image
import uuid
import os
import sqlite3
app = Flask(__name__)
DATABASE = os.path.join(app.root_path, 'data.db')
UPLOAD_FOLDER = os.path.join(app.root_path, 'static', 'images')
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
def ensure_upload_directory():
"""Creates the image upload directory if necessary."""
os.makedirs(UPLOAD_FOLDER, exist_ok = True)
def ensure_items_table():
"""Creates the database table if necessary."""
db = sqlite3.connect(DATABASE)
sql_create_table = '''
create table if not exists items (
id integer primary key,
filename text,
description text
);
'''
db.cursor().execute(sql_create_table)
db.close()
def degrade_database():
"""Degrades all images and text currently in the database."""
sql_fetch = '''select * from items order by id desc'''
items = g.db.cursor().execute(sql_fetch).fetchall()
for index, item in enumerate(items):
id = item['id']
description = item['description']
old_image_path = os.path.join(UPLOAD_FOLDER, item['filename'])
new_filename = random_jpeg_filename()
new_image_path = os.path.join(UPLOAD_FOLDER, new_filename)
strong = index >= 10
fake_degrade_jpeg(old_image_path, new_image_path, strong = strong)
os.remove(old_image_path)
new_description = degrade_text(description)
sql_update = '''
update items
set filename = ?,
description = ?
where id = ?
'''
g.db.cursor().execute(sql_update, (new_filename, new_description, id))
g.db.commit()
return
def trim_database_if_necessary():
"""Ensures that the database does not contain more than the maximum number of images."""
sql_fetch = '''select id from items order by id desc'''
items = g.db.cursor().execute(sql_fetch).fetchall()
for (index, item) in enumerate(items):
if index >= MAX_FILES:
id = item['id']
sql_delete = '''delete from items where id = ?'''
g.db.cursor().execute(sql_delete, (id,))
g.db.commit()
return
def random_jpeg_filename():
"""Returns a random filename ending in .jpg"""
name = str(uuid.uuid4())[:8]
return name + '.jpg'
def makedict(cursor, row):
return dict((cursor.description[i][0], value) for i, value in enumerate(row))
@app.before_request
def before_request():
g.db = sqlite3.connect(DATABASE)
g.db.row_factory = makedict
@app.after_request
def after_request(response):
g.db.close()
return response
@app.route('/')
def index():
return render_template('index.html')
@app.route('/content')
def get_content():
sql_fetch = '''select * from items order by id desc'''
items = g.db.cursor().execute(sql_fetch).fetchall()
return render_template('content.html', items = items)
@app.route('/submit_image', methods=['POST'])
def submit_image():
image_file = request.files.get('image')
image = jpeg_image_from_file(image_file)
    if image is None:
return redirect('/')
# Rotate/resize image if necessary
image = rotate_image_if_necessary(image)
image = crop_image_if_necessary(image)
image = resize_image_if_necessary(image)
filename = random_jpeg_filename()
path = os.path.join(UPLOAD_FOLDER, filename)
image.save(path, quality = 50, optimize = True)
# Degrade existing images and text before inserting the new image,
# so that the new image will start off with no glitches.
degrade_database()
description = request.form['description']
if len(description) > 1000:
description = description[:1000]
sql_insert = '''insert into items (filename, description) values (?, ?)'''
g.db.cursor().execute(sql_insert, (filename, description))
g.db.commit()
# Remove the oldest image if necessary.
trim_database_if_necessary()
return redirect('/')
ensure_upload_directory()
ensure_items_table()
if __name__ == '__main__':
app.run(debug = True)
| 33.433071
| 92
| 0.66439
|
a0b4fac51b42f479dc71ca2908a0ddc7c56457ba
| 1,190
|
py
|
Python
|
google/ads/googleads/v8/enums/types/frequency_cap_time_unit.py
|
wxxlouisa/google-ads-python
|
f24137966f6bfcb765a9b1fae79f2d23041825fe
|
[
"Apache-2.0"
] | 285
|
2018-10-05T16:47:58.000Z
|
2022-03-31T00:58:39.000Z
|
google/ads/googleads/v8/enums/types/frequency_cap_time_unit.py
|
wxxlouisa/google-ads-python
|
f24137966f6bfcb765a9b1fae79f2d23041825fe
|
[
"Apache-2.0"
] | 425
|
2018-09-10T13:32:41.000Z
|
2022-03-31T14:50:05.000Z
|
google/ads/googleads/v8/enums/types/frequency_cap_time_unit.py
|
wxxlouisa/google-ads-python
|
f24137966f6bfcb765a9b1fae79f2d23041825fe
|
[
"Apache-2.0"
] | 369
|
2018-11-28T07:01:00.000Z
|
2022-03-28T09:53:22.000Z
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package="google.ads.googleads.v8.enums",
marshal="google.ads.googleads.v8",
manifest={"FrequencyCapTimeUnitEnum",},
)
class FrequencyCapTimeUnitEnum(proto.Message):
r"""Container for enum describing the unit of time the cap is
defined at.
"""
class FrequencyCapTimeUnit(proto.Enum):
r"""Unit of time the cap is defined at (e.g. day, week)."""
UNSPECIFIED = 0
UNKNOWN = 1
DAY = 2
WEEK = 3
MONTH = 4
__all__ = tuple(sorted(__protobuf__.manifest))
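# Illustrative usage sketch (not part of the generated module): the nested enum members
# can be referenced directly, e.g. FrequencyCapTimeUnitEnum.FrequencyCapTimeUnit.WEEK,
# which compares equal to the integer value 3.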
| 29.02439
| 74
| 0.693277
|
c30293b723084423d030dd93f0e9cee5ea8cfbe3
| 301
|
py
|
Python
|
python/python 'char'?.py
|
1005281342/learn
|
c9d1e2e256842d9b4846c4870ac72e83d172b20e
|
[
"Apache-2.0"
] | 1
|
2018-11-29T01:01:32.000Z
|
2018-11-29T01:01:32.000Z
|
python/python 'char'?.py
|
1005281342/learn
|
c9d1e2e256842d9b4846c4870ac72e83d172b20e
|
[
"Apache-2.0"
] | null | null | null |
python/python 'char'?.py
|
1005281342/learn
|
c9d1e2e256842d9b4846c4870ac72e83d172b20e
|
[
"Apache-2.0"
] | null | null | null |
# First saw the b"" trick in the str class methods -- it defines a byte sequence
print(str.replace.__doc__)
# Give it a try; surprisingly, no error is raised
print(b"")
# Iterating over it just yields numbers, i.e. the ASCII codes
test_b = b"hello Python"
print(test_b)
for char in test_b:
    print(char)
# Python also provides the related built-in functions for Unicode characters
test_char = 'o'
number = ord(test_char)
print(number)
char = chr(number)
print(char)
| 13.681818
| 32
| 0.724252
|
8ad5ab1720fe47393b4f951601e71dc934a52fa6
| 2,103
|
py
|
Python
|
tcex/api/tc/v2/threat_intelligence/mappings/indicator/indicator_types/url.py
|
GShepherdTC/tcex
|
70b1199b8bb9e63f53e2ba792489267108c909cd
|
[
"Apache-2.0"
] | null | null | null |
tcex/api/tc/v2/threat_intelligence/mappings/indicator/indicator_types/url.py
|
GShepherdTC/tcex
|
70b1199b8bb9e63f53e2ba792489267108c909cd
|
[
"Apache-2.0"
] | null | null | null |
tcex/api/tc/v2/threat_intelligence/mappings/indicator/indicator_types/url.py
|
GShepherdTC/tcex
|
70b1199b8bb9e63f53e2ba792489267108c909cd
|
[
"Apache-2.0"
] | null | null | null |
"""ThreatConnect TI URL"""
# standard library
from typing import TYPE_CHECKING
from urllib.parse import quote_plus
# first-party
from tcex.api.tc.v2.threat_intelligence.mappings.indicator.indicator import Indicator
if TYPE_CHECKING:
# first-party
from tcex.api.tc.v2.threat_intelligence.threat_intelligence import ThreatIntelligence
class URL(Indicator):
"""Unique API calls for URL API Endpoints"""
def __init__(self, ti: 'ThreatIntelligence', **kwargs):
"""Initialize Class Properties.
Args:
ti (ThreatIntelligence): An instance of the ThreatIntelligence Class.
text (str, kwargs): [Required for Create] The URL value for this Indicator.
active (bool, kwargs): If False the indicator is marked "inactive" in TC.
confidence (str, kwargs): The threat confidence for this Indicator.
date_added (str, kwargs): [Read-Only] The date timestamp the Indicator was created.
last_modified (str, kwargs): [Read-Only] The date timestamp the Indicator was last
modified.
private_flag (bool, kwargs): If True the indicator is marked as private in TC.
rating (str, kwargs): The threat rating for this Indicator.
xid (str, kwargs): The external id for this Indicator.
"""
super().__init__(ti, sub_type='URL', api_entity='url', api_branch='urls', **kwargs)
self.unique_id = kwargs.get('unique_id', kwargs.get('text'))
self.data['text'] = self.unique_id
if self.unique_id:
self.unique_id = quote_plus(self.fully_decode_uri(self.unique_id))
def can_create(self):
"""Return True if address can be created.
If the text has been provided returns that the URL can be created, otherwise
returns that the URL cannot be created.
"""
        return self.data.get('text') is not None
def _set_unique_id(self, json_response):
"""Set the unique_id provided a json response."""
self.unique_id = quote_plus(self.fully_decode_uri(json_response.get('text', '')))
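# Illustrative usage sketch (not part of the original module); ``ti`` is assumed to be an
# already-constructed ThreatIntelligence instance and the URL value is a placeholder:
#     url_indicator = URL(ti, text='https://example.com/suspicious')
#     if url_indicator.can_create():
#         ...  # proceed with the inherited Indicator operations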
| 42.918367
| 95
| 0.671897
|
a15e5a681928ca008bcf6a35ba480c65f2466ea8
| 22,110
|
py
|
Python
|
sdk/lusid/models/create_relationship_definition_request.py
|
finbourne/lusid-sdk-python-generated-preview
|
9c36c953e8149443a4390ed7f0c04d01211401b6
|
[
"MIT"
] | null | null | null |
sdk/lusid/models/create_relationship_definition_request.py
|
finbourne/lusid-sdk-python-generated-preview
|
9c36c953e8149443a4390ed7f0c04d01211401b6
|
[
"MIT"
] | null | null | null |
sdk/lusid/models/create_relationship_definition_request.py
|
finbourne/lusid-sdk-python-generated-preview
|
9c36c953e8149443a4390ed7f0c04d01211401b6
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
LUSID API
FINBOURNE Technology # noqa: E501
The version of the OpenAPI document: 0.11.4425
Contact: info@finbourne.com
Generated by: https://openapi-generator.tech
"""
try:
from inspect import getfullargspec
except ImportError:
from inspect import getargspec as getfullargspec
import pprint
import re # noqa: F401
import six
from lusid.configuration import Configuration
class CreateRelationshipDefinitionRequest(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
required_map (dict): The key is attribute name
and the value is whether it is 'required' or 'optional'.
"""
openapi_types = {
'scope': 'str',
'code': 'str',
'source_entity_type': 'str',
'target_entity_type': 'str',
'display_name': 'str',
'outward_description': 'str',
'inward_description': 'str',
'life_time': 'str',
'relationship_cardinality': 'str'
}
attribute_map = {
'scope': 'scope',
'code': 'code',
'source_entity_type': 'sourceEntityType',
'target_entity_type': 'targetEntityType',
'display_name': 'displayName',
'outward_description': 'outwardDescription',
'inward_description': 'inwardDescription',
'life_time': 'lifeTime',
'relationship_cardinality': 'relationshipCardinality'
}
required_map = {
'scope': 'required',
'code': 'required',
'source_entity_type': 'required',
'target_entity_type': 'required',
'display_name': 'required',
'outward_description': 'required',
'inward_description': 'required',
'life_time': 'optional',
'relationship_cardinality': 'optional'
}
def __init__(self, scope=None, code=None, source_entity_type=None, target_entity_type=None, display_name=None, outward_description=None, inward_description=None, life_time=None, relationship_cardinality=None, local_vars_configuration=None): # noqa: E501
"""CreateRelationshipDefinitionRequest - a model defined in OpenAPI"
:param scope: The scope that the relationship definition exists in. (required)
:type scope: str
:param code: The code of the relationship definition. Together with the scope this uniquely defines the relationship definition. (required)
:type code: str
:param source_entity_type: The entity type of the source entity object. Allowed values are 'Portfolio', 'PortfolioGroup', 'Person', 'LegalEntity' or a custom entity type prefixed with '~'. (required)
:type source_entity_type: str
:param target_entity_type: The entity type of the target entity object. Allowed values are 'Portfolio', 'PortfolioGroup', 'Person', 'LegalEntity' or a custom entity type prefixed with '~'. (required)
:type target_entity_type: str
:param display_name: The display name of the relationship definition. (required)
:type display_name: str
:param outward_description: The description to relate source entity object and target entity object. (required)
:type outward_description: str
:param inward_description: The description to relate target entity object and source entity object. (required)
:type inward_description: str
:param life_time: Describes how the relationships can change over time. Allowed values are 'Perpetual' and 'TimeVariant', defaults to 'Perpetual' if not specified.
:type life_time: str
:param relationship_cardinality: Describes the cardinality of the relationship with a specific source entity object and relationships under this definition. Allowed values are 'ManyToMany' and 'ManyToOne', defaults to 'ManyToMany' if not specified.
:type relationship_cardinality: str
""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration.get_default_copy()
self.local_vars_configuration = local_vars_configuration
self._scope = None
self._code = None
self._source_entity_type = None
self._target_entity_type = None
self._display_name = None
self._outward_description = None
self._inward_description = None
self._life_time = None
self._relationship_cardinality = None
self.discriminator = None
self.scope = scope
self.code = code
self.source_entity_type = source_entity_type
self.target_entity_type = target_entity_type
self.display_name = display_name
self.outward_description = outward_description
self.inward_description = inward_description
self.life_time = life_time
self.relationship_cardinality = relationship_cardinality
@property
def scope(self):
"""Gets the scope of this CreateRelationshipDefinitionRequest. # noqa: E501
The scope that the relationship definition exists in. # noqa: E501
:return: The scope of this CreateRelationshipDefinitionRequest. # noqa: E501
:rtype: str
"""
return self._scope
@scope.setter
def scope(self, scope):
"""Sets the scope of this CreateRelationshipDefinitionRequest.
The scope that the relationship definition exists in. # noqa: E501
:param scope: The scope of this CreateRelationshipDefinitionRequest. # noqa: E501
:type scope: str
"""
if self.local_vars_configuration.client_side_validation and scope is None: # noqa: E501
raise ValueError("Invalid value for `scope`, must not be `None`") # noqa: E501
if (self.local_vars_configuration.client_side_validation and
scope is not None and len(scope) > 64):
raise ValueError("Invalid value for `scope`, length must be less than or equal to `64`") # noqa: E501
if (self.local_vars_configuration.client_side_validation and
scope is not None and len(scope) < 1):
raise ValueError("Invalid value for `scope`, length must be greater than or equal to `1`") # noqa: E501
if (self.local_vars_configuration.client_side_validation and
scope is not None and not re.search(r'^[a-zA-Z0-9\-_]+$', scope)): # noqa: E501
raise ValueError(r"Invalid value for `scope`, must be a follow pattern or equal to `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
self._scope = scope
@property
def code(self):
"""Gets the code of this CreateRelationshipDefinitionRequest. # noqa: E501
The code of the relationship definition. Together with the scope this uniquely defines the relationship definition. # noqa: E501
:return: The code of this CreateRelationshipDefinitionRequest. # noqa: E501
:rtype: str
"""
return self._code
@code.setter
def code(self, code):
"""Sets the code of this CreateRelationshipDefinitionRequest.
The code of the relationship definition. Together with the scope this uniquely defines the relationship definition. # noqa: E501
:param code: The code of this CreateRelationshipDefinitionRequest. # noqa: E501
:type code: str
"""
if self.local_vars_configuration.client_side_validation and code is None: # noqa: E501
raise ValueError("Invalid value for `code`, must not be `None`") # noqa: E501
if (self.local_vars_configuration.client_side_validation and
code is not None and len(code) > 64):
raise ValueError("Invalid value for `code`, length must be less than or equal to `64`") # noqa: E501
if (self.local_vars_configuration.client_side_validation and
code is not None and len(code) < 1):
raise ValueError("Invalid value for `code`, length must be greater than or equal to `1`") # noqa: E501
if (self.local_vars_configuration.client_side_validation and
code is not None and not re.search(r'^[a-zA-Z0-9\-_]+$', code)): # noqa: E501
raise ValueError(r"Invalid value for `code`, must be a follow pattern or equal to `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
self._code = code
@property
def source_entity_type(self):
"""Gets the source_entity_type of this CreateRelationshipDefinitionRequest. # noqa: E501
The entity type of the source entity object. Allowed values are 'Portfolio', 'PortfolioGroup', 'Person', 'LegalEntity' or a custom entity type prefixed with '~'. # noqa: E501
:return: The source_entity_type of this CreateRelationshipDefinitionRequest. # noqa: E501
:rtype: str
"""
return self._source_entity_type
@source_entity_type.setter
def source_entity_type(self, source_entity_type):
"""Sets the source_entity_type of this CreateRelationshipDefinitionRequest.
The entity type of the source entity object. Allowed values are 'Portfolio', 'PortfolioGroup', 'Person', 'LegalEntity' or a custom entity type prefixed with '~'. # noqa: E501
:param source_entity_type: The source_entity_type of this CreateRelationshipDefinitionRequest. # noqa: E501
:type source_entity_type: str
"""
if self.local_vars_configuration.client_side_validation and source_entity_type is None: # noqa: E501
raise ValueError("Invalid value for `source_entity_type`, must not be `None`") # noqa: E501
if (self.local_vars_configuration.client_side_validation and
source_entity_type is not None and len(source_entity_type) > 64):
raise ValueError("Invalid value for `source_entity_type`, length must be less than or equal to `64`") # noqa: E501
if (self.local_vars_configuration.client_side_validation and
source_entity_type is not None and len(source_entity_type) < 1):
raise ValueError("Invalid value for `source_entity_type`, length must be greater than or equal to `1`") # noqa: E501
self._source_entity_type = source_entity_type
@property
def target_entity_type(self):
"""Gets the target_entity_type of this CreateRelationshipDefinitionRequest. # noqa: E501
The entity type of the target entity object. Allowed values are 'Portfolio', 'PortfolioGroup', 'Person', 'LegalEntity' or a custom entity type prefixed with '~'. # noqa: E501
:return: The target_entity_type of this CreateRelationshipDefinitionRequest. # noqa: E501
:rtype: str
"""
return self._target_entity_type
@target_entity_type.setter
def target_entity_type(self, target_entity_type):
"""Sets the target_entity_type of this CreateRelationshipDefinitionRequest.
The entity type of the target entity object. Allowed values are 'Portfolio', 'PortfolioGroup', 'Person', 'LegalEntity' or a custom entity type prefixed with '~'. # noqa: E501
:param target_entity_type: The target_entity_type of this CreateRelationshipDefinitionRequest. # noqa: E501
:type target_entity_type: str
"""
if self.local_vars_configuration.client_side_validation and target_entity_type is None: # noqa: E501
raise ValueError("Invalid value for `target_entity_type`, must not be `None`") # noqa: E501
if (self.local_vars_configuration.client_side_validation and
target_entity_type is not None and len(target_entity_type) > 64):
raise ValueError("Invalid value for `target_entity_type`, length must be less than or equal to `64`") # noqa: E501
if (self.local_vars_configuration.client_side_validation and
target_entity_type is not None and len(target_entity_type) < 1):
raise ValueError("Invalid value for `target_entity_type`, length must be greater than or equal to `1`") # noqa: E501
self._target_entity_type = target_entity_type
@property
def display_name(self):
"""Gets the display_name of this CreateRelationshipDefinitionRequest. # noqa: E501
The display name of the relationship definition. # noqa: E501
:return: The display_name of this CreateRelationshipDefinitionRequest. # noqa: E501
:rtype: str
"""
return self._display_name
@display_name.setter
def display_name(self, display_name):
"""Sets the display_name of this CreateRelationshipDefinitionRequest.
The display name of the relationship definition. # noqa: E501
:param display_name: The display_name of this CreateRelationshipDefinitionRequest. # noqa: E501
:type display_name: str
"""
if self.local_vars_configuration.client_side_validation and display_name is None: # noqa: E501
raise ValueError("Invalid value for `display_name`, must not be `None`") # noqa: E501
if (self.local_vars_configuration.client_side_validation and
display_name is not None and len(display_name) > 512):
raise ValueError("Invalid value for `display_name`, length must be less than or equal to `512`") # noqa: E501
if (self.local_vars_configuration.client_side_validation and
display_name is not None and len(display_name) < 1):
raise ValueError("Invalid value for `display_name`, length must be greater than or equal to `1`") # noqa: E501
if (self.local_vars_configuration.client_side_validation and
display_name is not None and not re.search(r'^[\s\S]*$', display_name)): # noqa: E501
raise ValueError(r"Invalid value for `display_name`, must be a follow pattern or equal to `/^[\s\S]*$/`") # noqa: E501
self._display_name = display_name
@property
def outward_description(self):
"""Gets the outward_description of this CreateRelationshipDefinitionRequest. # noqa: E501
The description to relate source entity object and target entity object. # noqa: E501
:return: The outward_description of this CreateRelationshipDefinitionRequest. # noqa: E501
:rtype: str
"""
return self._outward_description
@outward_description.setter
def outward_description(self, outward_description):
"""Sets the outward_description of this CreateRelationshipDefinitionRequest.
The description to relate source entity object and target entity object. # noqa: E501
:param outward_description: The outward_description of this CreateRelationshipDefinitionRequest. # noqa: E501
:type outward_description: str
"""
if self.local_vars_configuration.client_side_validation and outward_description is None: # noqa: E501
raise ValueError("Invalid value for `outward_description`, must not be `None`") # noqa: E501
if (self.local_vars_configuration.client_side_validation and
outward_description is not None and len(outward_description) > 512):
raise ValueError("Invalid value for `outward_description`, length must be less than or equal to `512`") # noqa: E501
if (self.local_vars_configuration.client_side_validation and
outward_description is not None and len(outward_description) < 1):
raise ValueError("Invalid value for `outward_description`, length must be greater than or equal to `1`") # noqa: E501
if (self.local_vars_configuration.client_side_validation and
outward_description is not None and not re.search(r'^[\s\S]*$', outward_description)): # noqa: E501
raise ValueError(r"Invalid value for `outward_description`, must be a follow pattern or equal to `/^[\s\S]*$/`") # noqa: E501
self._outward_description = outward_description
@property
def inward_description(self):
"""Gets the inward_description of this CreateRelationshipDefinitionRequest. # noqa: E501
The description to relate target entity object and source entity object. # noqa: E501
:return: The inward_description of this CreateRelationshipDefinitionRequest. # noqa: E501
:rtype: str
"""
return self._inward_description
@inward_description.setter
def inward_description(self, inward_description):
"""Sets the inward_description of this CreateRelationshipDefinitionRequest.
The description to relate target entity object and source entity object. # noqa: E501
:param inward_description: The inward_description of this CreateRelationshipDefinitionRequest. # noqa: E501
:type inward_description: str
"""
if self.local_vars_configuration.client_side_validation and inward_description is None: # noqa: E501
raise ValueError("Invalid value for `inward_description`, must not be `None`") # noqa: E501
if (self.local_vars_configuration.client_side_validation and
inward_description is not None and len(inward_description) > 512):
raise ValueError("Invalid value for `inward_description`, length must be less than or equal to `512`") # noqa: E501
if (self.local_vars_configuration.client_side_validation and
inward_description is not None and len(inward_description) < 1):
raise ValueError("Invalid value for `inward_description`, length must be greater than or equal to `1`") # noqa: E501
if (self.local_vars_configuration.client_side_validation and
inward_description is not None and not re.search(r'^[\s\S]*$', inward_description)): # noqa: E501
raise ValueError(r"Invalid value for `inward_description`, must be a follow pattern or equal to `/^[\s\S]*$/`") # noqa: E501
self._inward_description = inward_description
@property
def life_time(self):
"""Gets the life_time of this CreateRelationshipDefinitionRequest. # noqa: E501
Describes how the relationships can change over time. Allowed values are 'Perpetual' and 'TimeVariant', defaults to 'Perpetual' if not specified. # noqa: E501
:return: The life_time of this CreateRelationshipDefinitionRequest. # noqa: E501
:rtype: str
"""
return self._life_time
@life_time.setter
def life_time(self, life_time):
"""Sets the life_time of this CreateRelationshipDefinitionRequest.
Describes how the relationships can change over time. Allowed values are 'Perpetual' and 'TimeVariant', defaults to 'Perpetual' if not specified. # noqa: E501
:param life_time: The life_time of this CreateRelationshipDefinitionRequest. # noqa: E501
:type life_time: str
"""
self._life_time = life_time
@property
def relationship_cardinality(self):
"""Gets the relationship_cardinality of this CreateRelationshipDefinitionRequest. # noqa: E501
Describes the cardinality of the relationship with a specific source entity object and relationships under this definition. Allowed values are 'ManyToMany' and 'ManyToOne', defaults to 'ManyToMany' if not specified. # noqa: E501
:return: The relationship_cardinality of this CreateRelationshipDefinitionRequest. # noqa: E501
:rtype: str
"""
return self._relationship_cardinality
@relationship_cardinality.setter
def relationship_cardinality(self, relationship_cardinality):
"""Sets the relationship_cardinality of this CreateRelationshipDefinitionRequest.
Describes the cardinality of the relationship with a specific source entity object and relationships under this definition. Allowed values are 'ManyToMany' and 'ManyToOne', defaults to 'ManyToMany' if not specified. # noqa: E501
:param relationship_cardinality: The relationship_cardinality of this CreateRelationshipDefinitionRequest. # noqa: E501
:type relationship_cardinality: str
"""
self._relationship_cardinality = relationship_cardinality
def to_dict(self, serialize=False):
"""Returns the model properties as a dict"""
result = {}
def convert(x):
if hasattr(x, "to_dict"):
args = getfullargspec(x.to_dict).args
if len(args) == 1:
return x.to_dict()
else:
return x.to_dict(serialize)
else:
return x
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
attr = self.attribute_map.get(attr, attr) if serialize else attr
if isinstance(value, list):
result[attr] = list(map(
lambda x: convert(x),
value
))
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], convert(item[1])),
value.items()
))
else:
result[attr] = convert(value)
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, CreateRelationshipDefinitionRequest):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, CreateRelationshipDefinitionRequest):
return True
return self.to_dict() != other.to_dict()
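# Illustrative construction sketch (not part of the generated module); every value below
# is a placeholder chosen to satisfy the required fields and validation patterns:
#     request = CreateRelationshipDefinitionRequest(
#         scope='my-scope',
#         code='my-code',
#         source_entity_type='Portfolio',
#         target_entity_type='Person',
#         display_name='Portfolio manager',
#         outward_description='is managed by',
#         inward_description='manages',
#     )
#     request.to_dict()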
| 48.486842
| 258
| 0.676979
|
81afc1cbcc371239f8677f1cb3cf68f2cff02038
| 4,586
|
py
|
Python
|
python55.py
|
onnela-lab/mech-mle
|
8a36c1a75dbbc665feb642f527aeb09e7462e90b
|
[
"BSD-3-Clause"
] | null | null | null |
python55.py
|
onnela-lab/mech-mle
|
8a36c1a75dbbc665feb642f527aeb09e7462e90b
|
[
"BSD-3-Clause"
] | null | null | null |
python55.py
|
onnela-lab/mech-mle
|
8a36c1a75dbbc665feb642f527aeb09e7462e90b
|
[
"BSD-3-Clause"
] | null | null | null |
# JP Onnela
# April 20, 2021
# Edited May 12, 2021 by Jonathan Larson
import networkx as nx
import random
import scipy.stats as ss
import time
def generate_DMC(q_mod, q_con, n):
"""Generate DMC model realization given parameters."""
G = nx.Graph()
G.add_edge(0,1)
new_nodes = list(range(2,n))
anchor_nodes = []
for v in new_nodes:
u = random.choice(list(G.nodes()))
anchor_nodes.append(u)
G.add_node(v)
# duplication
G.add_edges_from([(v,w) for w in G.neighbors(u)])
# mutation
for w in list(G.neighbors(u)):
if ss.bernoulli.rvs(q_mod):
edge = random.choice([(v,w), (u,w)])
G.remove_edge(*edge)
# complementation
if ss.bernoulli.rvs(q_con):
G.add_edge(u,v)
return (G, new_nodes, anchor_nodes)
def deconstruct_DMC(G, alpha, beta):
"""Deconstruct a DMC graph over a single step."""
# reverse complementation
if G.has_edge(alpha, beta):
G.remove_edge(alpha, beta)
w = 1
else:
w = 0
# reverse mutation
alpha_neighbors = set(G.neighbors(alpha))
beta_neighbors = set(G.neighbors(beta))
x = len(alpha_neighbors & beta_neighbors)
y = len(alpha_neighbors | beta_neighbors)
for neighbor in alpha_neighbors:
G.add_edge(beta, neighbor)
# reverse duplication
G.remove_node(alpha)
return (w, x, y)
def find_min_uni_pair(G):
"""Find pair of nodes that have minimal cardinality of the union of their neighbors."""
alpha = None
beta = None
union_size = G.number_of_nodes()
nodes = list(G.nodes())
random.shuffle(nodes)
for u in nodes:
for v in nodes:
if u > v:
u_neighbors = set(G.neighbors(u))
v_neighbors = set(G.neighbors(v))
y = len(u_neighbors | v_neighbors)
if G.has_edge(u,v):
y = y - 2
if y < union_size:
union_size = y
alpha = u
beta = v
return (alpha, beta, union_size)
def deconstruct(G):
"""Deconstruct the graph until."""
alphas = []
betas = []
W = 0
X = 0
Y = 0
(alpha, beta, union_size) = find_min_uni_pair(G)
while (not alpha is None and not beta is None):
print("Number of nodes remaining:", G.number_of_nodes())
alphas.append(alpha)
betas.append(beta)
(w, x, y) = deconstruct_DMC(G, alpha, beta)
W += w
X += x
Y += y
(alpha, beta, union_size) = find_min_uni_pair(G)
return (alphas, betas, W, X, Y)
def estimate_parms(W, X, Y, n):
"""Compute estimates of q_mod and q_con parameters."""
q_mod_hat = 1 - X / Y
q_con_hat = W / (n - 1)
return (q_mod_hat, q_con_hat)
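# Illustrative worked example (not part of the original script): if deconstructing an
# n = 100 node graph yields W = 30 reverse-complementation edge removals, X = 120 shared
# neighbours and Y = 400 union neighbours, then
#     q_mod_hat = 1 - 120 / 400 = 0.7
#     q_con_hat = 30 / (100 - 1) ~= 0.303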
def read_edgelist(input_file):
"""Read edgelist from input file"""
G = nx.Graph()
counter = 0
for line in open(input_file):
counter += 1
line = line.rstrip().split("\t")
node_i = line[0]
node_j = line[1]
G.add_edge(node_i, node_j)
return (G, counter)
def print_stats(G, new_nodes, anchor_nodes):
"""Print out some statistics."""
print("Nodes:", G.nodes())
print("Edges:", G.edges())
print("New nodes (alpha):", new_nodes)
print("Anchor nodes (beta):", anchor_nodes)
def save_results(output_file):
F = open(output_file, "w")
# alphas
for alpha in alphas:
F.write(str(alpha) + " ")
F.write("\n")
# betas
for beta in betas:
F.write(str(beta) + " ")
F.write("\n")
# others
F.write(str(W) + " " + str(X) + " " + str(Y) + " " + str(q_mod_hat) + " " + str(q_con_hat))
F.close()
# ----------------------------------------------------------------
# input and output files
input_file = "HuRI.tsv"
# read data
(G, counter) = read_edgelist(input_file)
# sample nodes
sampled_nodes = random.sample(list(G.nodes()),round(0.55 * G.number_of_nodes()))
G.remove_nodes_from([n for n in G if n not in set(sampled_nodes)])
G.remove_edges_from(nx.selfloop_edges(G))
print(G.number_of_edges())
# degenerate graph
n = G.number_of_nodes()
start = time.time()
(alphas, betas, W, X, Y) = deconstruct(G)
end = time.time()
print("Time elapsed:", end - start)
(q_mod_hat, q_con_hat) = estimate_parms(W, X, Y, n)
print("Parameter estimates:", q_mod_hat, q_con_hat)
| 26.976471
| 93
| 0.557567
|
d371ebc2fb0fb6c25e6410976f192c5374a3aeb7
| 1,866
|
py
|
Python
|
fimed/routes/authentication.py
|
dandobjim/FIMED2.0-BACKEND
|
1118a1afcf62a2d39de3464dce3929f008dba0ec
|
[
"MIT"
] | null | null | null |
fimed/routes/authentication.py
|
dandobjim/FIMED2.0-BACKEND
|
1118a1afcf62a2d39de3464dce3929f008dba0ec
|
[
"MIT"
] | 2
|
2021-04-06T18:29:57.000Z
|
2021-06-02T03:57:43.000Z
|
fimed/routes/authentication.py
|
dandobjim/FIMED2.0-BACKEND
|
1118a1afcf62a2d39de3464dce3929f008dba0ec
|
[
"MIT"
] | null | null | null |
from datetime import timedelta
from fastapi import HTTPException, Depends, APIRouter
from fastapi.security import OAuth2PasswordRequestForm
from starlette.status import HTTP_409_CONFLICT, HTTP_401_UNAUTHORIZED
from fimed.auth import (
authenticate_user,
ACCESS_TOKEN_EXPIRE_MINUTES,
create_access_token,
get_current_active_user,
)
from fimed.model.user import UserCreateRequest, UserInDB, User, Token
router = APIRouter()
@router.post(
"/register", name="Sing-up authentication endpoint", tags=["auth"], response_model=UserInDB,
)
async def register_to_system(user: UserCreateRequest):
authenticated_user = authenticate_user(user.username, user.password)
if authenticated_user:
raise HTTPException(
status_code=HTTP_409_CONFLICT, detail="User already exists",
)
user_in_db = User.save(user)
return user_in_db
@router.post(
"/login", name="Login authentication endpoint", tags=["auth"], response_model=Token,
)
async def login_for_access_token(form_data: OAuth2PasswordRequestForm = Depends()):
user = authenticate_user(form_data.username, form_data.password)
# print("Entro aqui y compruebo usuario")
if not user:
print("error de usuario")
raise HTTPException(
status_code=HTTP_401_UNAUTHORIZED,
detail="Incorrect username or password",
headers={"WWW-Authenticate": "Bearer"},
)
access_token_expires = timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES)
access_token = create_access_token(data={"sub": user.username}, expires_delta=access_token_expires)
return {"access_token": access_token, "token_type": "bearer"}
@router.get("/user/me", name="Get user data", tags=["user"], response_model=UserInDB)
async def read_users_me(current_user: UserInDB = Depends(get_current_active_user)):
return current_user
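# Illustrative client-side sketch (not part of the original module), assuming the router
# is mounted at the application root on localhost:8000 and ``httpx`` is installed:
#     import httpx
#     resp = httpx.post('http://localhost:8000/login',
#                       data={'username': 'alice', 'password': 'secret'})
#     token = resp.json()['access_token']
#     me = httpx.get('http://localhost:8000/user/me',
#                    headers={'Authorization': f'Bearer {token}'})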
| 35.884615
| 103
| 0.743301
|
109a5dc04ae25242361d347994957652d83be495
| 3,304
|
py
|
Python
|
homeassistant/auth/permissions/util.py
|
petewill/home-assistant
|
5859dba4344f05fb8774aa1207e47ac28f627a67
|
[
"Apache-2.0"
] | 3
|
2020-05-18T10:18:16.000Z
|
2020-12-08T11:27:55.000Z
|
homeassistant/auth/permissions/util.py
|
petewill/home-assistant
|
5859dba4344f05fb8774aa1207e47ac28f627a67
|
[
"Apache-2.0"
] | 39
|
2016-12-16T12:40:34.000Z
|
2017-02-13T17:53:42.000Z
|
homeassistant/auth/permissions/util.py
|
petewill/home-assistant
|
5859dba4344f05fb8774aa1207e47ac28f627a67
|
[
"Apache-2.0"
] | 6
|
2020-04-10T06:21:11.000Z
|
2021-07-01T08:53:38.000Z
|
"""Helpers to deal with permissions."""
from functools import wraps
from typing import Callable, Dict, List, Optional, cast
from .const import SUBCAT_ALL
from .models import PermissionLookup
from .types import CategoryType, SubCategoryDict, ValueType
LookupFunc = Callable[[PermissionLookup, SubCategoryDict, str], Optional[ValueType]]
SubCatLookupType = Dict[str, LookupFunc]
def lookup_all(
perm_lookup: PermissionLookup, lookup_dict: SubCategoryDict, object_id: str
) -> ValueType:
"""Look up permission for all."""
# In case of ALL category, lookup_dict IS the schema.
return cast(ValueType, lookup_dict)
def compile_policy(
policy: CategoryType, subcategories: SubCatLookupType, perm_lookup: PermissionLookup
) -> Callable[[str, str], bool]: # noqa
"""Compile policy into a function that tests policy.
Subcategories are mapping key -> lookup function, ordered by highest
priority first.
"""
# None, False, empty dict
if not policy:
def apply_policy_deny_all(entity_id: str, key: str) -> bool:
"""Decline all."""
return False
return apply_policy_deny_all
if policy is True:
def apply_policy_allow_all(entity_id: str, key: str) -> bool:
"""Approve all."""
return True
return apply_policy_allow_all
assert isinstance(policy, dict)
funcs: List[Callable[[str, str], Optional[bool]]] = []
for key, lookup_func in subcategories.items():
lookup_value = policy.get(key)
# If any lookup value is `True`, it will always be positive
if isinstance(lookup_value, bool):
return lambda object_id, key: True
if lookup_value is not None:
funcs.append(_gen_dict_test_func(perm_lookup, lookup_func, lookup_value))
if len(funcs) == 1:
func = funcs[0]
@wraps(func)
def apply_policy_func(object_id: str, key: str) -> bool:
"""Apply a single policy function."""
return func(object_id, key) is True
return apply_policy_func
def apply_policy_funcs(object_id: str, key: str) -> bool:
"""Apply several policy functions."""
for func in funcs:
result = func(object_id, key)
if result is not None:
return result
return False
return apply_policy_funcs
def _gen_dict_test_func(
perm_lookup: PermissionLookup, lookup_func: LookupFunc, lookup_dict: SubCategoryDict
) -> Callable[[str, str], Optional[bool]]: # noqa
"""Generate a lookup function."""
def test_value(object_id: str, key: str) -> Optional[bool]:
"""Test if permission is allowed based on the keys."""
schema: ValueType = lookup_func(perm_lookup, lookup_dict, object_id)
if schema is None or isinstance(schema, bool):
return schema
assert isinstance(schema, dict)
return schema.get(key)
return test_value
def test_all(policy: CategoryType, key: str) -> bool:
"""Test if a policy has an ALL access for a specific key."""
if not isinstance(policy, dict):
return bool(policy)
all_policy = policy.get(SUBCAT_ALL)
if not isinstance(all_policy, dict):
return bool(all_policy)
return all_policy.get(key, False)
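# --- Hedged usage sketch (added for illustration; not part of the original module). ---
# A toy policy compiled with the helpers above. Passing None as the PermissionLookup is an
# assumption made only for this sketch (lookup_all never touches it), and the entity id is
# made up.
_allow_all = compile_policy({SUBCAT_ALL: True}, {SUBCAT_ALL: lookup_all}, None)
assert _allow_all("light.kitchen", "read") is True   # the boolean shortcut approves everything
_deny_all = compile_policy(False, {SUBCAT_ALL: lookup_all}, None)
assert _deny_all("light.kitchen", "read") is False   # a falsy policy declines everything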
| 29.765766
| 88
| 0.663438
|
9a5a2ca08b325b9cb27a6db4ed72368abb3fec54
| 746
|
py
|
Python
|
test/odd_even_sort_test.py
|
southpolemonkey/py-sorting
|
e5d76f92dc17bf5b6a070924d071b14183e088a8
|
[
"MIT"
] | 94
|
2017-02-21T15:29:38.000Z
|
2022-03-26T09:02:59.000Z
|
test/odd_even_sort_test.py
|
southpolemonkey/py-sorting
|
e5d76f92dc17bf5b6a070924d071b14183e088a8
|
[
"MIT"
] | 17
|
2015-07-25T06:50:28.000Z
|
2015-09-19T22:27:09.000Z
|
test/odd_even_sort_test.py
|
southpolemonkey/py-sorting
|
e5d76f92dc17bf5b6a070924d071b14183e088a8
|
[
"MIT"
] | 28
|
2017-04-16T10:13:44.000Z
|
2022-01-12T04:12:43.000Z
|
import unittest
import os
import sys
from base_custom_comparison_sort_test import BaseCustomComparisonSortTest
from base_positive_integer_sort_test import BasePositiveIntegerSortTest
from base_negative_integer_sort_test import BaseNegativeIntegerSortTest
from base_string_sort_test import BaseStringSortTest
sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, 'sort'))
import odd_even_sort
class OddEvenSortTest(unittest.TestCase,
BaseCustomComparisonSortTest,
BasePositiveIntegerSortTest,
BaseNegativeIntegerSortTest,
BaseStringSortTest):
def setUp(self):
self.sort = odd_even_sort.sort
if __name__ == '__main__':
unittest.main()
| 32.434783
| 75
| 0.756032
|
f7eb75b6213ee58cac86a080a6eadfaec1c90ba8
| 375
|
py
|
Python
|
ENCODN/BUTTONS/CRYPTOGRAPHY.py
|
chrisyo99/ENCODN
|
578a5eb87b68e4ba5ebb1c87808ad04aa160df5e
|
[
"MIT"
] | null | null | null |
ENCODN/BUTTONS/CRYPTOGRAPHY.py
|
chrisyo99/ENCODN
|
578a5eb87b68e4ba5ebb1c87808ad04aa160df5e
|
[
"MIT"
] | null | null | null |
ENCODN/BUTTONS/CRYPTOGRAPHY.py
|
chrisyo99/ENCODN
|
578a5eb87b68e4ba5ebb1c87808ad04aa160df5e
|
[
"MIT"
] | null | null | null |
from tkinter import *
def CRYPTOGRAPHY(master=None):
frame = Frame(master,bg="#215d9c", width = 800, height=550)
frame.grid(row=0, column=0, sticky="e")
t_names = ["MODERN CRYPTOGRAPHY","OTHERS"]
b = []
    for i in range(len(t_names)):
        # Button.grid() returns None, so build the widget first and keep a reference to it
        btn = Button(master, text=t_names[i], bg='#c4c4c4', fg='black')
        btn.grid(row=0, column=0, sticky="nw", pady=(32 * (i + 1)))
        b.append(btn)
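# --- Hedged usage sketch (added for illustration; not part of the original file). ---
# Assumes the function above is meant to be placed inside an existing Tk window; the
# root window below exists only for this demonstration.
if __name__ == "__main__":
    root = Tk()
    root.geometry("800x550")
    CRYPTOGRAPHY(root)
    root.mainloop()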
| 26.785714
| 118
| 0.64
|
70eae1f0361f17475bab25635eafe007827087fd
| 42,683
|
py
|
Python
|
Chapter10/zwQTBox.py
|
allen-zqh/plotly
|
bcaf0930901e77db07245b63bff049eb75893416
|
[
"MIT"
] | null | null | null |
Chapter10/zwQTBox.py
|
allen-zqh/plotly
|
bcaf0930901e77db07245b63bff049eb75893416
|
[
"MIT"
] | null | null | null |
Chapter10/zwQTBox.py
|
allen-zqh/plotly
|
bcaf0930901e77db07245b63bff049eb75893416
|
[
"MIT"
] | 1
|
2021-02-04T06:56:18.000Z
|
2021-02-04T06:56:18.000Z
|
# -*- coding: utf-8 -*-
'''
zwQuantToolBox 2016
zw open-source quantitative toolbox software series
http://www.ziwang.com, Python quantitative trading resources
File: zwQTBox.py
Usage: import zwQTBox as zwx
A collection of commonly used zwQuant quantitative utility functions
'''
import sys, os
import numpy as np
import tushare as ts
import pandas as pd
import pandas_datareader.data as web
from numba import *
import csv
import pickle
from datetime import *
from dateutil.parser import parse
from dateutil import rrule
import datetime as dt
import zwSys as zw #::zwQT
import zwTools as zwt
# -------------
# -------------xtick tick分笔数据下载
def xtick_down_init(qx, finx):
'''
Read the stock-code list file finx into qx.stkCodeLib,
and initialise the related time parameters from the preset dates.
[Input]
qx.xdayNum, when 0, use the explicit start/end download dates below
qx.xday0k, download start date; an empty string means the default start date: 2005-01-01
qx.xday9k, download end date; an empty string means today
dates are strings in 'yyyy-mm-dd' format
qx.xdayNum, when > 0, use a fixed look-back window, usually for appending data
# in append mode there is no need to set start/end dates,
e.g. qx.xdayNum=2  # download the 2 days before today; note this is a calendar interval, not trading days
'''
# ---finx
qx.fn_stkCode = finx
print(finx)
qx.stkCodeLib = pd.read_csv(finx, encoding='gbk')
qx.codeNum = len(qx.stkCodeLib['code'])
# ---xtim0 qx.xday0k,qx.xday9k='2010-01-01','' qx.xday0k,qx.xday9k='',''
if qx.xdayNum > 0:
qx.DTxtim9 = dt.datetime.now()
qx.DTxtim0 = qx.DTxtim9 - dt.timedelta(days=qx.xdayNum)
else:
if qx.xday9k == '':
qx.DTxtim9 = dt.datetime.now()
else:
qx.DTxtim9 = parse(qx.xday9k)
if qx.xday0k == '':
qx.DTxtim0 = dt.datetime.now()
else:
qx.DTxtim0 = parse(qx.xday0k)
#
# qx.DTxtim9=zwt.iff2(qx.xday9k=='',dt.datetime.now(),parse(qx.xday9k))
# qx.DTxtim0=zwt.iff2(qx.xday0k=='',qx.DTxtim9+dt.timedelta(days=-2) ,parse(qx.xday0k))
#
qx.xdayNum = rrule.rrule(rrule.DAILY, dtstart=qx.DTxtim0, until=qx.DTxtim9).count()
#
qx.xtim0Sgn, qx.xtim9Sgn = qx.DTxtim0.strftime('%Y-%m-%d'), qx.DTxtim9.strftime('%Y-%m-%d')
print('\n@nday', qx.xdayNum, qx.xtim0Sgn, '@', qx.xtim9Sgn) # nday=13
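# --- Hedged illustration (added; not part of the original file). ---
# Shows the inclusive calendar-day count used in xtick_down_init above on made-up dates;
# parse and rrule come from the module-level imports of this file.
_d0, _d9 = parse('2016-01-01'), parse('2016-01-13')
_ndays = rrule.rrule(rrule.DAILY, dtstart=_d0, until=_d9).count()  # -> 13, both endpoints counted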
def xtick_down100(qx, ftg):
'''
For the given date, stock code and target file name ftg,
download that stock's tick data for that date and save it to ftg.
[Input]
qx.code, stock code
qx.xtimSgn, current date string
ftg, file name used to save the tick data
'''
df, dn = [], 0
try:
df = ts.get_tick_data(qx.code, date=qx.xtimSgn) # print(df.head())
except IOError:
pass # skip,error
datFlag, dn = False, len(df)
    print(' n', dn, ftg)  # skip dates with no data
# if zwt.xin(dn,0,9):print('n2',dn,ftg)
if dn > 10:
df['type'] = df['type'].str.replace(u'中性盘', 'norm')
df['type'] = df['type'].str.replace(u'买盘', 'buy')
df['type'] = df['type'].str.replace(u'卖盘', 'sell')
df.to_csv(ftg, index=False, encoding='utf')
datFlag = True
#
return datFlag, dn
def xtick_down8tim_codes(qx):
'''
Download the historical tick data, for the given date, of every code in stkCodeLib,
and convert it into the matching intraday bars: 5/15/30/60 minutes.
Data files are saved under the data directory \zwdat\tick\yyyy-mm\,
where yyyy is the year and mm the month.
At run time the data file name ftg is built from the date and stock code.
[Input]
qx.xtimSgn, current date string
qx.stkCodeLib, pandas table containing all stock codes
'''
# qx.xday0ChkFlag=False self.codeInx0k=-1
# inx0,qx.codeNum=qx.codeInx,len(dinx['code'])
numNil = 0
for i, xc in enumerate(qx.stkCodeLib['code']):
code = "%06d" % xc # print("\n",i,"/",qx.codeNum,"code,",code)
qx.code, qx.codeCnt = code, i
# ---
ftg = '%s%s_%s.csv' % (qx.rtickTimMon, code, qx.xtimSgn)
xfg = os.path.exists(ftg)
if xfg:
numNil = 0
else:
if numNil < 90:
datFlag, dfNum = xtick_down100(qx, ftg)
numNil = zwt.iff2(datFlag, 0, numNil + 1)
if dfNum == 3: numNil += 10
#
print(xfg, datFlag, qx.codeCnt, "/", qx.codeNum, ftg, numNil)
#
if numNil > 90: break
# if i>3:break
def xtick_down8tim_all(qx, finx):
'''
Download all historical tick data for every stock code, looping over the dates.
Data files are saved under the data directory \zwdat\tick\yyyy-mm\,
where yyyy is the year and mm the month.
[Input]
finx, stock-code list file
'''
xtick_down_init(qx, finx)
# qx.xday0ChkFlag=False
print('r', qx.rdat, qx.rtickTim)
# self.rtickTimMon=self.rtickTim+'2010-01\\' # \zwDat\ticktim\ 2012-01\
for tc in range(qx.xdayNum):
qx.DTxtim = qx.DTxtim0 + dt.timedelta(days=tc)
qx.xdayInx, qx.xtimSgn = tc, qx.DTxtim.strftime('%Y-%m-%d')
#
rmon0 = qx.DTxtim.strftime('%Y-%m')
qx.rtickTimMon = '%s%s\\' % (qx.rtickTim, rmon0)
xfg = os.path.exists(qx.rtickTimMon)
if not xfg:
os.mkdir(qx.rtickTimMon)
#
print('\n', xfg, qx.xdayInx, '/', qx.xdayNum, qx.xtimSgn)
#
xtick_down8tim_codes(qx)
# ----xtick2tim.xxx  convert tick data into intraday bar data
def xtick2tim_code010(qx):
'''
For the given stock code,
convert all of its historical tick data
into the matching intraday bars: 5/15/30/60 minutes.
Data files are saved under the data directory \zwdat\min\
[Input]
qx.code, stock code
qx.min_ksgns, list of bar intervals, normally [5,15,30,60]; can be customised
self.xtickAppendFlag=False
'''
# xtick_setTimeDat(qx)
for kss in qx.min_ksgns:
        qx.min_knum, qx.min_ksgnWrk = int(kss), 'M' + kss
        qx.datMin[qx.min_ksgnWrk] = pd.DataFrame(columns=zw.qxMinName)
    # default=False, tick append-mode flag; if True, force-convert every tick file into bar data
if qx.xtickAppFlag:
fss = zw._rdatTick + qx.code + '\\' + qx.xtim + '.csv'
xfg = os.path.exists(fss)
if xfg: qx.datMin[ksgn] = pd.read_csv(fss, index_col=False)
#
flst = os.listdir(zw._rdatTick + qx.code + '\\')
qx.codeCnt, qx.codeNum = 0, len(flst)
for fs0 in flst:
qx.codeCnt += 1
nday = qx.codeNum - qx.codeCnt
if (not qx.xtickAppFlag) or (nday < qx.xtickAppNDay):
qx.xtim = fs0.split('.')[0]
xtick2tim100(qx)
print(qx.codeCnt, "/", qx.codeNum, qx.xtim, nday, qx.xtickAppNDay, qx.xtickAppFlag, '@', qx.code)
#
# ---------wr.code分时数据
xtick2minWr(qx, zw._rdatTick)
# ---------
def xtick2minWr(qx, rsk):
'''
Save all intraday bar data to file.
Duplicates are removed automatically.
Data directory: \zwdat\min\
Output goes into the interval sub-directories under min, already converted to 5/15/30/60-minute bars
'''
print(qx.min_ksgns)
for ksgn0 in qx.min_ksgns:
sgnMin = 'M' + ksgn0
xdf = qx.datMin[sgnMin]
xdf.drop_duplicates(subset='time', keep='last', inplace=True)
xdf = np.round(xdf, 2)
xdf = xdf.sort_values(by=['time'], ascending=False)
# fss=zw._rdatMin+sgnMin+'\\'+qx.code+'.csv' # print(fss)
fss = rsk + sgnMin + '\\' + qx.code + '.csv'
print(fss)
if len(xdf) > 3:
xdf.to_csv(fss, columns=zw.qxMinName, index=False, encoding='utf')
qx.datMin[sgnMin] = xdf
def xtick2minsub(df):
'''
Tick-data aggregation helper:
summarise one slice of ticks cut according to qx.minType,
converting tick data into intraday bars: 5/15/30/60 minutes.
Input
df, tick data sliced according to qx.minType
Output
ds, the aggregated data; note that the format is pd.Series
'''
ds = pd.Series(index=zw.qxMinName)
x9 = df.iloc[-1]
ds['open'] = x9['price']
x0 = df.iloc[0]
ds['close'] = x0['price']
#
ds['high'], ds['low'] = np.max(df['price']), np.min(df['price'])
ds['volume'], ds['amount'] = np.sum(df['volume']), np.sum(df['amount'])
#
xlst = ['norm', 'buy', 'sell']
for xsgn in xlst:
df2 = df[df['type'] == xsgn]
if len(df2) > 0:
ds['vol_' + xsgn], ds['amo_' + xsgn] = np.sum(df2['volume']), np.sum(df2['amount'])
else:
ds['vol_' + xsgn], ds['amo_' + xsgn] = 0, 0
#
return ds
def xtick2min010(qx):
'''
Convert the downloaded tick data into intraday bars: 5/15/30/60 minutes,
and append them to the matching bar-data table.
Note: qx.xtimTick0 and qx.xtimTick9 are preset times, defined and initialised in the zwDatX class.
Input
qx.min_ksgnWrk
qx.min_knum
'''
wrkDTim0, dt9 = parse(qx.xtimTick0), parse(qx.xtimTick9)
xt = dt9 - wrkDTim0
numMin = xt.total_seconds() / 60
xn9 = int(numMin / qx.min_knum) + 1 # print(wrkDTim0,xn9) #xn9=7
for tc in range(xn9):
wrkDTim9 = wrkDTim0 + dt.timedelta(minutes=qx.min_knum)
strTim0, strTim9 = wrkDTim0.strftime('%H:%M:%S'), wrkDTim9.strftime('%H:%M:%S')
# ---cut tick.dat by tim
df = qx.datTick # print(df.head())
df2 = df[df['time'] < strTim9]
df3 = df2[df2['time'] >= strTim0]
if len(df3) > 0:
            # -----aggregate the ticks into a 5/15/30/60-minute bar
ds = xtick2minsub(df3)
ds['time'] = qx.xtimSgn + ' ' + strTim0
qx.datMin[qx.min_ksgnWrk] = qx.datMin[qx.min_ksgnWrk].append(ds.T, ignore_index=True)
# ----ok,#tc
wrkDTim0 = wrkDTim9
def xtick2tim100(qx, fdat):
'''
Read the tick data from the given file fdat and convert it into the matching intraday bars: 5/15/30/60 minutes.
[Input]
fdat, tick data file name
'''
xfg = os.path.exists(fdat) # print('x100',xfg,fdat)
if xfg:
qx.datTick = pd.read_csv(fdat, index_col=False)
if len(qx.datTick) > 10:
for kss in qx.min_ksgns: # qx.min_ksgns=['M05','M15','M30','M60']
qx.min_knum, qx.min_ksgnWrk, ksgn = int(kss), 'M' + kss, 'M' + kss
xtick2min010(qx)
def xtick2tim_nday(qx):
'''
Convert the tick data of the configured time window into intraday bar data.
'''
for tc in range(qx.xdayNum):
qx.DTxtim = qx.DTxtim0 + dt.timedelta(days=tc)
qx.xdayInx, qx.xtimSgn = tc, qx.DTxtim.strftime('%Y-%m-%d')
#
rmon0 = qx.DTxtim.strftime('%Y-%m')
qx.rtickTimMon = '%s%s\\' % (qx.rtickTim, rmon0)
fdat = '%s%s_%s.csv' % (qx.rtickTimMon, qx.code, qx.xtimSgn)
#
print(qx.xdayInx, '/', qx.xdayNum, qx.xtimSgn, fdat)
xtick2tim100(qx, fdat)
def xtick2tim_code100(qx):
'''
Using the bar intervals preset in qx.min_ksgns,
together with the given stock code and time-window parameters,
convert the matching tick data into intraday bars and save them to file.
[Input]
qx.code, stock code
qx.min_ksgns, list of bar intervals, normally [5,15,30,60]; can be customised
[Output]
Bar data is saved under the directory:
\zwdat\min\Mxx\
'''
for kss in qx.min_ksgns:
qx.min_knum, qx.min_ksgnWrk, ksgn = int(kss), 'M' + kss, 'M' + kss
qx.rminWrk = '%s\\%s\\' % (qx.rmin0k, qx.min_ksgnWrk)
if not os.path.exists(qx.rminWrk): os.mkdir(qx.rminWrk)
#
qx.datMin[ksgn] = pd.DataFrame(columns=zw.qxMinName)
fss = '%s%s.csv' % (qx.rminWrk, qx.code) # print('@fss',fss,len(qx.datMin[ksgn]))
xfg = os.path.exists(fss) # print(xfg,'@f100',fss,len(qx.datMin[ksgn]))
if xfg:
qx.datMin[ksgn] = pd.read_csv(fss, index_col=False)
print('\n@fss', fss, len(qx.datMin[ksgn]))
#
xtick2tim_nday(qx)
xtick2minWr(qx, qx.rmin0k)
def xtick2tim_allcode(qx):
'''
Convert the tick data of every stock code into intraday bar data.
Input:
qx.stkCodeLib, stock-code list table,
qx.min_ksgns, list of bar intervals, normally [5,15,30,60]; can be customised
Output
\zwdat\min\
Output goes into the interval sub-directories of the tick directory, already converted to 5/15/30/60-minute bars.
This is the latest real-time tick data of the day and overwrites older data automatically.
'''
for i, xc in enumerate(qx.stkCodeLib['code']):
code = "%06d" % xc # print("\n",i,"/",qx.codeNum,"code,",code)
qx.code, qx.codeCnt = code, i
print(qx.codeCnt, "/", qx.codeNum, qx.rtickTimMon, code, qx.xtimSgn)
#
xtick2tim_code100(qx)
# ---------------xtick.real.xxx
def xtick_real_downsub(xcod):
''' China A-share tick data download helper (historical or real-time).
The Chinese 'type' labels are replaced with English ones automatically: 中性盘 -> norm; 买盘 -> buy; 卖盘 -> sell.
[Input]
xcod, stock code
xtim, date string; when xtim is empty, today's real-time tick data is downloaded
[Output]
df, the stock's tick data
column layout:
time,price,change,volume,amount,type
'''
xd = ts.get_today_ticks(xcod)
    dn = len(xd)  # print('n',dn)  # skip dates with no data
if dn > 10:
xd['type'] = xd['type'].str.replace(u'中性盘', 'norm')
xd['type'] = xd['type'].str.replace(u'买盘', 'buy')
xd['type'] = xd['type'].str.replace(u'卖盘', 'sell')
# xd.to_csv('tmp\\'+xcod+'_'+xtim+'.csv',index=False,encoding='utf')
else:
xd = []
#
return xd
def xtick_real_down_all(qx, finx):
'''
Download today's real-time tick data and convert it into intraday bars automatically.
Input:
finx, stock index file; each stock usually takes 2-3 minutes to download,
so for high-frequency use keep the per-machine code list small and split it across several machines.
qx.min_ksgns, bar-interval parameters, e.g. ['20','60']
Output
\zwdat\tickreal\  output directory
\zwdat\tickreal\tick\  raw tick data
\zwdat\tickreal\Mxx\  tick data converted into intraday bars
Output goes into the matching tick directories, already converted into bar data.
Today's latest real-time tick data automatically overwrites older data.
'''
# qx.min_ksgns=['05','15','30','60']
rdat = zw._rdatTickReal
dinx = pd.read_csv(finx, encoding='gbk')
print('finx', finx)
i, xn9 = 0, len(dinx['code'])
for xc in dinx['code']:
i += 1
code = "%06d" % xc
qx.codeCnt, qx.code = i, code
print("\n", i, "/", xn9, "code,", code)
# ---
df = xtick_real_downsub(code)
if len(df) > 10:
fss = rdat + 'tick\\' + qx.code + '.csv'
print('\n', fss)
df.to_csv(fss, index=False, encoding='utf')
qx.datTick = df
            # ---------- convert the tick data into 05/15/30/60-minute bars
for kss in qx.min_ksgns: # qx.min_ksgns=['M05','M15','M30','M60']
qx.min_knum, qx.min_ksgnWrk, ksgn = int(kss), 'M' + kss, 'M' + kss
qx.rminWrk = '%s\\%s\\' % (qx.rmin0k, qx.min_ksgnWrk)
if not os.path.exists(qx.rminWrk): os.mkdir(qx.rminWrk)
#
# sgnMin='M'+ksgn0 # qx.minType=int(ksgn0) # print('@mt',qx.minType)
qx.datMin[ksgn] = pd.DataFrame(columns=zw.qxMinName)
xtick2min010(qx)
#
xtick2minWr(qx, rdat)
# ----------------down.stk
def down_stk_cn020inx(qx, xtim0):
''' Download market-index data (simplified stock data); history is available back to the 1994 market open.
[Input]
qx.xcod: index code
'''
xcod = qx.code
tim0 = xtim0 # tim0='1994-01-01'
xd = []
rss = qx.rXDay
fss = rss + xcod + '.csv'
# if ((xtyp!='D')and(xtyp!='9') ): tim0=tim0+" 00:00:00"
# -------------------
xfg = os.path.exists(fss)
xd0 = []
if xfg:
xd0 = pd.read_csv(fss, index_col=0, parse_dates=[0], encoding='gbk')
# print(xd0.head())
xd0 = xd0.sort_index(ascending=False)
# tim0=xd0.index[0]
_xt = xd0.index[0] # xt=xd0.index[-1]###
s2 = str(_xt)
tim0 = s2.split(" ")[0]
#
print('\n', xfg, fss, ",", tim0)
# -----------
try:
xd = ts.get_h_data(xcod, start=tim0, index=True, end=None, retry_count=5, pause=1) # Day9
# -------------
if xd is not None:
if (len(xd0) > 0):
xd2 = xd0.append(xd)
# flt.dup
xd2["index"] = xd2.index
xd2.drop_duplicates(subset='index', keep='last', inplace=True)
del (xd2["index"])
# xd2.index=pd.to_datetime(xd2.index)
xd = xd2
xd = xd.sort_index(ascending=False)
xd = np.round(xd, 3)
xd.to_csv(fss, encoding='gbk')
except IOError:
pass # skip,error
return xd
def down_stk_cn010(qx):
''' China A-share data download helper.
[Input]
qx (zwDatX):
xtyp (str): data type; 9 / Day9 is simplified stock data available back to 2001,
all other types are extended data limited to roughly the last 3 years.
D = daily bars, W = weekly, M = monthly; default is D
:ivar xcod (int): stock code
:ivar fss (str): output data file name
'''
xcod, rss, = qx.code, qx.rDay
tim0 = '1994-01-01' # tim0='2012-01-01'
#
fss = rss + xcod + '.csv'
# -------------------
xfg = os.path.exists(fss)
xd0 = []
xd = []
if xfg:
xd0 = pd.read_csv(fss, index_col=0, parse_dates=[0], encoding='gbk')
# print(xd0.head())
xd0 = xd0.sort_index(ascending=False)
# tim0=xd0.index[0]
_xt = xd0.index[0] # xt=xd0.index[-1]###
s2 = str(_xt)
tim0 = s2.split(" ")[0]
print('\n', xfg, fss, ",", tim0)
# -----------
try:
xd = ts.get_h_data(xcod, start=tim0, end=None, retry_count=5, pause=1) # Day9
# xd=ts.get_hist_data(xcod,start=tim0,end=None,retry_count=5,pause=1,ktype=xtyp)
# -------------
if xd is not None:
if (len(xd0) > 0):
xd2 = xd0.append(xd)
# flt.dup
xd2["index"] = xd2.index
xd2.drop_duplicates(subset='index', keep='last', inplace=True)
del (xd2["index"])
# xd2.index=pd.to_datetime(xd2.index)
xd = xd2
xd = xd.sort_index(ascending=False)
xd = np.round(xd, 3)
xd.to_csv(fss, encoding='gbk')
except IOError:
pass # skip,error
return xd
def down_stk_all(qx, finx):
'''
Download, or append to, daily bar data for every code in the finx stock-list file.
Duplicates are removed and rows are sorted automatically.
'''
dinx = pd.read_csv(finx, encoding='gbk')
print(finx)
xn9 = len(dinx['code'])
for i, xc in enumerate(dinx['code']):
code = "%06d" % xc
print("\n", i, "/", xn9, "code,", code)
# ---
qx.code = code
down_stk_cn010(qx)
def down_stk_inx(qx, finx):
dinx = pd.read_csv(finx, encoding='gbk')
print(finx)
xn9 = len(dinx['code'])
for i in range(xn9):
# for xc,xtim0 in dinx['code'],dinx['tim0']:
d5 = dinx.iloc[i]
xc = d5['code']
xtim0 = d5['tim0']
i += 1
code = "%06d" % xc
print("\n", i, "/", xn9, "code,", code, xtim0)
# ---
qx.code = code
down_stk_cn020inx(qx, xtim0)
def down_stk_yahoo010(qx, ftg):
'''
US stock data download helper.
Args:
qx (zwDatX):
ftg, data file name
:ivar xcod (int): stock code
:ivar xdat (pd.DataFrame): Yahoo data for xcod
'''
try:
xcod = qx.code
xdat = web.DataReader(xcod, "yahoo", start="1/1/1900")
xdat.to_csv(ftg)
print(ftg)
except IOError:
pass # skip,error
# --------stk.InxLib.xxx
def stkInxLibRd(qx):
'''
Read the specified market-index data into zw.stkInxLib.
Args:
:
qx.stkInxRDat='\\zwdat\\cn\\xday\\''  # index data source path
qx.stkInxCode='000001'  # index code
qx.stkInxName='sz001'  # index name, pinyin
qx.stkInxCName='上证指数'  # index Chinese name
#
zw.stkInxLib=None  # global in-memory store for the market-index data
'''
if qx.stkInxCode != '':
fss = qx.stkInxRDat + qx.stkInxCode + ".csv"
xfg = os.path.exists(fss)
if xfg:
df10 = pd.read_csv(fss, index_col=0, parse_dates=[0])
df10 = df2zwAdj(df10)
zw.stkInxLib = df10.sort_index()
def stkInxLibSet8XTim(qx, dtim0, dtim9):
''' Slice the market-index data zw.stkInxLib by date range.
Args:
dtim0(str): start date
dtim9(str): end date
:ivar
zw.stkInxLib, market-index data
'''
df10 = zw.stkInxLib
# print(df10.head())
if dtim0 == '':
df20 = df10
else:
df20 = df10[(df10.index >= dtim0) & (df10.index <= dtim9)]
# df20=df10[(df10['date']>=dtim0)&(df10['date']<=dtim9)]
zw.stkInxLib = df20.sort_index()
# --------stk.Lib.xxx
def stkLibRd(xlst, rdir):
'''
Read the specified stock data into stkLib; supports multiple stock codes or a code-list file name.
Args:
xlst (list): list of stock codes;
if the first element starts with '@', it is a stock-code file name rather than a code,
used to import codes in bulk
rdir (str): data directory
:ivar xcod (int): stock code
'''
    zw.stkLib = {}  # global: trading data of the selected stocks
    zw.stkLibCode = []  # global: codes of the selected stocks
#
x0 = xlst[0]
if x0.find('@') == 0:
fss = x0[1:] # print('fss',fss) #fss=_rdatInx+fs0
flst = pd.read_csv(fss, dtype=str, encoding='gbk')
xlst = list(flst['code'])
# print(xlst)
for xcod in xlst:
fss = rdir + xcod + ".csv"
xfg = os.path.exists(fss)
if xfg:
try:
df10 = pd.read_csv(fss, index_col=0, parse_dates=[0])
df10 = df2zwAdj(df10)
            except:
                print('failed to read file:', fss)
                continue
zw.stkLib[xcod] = df10.sort_index()
zw.stkLibCode.append(xcod)
def stkLibPr():
''' Print the loaded stock data.
:ivar xcod (int): stock code
'''
for xcod in zw.stkLibCode:
df10 = zw.stkLib[xcod]
print('\n::code,', xcod)
print(df10.head())
print('\n stk code num', len(zw.stkLibCode))
def stkLibSet8XTim(dtim0, dtim9):
''' Slice the stock data by date range.
Args:
dtim0(str): start date
dtim9(str): end date
:ivar xcod (int): stock code
'''
for xcod in zw.stkLibCode:
df10 = zw.stkLib[xcod]
if dtim0 == '':
df20 = df10
else:
df20 = df10[(df10.index >= dtim0) & (df10.index <= dtim9)]
#
# zw.stkLibCode.append(xcod)
zw.stkLib[xcod] = df20.sort_index()
# print(zw.stkLib[xcod])
# print(df20)
def stkLibSetDVix():
''' Add a 'dvix' column (dprice relative to the previous dprice, in %) to every loaded stock.
:ivar xcod (int): stock code
'''
for xcod in zw.stkLibCode:
df10 = zw.stkLib[xcod]
df10['dvix'] = df10['dprice'] / df10['dprice'].shift(1) * 100
#
zw.stkLib[xcod] = np.round(df10, 2)
# --------stk.Lib.get.xxx
def stkGetVars(qx, ksgn):
'''
Get the value of the given column for the current stock code and time.
Args:
qx (zwQuantX): zwQuantX trading data bundle
ksgn (str): column name; uses qx.stkCode, qx.xtim, qx.stkSgnPrice
'''
d10 = zw.stkLib[qx.stkCode]
d01 = d10[qx.xtim:qx.xtim]
#
dval = 0
if len(d01) > 0:
d02 = d01[ksgn]
dval = d02[0]
return dval
def stkGetPrice(qx, ksgn):
'''
Get the current price.
Args:
qx (zwQuantX): zwQuantX trading data bundle
ksgn (str): price-mode column name
'''
d10 = zw.stkLib[qx.stkCode]
d01 = d10[qx.xtim:qx.xtim]
#
price = 0
if len(d01) > 0:
d02 = d01[ksgn]
price = d02[0]
if pd.isnull(price):
d02 = d01['dprice']
price = d02[0]
return price
def stkGetPrice9x(qx, ksgn):
'''
Get the prices of the first and the last trading day.
Args:
qx (zwQuantX): zwQuantX trading data bundle
ksgn (str): price-mode column name
'''
d10 = zw.stkLib[qx.stkCode]
# d05=d10[qx.stkSgnPrice]
d05 = d10[ksgn]
price0 = d05[0]
price9 = d05[-1]
return price0, price9
def stkLibGetTimX(xcod):
'''
Return the first and the last trading dates of the given stock code.
Args:
xcod (int): stock code
'''
d10 = zw.stkLib[xcod]
d01 = d10.index
xtim0 = d01[0]
xtim9 = d01[-1]
# xtim0s=xtim0.strftime()
return xtim0, xtim9
def stkLibName8Code(xcod):
''' Return the Chinese and the English/pinyin names for the given stock code.
Args:
xcod (int): stock code
'''
d10 = zw.stkCodeTbl[zw.stkCodeTbl['code'] == xcod]
# print(d10)
enam = ''
cnam = ''
if len(d10) > 0:
xc = d10.index[0]
enam = d10.at[xc, 'ename']
cnam = d10.at[xc, 'cname']
# print('c',xc,cnam,enam)
return enam, cnam
# --------stk.xxx
def stkValCalc(qx, xdicts):
''' Compute the total market value of every stock position in xdicts.
Args:
qx (zwQuantX): zwQuantX data bundle
xdicts (list): selected stock holdings (code -> number of shares)
:ivar xcod (int): stock code
'''
dsum9 = 0
for xcod, xnum in xdicts.items():
qx.stkCode = xcod
# price=stkGetPrice(qx,'dprice')
price = stkGetPrice(qx, qx.priceCalc)
dsum = price * xnum
dsum9 = dsum9 + dsum
# print('@c',qx.xtim,price,dsum,dsum9)
return dsum9
# --------xbars.xxx
def xbarPr(bars):
''' Print the bars in the data bundle.
'''
for xd in bars:
xd.prXBar()
print('')
def xbarGet8Tim(xcod, xtim):
''' Get the bar data for the given stock code and date.
Args:
xcod (int): stock code
xtim (str): trading date
'''
d10 = zw.stkLib[xcod]
d02 = d10[xtim:xtim]
return d02
def xbarGet8TimExt(xcod, xtim):
''' Get the bar data for the given stock code and date, together with the full stock table.
Args:
xcod (int): stock code
xtim (str): trading date
'''
d10 = zw.stkLib[xcod]
d02 = d10[xtim:xtim]
return d02, d10
# --------xtrd.xxx
def xtrdObjSet(qx):
''' Build a single trade record.
Args:
qx (zwDatX): zwQuant data bundle
#xtrdName=['date','ID','mode','code','dprice','num','kprice','sum','cash']
'''
b2 = pd.Series(zw.xtrdNil, index=zw.xtrdName)
b2['date'] = qx.xtim
b2['code'] = qx.stkCode
b2['num'] = qx.stkNum
if qx.stkNum != 0:
b2['mode'] = zwt.iff3(qx.stkNum, 0, 'sell', '', 'buy')
b2['dprice'] = stkGetVars(qx, qx.priceWrk)
# kprice=stkGetVars(qx,qx.priceBuy)
kprice = stkGetPrice(qx, qx.priceBuy)
b2['kprice'] = kprice
b2['sum'] = kprice * qx.stkNum
dcash9 = qx.qxUsr['cash']
b2['cash'] = dcash9 - kprice * b2['num']
# print('\nb2\n',b2)
return b2
def xtrdChkFlag(qx):
''' Check whether the requested trade is valid.
Args:
qx (zwQuantX): zwQuantX data bundle
#qx.stkNum: >0 buy; <0 sell; -1 sell the whole position
#preset parameter: qx.qxUsr
Return:
xfg, True for a valid trade, False otherwise
b2: the trade record (Bar) of a valid trade
:ivar xnum (int): total number of shares the user holds
'''
kfg = False
b2 = None
qx.trdNilFlag = False
dcash9 = qx.qxUsr['cash']
dnum = qx.stkNum
dnum5 = abs(dnum)
numFg = zwt.xinEQ(dnum5, qx.stkNum0, qx.stkNum9)
# --------
# b2=xtrdObjSet(qx) #print('::b2\n',b2)
if dnum > 0:
# dsum=b2['sum']
kprice = stkGetVars(qx, qx.priceBuy)
dsum = kprice * dnum
        # The total buy size must stay within the limits (100 shares to 20k lots) and its cost must not exceed cash; change here to allow short selling
if numFg:
if dsum < dcash9:
kfg = True
else:
qx.trdNilFlag = True
else:
if qx.stkCode in qx.qxUsrStk:
# print('@',qx.stkCode,dnum)
xnum = qx.qxUsrStk[qx.stkCode]
if dnum == -1:
qx.stkNum = -xnum
kfg = True
else:
kfg = zwt.iff2(dnum5 <= xnum, True, False)
#
qx.trdNilFlag = not kfg
elif dnum != -1:
qx.trdNilFlag = True
#
if kfg or qx.trdNilFlag:
b2 = xtrdObjSet(qx) # 设置交易节点
else:
qx.stkNum = 0
return kfg, b2
def xtrdChkFlag00(qx):
''' Check whether the requested trade is valid.
Args:
qx (zwQuantX): zwQuantX data bundle
#qx.stkNum: >0 buy; <0 sell; -1 sell the whole position
#preset parameter: qx.qxUsr
Return:
xfg, True for a valid trade, False otherwise
b2: the trade record (Bar) of a valid trade
:ivar xnum (int): total number of shares the user holds
'''
kfg = False
b2 = None
dcash9 = qx.qxUsr['cash']
# --------
# b2=xtrdObjSet(qx) #print('::b2\n',b2)
if qx.stkNum > 0:
# dsum=b2['sum']
kprice = stkGetVars(qx, qx.priceBuy)
dsum = kprice * qx.stkNum
        # The total buy size must stay within the limits (100 shares to 20k lots) and its cost must not exceed cash; change here to allow short selling
kfg = (zwt.xinEQ(qx.stkNum, qx.stkNum0, qx.stkNum9) and (dsum < dcash9))
else:
if qx.stkCode in qx.qxUsrStk:
xnum = qx.qxUsrStk[qx.stkCode]
if (qx.stkNum == -1) or (abs(qx.stkNum) >= xnum):
qx.stkNum = -xnum
kfg = True
elif abs(qx.stkNum) < xnum:
kfg = True
#
if kfg:
b2 = xtrdObjSet(qx) # 设置交易节点
else:
qx.stkNum = 0
return kfg, b2
def xusrStkNum(qx, xcod):
''' Return the number of xcod shares the user currently holds.
Args:
qx (zwQuantX): zwQuantX data bundle
xcod (int): stock code
'''
dnum = 0
if xcod in qx.qxUsrStk:
dnum = qx.qxUsrStk[xcod]
return dnum
def xusrUpdate(qx):
''' Update the user account data.
Args:
qx (zwQuantX): zwQuantX data bundle
'''
qx.qxUsr['date'] = qx.xtim
# dcash=qx.qxUsr['cash']
# qx.qxUsr['cash']=dcash-b2['sum']
stkVal = stkValCalc(qx, qx.qxUsrStk)
qx.qxUsr['stkVal'] = stkVal
dval0 = qx.qxUsr['val']
dval = qx.qxUsr['cash'] + stkVal
qx.qxUsr['val'] = dval
# qx.qxUsr['dret'] = (qx.qxUsr['val'] - dval0) / dval0
# # print('\n::xbarUsr\n',qx.qxUsrStk)
# # print('stkVal',stkVal)
#
# # ---------drawdown.xxx
# if dval > qx.downHigh:
# qx.downHigh = dval
# qx.downLow = dval
# # qx.downHighTime=date.today()
# qx.downHighTime = qx.xtim
# # qx.downHighTime=datetime.dateTime
# else:
# qx.downLow = min(dval, qx.downLow)
# # ----------
# qx.qxUsr['downHigh'] = qx.downHigh
# qx.qxUsr['downLow'] = qx.downLow
# kmax = downKMax(qx.downLow, qx.downHigh)
# qx.downKMax = min(qx.downKMax, kmax)
# qx.qxUsr['downKMax'] = qx.downKMax
# # xday=parse(qx.xtim)-parse(qx.downHighTime)
# nday = rrule.rrule(rrule.DAILY, dtstart=parse(qx.downHighTime), until=parse(qx.xtim)).count()
#
# dmax = max(qx.downMaxDay, nday - 1)
# qx.downMaxDay = dmax
# qx.qxUsr['downDay'] = qx.downMaxDay
def xusr4xtrd(qx, b2):
''' Update the user data qxUsr from a trade record.
Args:
qx (zwQuantX): zwQuantX data bundle
b2 (pd.Series): the trade record (Bar) of a valid trade
:ivar xcod (int): stock code
'''
xcod = b2['code']
if xcod != '':
xfg = xcod in qx.qxUsrStk
# s2=zwBox.xobj2str(b2,zw.xbarName) #print(xfg,'::b2,',s2)
if xfg:
xnum = qx.qxUsrStk[xcod]
xnum2 = xnum + b2['num']
qx.qxUsrStk[xcod] = xnum2
if xnum2 == 0: del (qx.qxUsrStk[xcod])
else:
qx.qxUsrStk[xcod] = b2['num']
qx.qxUsr['cash'] = qx.qxUsr['cash'] - b2['sum']
def xtrdLibAdd(qx):
''' Append the trade to xtrdLib.
Args:
qx (zwQuantX): zwQuantX data bundle
'''
qx.qxIDSet()
# print('qx.qxID',qx.qxID)
qx.xtrdChk['ID'] = qx.qxID
# xbarUsrUpdate(qx,qx.xtrdChk)
xusr4xtrd(qx, qx.xtrdChk) # qx.qxUsr['cash']=qx.qxUsr['cash']-b2['sum']
qx.xtrdLib = qx.xtrdLib.append(qx.xtrdChk.T, ignore_index=True)
def xtrdLibNilAdd(qx):
''' Append the trade to the rejected-trade log xtrdNilLib.
Args:
qx (zwQuantX): zwQuantX data bundle
'''
qx.xtrdChk['ID'] = 'nil'
qx.xtrdNilLib = qx.xtrdNilLib.append(qx.xtrdChk.T, ignore_index=True)
# --zw.ret.xxx
def zwRetTradeCalc(qx, pyqt_mode=False):
''' Compute and print the trade statistics.
Args:
qx (zwQuantX): zwQuantX data bundle
'''
df = qx.xtrdLib
xbar = qx.qxLib.iloc[-1]
xtim9 = xbar['date']
sum9 = -1 * df['sum'].sum()
print('交易总次数:%d' % len(df))
print('交易总盈利:%.2f' % sum9)
qx.result_info.append(['交易总次数', '%d' % len(df)])
# len(df)
qx.result_info.append(['交易总盈利' , '%.2f' % sum9])
# print('交易总支出:%.2f' %sumPut)
# print('交易总收入:%.2f' %sumGet)
# print('交易收益差:%.2f' %sumx)
print('')
# print('盈利交易数:%d' % numAdd)
# print('盈利交易金额:%.2f' % sumAdd)
# print('亏损交易数:%d' % numDec)
# print('亏损交易金额:%.2f' % sumDec)
# print('@t',qx.xtim)
qx.xtim = xtim9
def zwRetPr(qx):
''' Compute and print the return statistics.
Args:
qx (zwQuantX): zwQuantX data bundle
'''
    # --- return metrics
    retAvg = qx.qxLib['dret'].mean()
    retStd = qx.qxLib['dret'].std()
    dsharp = sharpe_rate(qx.qxLib['dret'], qx.rfRate)
    dsharp0 = sharpe_rate(qx.qxLib['dret'], 0)
    dcash = qx.qxUsr['cash']
    dstk = stkValCalc(qx, qx.qxUsrStk)  # a liquidate-at-expiry rule was added, so dstk should be 0 here
dval = dstk + dcash
dret9 = (dval - qx.mbase) / qx.mbase
print('')
print("最终资产价值 Final portfolio value: $%.2f" % dval)
print("最终现金资产价值 Final cash portfolio value: $%.2f" % dcash)
print("最终证券资产价值 Final stock portfolio value: $%.2f" % dstk)
print("累计回报率 Cumulative returns: %.2f %%" % (dret9 * 100))
print("平均日收益率 Average daily return: %.3f %%" % (retAvg * 100))
print("日收益率方差 Std. dev. daily return:%.4f " % (retStd))
print('')
print("夏普比率 Sharpe ratio: %.3f,(%.2f利率)" % (dsharp, qx.rfRate))
print("无风险利率 Risk Free Rate: %.2f" % (qx.rfRate))
print("夏普比率 Sharpe ratio: %.3f,(0利率)" % (dsharp0))
print('')
print("最大回撤率 Max. drawdown: %.4f %%" % (abs(qx.downKMax)))
print("最长回撤时间 Longest drawdown duration:% d" % qx.downMaxDay)
print("回撤时间(最高点位) Time High. drawdown: ", qx.downHighTime)
print("回撤最高点位 High. drawdown: %.3f" % qx.downHigh)
print("回撤最低点位 Low. drawdown: %.3f" % qx.downLow)
print('')
print("时间周期 Date lenght: %d (Day)" % qx.periodNDay)
print("时间周期(交易日) Date lenght(weekday): %d (Day)" % qx.wrkNDay)
print("开始时间 Date begin: %s" % qx.xtim0)
print("结束时间 Date lenght: %s" % qx.xtim9)
print('')
print("项目名称 Project name: %s" % qx.prjName)
print("策略名称 Strategy name: %s" % qx.staName)
print("股票代码列表 Stock list: ", zw.stkLibCode)
print("策略参数变量 staVars[]: ", qx.staVars)
print('')
    if qx.pyqt_mode_flag or qx.plotly_mode_flag:
qx.result_info.append(['最终资产价值' , '$%.2f' % dval])
qx.result_info.append(['最终现金资产价值' , '$%.2f' % dcash])
qx.result_info.append(['最终证券资产价值' , '$%.2f' % dstk])
qx.result_info.append(['累计回报率' , '%.2f' % (dret9 * 100)])
qx.result_info.append(['平均日收益率' , '%.3f' % (retAvg * 100)])
qx.result_info.append(['日收益率方差' , '%.4f' % retStd])
qx.result_info.append(['夏普比率' , '%.3f,(%.2f利率)' % (dsharp, qx.rfRate)])
qx.result_info.append(['无风险利率' , '%.2f' % qx.rfRate])
qx.result_info.append(['夏普比率(无风险)' , '%.3f' % dsharp0])
qx.result_info.append(['最大回撤率' , '%.4f' % abs(qx.downKMax)])
qx.result_info.append(['最长回撤时间' , '%d' % qx.downMaxDay])
qx.result_info.append(['回撤时间(最高点位)' , qx.downHighTime])
qx.result_info.append(['回撤最高点位' , '%.3f' % qx.downHigh])
qx.result_info.append(['回撤最低点位' , '%.3f' % qx.downLow])
qx.result_info.append(['时间周期' , '%d (Day)' % qx.periodNDay])
qx.result_info.append(['时间周期(交易日)' , '%d (Day)' % qx.wrkNDay])
qx.result_info.append(['开始时间' , qx.xtim0])
qx.result_info.append(['结束时间' , qx.xtim9])
qx.result_info.append(['项目名称' , '%s' % qx.prjName])
qx.result_info.append(['策略名称' , '%s' % qx.staName])
# qx.result_info.append(['股票代码列表' , ' '.join(zw.stkLibCode)])
qx.result_info.append(['策略参数变量 staVars[]' , ' '.join([str(i) for i in qx.staVars])])
# qx.result_info.append(['最大回撤率' , '%.4f' % downKMax])
# -------------qx.xxxx
def qxObjSet(xtim, stkVal, dcash, dret):
''' Build a single account record for qxLib.
Args:
xtim (str): trading date
stkVal (int): total stock value
dcash (int): cash
dret (float): return rate
'''
qx10 = pd.Series(zw.qxLibNil, index=zw.qxLibName)
qx10['date'] = xtim
qx10['cash'] = dcash
# stkVal=xbarStkSum(qx10['xBars'],xtim)
# stkVal=0
qx10['stkVal'] = stkVal
qx10['val'] = stkVal + dcash
return qx10
# -------------
def xedit_zwXDat(df):
''' Reformat raw stock data into the extended user data layout.
Args:
df (pd.DataFrame): stock data
'''
df = df.rename(columns={'Open': 'open', 'High': 'high', 'Low': 'low', 'Close': 'close', 'Volume': 'volume'})
df.sort_index(ascending=True, inplace=True)
dx = df['open']
df['xdiff'] = df['high'] - df['low']
df['z-xdiff'] = df['xdiff'] * 1000 / dx
df['z-xdiff'] = df['z-xdiff'].round(0)
df['z-open'] = df['open'] * 1000 / dx.shift(1)
df['z-open'] = df['z-open'].round(0)
df['z-high'] = df['high'] * 1000 / dx
df['z-high'] = df['z-high'].round(0)
df['z-low'] = df['low'] * 1000 / dx
df['z-low'] = df['z-low'].round(0)
df['z-close'] = df['close'] * 1000 / dx
df['z-close'] = df['z-close'].round(0)
    # pd.rolling_mean was removed from pandas; the .rolling(...).mean() form is the equivalent call
    df['ma5'] = df['close'].rolling(window=5).mean()
    df['ma10'] = df['close'].rolling(window=10).mean()
    df['ma20'] = df['close'].rolling(window=20).mean()
    df['ma30'] = df['close'].rolling(window=30).mean()
    df['v-ma5'] = df['volume'].rolling(window=5).mean()
    df['v-ma10'] = df['volume'].rolling(window=10).mean()
    df['v-ma20'] = df['volume'].rolling(window=20).mean()
    df['v-ma30'] = df['volume'].rolling(window=30).mean()
c20 = df.columns # print(c20)
if ('amount' in c20): del (df['amount'])
if ('Adj Close' in c20): del (df['Adj Close'])
df = df.round(decimals=2)
clst = ["open", "high", "low", "close", "volume", "xdiff", "z-open", "z-high", "z-low", "z-close", "z-xdiff", "ma5",
"ma10", "ma20", "ma30", "v-ma5", "v-ma10", "v-ma20", "v-ma30"]
d30 = pd.DataFrame(df, columns=clst)
return d30
# ------------
def df2yhaoo(df0):
''' Convert stock data into the Yahoo column format.
Args:
df0 (pd.DataFrame): stock data
#Date,Open,High,Low,Close,Volume,Adj Close
'''
clst = ["Open", "High", "Low", "Close", "Volume", "Adj Close"]
df2 = pd.DataFrame(columns=clst)
df0 = df0.rename(
columns={'date': 'Date', 'open': 'Open', 'high': 'High', 'low': 'Low', 'close': 'Close', 'volume': 'Volume'})
# df0=df0.round(decimals=2)
df2['Date'] = df0['Date']
df2['Open'] = df0['Open']
df2['High'] = df0['High']
df2['Low'] = df0['Low']
df2['Close'] = df0['Close']
df2['Adj Close'] = df0['Close']
df2['Volume'] = df0['Volume']
df2 = df2.set_index(['Date'])
return df2
def df2cnstk(df0):
''' Convert stock data into the China A-share column format.
Args:
df0 (pd.DataFrame): stock data
#date,open,high,close,low,volume,amount
'''
clst = ["open", "high", "low", "close", "volume", "amount"]
df2 = pd.DataFrame(columns=clst)
df0 = df0.rename(
columns={'Date': 'date', 'Open': 'open', 'High': 'high', 'Low': 'low', 'Close': 'close', 'Volume': 'volume'})
# df0=df0.round(decimals=2)
df2['date'] = df0['date']
df2['open'] = df0['open']
df2['high'] = df0['high']
df2['low'] = df0['low']
df2['close'] = df0['close']
df2['volume'] = df0['volume']
df2 = df2.set_index(['date'])
return df2
def df2zw(df0):
''' Convert stock data into the zw column format.
Args:
df0 (pd.DataFrame): stock data
'''
clst = ["open", "high", "low", "close", "volume"]
df2 = pd.DataFrame(columns=clst)
df0 = df0.rename(
columns={'Date': 'date', 'Open': 'open', 'High': 'high', 'Low': 'low', 'Close': 'close', 'Volume': 'volume'})
# df0=df0.round(decimals=2)
df2['date'] = df0['date']
df2['open'] = df0['open']
df2['high'] = df0['high']
df2['low'] = df0['low']
df2['close'] = df0['close']
df2['volume'] = df0['volume']
df2 = df2.set_index(['date'])
return df2
def df2zwAdj(df0):
''' Convert stock data into the extended zw format, with an 'adj close' column.
Args:
df0 (pd.DataFrame): stock data
'''
df0 = df0.rename(
columns={'Date': 'date', 'Open': 'open', 'High': 'high', 'Low': 'low', 'Close': 'close', 'Volume': 'volume',
"Adj Close": "adj close"})
ksgn = 'adj close'
if ksgn not in df0.columns:
df0[ksgn] = df0['close']
return df0
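# --- Hedged usage sketch (added for illustration; not part of the original file). ---
# A made-up one-row Yahoo-style frame run through df2zwAdj; the numbers are arbitrary.
_yh = pd.DataFrame({'Open': [10.0], 'High': [10.5], 'Low': [9.8], 'Close': [10.2], 'Volume': [1000]})
_zwdf = df2zwAdj(_yh)  # column names become lower case and 'adj close' is copied from 'close'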
# def df2zwAdj(df0):
# ''' 股票数据格式转换,转换为 zw 增强版格式,带 adj close
#
# Args:
# df0 (pd.DataFrame): 股票数据
# '''
#
# clst = ["open", "high", "low", "close", "volume", "adj close"]
# df2 = pd.DataFrame(columns=clst)
# df0 = df0.rename(
# columns={'Date': 'date', 'Open': 'open', 'High': 'high', 'Low': 'low', 'Close': 'close', 'Volume': 'volume',
# "Adj Close": "adj close"})
# # df0=df0.round(decimals=2)
#
# df0['date'] = df0.index
# df2['date'] = df0['date']
# for col_name in clst:
# if col_name in df0.columns:
# df2[col_name] = df0[col_name]
#
# # df2['open'] = df0['open']
# # df2['high'] = df0['high']
# # df2['low'] = df0['low']
# # df2['close'] = df0['close']
# # df2['volume'] = df0['volume']
# # 'adj close'
# ksgn = 'adj close'
# if ksgn in df0.columns:
# df2[ksgn] = df0[ksgn]
# else:
# df2[ksgn] = df0['close']
#
# # ----index
# df2 = df2.set_index(['date'])
#
# return df2
# -----OHLC
def stk_col_renLow(dat):
''' Convert the stock data column names to lower case.
Args:
dat (pd.DataFrame): stock data
'''
dat = dat.rename(columns={'Open': 'open', 'High': 'high', 'Low': 'low', 'Close': 'close', 'Volume': 'volume',
"Adj Close": "adj close"})
return dat
def stk_copy_OHLC(dat0):
''' Copy the OHLC columns of the stock data.
Args:
dat0 (pd.DataFrame): stock data
'''
df0 = dat0
df0 = stk_col_renLow(df0)
df2 = pd.DataFrame(columns=['open', 'close', 'high', 'low'])
df2['open'] = df0['open']
df2['close'] = df0['close']
df2['high'] = df0['high']
df2['low'] = df0['low']
df2.index = df0.index
return df2
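# --- Hedged usage sketch (added for illustration; not part of the original file). ---
# Arbitrary one-row frame; stk_copy_OHLC keeps only the OHLC columns, lower-cased.
_raw = pd.DataFrame({'Open': [1.0], 'High': [1.2], 'Low': [0.9], 'Close': [1.1], 'Volume': [10]})
_ohlc = stk_copy_OHLC(_raw)  # -> columns ['open', 'close', 'high', 'low']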
# ---------------- qt.misc.xxx
def downKMax(dlow, dhigh):
'''
downKMax(dlow,dhigh):
Drawdown calculation helper.
The low and the high refer to the portfolio's total market value.
[Input]:
dlow, the current low, also called the lowerWatermark
dhigh, the current high, also called the highWatermark
[Output]
drawdown, as a percentage
'''
if dhigh > 0:
kmax = (dlow - dhigh) / float(dhigh) * 100
else:
kmax = 0
return kmax
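# --- Hedged illustration (added; not part of the original file). ---
# With a high-water mark of 100 and a low of 80 the drawdown is (80 - 100) / 100 * 100 = -20.0 %.
assert round(downKMax(80.0, 100.0), 6) == -20.0
assert downKMax(95.0, 0.0) == 0  # guarded against a zero high-water mark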
def sharpe_rate(ser_return, rfRate, ntim=252, log_flag=False):
'''
sharpe_rate(ser_return,rfRate,ntim=252):
Compute the Sharpe ratio.
[Input]
ser_return (Series): return series (daily, hourly, etc., matching ntim)
rfRate (float): risk-free rate
ntim (int): number of trading periods per year (counted in days, hours, ...)
for hourly (60-minute) bars, ntim = 252 * 6.5 = 1638.
[Output]
the Sharpe ratio value
'''
    # If the returns are not log returns, convert them to log returns.
    if not log_flag:
        ser_return = np.log(ser_return + 1)
    ser_return.dropna(inplace=True)
    rstd = ser_return.std()
    # If the volatility is 0, ser_return is a constant series and the Sharpe ratio is undefined.
if rstd != 0:
rmean = ser_return.mean()
avgExRet = rmean - rfRate / ntim
dsharp = avgExRet / rstd
rsharp = dsharp * np.sqrt(ntim)
else:
rsharp = None
return rsharp
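# --- Hedged usage sketch (added for illustration; not part of the original file). ---
# A tiny made-up daily-return series; with only three observations the value is not
# meaningful, it just shows the call signature (annualised with the default ntim=252).
_rets = pd.Series([0.01, -0.005, 0.007])
_sr = sharpe_rate(_rets, 0.05)  # 5% annual risk-free rate
_sr0 = sharpe_rate(_rets, 0)    # zero risk-free rate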
# ----------sta.misc
# ----
def sta_dataPre0xtim(qx, xnam0):
''' Strategy setup helper: trim the stkLib data source to the preset date range.
Args:
qx (zwQuantX): zwQuantX data bundle
xnam0 (str): strategy name tag
'''
    # Set the variable parameters of the current strategy
    qx.staName = xnam0
    qx.rfRate = 0.05  # risk-free annual return, usually 0.05 (5%); needed for the Sharpe ratio etc.
    # qx.stkNum9=20000  # per-trade cap, default at most 20000 shares
    #
    # Trim the data source to the configured date range
xt0k = qx.staVars[-2]
xt9k = qx.staVars[-1]
if (xt0k != '') or (xt9k != ''):
# xtim0=parse('9999-01-01')
# xtim9=parse('1000-01-01')
# xtim0=xtim0.strftime('%Y-%m-%d')
# xtim9=xtim9.strftime('%Y-%m-%d')
if xt0k != '':
if qx.xtim0 < xt0k:
qx.xtim0 = xt0k
if xt9k != '':
if qx.xtim9 > xt9k:
qx.xtim9 = xt9k
qx.qxTimSet(qx.xtim0, qx.xtim9)
stkLibSet8XTim(qx.xtim0, qx.xtim9) # print('zw.stkLibCode',zw.stkLibCode)
    # ---stkInx: read the market-index data and trim it to the same date range
# print('finx',qx.stkInxCode)
if qx.stkInxCode != '':
stkInxLibRd(qx)
stkInxLibSet8XTim(qx, qx.xtim0, qx.xtim9)
# ============
    # ---initialise the qxUsr user account variables
qx.qxUsr = qxObjSet(qx.xtim0, 0, qx.money, 0)
def cross_Mod(qx):
''' Moving-average crossover helper: detect upward / downward crossings.
Args:
qx (zwQuantX): zwQuantX data bundle
ksma (str): moving-average column name
Return:
1: above
0: =
-1: below
'''
kma = 'ma_%d' % qx.staVars[0]
xbar = qx.xbarWrk
dma, ma2n = xbar[kma][0], xbar['ma2n'][0]
dp, dp2n = xbar['dprice'][0], xbar['dp2n'][0]
#
kmod = -9
if (dp > dma) and (dp2n < ma2n) and (dp > dp2n):
kmod = 1
# print(' crs',kmod,'xbar\n',xbar)
elif (dp < dma) and (dp2n > ma2n) and (dp < dp2n):
kmod = -1
# print(' crs',kmod,'xbar\n',xbar)
return kmod
| 26.461872
| 120
| 0.528313
|
21f688874268be596a45665b1be6c773586da97e
| 880
|
py
|
Python
|
train_PNet.py
|
huylgia/MTCNN_LPR
|
6b6c338ecce400096472f6bdb22e268d66da257b
|
[
"MIT"
] | 1
|
2022-03-21T10:36:25.000Z
|
2022-03-21T10:36:25.000Z
|
train_PNet.py
|
huylgia/MTCNN-Tensorflow
|
6b6c338ecce400096472f6bdb22e268d66da257b
|
[
"MIT"
] | null | null | null |
train_PNet.py
|
huylgia/MTCNN-Tensorflow
|
6b6c338ecce400096472f6bdb22e268d66da257b
|
[
"MIT"
] | null | null | null |
#coding:utf-8
from train_models.mtcnn_model import P_Net
from train_models.train import train
def train_PNet(base_dir, prefix, end_epoch, display, lr):
"""
train PNet
    :param base_dir: tfrecord path
:param prefix:
:param end_epoch: max epoch for training
:param display:
:param lr: learning rate
:return:
"""
net_factory = P_Net
train(net_factory,prefix, end_epoch, base_dir, display=display, base_lr=lr)
if __name__ == '__main__':
#data path
base_dir = '/content/MTCNN-Tensorflow'
model_name = 'MTCNN'
#model_path = '../data/%s_model/PNet/PNet' % model_name
#with landmark
model_path = '/content/MTCNN-Tensorflow/data/%s_model/PNet_landmark/PNet' % model_name
prefix = model_path
end_epoch = 30
display = 100
lr = 0.001
train_PNet(base_dir, prefix, end_epoch, display, lr)
| 27.5
| 90
| 0.679545
|
a238e63b69c952e850d8305beb5192deb4ca8034
| 7,438
|
py
|
Python
|
rltoolkit/buffer/replay_buffer.py
|
raznem/rlex
|
d24b964d80067becc81d86f6ce87e5be413b7049
|
[
"MIT"
] | null | null | null |
rltoolkit/buffer/replay_buffer.py
|
raznem/rlex
|
d24b964d80067becc81d86f6ce87e5be413b7049
|
[
"MIT"
] | null | null | null |
rltoolkit/buffer/replay_buffer.py
|
raznem/rlex
|
d24b964d80067becc81d86f6ce87e5be413b7049
|
[
"MIT"
] | null | null | null |
import numpy as np
import torch
from rltoolkit.buffer.memory import Memory, MemoryMeta
class MetaReplayBuffer(MemoryMeta):
def __init__(
self,
size: int,
obs_shape,
obs_norm: bool = False,
dtype: torch.dtype = torch.float32,
*args,
**kwargs,
):
super().__init__(*args, **kwargs)
self.size = size
self.obs_shape = obs_shape
self.obs_norm = obs_norm
self.dtype = dtype
self.obs_idx = None
self.ts_idx = None
self.current_len = None
self.reset_idx()
self._obs = np.empty([self.size, self.obs_shape])
        self._obs_idx = np.empty(self.size, dtype=np.int64)
        self._next_obs_idx = np.empty(self.size, dtype=np.int64)
def reset_idx(self):
self.obs_idx = 0
self.ts_idx = 0
self.current_len = 0
@property
def obs(self):
return self._obs[self._obs_idx[: self.current_len]]
@property
def norm_obs(self):
return self.normalize(self.obs)
@property
def next_obs(self):
return self._obs[self._next_obs_idx[: self.current_len]]
@property
def norm_next_obs(self):
return self.normalize(self.next_obs)
def __len__(self):
return self.current_len
def add_obs(self, obs: torch.Tensor):
self._obs[self.obs_idx] = obs.cpu()
obs_idx = self.obs_idx
self.obs_idx = (self.obs_idx + 1) % self.size
return obs_idx
def addition(*args, **kwargs):
pass
def add_timestep(self, obs_idx: int, next_obs_idx: int, *args, **kwargs):
self._obs_idx[self.ts_idx] = obs_idx
self._next_obs_idx[self.ts_idx] = next_obs_idx
self.addition(*args, **kwargs)
if next_obs_idx < self.ts_idx:
self.current_len = self.ts_idx + 1
self.ts_idx = 0
else:
self.ts_idx += 1
self.current_len = max(self.ts_idx, self.current_len)
def normalize(self, obs: torch.tensor) -> torch.tensor:
if self.obs_norm:
return super().normalize(obs)
else:
return obs
def update_obs_mean_std(self):
if len(self.obs) > 10:
self.obs_mean = torch.tensor(self.obs.mean(axis=0), dtype=self.dtype)
self.obs_std = torch.tensor(self.obs.std(axis=0), dtype=self.dtype)
class ReplayBuffer(MetaReplayBuffer):
def __init__(
self, size: int, obs_shape, act_shape, discrete: bool = False, *args, **kwargs
):
super().__init__(size, obs_shape, *args, **kwargs)
self.discrete = discrete
if self.discrete:
            self._actions = np.empty(self.size, dtype=np.int64)
else:
self._actions = np.empty([self.size, act_shape])
self._rewards = np.empty(self.size, dtype=np.float32)
self._done = np.empty(self.size, dtype=np.bool_)
self._end = np.empty(self.size, dtype=np.bool_) # only for episode extraction
if self.obs_std is None and self.obs_mean is None:
self.obs_mean = torch.zeros(obs_shape, device=self.device)
self.obs_std = torch.ones(obs_shape, device=self.device)
@property
def actions(self):
return self._actions[: self.current_len]
@property
def rewards(self):
return self._rewards[: self.current_len]
@property
def done(self):
return self._done[: self.current_len]
@property
def end(self):
return self._end[: self.current_len]
def addition(self, action, rew, done, end):
self._actions[self.ts_idx] = action.cpu()
self._rewards[self.ts_idx] = rew
self._done[self.ts_idx] = done
self._end[self.ts_idx] = end
def __getitem__(self, idx):
assert idx < self.current_len, IndexError
return (
self._obs[self._obs_idx[idx]],
self._obs[self._next_obs_idx[idx]],
self._actions[idx],
self._rewards[idx],
self._done[idx],
)
def add_buffer(self, memory: Memory):
raise DeprecationWarning
obs = memory._obs
new_rollout_idx = memory._new_rollout_idx
i = 0
obs_idx = self.add_obs(obs[i])
timesteps = zip(memory.actions, memory.rewards, memory.done)
for action, rew, done in timesteps:
i += 1
next_idx = self.add_obs(obs[i])
if i in new_rollout_idx:
i += 1
continue
self.add_timestep(obs_idx, next_idx, action, rew, done)
obs_idx = next_idx
def make_obs_memory_tensor(self, obs):
processed = torch.tensor(obs, dtype=self.dtype, device=self.device)
processed = processed.reshape(-1, self.obs_shape)
return processed
def last_end(self, idx):
end = self._end[idx]
while not end:
idx -= 1
if idx < 0:
idx = self.current_len - 1
end = self._end[idx]
return idx
def last_rollout(self):
"""Get elements of last full rollout:
1. Find last end
2. Grab elements till the next end
Returns:
Memory: memory with last rollout only
"""
i = self.last_end(self.ts_idx - 1)
next_end = False
obs = []
next_obs = []
actions = []
rewards = []
dones = []
while not next_end:
new_obs = self.make_obs_memory_tensor(self._obs[self._obs_idx[i]])
new_next_obs = self.make_obs_memory_tensor(self._obs[self._next_obs_idx[i]])
obs.insert(0, new_obs)
next_obs.insert(0, new_next_obs)
actions.insert(0, self._actions[i])
rewards.insert(0, self._rewards[i])
dones.insert(0, self._end[i])
i -= 1
if i < 0:
i = self.current_len - 1
next_end = self._end[i]
memory = Memory()
memory.add_rollout(
obs=obs, actions=actions, action_logprobs=[], rewards=rewards, dones=dones
)
return memory
def sample_batch(self, batch_size=64, device=None):
f"""Sample batch of tensors from buffer
Args:
batch_size (int, optional): batch size. Defaults to { 64 }.
device (torch.device, optional): use GPU/CPU device. Defaults to { None }.
Returns:
list: list of elements from buffer (obs, next_obs, actions, rewards, dones)
"""
idxs = np.random.randint(0, self.__len__(), batch_size)
if self.discrete:
actions_type = torch.long
else:
actions_type = self.dtype
obs = torch.as_tensor(
self._obs[self._obs_idx[idxs]], dtype=self.dtype, device=self.device
)
next_obs = torch.as_tensor(
self._obs[self._next_obs_idx[idxs]], dtype=self.dtype, device=self.device
)
if self.obs_norm:
obs = self.normalize(obs)
next_obs = self.normalize(next_obs)
batch = [
obs,
next_obs,
torch.as_tensor(self._actions[idxs], dtype=actions_type),
torch.as_tensor(self._rewards[idxs], dtype=actions_type),
torch.as_tensor(self._done[idxs], dtype=torch.int8),
]
if device:
batch = [item.to(device) for item in batch]
return batch
| 30.735537
| 88
| 0.579726
|
80827082ab9857a25695d0ca17cc66d42cf60e16
| 183
|
py
|
Python
|
main/admin.py
|
guanta/guanta_deploy
|
387b23990b5a215c2a15174d2774838e28255139
|
[
"Apache-2.0"
] | null | null | null |
main/admin.py
|
guanta/guanta_deploy
|
387b23990b5a215c2a15174d2774838e28255139
|
[
"Apache-2.0"
] | null | null | null |
main/admin.py
|
guanta/guanta_deploy
|
387b23990b5a215c2a15174d2774838e28255139
|
[
"Apache-2.0"
] | null | null | null |
from django.contrib import admin
from main.models import *
# Register your models here.
admin.site.register(subscriber)
admin.site.register(respos)
admin.site.register(banque_info)
| 20.333333
| 32
| 0.808743
|
f7ddb3f96416bab76be2cbbfb4615d03d159b4ac
| 650
|
py
|
Python
|
src/molecule/cookiecutter/scenario/verifier/testinfra/{{cookiecutter.molecule_directory}}/{{cookiecutter.scenario_name}}/{{cookiecutter.verifier_directory}}/conftest.py
|
gardar/molecule
|
756fc33512a294ebbdb1e0de8aac2dabb642609e
|
[
"MIT"
] | 1,599
|
2015-11-18T01:40:26.000Z
|
2018-10-29T16:42:52.000Z
|
src/molecule/cookiecutter/scenario/verifier/testinfra/{{cookiecutter.molecule_directory}}/{{cookiecutter.scenario_name}}/{{cookiecutter.verifier_directory}}/conftest.py
|
gardar/molecule
|
756fc33512a294ebbdb1e0de8aac2dabb642609e
|
[
"MIT"
] | 1,232
|
2015-11-18T16:56:02.000Z
|
2018-10-27T03:51:50.000Z
|
src/molecule/cookiecutter/scenario/verifier/testinfra/{{cookiecutter.molecule_directory}}/{{cookiecutter.scenario_name}}/{{cookiecutter.verifier_directory}}/conftest.py
|
gardar/molecule
|
756fc33512a294ebbdb1e0de8aac2dabb642609e
|
[
"MIT"
] | 290
|
2015-11-19T18:16:41.000Z
|
2018-10-29T18:09:13.000Z
|
"""PyTest Fixtures."""
from __future__ import absolute_import
import os
import pytest
def pytest_runtest_setup(item):
"""Run tests only when under molecule with testinfra installed."""
try:
import testinfra
except ImportError:
pytest.skip("Test requires testinfra", allow_module_level=True)
if "MOLECULE_INVENTORY_FILE" in os.environ:
pytest.testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ["MOLECULE_INVENTORY_FILE"]
).get_hosts("all")
else:
pytest.skip(
"Test should run only from inside molecule.", allow_module_level=True
)
| 28.26087
| 81
| 0.689231
|
0dd5c65f4ca1857d34598710710b400b2ee651a4
| 2,909
|
py
|
Python
|
tests/test_rio_info.py
|
gitoni/rasterio
|
2ef9e4815b05a35352f47298417ba223db2ca7d9
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_rio_info.py
|
gitoni/rasterio
|
2ef9e4815b05a35352f47298417ba223db2ca7d9
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_rio_info.py
|
gitoni/rasterio
|
2ef9e4815b05a35352f47298417ba223db2ca7d9
|
[
"BSD-3-Clause"
] | null | null | null |
import click
from click.testing import CliRunner
import rasterio
from rasterio.rio import cli, info
def test_env():
runner = CliRunner()
result = runner.invoke(info.env, ['--formats'])
assert result.exit_code == 0
assert 'GTiff' in result.output
def test_info_err():
runner = CliRunner()
result = runner.invoke(
info.info,
['tests'])
assert result.exit_code == 1
def test_info():
runner = CliRunner()
result = runner.invoke(
info.info,
['tests/data/RGB.byte.tif'])
assert result.exit_code == 0
assert '"count": 3' in result.output
def test_info_verbose():
runner = CliRunner()
result = runner.invoke(
cli.cli,
['-v', 'info', 'tests/data/RGB.byte.tif'])
assert result.exit_code == 0
def test_info_quiet():
runner = CliRunner()
result = runner.invoke(
cli.cli,
['-q', 'info', 'tests/data/RGB.byte.tif'])
assert result.exit_code == 0
def test_info_count():
runner = CliRunner()
result = runner.invoke(
info.info,
['tests/data/RGB.byte.tif', '--count'])
assert result.exit_code == 0
assert result.output == '3\n'
def test_info_nodatavals():
runner = CliRunner()
result = runner.invoke(
info.info,
['tests/data/RGB.byte.tif', '--bounds'])
assert result.exit_code == 0
assert result.output == '101985.0 2611485.0 339315.0 2826915.0\n'
def test_info_tags():
runner = CliRunner()
result = runner.invoke(
info.info,
['tests/data/RGB.byte.tif', '--tags'])
assert result.exit_code == 0
assert result.output == '{"AREA_OR_POINT": "Area"}\n'
def test_info_res():
runner = CliRunner()
result = runner.invoke(
info.info,
['tests/data/RGB.byte.tif', '--res'])
assert result.exit_code == 0
assert result.output.startswith('300.037')
def test_info_lnglat():
runner = CliRunner()
result = runner.invoke(
info.info,
['tests/data/RGB.byte.tif', '--lnglat'])
assert result.exit_code == 0
assert result.output.startswith('-77.757')
def test_mo_info():
runner = CliRunner()
result = runner.invoke(info.info, ['tests/data/RGB.byte.tif'])
assert result.exit_code == 0
assert '"res": [300.037' in result.output
assert '"lnglat": [-77.757' in result.output
def test_info_stats():
runner = CliRunner()
result = runner.invoke(info.info, ['tests/data/RGB.byte.tif', '--tell-me-more'])
assert result.exit_code == 0
assert '"max": 255.0' in result.output
assert '"min": 1.0' in result.output
assert '"mean": 44.4344' in result.output
def test_info_stats_only():
runner = CliRunner()
result = runner.invoke(info.info, ['tests/data/RGB.byte.tif', '--stats', '--bidx', '2'])
assert result.exit_code == 0
assert result.output.startswith('1.000000 255.000000 66.02')
| 25.077586
| 92
| 0.622894
|
f70d988d5ed014b8b9c619af3b5473012ca3b7cd
| 2,041
|
py
|
Python
|
tokenswap/app/__init__.py
|
trinity-project/trinity
|
081eba1d4294a3bed33ba18c3f7b862b8803ee22
|
[
"MIT"
] | 60
|
2018-01-12T07:33:15.000Z
|
2021-12-28T23:06:28.000Z
|
tokenswap/app/__init__.py
|
trinity-project/trinity
|
081eba1d4294a3bed33ba18c3f7b862b8803ee22
|
[
"MIT"
] | 13
|
2018-01-23T00:14:35.000Z
|
2020-04-23T00:03:31.000Z
|
tokenswap/app/__init__.py
|
trinity-project/trinity
|
081eba1d4294a3bed33ba18c3f7b862b8803ee22
|
[
"MIT"
] | 13
|
2018-01-05T07:27:29.000Z
|
2021-01-06T16:45:05.000Z
|
# -*- coding: utf-8 -*-
__author__ = 'xu'
import os
from functools import wraps
from flask import Flask, Blueprint, jsonify
from flask_peewee.db import Database
from flask_peewee.auth import Auth
from flask_debugtoolbar import DebugToolbarExtension
from flask_mail import Mail
from flask_login import LoginManager
from flask_restful import Api
app = Flask(__name__)
APP_ENV = 'dev'
if not os.environ.has_key('APP_ENV') or os.environ['APP_ENV'].lower() == 'dev':
print 'Running on Dev Env:'
app.config.from_object('config')
elif os.environ['APP_ENV'].lower() in ('prod', 'test'):
print 'Running on %s Env:' %os.environ['APP_ENV'].upper()
app.config.from_object('config')
app.config.from_object('config_' + os.environ['APP_ENV'].lower())
APP_ENV = os.environ['APP_ENV'].lower()
else:
print 'Wrong Env!'
exit(1)
app.config["APP_ENV"] = APP_ENV
if not os.environ.has_key("VERIFY_HEADER_NAME") or not os.environ.has_key("VERIFY_PASSWORD") or not os.environ.has_key("VERIFY_HASHED"):
print 'Wrong Env!'
exit(1)
app.config["API_VERIFY"] = {
"verify_header": os.environ['VERIFY_HEADER_NAME'],
"password": os.environ["VERIFY_PASSWORD"],
"hashed": os.environ["VERIFY_HASHED"].replace("*", "$")
}
# print app.config["API_VERIFY"]
#db
db = Database(app)
auth = Auth(app, db)
toolbar = DebugToolbarExtension(app)
mail = Mail(app)
import models
import utils
utils.create_tables()
import views
# from api import ApiReset, ApiRegister, ApiLogin
# api_bp = Blueprint('api', __name__, url_prefix="/api")
# api = Api(api_bp, default_mediatype='application/json')
# resource_class_kwargs = {"models": models, "utils": utils}
# api.add_resource(
# ApiLogin,
# '/v1.0/login',
# resource_class_kwargs=resource_class_kwargs
# )
# api.add_resource(
# ApiRegister,
# '/v1.0/register',
# resource_class_kwargs=resource_class_kwargs
# )
# api.add_resource(
# ApiReset,
# '/v1.0/reset',
# resource_class_kwargs=resource_class_kwargs
# )
# app.register_blueprint(api_bp)
| 27.213333
| 136
| 0.709456
|
58b52df64dd5c46b6897e60431c0cec137c74b7c
| 8,115
|
py
|
Python
|
main.py
|
MrTeale/Tanda_Challenge
|
8f0d2117721028a6eef529b38486790eaa339fbb
|
[
"MIT"
] | null | null | null |
main.py
|
MrTeale/Tanda_Challenge
|
8f0d2117721028a6eef529b38486790eaa339fbb
|
[
"MIT"
] | null | null | null |
main.py
|
MrTeale/Tanda_Challenge
|
8f0d2117721028a6eef529b38486790eaa339fbb
|
[
"MIT"
] | null | null | null |
import pandas as pd
from collections import Counter
from pprint import pprint
import argparse
# Variables
settings_default_path = './Tanda_Settings_Info.csv'
organisation_settings_path = './Tanda_Organisation_Settings.csv'
USERS_CHANGE_X_DEFAULT = 50
def load_csv(filepath):
'''Loads CSV files containing data
Args:
filepath (string): location of file
Returns:
df (pandas.DataFrame): dataframe containing the csv files data
'''
with open(filepath, 'r') as file:
return pd.read_csv(file)
def type_switch(setting_type, value):
types = {
'Boolean': lambda value: True if 'True' == value or 'true' == value else False,
'Numeric': lambda value: float(value),
'Enumerative': lambda value: str(value)
}
return types[setting_type](value)
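# --- Hedged illustration (added; not part of the original file, kept as comments so the
# script's behaviour is unchanged). type_switch maps a setting's declared data_type onto
# a Python value:
#   type_switch('Boolean', 'true')  -> True   (only the strings 'True'/'true' map to True)
#   type_switch('Numeric', '3.5')   -> 3.5
#   type_switch('Enumerative', 7)   -> '7'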
def percent_changed():
''' Analysis of the percentage of Users that change each settings
'''
default_df = load_csv(settings_default_path)
organisation_df = load_csv(organisation_settings_path)
percentage_df = pd.DataFrame(columns=['setting', 'percentage'])
for column in list(organisation_df.columns.values):
setting_default = default_df.loc[default_df['setting'] == column]
data = organisation_df[column].tolist()
total = 0
different = 0
default = type_switch(setting_default['data_type'].tolist()[0],
setting_default['default'].tolist()[0])
for row in data:
total += 1
if row != default and row is not default:
different += 1
percentage_df = percentage_df.append({
'setting': column,
'percentage': round((different / total) * 100, 2)
}, ignore_index=True)
percentage_df = percentage_df.set_index('setting')
percentage_df.to_csv('./part1.csv')
def change_x_percent(x_value):
default_df = load_csv(settings_default_path)
organisation_df = load_csv(organisation_settings_path)
changed_amount = {}
for column in list(organisation_df.columns.values):
changed_amount[column] = 0
for i, row in organisation_df.iterrows():
cols_changed = {}
changed = 0
total = len(list(organisation_df.columns.values))
for column in list(organisation_df.columns.values):
setting_default = default_df.loc[default_df['setting'] == column]
default = type_switch(setting_default['data_type'].tolist()[0],
setting_default['default'].tolist()[0])
data = type_switch(setting_default['data_type'].tolist()[0], row[column])
if data != default and data is not default:
changed += 1
cols_changed[column] = 1
if round((changed / total) * 100, 2) > x_value:
changed_amount = dict(Counter(cols_changed) + Counter(changed_amount))
distribution_df = pd.DataFrame.from_dict(changed_amount, orient='index')
distribution_df.columns = ['count']
distribution_df.index.name = 'setting'
distribution_df.to_csv('./part2.csv')
def implicit_explicit():
default_df = load_csv(settings_default_path)
organisation_df = load_csv(organisation_settings_path)
percentage_df = pd.DataFrame(columns=['type', 'percentage'])
implicit_total = 0
implicit_different = 0
explicit_total = 0
explicit_different = 0
for column in list(organisation_df.columns.values):
setting_default = default_df.loc[default_df['setting'] == column]
data = organisation_df[column].tolist()
setting_type = setting_default['impact'].tolist()[0]
default = type_switch(setting_default['data_type'].tolist()[0],
setting_default['default'].tolist()[0])
for row in data:
if 'Implicit' in setting_type:
implicit_total += 1
if row != default and row is not default:
implicit_different += 1
else:
explicit_total += 1
if row != default and row is not default:
explicit_different += 1
percentage_df = percentage_df.append({
'type': 'Implicit',
'percentage': round((implicit_different / implicit_total) * 100, 2)
}, ignore_index=True)
percentage_df = percentage_df.append({
'type': 'Explicit',
'percentage': round((explicit_different / explicit_total) * 100, 2)
}, ignore_index=True)
percentage_df = percentage_df.set_index('type')
percentage_df.to_csv('./part3.csv')
def affected_users(settings):
default_df = load_csv(settings_default_path)
organisation_df = load_csv(organisation_settings_path)
total = 0
affected = 0
changed_affected = {}
for column in list(organisation_df.columns.values):
changed_affected[column] = 0
for i, row in organisation_df.iterrows():
total += 1
setting_defaulted = False
for setting in settings:
setting_default = default_df.loc[default_df['setting'] == setting]
default = type_switch(setting_default['data_type'].tolist()[0],
setting_default['default'].tolist()[0])
            if row[setting] == default or row[setting] is default:
setting_defaulted = True
break
if setting_defaulted is False:
affected += 1
changed_affected['input_settings'] = round((affected / total) * 100, 2)
for i, row in organisation_df.iterrows():
setting_defaulted = False
for setting in settings:
setting_default = default_df.loc[default_df['setting'] == setting]
default = type_switch(setting_default['data_type'].tolist()[0],
setting_default['default'].tolist()[0])
            value = row[setting]
if value == default or value is default:
setting_defaulted = True
break
if setting_defaulted is False:
for column in list(organisation_df.columns.values):
setting_default = default_df.loc[default_df['setting'] == column]
data = row[column]
default = type_switch(setting_default['data_type'].tolist()[0],
setting_default['default'].tolist()[0])
if data != default and data is not default:
changed_affected[column] += 1
for key, value in changed_affected.items():
changed_affected[key] = round((value / i) * 100, 2)
distribution_df = pd.DataFrame.from_dict(changed_affected, orient='index')
distribution_df.columns = ['percentage']
distribution_df.index.name = 'setting'
distribution_df.to_csv('./part4.csv')
if __name__ == "__main__":
parser = argparse.ArgumentParser()
    parser.add_argument('-1', '--one', action='store_true',
                        help='percent_changed: percentage of users that change each setting')
    parser.add_argument('-2', '--two', action='store_true',
                        help='change_x_percent: settings changed by users who change more than X percent of settings')
    parser.add_argument('-3', '--three', action='store_true',
                        help='implicit_explicit: percentage changed, split by implicit/explicit impact')
    parser.add_argument('-4', '--four', action='store_true',
                        help='affected_users: what else users who changed all given settings have changed')
args, rem_args = parser.parse_known_args()
if args.one:
percent_changed()
if args.two:
parser.add_argument('-p', '--percentage', type=float,
help='<Required> above percentage', required=True, default=USERS_CHANGE_X_DEFAULT)
args = parser.parse_args(rem_args, namespace=args)
change_x_percent(args.percentage)
if args.three:
implicit_explicit()
if args.four:
parser.add_argument('-s', '--setting', action='append',
help='<Required> Desired Settings', required=True)
args = parser.parse_args(rem_args, namespace=args)
affected_users(args.setting)
| 34.978448
| 110
| 0.619963
|
e334a872e9e4acf10878b071240898e4d0f9f794
| 6,809
|
py
|
Python
|
lib/datasets/voc_eval.py
|
ZongxianLee/dafrcnn-pytorch
|
b1c45e7c3be3eb514fc0419e5e474379477c91a7
|
[
"MIT"
] | 1
|
2020-10-14T09:37:37.000Z
|
2020-10-14T09:37:37.000Z
|
lib/datasets/voc_eval.py
|
kinredon/dafrcnn-pytorch
|
b1c45e7c3be3eb514fc0419e5e474379477c91a7
|
[
"MIT"
] | null | null | null |
lib/datasets/voc_eval.py
|
kinredon/dafrcnn-pytorch
|
b1c45e7c3be3eb514fc0419e5e474379477c91a7
|
[
"MIT"
] | 1
|
2020-10-02T08:04:12.000Z
|
2020-10-02T08:04:12.000Z
|
# --------------------------------------------------------
# Fast/er R-CNN
# Licensed under The MIT License [see LICENSE for details]
# Written by Bharath Hariharan
# --------------------------------------------------------
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import xml.etree.ElementTree as ET
import os
import pickle
import numpy as np
def parse_rec(filename):
""" Parse a PASCAL VOC xml file """
tree = ET.parse(filename)
objects = []
for obj in tree.findall('object'):
obj_struct = {}
obj_struct['name'] = obj.find('name').text
# city xml files don't have pose attribute
#obj_struct['pose'] = obj.find('pose').text
obj_struct['truncated'] = int(obj.find('truncated').text)
obj_struct['difficult'] = int(obj.find('difficult').text)
bbox = obj.find('bndbox')
obj_struct['bbox'] = [int(bbox.find('xmin').text),
int(bbox.find('ymin').text),
int(bbox.find('xmax').text),
int(bbox.find('ymax').text)]
objects.append(obj_struct)
return objects
def voc_ap(rec, prec, use_07_metric=False):
""" ap = voc_ap(rec, prec, [use_07_metric])
Compute VOC AP given precision and recall.
If use_07_metric is true, uses the
VOC 07 11 point method (default:False).
"""
if use_07_metric:
# 11 point metric
ap = 0.
for t in np.arange(0., 1.1, 0.1):
if np.sum(rec >= t) == 0:
p = 0
else:
p = np.max(prec[rec >= t])
ap = ap + p / 11.
else:
# correct AP calculation
# first append sentinel values at the end
mrec = np.concatenate(([0.], rec, [1.]))
mpre = np.concatenate(([0.], prec, [0.]))
# compute the precision envelope
for i in range(mpre.size - 1, 0, -1):
mpre[i - 1] = np.maximum(mpre[i - 1], mpre[i])
# to calculate area under PR curve, look for points
# where X axis (recall) changes value
i = np.where(mrec[1:] != mrec[:-1])[0]
# and sum (\Delta recall) * prec
ap = np.sum((mrec[i + 1] - mrec[i]) * mpre[i + 1])
return ap
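def _example_voc_ap():
    # Illustrative only (not part of the original file): sanity-check voc_ap on a
    # toy precision/recall curve; both variants return a value between 0 and 1.
    rec = np.array([0.25, 0.5, 0.5, 0.75])
    prec = np.array([1.0, 1.0, 0.67, 0.75])
    ap_area = voc_ap(rec, prec, use_07_metric=False)  # exact area under the PR envelope
    ap_11pt = voc_ap(rec, prec, use_07_metric=True)   # VOC07 11-point interpolation
    return ap_area, ap_11pt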
def voc_eval(detpath,
annopath,
imagesetfile,
classname,
cachedir,
ovthresh=0.5,
use_07_metric=False):
"""rec, prec, ap = voc_eval(detpath,
annopath,
imagesetfile,
classname,
[ovthresh],
[use_07_metric])
Top level function that does the PASCAL VOC evaluation.
detpath: Path to detections
detpath.format(classname) should produce the detection results file.
annopath: Path to annotations
annopath.format(imagename) should be the xml annotations file.
imagesetfile: Text file containing the list of images, one image per line.
classname: Category name (duh)
cachedir: Directory for caching the annotations
[ovthresh]: Overlap threshold (default = 0.5)
[use_07_metric]: Whether to use VOC07's 11 point AP computation
(default False)
"""
# assumes detections are in detpath.format(classname)
# assumes annotations are in annopath.format(imagename)
# assumes imagesetfile is a text file with each line an image name
# cachedir caches the annotations in a pickle file
# first load gt
if not os.path.isdir(cachedir):
os.mkdir(cachedir)
cachefile = os.path.join(cachedir, '%s_annots.pkl' % imagesetfile)
# read list of images
with open(imagesetfile, 'r') as f:
lines = f.readlines()
imagenames = [x.strip() for x in lines]
if not os.path.isfile(cachefile):
# load annotations
recs = {}
for i, imagename in enumerate(imagenames):
#print('imagename', i, imagename)
#print('formated annopath', annopath.format(imagename))
recs[imagename] = parse_rec(annopath.format(imagename))
if i % 100 == 0:
print('Reading annotation for {:d}/{:d}'.format(
i + 1, len(imagenames)))
# save
print('Saving cached annotations to {:s}'.format(cachefile))
with open(cachefile, 'wb') as f:
pickle.dump(recs, f)
else:
# load
with open(cachefile, 'rb') as f:
try:
recs = pickle.load(f)
except:
recs = pickle.load(f, encoding='bytes')
# extract gt objects for this class
class_recs = {}
npos = 0
for imagename in imagenames:
R = [obj for obj in recs[imagename] if obj['name'] == classname]
bbox = np.array([x['bbox'] for x in R])
difficult = np.array([x['difficult'] for x in R]).astype(np.bool)
det = [False] * len(R)
npos = npos + sum(~difficult)
class_recs[imagename] = {'bbox': bbox,
'difficult': difficult,
'det': det}
# read dets
detfile = detpath.format(classname)
with open(detfile, 'r') as f:
lines = f.readlines()
splitlines = [x.strip().split(' ') for x in lines]
image_ids = [x[0] for x in splitlines]
confidence = np.array([float(x[1]) for x in splitlines])
BB = np.array([[float(z) for z in x[2:]] for x in splitlines])
nd = len(image_ids)
tp = np.zeros(nd)
fp = np.zeros(nd)
if BB.shape[0] > 0:
# sort by confidence
sorted_ind = np.argsort(-confidence)
sorted_scores = np.sort(-confidence)
BB = BB[sorted_ind, :]
image_ids = [image_ids[x] for x in sorted_ind]
# go down dets and mark TPs and FPs
for d in range(nd):
R = class_recs[image_ids[d]]
bb = BB[d, :].astype(float)
ovmax = -np.inf
BBGT = R['bbox'].astype(float)
if BBGT.size > 0:
# compute overlaps
# intersection
ixmin = np.maximum(BBGT[:, 0], bb[0])
iymin = np.maximum(BBGT[:, 1], bb[1])
ixmax = np.minimum(BBGT[:, 2], bb[2])
iymax = np.minimum(BBGT[:, 3], bb[3])
iw = np.maximum(ixmax - ixmin + 1., 0.)
ih = np.maximum(iymax - iymin + 1., 0.)
inters = iw * ih
# union
uni = ((bb[2] - bb[0] + 1.) * (bb[3] - bb[1] + 1.) +
(BBGT[:, 2] - BBGT[:, 0] + 1.) *
(BBGT[:, 3] - BBGT[:, 1] + 1.) - inters)
overlaps = inters / uni
ovmax = np.max(overlaps)
jmax = np.argmax(overlaps)
if ovmax > ovthresh:
if not R['difficult'][jmax]:
if not R['det'][jmax]:
tp[d] = 1.
R['det'][jmax] = 1
else:
fp[d] = 1.
else:
fp[d] = 1.
# compute precision recall
fp = np.cumsum(fp)
tp = np.cumsum(tp)
rec = tp / float(npos)
# avoid divide by zero in case the first detection matches a difficult
# ground truth
prec = tp / np.maximum(tp + fp, np.finfo(np.float64).eps)
ap = voc_ap(rec, prec, use_07_metric)
return rec, prec, ap
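def _example_voc_eval():
    # Hedged usage sketch (all paths below are placeholders, not taken from this
    # repository): detpath and annopath are format strings that voc_eval fills in
    # with the class name and the image id respectively.
    detpath = 'results/det_test_{:s}.txt'
    annopath = 'Annotations/{:s}.xml'
    imagesetfile = 'test.txt'
    return voc_eval(detpath, annopath, imagesetfile, classname='car',
                    cachedir='annotations_cache', ovthresh=0.5, use_07_metric=True)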
| 31.817757
| 76
| 0.573653
|
b87a17f9a42aa330a49939b0701c85bae2292ba0
| 672
|
py
|
Python
|
Algo and DSA/LeetCode-Solutions-master/Python/counting-elements.py
|
Sourav692/FAANG-Interview-Preparation
|
f523e5c94d582328b3edc449ea16ac6ab28cdc81
|
[
"Unlicense"
] | 3,269
|
2018-10-12T01:29:40.000Z
|
2022-03-31T17:58:41.000Z
|
Algo and DSA/LeetCode-Solutions-master/Python/counting-elements.py
|
Sourav692/FAANG-Interview-Preparation
|
f523e5c94d582328b3edc449ea16ac6ab28cdc81
|
[
"Unlicense"
] | 53
|
2018-12-16T22:54:20.000Z
|
2022-02-25T08:31:20.000Z
|
Algo and DSA/LeetCode-Solutions-master/Python/counting-elements.py
|
Sourav692/FAANG-Interview-Preparation
|
f523e5c94d582328b3edc449ea16ac6ab28cdc81
|
[
"Unlicense"
] | 1,236
|
2018-10-12T02:51:40.000Z
|
2022-03-30T13:30:37.000Z
|
# Time: O(n)
# Space: O(n)
class Solution(object):
def countElements(self, arr):
"""
:type arr: List[int]
:rtype: int
"""
lookup = set(arr)
return sum(1 for x in arr if x+1 in lookup)
# Time: O(nlogn)
# Space: O(1)
class Solution(object):
def countElements(self, arr):
"""
:type arr: List[int]
:rtype: int
"""
arr.sort()
result, l = 0, 1
for i in xrange(len(arr)-1):
if arr[i] == arr[i+1]:
l += 1
continue
if arr[i]+1 == arr[i+1]:
result += l
l = 1
return result
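# Illustrative check (not part of the original file): for arr = [1, 1, 2, 3] the
# elements 1, 1 and 2 each have x+1 present in the array, so both solutions return 3, e.g.
#   Solution().countElements([1, 1, 2, 3])  ->  3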
| 21
| 51
| 0.431548
|
9fda4df3e33929c8295a0ae607f0fa06302e745a
| 1,496
|
py
|
Python
|
src/oscar/apps/catalogue/managers.py
|
highbiza/django-oscar
|
7eba207a77e5dd56b04a63b9283a9d76da2f64ac
|
[
"BSD-3-Clause"
] | null | null | null |
src/oscar/apps/catalogue/managers.py
|
highbiza/django-oscar
|
7eba207a77e5dd56b04a63b9283a9d76da2f64ac
|
[
"BSD-3-Clause"
] | 2
|
2019-03-13T16:15:54.000Z
|
2019-07-04T18:53:37.000Z
|
src/oscar/apps/catalogue/managers.py
|
highbiza/django-oscar
|
7eba207a77e5dd56b04a63b9283a9d76da2f64ac
|
[
"BSD-3-Clause"
] | null | null | null |
from django.db import models
from django.db.models import Count
class ProductQuerySet(models.query.QuerySet):
def base_queryset(self):
"""
Applies select_related and prefetch_related for commonly related
models to save on queries
"""
return self.select_related('product_class')\
.prefetch_related('children', 'product_options', 'product_class__options', 'stockrecords', 'images') \
.annotate(num_product_class_options=Count('product_class__options'),
num_product_options=Count('product_options'))
def browsable(self):
"""
Excludes non-canonical products.
"""
return self.filter(parent=None)
class ProductManager(models.Manager):
"""
Uses ProductQuerySet and proxies its methods to allow chaining
Once Django 1.7 lands, this class can probably be removed:
https://docs.djangoproject.com/en/dev/releases/1.7/#calling-custom-queryset-methods-from-the-manager # noqa
"""
def get_queryset(self):
return ProductQuerySet(self.model, using=self._db)
def browsable(self):
return self.get_queryset().browsable()
def base_queryset(self):
return self.get_queryset().base_queryset()
class BrowsableProductManager(ProductManager):
"""
Excludes non-canonical products
Could be deprecated after Oscar 0.7 is released
"""
def get_queryset(self):
return super().get_queryset().browsable()
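# Hedged usage sketch (assumes the standard Oscar catalogue Product model attaches
# these managers as `objects` and `browsable`; adjust names to the actual model):
#
#   Product.objects.base_queryset()   # products with related data select/prefetch'ed
#   Product.objects.browsable()       # only canonical (parent=None) products
#   Product.browsable.all()           # same filter via BrowsableProductManager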
| 29.333333
| 114
| 0.679813
|
edd08e17f23bdded1860982ccf912e2650e19953
| 2,023
|
py
|
Python
|
aiogram_tools/_models.py
|
LDmitriy7/aiogram-tools
|
e1d00b9707b85930522e81188875d737b7e67f02
|
[
"MIT"
] | 1
|
2021-09-05T14:46:40.000Z
|
2021-09-05T14:46:40.000Z
|
aiogram_tools/_models.py
|
LDmitriy7/aiogram-tools
|
e1d00b9707b85930522e81188875d737b7e67f02
|
[
"MIT"
] | null | null | null |
aiogram_tools/_models.py
|
LDmitriy7/aiogram-tools
|
e1d00b9707b85930522e81188875d737b7e67f02
|
[
"MIT"
] | null | null | null |
"""Contain all data models."""
from __future__ import annotations
from dataclasses import dataclass, field, fields, asdict, Field
from typing import Union, TypeVar
from bson import ObjectId
T = TypeVar('T')
# TODO: warning
EmptyList = field(default_factory=list)
EmptyDict = field(default_factory=dict)
@dataclass
class DataModel:
def to_dict(self):
return asdict(self)
@classmethod
@property
def field_names(cls) -> list[str]:
return [f.name for f in fields(cls)]
@classmethod
@property
def fields(cls) -> list[Field]:
return fields(cls)
@classmethod
def _resolve_fields(cls, obj_data: dict) -> dict:
resolved_data = {}
for key, value in obj_data.items():
if key in cls.field_names:
resolved_data[key] = value
return resolved_data
@classmethod
def from_dict(cls: type[T], obj_data: dict) -> T:
if not obj_data:
return None
resolved_data = cls._resolve_fields(obj_data)
for _field, value in resolved_data.items():
field_type = cls.__annotations__.get(_field)
factory = getattr(field_type, 'from_dict', None)
if factory:
resolved_data[_field] = factory(value)
# noinspection PyArgumentList
return cls(**resolved_data)
class MongoModelMeta(type):
def __new__(mcs, name, bases, namespace):
if '_id' not in namespace:
namespace['_id'] = field(default=None)
namespace.setdefault('__annotations__', {})['_id'] = None
cls = super().__new__(mcs, name, bases, namespace)
return cls
class MongoModel(DataModel, metaclass=MongoModelMeta):
_id: Union[str, int, ObjectId] = None
@property
def id(self) -> Union[str, int, None]:
if isinstance(self._id, ObjectId):
return str(self._id)
return self._id
@id.setter
def id(self, value: Union[str, int, None]):
setattr(self, '_id', value)
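# Illustrative sketch (not part of the original module): a concrete model is just a
# dataclass built on MongoModel; to_dict()/from_dict() and _id handling are inherited.
@dataclass
class _ExampleUser(MongoModel):
    name: str = ''
    age: int = 0


def _example_round_trip():
    user = _ExampleUser(name='Alice', age=30)
    data = user.to_dict()                # dict of the dataclass fields (incl. the injected _id)
    return _ExampleUser.from_dict(data)  # rebuilt _ExampleUser with the same values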
| 25.607595
| 69
| 0.627781
|
20d35bfeba8b6951ebb73234c98c0ae52be50fb0
| 2,469
|
py
|
Python
|
scripts/lanenet_data_process.py
|
mageofboy/pylot
|
c3154dc24c9429b9916274894c72ef92e03c946d
|
[
"Apache-2.0"
] | 231
|
2019-06-05T00:22:00.000Z
|
2022-03-28T06:15:00.000Z
|
scripts/lanenet_data_process.py
|
akhilkanduri/pylot_edited
|
d1295a42f0edd79670dc64053824a3e075d433e2
|
[
"Apache-2.0"
] | 108
|
2019-06-27T16:28:01.000Z
|
2022-03-28T19:14:18.000Z
|
scripts/lanenet_data_process.py
|
akhilkanduri/pylot_edited
|
d1295a42f0edd79670dc64053824a3e075d433e2
|
[
"Apache-2.0"
] | 80
|
2019-06-07T01:08:13.000Z
|
2022-03-28T01:44:42.000Z
|
from absl import app, flags
import os
from pathlib import Path
import re
FLAGS = flags.FLAGS
flags.DEFINE_string('data_path', 'data/', 'Main path where data is logged, not including town and start')
flags.DEFINE_string('output_path', 'data/lanenet/', 'Path where to place lanenet structured data')
flags.DEFINE_string('town_start', '', 'Town and start where images were collected from')
def main(argv):
data_path = os.path.join(FLAGS.data_path, FLAGS.town_start)
data_dir = os.path.abspath(data_path)
output_dir = os.path.abspath(FLAGS.output_path)
binary_img_dir = os.path.join(output_dir, "gt_binary_image")
instance_img_dir = os.path.join(output_dir, "gt_instance_image")
img_dir = os.path.join(output_dir, "gt_image")
Path(output_dir).mkdir(parents=True, exist_ok=True)
Path(binary_img_dir).mkdir(parents=True, exist_ok=True)
Path(instance_img_dir).mkdir(parents=True, exist_ok=True)
Path(img_dir).mkdir(parents=True, exist_ok=True)
group_files = {}
rename = {}
i, j, k = 0, 0, 0
for f in os.listdir(data_dir):
curr_path = os.path.join(data_dir, f)
m = re.search(r"-(\d+)\.", f)
timestamp = m.group(1)
new_f = FLAGS.town_start + "_" + timestamp + ".png" if FLAGS.town_start else timestamp +".png"
if re.search(r"binary_lane-.+\.png", f):
new_fname = os.path.join(binary_img_dir, new_f)
i+=1
elif re.search(r"lane-.+\.png", f):
new_fname = os.path.join(instance_img_dir, new_f)
j+=1
elif re.search(r"center-.+\.png", f):
new_fname = os.path.join(img_dir, new_f)
k+=1
rename[new_fname] = curr_path
if timestamp in group_files:
group_files[timestamp].append(new_fname)
else:
group_files[timestamp] = [new_fname]
print(f"{i} binary {j} lane {k} center")
with open(os.path.join(output_dir,"im_names.txt"), "a") as f:
i = 0
for v in group_files.values():
if len(v) == 3:
v_sort = sorted(v) #binary, center, lane
v_join = [v_sort[1], v_sort[0], v_sort[2]]
f.write(" ".join(v_join) +'\n')
for new_fname in v:
curr_path = rename[new_fname]
Path(curr_path).rename(new_fname)
i+=1
print(f"{i} data points ")
if __name__ == "__main__":
app.run(main)
| 41.847458
| 105
| 0.605508
|
f5d19a72fd080c3c69241a8268d902ac66e5f55d
| 4,951
|
py
|
Python
|
sdk/lusid/models/version.py
|
mneedham/lusid-sdk-python
|
edabec16b357ba3fc48a53f3faacb4f94b18843e
|
[
"MIT"
] | null | null | null |
sdk/lusid/models/version.py
|
mneedham/lusid-sdk-python
|
edabec16b357ba3fc48a53f3faacb4f94b18843e
|
[
"MIT"
] | null | null | null |
sdk/lusid/models/version.py
|
mneedham/lusid-sdk-python
|
edabec16b357ba3fc48a53f3faacb4f94b18843e
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
LUSID API
FINBOURNE Technology # noqa: E501
The version of the OpenAPI document: 0.11.2808
Contact: info@finbourne.com
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
class Version(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
required_map (dict): The key is attribute name
and the value is whether it is 'required' or 'optional'.
"""
openapi_types = {
'effective_from': 'datetime',
'as_at_date': 'datetime'
}
attribute_map = {
'effective_from': 'effectiveFrom',
'as_at_date': 'asAtDate'
}
required_map = {
'effective_from': 'required',
'as_at_date': 'required'
}
def __init__(self, effective_from=None, as_at_date=None): # noqa: E501
"""
Version - a model defined in OpenAPI
:param effective_from: The effective datetime at which this version became valid. Only applies when a single entity is being interacted with. (required)
:type effective_from: datetime
:param as_at_date: The asAt datetime at which the data was committed to LUSID. (required)
:type as_at_date: datetime
""" # noqa: E501
self._effective_from = None
self._as_at_date = None
self.discriminator = None
self.effective_from = effective_from
self.as_at_date = as_at_date
@property
def effective_from(self):
"""Gets the effective_from of this Version. # noqa: E501
The effective datetime at which this version became valid. Only applies when a single entity is being interacted with. # noqa: E501
:return: The effective_from of this Version. # noqa: E501
:rtype: datetime
"""
return self._effective_from
@effective_from.setter
def effective_from(self, effective_from):
"""Sets the effective_from of this Version.
The effective datetime at which this version became valid. Only applies when a single entity is being interacted with. # noqa: E501
:param effective_from: The effective_from of this Version. # noqa: E501
:type: datetime
"""
if effective_from is None:
raise ValueError("Invalid value for `effective_from`, must not be `None`") # noqa: E501
self._effective_from = effective_from
@property
def as_at_date(self):
"""Gets the as_at_date of this Version. # noqa: E501
The asAt datetime at which the data was committed to LUSID. # noqa: E501
:return: The as_at_date of this Version. # noqa: E501
:rtype: datetime
"""
return self._as_at_date
@as_at_date.setter
def as_at_date(self, as_at_date):
"""Sets the as_at_date of this Version.
The asAt datetime at which the data was committed to LUSID. # noqa: E501
:param as_at_date: The as_at_date of this Version. # noqa: E501
:type: datetime
"""
if as_at_date is None:
raise ValueError("Invalid value for `as_at_date`, must not be `None`") # noqa: E501
self._as_at_date = as_at_date
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, Version):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| 30.94375
| 161
| 0.597859
|
1df9285269ebb60de199f9ca478d61a1dfc1ff7b
| 7,013
|
py
|
Python
|
py2030/component_manager.py
|
markkorput/py2030
|
d80ac09c5e76c4ca4d8e295bd1814df7c84677ea
|
[
"MIT"
] | null | null | null |
py2030/component_manager.py
|
markkorput/py2030
|
d80ac09c5e76c4ca4d8e295bd1814df7c84677ea
|
[
"MIT"
] | 11
|
2016-04-07T08:33:02.000Z
|
2021-03-25T21:27:18.000Z
|
py2030/component_manager.py
|
markkorput/py2030
|
d80ac09c5e76c4ca4d8e295bd1814df7c84677ea
|
[
"MIT"
] | null | null | null |
import copy, sys
from datetime import datetime
import logging
logging.basicConfig(level=logging.WARNING)
from .event_manager import EventManager
from .utils.config_file import ConfigFile
class Context:
def __init__(self, event_manager):
self.event_manager = event_manager
class ComponentManager:
def __init__(self, options = {}):
# config
self.options = options
logging.basicConfig()
# logging.getLogger().addHandler(logging.StreamHandler(sys.stdout))
self.logger = logging.getLogger(__name__)
if 'verbose' in self.options and self.options['verbose']:
self.logger.setLevel(logging.DEBUG)
# attributes
self.profile = self.options['profile'] if 'profile' in self.options else 'default'
self.config_file = ConfigFile(self.options['config_file'] if 'config_file' in self.options and self.options['config_file'] else 'config.yml')
self.components = []
self.update_components = []
self.destroy_components = []
self.context = Context(EventManager())
self._profile_data = None
self.gotNextUpdateOps = False
self.running = False
self.restart = False
self.shutdown_message = "py2030, over and out.\n"
def __del__(self):
self.destroy()
def setup(self):
self.logger.debug('config file: {0}'.format(self.config_file.path))
self.logger.debug('profile: {0}'.format(self.profile))
# load from option
if self._profile_data == None and 'profile_data' in self.options:
self._profile_data = self.options['profile_data']
# load from config file
if self._profile_data == None:
# read config file content
self.config_file.load()
self._profile_data = self.config_file.get_value('py2030.profiles.'+self.profile, default_value={})
if self._profile_data == {}:
self.logger.warning("No profile data found for profile: {0}\nTrying 'default'".format(self.profile))
self.profile = 'default'
self._profile_data = self.config_file.get_value('py2030.profiles.'+self.profile, default_value={})
# load components based on profile configuration
self._load_components(self._profile_data)
if 'reload_event' in self._profile_data:
self.context.event_manager.get(self._profile_data['reload_event']).subscribe(self._onReloadEvent)
if 'stop_event' in self._profile_data:
self.context.event_manager.get(self._profile_data['stop_event']).subscribe(self._onStopEvent)
if 'restart_event' in self._profile_data:
self.context.event_manager.get(self._profile_data['restart_event']).subscribe(self._onRestartEvent)
if len(self.components) > 0:
self.running = True
else:
self.logger.warning('No components loaded. Abort.')
if 'start_event' in self._profile_data:
self.logger.debug('triggering start_event: ' + str(self._profile_data['start_event']))
self.context.event_manager.get(self._profile_data['start_event']).fire()
if 'shutdown_message' in self._profile_data:
self.shutdown_message = self._profile_data['shutdown_message']
def _onStopEvent(self):
self.logger.debug('stop_event triggered')
self.running = False
def _onRestartEvent(self):
self.logger.debug('restart_event triggered')
self.restart = True
self.running = False
def _onReloadEvent(self):
self.logger.debug('reload_event triggered')
self.nextUpdate(self._reload)
def _reload(self):
self.logger.info('-- Reloading --')
self.destroy()
self.config_file.load({'force': True})
self._profile_data = self.config_file.get_value('py2030.profiles.'+self.profile, default_value={})
self.setup()
def destroy(self):
if self._profile_data and 'reload_event' in self._profile_data:
self.context.event_manager.get(self._profile_data['reload_event']).unsubscribe(self._onReloadEvent)
for comp in self.destroy_components:
comp.destroy()
self.components = []
self.update_components = []
self.destroy_components = []
self._profile_data = None
self.running = False
def update(self):
for comp in self.update_components:
comp.update()
if self.gotNextUpdateOps:
for op in self._op_queue:
op()
del self._op_queue
self.gotNextUpdateOps = False
def _found_component_classes(self):
klasses = []
import py2030.components as comp_modules
from py2030.base_component import BaseComponent
for module_name in comp_modules.__all__:
# ignore the __init__.py file
if module_name == '__init__':
continue
# import file
mod = __import__('py2030.components.'+module_name, fromlist=['py2030.components'])
# find component class inside the module
for klass in mod.__dict__.values():
# most will have BaseComponent imported, ignore that one
if klass == BaseComponent:
continue
# only grab the classes that have config_name and create_components attributes
if hasattr(klass, 'config_name') and hasattr(klass, 'create_components'):
klasses.append(klass)
del comp_modules
del BaseComponent
return klasses
def _load_components(self, profile_data = None):
klasses = self._found_component_classes()
# loop over all configurations in our profile
for config_name, config_data in profile_data.items():
# let all classes that say they are responsible for this piece of configuration generate component(s)
for klass in filter(lambda cls: cls.config_name == config_name, klasses):
comps = klass.create_components(config_data, self.context)
if not hasattr(comps, '__iter__'):
self.logger.warning("Module with component_config_name {0} returned non-iterable components list".format(config_name))
continue
for comp in comps:
self._add_component(comp)
return
def _add_component(self, comp):
if hasattr(comp, 'update') and type(comp.update).__name__ == 'instancemethod':
self.update_components.append(comp)
if hasattr(comp, 'destroy') and type(comp.destroy).__name__ == 'instancemethod':
self.destroy_components.append(comp)
self.components.append(comp)
# operation
def nextUpdate(self, func):
if not hasattr(self, '_op_queue'):
self._op_queue = []
self._op_queue.append(func)
self.gotNextUpdateOps = True
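# Hedged sketch of the component protocol ComponentManager relies on (the class and
# config key below are made up for illustration; real components live in
# py2030.components and typically subclass BaseComponent):
#
# class HeartbeatComponent:
#     config_name = 'heartbeat'              # profile key this class is responsible for
#
#     @classmethod
#     def create_components(cls, config_data, context):
#         return [cls()]                     # must return an iterable of component instances
#
#     def update(self):                      # optional per-frame hook
#         pass
#
#     def destroy(self):                     # optional teardown hook
#         pass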
| 37.10582
| 149
| 0.64024
|
44db522ff3a21a4b58c1e95ddc4683fa3e8ebbda
| 16,044
|
py
|
Python
|
tests/columns/test_datetime.py
|
revan57/clickhouse-driver
|
0477f17ac4edf5404d85cfdc8a4901b62c0f22c0
|
[
"MIT"
] | 1
|
2019-09-18T05:33:11.000Z
|
2019-09-18T05:33:11.000Z
|
tests/columns/test_datetime.py
|
revan57/clickhouse-driver
|
0477f17ac4edf5404d85cfdc8a4901b62c0f22c0
|
[
"MIT"
] | null | null | null |
tests/columns/test_datetime.py
|
revan57/clickhouse-driver
|
0477f17ac4edf5404d85cfdc8a4901b62c0f22c0
|
[
"MIT"
] | null | null | null |
from contextlib import contextmanager
from datetime import date, datetime
import os
from time import tzset
from mock import patch
from pytz import timezone, utc
from tests.testcase import BaseTestCase
from tests.util import require_server_version
class DateTimeTestCase(BaseTestCase):
def test_simple(self):
with self.create_table('a Date, b DateTime'):
data = [(date(2012, 10, 25), datetime(2012, 10, 25, 14, 7, 19))]
self.client.execute(
'INSERT INTO test (a, b) VALUES', data
)
query = 'SELECT * FROM test'
inserted = self.emit_cli(query)
self.assertEqual(inserted, '2012-10-25\t2012-10-25 14:07:19\n')
inserted = self.client.execute(query)
self.assertEqual(inserted, data)
def test_nullable_date(self):
with self.create_table('a Nullable(Date)'):
data = [
(None, ), (date(2012, 10, 25), ),
(None, ), (date(2017, 6, 23), )
]
self.client.execute(
'INSERT INTO test (a) VALUES', data
)
query = 'SELECT * FROM test'
inserted = self.emit_cli(query)
self.assertEqual(
inserted, '\\N\n2012-10-25\n\\N\n2017-06-23\n'
)
inserted = self.client.execute(query)
self.assertEqual(inserted, data)
def test_nullable_datetime(self):
with self.create_table('a Nullable(DateTime)'):
data = [
(None, ), (datetime(2012, 10, 25, 14, 7, 19), ),
(None, ), (datetime(2017, 6, 23, 19, 10, 15), )
]
self.client.execute(
'INSERT INTO test (a) VALUES', data
)
query = 'SELECT * FROM test'
inserted = self.emit_cli(query)
self.assertEqual(
inserted,
'\\N\n2012-10-25 14:07:19\n\\N\n2017-06-23 19:10:15\n'
)
inserted = self.client.execute(query)
self.assertEqual(inserted, data)
class DateTimeTimezonesTestCase(BaseTestCase):
@contextmanager
def patch_env_tz(self, tz_name):
# Although in many cases, changing the TZ environment variable may
# affect the output of functions like localtime() without calling
# tzset(), this behavior should not be relied on.
# https://docs.python.org/3/library/time.html#time.tzset
with patch.dict(os.environ, {'TZ': tz_name}):
tzset()
yield
tzset()
# Asia/Kamchatka = UTC+12
# Asia/Novosibirsk = UTC+7
# Europe/Moscow = UTC+3
    # The naive wall time 2017-07-14 05:40 corresponds to 1500010800 seconds since
    # epoch when interpreted as UTC, and to 1500000000 when interpreted as Europe/Moscow.
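    # Worked arithmetic for the other zones used below (offsets as of July 2017):
    #   05:40 in Asia/Novosibirsk (UTC+7)  -> 1500010800 - 7 * 3600  = 1499985600
    #   05:40 in Asia/Kamchatka  (UTC+12)  -> 1500010800 - 12 * 3600 = 1499967600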
dt = datetime(2017, 7, 14, 5, 40)
dt_tz = timezone('Asia/Kamchatka').localize(dt)
col_tz_name = 'Asia/Novosibirsk'
col_tz = timezone(col_tz_name)
# INSERTs and SELECTs must be the same as clickhouse-client's
# if column has no timezone.
def test_use_server_timezone(self):
"""
Insert datetime with timezone UTC
into column with no timezone
using server's timezone (Europe/Moscow)
"""
# Determine server timezone and calculate expected timestamp.
server_tz_name = self.client.execute('SELECT timezone()')[0][0]
offset = timezone(server_tz_name).utcoffset(self.dt).total_seconds()
timestamp = 1500010800 - int(offset)
with self.patch_env_tz('Asia/Novosibirsk'):
with self.create_table('a DateTime'):
self.client.execute(
'INSERT INTO test (a) VALUES', [(self.dt, )]
)
self.emit_cli(
"INSERT INTO test (a) VALUES ('2017-07-14 05:40:00')"
)
query = 'SELECT toInt32(a) FROM test'
inserted = self.emit_cli(query)
self.assertEqual(inserted, '{ts}\n{ts}\n'.format(ts=timestamp))
query = 'SELECT * FROM test'
inserted = self.emit_cli(query)
self.assertEqual(
inserted,
'2017-07-14 05:40:00\n2017-07-14 05:40:00\n'
)
inserted = self.client.execute(query)
self.assertEqual(inserted, [(self.dt, ), (self.dt, )])
def test_use_client_timezone(self):
"""
Insert datetime with timezone UTC
into column with no timezone
using client's timezone Asia/Novosibirsk
"""
settings = {'use_client_time_zone': True}
with self.patch_env_tz('Asia/Novosibirsk'):
with self.create_table('a DateTime'):
self.client.execute(
'INSERT INTO test (a) VALUES', [(self.dt, )],
settings=settings
)
self.emit_cli(
"INSERT INTO test (a) VALUES ('2017-07-14 05:40:00')",
use_client_time_zone=1
)
query = 'SELECT toInt32(a) FROM test'
inserted = self.emit_cli(query, use_client_time_zone=1)
# 1499985600 = 1500000000 - 4 * 3600
self.assertEqual(inserted, '1499985600\n1499985600\n')
query = 'SELECT * FROM test'
inserted = self.emit_cli(query, use_client_time_zone=1)
self.assertEqual(
inserted,
'2017-07-14 05:40:00\n2017-07-14 05:40:00\n'
)
inserted = self.client.execute(query, settings=settings)
self.assertEqual(inserted, [(self.dt, ), (self.dt, )])
def test_insert_integers(self):
settings = {'use_client_time_zone': True}
with self.patch_env_tz('Europe/Moscow'):
with self.create_table('a DateTime'):
self.client.execute(
'INSERT INTO test (a) VALUES', [(1530211034, )],
settings=settings
)
query = 'SELECT toUInt32(a), a FROM test'
inserted = self.emit_cli(query, use_client_time_zone=1)
self.assertEqual(inserted, '1530211034\t2018-06-28 21:37:14\n')
def test_insert_integer_bounds(self):
with self.create_table('a DateTime'):
self.client.execute(
'INSERT INTO test (a) VALUES',
[(0, ), (1, ), (1500000000, ), (2**32-1, )]
)
query = 'SELECT toUInt32(a) FROM test ORDER BY a'
inserted = self.emit_cli(query)
self.assertEqual(inserted, '0\n1\n1500000000\n4294967295\n')
@require_server_version(1, 1, 54337)
def test_datetime_with_timezone_use_server_timezone(self):
"""
Insert datetime with timezone Asia/Kamchatka
into column with no timezone
using server's timezone (Europe/Moscow)
"""
server_tz_name = self.client.execute('SELECT timezone()')[0][0]
offset = timezone(server_tz_name).utcoffset(self.dt)
with self.patch_env_tz('Asia/Novosibirsk'):
with self.create_table('a DateTime'):
self.client.execute(
'INSERT INTO test (a) VALUES', [(self.dt_tz, )]
)
self.emit_cli(
"INSERT INTO test (a) VALUES "
"(toDateTime('2017-07-14 05:40:00', 'Asia/Kamchatka'))",
)
query = 'SELECT toInt32(a) FROM test'
inserted = self.emit_cli(query)
                # 1499967600 = 1500010800 - 12 * 3600
self.assertEqual(inserted, '1499967600\n1499967600\n')
query = 'SELECT * FROM test'
inserted = self.emit_cli(query)
dt = (self.dt_tz.astimezone(utc) + offset).replace(tzinfo=None)
self.assertEqual(inserted, '{dt}\n{dt}\n'.format(dt=dt))
inserted = self.client.execute(query)
self.assertEqual(inserted, [(dt, ), (dt, )])
@require_server_version(1, 1, 54337)
def test_datetime_with_timezone_use_client_timezone(self):
"""
Insert datetime with timezone Asia/Kamchatka
into column with no timezone
using client's timezone Asia/Novosibirsk
"""
settings = {'use_client_time_zone': True}
with self.patch_env_tz('Asia/Novosibirsk'):
with self.create_table('a DateTime'):
self.client.execute(
'INSERT INTO test (a) VALUES', [(self.dt_tz, )],
settings=settings
)
self.emit_cli(
"INSERT INTO test (a) VALUES "
"(toDateTime('2017-07-14 05:40:00', 'Asia/Kamchatka'))",
use_client_time_zone=1
)
query = 'SELECT toInt32(a) FROM test'
inserted = self.emit_cli(query, use_client_time_zone=1)
                # 1499967600 = 1500010800 - 12 * 3600
self.assertEqual(inserted, '1499967600\n1499967600\n')
query = 'SELECT * FROM test'
inserted = self.emit_cli(query, use_client_time_zone=1)
# 2017-07-14 00:40:00 = 2017-07-14 05:40:00 - 05:00:00
# (Kamchatka - Novosibirsk)
self.assertEqual(
inserted,
'2017-07-14 00:40:00\n2017-07-14 00:40:00\n'
)
inserted = self.client.execute(query, settings=settings)
dt = datetime(2017, 7, 14, 0, 40)
self.assertEqual(inserted, [(dt, ), (dt, )])
@require_server_version(1, 1, 54337)
def test_column_use_server_timezone(self):
"""
Insert datetime with no timezone
into column with timezone Asia/Novosibirsk
using server's timezone (Europe/Moscow)
"""
with self.patch_env_tz('Europe/Moscow'):
table_col = "a DateTime('{}')".format(self.col_tz_name)
with self.create_table(table_col):
self.client.execute(
'INSERT INTO test (a) VALUES', [(self.dt, )]
)
self.emit_cli(
"INSERT INTO test (a) VALUES ('2017-07-14 05:40:00')"
)
query = 'SELECT toInt32(a) FROM test'
inserted = self.emit_cli(query)
# 1499985600 = 1500000000 - 4 * 3600
self.assertEqual(inserted, '1499985600\n1499985600\n')
query = 'SELECT * FROM test'
inserted = self.emit_cli(query)
self.assertEqual(
inserted,
'2017-07-14 05:40:00\n2017-07-14 05:40:00\n'
)
inserted = self.client.execute(query)
self.assertEqual(inserted, [
(self.col_tz.localize(self.dt), ),
(self.col_tz.localize(self.dt), )
])
@require_server_version(1, 1, 54337)
def test_column_use_client_timezone(self):
"""
Insert datetime with no timezone
into column with timezone Asia/Novosibirsk
using client's timezone Europe/Moscow
"""
settings = {'use_client_time_zone': True}
with self.patch_env_tz('Europe/Moscow'):
table_col = "a DateTime('{}')".format(self.col_tz_name)
with self.create_table(table_col):
self.client.execute(
'INSERT INTO test (a) VALUES', [(self.dt, )],
settings=settings
)
self.emit_cli(
"INSERT INTO test (a) VALUES ('2017-07-14 05:40:00')",
use_client_time_zone=1
)
query = 'SELECT toInt32(a) FROM test'
inserted = self.emit_cli(query, use_client_time_zone=1)
# 1499985600 = 1500000000 - 4 * 3600
self.assertEqual(inserted, '1499985600\n1499985600\n')
query = 'SELECT * FROM test'
inserted = self.emit_cli(query, use_client_time_zone=1)
self.assertEqual(
inserted,
'2017-07-14 05:40:00\n2017-07-14 05:40:00\n'
)
inserted = self.client.execute(query, settings=settings)
self.assertEqual(inserted, [
(self.col_tz.localize(self.dt), ),
(self.col_tz.localize(self.dt), )
])
@require_server_version(1, 1, 54337)
def test_datetime_with_timezone_column_use_server_timezone(self):
"""
Insert datetime with timezone Asia/Kamchatka
into column with timezone Asia/Novosibirsk
using server's timezone (Europe/Moscow)
"""
with self.patch_env_tz('Europe/Moscow'):
table_col = "a DateTime('{}')".format(self.col_tz_name)
with self.create_table(table_col):
self.client.execute(
'INSERT INTO test (a) VALUES', [(self.dt_tz, )]
)
self.emit_cli(
"INSERT INTO test (a) VALUES "
"(toDateTime('2017-07-14 05:40:00', 'Asia/Kamchatka'))",
)
query = 'SELECT toInt32(a) FROM test'
inserted = self.emit_cli(query)
                # 1499967600 = 1500010800 - 12 * 3600
self.assertEqual(inserted, '1499967600\n1499967600\n')
query = 'SELECT * FROM test'
inserted = self.emit_cli(query)
# 2017-07-14 00:40:00 = 2017-07-14 05:40:00 - 05:00:00
# (Kamchatka - Novosibirsk)
self.assertEqual(
inserted,
'2017-07-14 00:40:00\n2017-07-14 00:40:00\n'
)
inserted = self.client.execute(query)
dt = datetime(2017, 7, 14, 0, 40)
self.assertEqual(inserted, [
(self.col_tz.localize(dt), ),
(self.col_tz.localize(dt), )
])
@require_server_version(1, 1, 54337)
def test_datetime_with_timezone_column_use_client_timezone(self):
"""
Insert datetime with timezone Asia/Kamchatka
into column with timezone Asia/Novosibirsk
using client's timezone (Europe/Moscow)
"""
settings = {'use_client_time_zone': True}
with self.patch_env_tz('Europe/Moscow'):
table_col = "a DateTime('{}')".format(self.col_tz_name)
with self.create_table(table_col):
self.client.execute(
'INSERT INTO test (a) VALUES', [(self.dt_tz, )],
settings=settings
)
self.emit_cli(
"INSERT INTO test (a) VALUES "
"(toDateTime('2017-07-14 05:40:00', 'Asia/Kamchatka'))",
use_client_time_zone=1
)
query = 'SELECT toInt32(a) FROM test'
inserted = self.emit_cli(query, use_client_time_zone=1)
                # 1499967600 = 1500010800 - 12 * 3600
self.assertEqual(inserted, '1499967600\n1499967600\n')
query = 'SELECT * FROM test'
inserted = self.emit_cli(query, use_client_time_zone=1)
# 2017-07-14 00:40:00 = 2017-07-14 05:40:00 - 05:00:00
# (Kamchatka - Novosibirsk)
self.assertEqual(
inserted,
'2017-07-14 00:40:00\n2017-07-14 00:40:00\n'
)
inserted = self.client.execute(query, settings=settings)
dt = datetime(2017, 7, 14, 0, 40)
self.assertEqual(inserted, [
(self.col_tz.localize(dt), ),
(self.col_tz.localize(dt), )
])
| 37.839623
| 79
| 0.528546
|
9baeb4927cf9158f6a2f2d454ff01fec9822a904
| 2,529
|
py
|
Python
|
Meraki_practice/tree.py
|
agancsos/scala
|
926537f60679f23187fa05c6d578261420698802
|
[
"MIT"
] | null | null | null |
Meraki_practice/tree.py
|
agancsos/scala
|
926537f60679f23187fa05c6d578261420698802
|
[
"MIT"
] | null | null | null |
Meraki_practice/tree.py
|
agancsos/scala
|
926537f60679f23187fa05c6d578261420698802
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
###############################################################################
# Name : tree.py #
# Author : Abel Gancsos #
# Version : v. 1.0.0.0 #
# Description : Practice working with binary search trees. #
###############################################################################
import os, sys;
class Node:
left=None;right=None;data=None;
def __init__(self, left=None, right=None, data=None):
self.left = left; self.right = right; self.data = data;
pass;
pass;
class Tree:
root=None;
def __init__(self, children = []):
self.root = Node(Node(), Node(), None);
for child in children: self.insert_child(self.root, int(child));
pass;
def insert_child(self, node=None, child=None):
inserted = False;
if node == None: node = self.root;
if (node.data == None): node.data = child; return True;
if (node.left == None and node.data > child): node.left = Node(Node(), Node(), child); return True;
if (node.right == None and node.data < child): node.right = Node(Node(), Node(), child); return True;
if not inserted and node.left != None: inserted = self.insert_child(node.left, child);
if not inserted and node.right != None: inserted = self.insert_child(node.right, child);
return inserted;
pass;
def search(self, value, node = None):
if (node == None): node = self.root;
if (node.data == value): return node;
if (node.left != None and node.left.data == value): return node.left;
if (node.right != None and node.right.data == value): return node.right;
if node.left != None and self.search(value, node.left) != None: return self.search(value, node.left);
if node.right != None and self.search(value, node.right) != None: return self.search(value, node.right);
return None;
pass;
class Driver:
tree=None;search=None;
def __init__(self, params=dict()):
self.search = int(params["-s"]) if "-s" in params.keys() else 2;
self.tree = Tree(params["--children"].split(",")) if "--children" in params.keys() else Tree([10, 4, 3, 2, 1]);
pass;
def invoke(self):
node = self.tree.search(self.search);
if (node == None): print("No node found...");
else: print(node.data);
pass;
if __name__ == "__main__":
params = dict();
for i in range(0, len(sys.argv) - 1): params[sys.argv[i]] = sys.argv[i + 1];
session = Driver(params);
session.invoke();
pass;
| 40.790323
| 113
| 0.569
|
9d405e543d2d770c64a1d19305a72c5ce7834de5
| 604
|
py
|
Python
|
robot/reconnect.py
|
ksurct-officers/Mercury2018-19
|
ee90803c1b14727663c65ebc396cdb92c2b79667
|
[
"Apache-2.0"
] | null | null | null |
robot/reconnect.py
|
ksurct-officers/Mercury2018-19
|
ee90803c1b14727663c65ebc396cdb92c2b79667
|
[
"Apache-2.0"
] | null | null | null |
robot/reconnect.py
|
ksurct-officers/Mercury2018-19
|
ee90803c1b14727663c65ebc396cdb92c2b79667
|
[
"Apache-2.0"
] | null | null | null |
import os, requests
from time import sleep
# This file automatically reconnects the wifi on the pi if it drops, used for LOS tests
# DO NOT RUN THIS FILE WITHOUT A & AT THE END OF THE FILE BECAUSE THERE IS AN INFINITE LOOP SO THE REST OF THE SYSTEM WON'T START I ALREADY MADE THIS MISTAKE PLEASE DON'T DO IT AGAIN
while True:
try:
r = requests.get('http://google.com')
if (r.status_code == 200):
pass
else:
os.system('sudo sh reconnect.sh')
sleep(5)
except (requests.exceptions.ConnectionError):
os.system('sudo sh reconnect.sh')
| 35.529412
| 182
| 0.663907
|
17502dbaa0904f524ed9ad2a666ae56e768ad2fd
| 3,678
|
py
|
Python
|
python/tvm/relay/frontend/nnvm_common.py
|
TharinduRusira/tvm
|
e77205532a4fc7e3de8f12aed1b4129813b43950
|
[
"Apache-2.0"
] | 2
|
2018-12-26T13:44:45.000Z
|
2018-12-26T13:44:48.000Z
|
python/tvm/relay/frontend/nnvm_common.py
|
ganzhiliang/tvm
|
b076cad542524cb3744149d953c341b5815f6474
|
[
"Apache-2.0"
] | null | null | null |
python/tvm/relay/frontend/nnvm_common.py
|
ganzhiliang/tvm
|
b076cad542524cb3744149d953c341b5815f6474
|
[
"Apache-2.0"
] | 1
|
2018-12-21T14:03:56.000Z
|
2018-12-21T14:03:56.000Z
|
# pylint: disable=invalid-name, import-self, len-as-condition
"""Utility functions common to NNVM and MxNet conversion."""
from __future__ import absolute_import as _abs
from .. import expr as _expr
from .. import op as _op
def _get_relay_op(op_name):
op = _op
for path in op_name.split("."):
op = getattr(op, path)
if not op:
raise RuntimeError("Unable to map op_name {} to relay".format(op_name))
return op
def _warn_not_used(attr, op='nnvm'):
import warnings
err = "{} is ignored in {}.".format(attr, op)
warnings.warn(err)
def _rename(new_op):
if isinstance(new_op, str):
new_op = _get_relay_op(new_op)
# attrs are ignored.
def impl(inputs, _, _dtype='float32'):
return new_op(*inputs)
return impl
def _reshape(inputs, attrs):
if attrs.get_bool("reverse", False):
raise RuntimeError("reshape do not support option reverse")
shape = attrs.get_int_tuple("shape")
return _op.reshape(inputs[0], newshape=shape)
def _init_op(new_op):
"""Init ops like zeros/ones"""
def _impl(inputs, attrs):
assert len(inputs) == 0
shape = attrs.get_int_tuple("shape")
dtype = attrs.get_str("dtype", "float32")
return new_op(shape=shape, dtype=dtype)
return _impl
def _softmax_op(new_op):
"""softmax/log_softmax"""
def _impl(inputs, attrs):
assert len(inputs) == 1
axis = attrs.get_int("axis", -1)
return new_op(inputs[0], axis=axis)
return _impl
def _reduce(new_op):
"""Reduction ops like sum/min/max"""
def _impl(inputs, attrs):
assert len(inputs) == 1
axis = attrs.get_int_tuple("axis", [])
keepdims = attrs.get_bool("keepdims", False)
# use None for reduce over all axis.
axis = None if len(axis) == 0 else axis
return new_op(inputs[0], axis=axis, keepdims=keepdims)
return _impl
def _arg_reduce(new_op):
"""Arg Reduction ops like argmin/argmax"""
def _impl(inputs, attrs):
assert len(inputs) == 1
axis = attrs.get_int("axis", None)
keepdims = attrs.get_bool("keepdims", False)
res = new_op(inputs[0], axis=[axis], keepdims=keepdims)
# cast to dtype.
res = res.astype("float32")
return res
return _impl
def _cast(inputs, attrs):
"""Type cast"""
dtype = attrs.get_str("dtype")
return inputs[0].astype(dtype=dtype)
def _clip(inputs, attrs):
a_min = attrs.get_float("a_min")
a_max = attrs.get_float("a_max")
return _op.clip(inputs[0], a_min=a_min, a_max=a_max)
def _transpose(inputs, attrs):
axes = attrs.get_int_tuple("axes", None)
# translate default case
axes = None if len(axes) == 0 else axes
return _op.transpose(inputs[0], axes=axes)
def _upsampling(inputs, attrs):
scale = attrs.get_int("scale")
return _op.nn.upsampling(inputs[0], scale=scale)
def _elemwise_sum(inputs, _):
assert len(inputs) > 0
res = inputs[0]
for x in inputs[1:]:
res = _op.add(res, x)
return res
def _binop_scalar(new_op):
def _impl(inputs, attrs):
assert len(inputs) == 1
scalar = attrs.get_float("scalar")
# Note: binary scalar only works for float op for now
scalar = _expr.const(scalar, dtype="float32")
return new_op(inputs[0], scalar)
return _impl
def _rbinop_scalar(new_op):
def _impl(inputs, attrs):
assert len(inputs) == 1
scalar = attrs.get_float("scalar")
# Note: binary scalar only works for float op for now
scalar = _expr.const(scalar, dtype="float32")
return new_op(scalar, inputs[0])
return _impl
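# Hedged usage sketch (the mapping below is illustrative, not the real NNVM/MXNet
# converter table): each helper returns an `impl(inputs, attrs)` callable, so a
# frontend can assemble its op-conversion map declaratively, e.g.
#
# _convert_map = {
#     'relu':           _rename(_op.nn.relu),
#     'flatten':        _rename('nn.batch_flatten'),
#     'sum':            _reduce(_op.sum),
#     'argmax':         _arg_reduce(_op.argmax),
#     '__add_scalar__': _binop_scalar(_op.add),
# }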
| 27.654135
| 79
| 0.636215
|
aeb8e6d23ab270ea997fe9ada22c6c75cc15b417
| 1,888
|
py
|
Python
|
nipype/interfaces/minc/tests/test_auto_VolSymm.py
|
dPys/nipype
|
75030b29297808e7c9a9e91b411b685154dff60b
|
[
"Apache-2.0"
] | 1
|
2019-03-25T14:11:18.000Z
|
2019-03-25T14:11:18.000Z
|
nipype/interfaces/minc/tests/test_auto_VolSymm.py
|
dPys/nipype
|
75030b29297808e7c9a9e91b411b685154dff60b
|
[
"Apache-2.0"
] | 1
|
2017-01-05T01:24:33.000Z
|
2017-01-05T01:24:33.000Z
|
nipype/interfaces/minc/tests/test_auto_VolSymm.py
|
wtriplett/nipype
|
388f140fceaf55438a987e9cdfa2a8e995428afd
|
[
"Apache-2.0"
] | 1
|
2020-12-16T16:36:48.000Z
|
2020-12-16T16:36:48.000Z
|
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from ..minc import VolSymm
def test_VolSymm_inputs():
input_map = dict(
args=dict(argstr="%s",),
clobber=dict(argstr="-clobber", usedefault=True,),
config_file=dict(argstr="-config_file %s", extensions=None,),
environ=dict(nohash=True, usedefault=True,),
fit_linear=dict(argstr="-linear",),
fit_nonlinear=dict(argstr="-nonlinear",),
input_file=dict(argstr="%s", extensions=None, mandatory=True, position=-3,),
input_grid_files=dict(),
nofit=dict(argstr="-nofit",),
output_file=dict(
argstr="%s",
extensions=None,
genfile=True,
hash_files=False,
name_source=["input_file"],
name_template="%s_vol_symm.mnc",
position=-1,
),
trans_file=dict(
argstr="%s",
extensions=None,
genfile=True,
hash_files=False,
keep_extension=False,
name_source=["input_file"],
name_template="%s_vol_symm.xfm",
position=-2,
),
verbose=dict(argstr="-verbose",),
x=dict(argstr="-x",),
y=dict(argstr="-y",),
z=dict(argstr="-z",),
)
inputs = VolSymm.input_spec()
for key, metadata in list(input_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(inputs.traits()[key], metakey) == value
def test_VolSymm_outputs():
output_map = dict(
output_file=dict(extensions=None,),
output_grid=dict(extensions=None,),
trans_file=dict(extensions=None,),
)
outputs = VolSymm.output_spec()
for key, metadata in list(output_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(outputs.traits()[key], metakey) == value
| 32.551724
| 84
| 0.577331
|
ffcb2bf3362c6ab1fc4dbe65a12564197c41a098
| 25
|
py
|
Python
|
deep_networks/data/dicom/__init__.py
|
2PacIsAlive/deepnet.works
|
6b91a6cbac39ea23df1fb81ec348168cbf9b5d4d
|
[
"MIT"
] | null | null | null |
deep_networks/data/dicom/__init__.py
|
2PacIsAlive/deepnet.works
|
6b91a6cbac39ea23df1fb81ec348168cbf9b5d4d
|
[
"MIT"
] | null | null | null |
deep_networks/data/dicom/__init__.py
|
2PacIsAlive/deepnet.works
|
6b91a6cbac39ea23df1fb81ec348168cbf9b5d4d
|
[
"MIT"
] | 2
|
2017-02-20T09:49:07.000Z
|
2017-02-21T03:12:47.000Z
|
import load
import plot
| 6.25
| 11
| 0.8
|
00d539db4ce46243f824df677919ec9501c4d618
| 2,376
|
py
|
Python
|
qiskit/optimization/applications/ising/max_cut.py
|
johannes-weidenfeller/qiskit-aqua
|
7775410f4dabf09ec9f933bf411ead434550accf
|
[
"Apache-2.0"
] | 15
|
2020-06-29T08:33:39.000Z
|
2022-02-12T00:28:51.000Z
|
qiskit/optimization/applications/ising/max_cut.py
|
johannes-weidenfeller/qiskit-aqua
|
7775410f4dabf09ec9f933bf411ead434550accf
|
[
"Apache-2.0"
] | 4
|
2020-11-27T09:34:13.000Z
|
2021-04-30T21:13:41.000Z
|
qiskit/optimization/applications/ising/max_cut.py
|
johannes-weidenfeller/qiskit-aqua
|
7775410f4dabf09ec9f933bf411ead434550accf
|
[
"Apache-2.0"
] | 11
|
2020-06-29T08:40:24.000Z
|
2022-02-24T17:39:16.000Z
|
# This code is part of Qiskit.
#
# (C) Copyright IBM 2018, 2020.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""
Convert max-cut instances into Pauli list
Deal with Gset format. See https://web.stanford.edu/~yyye/yyye/Gset/
Design the max-cut object `w` as a two-dimensional np.array
e.g., w[i, j] = x means that the weight of an edge between i and j is x
Note that the weights are symmetric, i.e., w[j, i] = x always holds.
"""
import logging
import numpy as np
from qiskit.quantum_info import Pauli
from qiskit.aqua.operators import WeightedPauliOperator
logger = logging.getLogger(__name__)
def get_operator(weight_matrix):
"""Generate Hamiltonian for the max-cut problem of a graph.
Args:
weight_matrix (numpy.ndarray) : adjacency matrix.
Returns:
WeightedPauliOperator: operator for the Hamiltonian
float: a constant shift for the obj function.
"""
num_nodes = weight_matrix.shape[0]
pauli_list = []
shift = 0
for i in range(num_nodes):
for j in range(i):
if weight_matrix[i, j] != 0:
x_p = np.zeros(num_nodes, dtype=np.bool)
z_p = np.zeros(num_nodes, dtype=np.bool)
z_p[i] = True
z_p[j] = True
pauli_list.append([0.5 * weight_matrix[i, j], Pauli(z_p, x_p)])
shift -= 0.5 * weight_matrix[i, j]
return WeightedPauliOperator(paulis=pauli_list), shift
def max_cut_value(x, w):
"""Compute the value of a cut.
Args:
x (numpy.ndarray): binary string as numpy array.
w (numpy.ndarray): adjacency matrix.
Returns:
float: value of the cut.
"""
# pylint: disable=invalid-name
X = np.outer(x, (1 - x))
return np.sum(w * X)
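def _example_triangle_cut():
    # Illustrative check (not part of the original module): for an unweighted
    # triangle, a cut that separates one node from the other two crosses 2 edges.
    w = np.array([[0, 1, 1],
                  [1, 0, 1],
                  [1, 1, 0]])
    x = np.array([1, 0, 0])     # node 0 on one side, nodes 1 and 2 on the other
    return max_cut_value(x, w)  # -> 2.0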
def get_graph_solution(x):
"""Get graph solution from binary string.
Args:
x (numpy.ndarray) : binary string as numpy array.
Returns:
numpy.ndarray: graph solution as binary numpy array.
"""
return 1 - x
| 28.97561
| 79
| 0.655724
|
49f3e019d845f40a52853e02e23dbfd52329de04
| 4,015
|
py
|
Python
|
src/mega/crypto.py
|
YoilyL/mega.py
|
8b795082e229c936ea900a5f3a26dac815a06a54
|
[
"Apache-2.0"
] | 3
|
2020-06-03T17:03:37.000Z
|
2021-01-04T14:29:40.000Z
|
src/mega/crypto.py
|
YoilyL/mega.py
|
8b795082e229c936ea900a5f3a26dac815a06a54
|
[
"Apache-2.0"
] | null | null | null |
src/mega/crypto.py
|
YoilyL/mega.py
|
8b795082e229c936ea900a5f3a26dac815a06a54
|
[
"Apache-2.0"
] | null | null | null |
from Crypto.Cipher import AES
import json
import base64
import struct
import binascii
import random
import sys
# Python3 compatibility
if sys.version_info < (3, ):
def makebyte(x):
return x
def makestring(x):
return x
else:
import codecs
def makebyte(x):
return codecs.latin_1_encode(x)[0]
def makestring(x):
return codecs.latin_1_decode(x)[0]
def aes_cbc_encrypt(data, key):
aes_cipher = AES.new(key, AES.MODE_CBC, makebyte('\0' * 16))
return aes_cipher.encrypt(data)
def aes_cbc_decrypt(data, key):
aes_cipher = AES.new(key, AES.MODE_CBC, makebyte('\0' * 16))
return aes_cipher.decrypt(data)
def aes_cbc_encrypt_a32(data, key):
return str_to_a32(aes_cbc_encrypt(a32_to_str(data), a32_to_str(key)))
def aes_cbc_decrypt_a32(data, key):
return str_to_a32(aes_cbc_decrypt(a32_to_str(data), a32_to_str(key)))
def stringhash(str, aeskey):
s32 = str_to_a32(str)
h32 = [0, 0, 0, 0]
for i in range(len(s32)):
h32[i % 4] ^= s32[i]
for r in range(0x4000):
h32 = aes_cbc_encrypt_a32(h32, aeskey)
return a32_to_base64((h32[0], h32[2]))
def prepare_key(arr):
pkey = [0x93C467E3, 0x7DB0C7A4, 0xD1BE3F81, 0x0152CB56]
for r in range(0x10000):
for j in range(0, len(arr), 4):
key = [0, 0, 0, 0]
for i in range(4):
if i + j < len(arr):
key[i] = arr[i + j]
pkey = aes_cbc_encrypt_a32(pkey, key)
return pkey
def encrypt_key(a, key):
return sum(
(aes_cbc_encrypt_a32(a[i:i + 4], key) for i in range(0, len(a), 4)), ()
)
def decrypt_key(a, key):
return sum(
(aes_cbc_decrypt_a32(a[i:i + 4], key) for i in range(0, len(a), 4)), ()
)
def encrypt_attr(attr, key):
attr = makebyte('MEGA' + json.dumps(attr))
if len(attr) % 16:
attr += b'\0' * (16 - len(attr) % 16)
return aes_cbc_encrypt(attr, a32_to_str(key))
def decrypt_attr(attr, key):
attr = aes_cbc_decrypt(attr, a32_to_str(key))
attr = makestring(attr)
attr = attr.rstrip('\0')
return json.loads(attr[4:]) if attr[:6] == 'MEGA{"' else False
def a32_to_str(a):
return struct.pack('>%dI' % len(a), *a)
def str_to_a32(b):
if isinstance(b, str):
b = makebyte(b)
if len(b) % 4:
# pad to multiple of 4
b += b'\0' * (4 - len(b) % 4)
return struct.unpack('>%dI' % (len(b) / 4), b)
def mpi_to_int(s):
"""
A Multi-precision integer is encoded as a series of bytes in big-endian
order. The first two bytes are a header which tell the number of bits in
the integer. The rest of the bytes are the integer.
"""
return int(binascii.hexlify(s[2:]), 16)
def extended_gcd(a, b):
if a == 0:
return (b, 0, 1)
else:
g, y, x = extended_gcd(b % a, a)
return (g, x - (b // a) * y, y)
def modular_inverse(a, m):
g, x, y = extended_gcd(a, m)
if g != 1:
raise Exception('modular inverse does not exist')
else:
return x % m
def base64_url_decode(data):
data += '=='[(2 - len(data) * 3) % 4:]
for search, replace in (('-', '+'), ('_', '/'), (',', '')):
data = data.replace(search, replace)
return base64.b64decode(data)
def base64_to_a32(s):
return str_to_a32(base64_url_decode(s))
def base64_url_encode(data):
data = base64.b64encode(data)
data = makestring(data)
for search, replace in (('+', '-'), ('/', '_'), ('=', '')):
data = data.replace(search, replace)
return data
def a32_to_base64(a):
return base64_url_encode(a32_to_str(a))
def get_chunks(size):
p = 0
s = 0x20000
while p + s < size:
yield (p, s)
p += s
if s < 0x100000:
s += 0x20000
yield (p, size - p)
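# Illustrative trace (not part of the original module): chunk sizes start at 128 KiB
# and grow by 128 KiB per chunk (capped at 1 MiB), with a final partial chunk, e.g.
#   list(get_chunks(0x50000)) -> [(0, 0x20000), (0x20000, 0x30000)]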
def make_id(length):
text = ''
possible = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"
for i in range(length):
text += random.choice(possible)
return text
| 23.479532
| 79
| 0.593026
|