| column | type, range |
|---|---|
| hexsha | string, length 40 |
| size | int64, 4 to 1.02M |
| ext | string, 8 classes |
| lang | string, 1 class |
| max_stars_repo_path | string, length 4 to 209 |
| max_stars_repo_name | string, length 5 to 121 |
| max_stars_repo_head_hexsha | string, length 40 |
| max_stars_repo_licenses | list, length 1 to 10 |
| max_stars_count | int64, 1 to 191k, nullable |
| max_stars_repo_stars_event_min_datetime | string, length 24, nullable |
| max_stars_repo_stars_event_max_datetime | string, length 24, nullable |
| max_issues_repo_path | string, length 4 to 209 |
| max_issues_repo_name | string, length 5 to 121 |
| max_issues_repo_head_hexsha | string, length 40 |
| max_issues_repo_licenses | list, length 1 to 10 |
| max_issues_count | int64, 1 to 67k, nullable |
| max_issues_repo_issues_event_min_datetime | string, length 24, nullable |
| max_issues_repo_issues_event_max_datetime | string, length 24, nullable |
| max_forks_repo_path | string, length 4 to 209 |
| max_forks_repo_name | string, length 5 to 121 |
| max_forks_repo_head_hexsha | string, length 40 |
| max_forks_repo_licenses | list, length 1 to 10 |
| max_forks_count | int64, 1 to 105k, nullable |
| max_forks_repo_forks_event_min_datetime | string, length 24, nullable |
| max_forks_repo_forks_event_max_datetime | string, length 24, nullable |
| content | string, length 4 to 1.02M |
| avg_line_length | float64, 1.07 to 66.1k |
| max_line_length | int64, 4 to 266k |
| alphanum_fraction | float64, 0.01 to 1 |
3e2c0eb8c6d9e98c71662541c6772d31530d3d78 | 11,259 | py | Python | tests/gpflow/test_mean_functions.py | joelberkeley/GPflow | 78230b98f57c64b5ee2932ea0d2752eb9ff102ce | ["Apache-2.0"] | null | null | null | tests/gpflow/test_mean_functions.py | joelberkeley/GPflow | 78230b98f57c64b5ee2932ea0d2752eb9ff102ce | ["Apache-2.0"] | null | null | null | tests/gpflow/test_mean_functions.py | joelberkeley/GPflow | 78230b98f57c64b5ee2932ea0d2752eb9ff102ce | ["Apache-2.0"] | null | null | null |
# Copyright 2017 the GPflow authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, Sequence, Type
import numpy as np
import pytest
from numpy.testing import assert_allclose
import gpflow
from gpflow.base import TensorType
from gpflow.config import default_int
from gpflow.inducing_variables import InducingPoints
from gpflow.mean_functions import (
Additive,
Constant,
Linear,
MeanFunction,
Product,
SwitchedMeanFunction,
Zero,
)
rng = np.random.RandomState(99021)
class Datum:
input_dim, output_dim = 3, 2
N, Ntest, M = 20, 30, 10
_mean_functions = [
Zero(),
Linear(
A=rng.randn(Datum.input_dim, Datum.output_dim),
b=rng.randn(Datum.output_dim, 1).reshape(-1),
),
Constant(c=rng.randn(Datum.output_dim, 1).reshape(-1)),
]
@pytest.mark.parametrize("mean_function_1", _mean_functions)
@pytest.mark.parametrize("mean_function_2", _mean_functions)
@pytest.mark.parametrize("operation", ["+", "*"])
def test_mean_functions_output_shape(
mean_function_1: MeanFunction, mean_function_2: MeanFunction, operation: str
) -> None:
"""
Test the output shape for basic and compositional mean functions, also
check that the combination of mean functions returns the correct class
"""
X = np.random.randn(Datum.N, Datum.input_dim)
Y = mean_function_1(X)
# basic output shape check
assert Y.shape in [(Datum.N, Datum.output_dim), (Datum.N, 1)]
# composed mean function output shape check
if operation == "+":
mean_composed = mean_function_1 + mean_function_2
elif operation == "*":
mean_composed = mean_function_1 * mean_function_2
else:
        raise NotImplementedError
Y_composed = mean_composed(X)
assert Y_composed.shape in [(Datum.N, Datum.output_dim), (Datum.N, 1)]
@pytest.mark.parametrize("mean_function_1", _mean_functions)
@pytest.mark.parametrize("mean_function_2", _mean_functions)
@pytest.mark.parametrize("operation", ["+", "*"])
def test_mean_functions_composite_type(
mean_function_1: MeanFunction, mean_function_2: MeanFunction, operation: str
) -> None:
if operation == "+":
mean_composed = mean_function_1 + mean_function_2
assert isinstance(mean_composed, Additive)
elif operation == "*":
mean_composed = mean_function_1 * mean_function_2
assert isinstance(mean_composed, Product)
else:
        raise NotImplementedError
_linear_functions = [
Linear(
A=rng.randn(Datum.input_dim, Datum.output_dim),
b=rng.randn(Datum.output_dim, 1).reshape(-1),
)
for _ in range(3)
]
# Append inverse of first Linear mean function in _linear_functions
_linear_functions.append(Linear(A=-1.0 * _linear_functions[0].A, b=-1.0 * _linear_functions[0].b))
_constant_functions = [Constant(c=rng.randn(Datum.output_dim, 1).reshape(-1)) for _ in range(3)]
# Append inverse of first Constant mean function in _constant_functions
_constant_functions.append(Constant(c=-1.0 * _constant_functions[0].c))
def _create_GPR_model_with_bias(
X: TensorType, Y: TensorType, mean_function: MeanFunction
) -> gpflow.models.GPR:
return gpflow.models.GPR(
(X, Y), mean_function=mean_function, kernel=gpflow.kernels.Bias(Datum.input_dim)
)
@pytest.mark.parametrize("mean_functions", [_linear_functions, _constant_functions])
def test_mean_functions_distributive_property(mean_functions: Sequence[MeanFunction]) -> None:
"""
Tests that distributive property of addition and multiplication holds for mean functions
(both Constant and Linear): A * (B + C) = A * B + A * C
"""
X, Y = rng.randn(Datum.N, Datum.input_dim), rng.randn(Datum.N, Datum.output_dim)
Xtest = rng.randn(30, Datum.input_dim)
A, B, C = mean_functions[0], mean_functions[1], mean_functions[2]
lhs = Product(A, Additive(B, C)) # A * (B + C)
rhs = Additive(Product(A, B), Product(A, C)) # A * B + A * C
model_lhs = _create_GPR_model_with_bias(X, Y, mean_function=lhs)
model_rhs = _create_GPR_model_with_bias(X, Y, mean_function=rhs)
mu_lhs, var_lhs = model_lhs.predict_f(Xtest)
mu_rhs, var_rhs = model_rhs.predict_f(Xtest)
assert_allclose(mu_lhs, mu_rhs)
assert_allclose(var_lhs, var_rhs)
@pytest.mark.parametrize("mean_functions", [_linear_functions, _constant_functions])
def test_mean_functions_A_minus_A_equals_zero(mean_functions: Sequence[MeanFunction]) -> None:
"""
Tests that the addition the inverse of a mean function to itself is equivalent to having a
Zero mean function: A + (-A) = 0
"""
X, Y = rng.randn(Datum.N, Datum.input_dim), rng.randn(Datum.N, Datum.output_dim)
Xtest = rng.randn(30, Datum.input_dim)
A, A_inverse = mean_functions[0], mean_functions[-1]
lhs = Additive(A, A_inverse) # A + (-A)
rhs = Zero() # 0
model_lhs = _create_GPR_model_with_bias(X, Y, mean_function=lhs)
model_rhs = _create_GPR_model_with_bias(X, Y, mean_function=rhs)
mu_lhs, var_lhs = model_lhs.predict_f(Xtest)
mu_rhs, var_rhs = model_rhs.predict_f(Xtest)
assert_allclose(mu_lhs, mu_rhs)
assert_allclose(var_lhs, var_rhs)
@pytest.mark.parametrize("mean_functions", [_linear_functions])
def test_linear_mean_functions_associative_property(mean_functions: Sequence[MeanFunction]) -> None:
"""
Tests that associative property of addition holds for linear mean functions:
A + (B + (-A)) = B = (A + B) + (-A)
"""
X, Y = rng.randn(Datum.N, Datum.input_dim), rng.randn(Datum.N, Datum.output_dim)
Xtest = rng.randn(30, Datum.input_dim)
A, B, A_inverse = mean_functions[0], mean_functions[1], mean_functions[-1]
lhs = Additive(A, Additive(B, A_inverse)) # A + (B + (-A))
rhs = Additive(Additive(A, B), A_inverse) # (A + B) + (-A)
model_lhs = _create_GPR_model_with_bias(X, Y, mean_function=lhs)
model_b = _create_GPR_model_with_bias(X, Y, mean_function=B)
model_rhs = _create_GPR_model_with_bias(X, Y, mean_function=rhs)
mu_lhs, var_lhs = model_lhs.predict_f(Xtest)
mu_b, var_b = model_b.predict_f(Xtest)
mu_rhs, var_rhs = model_rhs.predict_f(Xtest)
assert_allclose(mu_lhs, mu_b)
assert_allclose(var_lhs, var_b)
assert_allclose(mu_b, mu_rhs)
assert_allclose(var_b, var_rhs)
@pytest.mark.parametrize("N, D", [[10, 3]])
def test_switched_mean_function(N: int, D: int) -> None:
"""
Test for the SwitchedMeanFunction.
"""
X = np.hstack([rng.randn(N, D), 1.0 * rng.randint(0, 2, N).reshape(-1, 1)])
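    # The last column of X is a 0/1 label (stored as float) that
    # SwitchedMeanFunction uses to pick which of its mean functions
    # handles each row.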
zeros, ones = Constant(np.zeros(1)), Constant(np.ones(1))
switched_mean = SwitchedMeanFunction([zeros, ones])
np_list = np.array([0.0, 1.0])
result_ref = (np_list[X[:, D].astype(default_int())]).reshape(-1, 1)
result = switched_mean(X)
assert_allclose(result, result_ref)
def test_bug_277_regression() -> None:
"""
See github issue #277. This is a regression test.
"""
model1, model2 = Linear(), Linear()
assert model1.b.numpy() == model2.b.numpy()
model2.b.assign([1.0])
assert not model1.b.numpy() == model2.b.numpy()
_model_classes = [
gpflow.models.GPR,
gpflow.models.SGPR,
gpflow.models.GPRFITC,
gpflow.models.SVGP,
gpflow.models.VGP,
gpflow.models.GPMC,
gpflow.models.SGPMC,
]
@pytest.mark.parametrize("model_class", _model_classes)
def test_models_with_mean_functions_changes(model_class: Type[Any]) -> None:
"""
Simply check that all models have a higher prediction with a constant mean
function than with a zero mean function.
For compositions of mean functions check that multiplication/ addition of
a constant results in a higher prediction, whereas addition of zero/
mutliplication with one does not.
"""
data = rng.randn(Datum.N, Datum.input_dim), rng.randn(Datum.N, 1)
Xnew = rng.randn(Datum.Ntest, Datum.input_dim)
inducing_variable = InducingPoints(Z=rng.randn(Datum.M, Datum.input_dim))
kernel = gpflow.kernels.Matern32()
likelihood = gpflow.likelihoods.Gaussian()
zero_mean = Zero()
non_zero_mean = Constant(c=np.ones(1) * 10)
if model_class in [gpflow.models.GPR]:
model_zero_mean = model_class(data, kernel=kernel, mean_function=zero_mean)
model_non_zero_mean = model_class(data, kernel=kernel, mean_function=non_zero_mean)
elif model_class in [gpflow.models.VGP]:
model_zero_mean = model_class(
data, likelihood=likelihood, kernel=kernel, mean_function=zero_mean
)
model_non_zero_mean = model_class(
data, likelihood=likelihood, kernel=kernel, mean_function=non_zero_mean
)
elif model_class in [gpflow.models.SVGP]:
model_zero_mean = model_class(
kernel=kernel,
likelihood=likelihood,
inducing_variable=inducing_variable,
mean_function=zero_mean,
)
model_non_zero_mean = model_class(
kernel=kernel,
likelihood=likelihood,
inducing_variable=inducing_variable,
mean_function=non_zero_mean,
)
elif model_class in [gpflow.models.SGPR, gpflow.models.GPRFITC]:
model_zero_mean = model_class(
data,
kernel=kernel,
inducing_variable=inducing_variable,
mean_function=zero_mean,
)
model_non_zero_mean = model_class(
data,
kernel=kernel,
inducing_variable=inducing_variable,
mean_function=non_zero_mean,
)
elif model_class in [gpflow.models.SGPMC]:
model_zero_mean = model_class(
data,
kernel=kernel,
likelihood=likelihood,
inducing_variable=inducing_variable,
mean_function=zero_mean,
)
model_non_zero_mean = model_class(
data,
kernel=kernel,
likelihood=likelihood,
inducing_variable=inducing_variable,
mean_function=non_zero_mean,
)
elif model_class in [gpflow.models.GPMC]:
model_zero_mean = model_class(
data, kernel=kernel, likelihood=likelihood, mean_function=zero_mean
)
model_non_zero_mean = model_class(
data, kernel=kernel, likelihood=likelihood, mean_function=non_zero_mean
)
else:
        raise NotImplementedError
mu_zero, var_zero = model_zero_mean.predict_f(Xnew)
mu_non_zero, var_non_zero = model_non_zero_mean.predict_f(Xnew)
# predictive variance remains unchanged after modifying mean function
assert np.all(var_zero.numpy() == var_non_zero.numpy())
# predictive mean changes after modifying mean function
assert not np.all(mu_zero.numpy() == mu_non_zero.numpy())
| 35.971246 | 100 | 0.69198 |
a888213ab4b24637d6b4c02f68e343101bb85bd1 | 6,863 | py | Python | Practicas/Practica_08/codigoFuente/src/relativos.py | CodeRevenge/Proyecto-seminario-traductores-i | a5c6a500a6be8d5a1b24b4ecacd73e6f67a34041 | ["MIT"] | null | null | null | Practicas/Practica_08/codigoFuente/src/relativos.py | CodeRevenge/Proyecto-seminario-traductores-i | a5c6a500a6be8d5a1b24b4ecacd73e6f67a34041 | ["MIT"] | null | null | null | Practicas/Practica_08/codigoFuente/src/relativos.py | CodeRevenge/Proyecto-seminario-traductores-i | a5c6a500a6be8d5a1b24b4ecacd73e6f67a34041 | ["MIT"] | null | null | null |
from baseconvert import base
from bitstring import Bits
from src.crearContLoc import ContadorLocalidades
from src.funcionalidades import Funcionalidad
class Relativos(ContadorLocalidades):
def __init__(self):
ContadorLocalidades.__init__(self)
Funcionalidad.__init__(self)
self.RELATIVO = 'REL'
self.RELATIVO_9 = 'REL9'
self.LIMITE_MAX_REL8 = 127
self.LIMITE_MIN_REL8 = -128
self.LIMITE_MAX_REL16 = 32767
self.LIMITE_MIN_REL16 = -32768
self.LIMITE_REL9 = 255
self.REGISTROS = ['A','B','D','X','Y','SP']
self.direccionesRelativos = []
self.direccionesRelativos9 = []
    def esRelativo(self, nemonico):
        return nemonico[2] in (self.RELATIVO, self.RELATIVO_9)
    def esEtiqueta(self, operador):
        # A label has no base prefix ($, %, @) and does not parse as a
        # decimal number. (The original chained the prefix checks with `or`,
        # which is always true.)
        if operador[0] not in ('$', '%', '@'):
            try:
                base(operador, 10, string=True)
            except ValueError:
                return True
        return False
def asignarMemoriaRelativo(self, nemonico):
codop = nemonico[3]
codop += "".zfill(int(nemonico[4]))
return codop
def asignarMemoriaRelativo_REL9(self):
return '040000'
    def validarOperadoresRel(self, instruccion):
        return len(instruccion[2]) == 1
    def validarOperadoresRel_9(self, instruccion):
        return len(instruccion[2]) == 2
def tipoRelativo(self, instruccion):
if instruccion == self.RELATIVO:
return self.RELATIVO
elif instruccion == self.RELATIVO_9:
return self.RELATIVO_9
else:
return False
    def verificarRelativos(self, instruccion):
        if len(instruccion[2][3]) == 2:
            return self.verificarRel8(instruccion)
        elif len(instruccion[2][3]) == 4:
            return self.verificarRel16(instruccion)
        else:
            return False
def verificarRel8(self, instruccion):
operador = instruccion[3][2][0]
id = self.existeIdentificador(operador)
if id[0]:
op = id[1][1]
sigPos = self.obtenerSiguientePosicion(instruccion)
op = Bits(hex=op).int
sigPos = Bits(hex=sigPos).int
res = op - sigPos
if res <= self.LIMITE_MAX_REL8 and res >= self.LIMITE_MIN_REL8:
op = Bits(int=res,length=8)
return instruccion[1][0:2] + op.hex.upper()
else:
                print('The mnemonic {} is out of range -> {}'.format(instruccion[2][0],res))
else:
operador = self.verificarBase(operador)
sigPos = self.obtenerSiguientePosicion(instruccion)
operador = Bits(hex=operador).int
sigPos = Bits(hex=sigPos).int
res = operador - sigPos
if res <= self.LIMITE_MAX_REL8 and res >= self.LIMITE_MIN_REL8:
op = Bits(int=res,length=8)
return instruccion[1][0:2] + op.hex.upper()
else:
                print('The mnemonic {} is out of range -> {}'.format(instruccion[2][0],res))
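    # Worked example (illustrative): a branch whose operand resolves to
    # address $2010 while the next instruction sits at $2012 yields the
    # 8-bit offset 0x2010 - 0x2012 = -2, encoded as 'FE':
    #   Bits(hex='2010').int - Bits(hex='2012').int   # -2
    #   Bits(int=-2, length=8).hex.upper()            # 'FE'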
def verificarRel16(self, instruccion):
operador = instruccion[3][2][0]
id = self.existeIdentificador(operador)
if id[0]:
op = id[1][1]
sigPos = self.obtenerSiguientePosicion(instruccion)
op = Bits(hex=op).int
sigPos = Bits(hex=sigPos).int
res = op - sigPos
if res <= self.LIMITE_MAX_REL16 and res >= self.LIMITE_MIN_REL16:
op = Bits(int=res,length=16)
return instruccion[1][0:4] + op.hex.upper()
else:
                print('The mnemonic {} is out of range -> {}'.format(instruccion[2][0],res))
else:
operador = self.verificarBase(operador)
sigPos = self.obtenerSiguientePosicion(instruccion)
res = Bits(hex=operador).int - Bits(hex=sigPos).int
if res <= self.LIMITE_MAX_REL16 and res >= self.LIMITE_MIN_REL16:
op = Bits(int=res,length=16)
return instruccion[1][0:4] + op.hex.upper()
else:
                print('The mnemonic {} is out of range -> {}'.format(instruccion[2][0],res))
def verificarRelativos_9(self, instruccion):
registro = instruccion[3][2][0]
operador = instruccion[3][2][1]
if registro in self.REGISTROS:
id = self.existeIdentificador(operador)
if id[0]:
op = id[1][1]
sigPos = self.obtenerSiguientePosicion(instruccion)
op = Bits(hex=op).int
sigPos = Bits(hex=sigPos).int
res = op - sigPos
if abs(res) <= self.LIMITE_REL9:
op = Bits(int=res,length=12)
nemonico = self.encontrarNemonicoRel9(instruccion, res)
return instruccion[1][0:2] + nemonico[4] + op.hex[1:].upper()
else:
                    print('The mnemonic {} is out of range -> {}'.format(instruccion[2][0],res))
else:
operador = self.verificarBase(operador)
sigPos = self.obtenerSiguientePosicion(instruccion)
res = Bits(hex=operador).int - Bits(hex=sigPos).int
if abs(res) <= self.LIMITE_REL9:
op = Bits(int=res,length=12)
nemonico = self.encontrarNemonicoRel9(instruccion, res)
return instruccion[1][0:2] + nemonico[4] + op.hex[1:].upper()
else:
                    print('The mnemonic {} is out of range -> {}'.format(instruccion[2][0],res))
else:
print("El mnemonico {} esperaba un nombre de registro como primer parametro y recibió {}".format(instruccion[2][0], registro))
def encontrarNemonicoRel9(self, instruccion, operador):
indReg = self.REGISTROS.index(instruccion[3][2][0])
if operador >= 0:
return instruccion[2][:6][indReg]
else:
return instruccion[2][6:12][indReg]
def obtenerSiguientePosicion(self, instruccion):
actual = int(base(instruccion[0], 16, 10, string=True))
agregado = len(instruccion[1])/2
a = base(actual+agregado,10,16,string=True).rstrip('.')
return a
def existeIdentificador(self, operador):
hex = operador[0] == self.INDICADOR_HEXADECIMAL
bin = operador[0] == self.INDICADOR_BINARIO
oct = operador[0] == self.INDICADOR_OCTAL
inm = operador[0] == '#'
if hex or bin or oct or inm:
operador = operador[1:]
for e in self.listaTABSIM:
if operador == e[0]:
return [True,e]
return [False]
| 39.901163 | 138 | 0.567827 |
e5bdd3d4ecb3169c3e2117c8ae3655100c1cd695 | 7,561 | py | Python | src/sentry/utils/sdk.py | uandco/sentry | 5b8d45cb71c6617dac8e64265848623fbfce9c99 | ["BSD-3-Clause"] | 4 | 2019-05-27T13:55:07.000Z | 2021-03-30T07:05:09.000Z | src/sentry/utils/sdk.py | uandco/sentry | 5b8d45cb71c6617dac8e64265848623fbfce9c99 | ["BSD-3-Clause"] | 196 | 2019-06-10T08:34:10.000Z | 2022-02-22T01:26:13.000Z | src/sentry/utils/sdk.py | uandco/sentry | 5b8d45cb71c6617dac8e64265848623fbfce9c99 | ["BSD-3-Clause"] | 1 | 2020-08-10T07:55:40.000Z | 2020-08-10T07:55:40.000Z |
from __future__ import absolute_import, print_function
import inspect
import json
import logging
import six
import zlib
from django.conf import settings
from django.utils.functional import cached_property
import sentry_sdk
from sentry_sdk.client import get_options
from sentry_sdk.transport import Transport, make_transport
from sentry_sdk.consts import VERSION as SDK_VERSION
from sentry_sdk.utils import Auth, capture_internal_exceptions
from sentry_sdk.utils import logger as sdk_logger
from sentry.utils import metrics
from sentry.utils.rust import RustInfoIntegration
UNSAFE_FILES = ('sentry/event_manager.py', 'sentry/tasks/process_buffer.py', )
# Reexport sentry_sdk just in case we ever have to write another shim like we
# did for raven
from sentry_sdk import configure_scope, push_scope, capture_message, capture_exception # NOQA
def is_current_event_safe():
"""
Tests the current stack for unsafe locations that would likely cause
recursion if an attempt to send to Sentry was made.
"""
for _, filename, _, _, _, _ in inspect.stack():
if filename.endswith(UNSAFE_FILES):
return False
return True
def get_project_key():
from sentry.models import ProjectKey
if not settings.SENTRY_PROJECT:
return None
key = None
try:
if settings.SENTRY_PROJECT_KEY is not None:
key = ProjectKey.objects.get(
id=settings.SENTRY_PROJECT_KEY,
project=settings.SENTRY_PROJECT,
)
else:
key = ProjectKey.get_default(settings.SENTRY_PROJECT)
except Exception as exc:
# if the relation fails to query or is missing completely, lets handle
# it gracefully
sdk_logger.warn('internal-error.unable-to-fetch-project', extra={
'project_id': settings.SENTRY_PROJECT,
'project_key': settings.SENTRY_PROJECT_KEY,
'error_message': six.text_type(exc),
})
if key is None:
sdk_logger.warn('internal-error.no-project-available', extra={
'project_id': settings.SENTRY_PROJECT,
'project_key': settings.SENTRY_PROJECT_KEY,
})
return key
class SentryInternalFilter(logging.Filter):
def filter(self, record):
# TODO(mattrobenolt): handle an upstream Sentry
metrics.incr('internal.uncaptured.logs', skip_internal=False)
return is_current_event_safe()
def configure_sdk():
from sentry_sdk.integrations.logging import LoggingIntegration
from sentry_sdk.integrations.django import DjangoIntegration
from sentry_sdk.integrations.celery import CeleryIntegration
assert sentry_sdk.Hub.main.client is None
options = settings.SENTRY_SDK_CONFIG
internal_transport = InternalTransport()
upstream_transport = None
if options.get('dsn'):
upstream_transport = make_transport(get_options(options))
def capture_event(event):
# Make sure we log to upstream when available first
if upstream_transport is not None:
# TODO(mattrobenolt): Bring this back safely.
# from sentry import options
# install_id = options.get('sentry:install-id')
# if install_id:
# event.setdefault('tags', {})['install-id'] = install_id
upstream_transport.capture_event(event)
internal_transport.capture_event(event)
sentry_sdk.init(
integrations=[
DjangoIntegration(),
CeleryIntegration(),
LoggingIntegration(event_level=None),
RustInfoIntegration(),
],
transport=capture_event,
**options
)
def _create_noop_hub():
def transport(event):
with capture_internal_exceptions():
metrics.incr('internal.uncaptured.events', skip_internal=False)
sdk_logger.warn('internal-error.noop-hub')
return sentry_sdk.Hub(sentry_sdk.Client(transport=transport))
NOOP_HUB = _create_noop_hub()
del _create_noop_hub
class InternalTransport(Transport):
def __init__(self):
pass
@cached_property
def project_key(self):
return get_project_key()
@cached_property
def request_factory(self):
from django.test import RequestFactory
return RequestFactory()
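    # RequestFactory builds synthetic WSGI requests, so _capture_event below
    # can hand events to StoreView in-process without any network round-trip.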
def capture_event(self, event):
# Disable the SDK while processing our own events. This fixes some
# recursion issues when the view crashes without including any
# UNSAFE_FILES
#
        # NOTE: UNSAFE_FILES still exists because the hub does not follow the
        # execution flow into the celery job triggered by StoreView. In other
        # words, UNSAFE_FILES is used in case the celery job itself crashes
        # and that error is captured by the SDK.
with NOOP_HUB:
return self._capture_event(event)
def _capture_event(self, event):
with capture_internal_exceptions():
key = self.project_key
if key is None:
return
if not is_current_event_safe():
metrics.incr('internal.uncaptured.events', skip_internal=False)
sdk_logger.warn('internal-error.unsafe-stacktrace')
return
auth = Auth(
scheme="https",
host="localhost",
project_id=key.project_id,
public_key=key.public_key,
secret_key=key.secret_key,
client="sentry-python/%s" % SDK_VERSION
)
headers = {
'HTTP_X_SENTRY_AUTH': auth.to_header(),
'HTTP_CONTENT_ENCODING': 'deflate'
}
request = self.request_factory.post(
'/api/{}/store/'.format(key.project_id),
data=zlib.compress(json.dumps(event).encode('utf8')),
content_type='application/octet-stream',
**headers
)
from sentry.web.api import StoreView
resp = StoreView.as_view()(
request,
project_id=six.text_type(key.project_id),
)
if resp.status_code != 200:
sdk_logger.warn('internal-error.invalid-response', extra={
'project_id': settings.SENTRY_PROJECT,
'project_key': settings.SENTRY_PROJECT_KEY,
'status_code': resp.status_code,
})
class RavenShim(object):
"""Wrapper around sentry-sdk in case people are writing their own
integrations that rely on this being here."""
def captureException(self, exc_info=None, **kwargs):
with sentry_sdk.push_scope() as scope:
self._kwargs_into_scope(scope, **kwargs)
return capture_exception(exc_info)
def captureMessage(self, msg, **kwargs):
with sentry_sdk.push_scope() as scope:
self._kwargs_into_scope(scope, **kwargs)
return capture_message(msg)
def tags_context(self, tags):
with sentry_sdk.configure_scope() as scope:
for k, v in tags.items():
scope.set_tag(k, v)
def _kwargs_into_scope(self, scope, extra=None, tags=None,
fingerprint=None, request=None):
for key, value in (extra.items() if extra else ()):
scope.set_extra(key, value)
for key, value in (tags.items() if tags else ()):
scope.set_tag(key, value)
if fingerprint is not None:
scope.fingerprint = fingerprint
| 33.30837 | 94 | 0.642904 |
384ec5faa303886e06a0914818d2115c77a0cdc9 | 394 | py | Python | modulo_2/cap2-Decisoes/variaveis2.py | PPedriniHp/Fiap_on_Phyton | fe06cfc931cd038dfed8c14394336f69dc953926 | ["MIT"] | null | null | null | modulo_2/cap2-Decisoes/variaveis2.py | PPedriniHp/Fiap_on_Phyton | fe06cfc931cd038dfed8c14394336f69dc953926 | ["MIT"] | null | null | null | modulo_2/cap2-Decisoes/variaveis2.py | PPedriniHp/Fiap_on_Phyton | fe06cfc931cd038dfed8c14394336f69dc953926 | ["MIT"] | null | null | null |
responsavel = input("Enter the name of the person in charge: ")
funcionario = input("Enter the employee's name: ")
evento = input("Enter the name of the event: ")
valor = float(input("Enter the amount to be reimbursed: "))
print("I declare to Mr. " + responsavel + " that Mr. " + funcionario + " attended the event " +
      evento + " and spent R$ " + str(valor) + " on admission.")
| 56.285714 | 112 | 0.690355 |
8e6a3d41e523e8eedcfde98f6891c844b1c9696d | 8,891 | py | Python | torchelie/recipes/pix2pix.py | maxferrari/Torchelie | d133f227bebc3c4cbbb6167bd1fae815d2b5fa81 | ["MIT"] | null | null | null | torchelie/recipes/pix2pix.py | maxferrari/Torchelie | d133f227bebc3c4cbbb6167bd1fae815d2b5fa81 | ["MIT"] | null | null | null | torchelie/recipes/pix2pix.py | maxferrari/Torchelie | d133f227bebc3c4cbbb6167bd1fae815d2b5fa81 | ["MIT"] | null | null | null |
from typing import Tuple
import copy
import os
import torch
import torchelie as tch
import torchelie.utils as tu
from torchelie.recipes.gan import GANRecipe
from torchelie.transforms import MultiBranch
import torchvision.transforms as TF
import torchelie.loss.gan.standard as gan_loss
from torchelie.loss.gan.penalty import zero_gp
from torchelie.datasets import UnlabeledImages, Pix2PixDataset
from torchelie.datasets import SideBySideImagePairsDataset
from torchelie.models import *
import torch.nn as nn
import torch.nn.functional as F  # F.l1_loss is used in G_fun below
import torchelie.nn as tnn  # tnn.utils.net_to_equal_lr is used below
def get_dataset(dataset_specs: Tuple[str, str], img_size: int):
ty, path = dataset_specs
if ty == 'pix2pix':
return Pix2PixDataset('~/.torch',
path,
split='train',
download=True,
transform=TF.Compose([
TF.Resize(img_size),
TF.RandomResizedCrop(img_size,
scale=(0.9, 1)),
TF.RandomHorizontalFlip(),
]))
if ty == 'colorize':
return UnlabeledImages(
path,
TF.Compose([
TF.Resize(img_size),
TF.RandomCrop(img_size),
TF.RandomHorizontalFlip(),
MultiBranch([
TF.Compose([
TF.Grayscale(3),
TF.ToTensor(),
]),
TF.ToTensor(),
])
]))
if ty == 'inpainting':
return UnlabeledImages(
path,
TF.Compose([
TF.Resize(img_size),
TF.RandomCrop(img_size),
TF.RandomHorizontalFlip(),
TF.ToTensor(),
MultiBranch([
TF.Compose([
TF.RandomErasing(p=1, value=(1., 1., 0)),
]),
TF.Compose([])
])
]))
if ty == 'edges':
return UnlabeledImages(
path,
TF.Compose([
TF.Resize(img_size),
TF.CenterCrop(img_size),
TF.RandomHorizontalFlip(),
MultiBranch([
TF.Compose([
tch.transforms.Canny(),
TF.Grayscale(3),
TF.ToTensor(),
]),
TF.Compose([
TF.ToTensor(),
])
])
]))
if ty == 'pairs':
return SideBySideImagePairsDataset(
path,
TF.Compose([
TF.Resize(img_size),
TF.CenterCrop(img_size),
TF.RandomHorizontalFlip(),
TF.ToTensor(),
]))
    assert False, "dataset type not understood"
@tu.experimental
def train(rank, world_size):
from argparse import ArgumentParser
parser = ArgumentParser()
parser.add_argument('--dataset', required=True, type=lambda x: x.split(':'))
parser.add_argument('--r0-gamma', type=float)
parser.add_argument('--D-type', choices=['patch', 'unet'], default='patch')
parser.add_argument('--l1-gain', default=0, type=float)
parser.add_argument('--batch-size', default=4, type=int)
parser.add_argument('--from-ckpt')
opts = parser.parse_args()
G = pix2pix_256().to_instance_norm().to_equal_lr()
G_polyak = copy.deepcopy(G)
if opts.D_type == 'patch':
D = residual_patch286()
D.set_input_specs(6)
D.to_equal_lr()
r0_gamma = 0.1
else:
D = UNet([32, 64, 128, 256, 512], 1)
D.set_decoder_num_layers(1)
D.set_encoder_num_layers(1)
D.set_input_specs(6)
D.to_bilinear_sampling()
D.leaky()
tnn.utils.net_to_equal_lr(D, leak=0.2)
r0_gamma = 0.00001
r0_gamma = opts.r0_gamma or r0_gamma
if rank == 0:
print(G)
print(D)
G = torch.nn.parallel.DistributedDataParallel(G.to(rank), [rank], rank)
D = torch.nn.parallel.DistributedDataParallel(D.to(rank), [rank], rank)
ds = get_dataset(opts.dataset, 256)
ds = torch.utils.data.DataLoader(ds,
opts.batch_size,
num_workers=4,
shuffle=True,
pin_memory=True,
drop_last=True)
def G_fun(batch) -> dict:
x, y = batch
G.train()
D.train()
out = G(x * 2 - 1)
with D.no_sync():
loss = gan_loss.generated(D(torch.cat([out, x], dim=1) * 2 - 1))
loss += opts.l1_gain * F.l1_loss(out, y)
loss.backward()
return {'G_loss': loss.item()}
class GradientPenalty:
@tu.experimental
def __init__(self, gamma):
self.gamma = gamma
self.iters = 0
self.last_norm = float('nan')
def __call__(self, model, real, fake):
if self.iters < 100 or self.iters % 4 == 0:
real = real.detach()
fake = fake.detach()
gp, g_norm = zero_gp(model, real, fake)
# Sync the gradient on the next backward
if torch.any(torch.isnan(gp)):
gp.detach_()
else:
(4 * self.gamma * gp).backward()
self.last_norm = g_norm
self.iters += 1
return self.last_norm
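    # Lazy regularization: after a 100-iteration warm-up the zero-centered
    # gradient penalty is computed only every 4th step and scaled by 4, so
    # the effective penalty strength stays roughly unchanged.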
gradient_penalty = GradientPenalty(r0_gamma)
def D_fun(batch) -> dict:
G.train()
D.train()
x, y = batch
with G.no_sync():
with torch.no_grad():
out = G(x * 2 - 1)
fake = torch.cat([out, x], dim=1) * 2 - 1
real = torch.cat([y, x], dim=1) * 2 - 1
with D.no_sync():
prob_fake = D(fake)
fake_correct = prob_fake.detach().lt(0).int().eq(1).sum()
fake_loss = gan_loss.fake(prob_fake)
fake_loss.backward()
with D.no_sync():
g_norm = gradient_penalty(D, real, fake)
prob_real = D(real)
real_correct = prob_real.detach().gt(0).int().eq(1).sum()
real_loss = gan_loss.real(prob_real)
real_loss.backward()
return {
'out': out.detach(),
'fake_loss': fake_loss.item(),
'prob_fake': torch.sigmoid(prob_fake).mean().item(),
'fake_heatmap': torch.sigmoid(prob_fake.detach()),
'prob_real': torch.sigmoid(prob_real).mean().item(),
'real_loss': real_loss.item(),
'g_norm': g_norm,
'D-correct': (fake_correct + real_correct) / (2 * prob_fake.numel())
}
def test_fun(batch):
x, _ = batch
G_polyak.train()
out = G_polyak(x * 2 - 1)
return {'out': out.detach()}
tag = f'pix2pix_{opts.dataset[0]}:{os.path.basename(opts.dataset[1])}'
recipe = GANRecipe(G,
D,
G_fun,
D_fun,
test_fun,
ds,
checkpoint=tag if rank == 0 else None,
visdom_env=tag if rank == 0 else None)
recipe.register('G_polyak', G_polyak)
recipe.callbacks.add_callbacks([
tch.callbacks.Optimizer(
tch.optim.RAdamW(D.parameters(),
lr=2e-3,
betas=(0., 0.99),
weight_decay=0)),
tch.callbacks.Log('out', 'out'),
tch.callbacks.Log('batch.0', 'x'),
tch.callbacks.Log('batch.1', 'y'),
tch.callbacks.Log('fake_heatmap', 'fake_heatmap'),
tch.callbacks.WindowedMetricAvg('fake_loss', 'fake_loss'),
tch.callbacks.WindowedMetricAvg('real_loss', 'real_loss'),
tch.callbacks.WindowedMetricAvg('prob_fake', 'prob_fake'),
tch.callbacks.WindowedMetricAvg('prob_real', 'prob_real'),
tch.callbacks.WindowedMetricAvg('D-correct', 'D-correct'),
tch.callbacks.Log('g_norm', 'g_norm')
])
recipe.G_loop.callbacks.add_callbacks([
tch.callbacks.Optimizer(
tch.optim.RAdamW(G.parameters(),
lr=2e-3,
betas=(0., 0.99),
weight_decay=0)),
tch.callbacks.Polyak(G.module, G_polyak),
])
recipe.test_loop.callbacks.add_callbacks([
tch.callbacks.Log('out', 'polyak_out'),
])
recipe.to(rank)
if opts.from_ckpt is not None:
recipe.load_state_dict(torch.load(opts.from_ckpt, map_location='cpu'))
recipe.run(200)
if __name__ == '__main__':
tu.parallel_run(train)
| 34.328185 | 80 | 0.499606 |
dc478c811a1cd0ae79aba1312a88de5d31cffaea | 448 | py | Python | main.py | Glados59/LAU-Net | 928955e12a6786a84949336c7dc07b90f2cfffe0 | ["Apache-2.0"] | 26 | 2021-03-22T15:57:28.000Z | 2022-03-30T03:07:09.000Z | main.py | Glados59/LAU-Net | 928955e12a6786a84949336c7dc07b90f2cfffe0 | ["Apache-2.0"] | 4 | 2021-07-06T07:37:20.000Z | 2022-01-07T08:02:51.000Z | main.py | Glados59/LAU-Net | 928955e12a6786a84949336c7dc07b90f2cfffe0 | ["Apache-2.0"] | 4 | 2021-08-23T05:49:11.000Z | 2022-03-08T00:54:52.000Z |
import utility
import data
import model
from option import args
from checkpoint import Checkpoint
from trainer import Trainer
import os
os.environ["CUDA_VISIBLE_DEVICES"] = "0"
utility.set_seed(args.seed)
checkpoint = Checkpoint(args)
if checkpoint.ok:
loader = data.Data(args)
model = model.Model(args, checkpoint)
t = Trainer(args, loader, model, None, checkpoint)
t.test()
checkpoint.done()
print("testing complete")
| 20.363636 | 54 | 0.736607 |
8a88a03ca6f8e015671056f42484f88103a3aa6b | 1,938 | py | Python | ExtraModules/phonenumbers/data/region_BS.py | chirantana-trust/web-chirantana | 18e2fb105fc5a9f55586c55096780c062ad9f2bc | ["Unlicense"] | 1 | 2015-01-31T01:17:14.000Z | 2015-01-31T01:17:14.000Z | ExtraModules/phonenumbers/data/region_BS.py | chirantana-trust/web-chirantana | 18e2fb105fc5a9f55586c55096780c062ad9f2bc | ["Unlicense"] | null | null | null | ExtraModules/phonenumbers/data/region_BS.py | chirantana-trust/web-chirantana | 18e2fb105fc5a9f55586c55096780c062ad9f2bc | ["Unlicense"] | null | null | null |
"""Auto-generated file, do not edit by hand. BS metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
PHONE_METADATA_BS = PhoneMetadata(id='BS', country_code=1, international_prefix='011',
general_desc=PhoneNumberDesc(national_number_pattern='[2589]\\d{9}', possible_number_pattern='\\d{7}(?:\\d{3})?'),
fixed_line=PhoneNumberDesc(national_number_pattern='242(?:3(?:02|[236][1-9]|4[0-24-9]|5[0-68]|7[3467]|8[0-4]|9[2-467])|461|502|6(?:0[1-3]|12|7[67]|8[78]|9[89])|7(?:02|88))\\d{4}', possible_number_pattern='\\d{7}(?:\\d{3})?', example_number='2423456789'),
mobile=PhoneNumberDesc(national_number_pattern='242(?:3(?:5[79]|[79]5)|4(?:[2-4][1-9]|5[1-8]|6[2-8]|7\\d|81)|5(?:2[45]|3[35]|44|5[1-9]|65|77)|6[34]6|727)\\d{4}', possible_number_pattern='\\d{10}', example_number='2423591234'),
toll_free=PhoneNumberDesc(national_number_pattern='242300\\d{4}|8(?:00|44|55|66|77|88)[2-9]\\d{6}', possible_number_pattern='\\d{10}', example_number='8002123456'),
premium_rate=PhoneNumberDesc(national_number_pattern='900[2-9]\\d{6}', possible_number_pattern='\\d{10}', example_number='9002123456'),
shared_cost=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
personal_number=PhoneNumberDesc(national_number_pattern='5(?:00|33|44|66|77|88)[2-9]\\d{6}', possible_number_pattern='\\d{10}', example_number='5002345678'),
voip=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
pager=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
uan=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
voicemail=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
no_international_dialling=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
national_prefix='1',
national_prefix_for_parsing='1',
leading_digits='242')
| 96.9 | 258 | 0.732198 |
527d2b739e2dbb064d8b197e3e775eab16bd1c61 | 9,467 | py | Python | 3601-2019-Herrick/modules/Analysis.AnalyticsModule/sascyber/configs/base.py | brgaines/sas-global-forum-2019 | e5dcad6f5ac0cc800a2e0aeb25ee770ba0048d99 | ["Apache-2.0"] | 37 | 2019-01-31T19:42:24.000Z | 2021-12-07T09:35:03.000Z | 3601-2019-Herrick/modules/Analysis.AnalyticsModule/sascyber/configs/base.py | brgaines/sas-global-forum-2019 | e5dcad6f5ac0cc800a2e0aeb25ee770ba0048d99 | ["Apache-2.0"] | 6 | 2019-02-08T20:33:27.000Z | 2019-04-30T12:13:38.000Z | 3601-2019-Herrick/modules/Analysis.AnalyticsModule/sascyber/configs/base.py | brgaines/sas-global-forum-2019 | e5dcad6f5ac0cc800a2e0aeb25ee770ba0048d99 | ["Apache-2.0"] | 46 | 2019-02-08T19:17:22.000Z | 2021-01-18T01:04:18.000Z |
# encoding: utf-8
#
# Copyright SAS Institute
#
# Licensed under the Apache License, Version 2.0 (the License);
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
'''
BaseConfig
A base class to manage various configuration settings used in sascyber.
Parameters
----------
Configuration JSON: taken as an initialization parameter in the class.
Keys Needed in the configuration JSON:
analytics_type : One of {nf, wp, au, epp, dns, dhcp, fw}
custom_analytics_allowed
custom_analytics_path
analyticsList
cas_port
cas_host
cas_authinfo : location of the authinfo file on the system.
cas_protocol : One of {'cas', 'http'} where 'cas' is the binary CAS protocol, and `http` is the REST protocol.
Returns
-------
a single BaseConfig object
'''
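# A minimal input JSON (hypothetical values) illustrating the keys above:
# {
#     "id": "task-001",
#     "analytics_type": "nf",
#     "analytics_config": "/opt/sascyber/analytics.json",
#     "cas_host": "cas.example.com",
#     "cas_port": 5570,
#     "cas_protocol": "cas",
#     "cas_authinfo": "~/.authinfo"
# }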
import json
import os
from pathlib import Path
from sascyber.utils.exceptions import SASCyberMissingConfig
cyber_keys = ['analytics_type', 'analytics_config', 'custom_analytics_allowed', 'custom_analytics_path',
'analyticsList', 'cas_port', 'cas_host', 'cas_authinfo', 'cas_protocol',
'swat_trace', 'swat_messages', 'ae_caslib', 'ev_caslib', 'ed_caslib', 'hist_caslib',
'models_caslib', 'train_caslib', 'lookups_caslib', 'se_caslib', 'queuing_server_host',
'queuing_server_port', 'queuing_server_user', 'queuing_server_password',
'elasticsearch_server_host', 'elasticsearch_server_port',
'resolution_server_host', 'resolution_server_port', 'resolution_server_ips_limit']
class BaseConfig(object):
def __init__(self, inputfile, logfile):
self._prop = None
self.load_input_json(inputfile)
self.load_log_json(logfile)
self.load_config_json()
def load_input_json(self, cfgfile):
if os.path.isfile(cfgfile):
with open(cfgfile) as json_file:
self._input = json.load(json_file)
else:
raise SASCyberMissingConfig(cfgfile)
def load_log_json(self, logfile):
if os.path.isfile(logfile):
with open(logfile) as json_file:
self._log_config = json.load(json_file)
else:
raise SASCyberMissingConfig(logfile)
def load_config_json(self):
cfgfile = self.get_property(self._input, "analytics_config")
if os.path.isfile(cfgfile):
with open(cfgfile) as json_file:
self._config = json.load(json_file)
else:
raise SASCyberMissingConfig(cfgfile)
def absolute_path(self, inpath):
pth = Path(inpath)
if pth.parts[0] == '..':
outpath = pth.resolve().as_posix()
elif pth.parts[0] == '~':
outpath = pth.expanduser().as_posix()
else:
outpath = pth.as_posix()
return outpath
def expand_directories(self, key, var, basepath):
if hasattr(var, 'items'):
for k, v in var.items():
if k == key:
var[k] = Path(basepath).joinpath(v).as_posix()
yield v
if isinstance(v, dict):
for result in self.expand_directories(key, v, basepath):
yield result
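    # expand_directories recursively walks a nested dict and rewrites every
    # value stored under `key` to be joined onto `basepath`, yielding the
    # original values as it goes.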
def get_property(self, cfg, property_name):
if property_name in cfg.keys():
self._prop = cfg[property_name]
else:
for key in cfg.keys():
val = cfg[key]
if isinstance(val, dict):
self.get_property(val, property_name)
return self._prop
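    # get_property recurses into nested dicts but reports the match through
    # self._prop, so callers must reset self._prop to None before each
    # lookup, as the accessors below do.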
def get_input_property(self, property_name):
self._prop = None
return self.get_property(self._input, property_name)
def get_config_property(self, property_name):
self._prop = None
return self.get_property(self._config, property_name)
# Runtime properties read from input
@property
def cas_host(self):
self._prop = None
return self.get_property(self._input, 'cas_host')
@property
def cas_port(self):
self._prop = None
return self.get_property(self._input, 'cas_port')
@property
def cas_protocol(self):
self._prop = None
return self.get_property(self._input, 'cas_protocol')
@property
def cas_authinfo(self):
self._prop = None
a_path = Path(self.get_property(
self._input, 'cas_authinfo')).expanduser()
try:
self._prop = a_path.resolve(strict=True)
except FileNotFoundError:
self._prop = None
return self._prop
@property
def queuing_server_host(self):
self._prop = None
return self.get_property(self._input, 'queuing_server_host')
@property
def queuing_server_port(self):
self._prop = None
return self.get_property(self._input, 'queuing_server_port')
@property
def queuing_server_user(self):
self._prop = None
return self.get_property(self._input, 'queuing_server_user')
@property
def queuing_server_password(self):
self._prop = None
return self.get_property(self._input, 'queuing_server_password')
@property
def resolution_server_host(self):
self._prop = None
return self.get_property(self._input, 'resolution_server_host')
@property
def resolution_server_port(self):
self._prop = None
return self.get_property(self._input, 'resolution_server_port')
@property
def elasticsearch_server_host(self):
self._prop = None
return self.get_property(self._input, 'elasticsearch_server_host')
@property
def elasticsearch_server_port(self):
self._prop = None
return self.get_property(self._input, 'elasticsearch_server_port')
# Config properties read from analytics_config
@property
def resolution_server_ips_limit(self):
self._prop = None
return self.get_property(self._config, 'resolution_server_ips_limit')
@property
def analytics_type(self):
self._prop = None
return self.get_property(self._config, 'analytics_type')
@property
def custom_analytics_allowed(self):
self._prop = None
return self.get_property(self._config, 'custom_analytics_allowed')
@property
def custom_analytics_path(self):
self._prop = None
a_path = Path(self.get_property(self._config, 'custom_analytics_path'))
try:
self._prop = a_path.resolve(strict=True)
except FileNotFoundError:
self._prop = f'CUSTOM ANALYTICS PATH DOES NOT EXIST: {a_path}'
return self._prop
@property
def filters(self):
self._prop = None
return self.get_property(self._config, 'filters')
@property
def swat_trace(self):
self._prop = None
return self.get_property(self._config, 'swat_trace')
@property
def swat_messages(self):
self._prop = None
return self.get_property(self._config, 'swat_messages')
@property
def ae_caslib(self):
self._prop = None
return self.get_property(self._config, 'ae_caslib')
@property
def ev_caslib(self):
self._prop = None
return self.get_property(self._config, 'ev_caslib')
@property
def ed_caslib(self):
self._prop = None
return self.get_property(self._config, 'ed_caslib')
@property
def hist_caslib(self):
self._prop = None
return self.get_property(self._config, 'hist_caslib')
@property
def models_caslib(self):
self._prop = None
return self.get_property(self._config, 'models_caslib')
@property
def train_caslib(self):
self._prop = None
return self.get_property(self._config, 'train_caslib')
@property
def lookups_caslib(self):
self._prop = None
return self.get_property(self._config, 'lookups_caslib')
@property
def se_caslib(self):
self._prop = None
return self.get_property(self._config, 'se_caslib')
@property
def loginfo(self):
return self._log_config
@property
def input_task(self):
task = self._input["id"]
if task is None:
task = "Finalizer"
return task
    @property
    def config_tasks(self):
        return self._config["tasks"]
    @property
    def analyticsList(self):
        # Not defined among the original accessors even though cyber_keys
        # lists it and the __main__ demo below reads it; assumed to live in
        # the analytics config like the other analytics settings.
        self._prop = None
        return self.get_property(self._config, 'analyticsList')
if __name__ == '__main__':
cfg = BaseConfig(os.environ["CYBER_ARGS"], os.environ["CYBER_LOG"])
print(f"analytics_type: {cfg.analytics_type}")
print(f"custom_analytics_allowed: {cfg.custom_analytics_allowed}")
print(f"custom_analytics_path: {cfg.custom_analytics_path}")
print(
f"analyticsList: {json.dumps(cfg.analyticsList, indent=4, sort_keys=True)}")
print(f"cas_host: {cfg.cas_host}")
print(f"cas_port: {cfg.cas_port}")
print(f"cas_protocol: {cfg.cas_protocol}")
print(f"cas_authinfo: {cfg.cas_authinfo}")
print(f"filters: {json.dumps(cfg.filters, indent=4, sort_keys=True)}")
print(f"loginfo: {json.dumps(cfg.loginfo, indent=4, sort_keys=True)}")
| 31.039344 | 110 | 0.650893 |
7a34170464590a8be7383701e8a86a8e27fbb024 | 1,446 | py | Python | API/v1/VDI/list.py | Alex4386/XenXenXenSe | c60e50f26a7c3b306ee3cbb140b3ad7f39c21d93 | ["MIT"] | 1 | 2021-04-23T08:56:05.000Z | 2021-04-23T08:56:05.000Z | API/v1/VDI/list.py | Alex4386/XenXenXenSe | c60e50f26a7c3b306ee3cbb140b3ad7f39c21d93 | ["MIT"] | null | null | null | API/v1/VDI/list.py | Alex4386/XenXenXenSe | c60e50f26a7c3b306ee3cbb140b3ad7f39c21d93 | ["MIT"] | null | null | null |
from http.client import RemoteDisconnected
from xmlrpc.client import Fault
from fastapi import APIRouter, HTTPException
from XenGarden.session import create_session
from XenGarden.VDI import VDI
from API.v1.model.vdi import VDLResponseModel
from API.v1.VDI.serialize import serialize
from app.settings import Settings
router = APIRouter()
@router.get("/{cluster_id}/vdi/list", response_model=VDLResponseModel)
async def vdi_list(cluster_id: str):
""" Get VDI by UUID """
try:
try:
session = create_session(
_id=cluster_id, get_xen_clusters=Settings.get_xen_clusters()
)
except KeyError as key_error:
raise HTTPException(
status_code=400, detail=f"{key_error} is not a valid path"
)
        vdis = VDI.get_all(session=session)
        __vdi_list = [serialize(vdi) for vdi in vdis]
if vdis is not None:
ret = dict(success=True, data=__vdi_list)
else:
ret = dict(success=False)
session.xenapi.session.logout()
return ret
except Fault as xml_rpc_error:
raise HTTPException(
status_code=int(xml_rpc_error.faultCode),
detail=xml_rpc_error.faultString,
)
except RemoteDisconnected as rd_error:
raise HTTPException(status_code=500, detail=rd_error.strerror)
| 29.510204 | 76 | 0.658368 |
56f3547a1d3d637949f7b63d686617b9f27b1872 | 46,976 | py | Python | Dialogue_lib/DialogueActor/liststructure.py | ZhenshengLee/zsLV | 0e2fb01fcc3f373c4b7be97c1c01ebdf15e4b9c3 | ["Apache-2.0"] | 3 | 2016-10-16T11:23:49.000Z | 2019-06-30T00:58:16.000Z | Dialogue_lib/DialogueActor/liststructure.py | ZhenshengLee/zsLV | 0e2fb01fcc3f373c4b7be97c1c01ebdf15e4b9c3 | ["Apache-2.0"] | null | null | null | Dialogue_lib/DialogueActor/liststructure.py | ZhenshengLee/zsLV | 0e2fb01fcc3f373c4b7be97c1c01ebdf15e4b9c3 | ["Apache-2.0"] | 1 | 2019-08-25T13:40:37.000Z | 2019-08-25T13:40:37.000Z |
#coding:utf-8
#word segmentation and POS tagging; results are output in JSON format
from __future__ import division  # the feature ratios in sentProperty need true division
import urllib2,urllib,sys,os
import codecs,re,string,json
from subprocess import *
from collections import Counter
import mdptoolbox
from numpy import *
import aiml
#svm_train/svm_predict and friends are assumed to come from libsvm's Python
#binding (svmutil), which this file uses but never imports
from svmutil import svm_train, svm_save_model, svm_load_model, svm_predict, svm_read_problem
def divPos(input):
    uri_base = "http://api.ltp-cloud.com/analysis/"
    api_key = "98H7b393hUPX5tQwEgXwpgPc6m0c1jLTVgJkJymu"
    text = urllib.quote(input)
    #the actual invocation of the LTP language-cloud API
    data = {
        "api_key" : api_key,
"text" : text,
"format" : "json",
"has_key" : "false",
"pattern" : "pos",
}
params = urllib.urlencode(data)
try:
request = urllib2.Request(uri_base)
response = urllib2.urlopen(request, params)
content = response.read().strip()
return content
except urllib2.HTTPError, e:
print >> sys.stderr, e.reason
#convert the JSON result into plain format
def jsonToPlain(contentJson):
    #keep only the segmentation and POS-tagging output
#posResult=[word for word in sentence[0] for sentence in content for ]
resultList=[]
for paragraph in contentJson:
sentenceList=[]
if paragraph[0][-1][-1]== "wp":
sent=paragraph[0][:-1]
else:
sent=paragraph[0]
length=len(sent)
wordIndex=0
while wordIndex<length:
if u"转" in sent[wordIndex][1]:
sent[wordIndex][2]="v"
if wordIndex+2<len(sent) and u"q" == sent[wordIndex][2] and u"每" == sent[wordIndex+1][1] and u"q" == sent[wordIndex+2][2]:
merge=[wordIndex,sent[wordIndex][1]+sent[wordIndex+1][1]+sent[wordIndex+2][1],sent[wordIndex][2]]
sent=sent[:wordIndex]+[merge]+sent[wordIndex+3:]
length=len(sent)
sentenceList.append(sent[wordIndex][1]+'\t'+sent[wordIndex][2])
wordIndex+=1
sentence='\n'.join(sentenceList).encode('utf-8')
sentence1=re.sub( r'(nh|ni|nl|ns|nz)' , 'n' , sentence )
resultList.append(sentence1+'\n'*2)
return resultList
#driver routine for word segmentation
def distinguishWord(dataa):
f0=open('allTest','a')
f0.write(dataa+"\n")
f0.close()
output=divPos(dataa)
    #write the segmentation and tagging results to a text file
f1=open('distinguishWordJson','w')
f1.write(output)
f1.close()
contentJson=json.loads(output)
resultList=jsonToPlain(contentJson)
f2=open('distinguishWordPlain','w')
f2.writelines(resultList)
f2.close()
return True
def save():
f6=open('semanticOutput','r')
output1=f6.read()
f6.close()
f7=open('splitOutput','r')
output2=f7.read()
f7.close()
f8=open('savesemanticOutput','a')
f8.write(output1)
f8.close()
f9=open('savesplitOutput','a')
f9.write(output2)
f9.close()
def train(template,trainTxt,model):
#returnCode = call('crf_learn.exe template crfTrain1.txt model1')
call('crf_learn.exe '+template+' '+trainTxt+' '+ model)
def test(model,testTxt,testOutput):
"""
-v1:输出标签的概率值
-n :输出几层不同概率只的选项 用处不大
-t:输出model的txt版本
-f:是一阀值,只有某词的频率大于该值 才有用
-c:跟拟合度有关的一个参数
-h: 可以随时打开帮助看看
"""
#Popen('crf_test.exe -m model1 crfTest1.txt >output1.txt', shell = True, stdout = PIPE).stdout
Popen('crf_test.exe -m '+model+' '+ testTxt +'>'+testOutput, shell = True, stdout = PIPE).communicate()
#Popen('crf_test.exe -m '+model+' '+ testTxt +'>'+testOutput, shell = True, stdout = PIPE).communicate()
def evaluate(testOutput,testResult):
evalue=Popen('conlleval.pl -d "\t" -r < '+testOutput+' > '+testResult,
shell = True, stdout = PIPE).stdout
    #https://argcv.com/articles/2104.c#respond reference material; covers the parameters thoroughly
#print evalue.read()
def semanticTrain():#http://www.hankcs.com/nlp/the-crf-model-format-description.html (a good reference on the CRF model format)
train('semanticTemplate','semanticTrain','semanticModel')
def semanticTest():
test('semanticModel','distinguishWordPlain','semanticOutput')
#test('model','crfTest1.txt','output1.txt')
def semanticEvaluate():
evaluate('semanticEvaluate','semanticEvaluateOutput')
#convert plain-string input into a JSON-style nested list
def plainToJson(semanticOutput):
fCrfTrain=open(semanticOutput,'r')
article=fCrfTrain.read()
if article[:3] == codecs.BOM_UTF8:
article = article[3:]
fCrfTrain.close()
sentenceLabel=re.compile(r"(.*?)\n\n",re.S)
sentenceList=re.findall(sentenceLabel,article)
#print sentenceList[0:10]
wordListInSentenceList=[]
for sentence in sentenceList:
wordList=sentence.split('\n')
wordPosYuyiList=[]
for word in wordList:
wordPosYuyi=word.split('\t')
wordPosYuyiList.append(wordPosYuyi)
wordListInSentenceList.append(wordPosYuyiList)
return wordListInSentenceList
#convert plain-string input (with marginal probabilities to handle) into a JSON-style nested list
def proPlainToJson(semanticOutput):
fCrfTrain=open(semanticOutput,'r')
article=fCrfTrain.read()
if article[:3] == codecs.BOM_UTF8:
article = article[3:]
fCrfTrain.close()
sentenceLabel=re.compile(r"(.*?)\n\n",re.S)
sentenceList=re.findall(sentenceLabel,article)
#print sentenceList[0:10]
wordListInSentenceList=[]
for sentence in sentenceList:
wordList=sentence.split('\n')
wordPosYuyiProList=[]
for word in wordList[1:]:
wordPosYuyiPro=word.split('\t')
newWordPosYuyiPro=wordPosYuyiPro[:-1]+[wordPosYuyiPro[-1].split('/')[0]]
wordPosYuyiProList.append(newWordPosYuyiPro)
wordListInSentenceList.append([float(wordList[0][2:])]+wordPosYuyiProList)
return wordListInSentenceList
#merge nouns and direction words; output in JSON format
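#n_f scans each sentence for runs of consecutive tokens whose semantic tag
#shares a suffix ('-Place', '-AT', '-NAT', '-DurativeVerb', '-Direction')
#and collapses each run into one merged token carrying that semantic label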
def n_f(wordListInSentenceList):
for s in xrange(len(wordListInSentenceList)):
wordPosition=0
while wordPosition<len(wordListInSentenceList[s]):
nPosition=[]
while wordPosition<len(wordListInSentenceList[s]) and '-Place' in wordListInSentenceList[s][wordPosition][-1]:
nPosition.append(wordPosition)
wordPosition+=1
#print nPosition
if len(nPosition)!=0:
#
words=[]
for p in nPosition:
words.append(wordListInSentenceList[s][p][0])
word=''.join(words)
nTotal=[word,'n','Place']
wordListInSentenceList[s]=wordListInSentenceList[s][:nPosition[0]]+[nTotal]+wordListInSentenceList[s][nPosition[-1]+1:]
#print wordListInSentenceList[s]
wordPosition=nPosition[0]
aPosition=[]
while wordPosition<len(wordListInSentenceList[s]) and '-AT' in wordListInSentenceList[s][wordPosition][-1]:
aPosition.append(wordPosition)
wordPosition+=1
#print nPosition
if len(aPosition)!=0:
#
words=[]
for a in aPosition:
words.append(wordListInSentenceList[s][a][0])
word=''.join(words)
aTotal=[word,'a','AT']
wordListInSentenceList[s]=wordListInSentenceList[s][:aPosition[0]]+[aTotal]+wordListInSentenceList[s][aPosition[-1]+1:]
#print wordListInSentenceList[s]
wordPosition=aPosition[0]
naPosition=[]
while wordPosition<len(wordListInSentenceList[s]) and '-NAT' in wordListInSentenceList[s][wordPosition][-1]:
naPosition.append(wordPosition)
wordPosition+=1
#print nPosition
if len(naPosition)!=0:
#
words=[]
for na in naPosition:
words.append(wordListInSentenceList[s][na][0])
word=''.join(words)
naTotal=[word,'a','NAT']
wordListInSentenceList[s]=wordListInSentenceList[s][:naPosition[0]]+[naTotal]+wordListInSentenceList[s][naPosition[-1]+1:]
#print wordListInSentenceList[s]
wordPosition=naPosition[0]
vPosition=[]
while wordPosition<len(wordListInSentenceList[s]) and '-DurativeVerb' in wordListInSentenceList[s][wordPosition][-1]:
vPosition.append(wordPosition)
wordPosition+=1
#print nPosition
if len(vPosition)!=0:
#
words=[]
for v in vPosition:
words.append(wordListInSentenceList[s][v][0])
word=''.join(words)
vTotal=[word,'v','DurativeVerb']
wordListInSentenceList[s]=wordListInSentenceList[s][:vPosition[0]]+[vTotal]+wordListInSentenceList[s][vPosition[-1]+1:]
#print wordListInSentenceList[s]
wordPosition=vPosition[0]
fPosition=[]
while wordPosition<len(wordListInSentenceList[s]) and '-Direction' in wordListInSentenceList[s][wordPosition][-1]:
fPosition.append(wordPosition)
wordPosition+=1
#print nPosition
if len(fPosition)!=0:
#
words=[]
for p in fPosition:
words.append(wordListInSentenceList[s][p][0])
word=''.join(words)
fTotal=[word,'nd','Direction']
wordListInSentenceList[s]=wordListInSentenceList[s][:fPosition[0]]+[fTotal]+wordListInSentenceList[s][fPosition[-1]+1:]
#print wordListInSentenceList[s]
wordPosition=fPosition[0]
#print wordPosition
wordPosition+=1
return
#convert the merged JSON format back into plain-text output
def jsonToPlain1(contentJson):
    #keep only the segmentation and tagging output
#posResult=[word for word in sentence[0] for sentence in content for ]
resultList=[]
for paragraph in contentJson:
sentenceList=[]
for word in paragraph:
sentenceList.append(word[0]+'\t'+word[1]+'\t'+word[2])
sentence='\n'.join(sentenceList)
resultList.append(sentence+'\n'*2)
return resultList
def merge():
    wordListInSentenceList=plainToJson('semanticOutput')#crfTrain.txt is a plain file containing word, POS, and semantics
n_f(wordListInSentenceList)
data=json.dumps(wordListInSentenceList)
    f3=open('n_f_outputTestJson','w') #jsonData.txt corresponds to the merged result of crfTrain
f3.writelines(data)
f3.close()
result=jsonToPlain1(wordListInSentenceList)
    f4=open('n_f_outputTest','w') #n_f.data corresponds to jsonData.txt in plain format
f4.writelines(result)
f4.close()
#feature list for every sentence
def sentProperty(listData):
    property_all=[]
    for sentence in listData:
        #list of semantic tags in the sentence
        yuyiList=[ word[-1] for word in sentence]
        yuyiStr=' '+' '.join(yuyiList)
        #fraction of the sentence made up of words not tagged 'Other'
        property1=1-Counter(yuyiList)['Other']/len(yuyiList)
        #whether a verb is present
        property2=1 if re.search(r'DurativeVerb|MomentaryVerb',yuyiStr) else 0
        #whether a direction word is present
        property3=1 if 'Direction' in yuyiList else 0
        #whether a place word is present
        property4=1 if 'Place' in yuyiList else 0
        #whether a numeral followed by a distance unit is present
        property5=1 if re.search(r'Num DistanceUnit',yuyiStr) else 0
        #count of direction word + verb pairs
        property6=len(re.findall(r'Direction (DurativeVerb|MomentaryVerb)',yuyiStr))
        #count of verb + place pairs
        property7=len(re.findall(r'(DurativeVerb|MomentaryVerb) Place',yuyiStr))
        #count of place + verb pairs
        property8=len(re.findall(r'Place (DurativeVerb|MomentaryVerb)',yuyiStr))
        #number of verbs
        property9=len(re.findall(r'(DurativeVerb|MomentaryVerb)',yuyiStr))
        #number of distinct tag classes relative to sentence length
        property10=len(set(yuyiList))/len(yuyiList)
        property_all.append([property1,property2,property3,property4,property5,property6,property7,property8,property9,property10])
    return property_all
#attach a 1 or -1 label to each sentence, annotated by hand
def label_initialize():
set_label_1=[1]*60
label_0=[7,53,54,55,56,57,58,59]
for label in label_0:
set_label_1[label]=-1
return set_label_1
#turn the sentence features and the 1/-1 labels into libsvm input format
def svm_input(property_all,label_tag):
svm_format=[]
for index in xrange(len(property_all)):
column=str(label_tag[index])
for property_index in xrange(len(property_all[index])):
column+='\t'+str(property_index+1)+':'+str(property_all[index][property_index])
svm_format.append(column+'\n')
return svm_format
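#e.g. a sentence with label 1 and features [0.5, 1] becomes the line
#"1\t1:0.5\t2:1\n" in libsvm's sparse input format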
def svmProperty(n_f_input):
    #extract SVM feature vectors (and hand-annotated labels) from the merged, semantically tagged corpus and store them in a text file
f5=open(n_f_input,'r') #jsonData
jsonData1=f5.read()
f5.close()
listData=json.loads(jsonData1)
sentPropertyAll=sentProperty(listData)
#sentLabel=label_initialize()
#sentLabel=[0]*len(sentPropertyAll)
return sentPropertyAll
#train the model
def svmTrain():
sentPropertyAll=svmProperty('jsonData')
sentLabel=label_initialize()
m=svm_train(sentLabel,sentPropertyAll,'-c 4')
svm_save_model('svmModel', m)
#run prediction
def svmTest():
#y,x=svm_read_problem('svm_input_test.txt')
x=svmProperty('n_f_outputTestJson')
y=[0]*len(x)
m = svm_load_model('svmModel')
p_label,p_acc,p_val=svm_predict(y,x,m)
return p_label
def splitTrain():
train('splitTemplate','splitTrain','splitModel')
def splitTest():
test('splitModel','n_f_outputTest','splitOutput')
def startFinishTrain():
train('startFinishTemplate','startFinishTrain','startFinishmodel')
def startFinishTest():
test('startFinishmodel','splitOutput','startFinishOutput')
def isanum(str):
try:
float(str)
return True
except ValueError:
return False
digitDict ={u'零':0, u'一':1, u'二':2, u'三':3, u'四':4, u'五':5, u'六':6, u'七':7, u'八':8, u'九':9, u'十':10, u'百':100, u'千':1000,
u'万':10000,u'亿':100000000,u'0':0, u'1':1, u'2':2, u'3':3, u'4':4, u'5':5, u'6':6, u'7':7, u'8':8, u'9':9}
def getResultForDigit(a, encoding="utf-8"):
if isinstance(a, str):
a = a.decode(encoding)
count = 0
result = 0
tmp = 0
Billion = 0
while count < len(a):
tmpChr = a[count]
tmpNum = digitDict.get(tmpChr, None)
        #character means 100,000,000 (亿)
if tmpNum == 100000000:
result = result + tmp
result = result * tmpNum
            #move everything above 10^8 into Billion and reset result
Billion = Billion * 100000000 + result
result = 0
tmp = 0
        #character means 10,000 (万)
elif tmpNum == 10000:
result = result + tmp
result = result * tmpNum
tmp = 0
        #character means 10, 100 or 1000
elif tmpNum >= 10:
if tmp == 0:
tmp = 1
result = result + tmpNum * tmp
tmp = 0
        #single digit
elif tmpNum is not None:
tmp = tmp * 10 + tmpNum
count += 1
result = result + tmp
result = result + Billion
return result
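#Quick sanity checks for the Chinese-numeral converter above (inputs are
#illustrative; unicode is passed directly so no decoding is triggered).
def _demo_getResultForDigit():
    #u'三百二十一' -> 321, u'一万零五' -> 10005
    return getResultForDigit(u'三百二十一'), getResultForDigit(u'一万零五')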
def extract(output):
wordList=plainToJson(output)[0]
splitWordList=[]
beginLabel=[]
for i in xrange(len(wordList)):
if 'B' in wordList[i][3]:
beginLabel.append(i)
beginLabel.append(len(wordList))
for i in xrange(len(beginLabel)-1):
splitWordList.append(wordList[beginLabel[i]:beginLabel[i+1]])
#print splitWordList
structureList=[]
for unit in splitWordList:
structure=['__']*7
for i in xrange(len(unit)):
#if unit[i][-1]=='s':
#structure[0]=unit[i][0]
#if unit[i][-1]=='f':
# structure[1]=unit[i][0]
if 'Place' in unit[i][2]:
structure[1]=unit[i][0]
if 'DurativeVerb' in unit[i][2]:
structure[2]=unit[i][0]
elif unit[i][2]=='MomentaryVerb':
structure[2]=unit[i][0]
if unit[i][2]=='Direction':
structure[3]=unit[i][0]
if unit[i][2]=='Num' and i<len(unit)-1 and unit[i+1][2]=='DistanceUnit':
if isanum(unit[i][0]):
digit=unit[i][0]
else:
digit=str(getResultForDigit(unit[i][0]))
                if digit=='0' or unit[i+1][0]=="点" or unit[i+1][0]=="些":
digit=unit[i][0]
if unit[i+1][0]=='厘米' or unit[i+1][0]=='公分':
structure[4]=str(float(digit)/100)
elif unit[i+1][0]=='分米':
structure[4]=str(float(digit)/10)
elif unit[i+1][0]=='米':
structure[4]=digit
else:
structure[4]=digit+unit[i+1][0]
if unit[i][2]=='Distance':
structure[4]=unit[i][0]
if unit[i][2]=='Speed':
structure[5]=unit[i][0]
if unit[i][2]=='Num' and i<len(unit)-1 and unit[i+1][2]=='SpeedUnit':
if isanum(unit[i][0]):
digit=unit[i][0]
else:
digit=str(getResultForDigit(unit[i][0]))
                if digit=='0' or unit[i+1][0]=="点" or unit[i+1][0]=="些":
digit=unit[i][0]
if unit[i+1][0]=='厘米每秒' or unit[i+1][0]=='公分每秒':
structure[5]=str(float(digit)/100)
elif unit[i+1][0]=='分米每秒':
structure[5]=str(float(digit)/10)
elif unit[i+1][0]=='米每秒':
structure[5]=digit
elif unit[i+1][0]=='迈' or unit[i+1][0]=='码':
structure[5]=str(float(digit)*0.278)
else:
structure[5]=digit+unit[i+1][0]
if unit[i][2]=='AT':
structure[6]=unit[i][2]
if unit[i][2]=='NAT':
structure[6]=unit[i][2]
structureList.append(structure)
return structureList
def fillElement(structureList):  #normalise the fields of the extracted structured command
for i in range(len(structureList)):
if structureList[i][0]=='__':
if i != 0 and structureList[i-1][1] !='__':
structureList[i][0]=structureList[i-1][1]
else:
structureList[i][0]='当下位置'
#if "停" in structureList[i][2] or "头" in structureList[i][2]:
#continue
if structureList[i][4]=='__':
if "快" in structureList[i][5]:
structureList[i][5]="加速"
continue
if "慢" in structureList[i][5]:
structureList[i][5]="减速"
continue
if i !=len(structureList)-1 and structureList[i+1][0] !='__':
structureList[i][1]=structureList[i+1][0]
#if structureList[i][2]=='__':
#if i !=0:
#structureList[i][2]=structureList[i-1][2]
#else:
#structureList[i][2]='走'
#if "去" in structureList[i][2] or "到" in structureList[i][2]:
#structureList[i][2]='前'
        if structureList[i][2]!='__' or structureList[i][3]!='__':
if "后" in structureList[i][3] or "后" in structureList[i][2]:
structureList[i][2]='后'
if "前" in structureList[i][3] or "前" in structureList[i][2]:
structureList[i][2]='前'
if "左" in structureList[i][3] or "左" in structureList[i][2]:
structureList[i][2]='左'
if "右" in structureList[i][3] or "右" in structureList[i][2]:
structureList[i][2]='右'
if structureList[i][4]!='__':
structureList[i][4]=structureList[i][4]+'米'
if "停" in structureList[i][2]:
structureList[i][2]="停"
if "退" in structureList[i][2]:
structureList[i][2]="退"
if ("拧" in structureList[i][2] or "开" in structureList[i][2]) and structureList[i][1]=='__':
structureList[i][1]=structureList[i][2].replace("开","")
            #print structureList[i][2]
structureList[i][2]="开"
if "回" in structureList[i][2]:
structureList[i][2]="回"
if '回' in structureList[i][2] and structureList[i][1]=='__':
structureList[i][1]="起点"
if "头" in structureList[i][2] or "转身" in structureList[i][2]:
structureList[i][2]="掉头"
if "到" in structureList[i][2] or "去" in structureList[i][2] or "走" in structureList[i][2] or "找" in structureList[i][2]:
structureList[i][2]="到"
if "推" in structureList[i][2]:
structureList[i][2]="推"
if "搬" in structureList[i][2]:
structureList[i][2]="搬"
if "拿" in structureList[i][2]:
structureList[i][2]="拿"
if "抬" in structureList[i][2]:
structureList[i][2]="抬"
if "看" in structureList[i][2] or "拍" in structureList[i][2]:
structureList[i][2]="拍照"
if "录" in structureList[i][2]:
structureList[i][2]="录象"
if "采集" in structureList[i][2]:
structureList[i][2]="采集"
if structureList[i][2]=='__' and structureList[i][1]!='__':
structureList[i][2]='到'
#if structureList[i][3]=='__' and structureList[i][1]=='__':
        #the action itself contains a direction
#if "后" in structureList[i][3]:
#structureList[i][3]='后'
#structureList[i][2]='后'
#elif "左" in structureList[i][3]:
#structureList[i][3]='左'
#structureList[i][2]='左'
#elif "右" in structureList[i][3]:
#structureList[i][3]='右'
#structureList[i][2]='右'
#else:
#structureList[i][3]='前'
#structureList[i][2]='前'
        #the action itself contains a speed word
if structureList[i][0]=='__':
if "快" in structureList[i][2] or "急" in structureList[i][2]:
structureList[i][5]='加速'
elif "慢" in structureList[i][2] or "缓" in structureList[i][2]:
structureList[i][5]='减速'
#if structureList[i][2]!='__':
#structureList[i][2]=='走'
        #action normalisation
#if "停" in structureList[i][2]:
#structureList[i][2]="停"
#elif "头" in structureList[i][2] or "转身" in structureList[i][2]:
#structureList[i][2]="掉头"
#elif "转" in structureList[i][2] or "拐" in structureList[i][2]:
#structureList[i][2]="转"
#elif "退" in structureList[i][2]:
#structureList[i][2]="退"
#else:
#structureList[i][2]="走"
        #place names like C400
#begin=re.findall(r"([A-Za-z]).*(\d{3})",structureList[i][0])
#if begin != []:
#structureList[i][0]=begin[0][0]+begin[0][1]
#finish=re.findall(r"([A-Za-z]).*(\d{3})",structureList[i][1])
#if finish != []:
#structureList[i][1]=finish[0][0]+finish[0][1]
        #fuzzy speed normalisation
#if re.match( r'(快|急|迅速|马上|立即|)' ,structureList[i][5] ) != None:
#structureList[i][5]='快速'
#elif "慢" in structureList[i][5] or "缓" in structureList[i][5]:
#structureList[i][5]='慢速'
        #fuzzy distance normalisation
if "点" in structureList[i][4] or "些" in structureList[i][4]:
structureList[i][4]='1米'
if "步" in structureList[i][4]:
if filter(str.isdigit,structureList[i][4])!='':
structureList[i][4]=filter(str.isdigit,structureList[i][4]) + '米'
else:
structureList[i][4]='1米'
#extract the structured command required by the MDP, plus the value of each MDP state element
def stateextract(structureList,storelist,statelist):
if structureList[0][1]!='__':
storelist.append(structureList[0][1])
statelist.append(1)
else:
storelist.append(structureList[0][1])
statelist.append(0)
if structureList[0][2]!='__':
storelist.append(structureList[0][2])
statelist.append(1)
else:
storelist.append(structureList[0][2])
statelist.append(0)
if structureList[0][4]!='__':
storelist.append(structureList[0][4])
statelist.append(1)
else:
storelist.append(structureList[0][4])
statelist.append(0)
if structureList[0][5]!='__':
storelist.append(structureList[0][5])
statelist.append(1)
else:
storelist.append(structureList[0][5])
statelist.append(0)
if structureList[0][6]=='__':
storelist.append(structureList[0][6])
statelist.append(0)
elif structureList[0][6]=='AT':
storelist.append(structureList[0][6])
statelist.append(1)
elif structureList[0][6]=='NAT':
storelist.append(structureList[0][6])
statelist.append(2)
def mdptrain(policy):
    #train the MDP model: P is the transition probability matrix and R the reward matrix; the 144 states encode destination, action, distance, speed and confirmation status
P = ones( (10,144,144) )/144
#R = array( [ [100,0,0,0,0], [100,0,0,0,-100], [100,0,0,0,-100], [0,0,100,0,0], [0,0,0,0,200], [0,100,0,100,-100], [0,0,100,0,0], [0,0,0,0,200], [0,100,0,100,-100], [0,0,100,0,0], [0,0,0,0,200], [0,100,0,100,-100], [0,0,100,0,0], [0,0,0,0,200], [0,100,0,100,-100], [0,0,100,0,0], [0,0,0,0,200], [0,100,0,100,-100], [0,0,100,0,0], [0,0,0,0,200], [0,100,0,100,-100], [0,0,100,0,0], [0,0,0,0,200], [0,100,0,100,-100], [0,0,100,0,0], [0,0,0,0,200], [0,100,0,100,-100], [0,0,100,0,0], [0,0,0,0,200], [0,100,0,100,-100], [0,0,100,0,0], [0,0,0,0,200], [0,100,0,100,-100], [0,0,100,0,0], [0,0,0,0,200], [0,100,0,100,-100], [0,0,100,0,0], [0,0,0,0,200], [0,100,0,100,-100], [0,0,100,0,0], [0,0,0,0,200], [0,100,0,100,-100], [0,0,100,0,0], [0,0,0,0,200], [0,100,0,100,-100], [0,0,100,0,0], [0,0,0,0,200], [0,100,0,100,-100] ] )
R = array( [[50,-10,-10,-10,-10,-10,-10,-10,-10,-10], [-10,-10,50,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,-10,-10,-10,-10,-10,200], [-10,-10,-10,50,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,50,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,-10,-10,-10,-10,-10,200], [-10,-10,50,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,-10,-10,-10,-10,-10,200], [-10,-10,-10,-10,50,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,50,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,50,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,50,-10,-10,-10,-10,-10], [-10,-10,-10,-10,-10,-10,-10,-10,-10,200], [-10,-10,50,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,-10,-10,-10,-10,-10,200], [-10,-10,-10,50,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,50,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,-10,-10,-10,-10,-10,200], [-10,-10,50,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,-10,-10,-10,-10,-10,200], [-10,-10,-10,-10,-10,-10,-10,50,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,-10,-10,-10,50,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,-10,-10,-10,50,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,-10,-10,-10,50,-10,-10], [-10,-10,-10,-10,-10,50,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,-10,50,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,-10,50,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,-10,50,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,-10,50,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,-10,50,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,-10,50,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,-10,50,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,-10,-10,-10,-10,-10,200], [-10,-10,50,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,-10,-10,-10,-10,-10,200], [-10,-10,-10,50,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,50,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,-10,-10,-10,-10,-10,200], [-10,-10,50,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,-10,-10,-10,-10,-10,200], [-10,-10,-10,-10,50,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,50,-10,-10,-10,-10,-10], 
[-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,50,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,50,-10,-10,-10,-10,-10], [-10,-10,-10,-10,-10,-10,-10,-10,-10,200], [-10,-10,50,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,-10,-10,-10,-10,-10,200], [-10,-10,-10,50,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,50,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,-10,-10,-10,-10,-10,200], [-10,-10,50,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,-10,-10,-10,-10,-10,200], [-10,-10,-10,-10,-10,-10,-10,50,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,-10,-10,-10,50,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,-10,-10,-10,50,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,-10,-10,-10,50,-10,-10], [-10,-10,-10,-10,-10,-10,-10,-10,50,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,-10,-10,-10,-10,50,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,-10,-10,-10,-10,50,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,-10,-10,-10,-10,50,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,-10,-10,-10,-10,50,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,-10,-10,-10,-10,50,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,-10,-10,-10,-10,50,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,-10,-10,-10,-10,50,-10], [-10,-10,-10,-10,-10,-10,50,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,-10,-10,50,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,-10,-10,50,-10,-10,-10], [-10,50,-10,-10,-10,-10,-10,-10,-10,-10], [-10,-10,-10,-10,-10,-10,50,-10,-10,-10]] )
vi = mdptoolbox.mdp.RelativeValueIteration(P, R)
#vi = mdptoolbox.mdp.ValueIteration(P, R, 0.95)
    #vi.setVerbose()  #uncomment to log each iteration
vi.run()
policy+=list(vi.policy)
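#Minimal sketch of the mdptoolbox API used above, run on the library's built-in
#toy "forest management" problem (3 states, 2 actions); this is only a usage
#illustration, not part of the dialogue MDP.
def _demo_mdptoolbox():
    import mdptoolbox.example
    P0,R0=mdptoolbox.example.forest()  #P0: (actions, states, states), R0: (states, actions)
    solver=mdptoolbox.mdp.RelativeValueIteration(P0,R0)
    solver.run()
    return solver.policy  #one action index per state, e.g. (0, 0, 0)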
'''def policycs(policy,statelist,policy_c,storelist):
state_c=(statelist[0]*2**3+statelist[1]*2**2+statelist[2]*2**1+statelist[3])*3+statelist[4]
print policy[state_c]
if policy[state_c]==0:
policy_c[0]='提示'
elif policy[state_c]==1:
policy_c[0]='再提示'
elif policy[state_c]==2:
policy_c[0]='确认'
elif policy[state_c]==3:
policy_c[0]='再确认'
elif policy[state_c]==4:
policy_c[0]='完成'
for i in range(len(storelist)):
if storelist[i]!='__':
policy_c[0]+=storelist[i]
print state_c
print policy_c[0]'''
def store(storelist,laststore,statelist):
    #merge stored history with the current command (non-negation case)
if laststore[0]!='__' and storelist[0]=='__':
storelist[0]=laststore[0]
statelist[0]=1
if laststore[1]!='__' and storelist[1]=='__':
storelist[1]=laststore[1]
statelist[1]=1
if laststore[2]!='__' and storelist[2]=='__':
storelist[2]=laststore[2]
statelist[2]=1
if laststore[3]!='__' and storelist[3]=='__':
storelist[3]=laststore[3]
statelist[3]=1
#elif laststore[4]!='__' and storelist[4]=='__':
#storelist[4]=laststore[4]
#if storelist[4]]
def natstore(storelist,laststore,statelist):
    #merge stored history with the current command (negation case)
if laststore[0]!='__' and storelist[0]=='__' and (storelist[1]!='__' or storelist[2]!='__' or storelist[3]!='__') :
storelist[0]=laststore[0]
statelist[0]=1
if laststore[1]!='__' and storelist[1]=='__' and (storelist[0]!='__' or storelist[2]!='__' or storelist[3]!='__') :
storelist[1]=laststore[1]
statelist[1]=1
if laststore[2]!='__' and storelist[2]=='__' and (storelist[0]!='__' or storelist[1]!='__' or storelist[3]!='__') :
storelist[2]=laststore[2]
statelist[2]=1
if laststore[3]!='__' and storelist[3]=='__' and (storelist[0]!='__' or storelist[1]!='__' or storelist[2]!='__') :
storelist[3]=laststore[3]
statelist[3]=1
def changes(statelist):
if statelist[4]==1:
for j in range(4):
if statelist[j]!=0:
statelist[j]=2
def policycs(policy,statelist,policy_c,storelist):
state_c=statelist[0]*3*3*4+statelist[1]*3*3+statelist[2]*3+statelist[3]
if policy[state_c]==0:
policy_c[0]='提示'
elif policy[state_c]==1:
policy_c[0]='总确认'
for i in range(4):
if storelist[i]!='__':
policy_c[0]+=storelist[i]
elif policy[state_c]==2:
policy_c[0]='速度确认'
policy_c[0]+=storelist[3]
elif policy[state_c]==3:
policy_c[0]='距离确认'
policy_c[0]+=storelist[2]
elif policy[state_c]==4:
policy_c[0]='动作确认'
policy_c[0]+=storelist[1]
elif policy[state_c]==5:
policy_c[0]='目的确认'
policy_c[0]+=storelist[0]
elif policy[state_c]==6:
policy_c[0]='未收录项目总确认'
for i in range(4):
if storelist[i]!='__':
policy_c[0]+=storelist[i]
elif policy[state_c]==7:
policy_c[0]='未收录动作确认'
policy_c[0]+=storelist[1]
elif policy[state_c]==8:
policy_c[0]='未收录目的确认'
policy_c[0]+=storelist[0]
elif policy[state_c]==9:
policy_c[0]='完成'
for i in range(4):
if storelist[i]!='__':
policy_c[0]+=storelist[i]
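#Worked example of the mixed-radix state index used above: with slots that can
#take 4, 4, 3 and 3 values respectively, state_c ranges over 4*4*3*3 = 144
#states, matching the 144x144 transition matrices in mdptrain. This reading of
#the encoding is my interpretation; it is not documented in the original.
def _demo_state_index():
    statelist=[1,0,2,1]
    return statelist[0]*3*3*4+statelist[1]*3*3+statelist[2]*3+statelist[3]  #36+0+6+1 = 43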
def loc(structureList,statelist,storelist):
f6=open('location')
Location=f6.readlines()
structureList[0][1]=structureList[0][1].decode('utf-8')
sum1=[0]*len(Location)
if structureList[0][1]!=u'__':
if statelist[0]==1:
for i in range(len(Location)):
Location[i]=Location[i].replace('\n',"").decode('utf-8')
if Location[i]==structureList[0][1]:
statelist[0]=1
break
else:
statelist[0]=3
if statelist[0]==3:
for j in range(len(Location)):
for k in range(min(len(Location[j]),len(structureList[0][1]))):
if Location[j][k]==structureList[0][1][k]:
sum1[j]=sum1[j]+1
if max(sum1)!=0:
index1=sum1.index(max(sum1))
storelist[0]=Location[index1].encode('utf-8')
statelist[0]=1
else:
storelist[0]="无匹配"
statelist[0]=3
def act(structureList,statelist,storelist):
f7=open('action')
Action=f7.readlines()
structureList[0][2]=structureList[0][2].decode('utf-8')
sum2=[0]*len(Action)
if structureList[0][2]!=u'__':
if statelist[1]==1:
for i in range(len(Action)):
Action[i]=Action[i].replace('\n',"").decode('utf-8')
if Action[i]==structureList[0][2]:
statelist[1]=1
break
else:
statelist[1]=3
if statelist[1]==3:
for j in range(len(Action)):
for k in range(min(len(Action[j]),len(structureList[0][2]))):
if Action[j][k]==structureList[0][2][k]:
sum2[j]=sum2[j]+1
if max(sum2)!=0:
index2=sum2.index(max(sum2))
storelist[1]=Action[index2].encode('utf-8')
statelist[1]=1
else:
storelist[1]="无匹配"
statelist[1]=3
def testAll(dataa,step,cmNum):
laststore=['__']*5
policy=[]
mdptrain(policy)
liststructure=[]
structureList=[]
allinquiry=""
prompt=""
respond=[]
allrespond=""
step=int(sys.argv[2])
cmNum=int(sys.argv[3])
    #navigation command input
if step==0:
'''CRF'''
distinguishWord(dataa)
semanticTest()
merge()
splitTest()
startFinishTest()
save()
structureList=extract('startFinishOutput')
fillElement(structureList)
liststructure=structureList
f3=open('liststructure','w')
for i in range(len(liststructure)):
for j in range(7):
liststructure[i][j]=liststructure[i][j]+'\n'
f3.writelines(liststructure[i][j])
f3.close()
f5=open('respond','w')
f5.writelines('')
        f5.close()
print len(liststructure)
elif step==1:
'''MDP'''
storelist=[]
statelist=[]
policy_c=['']
cmNum=int(sys.argv[3])
f4=open('liststructure')
liststructure=f4.readlines()
structureList.append(liststructure[cmNum*7:cmNum*7+7])
f4.close()
for i in range(len(structureList[0])):
structureList[0][i]=structureList[0][i].replace('\n','')
stateextract(structureList,storelist,statelist)
if storelist[4]!='NAT':
store(storelist,laststore,statelist)
#print statelist
else:
natstore(storelist,laststore,statelist)
loc(structureList,statelist,storelist)
act(structureList,statelist,storelist)
changes(statelist)
laststore=storelist
for i in range(len(storelist)):
storelist[i]=storelist[i].replace('\n','')
policycs(policy,statelist,policy_c,storelist)
k = aiml.Kernel()
k.learn("cn-startup.xml")
k.respond("load aiml cn")
if '确认' in policy_c[0]:
if cmNum==0:
print k.respond(policy_c[0])
else:
f5=open('respond','r')
respond=f5.readlines()
f5.close()
for j in range(len(respond)):
respond[j]=respond[j].replace('好的,','').replace('我','',1).replace('这','').replace('就','').replace('马上','').replace('\n','')
if j!=0 and respond[j]!='':
respond[j]="然后" + respond[j]
allinquiry=allinquiry + respond[j]
print "请问是要让我先" + allinquiry +"然后"+ k.respond(policy_c[0]).replace('请问','').replace('是','').replace('要','').replace('让','').replace('我','',1)
if "无匹配" in policy_c[0]:
for i in range(len(laststore)):
laststore[i]='__'
storelist[i]='__'
if '提示' in policy_c[0]:
if cmNum==0:
print k.respond(policy_c[0])
else:
f5=open('respond','r')
respond=f5.readlines()
f5.close()
for j in range(len(respond)):
respond[j]=respond[j].replace('好的,','').replace('我','',1).replace('这','').replace('就','').replace('马上','').replace('\n','')
if j!=0 and respond[j]!='':
respond[j]="然后" + respond[j]
prompt=prompt + respond[j]
print "你好,请问我先" + prompt + "然后怎么做?"
for i in range(len(laststore)):
laststore[i]='__'
storelist[i]='__'
f7=open('laststore','w')
for i in range(len(laststore)):
laststore[i]=laststore[i]+'\n'
f7.writelines(laststore[i])
f7.close()
elif step==2:
'''CRF'''
distinguishWord(dataa)
semanticTest()
merge()
splitTest()
startFinishTest()
save()
structureList=extract('startFinishOutput')
fillElement(structureList)
'''MDP'''
storelist=[]
statelist=[]
policy_c=['']
f5=open('laststore','r')
laststore=f5.readlines()
f5.close()
for i in range(len(laststore)):
laststore[i]=laststore[i].replace('\n','')
stateextract(structureList,storelist,statelist)
if storelist[4]!='NAT':
store(storelist,laststore,statelist)
#print statelist
else:
natstore(storelist,laststore,statelist)
loc(structureList,statelist,storelist)
act(structureList,statelist,storelist)
changes(statelist)
laststore=storelist
for i in range(len(storelist)):
storelist[i]=storelist[i].replace('\n','')
policycs(policy,statelist,policy_c,storelist)
k = aiml.Kernel()
k.learn("cn-startup.xml")
k.respond("load aiml cn")
if '确认' in policy_c[0]:
if cmNum==0:
print "_"
print k.respond(policy_c[0])
else:
print "_"
f5=open('respond','r')
respond=f5.readlines()
f5.close()
for j in range(len(respond)):
respond[j]=respond[j].replace('好的,','').replace('我','',1).replace('这','').replace('就','').replace('马上','').replace('\n','')
if j!=0 and respond[j]!='':
respond[j]="然后" + respond[j]
allinquiry=allinquiry + respond[j]
print "请问是要让我先" + allinquiry +"然后"+ k.respond(policy_c[0]).replace('请问','').replace('是','').replace('要','').replace('让','').replace('我','',1)
if "无匹配" in policy_c[0]:
for i in range(len(laststore)):
laststore[i]='__'
storelist[i]='__'
f7=open('laststore','w')
for i in range(len(laststore)):
laststore[i]=laststore[i]+'\n'
f7.writelines(laststore[i])
f7.close()
if '完成' in policy_c[0]:
if "两" in storelist[2]:
storelist[2]=storelist[2].replace('两','2')
storelist[0]=str.lower(storelist[0])
print storelist[0].replace('\n',''),storelist[1].replace('\n',''),storelist[2].replace('\n',''),storelist[3].replace('\n',''),storelist[4].replace('\n','')
laststore=storelist
f7=open('laststore','w')
for i in range(len(laststore)):
laststore[i]=laststore[i]+'\n'
f7.writelines(laststore[i])
f7.close()
laststore=['__']*5
f5=open('respond','r')
respond=f5.readlines()
f5.close()
respond.append(k.respond(policy_c[0]))
f0=open('respond','w')
for i in range(len(respond)):
respond[i]=respond[i]+'\n'
f0.writelines(respond[i])
f0.close()
else:
if '提示' in policy_c[0]:
print "_"
if cmNum==0:
print k.respond(policy_c[0])
else:
f5=open('respond','r')
respond=f5.readlines()
f5.close()
for j in range(len(respond)):
respond[j]=respond[j].replace('好的,','').replace('我','',1).replace('这','').replace('就','').replace('马上','').replace('\n','')
if j!=0 and respond[j]!='':
respond[j]="然后" + respond[j]
prompt=prompt + respond[j]
print "你好,请问我先" + prompt + "然后怎么做?"
for i in range(len(laststore)):
laststore[i]='__'
storelist[i]='__'
f7=open('laststore','w')
for i in range(len(laststore)):
laststore[i]=laststore[i]+'\n'
f7.writelines(laststore[i])
f7.close()
elif step==3:
f5=open('respond','r')
respond=f5.readlines()
f5.close()
if len(respond)!=1:
for i in range(len(respond)):
respond[i]=respond[i].replace('好的,','').replace('我','',1).replace('这','').replace('就','').replace('马上','').replace('\n','')
if i!=0 and respond[i]!='':
respond[i]="然后" + respond[i]
allrespond=allrespond + respond[i]
print "好的,我先" + allrespond
else:
print respond[0]
elif step==4:
print "网络好像不给力,请稍后再试!"
#print "\n"
if __name__=="__main__":
testAll(sys.argv[1].decode('gbk').encode('utf-8'),sys.argv[2],sys.argv[3])

# ===== fedml_api/standalone/fedavg_affinity/fedavg_api.py (forestnoobie/FedML, Apache-2.0) =====

import copy
import logging
import random
import numpy as np
import torch
import wandb
from fedml_api.standalone.fedavg_affinity.client import Client
class FedAvgAPI(object):
def __init__(self, dataset, device, args, model_trainer):
self.device = device
self.args = args
[train_data_num, test_data_num, train_data_global, test_data_global,
train_data_local_num_dict, train_data_local_dict, test_data_local_dict, class_num] = dataset
self.train_global = train_data_global
self.test_global = test_data_global
self.val_global = None
self.train_data_num_in_total = train_data_num
self.test_data_num_in_total = test_data_num
self.client_list = []
self.train_data_local_num_dict = train_data_local_num_dict
self.train_data_local_dict = train_data_local_dict
self.test_data_local_dict = test_data_local_dict
self.model_trainer = model_trainer
self._setup_clients(train_data_local_num_dict, train_data_local_dict, test_data_local_dict, model_trainer)
self._setup_server(train_data_num, train_data_global, test_data_global, model_trainer)
def _setup_clients(self, train_data_local_num_dict, train_data_local_dict, test_data_local_dict, model_trainer):
logging.info("############setup_clients (START)#############")
for client_idx in range(self.args.client_num_per_round):
c = Client(client_idx, train_data_local_dict[client_idx], test_data_local_dict[client_idx],
train_data_local_num_dict[client_idx], self.args, self.device, model_trainer)
self.client_list.append(c)
logging.info("############setup_clients (END)#############")
def _setup_server(self, train_data_num, train_data_global, test_data_global, model_trainer):
logging.info("############setup_server (START)#############")
        # a Client instance that doubles as the server
self.server = Client(-1, train_data_global, test_data_global,
train_data_num, self.args, self.device, model_trainer)
logging.info("############setup_server (END)#############")
def train(self):
w_global = self.model_trainer.get_model_params()
for round_idx in range(self.args.comm_round):
logging.info("################Communication round : {}".format(round_idx))
w_locals = []
"""
for scalability: following the original FedAvg algorithm, we uniformly sample a fraction of clients in each round.
Instead of changing the 'Client' instances, our implementation keeps the 'Client' instances and then updates their local dataset
"""
client_indexes = self._client_sampling(round_idx, self.args.client_num_in_total,
self.args.client_num_per_round)
logging.info("client_indexes = " + str(client_indexes))
for idx, client in enumerate(self.client_list):
# update dataset
client_idx = client_indexes[idx]
client.update_local_dataset(client_idx, self.train_data_local_dict[client_idx],
self.test_data_local_dict[client_idx],
self.train_data_local_num_dict[client_idx])
# train on new dataset
## w = client.train(copy.deepcopy(w_global))
w = client.train_and_test(copy.deepcopy(w_global), round_idx)
# self.logger.info("local weights = " + str(w))
w_locals.append((client.get_sample_number(), copy.deepcopy(w)))
# update global weights
w_global = self._aggregate(w_locals)
self.model_trainer.set_model_params(w_global)
# test results
# at last round
if round_idx == self.args.comm_round - 1:
self._test_on_server(round_idx)
self._local_test_on_all_clients(round_idx)
# per {frequency_of_the_test} round
elif round_idx % self.args.frequency_of_the_test == 0:
if self.args.dataset.startswith("stackoverflow"):
self._local_test_on_validation_set(round_idx)
else:
self._test_on_server(round_idx)
self._local_test_on_all_clients(round_idx)
def _client_sampling(self, round_idx, client_num_in_total, client_num_per_round):
if client_num_in_total == client_num_per_round:
client_indexes = [client_index for client_index in range(client_num_in_total)]
else:
num_clients = min(client_num_per_round, client_num_in_total)
np.random.seed(round_idx) # make sure for each comparison, we are selecting the same clients each round
client_indexes = np.random.choice(range(client_num_in_total), num_clients, replace=False)
return client_indexes
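    def _demo_client_sampling_determinism(self):
        """Hedged sketch, never called by the training loop: because the RNG is
        re-seeded with round_idx, sampling the same round twice returns the same
        client indexes, which keeps cross-run comparisons fair."""
        first = self._client_sampling(3, 100, 10)
        again = self._client_sampling(3, 100, 10)
        return (np.asarray(first) == np.asarray(again)).all()  # True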
def _generate_validation_set(self, num_samples=10000):
test_data_num = len(self.test_global.dataset)
sample_indices = random.sample(range(test_data_num), min(num_samples, test_data_num))
subset = torch.utils.data.Subset(self.test_global.dataset, sample_indices)
sample_testset = torch.utils.data.DataLoader(subset, batch_size=self.args.batch_size)
self.val_global = sample_testset
def _aggregate(self, w_locals):
training_num = 0
for idx in range(len(w_locals)):
(sample_num, averaged_params) = w_locals[idx]
training_num += sample_num
        # reuse the first client's parameter dict as the accumulator; its values
        # are overwritten on the first loop iteration below
        (sample_num, averaged_params) = w_locals[0]
for k in averaged_params.keys():
for i in range(0, len(w_locals)):
local_sample_number, local_model_params = w_locals[i]
w = local_sample_number / training_num
if i == 0:
averaged_params[k] = local_model_params[k] * w
else:
averaged_params[k] += local_model_params[k] * w
return averaged_params
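    def _demo_aggregate(self):
        """Hedged sketch, never called by the training loop: the weighted average
        computed by _aggregate on two toy one-parameter 'models'."""
        w_locals = [(10, {'w': torch.tensor([1.0])}),  # client holding 10 samples
                    (30, {'w': torch.tensor([5.0])})]  # client holding 30 samples
        # expected: (10 * 1.0 + 30 * 5.0) / 40 = 4.0
        return self._aggregate(w_locals)['w']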
def _test_on_server(self, round_idx):
logging.info("################test_on_server on round: {}".format(round_idx))
affinity_metrics = {
"round_idx" : round_idx,
"client_idx" : -1,
"epoch" : -1,
"test_acc" : 0
}
# Test on server by uploading global data and parameter to client
# 1. Update parameter, 2. Test on test dataset
server = self.server
w_globals = self.model_trainer.get_model_params()
server.model_trainer.set_model_params(w_globals)
metrics = server.local_test(True)
test_acc = metrics['test_correct'] / metrics['test_total']
affinity_metrics["test_acc"] = test_acc
wandb.log({"Server Test/Acc": test_acc, "round": round_idx})
logging.info(affinity_metrics)
def _local_test_on_all_clients(self, round_idx):
logging.info("################local_test_on_all_clients : {}".format(round_idx))
train_metrics = {
'num_samples': [],
'num_correct': [],
'losses': []
}
test_metrics = {
'num_samples': [],
'num_correct': [],
'losses': []
}
client = self.client_list[0]
for client_idx in range(self.args.client_num_in_total):
"""
Note: for datasets like "fed_CIFAR100" and "fed_shakespheare",
the training client number is larger than the testing client number
"""
if self.test_data_local_dict[client_idx] is None:
continue
client.update_local_dataset(0, self.train_data_local_dict[client_idx],
self.test_data_local_dict[client_idx],
self.train_data_local_num_dict[client_idx])
# train data
train_local_metrics = client.local_test(False)
train_metrics['num_samples'].append(copy.deepcopy(train_local_metrics['test_total']))
train_metrics['num_correct'].append(copy.deepcopy(train_local_metrics['test_correct']))
train_metrics['losses'].append(copy.deepcopy(train_local_metrics['test_loss']))
# test data
test_local_metrics = client.local_test(True)
test_metrics['num_samples'].append(copy.deepcopy(test_local_metrics['test_total']))
test_metrics['num_correct'].append(copy.deepcopy(test_local_metrics['test_correct']))
test_metrics['losses'].append(copy.deepcopy(test_local_metrics['test_loss']))
"""
Note: CI environment is CPU-based computing.
The training speed for RNN training is to slow in this setting, so we only test a client to make sure there is no programming error.
If test dataset is identical for all clients, ci == 1 will save time. Due to same test_metrics results for all clients.
"""
if self.args.ci == 1:
break
# test on training dataset
train_acc = sum(train_metrics['num_correct']) / sum(train_metrics['num_samples'])
train_loss = sum(train_metrics['losses']) / sum(train_metrics['num_samples'])
# test on test dataset
test_acc = sum(test_metrics['num_correct']) / sum(test_metrics['num_samples'])
test_loss = sum(test_metrics['losses']) / sum(test_metrics['num_samples'])
stats = {'training_acc': train_acc, 'training_loss': train_loss}
wandb.log({"Train/Acc": train_acc, "round": round_idx})
wandb.log({"Train/Loss": train_loss, "round": round_idx})
logging.info(stats)
stats = {'test_acc': test_acc, 'test_loss': test_loss}
wandb.log({"Test/Acc": test_acc, "round": round_idx})
wandb.log({"Test/Loss": test_loss, "round": round_idx})
logging.info(stats)
def _local_test_on_validation_set(self, round_idx):
logging.info("################local_test_on_validation_set : {}".format(round_idx))
if self.val_global is None:
self._generate_validation_set()
client = self.client_list[0]
client.update_local_dataset(0, None, self.val_global, None)
# test data
test_metrics = client.local_test(True)
if self.args.dataset == "stackoverflow_nwp":
test_acc = test_metrics['test_correct'] / test_metrics['test_total']
test_loss = test_metrics['test_loss'] / test_metrics['test_total']
stats = {'test_acc': test_acc, 'test_loss': test_loss}
wandb.log({"Test/Acc": test_acc, "round": round_idx})
wandb.log({"Test/Loss": test_loss, "round": round_idx})
elif self.args.dataset == "stackoverflow_lr":
test_acc = test_metrics['test_correct'] / test_metrics['test_total']
test_pre = test_metrics['test_precision'] / test_metrics['test_total']
test_rec = test_metrics['test_recall'] / test_metrics['test_total']
test_loss = test_metrics['test_loss'] / test_metrics['test_total']
stats = {'test_acc': test_acc, 'test_pre': test_pre, 'test_rec': test_rec, 'test_loss': test_loss}
wandb.log({"Test/Acc": test_acc, "round": round_idx})
wandb.log({"Test/Pre": test_pre, "round": round_idx})
wandb.log({"Test/Rec": test_rec, "round": round_idx})
wandb.log({"Test/Loss": test_loss, "round": round_idx})
else:
            raise Exception("Unknown format to log metrics for dataset {}!".format(self.args.dataset))
logging.info(stats)

# ===== pandas_ta/momentum/__init__.py (YuvalWein/pandas-ta, MIT) =====

# -*- coding: utf-8 -*-
from .ao import ao
from .apo import apo
from .bias import bias
from .bop import bop
from .brar import brar
from .cci import cci
from .cg import cg
from .cmo import cmo
from .coppock import coppock
from .fisher import fisher
from .inertia import inertia
from .kdj import kdj
from .kst import kst
from .macd import macd
from .mom import mom
from .ppo import ppo
from .psl import psl
from .pvo import pvo
from .roc import roc
from .rsi import rsi
from .rvgi import rvgi
from .slope import slope
from .stoch import stoch
from .trix import trix
from .tsi import tsi
from .uo import uo
from .willr import willr
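
# Hedged usage sketch (not part of the package): each re-exported indicator takes
# a pandas Series (and sometimes extra columns), for example:
#
#   import pandas as pd
#   from pandas_ta.momentum import rsi, macd
#   close = pd.Series([44.3, 44.1, 44.9, 45.6, 45.2, 46.0, 46.3, 45.9])
#   rsi(close, length=4)   # Series of RSI values
#   macd(close)            # DataFrame with MACD, histogram and signal columns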

# ===== src/chemphys/urls.py (Nnonexistent/chemphys, MIT) =====

from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.conf import settings
urlpatterns = patterns(
'', # prefix
url(r'^i18n/setlang/$', 'chemphys.views.set_language_ex'), # override django.views.i18n.set_language
url(r'^i18n/', include('django.conf.urls.i18n')),
url(r'^admin/', include(admin.site.urls)),
url(r'^mailauth/', include('mailauth.urls')),
url(r'^ctxhelp/', include('ctxhelp.urls')),
url(r'^logout$', 'django.contrib.auth.views.logout', {'next_page': '/'}, name='logout'),
url(r'^', include('journal.urls')),
url(r'^([\w-]+)/$', 'pages.views.pages_page', name='pages_page'),
)
if settings.DEBUG:
from django.conf.urls.static import static
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
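
# Hedged sketch (hypothetical, for orientation only): the same routes in the
# list-based style required by Django >= 1.10, with view callables imported
# instead of referenced by string; this project targets the older patterns() API.
#
#   from django.contrib.auth.views import logout
#   urlpatterns = [
#       url(r'^admin/', include(admin.site.urls)),
#       url(r'^logout$', logout, {'next_page': '/'}, name='logout'),
#   ]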

# ===== nbviewer/providers/local/handlers.py (tashay/nbviewer, BSD-3-Clause-Clear) =====

#-----------------------------------------------------------------------------
# Copyright (C) 2013 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
from datetime import datetime
import errno
import io
import os
import stat
from tornado import (
gen,
web,
iostream,
)
from tornado.log import app_log
from ...utils import url_path_join
from ..base import (
cached,
RenderingHandler,
)
from .. import _load_handler_from_location
class LocalFileHandler(RenderingHandler):
"""Renderer for /localfile
Serving notebooks from the local filesystem
"""
# cache key is full uri to avoid mixing download vs view paths
_cache_key_attr = 'uri'
# provider root path
_localfile_path = '/localfile'
@property
def localfile_path(self):
if self.settings.get('localfile_follow_symlinks'):
return os.path.realpath(self.settings.get('localfile_path', ''))
else:
return os.path.abspath(self.settings.get('localfile_path', ''))
def breadcrumbs(self, path):
"""Build a list of breadcrumbs leading up to and including the
given local path.
Parameters
----------
path: str
Relative path up to and including the leaf directory or file to include
in the breadcrumbs list
Returns
-------
list
Breadcrumbs suitable for the link_breadcrumbs() jinja macro
"""
breadcrumbs = [{
'url': url_path_join(self.base_url, self._localfile_path),
'name': 'home'
}]
breadcrumbs.extend(super(LocalFileHandler, self).breadcrumbs(path, self._localfile_path))
return breadcrumbs
@gen.coroutine
def download(self, fullpath):
"""Download the file at the given absolute path.
Parameters
==========
fullpath: str
Absolute path to the file
"""
filename = os.path.basename(fullpath)
st = os.stat(fullpath)
self.set_header('Content-Length', st.st_size)
# Escape commas to workaround Chrome issue with commas in download filenames
self.set_header('Content-Disposition',
'attachment; filename={};'.format(filename.replace(',', '_')))
content = web.StaticFileHandler.get_content(fullpath)
if isinstance(content, bytes):
content = [content]
for chunk in content:
try:
self.write(chunk)
yield self.flush()
except iostream.StreamClosedError:
return
def can_show(self, path):
"""
Generally determine whether the given path is displayable.
This function is useful for failing fast - further checks may
be applied at notebook render to confirm a file may be shown.
"""
if self.settings.get('localfile_follow_symlinks'):
fullpath = os.path.realpath(os.path.join(
self.localfile_path,
path
))
else:
fullpath = os.path.abspath(os.path.normpath(os.path.join(
self.localfile_path,
path
)))
if not fullpath.startswith(self.localfile_path):
app_log.warn("directory traversal attempt: '%s'" %
fullpath)
return False
if not os.path.exists(fullpath):
app_log.warn("path: '%s' does not exist", fullpath)
return False
if any(part.startswith('.') or part.startswith('_')
for part in fullpath.split(os.sep)):
return False
if not self.settings.get('localfile_any_user'):
fstat = os.stat(fullpath)
            # ensure the file/directory is world-readable (S_IROTH bit set)
if not fstat.st_mode & stat.S_IROTH:
app_log.warn("path: '%s' does not have read permissions", fullpath)
return False
if os.path.isdir(fullpath) and not fstat.st_mode & stat.S_IXOTH:
# skip directories we can't execute (i.e. list)
app_log.warn("path: '%s' does not have execute permissions", fullpath)
return False
return True
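    # Hedged illustration of the traversal guard above (hypothetical paths):
    # normpath/abspath collapse '..' segments before the prefix check, so an
    # escaping request fails startswith() and is rejected, e.g.
    #
    #   os.path.abspath(os.path.normpath('/srv/nb/../../etc/passwd'))
    #   -> '/etc/passwd', which does not start with '/srv/nb'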
@cached
@gen.coroutine
def get(self, path):
"""Get a directory listing, rendered notebook, or raw file
at the given path based on the type and URL query parameters.
If the path points to an accessible directory, render its contents.
If the path points to an accessible notebook file, render it.
If the path points to an accessible file and the URL contains a
'download' query parameter, respond with the file as a download.
Parameters
==========
path: str
Local filesystem path
"""
fullpath = os.path.join(self.localfile_path, path)
if not self.can_show(fullpath):
app_log.info("path: '%s' is not visible from within nbviewer", fullpath)
raise web.HTTPError(404)
if os.path.isdir(fullpath):
html = self.show_dir(fullpath, path)
raise gen.Return(self.cache_and_finish(html))
is_download = self.get_query_arguments('download')
if is_download:
            yield self.download(fullpath)
return
try:
with io.open(fullpath, encoding='utf-8') as f:
nbdata = f.read()
except IOError as ex:
if ex.errno == errno.EACCES:
                # py2/3: we can't read the file, so don't reveal that it exists
app_log.info("path : '%s' is not readable from within nbviewer", fullpath)
raise web.HTTPError(404)
raise ex
yield self.finish_notebook(nbdata,
download_url='?download',
msg="file from localfile: %s" % path,
public=False,
breadcrumbs=self.breadcrumbs(path),
title=os.path.basename(path))
# Make available to increase modularity for subclassing
# E.g. so subclasses can implement templates with custom logic
# without having to copy-paste the entire show_dir method
def render_dirview_template(self, entries, breadcrumbs, title, **namespace):
return self.render_template('dirview.html',
entries=entries, breadcrumbs=breadcrumbs,
title=title, **namespace)
def show_dir(self, fullpath, path, **namespace):
"""Render the directory view template for a given filesystem path.
Parameters
==========
fullpath: string
Absolute path on disk to show
path: string
URL path equating to the path on disk
Returns
=======
str
Rendered HTML
"""
entries = []
dirs = []
ipynbs = []
try:
contents = os.listdir(fullpath)
except IOError as ex:
if ex.errno == errno.EACCES:
# py2/3: can't access the dir, so don't give away its presence
app_log.info("contents of path: '%s' cannot be listed from within nbviewer", fullpath)
raise web.HTTPError(404)
for f in contents:
absf = os.path.join(fullpath, f)
if not self.can_show(absf):
continue
entry = {}
entry['name'] = f
# We need to make UTC timestamps conform to true ISO-8601 by
# appending Z(ulu). Without a timezone, the spec says it should be
# treated as local time which is not what we want and causes
# moment.js on the frontend to show times in the past or future
# depending on the user's timezone.
# https://en.wikipedia.org/wiki/ISO_8601#Time_zone_designators
if os.path.isdir(absf):
st = os.stat(absf)
dt = datetime.utcfromtimestamp(st.st_mtime)
entry['modtime'] = dt.isoformat() + 'Z'
entry['url'] = url_path_join(self._localfile_path, path, f)
entry['class'] = 'fa fa-folder-open'
dirs.append(entry)
elif f.endswith('.ipynb'):
st = os.stat(absf)
dt = datetime.utcfromtimestamp(st.st_mtime)
entry['modtime'] = dt.isoformat() + 'Z'
entry['url'] = url_path_join(self._localfile_path, path, f)
entry['class'] = 'fa fa-book'
ipynbs.append(entry)
dirs.sort(key=lambda e: e['name'])
ipynbs.sort(key=lambda e: e['name'])
entries.extend(dirs)
entries.extend(ipynbs)
html = self.render_dirview_template(entries=entries,
breadcrumbs=self.breadcrumbs(path),
title=url_path_join(path, '/'),
**namespace)
return html
def default_handlers(handlers=[], **handler_names):
"""Tornado handlers"""
local_handler = _load_handler_from_location(handler_names['local_handler'])
return handlers + [
(r'/localfile/?(.*)', local_handler, {}),
]

# ===== intro-ansible/venv3/lib/python3.8/site-packages/ansible_collections/community/postgresql/plugins/modules/postgresql_ext.py (Stienvdh/statrick, MIT) =====

#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = r'''
---
module: postgresql_ext
short_description: Add or remove PostgreSQL extensions from a database
description:
- Add or remove PostgreSQL extensions from a database.
options:
name:
description:
- Name of the extension to add or remove.
required: true
type: str
aliases:
- ext
db:
description:
- Name of the database to add or remove the extension to/from.
required: true
type: str
aliases:
- login_db
schema:
description:
- Name of the schema to add the extension to.
type: str
session_role:
description:
- Switch to session_role after connecting.
- The specified session_role must be a role that the current login_user is a member of.
- Permissions checking for SQL commands is carried out as though the session_role were the one that had logged in originally.
type: str
state:
description:
- The database extension state.
default: present
choices: [ absent, present ]
type: str
cascade:
description:
- Automatically install/remove any extensions that this extension depends on
that are not already installed/removed (supported since PostgreSQL 9.6).
type: bool
default: no
login_unix_socket:
description:
- Path to a Unix domain socket for local connections.
type: str
ssl_mode:
description:
- Determines whether or with what priority a secure SSL TCP/IP connection will be negotiated with the server.
- See U(https://www.postgresql.org/docs/current/static/libpq-ssl.html) for more information on the modes.
- Default of C(prefer) matches libpq default.
type: str
default: prefer
choices: [ allow, disable, prefer, require, verify-ca, verify-full ]
ca_cert:
description:
- Specifies the name of a file containing SSL certificate authority (CA) certificate(s).
- If the file exists, the server's certificate will be verified to be signed by one of these authorities.
type: str
aliases: [ ssl_rootcert ]
version:
description:
- Extension version to add or update to. Has effect with I(state=present) only.
- If not specified, the latest extension version will be created.
    - It can't downgrade an extension version.
      When a version downgrade is needed, remove the extension and create a new one with the appropriate version.
- Set I(version=latest) to update the extension to the latest available version.
type: str
trust_input:
description:
- If C(no), check whether values of parameters I(ext), I(schema),
I(version), I(session_role) are potentially dangerous.
- It makes sense to use C(no) only when SQL injections via the parameters are possible.
type: bool
default: yes
version_added: '0.2.0'
seealso:
- name: PostgreSQL extensions
description: General information about PostgreSQL extensions.
link: https://www.postgresql.org/docs/current/external-extensions.html
- name: CREATE EXTENSION reference
description: Complete reference of the CREATE EXTENSION command documentation.
link: https://www.postgresql.org/docs/current/sql-createextension.html
- name: ALTER EXTENSION reference
description: Complete reference of the ALTER EXTENSION command documentation.
link: https://www.postgresql.org/docs/current/sql-alterextension.html
- name: DROP EXTENSION reference
description: Complete reference of the DROP EXTENSION command documentation.
  link: https://www.postgresql.org/docs/current/sql-dropextension.html
notes:
- Supports C(check_mode).
- The default authentication assumes that you are either logging in as
or sudo'ing to the C(postgres) account on the host.
- This module uses I(psycopg2), a Python PostgreSQL database adapter.
- You must ensure that C(psycopg2) is installed on the host before using this module.
- If the remote host is the PostgreSQL server (which is the default case),
then PostgreSQL must also be installed on the remote host.
- For Ubuntu-based systems, install the C(postgresql), C(libpq-dev),
and C(python-psycopg2) packages on the remote host before using this module.
- Incomparable versions, for example PostGIS ``unpackaged``, cannot be installed.
requirements: [ psycopg2 ]
author:
- Daniel Schep (@dschep)
- Thomas O'Donnell (@andytom)
- Sandro Santilli (@strk)
- Andrew Klychkov (@Andersson007)
extends_documentation_fragment:
- community.postgresql.postgres
'''
EXAMPLES = r'''
- name: Adds postgis extension to the database acme in the schema foo
community.postgresql.postgresql_ext:
name: postgis
db: acme
schema: foo
- name: Removes postgis extension to the database acme
community.postgresql.postgresql_ext:
name: postgis
db: acme
state: absent
- name: Adds earthdistance extension to the database template1 cascade
community.postgresql.postgresql_ext:
name: earthdistance
db: template1
cascade: true
# In the example below, if earthdistance extension is installed,
# it will be removed too because it depends on cube:
- name: Removes cube extension from the database acme cascade
community.postgresql.postgresql_ext:
name: cube
db: acme
cascade: yes
state: absent
- name: Create extension foo of version 1.2 or update it if it's already created
community.postgresql.postgresql_ext:
db: acme
name: foo
version: 1.2
- name: Assuming extension foo is created, update it to the latest version
community.postgresql.postgresql_ext:
db: acme
name: foo
version: latest
'''
RETURN = r'''
query:
description: List of executed queries.
returned: always
type: list
sample: ["DROP EXTENSION \"acme\""]
'''
import traceback
from distutils.version import LooseVersion
try:
from psycopg2.extras import DictCursor
except ImportError:
# psycopg2 is checked by connect_to_db()
# from ansible.module_utils.postgres
pass
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.community.postgresql.plugins.module_utils.database import (
check_input,
)
from ansible_collections.community.postgresql.plugins.module_utils.postgres import (
connect_to_db,
get_conn_params,
postgres_common_argument_spec,
)
from ansible.module_utils._text import to_native
executed_queries = []
# ===========================================
# PostgreSQL module specific support methods.
#
def ext_exists(cursor, ext):
query = "SELECT * FROM pg_extension WHERE extname=%(ext)s"
cursor.execute(query, {'ext': ext})
return cursor.rowcount == 1
def ext_delete(cursor, ext, cascade):
if ext_exists(cursor, ext):
query = "DROP EXTENSION \"%s\"" % ext
if cascade:
query += " CASCADE"
cursor.execute(query)
executed_queries.append(query)
return True
else:
return False
def ext_update_version(cursor, ext, version):
"""Update extension version.
Return True if success.
Args:
cursor (cursor) -- cursor object of psycopg2 library
ext (str) -- extension name
version (str) -- extension version
"""
query = "ALTER EXTENSION \"%s\" UPDATE" % ext
params = {}
if version != 'latest':
query += " TO %(ver)s"
params['ver'] = version
cursor.execute(query, params)
executed_queries.append(cursor.mogrify(query, params))
return True
def ext_create(cursor, ext, schema, cascade, version):
query = "CREATE EXTENSION \"%s\"" % ext
params = {}
if schema:
query += " WITH SCHEMA \"%s\"" % schema
if version:
query += " VERSION %(ver)s"
params['ver'] = version
if cascade:
query += " CASCADE"
cursor.execute(query, params)
executed_queries.append(cursor.mogrify(query, params))
return True
def ext_get_versions(cursor, ext):
"""
Get the current created extension version and available versions.
Return tuple (current_version, [list of available versions]).
    Note: the list of available versions contains only versions
    higher than the currently installed version.
    If the extension is not installed, this list will contain all
    available versions.
Args:
cursor (cursor) -- cursor object of psycopg2 library
ext (str) -- extension name
"""
# 1. Get the current extension version:
query = ("SELECT extversion FROM pg_catalog.pg_extension "
"WHERE extname = %(ext)s")
current_version = '0'
cursor.execute(query, {'ext': ext})
res = cursor.fetchone()
if res:
current_version = res[0]
# 2. Get available versions:
query = ("SELECT version FROM pg_available_extension_versions "
"WHERE name = %(ext)s")
cursor.execute(query, {'ext': ext})
res = cursor.fetchall()
available_versions = parse_ext_versions(current_version, res)
if current_version == '0':
current_version = False
return (current_version, available_versions)
def parse_ext_versions(current_version, ext_ver_list):
"""Parse ext versions.
Args:
current_version (str) -- version to compare elements of ext_ver_list with
ext_ver_list (list) -- list containing dicts with versions
Return a sorted list with versions that are higher than current_version.
Note: Incomparable versions (e.g., postgis version "unpackaged") are skipped.
"""
available_versions = []
for line in ext_ver_list:
if line['version'] == 'unpackaged':
continue
try:
if LooseVersion(line['version']) > LooseVersion(current_version):
available_versions.append(line['version'])
except Exception:
# When a version cannot be compared, skip it
# (there's a note in the documentation)
continue
return sorted(available_versions, key=LooseVersion)
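# Hedged sketch of the LooseVersion ordering parse_ext_versions relies on
# (made-up version strings; not called by the module):
def _demo_parse_ext_versions():
    rows = [{'version': '1.10'}, {'version': '1.2'}, {'version': 'unpackaged'}]
    # 'unpackaged' is skipped; '1.10' sorts after '1.2' numerically, not lexically
    return parse_ext_versions('1.1', rows)  # ['1.2', '1.10']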
# ===========================================
# Module execution.
#
def main():
argument_spec = postgres_common_argument_spec()
argument_spec.update(
db=dict(type="str", required=True, aliases=["login_db"]),
ext=dict(type="str", required=True, aliases=["name"]),
schema=dict(type="str"),
state=dict(type="str", default="present", choices=["absent", "present"]),
cascade=dict(type="bool", default=False),
session_role=dict(type="str"),
version=dict(type="str"),
trust_input=dict(type="bool", default=True),
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
)
ext = module.params["ext"]
schema = module.params["schema"]
state = module.params["state"]
cascade = module.params["cascade"]
version = module.params["version"]
session_role = module.params["session_role"]
trust_input = module.params["trust_input"]
changed = False
if not trust_input:
check_input(module, ext, schema, version, session_role)
if version and state == 'absent':
module.warn("Parameter version is ignored when state=absent")
conn_params = get_conn_params(module, module.params)
db_connection = connect_to_db(module, conn_params, autocommit=True)
cursor = db_connection.cursor(cursor_factory=DictCursor)
try:
# Get extension info and available versions:
curr_version, available_versions = ext_get_versions(cursor, ext)
if state == "present":
if version == 'latest':
if available_versions:
version = available_versions[-1]
else:
version = ''
if version:
# If the specific version is passed and it is not available for update:
if version not in available_versions:
if not curr_version:
module.fail_json(msg="Passed version '%s' is not available" % version)
elif LooseVersion(curr_version) == LooseVersion(version):
changed = False
else:
module.fail_json(msg="Passed version '%s' is lower than "
"the current created version '%s' or "
"the passed version is not available" % (version, curr_version))
                # If the specific version is passed and it is higher than the current version:
if curr_version:
if LooseVersion(curr_version) < LooseVersion(version):
if module.check_mode:
changed = True
else:
changed = ext_update_version(cursor, ext, version)
                # If the passed version is already the currently created version:
if curr_version == version:
changed = False
                # If the ext isn't created yet but is available for installation:
elif not curr_version and available_versions:
if module.check_mode:
changed = True
else:
changed = ext_create(cursor, ext, schema, cascade, version)
# If version is not passed:
else:
if not curr_version:
                    # If the ext isn't created yet but is available for installation:
if available_versions:
if module.check_mode:
changed = True
else:
changed = ext_create(cursor, ext, schema, cascade, version)
                    # If the ext isn't created and is not available for installation:
else:
module.fail_json(msg="Extension %s is not installed" % ext)
elif state == "absent":
if curr_version:
if module.check_mode:
changed = True
else:
changed = ext_delete(cursor, ext, cascade)
else:
changed = False
except Exception as e:
db_connection.close()
module.fail_json(msg="Database query failed: %s" % to_native(e), exception=traceback.format_exc())
db_connection.close()
module.exit_json(changed=changed, db=module.params["db"], ext=ext, queries=executed_queries)
if __name__ == '__main__':
main()
| 32.939326 | 129 | 0.648792 |
e08cf217df3c4e9b8b60ef20d7c5504d85cc58ec | 2,773 | py | Python | tests/cli/test_incorrect_calls.py | evhart/nasty | 1b14977d1ba61bdb78d0906c76dd57242a8c8923 | ["Apache-2.0"] | 49 | 2019-11-30T15:04:08.000Z | 2022-01-14T08:25:29.000Z | tests/cli/test_incorrect_calls.py | evhart/nasty | 1b14977d1ba61bdb78d0906c76dd57242a8c8923 | ["Apache-2.0"] | 16 | 2019-12-06T19:10:07.000Z | 2020-12-05T09:40:45.000Z | tests/cli/test_incorrect_calls.py | evhart/nasty | 1b14977d1ba61bdb78d0906c76dd57242a8c8923 | ["Apache-2.0"] | 8 | 2020-04-16T11:38:52.000Z | 2021-07-21T09:17:01.000Z |
#
# Copyright 2019-2020 Lukas Schmelzeisen
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from logging import getLogger
import pytest
from _pytest.capture import CaptureFixture
from nasty import main
logger = getLogger(__name__)
@pytest.mark.parametrize(
"args_string",
[
"trump",
"332308211321425920",
"search trump",
"search --query trump --since 2019",
"search --query trump --until 2019",
"search --query trump --filter latest",
"search --query trump --max-tweets five",
"search --query trump --batch-size 3.0",
"search --query trump --to-batch",
"search --query trump --daily",
"search --query trump --to-batch file --daily",
"search --query trump --since 2019-03-21 --to-batch file --daily",
"search --query trump --until 2019-03-21 --to-batch file --daily",
"replies 332308211321425920",
"replies --tweet-id 332308211321425920 --max-tweets five",
"replies --tweet-id 332308211321425920 --batch-size 3.0",
"replies --tweet-id 332308211321425920 --to-batch",
"thread 332308211321425920",
"thread --tweet-id 332308211321425920 --max-tweets five",
"thread --tweet-id 332308211321425920 --batch-size 3.0",
"thread --tweet-id 332308211321425920 --to-batch",
"batch",
"batch --batch-file",
"batch --batch-file batch.jsonl",
"batch --batch-file batch.jsonl --results-dir",
"batch --results-dir",
"batch --batch-file --results-dir out/",
"idify --in-dir",
"idify --out-dir",
"idify --out-dir out/",
"unidify --in-dir",
"unidify --out-dir",
"unidify --out-dir out/",
],
ids=repr,
)
def test_incorrect_calls(args_string: str, capsys: CaptureFixture) -> None:
args = args_string.split(" ") if args_string != "" else []
logger.debug("Raw arguments: {}".format(args))
with pytest.raises(SystemExit) as excinfo:
main(*args)
assert excinfo.value.code == 2
captured = capsys.readouterr().err
logger.debug("Captured Error:")
for line in captured.split("\n"):
logger.debug(" " + line)
assert "usage: nasty" in captured and ": error: " in captured
| 35.101266 | 75 | 0.636855 |
cd36cc426e980ff3cbf1e3904a811f0bdf207258 | 210 | py | Python | BasicPrograms/isComposite.py | shelcia/InterviewQuestionPython | c1bff9598da01e3b75472e78f7a1b28fdcb2d935 | ["Apache-2.0"] | 1 | 2020-09-30T19:06:15.000Z | 2020-09-30T19:06:15.000Z | BasicPrograms/isComposite.py | shelcia/InterviewQuestionPython | c1bff9598da01e3b75472e78f7a1b28fdcb2d935 | ["Apache-2.0"] | null | null | null | BasicPrograms/isComposite.py | shelcia/InterviewQuestionPython | c1bff9598da01e3b75472e78f7a1b28fdcb2d935 | ["Apache-2.0"] | null | null | null |
# TO CHECK IF IT IS A COMPOSITE NUMBER
N = int(input(""))
count = 0
# STRICT DIVISION: test every candidate divisor from 2 through N // 2
for i in range(2, N // 2 + 1):
if(N % i == 0):
count = count+1
if(count == 0):
print("no")
else:
print("yes")
| 17.5 | 38 | 0.547619 |
161c39416cfa2810137d1f1c5458255229601570 | 2,749 | py | Python | nglview/layout.py | vhorvath/nglview | 7e31c40efe6cfc45e04d6374dc6a43fd62e68b90 | ["MIT"] | 161 | 2020-07-28T14:05:57.000Z | 2022-03-31T08:38:06.000Z | nglview/layout.py | vhorvath/nglview | 7e31c40efe6cfc45e04d6374dc6a43fd62e68b90 | ["MIT"] | 123 | 2020-07-27T15:02:27.000Z | 2022-03-30T18:31:51.000Z | nglview/layout.py | vhorvath/nglview | 7e31c40efe6cfc45e04d6374dc6a43fd62e68b90 | ["MIT"] | 42 | 2020-07-28T09:50:06.000Z | 2022-03-11T18:50:22.000Z |
# TODO: reorg
# simplify code
from ipywidgets import Box, Button, Label, Layout, Tab, ToggleButton
def make_form_item_layout():
    # protect nglview if it is not used in a notebook (when there is no `comm`)
return Layout(
display='flex',
flex_flow='row',
justify_content='space-between',
)
def _make_box_layout(width='100%'):
return Layout(display='flex',
flex_flow='column',
align_items='stretch',
width=width)
def _relayout(box, form_item_layout):
form_items = []
for kid in box.children:
if hasattr(kid,
'description') and not isinstance(kid,
(Button, ToggleButton)):
label_value = kid.description
kid.description = ''
else:
label_value = ''
if isinstance(kid, Button):
box2 = Box([
kid,
], layout=form_item_layout)
else:
box2 = Box([Label(value=label_value), kid],
layout=form_item_layout)
form_items.append(box2)
return form_items
def _relayout_master(box, width='20%'):
"""make nicer layout for box.
    This method will take the `description` of each child to make a corresponding Label.
The `description` will be cleared.
"""
old_children = box.children[:]
form_items = _relayout(box, make_form_item_layout())
form = Box(form_items, layout=_make_box_layout(width=width))
form._ngl_children = old_children
return form
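# A minimal sketch (illustrative; the child widgets are hypothetical):
#   form = _relayout_master(Box([slider, dropdown]), width='30%')
# Each child's `description` becomes a Label; the original children are
# kept on form._ngl_children for later access.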
def _make_autofit(box):
'''
Parameters
----------
box : ipywidgets.Box
children is a list of buttons
Returns
-------
relayouted box
'''
items_layout = Layout(flex='1 1 auto', width='auto')
box.layout = items_layout
return box
def _make_delay_tab(box_factory, selected_index=0):
"""
Parameters
----------
box_factory : list of (func, tab_name)
Example of box_factory: [(_make_gen_box, 'General'),
(_make_repr_box, 'Representation')]
"""
tab = Tab([Box() for box, _ in box_factory])
[tab.set_title(i, title) for i, (_, title) in enumerate(box_factory)]
# trick
if not tab.children[selected_index].children:
tab.selected_index = 1
def on_update_selected_index(change):
index = change['new']
if not tab.children[index].children:
# make widget on demand
tab.children[index].children = [
box_factory[index][0](),
]
tab.observe(on_update_selected_index, names='selected_index')
# trigger
tab.selected_index = selected_index
return tab
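# A minimal usage sketch, following the docstring's own example
# (the *_box factories are hypothetical callables returning a Box):
#   tab = _make_delay_tab([(_make_gen_box, 'General'),
#                          (_make_repr_box, 'Representation')],
#                         selected_index=0)
# Each tab body is only constructed the first time its tab is selected.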
| 25.691589 | 85 | 0.586031 |
ef271214f7f19bc42401f3bb6b7500744b2fce9c | 2,189 | py | Python | tests/batch_genome_mutator_test.py | martinghunt/simutator | 0218c8a5b37fd72eb4e5b2df4cba9f6118f96788 | ["MIT"] | 7 | 2020-01-09T15:25:17.000Z | 2021-08-05T15:58:25.000Z | tests/batch_genome_mutator_test.py | martinghunt/simutator | 0218c8a5b37fd72eb4e5b2df4cba9f6118f96788 | ["MIT"] | null | null | null | tests/batch_genome_mutator_test.py | martinghunt/simutator | 0218c8a5b37fd72eb4e5b2df4cba9f6118f96788 | ["MIT"] | 1 | 2020-01-09T13:10:02.000Z | 2020-01-09T13:10:02.000Z |
import os
import pytest
import shutil
from simutator import batch_genome_mutator
this_dir = os.path.dirname(os.path.abspath(__file__))
data_dir = os.path.join(this_dir, "data", "batch_genome_mutator")
def test_parse_indels_option_string():
with pytest.raises(ValueError):
batch_genome_mutator._parse_indels_option_string("this_is_not_even_close")
with pytest.raises(ValueError):
batch_genome_mutator._parse_indels_option_string("1:not_a_number")
expect = [{"dist": 100, "len": 3}, {"dist": 500, "len": 10}]
got = batch_genome_mutator._parse_indels_option_string("100:3,500:10")
assert got == expect
def test_parse_complex_option_string():
with pytest.raises(ValueError):
batch_genome_mutator._parse_complex_option_string("totally_unexpected")
got = batch_genome_mutator._parse_complex_option_string(
"1000:10:1:2:3:4,500:20:2:3:4:5"
)
expect = [
{"dist": 1000, "len": 10, "snp": 1, "ins": 2, "del": 3, "max_indel_len": 4},
{"dist": 500, "len": 20, "snp": 2, "ins": 3, "del": 4, "max_indel_len": 5},
]
assert got == expect
def test_run_all_mutations():
infile = os.path.join(data_dir, "run_all_mutations.fa")
mutations = {
"snp": [{"dist": 200}, {"dist": 300}],
"ins": [{"dist": 200, "len": 10}],
"del": [{"dist": 250, "len": 5}],
"complex": [
{"dist": 500, "len": 20, "snp": 2, "ins": 3, "del": 4, "max_indel_len": 5}
],
}
outdir = "tmp.run_all_mutations"
if os.path.exists(outdir):
shutil.rmtree(outdir)
os.mkdir(outdir)
outprefix = os.path.join(outdir, "out")
batch_genome_mutator.run_all_mutations(infile, outprefix, mutations)
expect_prefixes = [
outprefix + "." + x
for x in [
"complex.del-4.dist-500.ins-3.len-20.max_indel_len-5.snp-2",
"del.dist-250.len-5",
"ins.dist-200.len-10",
"snp.dist-200",
"snp.dist-300",
]
]
for prefix in expect_prefixes:
for suffix in "fa", "mutated.vcf", "original.vcf":
assert os.path.exists(f"{prefix}.{suffix}")
shutil.rmtree(outdir)
| 31.724638 | 86 | 0.613979 |
850cb4348601ea2ae4ce19dc88732a5293c9f715 | 1,409 | py | Python | hututoo/api/manager.py | Shamsherocode/docker-test | 880d61cf645c2ed203e1373c78f0fd9ecf3bdfb7 | ["MIT"] | null | null | null | hututoo/api/manager.py | Shamsherocode/docker-test | 880d61cf645c2ed203e1373c78f0fd9ecf3bdfb7 | ["MIT"] | null | null | null | hututoo/api/manager.py | Shamsherocode/docker-test | 880d61cf645c2ed203e1373c78f0fd9ecf3bdfb7 | ["MIT"] | null | null | null |
from django.contrib.auth.base_user import BaseUserManager
from django.utils.translation import gettext_lazy as _
class UserManager(BaseUserManager):
use_in_migrations = True
def _create_user(self, email, password, **extra_fields):
"""
Creates and saves a User with the given email and password.
"""
if not email:
raise ValueError('The given email must be set')
email = self.normalize_email(email)
user = self.model(email=email, **extra_fields)
user.set_password(password)
user.save(using=self._db)
return user
def create_user(self, email, password=None, **extra_fields):
if not email:
raise ValueError(_('The Email must be set'))
email = self.normalize_email(email)
user = self.model(email=email, **extra_fields)
user.set_password(password)
user.save()
return user
def create_superuser(self, email, password, **extra_fields):
extra_fields.setdefault('is_staff', True)
extra_fields.setdefault('is_superuser', True)
extra_fields.setdefault('is_active', True)
if extra_fields.get('is_staff') is not True:
raise ValueError(_('Superuser must have is_staff=True.'))
if extra_fields.get('is_superuser') is not True:
raise ValueError(_('Superuser must have is_superuser=True.'))
return self.create_user(email, password, **extra_fields)
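# Illustrative wiring sketch (hypothetical model, not part of this file):
#   class User(AbstractBaseUser):
#       email = models.EmailField(unique=True)
#       USERNAME_FIELD = 'email'
#       objects = UserManager()
#   User.objects.create_superuser('admin@example.com', 's3cret')
# create_superuser defaults is_staff/is_superuser/is_active to True and
# rejects explicit overrides that set the first two to False.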
| 39.138889 | 73 | 0.655075 |
00c5898f0b3cf671347bd9a39e94e98368b76f75 | 610 | py | Python | plotly/validators/scatterpolargl/marker/colorbar/_showtickprefix.py | gnestor/plotly.py | a8ae062795ddbf9867b8578fe6d9e244948c15ff | ["MIT"] | 12 | 2020-04-18T18:10:22.000Z | 2021-12-06T10:11:15.000Z | plotly/validators/scatterpolargl/marker/colorbar/_showtickprefix.py | Vesauza/plotly.py | e53e626d59495d440341751f60aeff73ff365c28 | ["MIT"] | 27 | 2020-04-28T21:23:12.000Z | 2021-06-25T15:36:38.000Z | plotly/validators/scatterpolargl/marker/colorbar/_showtickprefix.py | Vesauza/plotly.py | e53e626d59495d440341751f60aeff73ff365c28 | ["MIT"] | 6 | 2020-04-18T23:07:08.000Z | 2021-11-18T07:53:06.000Z |
import _plotly_utils.basevalidators
class ShowtickprefixValidator(
_plotly_utils.basevalidators.EnumeratedValidator
):
def __init__(
self,
plotly_name='showtickprefix',
parent_name='scatterpolargl.marker.colorbar',
**kwargs
):
super(ShowtickprefixValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop('edit_type', 'calc'),
role=kwargs.pop('role', 'style'),
values=kwargs.pop('values', ['all', 'first', 'last', 'none']),
**kwargs
)
| 27.727273 | 74 | 0.609836 |
fa6415828b2021593ae82a57af05c5ee01a7c0f6 | 985 | py | Python | pypesto/startpoint/latin_hypercube.py | beimbusch/pyPESTO | 8275751eba87efedd6ad1e6923a94d7de0603ddd | ["BSD-3-Clause"] | null | null | null | pypesto/startpoint/latin_hypercube.py | beimbusch/pyPESTO | 8275751eba87efedd6ad1e6923a94d7de0603ddd | ["BSD-3-Clause"] | null | null | null | pypesto/startpoint/latin_hypercube.py | beimbusch/pyPESTO | 8275751eba87efedd6ad1e6923a94d7de0603ddd | ["BSD-3-Clause"] | null | null | null |
import numpy as np
from .util import rescale
def latin_hypercube(**kwargs) -> np.ndarray:
"""
Generate latin hypercube points.
"""
# extract input
n_starts = kwargs['n_starts']
lb = kwargs['lb']
ub = kwargs['ub']
smooth = kwargs.get('smooth', True)
# parse
dim = lb.size
lb = lb.reshape((1, -1))
ub = ub.reshape((1, -1))
# sample
xs = _latin_hypercube(n_starts, dim, smooth)
# re-scale
xs = rescale(xs, lb, ub)
return xs
def _latin_hypercube(
n_starts: int, dim: int, smooth: bool = True
) -> np.ndarray:
"""
Generate simple latin hypercube points in [0, 1].
"""
# uniform points
xs = np.random.random((n_starts, dim))
# assign sorted indices
for j_dim in range(0, dim):
indices = np.argsort(xs[:, j_dim])
xs[:, j_dim] = indices
if smooth:
xs += np.random.random((n_starts, dim))
else:
xs += 0.5
xs /= n_starts
return xs
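# A minimal usage sketch (hypothetical bounds, illustration only):
#   xs = latin_hypercube(n_starts=5,
#                        lb=np.array([0.0, 10.0]),
#                        ub=np.array([1.0, 20.0]))
#   assert xs.shape == (5, 2)
# Each column holds one stratified sample per 1/n_starts interval,
# rescaled to the [lb, ub] box by `rescale`.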
| 19.313725 | 53 | 0.568528 |
f3bf87d38cdc9056de00fe1f32023c5c7c565e5b | 23,231 | py | Python | src/deployment/deploy.py | tunz/onefuzz | 08d1ea03df852e9468076d76a7f58a4ed51b1555 | ["MIT"] | 1 | 2020-09-28T05:02:18.000Z | 2020-09-28T05:02:18.000Z | src/deployment/deploy.py | sandrinejoy/onefuzz | ab41b8986b4d8baa4f2b1f18c1daa57660edb9c0 | ["MIT"] | null | null | null | src/deployment/deploy.py | sandrinejoy/onefuzz | ab41b8986b4d8baa4f2b1f18c1daa57660edb9c0 | ["MIT"] | null | null | null |
#!/usr/bin/env python
#
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
import argparse
import json
import logging
import os
import shutil
import subprocess
import sys
import tempfile
import uuid
import zipfile
from datetime import datetime, timedelta
from data_migration import migrate
from azure.common.client_factory import get_client_from_cli_profile
from azure.common.credentials import get_cli_profile
from azure.cosmosdb.table.tableservice import TableService
from azure.core.exceptions import ResourceExistsError
from azure.graphrbac import GraphRbacManagementClient
from azure.graphrbac.models import (
ApplicationCreateParameters,
AppRole,
GraphErrorException,
OptionalClaims,
RequiredResourceAccess,
ResourceAccess,
ServicePrincipalCreateParameters,
)
from azure.mgmt.eventgrid import EventGridManagementClient
from azure.mgmt.eventgrid.models import (
EventSubscription,
EventSubscriptionFilter,
RetryPolicy,
StorageQueueEventSubscriptionDestination,
)
from azure.mgmt.resource import ResourceManagementClient, SubscriptionClient
from azure.mgmt.resource.resources.models import (
Deployment,
DeploymentMode,
DeploymentProperties,
)
from azure.mgmt.storage import StorageManagementClient
from azure.storage.blob import (
BlobServiceClient,
ContainerSasPermissions,
generate_container_sas,
)
from azure.storage.queue import QueueServiceClient
from msrest.serialization import TZ_UTC
from urllib3.util.retry import Retry
from register_pool_application import (
add_application_password,
authorize_application,
update_registration,
get_application,
register_application,
)
USER_IMPERSONATION = "311a71cc-e848-46a1-bdf8-97ff7156d8e6"
ONEFUZZ_CLI_APP = "72f1562a-8c0c-41ea-beb9-fa2b71c80134"
ONEFUZZ_CLI_AUTHORITY = (
"https://login.microsoftonline.com/72f988bf-86f1-41af-91ab-2d7cd011db47"
)
TELEMETRY_NOTICE = (
"Telemetry collection on stats and OneFuzz failures are sent to Microsoft. "
"To disable, delete the ONEFUZZ_TELEMETRY application setting in the "
"Azure Functions instance"
)
FUNC_TOOLS_ERROR = (
"azure-functions-core-tools is not installed, "
"install v3 using instructions: "
"https://github.com/Azure/azure-functions-core-tools#installing"
)
logger = logging.getLogger("deploy")
def gen_guid():
return str(uuid.uuid4())
class Client:
def __init__(
self,
resource_group,
location,
application_name,
owner,
client_id,
client_secret,
app_zip,
tools,
instance_specific,
third_party,
arm_template,
workbook_data,
create_registration,
migrations,
):
self.resource_group = resource_group
self.arm_template = arm_template
self.location = location
self.application_name = application_name
self.owner = owner
self.app_zip = app_zip
self.tools = tools
self.instance_specific = instance_specific
self.third_party = third_party
self.create_registration = create_registration
self.results = {
"client_id": client_id,
"client_secret": client_secret,
}
self.cli_config = {
"client_id": ONEFUZZ_CLI_APP,
"authority": ONEFUZZ_CLI_AUTHORITY,
}
self.migrations = migrations
if os.name == "nt":
self.azcopy = os.path.join(self.tools, "win64", "azcopy.exe")
else:
self.azcopy = os.path.join(self.tools, "linux", "azcopy")
subprocess.check_output(["chmod", "+x", self.azcopy])
with open(workbook_data) as f:
self.workbook_data = json.load(f)
def get_subscription_id(self):
profile = get_cli_profile()
return profile.get_subscription_id()
def get_location_display_name(self):
location_client = get_client_from_cli_profile(SubscriptionClient)
locations = location_client.subscriptions.list_locations(
self.get_subscription_id()
)
for location in locations:
if location.name == self.location:
return location.display_name
raise Exception("unknown location: %s", self.location)
def check_region(self):
        # At the moment, this only checks whether the specified providers
        # are available in the selected region
location = self.get_location_display_name()
with open(self.arm_template, "r") as handle:
arm = json.load(handle)
client = get_client_from_cli_profile(ResourceManagementClient)
providers = {x.namespace: x for x in client.providers.list()}
unsupported = []
for resource in arm["resources"]:
namespace, name = resource["type"].split("/", 1)
# resource types are in the form of a/b/c....
# only the top two are listed as resource types within providers
name = "/".join(name.split("/")[:2])
if namespace not in providers:
unsupported.append("Unsupported provider: %s" % namespace)
continue
provider = providers[namespace]
resource_types = {x.resource_type: x for x in provider.resource_types}
if name not in resource_types:
unsupported.append(
"Unsupported resource type: %s/%s" % (namespace, name)
)
continue
resource_type = resource_types[name]
if (
location not in resource_type.locations
and len(resource_type.locations) > 0
):
unsupported.append(
"%s/%s is unsupported in %s" % (namespace, name, self.location)
)
if unsupported:
print("The following resources required by onefuzz are not supported:")
print("\n".join(["* " + x for x in unsupported]))
sys.exit(1)
def setup_rbac(self):
"""
Setup the client application for the OneFuzz instance.
By default, Service Principals do not have access to create
client applications in AAD.
"""
if self.results["client_id"] and self.results["client_secret"]:
logger.info("using existing client application")
return
client = get_client_from_cli_profile(GraphRbacManagementClient)
logger.info("checking if RBAC already exists")
try:
existing = list(
client.applications.list(
filter="displayName eq '%s'" % self.application_name
)
)
except GraphErrorException:
logger.error("unable to query RBAC. Provide client_id and client_secret")
sys.exit(1)
if not existing:
logger.info("creating Application registration")
url = "https://%s.azurewebsites.net" % self.application_name
params = ApplicationCreateParameters(
display_name=self.application_name,
identifier_uris=[url],
reply_urls=[url + "/.auth/login/aad/callback"],
optional_claims=OptionalClaims(id_token=[], access_token=[]),
required_resource_access=[
RequiredResourceAccess(
resource_access=[
ResourceAccess(id=USER_IMPERSONATION, type="Scope")
],
resource_app_id="00000002-0000-0000-c000-000000000000",
)
],
app_roles=[
AppRole(
allowed_member_types=["Application"],
display_name="CliClient",
id=str(uuid.uuid4()),
is_enabled=True,
description="Allows access from the CLI.",
value="CliClient",
),
AppRole(
allowed_member_types=["Application"],
display_name="LabMachine",
id=str(uuid.uuid4()),
is_enabled=True,
description="Allow access from a lab machine.",
value="LabMachine",
),
],
)
app = client.applications.create(params)
logger.info("creating service principal")
service_principal_params = ServicePrincipalCreateParameters(
account_enabled=True,
app_role_assignment_required=False,
service_principal_type="Application",
app_id=app.app_id,
)
client.service_principals.create(service_principal_params)
else:
app = existing[0]
creds = list(client.applications.list_password_credentials(app.object_id))
client.applications.update_password_credentials(app.object_id, creds)
(password_id, password) = add_application_password(app.object_id)
onefuzz_cli_app_uuid = uuid.UUID(ONEFUZZ_CLI_APP)
cli_app = get_application(onefuzz_cli_app_uuid)
if cli_app is None:
logger.info(
"Could not find the default CLI application under the current subscription, creating a new one"
)
app_info = register_application("onefuzz-cli", self.application_name)
self.cli_config = {
"client_id": app_info.client_id,
"authority": app_info.authority,
}
else:
authorize_application(onefuzz_cli_app_uuid, app.app_id)
self.results["client_id"] = app.app_id
self.results["client_secret"] = password
# Log `client_secret` for consumption by CI.
logger.debug("client_id: %s client_secret: %s", app.app_id, password)
def deploy_template(self):
logger.info("deploying arm template: %s", self.arm_template)
with open(self.arm_template, "r") as template_handle:
template = json.load(template_handle)
client = get_client_from_cli_profile(ResourceManagementClient)
client.resource_groups.create_or_update(
self.resource_group, {"location": self.location}
)
expiry = (datetime.now(TZ_UTC) + timedelta(days=365)).strftime(
"%Y-%m-%dT%H:%M:%SZ"
)
params = {
"name": {"value": self.application_name},
"owner": {"value": self.owner},
"clientId": {"value": self.results["client_id"]},
"clientSecret": {"value": self.results["client_secret"]},
"signedExpiry": {"value": expiry},
"workbookData": {"value": self.workbook_data},
}
deployment = Deployment(
properties=DeploymentProperties(
mode=DeploymentMode.incremental, template=template, parameters=params
)
)
result = client.deployments.create_or_update(
self.resource_group, gen_guid(), deployment
).result()
if result.properties.provisioning_state != "Succeeded":
logger.error(
"error deploying: %s",
json.dumps(result.as_dict(), indent=4, sort_keys=True),
)
sys.exit(1)
self.results["deploy"] = result.properties.outputs
def apply_migrations(self):
self.results["deploy"]["func-storage"]["value"]
name = self.results["deploy"]["func-name"]["value"]
key = self.results["deploy"]["func-key"]["value"]
table_service = TableService(account_name=name, account_key=key)
migrate(table_service, self.migrations)
def create_queues(self):
logger.info("creating eventgrid destination queue")
name = self.results["deploy"]["func-name"]["value"]
key = self.results["deploy"]["func-key"]["value"]
account_url = "https://%s.queue.core.windows.net" % name
client = QueueServiceClient(
account_url=account_url,
credential={"account_name": name, "account_key": key},
)
for queue in ["file-changes", "heartbeat", "proxy", "update-queue"]:
try:
client.create_queue(queue)
except ResourceExistsError:
pass
def create_eventgrid(self):
logger.info("creating eventgrid subscription")
src_resource_id = self.results["deploy"]["fuzz-storage"]["value"]
dst_resource_id = self.results["deploy"]["func-storage"]["value"]
client = get_client_from_cli_profile(StorageManagementClient)
event_subscription_info = EventSubscription(
destination=StorageQueueEventSubscriptionDestination(
resource_id=dst_resource_id, queue_name="file-changes"
),
filter=EventSubscriptionFilter(
included_event_types=[
"Microsoft.Storage.BlobCreated",
"Microsoft.Storage.BlobDeleted",
]
),
retry_policy=RetryPolicy(
max_delivery_attempts=30,
event_time_to_live_in_minutes=1440,
),
)
client = get_client_from_cli_profile(EventGridManagementClient)
result = client.event_subscriptions.create_or_update(
src_resource_id, "onefuzz1", event_subscription_info
).result()
if result.provisioning_state != "Succeeded":
raise Exception(
"eventgrid subscription failed: %s"
% json.dumps(result.as_dict(), indent=4, sort_keys=True),
)
def upload_tools(self):
logger.info("uploading tools from %s", self.tools)
account_name = self.results["deploy"]["func-name"]["value"]
key = self.results["deploy"]["func-key"]["value"]
account_url = "https://%s.blob.core.windows.net" % account_name
client = BlobServiceClient(account_url, credential=key)
if "tools" not in [x["name"] for x in client.list_containers()]:
client.create_container("tools")
expiry = datetime.utcnow() + timedelta(minutes=30)
sas = generate_container_sas(
account_name,
"tools",
account_key=key,
permission=ContainerSasPermissions(
read=True, write=True, delete=True, list=True
),
expiry=expiry,
)
url = "%s/%s?%s" % (account_url, "tools", sas)
subprocess.check_output(
[self.azcopy, "sync", self.tools, url, "--delete-destination", "true"]
)
def upload_instance_setup(self):
logger.info("uploading instance-specific-setup from %s", self.instance_specific)
account_name = self.results["deploy"]["func-name"]["value"]
key = self.results["deploy"]["func-key"]["value"]
account_url = "https://%s.blob.core.windows.net" % account_name
client = BlobServiceClient(account_url, credential=key)
if "instance-specific-setup" not in [
x["name"] for x in client.list_containers()
]:
client.create_container("instance-specific-setup")
expiry = datetime.utcnow() + timedelta(minutes=30)
sas = generate_container_sas(
account_name,
"instance-specific-setup",
account_key=key,
permission=ContainerSasPermissions(
read=True, write=True, delete=True, list=True
),
expiry=expiry,
)
url = "%s/%s?%s" % (account_url, "instance-specific-setup", sas)
subprocess.check_output(
[
self.azcopy,
"sync",
self.instance_specific,
url,
"--delete-destination",
"true",
]
)
def upload_third_party(self):
logger.info("uploading third-party tools from %s", self.third_party)
account_name = self.results["deploy"]["fuzz-name"]["value"]
key = self.results["deploy"]["fuzz-key"]["value"]
account_url = "https://%s.blob.core.windows.net" % account_name
client = BlobServiceClient(account_url, credential=key)
containers = [x["name"] for x in client.list_containers()]
for name in os.listdir(self.third_party):
path = os.path.join(self.third_party, name)
if not os.path.isdir(path):
continue
if name not in containers:
client.create_container(name)
expiry = datetime.utcnow() + timedelta(minutes=30)
sas = generate_container_sas(
account_name,
name,
account_key=key,
permission=ContainerSasPermissions(
read=True, write=True, delete=True, list=True
),
expiry=expiry,
)
url = "%s/%s?%s" % (account_url, name, sas)
subprocess.check_output(
[self.azcopy, "sync", path, url, "--delete-destination", "true"]
)
def deploy_app(self):
logger.info("deploying function app %s", self.app_zip)
current_dir = os.getcwd()
with tempfile.TemporaryDirectory() as tmpdirname:
with zipfile.ZipFile(self.app_zip, "r") as zip_ref:
zip_ref.extractall(tmpdirname)
os.chdir(tmpdirname)
subprocess.check_output(
[
shutil.which("func"),
"azure",
"functionapp",
"publish",
self.application_name,
"--python",
"--no-build",
],
env=dict(os.environ, CLI_DEBUG="1"),
)
os.chdir(current_dir)
def update_registration(self):
if not self.create_registration:
return
update_registration(self.application_name)
def done(self):
logger.info(TELEMETRY_NOTICE)
client_secret_arg = (
("--client_secret %s" % self.cli_config["client_secret"])
if "client_secret" in self.cli_config
else ""
)
logger.info(
"Update your CLI config via: onefuzz config --endpoint https://%s.azurewebsites.net --authority %s --client_id %s %s",
self.application_name,
self.cli_config["authority"],
self.cli_config["client_id"],
client_secret_arg,
)
def arg_dir(arg):
if not os.path.isdir(arg):
raise argparse.ArgumentTypeError("not a directory: %s" % arg)
return arg
def arg_file(arg):
if not os.path.isfile(arg):
raise argparse.ArgumentTypeError("not a file: %s" % arg)
return arg
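# Both validators plug into argparse's `type=` hook; argparse reports the
# ArgumentTypeError as a usage error. Illustrative use (mirrors main below):
#   parser.add_argument("--tools", type=arg_dir, default="tools")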
def main():
states = [
("check_region", Client.check_region),
("rbac", Client.setup_rbac),
("arm", Client.deploy_template),
("apply_migrations", Client.apply_migrations),
("queues", Client.create_queues),
("eventgrid", Client.create_eventgrid),
("tools", Client.upload_tools),
("instance-specific-setup", Client.upload_instance_setup),
("third-party", Client.upload_third_party),
("api", Client.deploy_app),
("update_registration", Client.update_registration),
]
formatter = argparse.ArgumentDefaultsHelpFormatter
parser = argparse.ArgumentParser(formatter_class=formatter)
parser.add_argument("location")
parser.add_argument("resource_group")
parser.add_argument("application_name")
parser.add_argument("owner")
parser.add_argument(
"--arm-template",
type=arg_file,
default="azuredeploy.json",
help="(default: %(default)s)",
)
parser.add_argument(
"--workbook-data",
type=arg_file,
default="workbook-data.json",
help="(default: %(default)s)",
)
parser.add_argument(
"--app-zip",
type=arg_file,
default="api-service.zip",
help="(default: %(default)s)",
)
parser.add_argument(
"--tools", type=arg_dir, default="tools", help="(default: %(default)s)"
)
parser.add_argument(
"--instance_specific",
type=arg_dir,
default="instance-specific-setup",
help="(default: %(default)s)",
)
parser.add_argument(
"--third-party",
type=arg_dir,
default="third-party",
help="(default: %(default)s)",
)
parser.add_argument("--client_id")
parser.add_argument("--client_secret")
parser.add_argument(
"--start_at",
default=states[0][0],
choices=[x[0] for x in states],
help=(
"Debug deployments by starting at a specific state. "
"NOT FOR PRODUCTION USE. (default: %(default)s)"
),
)
parser.add_argument("-v", "--verbose", action="store_true")
parser.add_argument(
"--create_pool_registration",
default=False,
type=bool,
help="Create an application registration and/or generate a "
"password for the pool agent (default: False)",
)
parser.add_argument(
"--apply_migrations",
type=str,
nargs="+",
default=[],
help="list of migration to apply to the azure table",
)
args = parser.parse_args()
if shutil.which("func") is None:
logger.error(FUNC_TOOLS_ERROR)
sys.exit(1)
client = Client(
args.resource_group,
args.location,
args.application_name,
args.owner,
args.client_id,
args.client_secret,
args.app_zip,
args.tools,
args.instance_specific,
args.third_party,
args.arm_template,
args.workbook_data,
args.create_pool_registration,
args.apply_migrations,
)
if args.verbose:
level = logging.DEBUG
else:
level = logging.WARN
logging.basicConfig(level=level)
logging.getLogger("deploy").setLevel(logging.INFO)
# TODO: using az_cli resets logging defaults. For now, force these
# to be WARN level
if not args.verbose:
for entry in [
"adal-python",
"msrest.universal_http",
"urllib3.connectionpool",
"az_command_data_logger",
"msrest.service_client",
"azure.core.pipeline.policies.http_logging_policy",
]:
logging.getLogger(entry).setLevel(logging.WARN)
if args.start_at != states[0][0]:
logger.warning(
"*** Starting at a non-standard deployment state. "
"This may result in a broken deployment. ***"
)
started = False
for state in states:
if args.start_at == state[0]:
started = True
if started:
state[1](client)
client.done()
if __name__ == "__main__":
main()
| 34.365385 | 130 | 0.591666 |
a9695720a078e5b15a763b061a59c5e992690cba | 4,323 | py | Python | train.py | yashbonde/text2sql | a8202c2bb9c6cd845674492d900c13c07df6c69b | ["MIT"] | 11 | 2020-08-07T13:25:43.000Z | 2021-09-16T10:13:15.000Z | train.py | yashbonde/text2sql | a8202c2bb9c6cd845674492d900c13c07df6c69b | ["MIT"] | 1 | 2021-01-31T13:13:11.000Z | 2021-09-16T13:32:45.000Z | train.py | yashbonde/text2sql | a8202c2bb9c6cd845674492d900c13c07df6c69b | ["MIT"] | 6 | 2020-08-07T05:12:32.000Z | 2021-05-25T12:29:15.000Z |
"""going by o2f format and using huggingface library
12.11.2020 - @yashbonde"""
import os
from types import SimpleNamespace
from argparse import ArgumentParser
from text2sql.data import T2SDataset, T2SDatasetConfig
from text2sql.model import Text2SQLModel, Text2SQLModelConfig
from text2sql.trainer import *
# --- arguments
args = ArgumentParser(description="Text2SQL Model Trainer")
# --- paths
args.add_argument("--save_folder", default = "models", type = str, help = "folder to save all models")
args.add_argument("--name", type = str, help = "name of this particular model")
args.add_argument("--schema_file", type = str, help = "path to schema file", default="/workspace/text2sql/fdata/all_schema.json")
args.add_argument("--questions_tsv", type = str, help = "path to text/sql tsv", default="/workspace/text2sql/fdata/all_questions.tsv")
args.add_argument("--tokenizer_path", type = str, help = "path to sentencepiece model file", default="/workspace/text2sql/fdata/model.model")
args.add_argument("--seed", default = None, type = int, help = "seed value for training")
# --- arch
args.add_argument("--n_embd", default = 256, type = int, help = "Embedding Dim")
args.add_argument("--n_decoder_layers", default = 4, type = int, help = "Num Decoder layers")
args.add_argument("--n_sent_layers", default = 4, type = int, help = "Num layers for sentence encoder")
args.add_argument("--n_db_layers", default = 4, type = int, help = "Num layers for DB encoder")
args.add_argument("--n_head", default = 4, type = int, help = "Num Heads")
args.add_argument("--maxlen", default = 390, type = int, help = "Maximum length of decoder")
# --- data
args.add_argument("--mult", default = 3, type = int, help = "Size of dataset")
args.add_argument("--pf", default = 0.6, type = float, help = "Probability of using fields in training sequence")
args.add_argument("--fmax", default = 0.8, type = float, help = "Max fields probability")
args.add_argument("--fmin", default = 0.1, type = float, help = "Min fields probability")
# --- trainer
args.add_argument("--n_epochs", default = 3, type = int, help = "Number of epochs to train")
args.add_argument("--batch_size", default = 32, type = int, help = "Mini-Batch Size")
args.add_argument("--lr", default = 1e-4, type = float, help = "Learning Rate")
args.add_argument("--sample_every", default = 5, type = int, help = "After t")
args.add_argument("--train_ratio", default = 0.9, type = float, help = "Ratio of train data, rest is testing")
args.add_argument("--beta1", default = 0.9, type = float, help = "Adam.beta1")
args.add_argument("--beta2", default = 0.99, type = float, help = "Adam.beta2")
args.add_argument("--grad_norm_clip", default = 1.0, type = float, help = "Adam.beta2")
args.add_argument("--patience", default = 6, type = int, help = "training stops after patience runs out")
# --- parse and add more
args = args.parse_args()
tb_path = os.path.join(args.save_folder, args.name)
ckpt_path = os.path.join(tb_path, f"{args.name}.pt")
args = SimpleNamespace(**vars(args), ckpt_path = ckpt_path, tb_path = tb_path)
# make folders
os.makedirs(args.save_folder, exist_ok=True)
os.makedirs(args.tb_path, exist_ok=True)
# DataSet
datasetConf = T2SDatasetConfig(
schema_file=args.schema_file,
questions_file=args.questions_tsv,
maxlen=args.maxlen,
tokenizer_path=args.tokenizer_path
)
print(datasetConf)
dtrain = T2SDataset(config=datasetConf, mode="train")
dtest = T2SDataset(config=datasetConf, mode="test")
# Model
modelConfig = Text2SQLModelConfig(
vocab_size=datasetConf.tokenizer.vocab_size(),
n_embd=args.n_embd,
maxlen=args.maxlen,
n_decoder_layers=args.n_decoder_layers,
n_sent_layers=args.n_sent_layers,
n_db_layers=args.n_db_layers,
n_head=args.n_head,
)
print(modelConfig)
model = Text2SQLModel(modelConfig)
# Trainer
trainConfig = TrainerConfig(
lr=args.lr,
max_epochs=args.n_epochs,
batch_size=args.batch_size,
betas=(args.beta1, args.beta2),
grad_norm_clip=args.grad_norm_clip,
sample_every=args.sample_every,
num_batch=(len(dtrain) // args.batch_size) + int(len(dtrain) % args.batch_size != 0),
patience=args.patience,
tb_path=args.tb_path,
ckpt_path=args.ckpt_path
)
print(trainConfig)
trainer = Trainer(model, dtrain, dtest, trainConfig)
trainer.train(datasetConf)
| 44.112245 | 141 | 0.724034 |
76242ebab72de9145400ca8ddabe53a9e49bd27c | 7,183 | py | Python | cirq-core/cirq/ion/ion_device_test.py | Hosseinberg/Cirq | 8b64834ba601e8b48394753c24800e16b36a59b1 | ["Apache-2.0"] | null | null | null | cirq-core/cirq/ion/ion_device_test.py | Hosseinberg/Cirq | 8b64834ba601e8b48394753c24800e16b36a59b1 | ["Apache-2.0"] | null | null | null | cirq-core/cirq/ion/ion_device_test.py | Hosseinberg/Cirq | 8b64834ba601e8b48394753c24800e16b36a59b1 | ["Apache-2.0"] | null | null | null |
# Copyright 2018 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import timedelta
import numpy as np
import pytest
import cirq
import cirq.ion as ci
import cirq.testing
def ion_device(chain_length: int, use_timedelta=False) -> ci.IonDevice:
ms = 1000 * cirq.Duration(nanos=1) if not use_timedelta else timedelta(microseconds=1)
return ci.IonDevice( # type: ignore
measurement_duration=100 * ms, # type: ignore
twoq_gates_duration=200 * ms, # type: ignore
oneq_gates_duration=10 * ms, # type: ignore
qubits=cirq.LineQubit.range(chain_length),
)
class NotImplementedOperation(cirq.Operation):
def with_qubits(self, *new_qubits) -> 'NotImplementedOperation':
raise NotImplementedError()
@property
def qubits(self):
raise NotImplementedError()
def test_init():
d = ion_device(3)
ms = 1000 * cirq.Duration(nanos=1)
q0 = cirq.LineQubit(0)
q1 = cirq.LineQubit(1)
q2 = cirq.LineQubit(2)
assert d.qubits == {q0, q1, q2}
assert d.duration_of(cirq.Z(q0)) == 10 * ms
assert d.duration_of(cirq.measure(q0)) == 100 * ms
assert d.duration_of(cirq.measure(q0, q1)) == 100 * ms
assert d.duration_of(cirq.ops.XX(q0, q1)) == 200 * ms
with pytest.raises(ValueError):
_ = d.duration_of(cirq.SingleQubitGate().on(q0))
with pytest.raises(TypeError, match="NamedQubit"):
_ = cirq.IonDevice(
measurement_duration=ms,
twoq_gates_duration=ms,
oneq_gates_duration=ms,
qubits=[cirq.LineQubit(0), cirq.NamedQubit("a")],
)
def test_init_timedelta():
d = ion_device(3, use_timedelta=True)
ms = 1000 * cirq.Duration(nanos=1)
q0 = cirq.LineQubit(0)
q1 = cirq.LineQubit(1)
q2 = cirq.LineQubit(2)
assert d.qubits == {q0, q1, q2}
assert d.duration_of(cirq.Z(q0)) == 10 * ms
assert d.duration_of(cirq.measure(q0)) == 100 * ms
assert d.duration_of(cirq.measure(q0, q1)) == 100 * ms
assert d.duration_of(cirq.ops.XX(q0, q1)) == 200 * ms
with pytest.raises(ValueError):
_ = d.duration_of(cirq.SingleQubitGate().on(q0))
def test_decomposition_deprecated():
d = ion_device(3)
q0 = cirq.LineQubit(0)
q1 = cirq.LineQubit(1)
with cirq.testing.assert_deprecated('ConvertToIonGates', deadline='v0.15'):
assert d.decompose_operation(cirq.H(q0)) == [
cirq.rx(np.pi * 1.0).on(cirq.LineQubit(0)),
cirq.ry(np.pi * -0.5).on(cirq.LineQubit(0)),
]
circuit = cirq.Circuit()
circuit.append([cirq.X(q0), cirq.CNOT(q0, q1)])
ion_circuit = d.decompose_circuit(circuit)
d.validate_circuit(ion_circuit)
cirq.testing.assert_circuits_with_terminal_measurements_are_equivalent(
circuit, ion_circuit, atol=1e-6
)
def test_repr():
d = ion_device(3)
assert repr(d) == (
"IonDevice("
"measurement_duration=cirq.Duration(micros=100), "
"twoq_gates_duration=cirq.Duration(micros=200), "
"oneq_gates_duration=cirq.Duration(micros=10) "
"qubits=[cirq.LineQubit(0), cirq.LineQubit(1), "
"cirq.LineQubit(2)])"
)
def test_validate_measurement_non_adjacent_qubits_ok():
d = ion_device(3)
d.validate_operation(
cirq.GateOperation(cirq.MeasurementGate(2, 'key'), (cirq.LineQubit(0), cirq.LineQubit(1)))
)
def test_validate_operation_existing_qubits():
d = ion_device(3)
d.validate_operation(cirq.GateOperation(cirq.XX, (cirq.LineQubit(0), cirq.LineQubit(1))))
d.validate_operation(cirq.Z(cirq.LineQubit(0)))
d.validate_operation(
cirq.PhasedXPowGate(phase_exponent=0.75, exponent=0.25, global_shift=0.1).on(
cirq.LineQubit(1)
)
)
with pytest.raises(ValueError):
d.validate_operation(cirq.CZ(cirq.LineQubit(0), cirq.LineQubit(-1)))
with pytest.raises(ValueError):
d.validate_operation(cirq.Z(cirq.LineQubit(-1)))
with pytest.raises(ValueError):
d.validate_operation(cirq.CZ(cirq.LineQubit(1), cirq.LineQubit(1)))
with pytest.raises(ValueError):
d.validate_operation(cirq.X(cirq.NamedQubit("q1")))
def test_validate_operation_supported_gate():
d = ion_device(3)
class MyGate(cirq.Gate):
def num_qubits(self):
return 1
d.validate_operation(cirq.GateOperation(cirq.Z, [cirq.LineQubit(0)]))
assert MyGate().num_qubits() == 1
with pytest.raises(ValueError):
d.validate_operation(cirq.GateOperation(MyGate(), [cirq.LineQubit(0)]))
with pytest.raises(ValueError):
d.validate_operation(NotImplementedOperation())
def test_can_add_operation_into_moment_device_deprecated():
with cirq.testing.assert_deprecated('can_add_operation_into_moment', deadline='v0.15', count=5):
d = ion_device(3)
q0 = cirq.LineQubit(0)
q1 = cirq.LineQubit(1)
q2 = cirq.LineQubit(2)
q3 = cirq.LineQubit(3)
circuit = cirq.Circuit()
circuit.append(cirq.XX(q0, q1))
for moment in circuit:
assert not d.can_add_operation_into_moment(cirq.XX(q2, q0), moment)
assert not d.can_add_operation_into_moment(cirq.XX(q1, q2), moment)
assert d.can_add_operation_into_moment(cirq.XX(q2, q3), moment)
assert d.can_add_operation_into_moment(cirq.Z(q3), moment)
circuit = cirq.Circuit([cirq.X(q0)])
assert d.can_add_operation_into_moment(cirq.XX(q1, q2), circuit[0])
def test_ion_device_eq():
eq = cirq.testing.EqualsTester()
eq.make_equality_group(lambda: ion_device(3))
eq.make_equality_group(lambda: ion_device(4))
def test_validate_circuit_repeat_measurement_keys():
d = ion_device(3)
circuit = cirq.Circuit()
circuit.append(
[cirq.measure(cirq.LineQubit(0), key='a'), cirq.measure(cirq.LineQubit(1), key='a')]
)
with pytest.raises(ValueError, match='Measurement key a repeated'):
d.validate_circuit(circuit)
def test_ion_device_str():
assert str(ion_device(3)) == "0───1───2"
def test_ion_device_pretty_repr():
cirq.testing.assert_repr_pretty(ion_device(3), "0───1───2")
cirq.testing.assert_repr_pretty(ion_device(3), "IonDevice(...)", cycle=True)
def test_at():
d = ion_device(3)
assert d.at(-1) is None
assert d.at(0) == cirq.LineQubit(0)
assert d.at(2) == cirq.LineQubit(2)
def test_qubit_set():
assert ion_device(3).qubit_set() == frozenset(cirq.LineQubit.range(3))
def test_qid_pairs_deprecated():
with cirq.testing.assert_deprecated('device.metadata', deadline='v0.15', count=1):
assert len(ion_device(10).qid_pairs()) == 45
| 32.799087 | 100 | 0.673396 |
7982d2dd66ee3a4bcf2c58a9712f55712b65116e | 366 | py | Python | cafe/migrations/0002_cafe_comment.py | MoGakCo/MogakCo | c0a5c6366179e3e2ad9daeb7cf4c8ca59790ac63 | ["MIT"] | 7 | 2018-02-06T07:42:20.000Z | 2019-03-25T03:49:11.000Z | cafe/migrations/0002_cafe_comment.py | MoGakCo/MogakCo | c0a5c6366179e3e2ad9daeb7cf4c8ca59790ac63 | ["MIT"] | null | null | null | cafe/migrations/0002_cafe_comment.py | MoGakCo/MogakCo | c0a5c6366179e3e2ad9daeb7cf4c8ca59790ac63 | ["MIT"] | 1 | 2018-01-21T12:15:00.000Z | 2018-01-21T12:15:00.000Z |
# Generated by Django 2.0.1 on 2018-01-23 07:17
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('cafe', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='cafe',
name='comment',
field=models.TextField(default=''),
),
]
| 19.263158 | 47 | 0.571038 |
3c7c7a642644032c1b9fbd8e44ffe8307c869183 | 2,620 | py | Python | behavior_regularized_offline_rl/brac/train_online.py | deepneuralmachine/google-research | d2ce2cf0f5c004f8d78bfeddf6e88e88f4840231 | ["Apache-2.0"] | 23,901 | 2018-10-04T19:48:53.000Z | 2022-03-31T21:27:42.000Z | behavior_regularized_offline_rl/brac/train_online.py | deepneuralmachine/google-research | d2ce2cf0f5c004f8d78bfeddf6e88e88f4840231 | ["Apache-2.0"] | 891 | 2018-11-10T06:16:13.000Z | 2022-03-31T10:42:34.000Z | behavior_regularized_offline_rl/brac/train_online.py | deepneuralmachine/google-research | d2ce2cf0f5c004f8d78bfeddf6e88e88f4840231 | ["Apache-2.0"] | 6,047 | 2018-10-12T06:31:02.000Z | 2022-03-31T13:59:28.000Z |
# coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Online training binary."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from absl import app
from absl import flags
from absl import logging
import gin
import tensorflow.compat.v1 as tf
from behavior_regularized_offline_rl.brac import agents
from behavior_regularized_offline_rl.brac import train_eval_online
from behavior_regularized_offline_rl.brac import utils
tf.compat.v1.enable_v2_behavior()
flags.DEFINE_string('root_dir',
os.path.join(os.getenv('HOME', '/'),
'tmp/offlinerl/policies'),
'Root directory for writing logs/summaries/checkpoints.')
flags.DEFINE_string('sub_dir', '0', '')
flags.DEFINE_string('agent_name', 'sac', 'agent name.')
flags.DEFINE_float('eval_target', 1000,
                   'threshold for a partially trained policy')
flags.DEFINE_string('env_name', 'Walker2d-v2', 'env name.')
flags.DEFINE_integer('seed', 0, 'random seed.')
flags.DEFINE_integer('total_train_steps', int(5e5), '')
flags.DEFINE_integer('n_eval_episodes', 20, '')
flags.DEFINE_multi_string('gin_file', None, 'Paths to the gin-config files.')
flags.DEFINE_multi_string('gin_bindings', None, 'Gin binding parameters.')
FLAGS = flags.FLAGS
def main(_):
logging.set_verbosity(logging.INFO)
gin.parse_config_files_and_bindings(FLAGS.gin_file, FLAGS.gin_bindings)
if FLAGS.sub_dir == 'auto':
sub_dir = utils.get_datetime()
else:
sub_dir = FLAGS.sub_dir
log_dir = os.path.join(
FLAGS.root_dir,
FLAGS.env_name,
FLAGS.agent_name,
sub_dir,
)
utils.maybe_makedirs(log_dir)
train_eval_online.train_eval_online(
log_dir=log_dir,
agent_module=agents.AGENT_MODULES_DICT[FLAGS.agent_name],
env_name=FLAGS.env_name,
total_train_steps=FLAGS.total_train_steps,
n_eval_episodes=FLAGS.n_eval_episodes,
eval_target=FLAGS.eval_target,
)
if __name__ == '__main__':
app.run(main)
| 32.345679 | 77 | 0.732443 |
02eb6e9d885dfbb12df89a83f2227ef99bcd91ca | 74,252 | py | Python | src/twisted/web/client.py | p12tic/twisted | 9a7ce38d10e28dda92ecf7174856ba59096d6b83 | ["MIT", "Unlicense"] | 32 | 2019-11-14T07:49:33.000Z | 2022-02-16T00:49:22.000Z | src/twisted/web/client.py | p12tic/twisted | 9a7ce38d10e28dda92ecf7174856ba59096d6b83 | ["MIT", "Unlicense"] | 9 | 2019-09-06T18:21:59.000Z | 2022-01-13T03:04:11.000Z | src/twisted/web/client.py | p12tic/twisted | 9a7ce38d10e28dda92ecf7174856ba59096d6b83 | ["MIT", "Unlicense"] | 16 | 2019-06-25T13:26:43.000Z | 2022-03-07T07:29:12.000Z |
# -*- test-case-name: twisted.web.test.test_webclient,twisted.web.test.test_agent -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
HTTP client.
"""
from __future__ import division, absolute_import
import os
import warnings
try:
from urlparse import urlunparse, urljoin, urldefrag
except ImportError:
from urllib.parse import urljoin, urldefrag
from urllib.parse import urlunparse as _urlunparse
def urlunparse(parts):
result = _urlunparse(tuple([p.decode("charmap") for p in parts]))
return result.encode("charmap")
import zlib
from functools import wraps
from zope.interface import implementer
from twisted.python.compat import _PY3, networkString
from twisted.python.compat import nativeString, intToBytes, unicode, itervalues
from twisted.python.deprecate import deprecatedModuleAttribute, deprecated
from twisted.python.failure import Failure
from incremental import Version
from twisted.web.iweb import IPolicyForHTTPS, IAgentEndpointFactory
from twisted.python.deprecate import getDeprecationWarningString
from twisted.web import http
from twisted.internet import defer, protocol, task, reactor
from twisted.internet.abstract import isIPv6Address
from twisted.internet.interfaces import IProtocol, IOpenSSLContextFactory
from twisted.internet.endpoints import HostnameEndpoint, wrapClientTLS
from twisted.python.util import InsensitiveDict
from twisted.python.components import proxyForInterface
from twisted.web import error
from twisted.web.iweb import UNKNOWN_LENGTH, IAgent, IBodyProducer, IResponse
from twisted.web.http_headers import Headers
from twisted.logger import Logger
class PartialDownloadError(error.Error):
"""
    Page was only partially downloaded; we got disconnected in the middle.
@ivar response: All of the response body which was downloaded.
"""
class HTTPPageGetter(http.HTTPClient):
"""
Gets a resource via HTTP, then quits.
Typically used with L{HTTPClientFactory}. Note that this class does not, by
itself, do anything with the response. If you want to download a resource
into a file, use L{HTTPPageDownloader} instead.
@ivar _completelyDone: A boolean indicating whether any further requests are
necessary after this one completes in order to provide a result to
C{self.factory.deferred}. If it is C{False}, then a redirect is going
to be followed. Otherwise, this protocol's connection is the last one
before firing the result Deferred. This is used to make sure the result
Deferred is only fired after the connection is cleaned up.
"""
quietLoss = 0
followRedirect = True
failed = 0
_completelyDone = True
_specialHeaders = set((b'host', b'user-agent', b'cookie', b'content-length'))
def connectionMade(self):
method = getattr(self.factory, 'method', b'GET')
self.sendCommand(method, self.factory.path)
if self.factory.scheme == b'http' and self.factory.port != 80:
host = self.factory.host + b':' + intToBytes(self.factory.port)
elif self.factory.scheme == b'https' and self.factory.port != 443:
host = self.factory.host + b':' + intToBytes(self.factory.port)
else:
host = self.factory.host
self.sendHeader(b'Host', self.factory.headers.get(b"host", host))
self.sendHeader(b'User-Agent', self.factory.agent)
data = getattr(self.factory, 'postdata', None)
if data is not None:
self.sendHeader(b"Content-Length", intToBytes(len(data)))
cookieData = []
for (key, value) in self.factory.headers.items():
if key.lower() not in self._specialHeaders:
# we calculated it on our own
self.sendHeader(key, value)
if key.lower() == b'cookie':
cookieData.append(value)
for cookie, cookval in self.factory.cookies.items():
cookieData.append(cookie + b'=' + cookval)
if cookieData:
self.sendHeader(b'Cookie', b'; '.join(cookieData))
self.endHeaders()
self.headers = {}
if data is not None:
self.transport.write(data)
def handleHeader(self, key, value):
"""
Called every time a header is received. Stores the header information
as key-value pairs in the C{headers} attribute.
@type key: C{str}
@param key: An HTTP header field name.
@type value: C{str}
@param value: An HTTP header field value.
"""
key = key.lower()
l = self.headers.setdefault(key, [])
l.append(value)
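        # Repeated fields accumulate, e.g. two Set-Cookie headers yield
        # self.headers[b'set-cookie'] == [b'a=1', b'b=2'] (illustrative values).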
def handleStatus(self, version, status, message):
"""
Handle the HTTP status line.
@param version: The HTTP version.
@type version: L{bytes}
@param status: The HTTP status code, an integer represented as a
bytestring.
@type status: L{bytes}
@param message: The HTTP status message.
@type message: L{bytes}
"""
self.version, self.status, self.message = version, status, message
self.factory.gotStatus(version, status, message)
def handleEndHeaders(self):
self.factory.gotHeaders(self.headers)
m = getattr(self, 'handleStatus_' + nativeString(self.status),
self.handleStatusDefault)
m()
def handleStatus_200(self):
pass
handleStatus_201 = lambda self: self.handleStatus_200()
handleStatus_202 = lambda self: self.handleStatus_200()
def handleStatusDefault(self):
self.failed = 1
def handleStatus_301(self):
l = self.headers.get(b'location')
if not l:
self.handleStatusDefault()
return
url = l[0]
if self.followRedirect:
self.factory._redirectCount += 1
if self.factory._redirectCount >= self.factory.redirectLimit:
err = error.InfiniteRedirection(
self.status,
b'Infinite redirection detected',
location=url)
self.factory.noPage(Failure(err))
self.quietLoss = True
self.transport.loseConnection()
return
self._completelyDone = False
self.factory.setURL(url)
if self.factory.scheme == b'https':
from twisted.internet import ssl
contextFactory = ssl.ClientContextFactory()
reactor.connectSSL(nativeString(self.factory.host),
self.factory.port,
self.factory, contextFactory)
else:
reactor.connectTCP(nativeString(self.factory.host),
self.factory.port,
self.factory)
else:
self.handleStatusDefault()
self.factory.noPage(
Failure(
error.PageRedirect(
self.status, self.message, location = url)))
self.quietLoss = True
self.transport.loseConnection()
def handleStatus_302(self):
if self.afterFoundGet:
self.handleStatus_303()
else:
self.handleStatus_301()
def handleStatus_303(self):
self.factory.method = b'GET'
self.handleStatus_301()
def connectionLost(self, reason):
"""
When the connection used to issue the HTTP request is closed, notify the
factory if we have not already, so it can produce a result.
"""
if not self.quietLoss:
http.HTTPClient.connectionLost(self, reason)
self.factory.noPage(reason)
if self._completelyDone:
# Only if we think we're completely done do we tell the factory that
# we're "disconnected". This way when we're following redirects,
# only the last protocol used will fire the _disconnectedDeferred.
self.factory._disconnectedDeferred.callback(None)
def handleResponse(self, response):
if self.quietLoss:
return
if self.failed:
self.factory.noPage(
Failure(
error.Error(
self.status, self.message, response)))
if self.factory.method == b'HEAD':
# Callback with empty string, since there is never a response
# body for HEAD requests.
self.factory.page(b'')
        elif self.length is not None and self.length != 0:
self.factory.noPage(Failure(
PartialDownloadError(self.status, self.message, response)))
else:
self.factory.page(response)
# server might be stupid and not close connection. admittedly
# the fact we do only one request per connection is also
# stupid...
self.transport.loseConnection()
def timeout(self):
self.quietLoss = True
self.transport.abortConnection()
self.factory.noPage(defer.TimeoutError("Getting %s took longer than %s seconds." % (self.factory.url, self.factory.timeout)))
class HTTPPageDownloader(HTTPPageGetter):
transmittingPage = 0
def handleStatus_200(self, partialContent=0):
HTTPPageGetter.handleStatus_200(self)
self.transmittingPage = 1
self.factory.pageStart(partialContent)
def handleStatus_206(self):
self.handleStatus_200(partialContent=1)
def handleResponsePart(self, data):
if self.transmittingPage:
self.factory.pagePart(data)
def handleResponseEnd(self):
if self.length:
self.transmittingPage = 0
self.factory.noPage(
Failure(
PartialDownloadError(self.status)))
if self.transmittingPage:
self.factory.pageEnd()
self.transmittingPage = 0
if self.failed:
self.factory.noPage(
Failure(
error.Error(
self.status, self.message, None)))
self.transport.loseConnection()
class HTTPClientFactory(protocol.ClientFactory):
"""Download a given URL.
@type deferred: Deferred
@ivar deferred: A Deferred that will fire when the content has
been retrieved. Once this is fired, the ivars `status', `version',
and `message' will be set.
@type status: bytes
@ivar status: The status of the response.
@type version: bytes
@ivar version: The version of the response.
@type message: bytes
@ivar message: The text message returned with the status.
@type response_headers: dict
@ivar response_headers: The headers that were specified in the
response from the server.
@type method: bytes
@ivar method: The HTTP method to use in the request. This should be one of
OPTIONS, GET, HEAD, POST, PUT, DELETE, TRACE, or CONNECT (case
matters). Other values may be specified if the server being contacted
supports them.
@type redirectLimit: int
@ivar redirectLimit: The maximum number of HTTP redirects that can occur
before it is assumed that the redirection is endless.
@type afterFoundGet: C{bool}
@ivar afterFoundGet: Deviate from the HTTP 1.1 RFC by handling redirects
the same way as most web browsers; if the request method is POST and a
        302 status is encountered, the redirect is followed with a GET method.
@type _redirectCount: int
@ivar _redirectCount: The current number of HTTP redirects encountered.
@ivar _disconnectedDeferred: A L{Deferred} which only fires after the last
connection associated with the request (redirects may cause multiple
connections to be required) has closed. The result Deferred will only
fire after this Deferred, so that callers can be assured that there are
no more event sources in the reactor once they get the result.
"""
protocol = HTTPPageGetter
url = None
scheme = None
host = b''
port = None
path = None
def __init__(self, url, method=b'GET', postdata=None, headers=None,
agent=b"Twisted PageGetter", timeout=0, cookies=None,
followRedirect=True, redirectLimit=20,
afterFoundGet=False):
self.followRedirect = followRedirect
self.redirectLimit = redirectLimit
self._redirectCount = 0
self.timeout = timeout
self.agent = agent
self.afterFoundGet = afterFoundGet
if cookies is None:
cookies = {}
self.cookies = cookies
if headers is not None:
self.headers = InsensitiveDict(headers)
else:
self.headers = InsensitiveDict()
if postdata is not None:
self.headers.setdefault(b'Content-Length',
intToBytes(len(postdata)))
# just in case a broken http/1.1 decides to keep connection alive
self.headers.setdefault(b"connection", b"close")
self.postdata = postdata
self.method = method
self.setURL(url)
self.waiting = 1
self._disconnectedDeferred = defer.Deferred()
self.deferred = defer.Deferred()
# Make sure the first callback on the result Deferred pauses the
# callback chain until the request connection is closed.
self.deferred.addBoth(self._waitForDisconnect)
self.response_headers = None
def _waitForDisconnect(self, passthrough):
"""
Chain onto the _disconnectedDeferred, preserving C{passthrough}, so that
the result is only available after the associated connection has been
closed.
"""
self._disconnectedDeferred.addCallback(lambda ignored: passthrough)
return self._disconnectedDeferred
def __repr__(self):
return "<%s: %s>" % (self.__class__.__name__, self.url)
def setURL(self, url):
self.url = url
uri = URI.fromBytes(url)
if uri.scheme and uri.host:
self.scheme = uri.scheme
self.host = uri.host
self.port = uri.port
self.path = uri.originForm
def buildProtocol(self, addr):
p = protocol.ClientFactory.buildProtocol(self, addr)
p.followRedirect = self.followRedirect
p.afterFoundGet = self.afterFoundGet
if self.timeout:
timeoutCall = reactor.callLater(self.timeout, p.timeout)
self.deferred.addBoth(self._cancelTimeout, timeoutCall)
return p
def _cancelTimeout(self, result, timeoutCall):
if timeoutCall.active():
timeoutCall.cancel()
return result
def gotHeaders(self, headers):
"""
Parse the response HTTP headers.
@param headers: The response HTTP headers.
@type headers: L{dict}
"""
self.response_headers = headers
if b'set-cookie' in headers:
for cookie in headers[b'set-cookie']:
if b'=' in cookie:
cookparts = cookie.split(b';')
cook = cookparts[0]
                    cook = cook.lstrip()
k, v = cook.split(b'=', 1)
self.cookies[k.lstrip()] = v.lstrip()
def gotStatus(self, version, status, message):
"""
Set the status of the request on us.
@param version: The HTTP version.
@type version: L{bytes}
@param status: The HTTP status code, an integer represented as a
bytestring.
@type status: L{bytes}
@param message: The HTTP status message.
@type message: L{bytes}
"""
self.version, self.status, self.message = version, status, message
def page(self, page):
if self.waiting:
self.waiting = 0
self.deferred.callback(page)
def noPage(self, reason):
if self.waiting:
self.waiting = 0
self.deferred.errback(reason)
def clientConnectionFailed(self, _, reason):
"""
When a connection attempt fails, the request cannot be issued. If no
result has yet been provided to the result Deferred, provide the
connection failure reason as an error result.
"""
if self.waiting:
self.waiting = 0
# If the connection attempt failed, there is nothing more to
# disconnect, so just fire that Deferred now.
self._disconnectedDeferred.callback(None)
self.deferred.errback(reason)
class HTTPDownloader(HTTPClientFactory):
"""
Download to a file.
"""
protocol = HTTPPageDownloader
value = None
_log = Logger()
def __init__(self, url, fileOrName,
method=b'GET', postdata=None, headers=None,
agent=b"Twisted client", supportPartial=False,
timeout=0, cookies=None, followRedirect=True,
redirectLimit=20, afterFoundGet=False):
self.requestedPartial = 0
if isinstance(fileOrName, (str, unicode)):
self.fileName = fileOrName
self.file = None
if supportPartial and os.path.exists(self.fileName):
fileLength = os.path.getsize(self.fileName)
if fileLength:
self.requestedPartial = fileLength
                    if headers is None:
headers = {}
headers[b"range"] = b"bytes=" + intToBytes(fileLength) + b"-"
else:
self.file = fileOrName
HTTPClientFactory.__init__(
self, url, method=method, postdata=postdata, headers=headers,
agent=agent, timeout=timeout, cookies=cookies,
followRedirect=followRedirect, redirectLimit=redirectLimit,
afterFoundGet=afterFoundGet)
def gotHeaders(self, headers):
HTTPClientFactory.gotHeaders(self, headers)
if self.requestedPartial:
contentRange = headers.get(b"content-range", None)
if not contentRange:
# server doesn't support partial requests, oh well
self.requestedPartial = 0
return
start, end, realLength = http.parseContentRange(contentRange[0])
if start != self.requestedPartial:
# server is acting weirdly
self.requestedPartial = 0
def openFile(self, partialContent):
if partialContent:
file = open(self.fileName, 'rb+')
file.seek(0, 2)
else:
file = open(self.fileName, 'wb')
return file
def pageStart(self, partialContent):
"""Called on page download start.
@param partialContent: tells us if the download is partial download we requested.
"""
if partialContent and not self.requestedPartial:
raise ValueError("we shouldn't get partial content response if we didn't want it!")
if self.waiting:
try:
if not self.file:
self.file = self.openFile(partialContent)
except IOError:
#raise
self.deferred.errback(Failure())
def pagePart(self, data):
if not self.file:
return
try:
self.file.write(data)
except IOError:
#raise
self.file = None
self.deferred.errback(Failure())
def noPage(self, reason):
"""
Close the storage file and errback the waiting L{Deferred} with the
given reason.
"""
if self.waiting:
self.waiting = 0
if self.file:
try:
self.file.close()
except:
self._log.failure("Error closing HTTPDownloader file")
self.deferred.errback(reason)
def pageEnd(self):
self.waiting = 0
if not self.file:
return
try:
self.file.close()
except IOError:
self.deferred.errback(Failure())
return
self.deferred.callback(self.value)
class URI(object):
"""
A URI object.
@see: U{https://tools.ietf.org/html/draft-ietf-httpbis-p1-messaging-21}
"""
def __init__(self, scheme, netloc, host, port, path, params, query,
fragment):
"""
@type scheme: L{bytes}
@param scheme: URI scheme specifier.
@type netloc: L{bytes}
@param netloc: Network location component.
@type host: L{bytes}
@param host: Host name. For IPv6 address literals the brackets are
stripped.
@type port: L{int}
@param port: Port number.
@type path: L{bytes}
@param path: Hierarchical path.
@type params: L{bytes}
@param params: Parameters for last path segment.
@type query: L{bytes}
@param query: Query string.
@type fragment: L{bytes}
@param fragment: Fragment identifier.
"""
self.scheme = scheme
self.netloc = netloc
self.host = host.strip(b'[]')
self.port = port
self.path = path
self.params = params
self.query = query
self.fragment = fragment
@classmethod
def fromBytes(cls, uri, defaultPort=None):
"""
Parse the given URI into a L{URI}.
@type uri: C{bytes}
@param uri: URI to parse.
@type defaultPort: C{int} or L{None}
@param defaultPort: An alternate value to use as the port if the URI
does not include one.
@rtype: L{URI}
@return: Parsed URI instance.
"""
uri = uri.strip()
scheme, netloc, path, params, query, fragment = http.urlparse(uri)
if defaultPort is None:
if scheme == b'https':
defaultPort = 443
else:
defaultPort = 80
if b':' in netloc:
host, port = netloc.rsplit(b':', 1)
try:
port = int(port)
except ValueError:
host, port = netloc, defaultPort
else:
host, port = netloc, defaultPort
return cls(scheme, netloc, host, port, path, params, query, fragment)
def toBytes(self):
"""
Assemble the individual parts of the I{URI} into a fully formed I{URI}.
@rtype: C{bytes}
@return: A fully formed I{URI}.
"""
return urlunparse(
(self.scheme, self.netloc, self.path, self.params, self.query,
self.fragment))
@property
def originForm(self):
"""
The absolute I{URI} path including I{URI} parameters, query string and
fragment identifier.
@see: U{https://tools.ietf.org/html/draft-ietf-httpbis-p1-messaging-21#section-5.3}
@return: The absolute path in original form.
@rtype: L{bytes}
"""
# The HTTP bis draft says the origin form should not include the
# fragment.
path = urlunparse(
(b'', b'', self.path, self.params, self.query, b''))
if path == b'':
path = b'/'
return path
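# A minimal usage sketch (comments only, not part of the module; the URL is a
# placeholder):
#
#     uri = URI.fromBytes(b'http://example.com:8080/foo?bar=baz')
#     uri.scheme      # b'http'
#     uri.host        # b'example.com'
#     uri.port        # 8080
#     uri.originForm  # b'/foo?bar=baz'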
def _urljoin(base, url):
"""
Construct a full ("absolute") URL by combining a "base URL" with another
URL. Informally, this uses components of the base URL, in particular the
addressing scheme, the network location and (part of) the path, to provide
missing components in the relative URL.
Additionally, the fragment identifier is preserved according to the HTTP
1.1 bis draft.
@type base: C{bytes}
@param base: Base URL.
@type url: C{bytes}
@param url: URL to combine with C{base}.
@return: An absolute URL resulting from the combination of C{base} and
C{url}.
@see: L{urlparse.urljoin}
@see: U{https://tools.ietf.org/html/draft-ietf-httpbis-p2-semantics-22#section-7.1.2}
"""
base, baseFrag = urldefrag(base)
url, urlFrag = urldefrag(urljoin(base, url))
return urljoin(url, b'#' + (urlFrag or baseFrag))
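# For example (a sketch with a placeholder URL): joining
# b'http://example.com/a#frag' with b'b' yields b'http://example.com/b#frag';
# the base fragment is preserved because the relative URL carries none.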
def _makeGetterFactory(url, factoryFactory, contextFactory=None,
*args, **kwargs):
"""
Create and connect an HTTP page getting factory.
Any additional positional or keyword arguments are used when calling
C{factoryFactory}.
@param factoryFactory: Factory factory that is called with C{url}, C{args}
and C{kwargs} to produce the getter
@param contextFactory: Context factory to use when creating a secure
connection, defaulting to L{None}
@return: The factory created by C{factoryFactory}
"""
uri = URI.fromBytes(url)
factory = factoryFactory(url, *args, **kwargs)
if uri.scheme == b'https':
from twisted.internet import ssl
if contextFactory is None:
contextFactory = ssl.ClientContextFactory()
reactor.connectSSL(
nativeString(uri.host), uri.port, factory, contextFactory)
else:
reactor.connectTCP(nativeString(uri.host), uri.port, factory)
return factory
_GETPAGE_REPLACEMENT_TEXT = "https://pypi.org/project/treq/ or twisted.web.client.Agent"
def _deprecateGetPageClasses():
"""
Mark the protocols and factories associated with L{getPage} and
L{downloadPage} as deprecated.
"""
for klass in [
HTTPPageGetter, HTTPPageDownloader,
HTTPClientFactory, HTTPDownloader
]:
deprecatedModuleAttribute(
Version("Twisted", 16, 7, 0),
getDeprecationWarningString(
klass,
Version("Twisted", 16, 7, 0),
replacement=_GETPAGE_REPLACEMENT_TEXT)
.split("; ")[1],
klass.__module__,
klass.__name__)
_deprecateGetPageClasses()
@deprecated(Version("Twisted", 16, 7, 0),
_GETPAGE_REPLACEMENT_TEXT)
def getPage(url, contextFactory=None, *args, **kwargs):
"""
Download a web page as a string.
Download a page. Return a deferred, which will callback with a
page (as a string) or errback with a description of the error.
See L{HTTPClientFactory} to see what extra arguments can be passed.
"""
return _makeGetterFactory(
url,
HTTPClientFactory,
contextFactory=contextFactory,
*args, **kwargs).deferred
@deprecated(Version("Twisted", 16, 7, 0),
_GETPAGE_REPLACEMENT_TEXT)
def downloadPage(url, file, contextFactory=None, *args, **kwargs):
"""
Download a web page to a file.
@param file: path to file on filesystem, or file-like object.
See HTTPDownloader to see what extra args can be passed.
"""
factoryFactory = lambda url, *a, **kw: HTTPDownloader(url, file, *a, **kw)
return _makeGetterFactory(
url,
factoryFactory,
contextFactory=contextFactory,
*args, **kwargs).deferred
# The code which follows is based on the new HTTP client implementation. It
# should be significantly better than anything above, though it is not yet
# feature equivalent.
from twisted.web.error import SchemeNotSupported
from twisted.web._newclient import (
HTTP11ClientProtocol,
PotentialDataLoss,
Request,
RequestGenerationFailed,
RequestNotSent,
RequestTransmissionFailed,
Response,
ResponseDone,
ResponseFailed,
ResponseNeverReceived,
_WrapperException,
)
try:
from OpenSSL import SSL
except ImportError:
SSL = None
else:
from twisted.internet.ssl import (CertificateOptions,
platformTrust,
optionsForClientTLS)
def _requireSSL(decoratee):
"""
The decorated method requires pyOpenSSL to be present, or it raises
L{NotImplementedError}.
@param decoratee: A function which requires pyOpenSSL.
@type decoratee: L{callable}
@return: A function which raises L{NotImplementedError} if pyOpenSSL is not
installed; otherwise, if it is installed, simply return C{decoratee}.
@rtype: L{callable}
"""
if SSL is None:
@wraps(decoratee)
def raiseNotImplemented(*a, **kw):
"""
pyOpenSSL is not available.
@param a: The positional arguments for C{decoratee}.
@param kw: The keyword arguments for C{decoratee}.
@raise NotImplementedError: Always.
"""
raise NotImplementedError("SSL support unavailable")
return raiseNotImplemented
return decoratee
class WebClientContextFactory(object):
"""
This class is deprecated. Please simply use L{Agent} as-is, or if you want
to customize something, use L{BrowserLikePolicyForHTTPS}.
A L{WebClientContextFactory} is an HTTPS policy which totally ignores the
hostname and port. It performs basic certificate verification, however the
lack of validation of service identity (e.g. hostname validation) means it
is still vulnerable to man-in-the-middle attacks. Don't use it any more.
"""
def _getCertificateOptions(self, hostname, port):
"""
Return a L{CertificateOptions}.
@param hostname: ignored
@param port: ignored
@return: A new CertificateOptions instance.
@rtype: L{CertificateOptions}
"""
return CertificateOptions(
method=SSL.SSLv23_METHOD,
trustRoot=platformTrust()
)
@_requireSSL
def getContext(self, hostname, port):
"""
Return an L{OpenSSL.SSL.Context}.
@param hostname: ignored
@param port: ignored
@return: A new SSL context.
@rtype: L{OpenSSL.SSL.Context}
"""
return self._getCertificateOptions(hostname, port).getContext()
@implementer(IPolicyForHTTPS)
class BrowserLikePolicyForHTTPS(object):
"""
SSL connection creator for web clients.
"""
def __init__(self, trustRoot=None):
self._trustRoot = trustRoot
@_requireSSL
def creatorForNetloc(self, hostname, port):
"""
Create a L{client connection creator
<twisted.internet.interfaces.IOpenSSLClientConnectionCreator>} for a
given network location.
@param hostname: The hostname part of the URI.
@type hostname: L{bytes}
@param port: The port part of the URI.
@type port: L{int}
@return: a connection creator with appropriate verification
restrictions set
@rtype: L{client connection creator
<twisted.internet.interfaces.IOpenSSLClientConnectionCreator>}
"""
return optionsForClientTLS(hostname.decode("ascii"),
trustRoot=self._trustRoot)
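# A minimal sketch (comments only, not part of the module) of pinning HTTPS
# trust to a specific CA; 'ca.pem' is a placeholder path:
#
#     from twisted.internet.ssl import Certificate
#     with open('ca.pem') as f:
#         customPolicy = BrowserLikePolicyForHTTPS(
#             trustRoot=Certificate.loadPEM(f.read()))
#     agent = Agent(reactor, contextFactory=customPolicy)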
deprecatedModuleAttribute(Version("Twisted", 14, 0, 0),
getDeprecationWarningString(
WebClientContextFactory,
Version("Twisted", 14, 0, 0),
replacement=BrowserLikePolicyForHTTPS)
.split("; ")[1],
WebClientContextFactory.__module__,
WebClientContextFactory.__name__)
@implementer(IOpenSSLContextFactory)
class _ContextFactoryWithContext(object):
"""
A L{_ContextFactoryWithContext} is like a
L{twisted.internet.ssl.ContextFactory} with a pre-created context.
@ivar _context: A Context.
@type _context: L{OpenSSL.SSL.Context}
"""
def __init__(self, context):
"""
Initialize a L{_ContextFactoryWithContext} with a context.
@param context: An SSL context.
@type context: L{OpenSSL.SSL.Context}
"""
self._context = context
def getContext(self):
"""
Return the context created by
L{_DeprecatedToCurrentPolicyForHTTPS._webContextFactory}.
@return: A context.
        @rtype: L{OpenSSL.SSL.Context}
"""
return self._context
@implementer(IPolicyForHTTPS)
class _DeprecatedToCurrentPolicyForHTTPS(object):
"""
Adapt a web context factory to a normal context factory.
@ivar _webContextFactory: An object providing a getContext method with
C{hostname} and C{port} arguments.
@type _webContextFactory: L{WebClientContextFactory} (or object with a
similar C{getContext} method).
"""
def __init__(self, webContextFactory):
"""
Wrap a web context factory in an L{IPolicyForHTTPS}.
@param webContextFactory: An object providing a getContext method with
C{hostname} and C{port} arguments.
@type webContextFactory: L{WebClientContextFactory} (or object with a
similar C{getContext} method).
"""
self._webContextFactory = webContextFactory
def creatorForNetloc(self, hostname, port):
"""
        Call the wrapped web context factory's C{getContext} method with a
        hostname and port number, and wrap the resulting context object in a
        context factory.
@param hostname: The hostname part of the URI.
@type hostname: L{bytes}
@param port: The port part of the URI.
@type port: L{int}
@return: A context factory.
@rtype: L{IOpenSSLContextFactory}
"""
context = self._webContextFactory.getContext(hostname, port)
return _ContextFactoryWithContext(context)
@implementer(IBodyProducer)
class FileBodyProducer(object):
"""
L{FileBodyProducer} produces bytes from an input file object incrementally
and writes them to a consumer.
Since file-like objects cannot be read from in an event-driven manner,
L{FileBodyProducer} uses a L{Cooperator} instance to schedule reads from
the file. This process is also paused and resumed based on notifications
from the L{IConsumer} provider being written to.
The file is closed after it has been read, or if the producer is stopped
early.
@ivar _inputFile: Any file-like object, bytes read from which will be
written to a consumer.
@ivar _cooperate: A method like L{Cooperator.cooperate} which is used to
schedule all reads.
@ivar _readSize: The number of bytes to read from C{_inputFile} at a time.
"""
def __init__(self, inputFile, cooperator=task, readSize=2 ** 16):
self._inputFile = inputFile
self._cooperate = cooperator.cooperate
self._readSize = readSize
self.length = self._determineLength(inputFile)
def _determineLength(self, fObj):
"""
Determine how many bytes can be read out of C{fObj} (assuming it is not
modified from this point on). If the determination cannot be made,
return C{UNKNOWN_LENGTH}.
"""
try:
seek = fObj.seek
tell = fObj.tell
except AttributeError:
return UNKNOWN_LENGTH
originalPosition = tell()
seek(0, os.SEEK_END)
end = tell()
seek(originalPosition, os.SEEK_SET)
return end - originalPosition
def stopProducing(self):
"""
Permanently stop writing bytes from the file to the consumer by
stopping the underlying L{CooperativeTask}.
"""
self._inputFile.close()
self._task.stop()
def startProducing(self, consumer):
"""
Start a cooperative task which will read bytes from the input file and
write them to C{consumer}. Return a L{Deferred} which fires after all
bytes have been written.
@param consumer: Any L{IConsumer} provider
"""
self._task = self._cooperate(self._writeloop(consumer))
d = self._task.whenDone()
def maybeStopped(reason):
            # IBodyProducer.startProducing's Deferred isn't supposed to fire if
# stopProducing is called.
reason.trap(task.TaskStopped)
return defer.Deferred()
d.addCallbacks(lambda ignored: None, maybeStopped)
return d
def _writeloop(self, consumer):
"""
Return an iterator which reads one chunk of bytes from the input file
and writes them to the consumer for each time it is iterated.
"""
while True:
bytes = self._inputFile.read(self._readSize)
if not bytes:
self._inputFile.close()
break
consumer.write(bytes)
yield None
def pauseProducing(self):
"""
Temporarily suspend copying bytes from the input file to the consumer
by pausing the L{CooperativeTask} which drives that activity.
"""
self._task.pause()
def resumeProducing(self):
"""
Undo the effects of a previous C{pauseProducing} and resume copying
bytes to the consumer by resuming the L{CooperativeTask} which drives
the write activity.
"""
self._task.resume()
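# A minimal sketch (comments only, not part of the module) of streaming a file
# as a request body; the file name and URL are placeholders:
#
#     producer = FileBodyProducer(open('upload.bin', 'rb'))
#     agent = Agent(reactor)
#     d = agent.request(b'POST', b'http://example.com/upload',
#                       bodyProducer=producer)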
class _HTTP11ClientFactory(protocol.Factory):
"""
A factory for L{HTTP11ClientProtocol}, used by L{HTTPConnectionPool}.
@ivar _quiescentCallback: The quiescent callback to be passed to protocol
instances, used to return them to the connection pool.
@ivar _metadata: Metadata about the low-level connection details,
used to make the repr more useful.
@since: 11.1
"""
def __init__(self, quiescentCallback, metadata):
self._quiescentCallback = quiescentCallback
self._metadata = metadata
def __repr__(self):
return '_HTTP11ClientFactory({}, {})'.format(
self._quiescentCallback,
self._metadata)
def buildProtocol(self, addr):
return HTTP11ClientProtocol(self._quiescentCallback)
class _RetryingHTTP11ClientProtocol(object):
"""
A wrapper for L{HTTP11ClientProtocol} that automatically retries requests.
@ivar _clientProtocol: The underlying L{HTTP11ClientProtocol}.
@ivar _newConnection: A callable that creates a new connection for a
retry.
"""
def __init__(self, clientProtocol, newConnection):
self._clientProtocol = clientProtocol
self._newConnection = newConnection
def _shouldRetry(self, method, exception, bodyProducer):
"""
        Indicate whether the request should be retried.
        Only returns C{True} if the method is idempotent, no response was
        received, the reason for the failed request was not due to
        user-requested cancellation, and no body was sent. The latter
        requirement may be relaxed in the future, and PUT added to the
        approved method list.
@param method: The method of the request.
@type method: L{bytes}
"""
if method not in (b"GET", b"HEAD", b"OPTIONS", b"DELETE", b"TRACE"):
return False
if not isinstance(exception, (RequestNotSent,
RequestTransmissionFailed,
ResponseNeverReceived)):
return False
if isinstance(exception, _WrapperException):
for aFailure in exception.reasons:
if aFailure.check(defer.CancelledError):
return False
if bodyProducer is not None:
return False
return True
def request(self, request):
"""
Do a request, and retry once (with a new connection) if it fails in
a retryable manner.
@param request: A L{Request} instance that will be requested using the
wrapped protocol.
"""
d = self._clientProtocol.request(request)
def failed(reason):
if self._shouldRetry(request.method, reason.value,
request.bodyProducer):
return self._newConnection().addCallback(
lambda connection: connection.request(request))
else:
return reason
d.addErrback(failed)
return d
class HTTPConnectionPool(object):
"""
A pool of persistent HTTP connections.
Features:
- Cached connections will eventually time out.
- Limits on maximum number of persistent connections.
Connections are stored using keys, which should be chosen such that any
connections stored under a given key can be used interchangeably.
Failed requests done using previously cached connections will be retried
once if they use an idempotent method (e.g. GET), in case the HTTP server
timed them out.
@ivar persistent: Boolean indicating whether connections should be
persistent. Connections are persistent by default.
@ivar maxPersistentPerHost: The maximum number of cached persistent
connections for a C{host:port} destination.
@type maxPersistentPerHost: C{int}
@ivar cachedConnectionTimeout: Number of seconds a cached persistent
connection will stay open before disconnecting.
@ivar retryAutomatically: C{boolean} indicating whether idempotent
requests should be retried once if no response was received.
@ivar _factory: The factory used to connect to the proxy.
@ivar _connections: Map (scheme, host, port) to lists of
L{HTTP11ClientProtocol} instances.
@ivar _timeouts: Map L{HTTP11ClientProtocol} instances to a
C{IDelayedCall} instance of their timeout.
@since: 12.1
"""
_factory = _HTTP11ClientFactory
maxPersistentPerHost = 2
cachedConnectionTimeout = 240
retryAutomatically = True
_log = Logger()
def __init__(self, reactor, persistent=True):
self._reactor = reactor
self.persistent = persistent
self._connections = {}
self._timeouts = {}
def getConnection(self, key, endpoint):
"""
Supply a connection, newly created or retrieved from the pool, to be
used for one HTTP request.
The connection will remain out of the pool (not available to be
returned from future calls to this method) until one HTTP request has
been completed over it.
Afterwards, if the connection is still open, it will automatically be
added to the pool.
@param key: A unique key identifying connections that can be used
interchangeably.
@param endpoint: An endpoint that can be used to open a new connection
if no cached connection is available.
@return: A C{Deferred} that will fire with a L{HTTP11ClientProtocol}
(or a wrapper) that can be used to send a single HTTP request.
"""
# Try to get cached version:
connections = self._connections.get(key)
while connections:
connection = connections.pop(0)
# Cancel timeout:
self._timeouts[connection].cancel()
del self._timeouts[connection]
if connection.state == "QUIESCENT":
if self.retryAutomatically:
newConnection = lambda: self._newConnection(key, endpoint)
connection = _RetryingHTTP11ClientProtocol(
connection, newConnection)
return defer.succeed(connection)
return self._newConnection(key, endpoint)
def _newConnection(self, key, endpoint):
"""
Create a new connection.
This implements the new connection code path for L{getConnection}.
"""
def quiescentCallback(protocol):
self._putConnection(key, protocol)
factory = self._factory(quiescentCallback, repr(endpoint))
return endpoint.connect(factory)
def _removeConnection(self, key, connection):
"""
Remove a connection from the cache and disconnect it.
"""
connection.transport.loseConnection()
self._connections[key].remove(connection)
del self._timeouts[connection]
def _putConnection(self, key, connection):
"""
Return a persistent connection to the pool. This will be called by
L{HTTP11ClientProtocol} when the connection becomes quiescent.
"""
if connection.state != "QUIESCENT":
# Log with traceback for debugging purposes:
try:
raise RuntimeError(
"BUG: Non-quiescent protocol added to connection pool.")
except:
self._log.failure(
"BUG: Non-quiescent protocol added to connection pool.")
return
connections = self._connections.setdefault(key, [])
if len(connections) == self.maxPersistentPerHost:
dropped = connections.pop(0)
dropped.transport.loseConnection()
self._timeouts[dropped].cancel()
del self._timeouts[dropped]
connections.append(connection)
cid = self._reactor.callLater(self.cachedConnectionTimeout,
self._removeConnection,
key, connection)
self._timeouts[connection] = cid
def closeCachedConnections(self):
"""
Close all persistent connections and remove them from the pool.
@return: L{defer.Deferred} that fires when all connections have been
closed.
"""
results = []
for protocols in itervalues(self._connections):
for p in protocols:
results.append(p.abort())
self._connections = {}
for dc in itervalues(self._timeouts):
dc.cancel()
self._timeouts = {}
return defer.gatherResults(results).addCallback(lambda ign: None)
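# A minimal sketch (comments only, not part of the module) of sharing a
# persistent pool between requests; the per-host limit chosen here is
# arbitrary:
#
#     pool = HTTPConnectionPool(reactor, persistent=True)
#     pool.maxPersistentPerHost = 4
#     agent = Agent(reactor, pool=pool)
#     # ... issue requests ...
#     d = pool.closeCachedConnections()  # at shutdown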
class _AgentBase(object):
"""
Base class offering common facilities for L{Agent}-type classes.
@ivar _reactor: The C{IReactorTime} implementation which will be used by
the pool, and perhaps by subclasses as well.
@ivar _pool: The L{HTTPConnectionPool} used to manage HTTP connections.
"""
def __init__(self, reactor, pool):
if pool is None:
pool = HTTPConnectionPool(reactor, False)
self._reactor = reactor
self._pool = pool
def _computeHostValue(self, scheme, host, port):
"""
Compute the string to use for the value of the I{Host} header, based on
the given scheme, host name, and port number.
"""
        if isIPv6Address(nativeString(host)):
host = b'[' + host + b']'
if (scheme, port) in ((b'http', 80), (b'https', 443)):
return host
return host + b":" + intToBytes(port)
def _requestWithEndpoint(self, key, endpoint, method, parsedURI,
headers, bodyProducer, requestPath):
"""
Issue a new request, given the endpoint and the path sent as part of
the request.
"""
# Create minimal headers, if necessary:
if headers is None:
headers = Headers()
if not headers.hasHeader(b'host'):
headers = headers.copy()
headers.addRawHeader(
b'host', self._computeHostValue(parsedURI.scheme,
parsedURI.host,
parsedURI.port))
d = self._pool.getConnection(key, endpoint)
def cbConnected(proto):
return proto.request(
Request._construct(method, requestPath, headers, bodyProducer,
persistent=self._pool.persistent,
parsedURI=parsedURI))
d.addCallback(cbConnected)
return d
@implementer(IAgentEndpointFactory)
class _StandardEndpointFactory(object):
"""
Standard HTTP endpoint destinations - TCP for HTTP, TCP+TLS for HTTPS.
@ivar _policyForHTTPS: A web context factory which will be used to create
SSL context objects for any SSL connections the agent needs to make.
@ivar _connectTimeout: If not L{None}, the timeout passed to
L{HostnameEndpoint} for specifying the connection timeout.
@ivar _bindAddress: If not L{None}, the address passed to
L{HostnameEndpoint} for specifying the local address to bind to.
"""
def __init__(self, reactor, contextFactory, connectTimeout, bindAddress):
"""
@param reactor: A provider to use to create endpoints.
@type reactor: see L{HostnameEndpoint.__init__} for acceptable reactor
types.
@param contextFactory: A factory for TLS contexts, to control the
verification parameters of OpenSSL.
@type contextFactory: L{IPolicyForHTTPS}.
@param connectTimeout: The amount of time that this L{Agent} will wait
for the peer to accept a connection.
@type connectTimeout: L{float} or L{None}
@param bindAddress: The local address for client sockets to bind to.
@type bindAddress: L{bytes} or L{None}
"""
self._reactor = reactor
self._policyForHTTPS = contextFactory
self._connectTimeout = connectTimeout
self._bindAddress = bindAddress
def endpointForURI(self, uri):
"""
Connect directly over TCP for C{b'http'} scheme, and TLS for
C{b'https'}.
@param uri: L{URI} to connect to.
@return: Endpoint to connect to.
@rtype: L{IStreamClientEndpoint}
"""
kwargs = {}
if self._connectTimeout is not None:
kwargs['timeout'] = self._connectTimeout
kwargs['bindAddress'] = self._bindAddress
try:
host = nativeString(uri.host)
except UnicodeDecodeError:
raise ValueError(("The host of the provided URI ({uri.host!r}) "
"contains non-ASCII octets, it should be ASCII "
"decodable.").format(uri=uri))
endpoint = HostnameEndpoint(self._reactor, host, uri.port, **kwargs)
if uri.scheme == b'http':
return endpoint
elif uri.scheme == b'https':
connectionCreator = self._policyForHTTPS.creatorForNetloc(uri.host,
uri.port)
return wrapClientTLS(connectionCreator, endpoint)
else:
raise SchemeNotSupported("Unsupported scheme: %r" % (uri.scheme,))
@implementer(IAgent)
class Agent(_AgentBase):
"""
L{Agent} is a very basic HTTP client. It supports I{HTTP} and I{HTTPS}
scheme URIs.
@ivar _pool: An L{HTTPConnectionPool} instance.
@ivar _endpointFactory: The L{IAgentEndpointFactory} which will
be used to create endpoints for outgoing connections.
@since: 9.0
"""
def __init__(self, reactor,
contextFactory=BrowserLikePolicyForHTTPS(),
connectTimeout=None, bindAddress=None,
pool=None):
"""
Create an L{Agent}.
@param reactor: A reactor for this L{Agent} to place outgoing
connections.
@type reactor: see L{HostnameEndpoint.__init__} for acceptable reactor
types.
@param contextFactory: A factory for TLS contexts, to control the
verification parameters of OpenSSL. The default is to use a
L{BrowserLikePolicyForHTTPS}, so unless you have special
requirements you can leave this as-is.
@type contextFactory: L{IPolicyForHTTPS}.
@param connectTimeout: The amount of time that this L{Agent} will wait
for the peer to accept a connection.
@type connectTimeout: L{float}
@param bindAddress: The local address for client sockets to bind to.
@type bindAddress: L{bytes}
@param pool: An L{HTTPConnectionPool} instance, or L{None}, in which
case a non-persistent L{HTTPConnectionPool} instance will be
created.
@type pool: L{HTTPConnectionPool}
"""
if not IPolicyForHTTPS.providedBy(contextFactory):
warnings.warn(
repr(contextFactory) +
" was passed as the HTTPS policy for an Agent, but it does "
"not provide IPolicyForHTTPS. Since Twisted 14.0, you must "
"pass a provider of IPolicyForHTTPS.",
stacklevel=2, category=DeprecationWarning
)
contextFactory = _DeprecatedToCurrentPolicyForHTTPS(contextFactory)
endpointFactory = _StandardEndpointFactory(
reactor, contextFactory, connectTimeout, bindAddress)
self._init(reactor, endpointFactory, pool)
@classmethod
def usingEndpointFactory(cls, reactor, endpointFactory, pool=None):
"""
Create a new L{Agent} that will use the endpoint factory to figure
out how to connect to the server.
@param reactor: A reactor for this L{Agent} to place outgoing
connections.
@type reactor: see L{HostnameEndpoint.__init__} for acceptable reactor
types.
@param endpointFactory: Used to construct endpoints which the
HTTP client will connect with.
@type endpointFactory: an L{IAgentEndpointFactory} provider.
@param pool: An L{HTTPConnectionPool} instance, or L{None}, in which
case a non-persistent L{HTTPConnectionPool} instance will be
created.
@type pool: L{HTTPConnectionPool}
@return: A new L{Agent}.
"""
agent = cls.__new__(cls)
agent._init(reactor, endpointFactory, pool)
return agent
def _init(self, reactor, endpointFactory, pool):
"""
Initialize a new L{Agent}.
@param reactor: A reactor for this L{Agent} to place outgoing
connections.
@type reactor: see L{HostnameEndpoint.__init__} for acceptable reactor
types.
@param endpointFactory: Used to construct endpoints which the
HTTP client will connect with.
@type endpointFactory: an L{IAgentEndpointFactory} provider.
@param pool: An L{HTTPConnectionPool} instance, or L{None}, in which
case a non-persistent L{HTTPConnectionPool} instance will be
created.
@type pool: L{HTTPConnectionPool}
@return: A new L{Agent}.
"""
_AgentBase.__init__(self, reactor, pool)
self._endpointFactory = endpointFactory
def _getEndpoint(self, uri):
"""
Get an endpoint for the given URI, using C{self._endpointFactory}.
@param uri: The URI of the request.
@type uri: L{URI}
@return: An endpoint which can be used to connect to given address.
"""
return self._endpointFactory.endpointForURI(uri)
def request(self, method, uri, headers=None, bodyProducer=None):
"""
Issue a request to the server indicated by the given C{uri}.
An existing connection from the connection pool may be used or a new
one may be created.
I{HTTP} and I{HTTPS} schemes are supported in C{uri}.
@see: L{twisted.web.iweb.IAgent.request}
"""
parsedURI = URI.fromBytes(uri)
try:
endpoint = self._getEndpoint(parsedURI)
except SchemeNotSupported:
return defer.fail(Failure())
key = (parsedURI.scheme, parsedURI.host, parsedURI.port)
return self._requestWithEndpoint(key, endpoint, method, parsedURI,
headers, bodyProducer,
parsedURI.originForm)
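# A minimal sketch (comments only, not part of the module) of a simple GET
# with a custom header; the URL and header value are placeholders:
#
#     from twisted.internet import reactor
#     agent = Agent(reactor)
#     d = agent.request(
#         b'GET', b'https://example.com/',
#         Headers({b'user-agent': [b'Example Client']}))
#     d.addCallback(lambda response: print(response.code))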
@implementer(IAgent)
class ProxyAgent(_AgentBase):
"""
An HTTP agent able to cross HTTP proxies.
@ivar _proxyEndpoint: The endpoint used to connect to the proxy.
@since: 11.1
"""
def __init__(self, endpoint, reactor=None, pool=None):
if reactor is None:
from twisted.internet import reactor
_AgentBase.__init__(self, reactor, pool)
self._proxyEndpoint = endpoint
def request(self, method, uri, headers=None, bodyProducer=None):
"""
Issue a new request via the configured proxy.
"""
# Cache *all* connections under the same key, since we are only
# connecting to a single destination, the proxy:
key = ("http-proxy", self._proxyEndpoint)
# To support proxying HTTPS via CONNECT, we will use key
# ("http-proxy-CONNECT", scheme, host, port), and an endpoint that
# wraps _proxyEndpoint with an additional callback to do the CONNECT.
return self._requestWithEndpoint(key, self._proxyEndpoint, method,
URI.fromBytes(uri), headers,
bodyProducer, uri)
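# A minimal sketch (comments only, not part of the module) of requesting
# through an HTTP proxy; the proxy address is a placeholder:
#
#     from twisted.internet.endpoints import TCP4ClientEndpoint
#     endpoint = TCP4ClientEndpoint(reactor, 'proxy.example.com', 8080)
#     agent = ProxyAgent(endpoint)
#     d = agent.request(b'GET', b'http://example.com/')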
class _FakeUrllib2Request(object):
"""
A fake C{urllib2.Request} object for C{cookielib} to work with.
@see: U{http://docs.python.org/library/urllib2.html#request-objects}
@type uri: native L{str}
@ivar uri: Request URI.
@type headers: L{twisted.web.http_headers.Headers}
@ivar headers: Request headers.
@type type: native L{str}
@ivar type: The scheme of the URI.
@type host: native L{str}
@ivar host: The host[:port] of the URI.
@since: 11.1
"""
def __init__(self, uri):
"""
Create a fake Urllib2 request.
@param uri: Request URI.
@type uri: L{bytes}
"""
self.uri = nativeString(uri)
self.headers = Headers()
_uri = URI.fromBytes(uri)
self.type = nativeString(_uri.scheme)
self.host = nativeString(_uri.host)
if (_uri.scheme, _uri.port) not in ((b'http', 80), (b'https', 443)):
            # If it's not a scheme on its regular port, add the port.
self.host += ":" + str(_uri.port)
if _PY3:
self.origin_req_host = nativeString(_uri.host)
self.unverifiable = lambda _: False
def has_header(self, header):
return self.headers.hasHeader(networkString(header))
def add_unredirected_header(self, name, value):
self.headers.addRawHeader(networkString(name), networkString(value))
def get_full_url(self):
return self.uri
def get_header(self, name, default=None):
headers = self.headers.getRawHeaders(networkString(name), default)
if headers is not None:
headers = [nativeString(x) for x in headers]
return headers[0]
return None
def get_host(self):
return self.host
def get_type(self):
return self.type
def is_unverifiable(self):
# In theory this shouldn't be hardcoded.
return False
class _FakeUrllib2Response(object):
"""
A fake C{urllib2.Response} object for C{cookielib} to work with.
@type response: C{twisted.web.iweb.IResponse}
@ivar response: Underlying Twisted Web response.
@since: 11.1
"""
def __init__(self, response):
self.response = response
def info(self):
class _Meta(object):
def getheaders(zelf, name):
# PY2
headers = self.response.headers.getRawHeaders(name, [])
return headers
def get_all(zelf, name, default):
# PY3
headers = self.response.headers.getRawHeaders(
networkString(name), default)
h = [nativeString(x) for x in headers]
return h
return _Meta()
@implementer(IAgent)
class CookieAgent(object):
"""
L{CookieAgent} extends the basic L{Agent} to add RFC-compliant
handling of HTTP cookies. Cookies are written to and extracted
from a C{cookielib.CookieJar} instance.
The same cookie jar instance will be used for any requests through this
agent, mutating it whenever a I{Set-Cookie} header appears in a response.
@type _agent: L{twisted.web.client.Agent}
@ivar _agent: Underlying Twisted Web agent to issue requests through.
@type cookieJar: C{cookielib.CookieJar}
@ivar cookieJar: Initialized cookie jar to read cookies from and store
cookies to.
@since: 11.1
"""
def __init__(self, agent, cookieJar):
self._agent = agent
self.cookieJar = cookieJar
def request(self, method, uri, headers=None, bodyProducer=None):
"""
Issue a new request to the wrapped L{Agent}.
Send a I{Cookie} header if a cookie for C{uri} is stored in
L{CookieAgent.cookieJar}. Cookies are automatically extracted and
stored from requests.
If a C{'cookie'} header appears in C{headers} it will override the
automatic cookie header obtained from the cookie jar.
@see: L{Agent.request}
"""
if headers is None:
headers = Headers()
lastRequest = _FakeUrllib2Request(uri)
# Setting a cookie header explicitly will disable automatic request
# cookies.
if not headers.hasHeader(b'cookie'):
self.cookieJar.add_cookie_header(lastRequest)
cookieHeader = lastRequest.get_header('Cookie', None)
if cookieHeader is not None:
headers = headers.copy()
headers.addRawHeader(b'cookie', networkString(cookieHeader))
d = self._agent.request(method, uri, headers, bodyProducer)
d.addCallback(self._extractCookies, lastRequest)
return d
def _extractCookies(self, response, request):
"""
Extract response cookies and store them in the cookie jar.
@type response: L{twisted.web.iweb.IResponse}
@param response: Twisted Web response.
@param request: A urllib2 compatible request object.
"""
resp = _FakeUrllib2Response(response)
self.cookieJar.extract_cookies(resp, request)
return response
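# A minimal sketch (comments only, not part of the module) of persisting
# cookies across requests with a standard-library cookie jar:
#
#     from http.cookiejar import CookieJar  # cookielib on Python 2
#     agent = CookieAgent(Agent(reactor), CookieJar())
#     d = agent.request(b'GET', b'http://example.com/')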
class GzipDecoder(proxyForInterface(IResponse)):
"""
    A wrapper for a L{Response} instance which handles a gzip'ed body.
@ivar original: The original L{Response} object.
@since: 11.1
"""
def __init__(self, response):
self.original = response
self.length = UNKNOWN_LENGTH
def deliverBody(self, protocol):
"""
Override C{deliverBody} to wrap the given C{protocol} with
L{_GzipProtocol}.
"""
self.original.deliverBody(_GzipProtocol(protocol, self.original))
class _GzipProtocol(proxyForInterface(IProtocol)):
"""
A L{Protocol} implementation which wraps another one, transparently
decompressing received data.
@ivar _zlibDecompress: A zlib decompress object used to decompress the data
stream.
@ivar _response: A reference to the original response, in case of errors.
@since: 11.1
"""
def __init__(self, protocol, response):
self.original = protocol
self._response = response
self._zlibDecompress = zlib.decompressobj(16 + zlib.MAX_WBITS)
def dataReceived(self, data):
"""
Decompress C{data} with the zlib decompressor, forwarding the raw data
to the original protocol.
"""
try:
rawData = self._zlibDecompress.decompress(data)
except zlib.error:
raise ResponseFailed([Failure()], self._response)
if rawData:
self.original.dataReceived(rawData)
def connectionLost(self, reason):
"""
Forward the connection lost event, flushing remaining data from the
decompressor if any.
"""
try:
rawData = self._zlibDecompress.flush()
except zlib.error:
raise ResponseFailed([reason, Failure()], self._response)
if rawData:
self.original.dataReceived(rawData)
self.original.connectionLost(reason)
@implementer(IAgent)
class ContentDecoderAgent(object):
"""
An L{Agent} wrapper to handle encoded content.
It takes care of declaring the support for content in the
I{Accept-Encoding} header, and automatically decompresses the received data
if it's effectively using compression.
@param decoders: A list or tuple of (name, decoder) objects. The name
declares which decoding the decoder supports, and the decoder must
        return a response object when called/instantiated. For example,
        C{[(b'gzip', GzipDecoder)]}. The order determines how the decoders are
        going to be advertised to the server.
@since: 11.1
"""
def __init__(self, agent, decoders):
self._agent = agent
self._decoders = dict(decoders)
self._supported = b','.join([decoder[0] for decoder in decoders])
def request(self, method, uri, headers=None, bodyProducer=None):
"""
Send a client request which declares supporting compressed content.
@see: L{Agent.request}.
"""
if headers is None:
headers = Headers()
else:
headers = headers.copy()
headers.addRawHeader(b'accept-encoding', self._supported)
deferred = self._agent.request(method, uri, headers, bodyProducer)
return deferred.addCallback(self._handleResponse)
def _handleResponse(self, response):
"""
Check if the response is encoded, and wrap it to handle decompression.
"""
contentEncodingHeaders = response.headers.getRawHeaders(
b'content-encoding', [])
contentEncodingHeaders = b','.join(contentEncodingHeaders).split(b',')
while contentEncodingHeaders:
name = contentEncodingHeaders.pop().strip()
decoder = self._decoders.get(name)
if decoder is not None:
response = decoder(response)
else:
# Add it back
contentEncodingHeaders.append(name)
break
if contentEncodingHeaders:
response.headers.setRawHeaders(
b'content-encoding', [b','.join(contentEncodingHeaders)])
else:
response.headers.removeHeader(b'content-encoding')
return response
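# A minimal sketch (comments only, not part of the module) of transparent
# gzip decompression; the URL is a placeholder:
#
#     agent = ContentDecoderAgent(Agent(reactor), [(b'gzip', GzipDecoder)])
#     d = agent.request(b'GET', b'http://example.com/')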
@implementer(IAgent)
class RedirectAgent(object):
"""
An L{Agent} wrapper which handles HTTP redirects.
    The implementation is rather strict: 301 and 302 behave like 307, not
    redirecting automatically for methods other than I{GET} and I{HEAD}.
See L{BrowserLikeRedirectAgent} for a redirecting Agent that behaves more
like a web browser.
@param redirectLimit: The maximum number of times the agent is allowed to
follow redirects before failing with a L{error.InfiniteRedirection}.
@cvar _redirectResponses: A L{list} of HTTP status codes to be redirected
for I{GET} and I{HEAD} methods.
@cvar _seeOtherResponses: A L{list} of HTTP status codes to be redirected
for any method and the method altered to I{GET}.
@since: 11.1
"""
_redirectResponses = [http.MOVED_PERMANENTLY, http.FOUND,
http.TEMPORARY_REDIRECT]
_seeOtherResponses = [http.SEE_OTHER]
def __init__(self, agent, redirectLimit=20):
self._agent = agent
self._redirectLimit = redirectLimit
def request(self, method, uri, headers=None, bodyProducer=None):
"""
Send a client request following HTTP redirects.
@see: L{Agent.request}.
"""
deferred = self._agent.request(method, uri, headers, bodyProducer)
return deferred.addCallback(
self._handleResponse, method, uri, headers, 0)
def _resolveLocation(self, requestURI, location):
"""
Resolve the redirect location against the request I{URI}.
@type requestURI: C{bytes}
@param requestURI: The request I{URI}.
@type location: C{bytes}
@param location: The redirect location.
@rtype: C{bytes}
@return: Final resolved I{URI}.
"""
return _urljoin(requestURI, location)
def _handleRedirect(self, response, method, uri, headers, redirectCount):
"""
Handle a redirect response, checking the number of redirects already
followed, and extracting the location header fields.
"""
if redirectCount >= self._redirectLimit:
err = error.InfiniteRedirection(
response.code,
b'Infinite redirection detected',
location=uri)
raise ResponseFailed([Failure(err)], response)
locationHeaders = response.headers.getRawHeaders(b'location', [])
if not locationHeaders:
err = error.RedirectWithNoLocation(
response.code, b'No location header field', uri)
raise ResponseFailed([Failure(err)], response)
location = self._resolveLocation(uri, locationHeaders[0])
deferred = self._agent.request(method, location, headers)
def _chainResponse(newResponse):
newResponse.setPreviousResponse(response)
return newResponse
deferred.addCallback(_chainResponse)
return deferred.addCallback(
self._handleResponse, method, uri, headers, redirectCount + 1)
def _handleResponse(self, response, method, uri, headers, redirectCount):
"""
Handle the response, making another request if it indicates a redirect.
"""
if response.code in self._redirectResponses:
if method not in (b'GET', b'HEAD'):
err = error.PageRedirect(response.code, location=uri)
raise ResponseFailed([Failure(err)], response)
return self._handleRedirect(response, method, uri, headers,
redirectCount)
elif response.code in self._seeOtherResponses:
return self._handleRedirect(response, b'GET', uri, headers,
redirectCount)
return response
class BrowserLikeRedirectAgent(RedirectAgent):
"""
An L{Agent} wrapper which handles HTTP redirects in the same fashion as web
browsers.
Unlike L{RedirectAgent}, the implementation is more relaxed: 301 and 302
behave like 303, redirecting automatically on any method and altering the
redirect request to a I{GET}.
@see: L{RedirectAgent}
@since: 13.1
"""
_redirectResponses = [http.TEMPORARY_REDIRECT]
_seeOtherResponses = [http.MOVED_PERMANENTLY, http.FOUND, http.SEE_OTHER]
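# A minimal sketch (comments only, not part of the module) of browser-like
# redirect handling; the URL is a placeholder:
#
#     agent = BrowserLikeRedirectAgent(Agent(reactor), redirectLimit=10)
#     d = agent.request(b'POST', b'http://example.com/login')
#     # A 302 response is followed with a GET, as browsers do.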
class _ReadBodyProtocol(protocol.Protocol):
"""
Protocol that collects data sent to it.
This is a helper for L{IResponse.deliverBody}, which collects the body and
fires a deferred with it.
@ivar deferred: See L{__init__}.
@ivar status: See L{__init__}.
@ivar message: See L{__init__}.
@ivar dataBuffer: list of byte-strings received
@type dataBuffer: L{list} of L{bytes}
"""
def __init__(self, status, message, deferred):
"""
@param status: Status of L{IResponse}
        @type status: L{int}
@param message: Message of L{IResponse}
@type message: L{bytes}
@param deferred: deferred to fire when response is complete
@type deferred: L{Deferred} firing with L{bytes}
"""
self.deferred = deferred
self.status = status
self.message = message
self.dataBuffer = []
def dataReceived(self, data):
"""
Accumulate some more bytes from the response.
"""
self.dataBuffer.append(data)
def connectionLost(self, reason):
"""
Deliver the accumulated response bytes to the waiting L{Deferred}, if
the response body has been completely received without error.
"""
if reason.check(ResponseDone):
self.deferred.callback(b''.join(self.dataBuffer))
elif reason.check(PotentialDataLoss):
self.deferred.errback(
PartialDownloadError(self.status, self.message,
b''.join(self.dataBuffer)))
else:
self.deferred.errback(reason)
def readBody(response):
"""
Get the body of an L{IResponse} and return it as a byte string.
This is a helper function for clients that don't want to incrementally
receive the body of an HTTP response.
@param response: The HTTP response for which the body will be read.
@type response: L{IResponse} provider
@return: A L{Deferred} which will fire with the body of the response.
Cancelling it will close the connection to the server immediately.
"""
def cancel(deferred):
"""
Cancel a L{readBody} call, close the connection to the HTTP server
immediately, if it is still open.
@param deferred: The cancelled L{defer.Deferred}.
"""
abort = getAbort()
if abort is not None:
abort()
d = defer.Deferred(cancel)
protocol = _ReadBodyProtocol(response.code, response.phrase, d)
def getAbort():
return getattr(protocol.transport, 'abortConnection', None)
response.deliverBody(protocol)
if protocol.transport is not None and getAbort() is None:
warnings.warn(
'Using readBody with a transport that does not have an '
'abortConnection method',
category=DeprecationWarning,
stacklevel=2)
return d
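# A minimal sketch (comments only, not part of the module) combining L{Agent}
# and L{readBody}; the URL is a placeholder:
#
#     agent = Agent(reactor)
#     d = agent.request(b'GET', b'http://example.com/')
#     d.addCallback(readBody)
#     d.addCallback(lambda body: print(body))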
__all__ = [
'Agent',
'BrowserLikeRedirectAgent',
'ContentDecoderAgent',
'CookieAgent',
'downloadPage',
'getPage',
'GzipDecoder',
'HTTPClientFactory',
'HTTPConnectionPool',
'HTTPDownloader',
'HTTPPageDownloader',
'HTTPPageGetter',
'PartialDownloadError',
'ProxyAgent',
'readBody',
'RedirectAgent',
'RequestGenerationFailed',
'RequestTransmissionFailed',
'Response',
'ResponseDone',
'ResponseFailed',
'ResponseNeverReceived',
'URI',
]
# ---- Coke/ML/createTrainingData_coking_for.py (WesleyLeeNTU/EDC, MIT) ----
# -*- coding: utf-8 -*-
# from EDC.FPC.ML.ML_model_coke import CHCL3
import time
import random
import numpy as np
import cantera as ct
import matplotlib.pyplot as plt
import sys
import os
import argparse
def EDC_cracking(
reaction_mech,
T_list,
pressure_0,
CCl4_X_0,
mass_flow_rate,
n_steps=1000,
n_pfr=18,
length=18,
area=0.03225097679,
label=None,
):
"""
Module that runs a single PFR Cantera simulation via a series of CSTRs.
The Plug flow reactor is represented by a linear chain of zero-dimensional
reactors. The gas at the inlet to the first one has the specified inlet
composition, and for all others the inlet composition is fixed at the
composition of the reactor immediately upstream. Since in a PFR model there
is no diffusion, the upstream reactors are not affected by any downstream
reactors, and therefore the problem may be solved by simply marching from
the first to last reactor, integrating each one to steady state.
Parameters
=============== =============================================================
Attribute Description
=============== =============================================================
`reaction_mech` Cantera reaction mechanism (.cti file)
`T_list` Temperature profile (°C)
    `pressure_0` Initial pressure (atm)
    `CCl4_X_0` Initial CCl4 concentration (mass fraction)
    `mass_flow_rate` Mass flow rate of input gas (T/H)
`n_steps` Number of iterations/number of CSTRs
`n_pfr` Number of PFRs
`length` Length of each PFR (m)
`area` Cross-sectional area (m**2)
`label` Label of this mechanism
=============== =============================================================
"""
#######################################################################
# Input Parameters
#######################################################################
    if CCl4_X_0 > 1:  # values greater than 1 are interpreted as ppm
        CCl4_X_0 = float(CCl4_X_0) / 1000000
print(f"cracking {CCl4_X_0}")
T_0 = 273.15 + T_list[0] # inlet temperature [K]
pressure_0 *= ct.one_atm
spcs = ct.Species.listFromFile(reaction_mech)
for spc in spcs[::-1]:
if spc.composition == {'C': 2.0, 'Cl': 2.0, 'H': 4.0} and spc.charge == 0:
EDC_label = spc.name
if spc.composition == {'C': 1.0, 'Cl': 4.0} and spc.charge == 0:
CCl4_label = spc.name
EDC_X_0 = 1 - CCl4_X_0
composition_0 = '{}:{}, {}:{}'.format(
EDC_label, EDC_X_0, CCl4_label, CCl4_X_0)
mass_flow_rate *= 1000 / 3600 # T/H to kg/s
# import the gas model and set the initial conditions
model = ct.Solution(reaction_mech)
model.TPX = T_0, pressure_0, composition_0
dz = length / n_steps
r_vol = area * dz
# create a new reactor
r = ct.IdealGasReactor(model)
r.volume = r_vol
# create a reservoir to represent the reactor immediately upstream. Note
# that the gas object is set already to the state of the upstream reactor
upstream = ct.Reservoir(model, name='upstream')
# create a reservoir for the reactor to exhaust into. The composition of
# this reservoir is irrelevant.
downstream = ct.Reservoir(model, name='downstream')
# The mass flow rate into the reactor will be fixed by using a
# MassFlowController object.
m = ct.MassFlowController(upstream, r, mdot=mass_flow_rate)
# We need an outlet to the downstream reservoir. This will determine the
# pressure in the reactor. The value of K will only affect the transient
# pressure difference.
v = ct.PressureController(r, downstream, master=m, K=1e-5)
sim = ct.ReactorNet([r])
# define time, space, and other information vectors
z = (np.arange(n_steps) + 1) * dz
t = np.zeros(n_pfr) # residence time in each PFR reactor
# compositions of output stream in each PFR reactor
compositions = [None] * n_pfr
states = ct.SolutionArray(r.thermo)
cracking_rates = [0]
for i, T in enumerate(T_list[1:]):
Ti = T_list[i] + 273.15
Te = T + 273.15
dT = (Te - Ti) / n_steps
T = Ti
t_r = np.zeros_like(z) # residence time in each CSTR reactor
# iterate through the PFR cells
for n in range(n_steps):
# simulate the linear T-profile in each reactor
T = Ti + (n + 1) * dT
model.TP = T, None
r.syncState()
# Set the state of the reservoir to match that of the previous reactor
model.TPX = r.thermo.TPX
upstream.syncState()
# integrate the reactor forward in time until steady state is reached
sim.reinitialize()
sim.set_initial_time(0)
sim.advance_to_steady_state()
# compute velocity and transform into time
t_r[n] = r.mass / mass_flow_rate # residence time in this reactor
# write output data
states.append(r.thermo.state)
t[i] = np.sum(t_r)
compositions[i] = model.X[4:]
cracking_rate = (
EDC_X_0 - model.X[model.species_index(EDC_label)]) / EDC_X_0
cracking_rates.append(cracking_rate)
return compositions, t, cracking_rates
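# A minimal usage sketch (comments only, not part of the original script). The
# mechanism file name and every number below are hypothetical placeholders,
# not values taken from this project:
#
#     compositions, t, cracking_rates = EDC_cracking(
#         reaction_mech='mech.cti',                         # hypothetical .cti file
#         T_list=[350] + [400 + 5 * i for i in range(18)],  # degC, inlet + 18 PFRs
#         pressure_0=12,                                    # atm
#         CCl4_X_0=1000,                                    # ppm (> 1 is treated as ppm)
#         mass_flow_rate=40,                                # T/H
#     )
#     print(cracking_rates[-1])  # overall EDC conversion at the outlet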
'''
def EDC_cracking_C2H3Cl3(
reaction_mech,
T_list,
pressure_0,
CCl4_X_0,
mass_flow_rate,
n_steps=1000,
n_pfr=18,
length=18,
area=0.027403710696000005,
label=None,
):
"""
Module that runs a single PFR Cantera simulation via a series of CSTRs.
The Plug flow reactor is represented by a linear chain of zero-dimensional
reactors. The gas at the inlet to the first one has the specified inlet
composition, and for all others the inlet composition is fixed at the
composition of the reactor immediately upstream. Since in a PFR model there
is no diffusion, the upstream reactors are not affected by any downstream
reactors, and therefore the problem may be solved by simply marching from
the first to last reactor, integrating each one to steady state.
Parameters
=============== =============================================================
Attribute Description
=============== =============================================================
`reaction_mech` Cantera reaction mechanism (.cti file)
`T_list` Temperature profile (°C)
    `pressure_0` Initial pressure (atm)
    `CCl4_X_0` Initial CCl4 concentration (mass fraction)
    `mass_flow_rate` Mass flow rate of input gas (T/H)
`n_steps` Number of iterations/number of CSTRs
`n_pfr` Number of PFRs
`length` Length of each PFR (m)
`area` Cross-sectional area (m**2)
`label` Label of this mechanism
=============== =============================================================
"""
#######################################################################
# Input Parameters
#######################################################################
T_0 = 273.15 + T_list[0] # inlet temperature [K]
pressure_0 *= ct.one_atm
spcs = ct.Species.listFromFile(reaction_mech)
for spc in spcs[::-1]:
if spc.composition == {'C': 2.0, 'Cl': 2.0, 'H': 4.0} and spc.charge == 0:
EDC_label = spc.name
if spc.composition == {'C': 2.0, 'Cl': 3.0, 'H': 3.0} and spc.charge == 0:
C2H3Cl3_label = spc.name
if spc.composition == {'C': 1.0, 'Cl': 4.0} and spc.charge == 0:
CCl4_label = spc.name
EDC_X_0 = 1 - CCl4_X_0
composition_0 = '{}:{}, {}:{}'.format(EDC_label, EDC_X_0, CCl4_label, CCl4_X_0)
mass_flow_rate *= 1000 / 3600 # T/H to kg/s
# import the gas model and set the initial conditions
model = ct.Solution(reaction_mech)
model.TPX = T_0, pressure_0, composition_0
dz = length / n_steps
r_vol = area * dz
# create a new reactor
r = ct.IdealGasReactor(model)
r.volume = r_vol
# create a reservoir to represent the reactor immediately upstream. Note
# that the gas object is set already to the state of the upstream reactor
upstream = ct.Reservoir(model, name='upstream')
# create a reservoir for the reactor to exhaust into. The composition of
# this reservoir is irrelevant.
downstream = ct.Reservoir(model, name='downstream')
# The mass flow rate into the reactor will be fixed by using a
# MassFlowController object.
m = ct.MassFlowController(upstream, r, mdot=mass_flow_rate)
# We need an outlet to the downstream reservoir. This will determine the
# pressure in the reactor. The value of K will only affect the transient
# pressure difference.
v = ct.PressureController(r, downstream, master=m, K=1e-5)
sim = ct.ReactorNet([r])
# define time, space, and other information vectors
z = (np.arange(n_steps) + 1) * dz
t = np.zeros(n_pfr) # residence time in each PFR reactor
compositions = [None] * n_pfr # compositions of output stream in each PFR reactor
states = ct.SolutionArray(r.thermo)
cracking_rates = [0]
forming_rates = [0]
for i, T in enumerate(T_list[1:]):
Ti = T_list[i] + 273.15
Te = T + 273.15
dT = (Te - Ti) / n_steps
T = Ti
t_r = np.zeros_like(z) # residence time in each CSTR reactor
# iterate through the PFR cells
for n in range(n_steps):
# simulate the linear T-profile in each reactor
T = Ti + (n + 1) * dT
model.TP = T, None
r.syncState()
# Set the state of the reservoir to match that of the previous reactor
model.TPX = r.thermo.TPX
upstream.syncState()
# integrate the reactor forward in time until steady state is reached
sim.reinitialize()
sim.set_initial_time(0)
sim.advance_to_steady_state()
# compute velocity and transform into time
t_r[n] = r.mass / mass_flow_rate # residence time in this reactor
# write output data
states.append(r.thermo.state)
t[i] = np.sum(t_r)
compositions[i] = model.X[4:]
cracking_rate = (EDC_X_0 - model.X[model.species_index(EDC_label)]) / EDC_X_0
forming_rate = model.X[model.species_index(C2H3Cl3_label)]
forming_rates.append(forming_rate)
cracking_rates.append(cracking_rate)
    return compositions, t, cracking_rates, forming_rates
'''
def EDC_coking(
reaction_mech,
T_list,
pressure_0,
CCl4_X_0,
CHCl3_X_0,
Tri_X_0,
CP_X_0,
mass_flow_rate,
n_steps=1000,
n_pfr=18,
length=18,
area=0.03225097679,
label=None,
):
"""
Module that runs a single PFR Cantera simulation via a series of CSTRs.
The Plug flow reactor is represented by a linear chain of zero-dimensional
reactors. The gas at the inlet to the first one has the specified inlet
composition, and for all others the inlet composition is fixed at the
composition of the reactor immediately upstream. Since in a PFR model there
is no diffusion, the upstream reactors are not affected by any downstream
reactors, and therefore the problem may be solved by simply marching from
the first to last reactor, integrating each one to steady state.
Parameters
=============== =============================================================
Attribute Description
=============== =============================================================
`reaction_mech` Cantera reaction mechanism (.cti file)
`T_list` Temperature profile (°C)
    `pressure_0` Initial pressure (atm)
    `CCl4_X_0` Initial CCl4 concentration (mass fraction; values > 1 are read as ppm)
    `CHCl3_X_0` Initial CHCl3 concentration (mass fraction; values > 1 are read as ppm)
    `Tri_X_0` Initial C2H3Cl3 concentration (mass fraction; values > 1 are read as ppm)
    `CP_X_0` Initial C4H5Cl concentration (mass fraction; values > 1 are read as ppm)
    `mass_flow_rate` Mass flow rate of input gas (T/H)
`n_steps` Number of iterations/number of CSTRs
`n_pfr` Number of PFRs
`length` Length of each PFR (m)
`area` Cross-sectional area (m**2)
`label` Label of this mechanism
=============== =============================================================
"""
#######################################################################
# Input Parameters
#######################################################################
if CCl4_X_0 > 1: # ppm
CCl4_X_0 = float(CCl4_X_0) / 1000000
if CHCl3_X_0 > 1:
CHCl3_X_0 = float(CHCl3_X_0) / 1000000
if Tri_X_0 > 1:
Tri_X_0 = float(Tri_X_0)/1000000
if CP_X_0 > 1:
CP_X_0 = float(CP_X_0)/1000000
print(
f"coking CCl4:{CCl4_X_0}, CHCl3:{CHCl3_X_0}, Tri:{Tri_X_0}, CP:{CP_X_0}")
T_0 = 273.15 + T_list[0] # inlet temperature [K]
pressure_0 *= ct.one_atm
spcs = ct.Species.listFromFile(reaction_mech)
for spc in spcs[::-1]:
if spc.composition == {'C': 2.0, 'Cl': 2.0, 'H': 4.0} and spc.charge == 0:
EDC_label = spc.name
if spc.composition == {'C': 1.0, 'Cl': 3.0, 'H': 1.0} and spc.charge == 0:
CHCl3_label = spc.name
if spc.composition == {'C': 2.0, 'Cl': 3.0, 'H': 3.0} and spc.charge == 0:
Tri_label = spc.name
if spc.composition == {'C': 4.0, 'Cl': 1.0, 'H': 5.0} and spc.charge == 0:
if spc.name != '1-CP(23)':
CP_label = spc.name
if spc.composition == {'C': 1.0, 'Cl': 4.0} and spc.charge == 0:
CCl4_label = spc.name
EDC_X_0 = 1 - CCl4_X_0 - CP_X_0 - Tri_X_0 - CHCl3_X_0
composition_0 = '{}:{}, {}:{}, {}:{}, {}:{}, {}:{}'.format(
EDC_label, EDC_X_0,
CCl4_label, CCl4_X_0,
CHCl3_label, CHCl3_X_0,
Tri_label, Tri_X_0,
CP_label, CP_X_0
)
mass_flow_rate *= 1000 / 3600 # T/H to kg/s
# import the gas model and set the initial conditions
model = ct.Solution(reaction_mech)
model.TPX = T_0, pressure_0, composition_0
dz = length / n_steps
r_vol = area * dz
# create a new reactor
r = ct.IdealGasReactor(model)
r.volume = r_vol
# create a reservoir to represent the reactor immediately upstream. Note
# that the gas object is set already to the state of the upstream reactor
upstream = ct.Reservoir(model, name='upstream')
# create a reservoir for the reactor to exhaust into. The composition of
# this reservoir is irrelevant.
downstream = ct.Reservoir(model, name='downstream')
# The mass flow rate into the reactor will be fixed by using a
# MassFlowController object.
m = ct.MassFlowController(upstream, r, mdot=mass_flow_rate)
# We need an outlet to the downstream reservoir. This will determine the
# pressure in the reactor. The value of K will only affect the transient
# pressure difference.
v = ct.PressureController(r, downstream, master=m, K=1e-5)
sim = ct.ReactorNet([r])
# define time, space, and other information vectors
z = (np.arange(n_steps) + 1) * dz
t = np.zeros(n_pfr) # residence time in each PFR reactor
# compositions of output stream in each PFR reactor
compositions = [None] * n_pfr
states = ct.SolutionArray(r.thermo)
cracking_rates = [0]
for i, T in enumerate(T_list[1:]):
Ti = T_list[i] + 273.15
Te = T + 273.15
dT = (Te - Ti) / n_steps
T = Ti
t_r = np.zeros_like(z) # residence time in each CSTR reactor
# iterate through the PFR cells
for n in range(n_steps):
# simulate the linear T-profile in each reactor
T = Ti + (n + 1) * dT
model.TP = T, None
r.syncState()
# Set the state of the reservoir to match that of the previous reactor
model.TPX = r.thermo.TPX
upstream.syncState()
# integrate the reactor forward in time until steady state is reached
sim.reinitialize()
sim.set_initial_time(0)
sim.advance_to_steady_state()
# compute velocity and transform into time
t_r[n] = r.mass / mass_flow_rate # residence time in this reactor
# write output data
states.append(r.thermo.state)
t[i] = np.sum(t_r)
compositions[i] = model.X[4:]
return compositions, t
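# A minimal sketch of how the inlet composition string passed to model.TPX
# above can be assembled from a {species_name: mole_fraction} mapping
# (species names below are placeholders, not mechanism names).
def _composition_string_sketch(fractions):
    return ', '.join('{}:{}'.format(name, x) for name, x in fractions.items())
# e.g. _composition_string_sketch({'EDC': 0.999, 'CCl4': 0.001})
# -> 'EDC:0.999, CCl4:0.001'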
def plot(T_list, cracking_rates):
ndata = len(T_list)
fig, ax1 = plt.subplots()
    l1 = ax1.plot(range(ndata), T_list, color='r',
                  marker='o', label=r'Temperature ($^\circ$C)')
    ax1.set_ylabel(r'Temperature ($^\circ$C)')
ax1.set_ylim(0, 600)
ax2 = ax1.twinx()
cracking_rates = [i * 100 for i in cracking_rates]
l2 = ax2.plot(range(ndata), cracking_rates, color='b',
marker='o', label='Cracking rates (%)')
ax2.set_ylabel('Cracking rates (%)')
ax2.set_ylim(-5, 100)
lns = l1 + l2
labs = [l.get_label() for l in lns]
ax1.legend(lns, labs, loc='best')
plt.title('Temperature and cracking rates curves')
ax1.set_xlabel('PFR index')
plt.xticks(range(ndata))
plt.show()
def test():
reaction_mech = '../KM/2001_Choi_EDC/chem_annotated_reversible.cti'
# reaction_mech = './2009_Schirmeister_EDC/chem_annotated_irreversible.cti'
T_list = [322, 350, 375, 399, 424, 451, 461, 466, 471,
477, 479, 479, 480, 481, 482, 483, 484, 485, 486]
pressure_0 = 11.4
CCl4_X_0 = 0.001
mass_flow_rate = 72
n_steps = 100
n_pfr = 18
length = 18
area = 3.14 * ((8 * 2.54 - 2 * 0.818) / 100) ** 2 / 4
compositions, t, cracking_rates = EDC_cracking(
reaction_mech,
T_list,
pressure_0,
CCl4_X_0,
mass_flow_rate,
n_steps,
n_pfr,
length,
area,
)
plot(T_list, cracking_rates)
def main():
reaction_mech_x = '../KM//2009_Schirmeister_EDC/chem_annotated_irreversible.cti'
# reaction_mech_y = '../KM/2001_Choi_EDC/chem_annotated_reversible.cti'
# Cantera simulation parameters
n = 1
# parser = argparse.ArgumentParser(description='Creating coking data.')
# parser.add_argument('--mass', required=True, type=float)
# parser.add_argument('--tin', required=True, type=float)
# parser.add_argument('--pressure', required=True, type=float)
# parser.add_argument('--ccl4', required=True, type=float)
# parser.add_argument('--chcl3', required=True, type=float)
# parser.add_argument('--tri', required=True, type=float)
# parser.add_argument('--cp', required=True, type=float)
# arg = parser.parse_args()
# CCl4_X_0 = arg.ccl4
# # 50 CCl4
# CHCl3_X_0 = arg.chcl3
# Tri_X_0 = arg.tri
# CP_X_0 = arg.cp
# T_in = arg.tin
# mass_flow_rate = arg.mass
# pressure_0 = arg.pressure
for mass_flow_rate in [23]:
for pressure_0 in [11.4]:
for CCl4_X_0 in [0,500,1000,1500,2000,2500]:
for CHCl3_X_0 in [0,100,200,300,400,500]:
for Tri_X_0 in [0,100,200,300,400,500]:
for CP_X_0 in [0,100,200,300,400,500]:
for T_in in [300,310,320,330,340,350]:
DATADIR = "../Data/cokingV1"
DATANAME = f'{mass_flow_rate}_{pressure_0}_{CCl4_X_0}_{CHCl3_X_0}_{Tri_X_0}_{CP_X_0}_{T_in}.csv'
print('Training data creation initiated at {0}'.format(time.asctime()))
if not os.path.exists(DATADIR):
os.makedirs(DATADIR)
if os.path.exists(f'{DATADIR}/{DATANAME}'):
if os.stat(f'{DATADIR}/{DATANAME}').st_size != 0:
sys.exit()
f = open(f'{DATADIR}/{DATANAME}', 'a')
if os.stat(f'{DATADIR}/{DATANAME}').st_size == 0:
f.write('Ti,Te,X1,X2,X3,X4,X5,X6,X7,X8,X9,X10,X11,X12,X13,X14,X15,X16,X17,X18,X19,X20,X21,X22,X23,X24,X25,X26,X27,X28,X29,pressure_0,CCl4,CHCl3,Tri,CP,t,tr\n')
T_list = []
if T_in == 300:
T_list = [300, 332, 361, 387, 411, 429.6, 443.4, 453.7, 460.5,
464.9, 468.7, 473, 477.8, 480.5, 481.5, 482.5, 483.5, 484.5, 486.7]
elif T_in == 310:
T_list = [310, 340, 368.35, 393.65, 416.5, 441.6, 452, 458.3, 464.6,
471, 474, 476.5, 478.8, 481.1, 482.1, 483.1, 484.1, 485.1, 486.7]
elif T_in == 320:
T_list = [320, 348, 374.7, 399.3, 424, 451.3, 460.7, 466, 471.3,
476.7, 478.7, 479.3, 480, 481.3, 482.3, 483.3, 484.3, 485.3, 486.7]
elif T_in == 330:
T_list = [330, 357.3, 382, 406.6, 431.3, 455.1, 461.5, 467.8, 473.1,
476.8, 478.8, 479.4, 480.1, 481.4, 482.4, 483.4, 484.4, 485.4, 486.7]
elif T_in == 340:
T_list = [340, 365.3, 390, 414.6, 439.3, 458.6, 464, 469.3, 474.6,
478, 479, 479.6, 480.3, 481.6, 482.6, 483.6, 484.6, 485.6, 486.7]
elif T_in == 350:
T_list = [350, 374.7, 399.3, 424, 449.3, 460.7, 466, 471.3, 476.7,
478.7, 479.3, 480, 481.3, 482.3, 483.3, 484.3, 485.3, 486, 486.7]
n_steps = 1000
n_pfr = 18
length = 18
area = 3.14 * (186.3 / 1000) ** 2 / 4
compositions, t = EDC_coking(
reaction_mech_x,
T_list,
pressure_0,
CCl4_X_0,
CHCl3_X_0,
Tri_X_0,
CP_X_0,
mass_flow_rate,
n_steps,
n_pfr,
length,
area
)
information = ''
for j in range(n_pfr):
information += str(T_list[j]) + ',' + str(T_list[j+1]) + ','
for k in compositions[j]:
information += str(k) + ','
information += str(pressure_0) + ',' + str(CCl4_X_0) + ',' + str(CHCl3_X_0) \
+ ',' + str(Tri_X_0) + ',' + str(CP_X_0) + ',' + \
str(sum(t[:j+1])) + ',' + str(t[j]) + '\n'
# information += str(pressure_0) + ',' + str(CCl4_X_0) + ',' + str(sum(t[:j+1])) + ',' + str(t[j]) + ','
# information += str(cracking_rates[j+1]) + '\n'
f.write(information)
f.close()
print('Generating training data finished')
print('Training data creation terminated at {0}'.format(time.asctime()))
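# A minimal sketch: the nested loops in main() above enumerate a full
# factorial grid of operating conditions; itertools.product expresses the
# same enumeration as a single flat iterable (values mirror main()).
import itertools
def _condition_grid_sketch():
    return itertools.product(
        [23],                              # mass_flow_rate (T/H)
        [11.4],                            # pressure_0 (atm)
        [0, 500, 1000, 1500, 2000, 2500],  # CCl4_X_0 (ppm)
        [0, 100, 200, 300, 400, 500],      # CHCl3_X_0 (ppm)
        [0, 100, 200, 300, 400, 500],      # Tri_X_0 (ppm)
        [0, 100, 200, 300, 400, 500],      # CP_X_0 (ppm)
        [300, 310, 320, 330, 340, 350],    # T_in (°C)
    )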
if __name__ == '__main__':
main()
| 42.091228
| 195
| 0.534887
|
2ea5edf848e99ce58305520b519e37cfb0fb41ef
| 1,758
|
py
|
Python
|
test/unit/module/test_duplicate.py
|
tomislacker/cfn-python-lint
|
f209ddfef9bcc1a005adfebcfcc16220b18deddb
|
[
"MIT-0"
] | 1
|
2020-05-08T20:12:31.000Z
|
2020-05-08T20:12:31.000Z
|
test/unit/module/test_duplicate.py
|
tomislacker/cfn-python-lint
|
f209ddfef9bcc1a005adfebcfcc16220b18deddb
|
[
"MIT-0"
] | null | null | null |
test/unit/module/test_duplicate.py
|
tomislacker/cfn-python-lint
|
f209ddfef9bcc1a005adfebcfcc16220b18deddb
|
[
"MIT-0"
] | 1
|
2020-12-01T14:54:28.000Z
|
2020-12-01T14:54:28.000Z
|
"""
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
SPDX-License-Identifier: MIT-0
"""
import json
from test.testlib.testcase import BaseTestCase
from cfnlint.rules import RulesCollection
from cfnlint.core import DEFAULT_RULESDIR # pylint: disable=E0401
import cfnlint.decode.cfn_yaml # pylint: disable=E0401
import cfnlint.decode.cfn_json # pylint: disable=E0401
class TestDuplicate(BaseTestCase):
"""Test Duplicates Parsing """
def setUp(self):
""" SetUp template object"""
self.rules = RulesCollection()
rulesdirs = [DEFAULT_RULESDIR]
for rulesdir in rulesdirs:
self.rules.create_from_directory(rulesdir)
def test_success_run(self):
"""Test success run"""
filename = 'test/fixtures/templates/good/generic.yaml'
try:
cfnlint.decode.cfn_yaml.load(filename)
except cfnlint.decode.cfn_yaml.CfnParseError:
assert(False)
return
assert(True)
    def test_fail_json_run(self):
        """Test failure run"""
filename = 'test/fixtures/templates/bad/duplicate.json'
try:
with open(filename) as fp:
json.load(fp, cls=cfnlint.decode.cfn_json.CfnJSONDecoder)
except cfnlint.decode.cfn_json.JSONDecodeError:
assert(True)
return
assert(False)
def test_fail_yaml_run(self):
"""Test failure run"""
filename = 'test/fixtures/templates/bad/duplicate.yaml'
try:
cfnlint.decode.cfn_yaml.load(filename)
except cfnlint.decode.cfn_yaml.CfnParseError:
assert(True)
return
assert(False)
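# A minimal sketch of duplicate-key detection using only the standard
# library, analogous to what the cfn_json decoder is exercised for above:
# object_pairs_hook sees every (key, value) pair, so repeats can be
# rejected explicitly.
def _reject_duplicates(pairs):
    seen = {}
    for key, value in pairs:
        if key in seen:
            raise ValueError('Duplicate key: {}'.format(key))
        seen[key] = value
    return seen
# e.g. json.loads('{"a": 1, "a": 2}', object_pairs_hook=_reject_duplicates)
# raises ValueError('Duplicate key: a')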
| 27.046154
| 73
| 0.643345
|
9c4c806f8e107a8d56178e28d6771241e967f8fb
| 10,810
|
py
|
Python
|
aiida/backends/tests/workflows.py
|
joepvd/aiida_core
|
6e9711046753332933f982971db1d7ac7e7ade58
|
[
"BSD-2-Clause"
] | null | null | null |
aiida/backends/tests/workflows.py
|
joepvd/aiida_core
|
6e9711046753332933f982971db1d7ac7e7ade58
|
[
"BSD-2-Clause"
] | null | null | null |
aiida/backends/tests/workflows.py
|
joepvd/aiida_core
|
6e9711046753332933f982971db1d7ac7e7ade58
|
[
"BSD-2-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
###########################################################################
# Copyright (c), The AiiDA team. All rights reserved. #
# This file is part of the AiiDA code. #
# #
# The code is hosted on GitHub at https://github.com/aiidateam/aiida_core #
# For further information on the license, see the LICENSE.txt file #
# For further information please visit http://www.aiida.net #
###########################################################################
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
from aiida.backends.testbase import AiidaTestCase
from aiida.backends.utils import get_workflow_list
from aiida.common.datastructures import wf_states
from aiida.orm.backends import construct_backend
from aiida.workflows.test import WFTestEmpty
from aiida.orm.implementation import get_workflow_info
from aiida.workflows.test import WFTestSimpleWithSubWF
class TestWorkflowBasic(AiidaTestCase):
"""
These tests check the basic features of workflows.
Now only the load_workflow function is tested.
"""
def test_load_workflows(self):
"""
Test for load_node() function.
"""
from aiida.orm import load_workflow
a = WFTestEmpty()
a.store()
self.assertEquals(a.pk, load_workflow(wf_id=a.pk).pk)
self.assertEquals(a.pk, load_workflow(wf_id=a.uuid).pk)
self.assertEquals(a.pk, load_workflow(pk=a.pk).pk)
self.assertEquals(a.pk, load_workflow(uuid=a.uuid).pk)
with self.assertRaises(ValueError):
load_workflow(wf_id=a.pk, pk=a.pk)
with self.assertRaises(ValueError):
load_workflow(pk=a.pk, uuid=a.uuid)
with self.assertRaises(ValueError):
load_workflow(pk=a.uuid)
with self.assertRaises(ValueError):
load_workflow(uuid=a.pk)
with self.assertRaises(ValueError):
load_workflow()
def test_listing_workflows(self):
"""
Test ensuring that the workflow listing works as expected.
(Listing initialized & running workflows and not listing finished
workflows or workflows with errors).
"""
backend = construct_backend()
# Assuming there is only one user
user = backend.users.find(email=self.user_email)[0]
# Creating a workflow & storing it
a = WFTestEmpty()
a.store()
# Setting manually the state to RUNNING.
a.set_state(wf_states.RUNNING)
# Getting all the available workflows of the current user
# and checking if we got the right one.
wfqs = get_workflow_list(all_states=True, user=user)
self.assertTrue(len(wfqs) == 1, "We expect one workflow")
a_prime = wfqs[0].get_aiida_class()
self.assertEqual(a.uuid, a_prime.uuid, "The uuid is not the expected "
"one")
# We ask all the running workflows. We should get one workflow.
wfqs = get_workflow_list(all_states=True, user=user)
self.assertTrue(len(wfqs) == 1, "We expect one workflow")
a_prime = wfqs[0].get_aiida_class()
self.assertEqual(a.uuid, a_prime.uuid, "The uuid is not the expected "
"one")
# We change the state of the workflow to FINISHED.
a.set_state(wf_states.FINISHED)
# Getting all the available workflows of the current user
# and checking if we got the right one.
wfqs = get_workflow_list(all_states=True, user=user)
self.assertTrue(len(wfqs) == 1, "We expect one workflow")
a_prime = wfqs[0].get_aiida_class()
self.assertEqual(a.uuid, a_prime.uuid, "The uuid is not the expected "
"one")
# We ask all the running workflows. We should get zero results.
wfqs = get_workflow_list(all_states=False, user=user)
self.assertTrue(len(wfqs) == 0, "We expect zero workflows")
# We change the state of the workflow to INITIALIZED.
a.set_state(wf_states.INITIALIZED)
# We ask all the running workflows. We should get one workflow.
wfqs = get_workflow_list(all_states=True, user=user)
self.assertTrue(len(wfqs) == 1, "We expect one workflow")
a_prime = wfqs[0].get_aiida_class()
self.assertEqual(a.uuid, a_prime.uuid, "The uuid is not the expected "
"one")
# We change the state of the workflow to ERROR.
a.set_state(wf_states.ERROR)
# We ask all the running workflows. We should get zero results.
wfqs = get_workflow_list(all_states=False, user=user)
self.assertTrue(len(wfqs) == 0, "We expect zero workflows")
def test_workflow_info(self):
"""
        This test checks that the workflow info is generated without any
exceptions
:return:
"""
backend = construct_backend()
# Assuming there is only one user
user = backend.users.find(email=self.user_email)[0]
# Creating a simple workflow & storing it
a = WFTestEmpty()
a.store()
# Emulate the workflow list
for w in get_workflow_list(all_states=True, user=user):
if not w.is_subworkflow():
get_workflow_info(w)
# Create a workflow with sub-workflows and store it
b = WFTestSimpleWithSubWF()
b.store()
# Emulate the workflow list
for w in get_workflow_list(all_states=True, user=user):
if not w.is_subworkflow():
get_workflow_info(w)
# Start the first workflow and perform a workflow list
b.start()
for w in get_workflow_list(all_states=True, user=user):
if not w.is_subworkflow():
get_workflow_info(w)
def test_wf_get_state(self):
"""
Simple test that checks the state of the workflows. We create two
workflows since the test order in the SQLA was influencing the value
of aiida.backends.sqlalchemy.models.workflow.DbWorkflow.state which
should be a Choice object, according to the SQLA doc. Sometimes it
was automatically converted to unicode.
Since we are interested to get a unicode from
aiida.orm.implementation.general.workflow.AbstractWorkflow#get_state
we enforce this conversion at
aiida.orm.implementation.sqlalchemy.workflow.Workflow#get_state
For more info, check issue #951
"""
# Creating two simple workflows & storing them
wf1 = WFTestEmpty()
wf1.store()
wf2 = WFTestEmpty()
wf2.store()
# Checking that the get_state doesn't throw exceptions and that
# it is a valid state
self.assertIn(wf1.get_state(), wf_states)
self.assertIn(wf2.get_state(), wf_states)
def test_wf_ctime(self):
import datetime
import pytz
# Get the current datetime (before the creation of the workflow)
dt_before = datetime.datetime.now(pytz.utc)
# Creating a simple workflow & storing it
wf = WFTestEmpty()
wf.store()
# Get the current datetime (after the creation of the workflow)
dt_after = datetime.datetime.now(pytz.utc)
self.assertLessEqual(dt_before, wf.ctime, "The workflow doesn't have"
"a valid creation time")
self.assertGreaterEqual(dt_after, wf.ctime, "The workflow doesn't "
"have a valid creation "
"time")
def test_failing_calc_in_wf(self):
"""
This test checks that a workflow (but also a workflow with
sub-workflows) that has an exception at one of its steps stops
properly and it is not left as RUNNING.
"""
from aiida.daemon.workflowmanager import execute_steps
from aiida.workflows.test import (FailingWFTestSimple,
FailingWFTestSimpleWithSubWF)
try:
# Testing the error propagation of a simple workflow
wf = FailingWFTestSimple()
wf.store()
step_no = 0
wf.start()
while wf.is_running():
execute_steps()
step_no += 1
self.assertLess(step_no, 5, "This workflow should have stopped "
"since it is failing")
# Testing the error propagation of a workflow with subworkflows
wf = FailingWFTestSimpleWithSubWF()
wf.store()
step_no = 0
wf.start()
while wf.is_running():
execute_steps()
step_no += 1
self.assertLess(step_no, 5, "This workflow should have stopped "
"since it is failing")
finally:
pass
def test_result_parameter_name_colision(self):
"""
        This test checks that the workflow parameters and results do not
collide. This was a problem in SQLA (Issue #960) but a test for both
backends is added (for completeness).
"""
# Creating a simple workflow & storing it
wf = WFTestEmpty()
wf.store()
# Set some parameters
params = {'band_calculation_set': 2,
'codename': 'pw-5.2.0',
'pseudo_family': 'SSSP_v0.7_eff_PBE'}
wf.set_params(params)
# Add some results that their names collide with the parameter names
wf.add_result('structure', 'test_string_1')
wf.add_result('codename', 'test_string_2')
# Check that we have the correct results
self.assertDictEqual(
{'structure': 'test_string_1', 'codename': 'test_string_2'},
wf.get_results(), "The workflow results are not the expected "
"ones.")
# Check that we have the correct parameters
self.assertDictEqual(params, wf.get_parameters(),
"The workflow parameters are not the expected "
"ones.")
def tearDown(self):
"""
Cleaning the database after each test. Since I don't
want the workflows of one test to interfere with the
workflows of the other tests.
"""
self._class_was_setup = True
self.clean_db()
self.insert_data()
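# A minimal sketch (not AiiDA API) of the identifier-disambiguation rule
# that test_load_workflows above exercises: exactly one of wf_id/pk/uuid
# may be supplied, pk must be an integer and uuid must be a string.
def _resolve_identifier_sketch(wf_id=None, pk=None, uuid=None):
    given = [v for v in (wf_id, pk, uuid) if v is not None]
    if len(given) != 1:
        raise ValueError('supply exactly one of wf_id, pk or uuid')
    if pk is not None and not isinstance(pk, int):
        raise ValueError('pk must be an integer')
    if uuid is not None and not isinstance(uuid, str):
        raise ValueError('uuid must be a string')
    return given[0]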
| 39.025271
| 80
| 0.590842
|
0ba23a714ee1c8ecc34c0ce9a63a9a5bd254e997
| 751
|
py
|
Python
|
examples/character_death_event_stream.py
|
spascou/ps2-census
|
4edce4b9bfe8af9aca2f28244cb4f70cad67dc93
|
[
"MIT"
] | 4
|
2020-05-19T16:20:32.000Z
|
2020-10-13T06:09:01.000Z
|
examples/character_death_event_stream.py
|
spascou/ps2-census
|
4edce4b9bfe8af9aca2f28244cb4f70cad67dc93
|
[
"MIT"
] | null | null | null |
examples/character_death_event_stream.py
|
spascou/ps2-census
|
4edce4b9bfe8af9aca2f28244cb4f70cad67dc93
|
[
"MIT"
] | 1
|
2021-03-08T06:14:53.000Z
|
2021-03-08T06:14:53.000Z
|
##
#
# This example subscribes to all character death events on the SolTech server, and
# handles 20 events before exiting.
# Output is in the adjacent NDJSON file.
#
##
from ps2_census import CharacterEvent, EventStream, EventStreamWorld, GenericCharacter
async def main():
stream: EventStream = await EventStream()
await stream.subscribe(
worlds=[EventStreamWorld.SOLTECH],
events=[CharacterEvent.DEATH],
characters=[GenericCharacter.ALL],
)
events_count: int = 0
while events_count < 20:
print(await stream.receive())
events_count += 1
await stream.clear_all_subscriptions()
await stream.close()
if __name__ == "__main__":
import asyncio
asyncio.run(main())
| 22.088235
| 86
| 0.691079
|
38ccb337acbbb791b6f15dd216d089343fc46267
| 2,867
|
py
|
Python
|
FrEIA/framework/reversible_sequential_net.py
|
psteinb/FrEIA
|
24135833171d0c95d817e4cfa2ad268de577ab2b
|
[
"MIT"
] | null | null | null |
FrEIA/framework/reversible_sequential_net.py
|
psteinb/FrEIA
|
24135833171d0c95d817e4cfa2ad268de577ab2b
|
[
"MIT"
] | null | null | null |
FrEIA/framework/reversible_sequential_net.py
|
psteinb/FrEIA
|
24135833171d0c95d817e4cfa2ad268de577ab2b
|
[
"MIT"
] | null | null | null |
import torch.nn as nn
import torch
class ReversibleSequential(nn.Module):
'''Simpler than FrEIA.framework.ReversibleGraphNet:
Only supports a sequential series of modules (no splitting, merging, branching off).
Has an append() method, to add new blocks in a more simple way than the computation-graph
based approach of ReversibleGraphNet. For example:
inn = ReversibleSequential(channels, dims_H, dims_W)
for i in range(n_blocks):
inn.append(FrEIA.modules.AllInOneBlock, clamp=2.0, permute_soft=True)
inn.append(FrEIA.modules.HaarDownsampling)
# and so on
'''
def __init__(self, *dims):
super().__init__()
self.shapes = [tuple(dims)]
self.conditions = []
self.module_list = nn.ModuleList()
def append(self, module_class, cond=None, cond_shape=None, **kwargs):
'''Append a reversible block from FrEIA.modules to the network.
module_class: Class from FrEIA.modules.
cond (int): index of which condition to use (conditions will be passed as list to forward()).
Conditioning nodes are not needed for ReversibleSequential.
cond_shape (tuple[int]): the shape of the condition tensor.
**kwargs: Further keyword arguments that are passed to the constructor of module_class (see example).
'''
dims_in = [self.shapes[-1]]
self.conditions.append(cond)
if cond is not None:
kwargs['dims_c'] = [cond_shape]
module = module_class(dims_in, **kwargs)
self.module_list.append(module)
        output_dims = module.output_dims(dims_in)
        assert len(output_dims) == 1, "Module has more than one output"
        self.shapes.append(output_dims[0])
def forward(self, x, c=None, rev=False):
'''
x (Tensor): input tensor (in contrast to ReversibleGraphNet, a list of tensors is not
supported, as ReversibleSequential only has one input).
c (list[Tensor]): list of conditions.
rev: whether to compute the network forward or reversed.
Returns
z (Tensor): network output.
jac (Tensor): log-jacobian-determinant.
There is no separate log_jacobian() method, it is automatically computed during forward().
'''
iterator = range(len(self.module_list))
jac = 0
if rev:
iterator = reversed(iterator)
for i in iterator:
if self.conditions[i] is None:
x, j = (self.module_list[i]([x], rev=rev)[0],
self.module_list[i].jacobian(x, rev=rev))
else:
x, j = (self.module_list[i]([x], c=[c[self.conditions[i]]], rev=rev)[0],
self.module_list[i].jacobian(x, c=[c[self.conditions[i]]], rev=rev))
jac = j + jac
return x, jac
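# A minimal usage sketch of ReversibleSequential, guarded so it only runs
# when this module is executed directly. It assumes FrEIA.modules is
# importable and that AllInOneBlock (named in the class docstring above)
# accepts a subnet_constructor, as in FrEIA's documented examples.
if __name__ == "__main__":
    import FrEIA.modules as Fm
    def subnet(c_in, c_out):
        # small fully-connected subnetwork used inside each coupling block
        return nn.Sequential(nn.Linear(c_in, 64), nn.ReLU(),
                             nn.Linear(64, c_out))
    inn = ReversibleSequential(8)  # 8-dimensional vector inputs
    for _ in range(4):
        inn.append(Fm.AllInOneBlock, subnet_constructor=subnet,
                   clamp=2.0, permute_soft=True)
    x = torch.randn(16, 8)
    z, log_jac = inn(x)            # forward: output and log|det J|
    x_rec, _ = inn(z, rev=True)    # inverse pass recovers the input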
| 37.233766
| 109
| 0.622602
|
edb0bd0fe732a2b2fbf0a79bafd3d180f85d3603
| 1,709
|
py
|
Python
|
gnotty/migrations/0001_initial.py
|
HankMurphy/gnotty
|
bea3762dc9cbc3cb21a5ae7224091cf027273c40
|
[
"BSD-2-Clause"
] | 59
|
2015-01-10T18:50:58.000Z
|
2020-11-27T22:19:57.000Z
|
gnotty/migrations/0001_initial.py
|
HankMurphy/gnotty
|
bea3762dc9cbc3cb21a5ae7224091cf027273c40
|
[
"BSD-2-Clause"
] | 3
|
2015-05-26T21:57:58.000Z
|
2017-05-01T00:30:32.000Z
|
gnotty/migrations/0001_initial.py
|
HankMurphy/gnotty
|
bea3762dc9cbc3cb21a5ae7224091cf027273c40
|
[
"BSD-2-Clause"
] | 22
|
2015-02-10T02:58:32.000Z
|
2021-01-13T11:20:43.000Z
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'IRCMessage'
db.create_table('gnotty_ircmessage', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('nickname', self.gf('django.db.models.fields.CharField')(max_length=100)),
('message', self.gf('django.db.models.fields.TextField')()),
('server', self.gf('django.db.models.fields.CharField')(max_length=100)),
('channel', self.gf('django.db.models.fields.CharField')(max_length=100)),
('message_time', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
))
db.send_create_signal('gnotty', ['IRCMessage'])
def backwards(self, orm):
# Deleting model 'IRCMessage'
db.delete_table('gnotty_ircmessage')
models = {
'gnotty.ircmessage': {
'Meta': {'ordering': "('message_time',)", 'object_name': 'IRCMessage'},
'channel': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'message_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'nickname': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'server': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['gnotty']
| 42.725
| 117
| 0.600936
|
d2c89d033b806a33dc96257d290d525d7494d7dc
| 2,781
|
py
|
Python
|
src/robot/output/output.py
|
Lemonlemmings/robotframework
|
1acecfdba69b361ae533e5d2920de764c4763839
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2020-10-23T09:43:53.000Z
|
2020-10-23T09:43:53.000Z
|
src/robot/output/output.py
|
Lemonlemmings/robotframework
|
1acecfdba69b361ae533e5d2920de764c4763839
|
[
"ECL-2.0",
"Apache-2.0"
] | 22
|
2021-03-10T07:29:37.000Z
|
2022-02-28T04:11:06.000Z
|
src/robot/output/output.py
|
Lemonlemmings/robotframework
|
1acecfdba69b361ae533e5d2920de764c4763839
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# Copyright 2008-2015 Nokia Networks
# Copyright 2016- Robot Framework Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from . import pyloggingconf
from .debugfile import DebugFile
from .listeners import LibraryListeners, Listeners
from .logger import LOGGER
from .loggerhelper import AbstractLogger
from .xmllogger import XmlLogger
from .jsonlogger import JsonLogger
class Output(AbstractLogger):
def __init__(self, settings):
AbstractLogger.__init__(self)
if str(settings.output).lower().endswith(".json") or settings.json:
self._outputlogger = JsonLogger(settings.output, settings.log_level, settings.rpa)
else:
self._outputlogger = XmlLogger(settings.output, settings.log_level, settings.rpa)
self.listeners = Listeners(settings.listeners, settings.log_level)
self.library_listeners = LibraryListeners(settings.log_level)
self._register_loggers(DebugFile(settings.debug_file))
self._settings = settings
def _register_loggers(self, debug_file):
LOGGER.register_output_logger(self._outputlogger)
LOGGER.register_listeners(self.listeners or None, self.library_listeners)
if debug_file:
LOGGER.register_logger(debug_file)
def register_error_listener(self, listener):
LOGGER.register_error_listener(listener)
def close(self, result):
self._outputlogger.visit_statistics(result.statistics)
self._outputlogger.close()
LOGGER.unregister_xml_logger()
LOGGER.output_file('Output', self._settings['Output'])
def start_suite(self, suite):
LOGGER.start_suite(suite)
def end_suite(self, suite):
LOGGER.end_suite(suite)
def start_test(self, test):
LOGGER.start_test(test)
def end_test(self, test):
LOGGER.end_test(test)
def start_keyword(self, kw):
LOGGER.start_keyword(kw)
def end_keyword(self, kw):
LOGGER.end_keyword(kw)
def message(self, msg):
LOGGER.log_message(msg)
def set_log_level(self, level):
pyloggingconf.set_level(level)
self.listeners.set_log_level(level)
self.library_listeners.set_log_level(level)
return self._outputlogger.set_log_level(level)
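# A minimal sketch of the format-selection rule in Output.__init__ above:
# JSON logging is chosen either by the output file extension or by the
# explicit json setting, otherwise XML logging is used.
def _pick_logger_format(output_path, json_flag):
    if str(output_path).lower().endswith('.json') or json_flag:
        return 'json'
    return 'xml'
# _pick_logger_format('results/output.json', False) -> 'json'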
| 35.202532
| 94
| 0.720245
|
612080b336e337bef9b902285565db59b8d05c6e
| 30,911
|
py
|
Python
|
test/functional/test_framework/p2p.py
|
robbelouwet/Elixir
|
609412402c5dd4fb9d77ae6d87505d8efd608132
|
[
"MIT"
] | null | null | null |
test/functional/test_framework/p2p.py
|
robbelouwet/Elixir
|
609412402c5dd4fb9d77ae6d87505d8efd608132
|
[
"MIT"
] | null | null | null |
test/functional/test_framework/p2p.py
|
robbelouwet/Elixir
|
609412402c5dd4fb9d77ae6d87505d8efd608132
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2010 ArtForz -- public domain half-a-node
# Copyright (c) 2012 Jeff Garzik
# Copyright (c) 2010-2020 The Elixir Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test objects for interacting with a elixird node over the p2p protocol.
The P2PInterface objects interact with the elixird nodes under test using the
node's p2p interface. They can be used to send messages to the node, and
callbacks can be registered that execute when messages are received from the
node. Messages are sent to/received from the node on an asyncio event loop.
State held inside the objects must be guarded by the p2p_lock to avoid data
races between the main testing thread and the event loop.
P2PConnection: A low-level connection object to a node's P2P interface
P2PInterface: A high-level interface object for communicating to a node over P2P
P2PDataStore: A p2p interface class that keeps a store of transactions and blocks
and can respond correctly to getdata and getheaders messages
P2PTxInvStore: A p2p interface class that inherits from P2PDataStore, and keeps
a count of how many times each txid has been announced."""
import asyncio
from collections import defaultdict
from io import BytesIO
import logging
import struct
import sys
import threading
from test_framework.messages import (
CBlockHeader,
MAX_HEADERS_RESULTS,
msg_addr,
msg_addrv2,
msg_block,
MSG_BLOCK,
msg_blocktxn,
msg_cfcheckpt,
msg_cfheaders,
msg_cfilter,
msg_cmpctblock,
msg_feefilter,
msg_filteradd,
msg_filterclear,
msg_filterload,
msg_getaddr,
msg_getblocks,
msg_getblocktxn,
msg_getdata,
msg_getheaders,
msg_headers,
msg_inv,
msg_mempool,
msg_merkleblock,
msg_notfound,
msg_ping,
msg_pong,
msg_sendaddrv2,
msg_sendcmpct,
msg_sendheaders,
msg_tx,
MSG_TX,
MSG_TYPE_MASK,
msg_verack,
msg_version,
MSG_WTX,
msg_wtxidrelay,
NODE_NETWORK,
NODE_WITNESS,
sha256,
)
from test_framework.util import (
MAX_NODES,
p2p_port,
wait_until_helper,
)
logger = logging.getLogger("TestFramework.p2p")
# The minimum P2P version that this test framework supports
MIN_P2P_VERSION_SUPPORTED = 60001
# The P2P version that this test framework implements and sends in its `version` message
# Version 70016 supports wtxid relay
P2P_VERSION = 70016
# The services that this test framework offers in its `version` message
P2P_SERVICES = NODE_NETWORK | NODE_WITNESS
# The P2P user agent string that this test framework sends in its `version` message
P2P_SUBVERSION = "/python-p2p-tester:0.0.3/"
# Value for relay that this test framework sends in its `version` message
P2P_VERSION_RELAY = 1
MESSAGEMAP = {
b"addr": msg_addr,
b"addrv2": msg_addrv2,
b"block": msg_block,
b"blocktxn": msg_blocktxn,
b"cfcheckpt": msg_cfcheckpt,
b"cfheaders": msg_cfheaders,
b"cfilter": msg_cfilter,
b"cmpctblock": msg_cmpctblock,
b"feefilter": msg_feefilter,
b"filteradd": msg_filteradd,
b"filterclear": msg_filterclear,
b"filterload": msg_filterload,
b"getaddr": msg_getaddr,
b"getblocks": msg_getblocks,
b"getblocktxn": msg_getblocktxn,
b"getdata": msg_getdata,
b"getheaders": msg_getheaders,
b"headers": msg_headers,
b"inv": msg_inv,
b"mempool": msg_mempool,
b"merkleblock": msg_merkleblock,
b"notfound": msg_notfound,
b"ping": msg_ping,
b"pong": msg_pong,
b"sendaddrv2": msg_sendaddrv2,
b"sendcmpct": msg_sendcmpct,
b"sendheaders": msg_sendheaders,
b"tx": msg_tx,
b"verack": msg_verack,
b"version": msg_version,
b"wtxidrelay": msg_wtxidrelay,
}
MAGIC_BYTES = {
"mainnet": b"\xf9\xbe\xb4\xd9", # mainnet
"testnet3": b"\x0b\x11\x09\x07", # testnet3
"regtest": b"\xfa\xbf\xb5\xda", # regtest
"signet": b"\x0a\x03\xcf\x40", # signet
}
class P2PConnection(asyncio.Protocol):
"""A low-level connection object to a node's P2P interface.
This class is responsible for:
- opening and closing the TCP connection to the node
- reading bytes from and writing bytes to the socket
- deserializing and serializing the P2P message header
- logging messages as they are sent and received
    This class contains no logic for handling the P2P message payloads. It must be
sub-classed and the on_message() callback overridden."""
def __init__(self):
# The underlying transport of the connection.
# Should only call methods on this from the NetworkThread, c.f. call_soon_threadsafe
self._transport = None
@property
def is_connected(self):
return self._transport is not None
def peer_connect_helper(self, dstaddr, dstport, net, timeout_factor):
assert not self.is_connected
self.timeout_factor = timeout_factor
self.dstaddr = dstaddr
self.dstport = dstport
# The initial message to send after the connection was made:
self.on_connection_send_msg = None
self.recvbuf = b""
self.magic_bytes = MAGIC_BYTES[net]
def peer_connect(self, dstaddr, dstport, *, net, timeout_factor):
self.peer_connect_helper(dstaddr, dstport, net, timeout_factor)
loop = NetworkThread.network_event_loop
logger.debug('Connecting to Elixir Node: %s:%d' % (self.dstaddr, self.dstport))
coroutine = loop.create_connection(lambda: self, host=self.dstaddr, port=self.dstport)
return lambda: loop.call_soon_threadsafe(loop.create_task, coroutine)
def peer_accept_connection(self, connect_id, connect_cb=lambda: None, *, net, timeout_factor):
self.peer_connect_helper('0', 0, net, timeout_factor)
logger.debug('Listening for Elixir Node with id: {}'.format(connect_id))
return lambda: NetworkThread.listen(self, connect_cb, idx=connect_id)
def peer_disconnect(self):
# Connection could have already been closed by other end.
NetworkThread.network_event_loop.call_soon_threadsafe(lambda: self._transport and self._transport.abort())
# Connection and disconnection methods
def connection_made(self, transport):
"""asyncio callback when a connection is opened."""
assert not self._transport
logger.debug("Connected & Listening: %s:%d" % (self.dstaddr, self.dstport))
self._transport = transport
if self.on_connection_send_msg:
self.send_message(self.on_connection_send_msg)
self.on_connection_send_msg = None # Never used again
self.on_open()
def connection_lost(self, exc):
"""asyncio callback when a connection is closed."""
if exc:
logger.warning("Connection lost to {}:{} due to {}".format(self.dstaddr, self.dstport, exc))
else:
logger.debug("Closed connection to: %s:%d" % (self.dstaddr, self.dstport))
self._transport = None
self.recvbuf = b""
self.on_close()
# Socket read methods
def data_received(self, t):
"""asyncio callback when data is read from the socket."""
if len(t) > 0:
self.recvbuf += t
self._on_data()
def _on_data(self):
"""Try to read P2P messages from the recv buffer.
This method reads data from the buffer in a loop. It deserializes,
parses and verifies the P2P header, then passes the P2P payload to
the on_message callback for processing."""
try:
while True:
if len(self.recvbuf) < 4:
return
if self.recvbuf[:4] != self.magic_bytes:
raise ValueError("magic bytes mismatch: {} != {}".format(repr(self.magic_bytes), repr(self.recvbuf)))
if len(self.recvbuf) < 4 + 12 + 4 + 4:
return
msgtype = self.recvbuf[4:4+12].split(b"\x00", 1)[0]
msglen = struct.unpack("<i", self.recvbuf[4+12:4+12+4])[0]
checksum = self.recvbuf[4+12+4:4+12+4+4]
if len(self.recvbuf) < 4 + 12 + 4 + 4 + msglen:
return
msg = self.recvbuf[4+12+4+4:4+12+4+4+msglen]
th = sha256(msg)
h = sha256(th)
if checksum != h[:4]:
raise ValueError("got bad checksum " + repr(self.recvbuf))
self.recvbuf = self.recvbuf[4+12+4+4+msglen:]
if msgtype not in MESSAGEMAP:
raise ValueError("Received unknown msgtype from %s:%d: '%s' %s" % (self.dstaddr, self.dstport, msgtype, repr(msg)))
f = BytesIO(msg)
t = MESSAGEMAP[msgtype]()
t.deserialize(f)
self._log_message("receive", t)
self.on_message(t)
except Exception as e:
            logger.exception('Error reading message: %s', repr(e))
raise
def on_message(self, message):
"""Callback for processing a P2P payload. Must be overridden by derived class."""
raise NotImplementedError
# Socket write methods
def send_message(self, message):
"""Send a P2P message over the socket.
This method takes a P2P payload, builds the P2P header and adds
the message to the send buffer to be sent over the socket."""
tmsg = self.build_message(message)
self._log_message("send", message)
return self.send_raw_message(tmsg)
def send_raw_message(self, raw_message_bytes):
if not self.is_connected:
raise IOError('Not connected')
def maybe_write():
if not self._transport:
return
if self._transport.is_closing():
return
self._transport.write(raw_message_bytes)
NetworkThread.network_event_loop.call_soon_threadsafe(maybe_write)
# Class utility methods
def build_message(self, message):
"""Build a serialized P2P message"""
msgtype = message.msgtype
data = message.serialize()
tmsg = self.magic_bytes
tmsg += msgtype
tmsg += b"\x00" * (12 - len(msgtype))
tmsg += struct.pack("<I", len(data))
th = sha256(data)
h = sha256(th)
tmsg += h[:4]
tmsg += data
return tmsg
def _log_message(self, direction, msg):
"""Logs a message being sent or received over the connection."""
if direction == "send":
log_message = "Send message to "
elif direction == "receive":
log_message = "Received message from "
log_message += "%s:%d: %s" % (self.dstaddr, self.dstport, repr(msg)[:500])
if len(log_message) > 500:
log_message += "... (msg truncated)"
logger.debug(log_message)
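# A minimal sketch of the wire framing implemented by build_message() and
# parsed by _on_data() above: 4 magic bytes, a 12-byte zero-padded command,
# a 4-byte little-endian payload length, a 4-byte checksum (the first 4
# bytes of the double-SHA256 of the payload), then the payload itself.
def _frame_sketch(magic, msgtype, payload):
    header = magic
    header += msgtype + b"\x00" * (12 - len(msgtype))  # zero-padded command
    header += struct.pack("<I", len(payload))          # payload length
    header += sha256(sha256(payload))[:4]              # payload checksum
    return header + payload
# e.g. _frame_sketch(MAGIC_BYTES["regtest"], b"ping", b"\x00" * 8)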
class P2PInterface(P2PConnection):
"""A high-level P2P interface class for communicating with a Elixir node.
This class provides high-level callbacks for processing P2P message
payloads, as well as convenience methods for interacting with the
node over P2P.
Individual testcases should subclass this and override the on_* methods
if they want to alter message handling behaviour."""
def __init__(self, support_addrv2=False, wtxidrelay=True):
super().__init__()
# Track number of messages of each type received.
# Should be read-only in a test.
self.message_count = defaultdict(int)
# Track the most recent message of each type.
# To wait for a message to be received, pop that message from
# this and use self.wait_until.
self.last_message = {}
# A count of the number of ping messages we've sent to the node
self.ping_counter = 1
# The network services received from the peer
self.nServices = 0
self.support_addrv2 = support_addrv2
# If the peer supports wtxid-relay
self.wtxidrelay = wtxidrelay
def peer_connect_send_version(self, services):
# Send a version msg
vt = msg_version()
vt.nVersion = P2P_VERSION
vt.strSubVer = P2P_SUBVERSION
vt.relay = P2P_VERSION_RELAY
vt.nServices = services
vt.addrTo.ip = self.dstaddr
vt.addrTo.port = self.dstport
vt.addrFrom.ip = "0.0.0.0"
vt.addrFrom.port = 0
self.on_connection_send_msg = vt # Will be sent in connection_made callback
def peer_connect(self, *args, services=P2P_SERVICES, send_version=True, **kwargs):
create_conn = super().peer_connect(*args, **kwargs)
if send_version:
self.peer_connect_send_version(services)
return create_conn
def peer_accept_connection(self, *args, services=NODE_NETWORK | NODE_WITNESS, **kwargs):
create_conn = super().peer_accept_connection(*args, **kwargs)
self.peer_connect_send_version(services)
return create_conn
# Message receiving methods
def on_message(self, message):
"""Receive message and dispatch message to appropriate callback.
We keep a count of how many of each message type has been received
and the most recent message of each type."""
with p2p_lock:
try:
msgtype = message.msgtype.decode('ascii')
self.message_count[msgtype] += 1
self.last_message[msgtype] = message
getattr(self, 'on_' + msgtype)(message)
except:
print("ERROR delivering %s (%s)" % (repr(message), sys.exc_info()[0]))
raise
# Callback methods. Can be overridden by subclasses in individual test
# cases to provide custom message handling behaviour.
def on_open(self):
pass
def on_close(self):
pass
def on_addr(self, message): pass
def on_addrv2(self, message): pass
def on_block(self, message): pass
def on_blocktxn(self, message): pass
def on_cfcheckpt(self, message): pass
def on_cfheaders(self, message): pass
def on_cfilter(self, message): pass
def on_cmpctblock(self, message): pass
def on_feefilter(self, message): pass
def on_filteradd(self, message): pass
def on_filterclear(self, message): pass
def on_filterload(self, message): pass
def on_getaddr(self, message): pass
def on_getblocks(self, message): pass
def on_getblocktxn(self, message): pass
def on_getdata(self, message): pass
def on_getheaders(self, message): pass
def on_headers(self, message): pass
def on_mempool(self, message): pass
def on_merkleblock(self, message): pass
def on_notfound(self, message): pass
def on_pong(self, message): pass
def on_sendaddrv2(self, message): pass
def on_sendcmpct(self, message): pass
def on_sendheaders(self, message): pass
def on_tx(self, message): pass
def on_wtxidrelay(self, message): pass
def on_inv(self, message):
want = msg_getdata()
for i in message.inv:
if i.type != 0:
want.inv.append(i)
if len(want.inv):
self.send_message(want)
def on_ping(self, message):
self.send_message(msg_pong(message.nonce))
def on_verack(self, message):
pass
def on_version(self, message):
assert message.nVersion >= MIN_P2P_VERSION_SUPPORTED, "Version {} received. Test framework only supports versions greater than {}".format(message.nVersion, MIN_P2P_VERSION_SUPPORTED)
if message.nVersion >= 70016 and self.wtxidrelay:
self.send_message(msg_wtxidrelay())
if self.support_addrv2:
self.send_message(msg_sendaddrv2())
self.send_message(msg_verack())
self.nServices = message.nServices
# Connection helper methods
def wait_until(self, test_function_in, *, timeout=60, check_connected=True):
def test_function():
if check_connected:
assert self.is_connected
return test_function_in()
wait_until_helper(test_function, timeout=timeout, lock=p2p_lock, timeout_factor=self.timeout_factor)
def wait_for_connect(self, timeout=60):
test_function = lambda: self.is_connected
wait_until_helper(test_function, timeout=timeout, lock=p2p_lock)
def wait_for_disconnect(self, timeout=60):
test_function = lambda: not self.is_connected
self.wait_until(test_function, timeout=timeout, check_connected=False)
# Message receiving helper methods
def wait_for_tx(self, txid, timeout=60):
def test_function():
if not self.last_message.get('tx'):
return False
return self.last_message['tx'].tx.rehash() == txid
self.wait_until(test_function, timeout=timeout)
def wait_for_block(self, blockhash, timeout=60):
def test_function():
return self.last_message.get("block") and self.last_message["block"].block.rehash() == blockhash
self.wait_until(test_function, timeout=timeout)
def wait_for_header(self, blockhash, timeout=60):
def test_function():
last_headers = self.last_message.get('headers')
if not last_headers:
return False
return last_headers.headers[0].rehash() == int(blockhash, 16)
self.wait_until(test_function, timeout=timeout)
def wait_for_merkleblock(self, blockhash, timeout=60):
def test_function():
last_filtered_block = self.last_message.get('merkleblock')
if not last_filtered_block:
return False
return last_filtered_block.merkleblock.header.rehash() == int(blockhash, 16)
self.wait_until(test_function, timeout=timeout)
def wait_for_getdata(self, hash_list, timeout=60):
"""Waits for a getdata message.
The object hashes in the inventory vector must match the provided hash_list."""
def test_function():
last_data = self.last_message.get("getdata")
if not last_data:
return False
return [x.hash for x in last_data.inv] == hash_list
self.wait_until(test_function, timeout=timeout)
def wait_for_getheaders(self, timeout=60):
"""Waits for a getheaders message.
        Receiving any getheaders message will satisfy the predicate. The last_message["getheaders"]
value must be explicitly cleared before calling this method, or this will return
immediately with success. TODO: change this method to take a hash value and only
return true if the correct block header has been requested."""
def test_function():
return self.last_message.get("getheaders")
self.wait_until(test_function, timeout=timeout)
def wait_for_inv(self, expected_inv, timeout=60):
"""Waits for an INV message and checks that the first inv object in the message was as expected."""
if len(expected_inv) > 1:
raise NotImplementedError("wait_for_inv() will only verify the first inv object")
def test_function():
return self.last_message.get("inv") and \
self.last_message["inv"].inv[0].type == expected_inv[0].type and \
self.last_message["inv"].inv[0].hash == expected_inv[0].hash
self.wait_until(test_function, timeout=timeout)
def wait_for_verack(self, timeout=60):
def test_function():
return "verack" in self.last_message
self.wait_until(test_function, timeout=timeout)
# Message sending helper functions
def send_and_ping(self, message, timeout=60):
self.send_message(message)
self.sync_with_ping(timeout=timeout)
def sync_send_with_ping(self, timeout=60):
"""Ensure SendMessages is called on this connection"""
# Calling sync_with_ping twice requires that the node calls
# `ProcessMessage` twice, and thus ensures `SendMessages` must have
# been called at least once
self.sync_with_ping()
self.sync_with_ping()
def sync_with_ping(self, timeout=60):
"""Ensure ProcessMessages is called on this connection"""
self.send_message(msg_ping(nonce=self.ping_counter))
def test_function():
return self.last_message.get("pong") and self.last_message["pong"].nonce == self.ping_counter
self.wait_until(test_function, timeout=timeout)
self.ping_counter += 1
# One lock for synchronizing all data access between the network event loop (see
# NetworkThread below) and the thread running the test logic. For simplicity,
# P2PConnection acquires this lock whenever delivering a message to a P2PInterface.
# This lock should be acquired in the thread running the test logic to synchronize
# access to any data shared with the P2PInterface or P2PConnection.
p2p_lock = threading.Lock()
class NetworkThread(threading.Thread):
network_event_loop = None
def __init__(self):
super().__init__(name="NetworkThread")
# There is only one event loop and no more than one thread must be created
assert not self.network_event_loop
NetworkThread.listeners = {}
NetworkThread.protos = {}
NetworkThread.network_event_loop = asyncio.new_event_loop()
def run(self):
"""Start the network thread."""
self.network_event_loop.run_forever()
def close(self, timeout=10):
"""Close the connections and network event loop."""
self.network_event_loop.call_soon_threadsafe(self.network_event_loop.stop)
wait_until_helper(lambda: not self.network_event_loop.is_running(), timeout=timeout)
self.network_event_loop.close()
self.join(timeout)
# Safe to remove event loop.
NetworkThread.network_event_loop = None
@classmethod
def listen(cls, p2p, callback, port=None, addr=None, idx=1):
""" Ensure a listening server is running on the given port, and run the
protocol specified by `p2p` on the next connection to it. Once ready
for connections, call `callback`."""
if port is None:
assert 0 < idx <= MAX_NODES
port = p2p_port(MAX_NODES - idx)
if addr is None:
addr = '127.0.0.1'
coroutine = cls.create_listen_server(addr, port, callback, p2p)
cls.network_event_loop.call_soon_threadsafe(cls.network_event_loop.create_task, coroutine)
@classmethod
async def create_listen_server(cls, addr, port, callback, proto):
def peer_protocol():
"""Returns a function that does the protocol handling for a new
connection. To allow different connections to have different
behaviors, the protocol function is first put in the cls.protos
dict. When the connection is made, the function removes the
protocol function from that dict, and returns it so the event loop
can start executing it."""
response = cls.protos.get((addr, port))
cls.protos[(addr, port)] = None
return response
if (addr, port) not in cls.listeners:
# When creating a listener on a given (addr, port) we only need to
# do it once. If we want different behaviors for different
# connections, we can accomplish this by providing different
# `proto` functions
listener = await cls.network_event_loop.create_server(peer_protocol, addr, port)
logger.debug("Listening server on %s:%d should be started" % (addr, port))
cls.listeners[(addr, port)] = listener
cls.protos[(addr, port)] = proto
callback(addr, port)
class P2PDataStore(P2PInterface):
"""A P2P data store class.
Keeps a block and transaction store and responds correctly to getdata and getheaders requests."""
def __init__(self):
super().__init__()
# store of blocks. key is block hash, value is a CBlock object
self.block_store = {}
self.last_block_hash = ''
# store of txs. key is txid, value is a CTransaction object
self.tx_store = {}
self.getdata_requests = []
def on_getdata(self, message):
"""Check for the tx/block in our stores and if found, reply with an inv message."""
for inv in message.inv:
self.getdata_requests.append(inv.hash)
if (inv.type & MSG_TYPE_MASK) == MSG_TX and inv.hash in self.tx_store.keys():
self.send_message(msg_tx(self.tx_store[inv.hash]))
elif (inv.type & MSG_TYPE_MASK) == MSG_BLOCK and inv.hash in self.block_store.keys():
self.send_message(msg_block(self.block_store[inv.hash]))
else:
logger.debug('getdata message type {} received.'.format(hex(inv.type)))
def on_getheaders(self, message):
"""Search back through our block store for the locator, and reply with a headers message if found."""
locator, hash_stop = message.locator, message.hashstop
# Assume that the most recent block added is the tip
if not self.block_store:
return
headers_list = [self.block_store[self.last_block_hash]]
while headers_list[-1].sha256 not in locator.vHave:
# Walk back through the block store, adding headers to headers_list
# as we go.
prev_block_hash = headers_list[-1].hashPrevBlock
if prev_block_hash in self.block_store:
prev_block_header = CBlockHeader(self.block_store[prev_block_hash])
headers_list.append(prev_block_header)
if prev_block_header.sha256 == hash_stop:
# if this is the hashstop header, stop here
break
else:
logger.debug('block hash {} not found in block store'.format(hex(prev_block_hash)))
break
# Truncate the list if there are too many headers
headers_list = headers_list[:-MAX_HEADERS_RESULTS - 1:-1]
response = msg_headers(headers_list)
if response is not None:
self.send_message(response)
def send_blocks_and_test(self, blocks, node, *, success=True, force_send=False, reject_reason=None, expect_disconnect=False, timeout=60):
"""Send blocks to test node and test whether the tip advances.
- add all blocks to our block_store
- send a headers message for the final block
- the on_getheaders handler will ensure that any getheaders are responded to
- if force_send is False: wait for getdata for each of the blocks. The on_getdata handler will
ensure that any getdata messages are responded to. Otherwise send the full block unsolicited.
- if success is True: assert that the node's tip advances to the most recent block
- if success is False: assert that the node's tip doesn't advance
- if reject_reason is set: assert that the correct reject message is logged"""
with p2p_lock:
for block in blocks:
self.block_store[block.sha256] = block
self.last_block_hash = block.sha256
reject_reason = [reject_reason] if reject_reason else []
with node.assert_debug_log(expected_msgs=reject_reason):
if force_send:
for b in blocks:
self.send_message(msg_block(block=b))
else:
self.send_message(msg_headers([CBlockHeader(block) for block in blocks]))
self.wait_until(
lambda: blocks[-1].sha256 in self.getdata_requests,
timeout=timeout,
check_connected=success,
)
if expect_disconnect:
self.wait_for_disconnect(timeout=timeout)
else:
self.sync_with_ping(timeout=timeout)
if success:
self.wait_until(lambda: node.getbestblockhash() == blocks[-1].hash, timeout=timeout)
else:
assert node.getbestblockhash() != blocks[-1].hash
def send_txs_and_test(self, txs, node, *, success=True, expect_disconnect=False, reject_reason=None):
"""Send txs to test node and test whether they're accepted to the mempool.
- add all txs to our tx_store
- send tx messages for all txs
- if success is True/False: assert that the txs are/are not accepted to the mempool
- if expect_disconnect is True: Skip the sync with ping
- if reject_reason is set: assert that the correct reject message is logged."""
with p2p_lock:
for tx in txs:
self.tx_store[tx.sha256] = tx
reject_reason = [reject_reason] if reject_reason else []
with node.assert_debug_log(expected_msgs=reject_reason):
for tx in txs:
self.send_message(msg_tx(tx))
if expect_disconnect:
self.wait_for_disconnect()
else:
self.sync_with_ping()
raw_mempool = node.getrawmempool()
if success:
# Check that all txs are now in the mempool
for tx in txs:
assert tx.hash in raw_mempool, "{} not found in mempool".format(tx.hash)
else:
# Check that none of the txs are now in the mempool
for tx in txs:
assert tx.hash not in raw_mempool, "{} tx found in mempool".format(tx.hash)
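
# Illustrative use of P2PDataStore; `node`, `blocks`, `bad_block`, `tx` and the
# reject string are assumptions standing in for values a concrete test would
# provide:
#
#     peer = node.add_p2p_connection(P2PDataStore())
#     peer.send_blocks_and_test(blocks, node, success=True)
#     peer.send_blocks_and_test([bad_block], node, success=False,
#                               reject_reason='bad-txnmrklroot')
#     peer.send_txs_and_test([tx], node, success=True)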
class P2PTxInvStore(P2PInterface):
"""A P2PInterface which stores a count of how many times each txid has been announced."""
def __init__(self):
super().__init__()
self.tx_invs_received = defaultdict(int)
def on_inv(self, message):
super().on_inv(message) # Send getdata in response.
# Store how many times invs have been received for each tx.
for i in message.inv:
if (i.type == MSG_TX) or (i.type == MSG_WTX):
# save txid
self.tx_invs_received[i.hash] += 1
def get_invs(self):
with p2p_lock:
return list(self.tx_invs_received.keys())
def wait_for_broadcast(self, txns, timeout=60):
"""Waits for the txns (list of txids) to complete initial broadcast.
The mempool should mark unbroadcast=False for these transactions.
"""
# Wait until invs have been received (and getdatas sent) for each txid.
self.wait_until(lambda: set(self.tx_invs_received.keys()) == set([int(tx, 16) for tx in txns]), timeout=timeout)
# Flush messages and wait for the getdatas to be processed
self.sync_with_ping()
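
# Illustrative use of P2PTxInvStore for checking initial broadcast; `node`,
# `raw_tx` and the assert_equal helper are assumptions for the sketch:
#
#     peer = node.add_p2p_connection(P2PTxInvStore())
#     txid = node.sendrawtransaction(raw_tx)
#     peer.wait_for_broadcast([txid])
#     assert_equal(peer.tx_invs_received[int(txid, 16)], 1)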
| 39.078382
| 190
| 0.649866
|
f5dd6170892bc2fa85463e70697baf5f29d9d022
| 2,179
|
py
|
Python
|
stardog/http/client.py
|
drahnreb/pystardog
|
4ed4f42bc90b9713316601d329939dfe3993f3a7
|
[
"Apache-2.0"
] | null | null | null |
stardog/http/client.py
|
drahnreb/pystardog
|
4ed4f42bc90b9713316601d329939dfe3993f3a7
|
[
"Apache-2.0"
] | null | null | null |
stardog/http/client.py
|
drahnreb/pystardog
|
4ed4f42bc90b9713316601d329939dfe3993f3a7
|
[
"Apache-2.0"
] | null | null | null |
import requests
import requests.auth
import requests_toolbelt.multipart as multipart
from .. import exceptions as exceptions
class Client(object):
DEFAULT_ENDPOINT = 'http://localhost:5820'
DEFAULT_USERNAME = 'admin'
DEFAULT_PASSWORD = 'admin'
def __init__(self,
endpoint=None,
database=None,
username=None,
password=None,
auth=None):
self.url = endpoint if endpoint else self.DEFAULT_ENDPOINT
        # XXX this might not be right when a custom auth object is used. Ideally we would not
        # store this information on this object, but it is used as the "creator" when a stored
        # procedure is created
self.username = username if username else self.DEFAULT_USERNAME
if database:
self.url = '{}/{}'.format(self.url, database)
self.session = requests.Session()
if auth is None:
auth = requests.auth.HTTPBasicAuth(self.username, password if password else self.DEFAULT_PASSWORD)
self.session.auth = auth
def post(self, path, **kwargs):
return self.__wrap(self.session.post(self.url + path, **kwargs))
def put(self, path, **kwargs):
return self.__wrap(self.session.put(self.url + path, **kwargs))
def get(self, path, **kwargs):
return self.__wrap(self.session.get(self.url + path, **kwargs))
def delete(self, path, **kwargs):
return self.__wrap(self.session.delete(self.url + path, **kwargs))
def close(self):
self.session.close()
    def __wrap(self, response):
        if not response.ok:
            try:
                msg = response.json()
            except ValueError:
                # sometimes errors come as strings
                msg = {'message': response.text}
            raise exceptions.StardogException('[{}] {}: {}'.format(
                response.status_code, msg.get('code', ''), msg.get(
                    'message', '')))
        return response
def _multipart(self, response):
decoder = multipart.decoder.MultipartDecoder.from_response(response)
return [part.content for part in decoder.parts]
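
# A minimal usage sketch; the endpoint, database name and credentials are
# placeholder assumptions (the defaults mirror the class constants above),
# and '/size' is Stardog's database-size endpoint:
#
#     client = Client(endpoint='http://localhost:5820', database='mydb',
#                     username='admin', password='admin')
#     response = client.get('/size')
#     print(response.text)
#     client.close()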
| 33.523077
| 110
| 0.61129
|
f4f7d41d4a4bda02ed3634ed04a864a895b0af6c
| 4,728
|
py
|
Python
|
acidrain.py
|
alaxa27/competitive
|
f60f1470ba12e83e483127c109d1f60f22b88130
|
[
"Apache-2.0"
] | null | null | null |
acidrain.py
|
alaxa27/competitive
|
f60f1470ba12e83e483127c109d1f60f22b88130
|
[
"Apache-2.0"
] | null | null | null |
acidrain.py
|
alaxa27/competitive
|
f60f1470ba12e83e483127c109d1f60f22b88130
|
[
"Apache-2.0"
] | null | null | null |
class AcidRain():
def create_map(self, b, e, y):
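        # Build self.MAP: one row per height level (the rows of self.L), one
        # column per x position. -1 marks an empty cell; any other value marks
        # a cell covered by a shield and is later used by what_is_below() as
        # an index into self.b / self.e.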
size = max(e) - min(b)
self.MAP = []
for i in range(0, len(self.L)):
self.MAP.append((size+2)*[-1])
"""
for i in range(0, len(self.L)):
for j in range(b[i], e[i]+1):
self.MAP[i][j] = i
"""
L = self.L
for i in range(0, len(L)):
for j in range(0, len(L[i])):
for h in range(L[i][j][0], L[i][j][1]+1):
self.MAP[i][h] = i+j
def what_is_below(self, x, y):
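        # Scan downward from level y in column x. Returns True when the drop
        # is deflected safely (the column sits at the map border, or a shield
        # occupies ground level), False when it falls through to the crops,
        # and [x, n] when it lands on top of shield n.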
if x == min(self.b) or x == max(self.e):
return True
if y > 0:
if self.MAP[y-1][x] != -1:
return [x, self.MAP[y-1][x]]
else:
return self.what_is_below(x, y-1)
if y == 0:
if self.MAP[y][x+1] == -1:
return False
if self.MAP[y][x] == -1:
return False
            else:
                return True
def simule_left(self, shield, level):
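        # Simulate a drop rolling off the left edge of `shield` at `level`.
        # Returns True if every path ends safely and False if any path reaches
        # the crops; simule_right() below is the mirror image for the right
        # edge. When the drop hits a shield exactly at the midpoint ratio
        # computed here, both directions are simulated.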
r = self.what_is_below(shield[0], level)
if not r:
return False
if r == True:
return True
over = float(r[0]) / (self.e[r[1]]-self.b[r[1]])
n = r[1]
if over > 0.5:
return self.simule_right([self.b[n], self.e[n]], level - 1)
elif over < 0.5:
return self.simule_left([self.b[n], self.e[n]], level - 1)
elif over == 0.5:
a = self.simule_right([self.b[n], self.e[n]], level - 1)
b = self.simule_left([self.b[n], self.e[n]], level - 1)
return a and b
def simule_right(self, shield, level):
r = self.what_is_below(shield[1], level)
if not r:
return False
if r == True:
return True
over = float(r[0]) / (self.e[r[1]]-self.b[r[1]])
n = r[1]
if over > 0.5:
return self.simule_right([self.b[n], self.e[n]], level - 1)
elif over < 0.5:
return self.simule_left([self.b[n], self.e[n]], level - 1)
elif over == 0.5:
a = self.simule_right([self.b[n], self.e[n]], level - 1)
b = self.simule_left([self.b[n], self.e[n]], level - 1)
return a and b
def saveHarvest(self, b, e, y):
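        # Bubble-sorts the shields by height, groups shields of equal height
        # into the levels of self.L, then repeatedly widens each shield (right
        # edge first, then left edge) until drops simulated from both edges
        # are deflected safely. self.length accumulates the total extension
        # and is returned at the end.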
b = list(b)
e = list(e)
y = list(y)
self.length = 0
if len(y) > 1:
            Sorted = all(y[i] <= y[i+1] for i in range(len(y)-1))
while not Sorted:
for i in range(1, len(y)):
if y[i] < y[i-1]:
h = y[i]
y[i] = y[i-1]
y[i-1] = h
h = b[i]
b[i] = b[i-1]
b[i-1] = h
h = e[i]
e[i] = e[i-1]
e[i-1] = h
                Sorted = all(y[i] <= y[i+1] for i in range(len(y)-1))
L = []
l = []
for i in range(0, len(y)):
l.append([])
for j in range(0, len(y)):
if y[i] == y[j]:
l[-1].append(j)
l[-1].sort()
a = []
for x in l:
if x not in a:
a.append(x)
l = a
for i in l:
L.append([])
for j in i:
L[-1].append([b[j], e[j]])
self.e = e
self.y = y
self.b = b
self.L = L
self.create_map(b, e, y)
self.MAP.reverse()
for i in self.MAP:
            print(i)
        print("\n")
self.create_map(b, e, y)
for i in range(1, len(L)):
Lprim = L[1:(i+1)]
for j in range(0, len(self.L[i])):
a = self.simule_right(self.L[i][j], i)
while not a:
self.L[i][j][1] += 1
self.length += 1
self.create_map(b, e, y)
a = self.simule_right(self.L[i][j], i)
a = self.simule_left(self.L[i][j], i)
while not a:
self.L[i][j][0] -= 1
self.length += 1
self.create_map(b, e, y)
a = self.simule_left(self.L[i][j], i)
self.create_map(b, e, y)
self.MAP.reverse()
for i in self.MAP:
            print(i)
if self.length == 4:
            return self.length - 2
return self.length
A = AcidRain()
a = A.saveHarvest([1, 0, 3, 5],[4, 3, 5, 6], [10, 3, 1000, 8])
#a = A.saveHarvest([0, 1], [2, 4], [1, 2])
#a = A.saveHarvest([1,2], [2,3], [1, 1])
print(a)
| 31.311258
| 71
| 0.389594
|
ce9ccb15174eea8d282d708db59a123cda7fb1e9
| 141,792
|
py
|
Python
|
researchon_backgroundcontrolling routerpsloit.py
|
tanc7/ArmsCommander-TestBed
|
e00bb166084735d8b0de058b54d6d98a057cd7d8
|
[
"FSFUL"
] | 1
|
2018-10-17T04:49:42.000Z
|
2018-10-17T04:49:42.000Z
|
researchon_backgroundcontrolling routerpsloit.py
|
tanc7/ArmsCommander-TestBed
|
e00bb166084735d8b0de058b54d6d98a057cd7d8
|
[
"FSFUL"
] | null | null | null |
researchon_backgroundcontrolling routerpsloit.py
|
tanc7/ArmsCommander-TestBed
|
e00bb166084735d8b0de058b54d6d98a057cd7d8
|
[
"FSFUL"
] | null | null | null |
"""
if not os.path.exists(self.history_file):
open(self.history_file, 'a+').close()
readline.read_history_file(self.history_file)
readline.set_history_length(self.history_length)
atexit.register(readline.write_history_file, self.history_file)
readline.parse_and_bind('set enable-keypad on')
readline.set_completer(self.complete)
readline.set_completer_delims(' \t\n;')
readline.parse_and_bind("tab: complete")
'description': 'Exploits ZTE F460 and F660 backdoor vulnerability that allows executing commands on operating system level.',
routersploit/modules/exploits/routers/zte/f460_f660_backdoor.py:55: print_info(self.execute(cmd))
routersploit/modules/exploits/routers/zte/f460_f660_backdoor.py:57: def execute(self, cmd):
routersploit/modules/exploits/routers/zte/f460_f660_backdoor.py:85: response = self.execute(cmd)
routersploit/modules/exploits/cameras/multi/P2P_wificam_rce.py:22: 'name': 'P2P wificam remote code execution',
routersploit/modules/exploits/cameras/multi/P2P_wificam_rce.py:25: unauthenticated remote code execution.""",
routersploit/modules/exploits/cameras/multi/P2P_wificam_rce.py:1303: def execute(self, cmd):
routersploit/modules/exploits/misc/asus/b1m_projector_rce.py:15: Exploit implementation for Asus B1M Projector Remote Code Execution vulnerability.
routersploit/modules/exploits/misc/asus/b1m_projector_rce.py:16: If the target is vulnerable, command loop is invoked that allows executing commands with root privileges.
routersploit/modules/exploits/misc/asus/b1m_projector_rce.py:20: 'description': 'Module exploits Asus B1M Projector Remote Code Execution vulnerability which '
routersploit/modules/exploits/misc/asus/b1m_projector_rce.py:21: 'allows executing command on operating system level with root privileges.',
routersploit/modules/exploits/misc/asus/b1m_projector_rce.py:45: def execute(self, cmd):
routersploit/modules/exploits/misc/asus/b1m_projector_rce.py:58: response_text = self.execute(cmd)
routersploit/modules/exploits/misc/wepresent/wipg1000_rce.py:16: If the target is vulnerable, it is possible to execute commands on operating system level.
routersploit/modules/exploits/misc/wepresent/wipg1000_rce.py:21: 'executing commands on operating system level.',
routersploit/modules/exploits/misc/wepresent/wipg1000_rce.py:48: def execute(self, cmd):
routersploit/modules/payloads/mipsbe/bind_tcp.py:82: # execve("//bin/sh", ["//bin/sh"], [/* 0 vars */]) = 0
routersploit/modules/payloads/mipsbe/bind_tcp.py:95: "\x24\x02\x0f\xab" + # li v0,4011 ( __NR_execve )
routersploit/modules/payloads/mipsle/bind_tcp.py:87: "\xab\x0f\x02\x24" + # li v0,4011 ( __NR_execve )
routersploit/modules/scanners/autopwn.py:142: with threads.ThreadPoolExecutor(self.threads) as executor:
routersploit/modules/scanners/autopwn.py:145: executor.submit(self.target_function, exploit)
Binary file routersploit/modules/scanners/autopwn.pyc matches
routersploit/modules/creds/http_basic_bruteforce.py:17:from routersploit.exceptions import StopThreadPoolExecutor
routersploit/modules/creds/http_basic_bruteforce.py:78: with threads.ThreadPoolExecutor(self.threads) as executor:
routersploit/modules/creds/http_basic_bruteforce.py:80: executor.submit(self.target_function, url, record)
routersploit/modules/creds/http_basic_bruteforce.py:101: raise StopThreadPoolExecutor
routersploit/modules/creds/http_digest_default.py:15:from routersploit.exceptions import StopThreadPoolExecutor
routersploit/modules/creds/http_digest_default.py:16:from routersploit.threads import ThreadPoolExecutor
routersploit/modules/creds/http_digest_default.py:72: with ThreadPoolExecutor(self.threads) as executor:
routersploit/modules/creds/http_digest_default.py:75: executor.submit(self.target_function, url, username, password)
routersploit/modules/creds/http_digest_default.py:98: raise StopThreadPoolExecutor
routersploit/modules/creds/http_digest_bruteforce.py:17:from routersploit.exceptions import StopThreadPoolExecutor
routersploit/modules/creds/http_digest_bruteforce.py:80: with threads.ThreadPoolExecutor(self.threads) as executor:
routersploit/modules/creds/http_digest_bruteforce.py:82: executor.submit(self.target_function, url, record)
routersploit/modules/creds/http_digest_bruteforce.py:103: raise StopThreadPoolExecutor
routersploit/modules/creds/http_basic_default.py:15:from routersploit.exceptions import StopThreadPoolExecutor
routersploit/modules/creds/http_basic_default.py:16:from routersploit.threads import ThreadPoolExecutor
routersploit/modules/creds/http_basic_default.py:70: with ThreadPoolExecutor(self.threads) as executor:
routersploit/modules/creds/http_basic_default.py:73: executor.submit(self.target_function, url, username, password)
routersploit/modules/creds/http_basic_default.py:96: raise StopThreadPoolExecutor
routersploit/threads.py:12:from .exceptions import StopThreadPoolExecutor
routersploit/threads.py:34: except StopThreadPoolExecutor:
routersploit/threads.py:42:class ThreadPoolExecutor(object):
Binary file routersploit/exceptions.pyc matches
Binary file routersploit/shell.pyc matches
routersploit/utils/__init__.py:113: before executing command specific to modules (ex. 'run').
routersploit/utils/__init__.py:179: multiple targets definition. Decorated function will be executed
Binary file routersploit/utils/__init__.pyc matches
routersploit/shell.py:33: print_success("Welcome to cmd. Commands are sent to the target via the execute method.")
routersploit/shell.py:117: params['exec_binary'] = data
routersploit/shell.py:129: print_status("Executing '{}' on the device...".format(cmd))
routersploit/shell.py:130: print_info(exploit.execute(cmd))
routersploit/shell.py:155: def __init__(self, exploit, payload, options, location="", wget_options={}, echo_options={}, exec_binary=None):
routersploit/shell.py:167: # process of executing payload
routersploit/shell.py:168: self.exec_binary = exec_binary
routersploit/shell.py:221: self.exploit.execute(cmd)
routersploit/shell.py:260: self.exploit.execute(cmd)
routersploit/shell.py:280: # set of instructions to execute payload on the device
routersploit/shell.py:281: if isinstance(self.exec_binary, list) or isinstance(self.exec_binary, tuple):
routersploit/shell.py:282: for item_exec_binary in self.exec_binary:
routersploit/shell.py:283: if isinstance(item_exec_binary, str):
routersploit/shell.py:285: commands.append(item_exec_binary.format(path))
routersploit/shell.py:287: commands.append(item_exec_binary)
routersploit/shell.py:288: elif callable(item_exec_binary):
routersploit/shell.py:289: commands.append(item_exec_binary(path))
routersploit/shell.py:291: # instruction to execute generic payload e.g. netcat / awk
routersploit/shell.py:292: elif isinstance(self.exec_binary, str):
routersploit/shell.py:294: commands.append(self.exec_binary.format(path))
routersploit/shell.py:296: commands.append(self.exec_binary)
routersploit/shell.py:298: # default way of executing payload
routersploit/shell.py:300: exec_binary_str = "chmod 777 {0}; {0}; rm {0}".format(path)
routersploit/shell.py:301: commands.append(exec_binary_str)
routersploit/shell.py:311: # execute binary
routersploit/shell.py:314: print_status("Executing payload on the device")
routersploit/shell.py:318: self.exploit.execute(command)
routersploit/shell.py:320: # asynchronous last command to execute binary & rm binary
routersploit/shell.py:321: thread = threading.Thread(target=self.exploit.execute, args=(commands[-1],))
routersploit/shell.py:336: # execute binary
routersploit/shell.py:341: self.exploit.execute(command)
routersploit/shell.py:343: # asynchronous last command to execute binary & rm binary
routersploit/shell.py:344: thread = threading.Thread(target=self.exploit.execute, args=(commands[-1],))
tests/test_completer.py:38: 'exec exit help search show use \r\n',
tests/test_completer.py:97: 'back exec help search setg use \r\n'
tests/test_completer.py:201: "back exec help search setg use \r\n"
tests/test_completer.py:214: 'back exec help search setg unsetg \r\n'
tests/test_interpreter.py:215: def test_command_run_exception_during_exploit_execution(self,
tests/test_interpreter.py:298: ['back', 'check', 'exec ', 'exit', 'help', 'run', 'search ',
tests/test_interpreter.py:307: ['back', 'check', 'exec ', 'exit', 'help', 'run', 'search ',
tests/test_interpreter.py:317: ['exec ', 'exit', 'help', 'search ', 'show ', 'use ']
tests/test_interpreter.py:677: def test_command_exec(self, mock_system):
tests/test_interpreter.py:678: self.interpreter.command_exec("foo -bar")
root@CRACK_COCAINE:~/Documents/routersploit# egrep -ix "show" * --color
grep: routersploit: Is a directory
grep: tests: Is a directory
root@CRACK_COCAINE:~/Documents/routersploit# egrep -irx "show" * --color
root@CRACK_COCAINE:~/Documents/routersploit# egrep -irx "show" * --color
root@CRACK_COCAINE:~/Documents/routersploit# egrep -irn "show" * --color
CONTRIBUTING.md:23:3. If exploit does not work but it should, check "show info" for more information. References should provide you with links to proof of concept exploits.
README.md:103: rsf (D-LINK DIR-300 & DIR-600 RCE) > show options
README.md:137: rsf (D-LINK DIR-300 & DIR-600 RCE) > show info
README.md:189: rsf (SSH Default Creds) > show options
README.md:250: rsf (D-Link Scanner) > show options
routersploit/interpreter.py:174: show [info|options|devices] Print information, options, or target devices for a module
routersploit/interpreter.py:185: self.show_sub_commands = (
routersploit/interpreter.py:191: ['use ', 'exec ', 'help', 'exit', 'show ', 'search ']
routersploit/interpreter.py:394: def _show_info(self, *args, **kwargs):
routersploit/interpreter.py:402: def _show_options(self, *args, **kwargs):
routersploit/interpreter.py:417: def _show_devices(self, *args, **kwargs): # TODO: cover with tests
routersploit/interpreter.py:433: def __show_modules(self, root=''):
routersploit/interpreter.py:437: def _show_all(self, *args, **kwargs):
routersploit/interpreter.py:438: self.__show_modules()
routersploit/interpreter.py:440: def _show_scanners(self, *args, **kwargs):
routersploit/interpreter.py:441: self.__show_modules('scanners')
routersploit/interpreter.py:443: def _show_exploits(self, *args, **kwargs):
routersploit/interpreter.py:444: self.__show_modules('exploits')
routersploit/interpreter.py:446: def _show_creds(self, *args, **kwargs):
routersploit/interpreter.py:447: self.__show_modules('creds')
routersploit/interpreter.py:449: def command_show(self, *args, **kwargs):
routersploit/interpreter.py:452: getattr(self, "_show_{}".format(sub_command))(*args, **kwargs)
routersploit/interpreter.py:454: utils.print_error("Unknown 'show' sub-command '{}'. "
routersploit/interpreter.py:455: "What do you want to show?\n"
routersploit/interpreter.py:456: "Possible choices are: {}".format(sub_command, self.show_sub_commands))
routersploit/interpreter.py:459: def complete_show(self, text, *args, **kwargs):
routersploit/interpreter.py:461: return [command for command in self.show_sub_commands if command.startswith(text)]
routersploit/interpreter.py:463: return self.show_sub_commands
Binary file routersploit/interpreter.pyc matches
routersploit/modules/exploits/routers/cisco/ios_http_authorization_bypass.py:19: Example: http://10.0.0.1/level/99/exec/show/startup/config
routersploit/modules/exploits/routers/cisco/ios_http_authorization_bypass.py:39: show_command = exploits.Option('show startup-config', 'Command to be executed e.g show startup-config')
routersploit/modules/exploits/routers/cisco/ios_http_authorization_bypass.py:45: url = "{}:{}/level/{}/exec/-/{}".format(self.target, self.port, self.access_level, self.show_command)
routersploit/modules/exploits/routers/cisco/ios_http_authorization_bypass.py:59: url = "{}:{}/level/{}/exec/-/{}".format(self.target, self.port, num, self.show_command)
routersploit/modules/exploits/routers/cisco/ios_http_authorization_bypass.py:64: if response.status_code == 200 and "Command was: {}".format(self.show_command) in response.text:
routersploit/modules/exploits/routers/cisco/catalyst_2960_rocem.py:44: device = exploits.Option(-1, 'Target device - use "show devices"', validators=validators.integer)
routersploit/modules/exploits/routers/cisco/catalyst_2960_rocem.py:55: # next bytes are shown as offsets from r1
routersploit/modules/exploits/routers/cisco/catalyst_2960_rocem.py:118: # next bytes are shown as offsets from r1
routersploit/modules/exploits/routers/cisco/catalyst_2960_rocem.py:176: print_error("Set target device - use \"show devices\" and \"set device <id>\"")
routersploit/modules/exploits/routers/multi/misfortune_cookie.py:129: device = exploits.Option('', 'Target device (show devices)') # target firmware
routersploit/modules/exploits/routers/multi/rom0.py:30: 'http://www.osvdb.org/show/osvdb/102668',
routersploit/modules/exploits/routers/2wire/4011g_5012nv_path_traversal.py:48: data = {"__ENH_SHOW_REDIRECT_PATH__": "/pages/C_4_0.asp/../../..{}".format(self.filename),
routersploit/modules/exploits/routers/2wire/4011g_5012nv_path_traversal.py:49: "__ENH_SUBMIT_VALUE_SHOW__": "Acceder",
routersploit/modules/exploits/routers/2wire/4011g_5012nv_path_traversal.py:68: data = {"__ENH_SHOW_REDIRECT_PATH__": "/pages/C_4_0.asp/../../../etc/passwd",
routersploit/modules/exploits/routers/2wire/4011g_5012nv_path_traversal.py:69: "__ENH_SUBMIT_VALUE_SHOW__": "Acceder",
routersploit/modules/exploits/routers/dlink/dir_300_320_600_615_info_disclosure.py:43: url = "{}:{}/model/__show_info.php?REQUIRE_FILE=/var/etc/httpasswd".format(self.target, self.port)
routersploit/modules/exploits/routers/dlink/dir_300_320_600_615_info_disclosure.py:60: url = "{}:{}/model/__show_info.php?REQUIRE_FILE=/var/etc/httpasswd".format(self.target, self.port)
Binary file routersploit/shell.pyc matches
routersploit/utils/__init__.py:369: Pretty printing dictionary in specific order. (as in 'show info' command)
Binary file routersploit/utils/__init__.pyc matches
routersploit/shell.py:35: print_status("For further exploitation use 'show payloads' and 'set payload <payload>' commands.")
routersploit/shell.py:52: elif cmd == "show payloads":
routersploit/shell.py:79: if cmd == "show options":
tests/test_completer.py:38: 'exec exit help search show use \r\n',
tests/test_completer.py:98: 'check exit run set show \r\n',
tests/test_completer.py:138: 'search set setg show \r\n',
tests/test_completer.py:202: "check exit run set show \r\n",
tests/test_completer.py:215: 'check exit run set show use \r\n',
tests/test_completer.py:244: def test_complete_show_raw(self):
tests/test_completer.py:249: 'show ',
tests/test_completer.py:252: def test_complete_show(self):
tests/test_completer.py:254: self.rsf.send("show \t\t")
tests/test_completer.py:261: def test_complete_show_info(self):
tests/test_completer.py:263: self.rsf.send("show i\t\t")
tests/test_completer.py:266: 'show info'
tests/test_completer.py:269: def test_complete_show_options(self):
tests/test_completer.py:271: self.rsf.send("show o\t\t")
tests/test_completer.py:274: 'show options'
tests/test_interpreter.py:299: 'set ', 'setg ', 'show ', 'use ']
tests/test_interpreter.py:308: 'set ', 'setg ', 'show ', 'unsetg ', 'use ']
tests/test_interpreter.py:317: ['exec ', 'exit', 'help', 'search ', 'show ', 'use ']
tests/test_interpreter.py:418: def test_show_info(self, mock_print):
tests/test_interpreter.py:430: self.interpreter._show_info()
tests/test_interpreter.py:449: def test_command_show_info_module_with_no_metadata(self, mock_print):
tests/test_interpreter.py:455: self.interpreter._show_info()
tests/test_interpreter.py:462: def test_show_options(self, mock_print):
tests/test_interpreter.py:481: self.interpreter._show_options()
tests/test_interpreter.py:509: def test_command_show_options_when_there_is_no_module_opts(self,
tests/test_interpreter.py:522: self.interpreter._show_options()
tests/test_interpreter.py:541: def test_command_show(self):
tests/test_interpreter.py:543: "_show_options") as mock_show_options:
tests/test_interpreter.py:544: self.interpreter.command_show("options")
tests/test_interpreter.py:545: mock_show_options.assert_called_once_with("options")
tests/test_interpreter.py:548: def test_command_show_unknown_sub_command(self, mock_print_error):
tests/test_interpreter.py:549: self.interpreter.command_show('unknown_sub_command')
tests/test_interpreter.py:551: "Unknown 'show' sub-command 'unknown_sub_command'. "
tests/test_interpreter.py:552: "What do you want to show?\n"
tests/test_interpreter.py:554: self.interpreter.show_sub_commands))
tests/test_interpreter.py:557: def test_show_all(self, mock_print):
tests/test_interpreter.py:567: self.interpreter._show_all()
tests/test_interpreter.py:581: def test_show_scanners(self, mock_print):
tests/test_interpreter.py:591: self.interpreter._show_scanners()
tests/test_interpreter.py:598: def test_show_exploits(self, mock_print):
tests/test_interpreter.py:608: self.interpreter._show_exploits()
tests/test_interpreter.py:615: def test_show_creds(self, mock_print):
tests/test_interpreter.py:625: self.interpreter._show_creds()
tests/test_interpreter.py:643: def test_if_command_show_info_has_module_required_decorator(self):
tests/test_interpreter.py:645: self.interpreter._show_info,
tests/test_interpreter.py:649: def test_if_command_show_options_has_module_required_decorator(self):
tests/test_interpreter.py:651: self.interpreter._show_options,
tests/test_interpreter.py:655: def test_if_command_show_devices_has_module_required_decorator(self):
tests/test_interpreter.py:657: self.interpreter._show_devices,
tests/test_interpreter.py:672: cmd, args = self.interpreter.parse_line("show options")
tests/test_interpreter.py:673: self.assertEqual(cmd, "show")
root@CRACK_COCAINE:~/Documents/routersploit# egrep -irn "cmd ==" * --color
routersploit/interpreter.py:117: if cmd == '':
routersploit/shell.py:52: elif cmd == "show payloads":
routersploit/shell.py:79: if cmd == "show options":
routersploit/shell.py:102: elif cmd == "run":
routersploit/shell.py:125: elif cmd == "back":
root@CRACK_COCAINE:~/Documents/routersploit# ls
CONTRIBUTING.md LICENSE README.md requirements.txt routersploit.log tests
Dockerfile Makefile requirements-dev.txt routersploit rsf.py
root@CRACK_COCAINE:~/Documents/routersploit# ls */*.log
ls: cannot access '*/*.log': No such file or directory
root@CRACK_COCAINE:~/Documents/routersploit# ls */*.log
ls: cannot access '*/*.log': No such file or directory
root@CRACK_COCAINE:~/Documents/routersploit# ls *.log/*.log
ls: cannot access '*.log/*.log': No such file or directory
root@CRACK_COCAINE:~/Documents/routersploit# ls
CONTRIBUTING.md LICENSE README.md requirements.txt routersploit.log tests
Dockerfile Makefile requirements-dev.txt routersploit rsf.py
root@CRACK_COCAINE:~/Documents/routersploit# cd routersploit
root@CRACK_COCAINE:~/Documents/routersploit/routersploit# ls
exceptions.py exploits.pyc interpreter.py payloads.py printer.pyc templates utils wordlists
exceptions.pyc __init__.py interpreter.pyc payloads.pyc shell.py threads.py validators.py
exploits.py __init__.pyc modules printer.py shell.pyc threads.pyc validators.pyc
root@CRACK_COCAINE:~/Documents/routersploit/routersploit# ls *.log
ls: cannot access '*.log': No such file or directory
root@CRACK_COCAINE:~/Documents/routersploit/routersploit# ls -la
total 164
drwxr-xr-x 6 root root 4096 Dec 13 10:58 .
drwxr-xr-x 6 root root 4096 Dec 13 10:58 ..
-rw-r--r-- 1 root root 343 Dec 13 10:15 exceptions.py
-rw-r--r-- 1 root root 945 Dec 13 10:31 exceptions.pyc
-rw-r--r-- 1 root root 4078 Dec 13 10:15 exploits.py
-rw-r--r-- 1 root root 5152 Dec 13 10:31 exploits.pyc
-rw-r--r-- 1 root root 460 Dec 13 10:15 __init__.py
-rw-r--r-- 1 root root 797 Dec 13 10:31 __init__.pyc
-rw-r--r-- 1 root root 18793 Dec 13 10:15 interpreter.py
-rw-r--r-- 1 root root 21898 Dec 13 10:58 interpreter.pyc
drwxr-xr-x 6 root root 4096 Dec 13 10:31 modules
-rw-r--r-- 1 root root 5804 Dec 13 10:15 payloads.py
-rw-r--r-- 1 root root 5797 Dec 13 10:31 payloads.pyc
-rw-r--r-- 1 root root 618 Dec 13 10:15 printer.py
-rw-r--r-- 1 root root 1172 Dec 13 10:31 printer.pyc
-rw-r--r-- 1 root root 12397 Dec 13 10:15 shell.py
-rw-r--r-- 1 root root 10669 Dec 13 10:31 shell.pyc
drwxr-xr-x 2 root root 4096 Dec 13 10:15 templates
-rw-r--r-- 1 root root 2457 Dec 13 10:15 threads.py
-rw-r--r-- 1 root root 3417 Dec 13 10:58 threads.pyc
drwxr-xr-x 2 root root 4096 Dec 13 10:31 utils
-rw-r--r-- 1 root root 2725 Dec 13 10:15 validators.py
-rw-r--r-- 1 root root 3328 Dec 13 10:31 validators.pyc
drwxr-xr-x 2 root root 4096 Dec 13 10:31 wordlists
root@CRACK_COCAINE:~/Documents/routersploit/routersploit# atom shell.py
root@CRACK_COCAINE:~/Documents/routersploit/routersploit# ls
exceptions.py exploits.pyc interpreter.py payloads.py printer.pyc templates utils wordlists
exceptions.pyc __init__.py interpreter.pyc payloads.pyc shell.py threads.py validators.py
exploits.py __init__.pyc modules printer.py shell.pyc threads.pyc validators.pyc
root@CRACK_COCAINE:~/Documents/routersploit/routersploit# python shell.py
Traceback (most recent call last):
File "shell.py", line 12, in <module>
from routersploit import validators
ImportError: No module named routersploit
root@CRACK_COCAINE:~/Documents/routersploit/routersploit# python shell.py
root@CRACK_COCAINE:~/Documents/routersploit/routersploit# ps aux
USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND
root 1 0.0 0.0 220016 8588 ? Ss 05:49 0:04 /sbin/init
root 2 0.0 0.0 0 0 ? S 05:49 0:00 [kthreadd]
root 3 0.0 0.0 0 0 ? S 05:49 0:00 [ksoftirqd/0]
root 5 0.0 0.0 0 0 ? S< 05:49 0:00 [kworker/0:0H]
root 7 0.0 0.0 0 0 ? S 05:49 0:02 [rcu_sched]
root 8 0.0 0.0 0 0 ? S 05:49 0:00 [rcu_bh]
root 9 0.0 0.0 0 0 ? S 05:49 0:00 [migration/0]
root 10 0.0 0.0 0 0 ? S< 05:49 0:00 [lru-add-drain]
root 11 0.0 0.0 0 0 ? S 05:49 0:00 [watchdog/0]
root 12 0.0 0.0 0 0 ? S 05:49 0:00 [cpuhp/0]
root 13 0.0 0.0 0 0 ? S 05:49 0:00 [cpuhp/1]
root 14 0.0 0.0 0 0 ? S 05:49 0:00 [watchdog/1]
root 15 0.0 0.0 0 0 ? S 05:49 0:00 [migration/1]
root 16 0.0 0.0 0 0 ? S 05:49 0:00 [ksoftirqd/1]
root 18 0.0 0.0 0 0 ? S< 05:49 0:00 [kworker/1:0H]
root 19 0.0 0.0 0 0 ? S 05:49 0:00 [cpuhp/2]
root 20 0.0 0.0 0 0 ? S 05:49 0:00 [watchdog/2]
root 21 0.0 0.0 0 0 ? S 05:49 0:00 [migration/2]
root 22 0.0 0.0 0 0 ? S 05:49 0:00 [ksoftirqd/2]
root 24 0.0 0.0 0 0 ? S< 05:49 0:00 [kworker/2:0H]
root 25 0.0 0.0 0 0 ? S 05:49 0:00 [cpuhp/3]
root 26 0.0 0.0 0 0 ? S 05:49 0:00 [watchdog/3]
root 27 0.0 0.0 0 0 ? S 05:49 0:00 [migration/3]
root 28 0.0 0.0 0 0 ? S 05:49 0:00 [ksoftirqd/3]
root 30 0.0 0.0 0 0 ? S< 05:49 0:00 [kworker/3:0H]
root 31 0.0 0.0 0 0 ? S 05:49 0:00 [kdevtmpfs]
root 32 0.0 0.0 0 0 ? S< 05:49 0:00 [netns]
root 33 0.0 0.0 0 0 ? S 05:49 0:00 [khungtaskd]
root 34 0.0 0.0 0 0 ? S 05:49 0:00 [oom_reaper]
root 35 0.0 0.0 0 0 ? S< 05:49 0:00 [writeback]
root 36 0.0 0.0 0 0 ? S 05:49 0:00 [kcompactd0]
root 38 0.0 0.0 0 0 ? SN 05:49 0:00 [ksmd]
root 39 0.0 0.0 0 0 ? SN 05:49 0:00 [khugepaged]
root 40 0.0 0.0 0 0 ? S< 05:49 0:00 [crypto]
root 41 0.0 0.0 0 0 ? S< 05:49 0:00 [kintegrityd]
root 42 0.0 0.0 0 0 ? S< 05:49 0:00 [bioset]
root 43 0.0 0.0 0 0 ? S< 05:49 0:00 [kblockd]
root 47 0.0 0.0 0 0 ? S< 05:49 0:00 [devfreq_wq]
root 48 0.0 0.0 0 0 ? S< 05:49 0:00 [watchdogd]
root 49 0.0 0.0 0 0 ? S 05:49 0:00 [kswapd0]
root 50 0.0 0.0 0 0 ? S< 05:49 0:00 [vmstat]
root 62 0.0 0.0 0 0 ? S< 05:49 0:00 [kthrotld]
root 63 0.0 0.0 0 0 ? S< 05:49 0:00 [ipv6_addrconf]
root 109 0.0 0.0 0 0 ? S< 05:49 0:00 [acpi_thermal_pm]
root 111 0.0 0.0 0 0 ? S< 05:49 0:00 [ata_sff]
root 152 0.0 0.0 0 0 ? S 05:49 0:00 [scsi_eh_0]
root 153 0.0 0.0 0 0 ? S< 05:49 0:00 [scsi_tmf_0]
root 154 0.0 0.0 0 0 ? S 05:49 0:00 [scsi_eh_1]
root 155 0.0 0.0 0 0 ? S< 05:49 0:00 [scsi_tmf_1]
root 156 0.0 0.0 0 0 ? S 05:49 0:00 [scsi_eh_2]
root 157 0.0 0.0 0 0 ? S< 05:49 0:00 [scsi_tmf_2]
root 158 0.0 0.0 0 0 ? S 05:49 0:00 [scsi_eh_3]
root 159 0.0 0.0 0 0 ? S< 05:49 0:00 [scsi_tmf_3]
root 160 0.0 0.0 0 0 ? S 05:49 0:00 [scsi_eh_4]
root 161 0.0 0.0 0 0 ? S< 05:49 0:00 [scsi_tmf_4]
root 162 0.0 0.0 0 0 ? S 05:49 0:00 [scsi_eh_5]
root 163 0.0 0.0 0 0 ? S< 05:49 0:00 [scsi_tmf_5]
root 171 0.0 0.0 0 0 ? S< 05:49 0:00 [bioset]
root 172 0.0 0.0 0 0 ? S< 05:49 0:00 [bioset]
root 173 0.0 0.0 0 0 ? S< 05:49 0:00 [bioset]
root 174 0.0 0.0 0 0 ? S< 05:49 0:00 [bioset]
root 176 0.0 0.0 0 0 ? S< 05:49 0:00 [kworker/0:1H]
root 180 0.0 0.0 0 0 ? S< 05:49 0:00 [kworker/2:1H]
root 181 0.0 0.0 0 0 ? S< 05:49 0:00 [kworker/3:1H]
root 182 0.0 0.0 0 0 ? S< 05:49 0:00 [kworker/1:1H]
root 187 0.0 0.0 0 0 ? S< 05:49 0:00 [md]
root 209 0.0 0.0 0 0 ? S< 05:49 0:00 [raid5wq]
root 226 0.0 0.0 0 0 ? S< 05:49 0:00 [bioset]
root 265 0.0 0.0 0 0 ? S< 05:49 0:00 [ext4-rsv-conver]
root 312 0.0 0.0 81684 10600 ? Ss 05:49 0:00 /lib/systemd/systemd-journald
root 317 0.0 0.0 0 0 ? S 05:49 0:00 [kauditd]
root 357 0.0 0.0 0 0 ? S 05:49 0:00 [nvidia-modeset]
root 360 0.0 0.0 46012 5316 ? Ss 05:49 0:00 /lib/systemd/systemd-udevd
root 394 0.0 0.0 0 0 ? S< 05:49 0:00 [edac-poller]
root 401 0.0 0.0 0 0 ? S 05:49 0:00 [irq/31-mei_me]
systemd+ 444 0.0 0.0 147096 5156 ? Ssl 05:49 0:00 /lib/systemd/systemd-timesyncd
root 578 0.0 0.0 0 0 ? S 05:49 0:00 [scsi_eh_6]
root 579 0.0 0.0 0 0 ? S< 05:49 0:00 [scsi_tmf_6]
root 580 0.0 0.0 0 0 ? S 05:49 0:00 [usb-storage]
root 599 0.0 0.0 0 0 ? S< 05:49 0:00 [bioset]
root 645 0.0 0.0 29624 2928 ? Ss 05:49 0:00 /usr/sbin/cron -f
root 646 0.0 0.0 427140 9064 ? Ssl 05:49 0:00 /usr/sbin/ModemManager
root 649 0.0 0.0 25336 4608 ? Ss 05:49 0:00 /usr/sbin/smartd -n
root 654 0.0 0.0 116156 3388 ? Ssl 05:49 0:00 /usr/sbin/irqbalance --foreground
root 655 0.0 0.0 275496 4004 ? Ssl 05:49 0:00 /usr/sbin/rsyslogd -n
message+ 656 0.0 0.0 48772 5356 ? Ss 05:49 0:03 /usr/bin/dbus-daemon --system --address=systemd: --nofork --no
root 662 0.0 0.0 50912 2976 ? S 05:49 0:00 /usr/sbin/CRON -f
root 667 0.0 0.0 50912 2960 ? S 05:49 0:00 /usr/sbin/CRON -f
root 679 0.0 0.1 454332 15496 ? Ssl 05:49 0:00 /usr/sbin/NetworkManager --no-daemon
root 681 0.0 0.0 65236 5724 ? Ss 05:49 0:00 /lib/systemd/systemd-logind
root 682 0.0 0.0 285712 6608 ? Ssl 05:49 0:00 /usr/lib/accountsservice/accounts-daemon
rtkit 684 0.0 0.0 187832 3024 ? SNsl 05:49 0:00 /usr/lib/rtkit/rtkit-daemon
root 699 0.0 0.0 290228 8484 ? Ssl 05:49 0:00 /usr/lib/policykit-1/polkitd --no-debug
root 712 0.0 0.0 4312 760 ? Ss 05:49 0:00 /bin/sh -c python /root/ArmsCommander/passwordattacks/autostar
root 725 0.0 0.0 4312 740 ? Ss 05:49 0:00 /bin/sh -c /bin/sh /usr/local/bin/IDS.sh
root 726 0.0 0.0 32088 9892 ? S 05:49 0:00 python /root/ArmsCommander/passwordattacks/autostart_password_
root 732 0.0 0.0 4312 772 ? S 05:49 0:00 /bin/sh /usr/local/bin/IDS.sh
root 775 0.0 0.0 8500 1564 ? Ss 05:49 0:00 nvidia-persistenced --persistence-mode
root 778 0.7 0.0 0 0 ? S 05:49 3:16 [irq/33-nvidia]
root 779 0.0 0.0 0 0 ? S 05:49 0:00 [nvidia]
pulse 825 0.0 0.0 357068 11264 ? S<l 05:49 0:00 pulseaudio -D --system
root 836 0.0 0.0 19872 9060 ? Ss 05:49 0:00 /sbin/mount.ntfs /dev/sda2 /mnt/Data2 -o rw
root 840 0.0 0.1 190896 14508 ? S 05:49 0:06 /usr/bin/python -O /usr/share/wicd/daemon/wicd-daemon.py --kee
root 842 0.0 0.0 13300 2648 ? Ss 05:49 0:00 /sbin/mount.ntfs /dev/sdb2 /mnt/Data3 -o rw
root 858 0.0 0.1 111348 17848 ? S 05:49 0:02 /usr/bin/python -O /usr/share/wicd/daemon/monitor.py
root 900 0.0 0.0 13052 2388 ? Ss 05:49 0:00 /sbin/mount.ntfs /dev/sdc1 /mnt/Data4 -o rw
root 909 0.0 0.0 0 0 ? S< 05:49 0:00 [iprt-VBoxWQueue]
root 921 0.0 0.0 0 0 ? S 05:49 0:00 [iprt-VBoxTscThr]
root 942 0.0 0.0 20484 1040 ? Ss 05:49 0:00 dhclient eth0
root 951 0.0 0.0 71996 5564 ? Ss 05:49 0:00 /usr/sbin/sshd -D
root 962 0.0 0.0 371556 7600 ? Ssl 05:49 0:00 /usr/sbin/gdm3
root 984 0.0 0.0 243928 7564 ? Sl 05:49 0:00 gdm-session-worker [pam/gdm-launch-environment]
Debian-+ 1019 0.0 0.0 80020 7988 ? Ss 05:49 0:00 /lib/systemd/systemd --user
Debian-+ 1020 0.0 0.0 102172 2384 ? S 05:49 0:00 (sd-pam)
root 1039 0.0 0.0 0 0 ? S 05:49 0:00 [UVM global queu]
root 1041 0.0 0.0 0 0 ? S 05:49 0:00 [UVM Tools Event]
Debian-+ 1062 0.0 0.0 203236 5324 tty1 Ssl+ 05:49 0:00 /usr/lib/gdm3/gdm-x-session gnome-session --autostart /usr/sha
root 1073 0.0 0.4 267228 49324 tty1 Sl+ 05:49 0:01 /usr/lib/xorg/Xorg vt1 -displayfd 3 -auth /run/user/132/gdm/Xa
Debian-+ 1175 0.0 0.0 47336 4032 ? Ss 05:49 0:00 /usr/bin/dbus-daemon --session --address=systemd: --nofork --n
Debian-+ 1177 0.0 0.1 553208 12440 tty1 Sl+ 05:49 0:00 /usr/lib/gnome-session/gnome-session-binary --autostart /usr/s
Debian-+ 1183 0.0 0.0 355200 6228 ? Ssl 05:49 0:00 /usr/lib/at-spi2-core/at-spi-bus-launcher
Debian-+ 1188 0.0 0.0 47116 3488 ? S 05:49 0:00 /usr/bin/dbus-daemon --config-file=/usr/share/defaults/at-spi2
Debian-+ 1191 0.0 0.0 222348 5312 ? Sl 05:49 0:00 /usr/lib/at-spi2-core/at-spi2-registryd --use-gnome-session
Debian-+ 1200 0.0 1.3 2274720 159764 tty1 Sl+ 05:49 0:03 /usr/bin/gnome-shell
root 1204 0.0 0.0 313532 8516 ? Ssl 05:49 0:00 /usr/lib/upower/upowerd
Debian-+ 1236 0.0 0.0 1229632 11912 ? Ssl 05:49 0:00 /usr/bin/pulseaudio --daemonize=no
root 1248 0.0 0.2 444476 31020 ? Ssl 05:50 0:02 /usr/lib/packagekit/packagekitd
Debian-+ 1249 0.0 0.2 1025836 29760 tty1 Sl+ 05:50 0:00 /usr/lib/gnome-settings-daemon/gnome-settings-daemon
root 1263 0.0 0.0 48328 5132 ? Ss 05:50 0:00 /sbin/wpa_supplicant -u -s -O /run/wpa_supplicant
colord 1269 0.0 0.1 317520 13508 ? Ssl 05:50 0:00 /usr/lib/colord/colord
root 1400 0.0 0.0 4312 1648 ? S 05:54 0:00 /bin/sh /root/Desktop/external_scans_njp_dnac/edit_2__bash_scr
root 1401 0.0 0.0 4312 1632 ? S 05:54 0:00 /bin/sh /root/Desktop/external_scans_njp_dnac/nmap_ssl_detecti
root 1434 0.0 1.4 594104 179572 ? Sl 05:54 0:03 snort -q -A full -c /etc/snort/snort.conf
postgres 1508 0.0 0.1 276848 24164 ? S 05:54 0:00 /usr/lib/postgresql/9.5/bin/postgres -D /var/lib/postgresql/9.
postgres 1511 0.0 0.2 293676 25356 ? S 05:54 0:00 /usr/lib/postgresql/9.6/bin/postgres -D /var/lib/postgresql/9.
postgres 1523 0.0 0.0 293676 3976 ? Ss 05:54 0:00 postgres: 9.6/main: checkpointer process
postgres 1524 0.0 0.0 293676 3976 ? Ss 05:54 0:00 postgres: 9.6/main: writer process
postgres 1525 0.0 0.0 293676 3976 ? Ss 05:54 0:00 postgres: 9.6/main: wal writer process
postgres 1526 0.0 0.0 294104 6396 ? Ss 05:54 0:00 postgres: 9.6/main: autovacuum launcher process
postgres 1527 0.0 0.0 148676 3172 ? Ss 05:54 0:00 postgres: 9.6/main: stats collector process
postgres 1534 0.0 0.0 276948 10940 ? Ss 05:54 0:00 postgres: 9.5/main: checkpointer process
postgres 1535 0.0 0.0 276848 6196 ? Ss 05:54 0:00 postgres: 9.5/main: writer process
postgres 1536 0.0 0.0 276848 9544 ? Ss 05:54 0:00 postgres: 9.5/main: wal writer process
postgres 1537 0.0 0.0 277280 6780 ? Ss 05:54 0:00 postgres: 9.5/main: autovacuum launcher process
postgres 1538 0.0 0.0 132004 4860 ? Ss 05:54 0:00 postgres: 9.5/main: stats collector process
root 4164 0.0 0.0 248052 7768 ? Sl 05:59 0:00 gdm-session-worker [pam/gdm-password]
root 4169 0.0 0.0 71576 7748 ? Ss 05:59 0:00 /lib/systemd/systemd --user
root 4170 0.0 0.0 249636 2424 ? S 05:59 0:00 (sd-pam)
root 4177 0.0 0.0 287764 7876 ? Sl 05:59 0:00 /usr/bin/gnome-keyring-daemon --daemonize --login
root 4181 0.0 0.0 203236 5464 tty2 Ssl+ 05:59 0:00 /usr/lib/gdm3/gdm-x-session --run-script default
root 4183 0.6 0.6 315908 73964 tty2 Sl+ 05:59 2:49 /usr/lib/xorg/Xorg vt2 -displayfd 3 -auth /run/user/0/gdm/Xaut
root 4187 0.0 0.0 48112 5004 ? Ss 05:59 0:00 /usr/bin/dbus-daemon --session --address=systemd: --nofork --n
root 4189 0.0 0.1 774704 13264 tty2 Sl+ 05:59 0:00 /usr/lib/gnome-session/gnome-session-binary
root 4244 0.0 0.0 11100 332 ? Ss 05:59 0:00 /usr/bin/ssh-agent x-session-manager
root 4251 0.0 0.0 355208 6404 ? Ssl 05:59 0:00 /usr/lib/at-spi2-core/at-spi-bus-launcher
root 4256 0.0 0.0 47240 3908 ? S 05:59 0:00 /usr/bin/dbus-daemon --config-file=/usr/share/defaults/at-spi2
root 4259 0.0 0.0 222348 6844 ? Sl 05:59 0:02 /usr/lib/at-spi2-core/at-spi2-registryd --use-gnome-session
root 4276 1.2 4.2 3012136 523172 tty2 Sl+ 05:59 5:04 /usr/bin/gnome-shell
root 4279 0.0 0.0 285212 6796 ? Ssl 05:59 0:00 /usr/lib/gvfs/gvfsd
root 4284 0.0 0.0 417780 5472 ? Sl 05:59 0:00 /usr/lib/gvfs/gvfsd-fuse /run/user/0/gvfs -f -o big_writes
root 4294 0.0 0.1 2284756 13428 ? S<sl 05:59 0:01 /usr/bin/pulseaudio --daemonize=no
root 4301 0.0 0.1 615016 15260 ? Sl 05:59 0:00 /usr/lib/gnome-shell/gnome-shell-calendar-server
root 4312 0.0 0.1 1229040 21172 ? Ssl 05:59 0:00 /usr/lib/evolution/evolution-source-registry
root 4317 0.0 0.0 468280 11700 ? Ssl 05:59 0:00 /usr/lib/telepathy/mission-control-5
root 4322 0.0 0.2 769136 33164 ? Sl 05:59 0:00 /usr/lib/gnome-online-accounts/goa-daemon
root 4324 0.0 0.0 359628 11140 ? Ssl 05:59 0:00 /usr/lib/gvfs/gvfs-udisks2-volume-monitor
root 4328 0.0 0.0 380244 8420 ? Ssl 05:59 0:06 /usr/lib/udisks2/udisksd --no-debug
root 4335 0.0 0.0 269568 5936 ? Ssl 05:59 0:00 /usr/lib/gvfs/gvfs-goa-volume-monitor
root 4348 0.0 0.0 370416 7452 ? Sl 05:59 0:00 /usr/lib/gnome-online-accounts/goa-identity-service
root 4357 0.0 0.0 271368 5204 ? Ssl 05:59 0:00 /usr/lib/gvfs/gvfs-mtp-volume-monitor
root 4361 0.0 0.0 374324 7344 ? Ssl 05:59 0:00 /usr/lib/gvfs/gvfs-afc-volume-monitor
root 4366 0.0 0.0 283740 6240 ? Ssl 05:59 0:00 /usr/lib/gvfs/gvfs-gphoto2-volume-monitor
root 4372 0.0 0.3 1501824 40612 tty2 Sl+ 05:59 0:01 /usr/lib/gnome-settings-daemon/gnome-settings-daemon
root 4383 0.0 0.0 187504 4868 ? Sl 05:59 0:00 /usr/lib/dconf/dconf-service
root 4402 0.0 0.0 442468 11988 tty2 SNl+ 05:59 0:00 /usr/lib/tracker/tracker-miner-apps
root 4406 0.0 0.0 503548 11800 tty2 Sl+ 05:59 0:00 /usr/lib/gnome-settings-daemon/gsd-printer
root 4407 0.0 0.2 896700 25800 tty2 SNl+ 05:59 0:01 /usr/lib/tracker/tracker-extract
root 4413 0.0 0.2 697632 34676 tty2 Sl+ 05:59 0:08 psensor
root 4420 0.0 0.0 426144 8036 ? Ssl 05:59 0:00 /usr/bin/zeitgeist-daemon
root 4428 0.0 0.1 441384 18492 ? Sl 05:59 0:00 zeitgeist-datahub
root 4432 0.0 0.0 341280 12172 tty2 SNl+ 05:59 0:00 /usr/lib/tracker/tracker-miner-user-guides
root 4433 0.0 0.8 936708 109568 tty2 Sl+ 05:59 0:02 /usr/bin/gnome-software --gapplication-service
root 4434 0.0 0.3 398160 38368 tty2 Sl+ 05:59 0:00 /usr/bin/python -O /usr/share/wicd/gtk/wicd-client.py --tray
root 4435 0.0 0.1 329128 16240 ? Ssl 05:59 0:00 /usr/lib/zeitgeist/zeitgeist/zeitgeist-fts
root 4448 0.0 0.3 1252228 41124 tty2 Sl+ 05:59 0:01 nautilus-desktop
root 4455 0.0 0.2 838396 32404 ? Ssl 05:59 0:00 /usr/lib/evolution/evolution-calendar-factory
root 4469 0.0 0.5 472696 62988 ? Ssl 05:59 0:00 /usr/lib/tracker/tracker-store
root 4490 0.0 0.0 361372 6760 ? Sl 06:00 0:00 /usr/lib/gvfs/gvfsd-trash --spawner :1.17 /org/gtk/gvfs/exec_s
root 4528 0.0 0.1 874164 24140 ? Sl 06:00 0:00 /usr/lib/evolution/evolution-calendar-factory-subprocess --fac
root 4538 0.0 0.1 711224 20136 ? Sl 06:00 0:00 /usr/lib/evolution/evolution-calendar-factory-subprocess --fac
root 4543 0.0 0.1 707516 22508 ? Ssl 06:00 0:00 /usr/lib/evolution/evolution-addressbook-factory
root 4556 0.0 0.1 847816 22520 ? Sl 06:00 0:00 /usr/lib/evolution/evolution-addressbook-factory-subprocess --
root 4592 0.0 0.0 195860 5904 ? Ssl 06:00 0:00 /usr/lib/gvfs/gvfsd-metadata
root 4706 0.0 0.0 73248 5680 ? S 06:00 0:00 /usr/lib/x86_64-linux-gnu/gconf/gconfd-2
root 12661 0.0 0.4 97744 50384 ? S 09:21 0:03 nmap -sV --version-all -sS -sU -T4 -A -v -PE -PP -PS80,443 -PA
root 14894 0.0 0.0 4312 756 ? S 10:05 0:00 /bin/sh /root/Desktop/external_scans_njp_dnac/edit_2__bash_scr
root 14895 0.0 0.0 4312 760 ? S 10:05 0:00 /bin/sh /root/Desktop/external_scans_njp_dnac/nmap_ssl_detecti
root 14925 0.0 0.0 0 0 ? S 10:05 0:00 [kworker/0:1]
debian-+ 16497 0.0 0.3 95956 46616 ? Ss 10:06 0:03 /usr/bin/tor --defaults-torrc /usr/share/tor/tor-service-defau
root 17557 0.0 0.4 98292 51092 ? S 10:11 0:03 nmap -sV --version-all -sS -sU -T4 -A -v -PE -PP -PS80,443 -PA
root 17606 0.0 0.0 0 0 ? S 10:12 0:00 [kworker/2:1]
root 18365 0.0 0.0 20484 4344 ? S 10:14 0:00 /sbin/dhclient -d -q -sf /usr/lib/NetworkManager/nm-dhcp-helpe
root 18366 0.0 0.0 20488 4492 ? S 10:14 0:00 /sbin/dhclient -d -q -6 -N -sf /usr/lib/NetworkManager/nm-dhcp
root 18649 0.0 0.4 98528 51184 ? S 10:16 0:02 nmap -sV --version-all -sS -sU -T4 -A -v -PE -PP -PS80,443 -PA
root 18927 0.0 0.0 0 0 ? S 10:21 0:00 [kworker/2:0]
root 20617 0.0 0.0 355524 10588 ? Sl 10:58 0:00 /usr/lib/gvfs/gvfsd-http --spawner :1.17 /org/gtk/gvfs/exec_sp
root 21636 0.1 0.3 674912 45324 ? Rsl 11:16 0:06 /usr/lib/gnome-terminal/gnome-terminal-server
root 21685 0.0 0.0 19904 3748 pts/0 Ss 11:16 0:00 bash
root 21731 0.0 0.0 11196 1852 ? S 11:17 0:00 /bin/bash /usr/bin/atom automated_routersploit.py
root 21733 0.7 1.8 1701044 229200 ? Sl 11:17 0:46 /usr/share/atom/atom --executed-from=/root/Desktop/projects --
root 21735 0.0 0.2 386384 31508 ? S 11:17 0:00 /usr/share/atom/atom --type=zygote --no-sandbox
root 21752 0.7 1.2 618452 157900 ? Sl 11:17 0:45 /usr/share/atom/atom --type=gpu-process --channel=21733.0.1796
root 21766 4.5 2.9 2677712 358404 ? SLl 11:17 4:26 /usr/share/atom/atom --type=renderer --no-sandbox --primordial
root 21798 0.0 1.4 1037400 175488 ? Sl 11:17 0:00 /usr/share/atom/atom --eval CompileCache = require('/usr/share
root 21860 0.0 1.5 1125292 191380 ? Sl 11:17 0:01 /usr/share/atom/atom --type=renderer --no-sandbox --primordial
root 22657 0.0 0.4 98168 51076 ? S 11:38 0:02 nmap -sV --version-all -sS -sU -T4 -A -v -PE -PP -PS80,443 -PA
root 22727 0.9 1.3 1300788 170308 tty2 SLl+ 11:38 0:45 /usr/lib/x86_64-linux-gnu/opera/opera
root 22732 0.0 0.0 6372 764 tty2 S+ 11:38 0:00 /usr/lib/x86_64-linux-gnu/opera/opera_sandbox /usr/lib/x86_64-
root 22733 0.0 0.2 414796 28468 tty2 S+ 11:38 0:00 /usr/lib/x86_64-linux-gnu/opera/opera --type=zygote
root 22735 0.0 0.0 414796 6836 tty2 S+ 11:38 0:00 /usr/lib/x86_64-linux-gnu/opera/opera --type=zygote
root 22770 0.3 1.5 631396 184436 tty2 Sl+ 11:38 0:14 /usr/lib/x86_64-linux-gnu/opera/opera --type=gpu-process --fie
root 22815 0.0 0.1 449624 16872 tty2 S+ 11:38 0:00 /usr/lib/x86_64-linux-gnu/opera/opera --type=gpu-broker
root 22837 0.0 0.8 903308 99284 tty2 Sl+ 11:38 0:02 /usr/lib/x86_64-linux-gnu/opera/opera --type=renderer --field-
root 22869 0.4 1.5 1030196 188436 tty2 Sl+ 11:38 0:22 /usr/lib/x86_64-linux-gnu/opera/opera --type=renderer --field-
root 22874 0.0 0.8 867616 108284 tty2 Sl+ 11:38 0:01 /usr/lib/x86_64-linux-gnu/opera/opera --type=renderer --field-
root 22958 0.3 0.5 793020 70780 tty2 Sl+ 11:39 0:14 /usr/lib/x86_64-linux-gnu/opera/opera --type=renderer --field-
root 22981 0.0 0.8 897684 106020 tty2 Sl+ 11:39 0:01 /usr/lib/x86_64-linux-gnu/opera/opera --type=renderer --field-
root 23709 0.0 0.0 0 0 ? S 12:04 0:00 [kworker/0:2]
root 23710 0.0 0.0 0 0 ? S 12:04 0:01 [kworker/1:1]
root 23801 0.0 0.0 0 0 ? S 12:04 0:00 [kworker/3:1]
root 25827 0.0 0.2 152444 30960 pts/0 Sl+ 12:30 0:00 python rsf.py
root 25835 0.0 0.0 19904 3744 pts/2 Ss 12:30 0:00 bash
root 25838 0.1 0.2 78772 31192 pts/2 T 12:30 0:02 python
root 25883 0.3 0.0 0 0 ? S 12:32 0:04 [kworker/1:3]
root 26038 0.0 0.0 0 0 ? S 12:35 0:00 [kworker/u8:2]
root 26117 2.5 0.2 78772 31284 pts/2 T 12:38 0:24 python
root 26142 0.0 0.9 850144 121776 tty2 Sl+ 12:38 0:00 /usr/lib/x86_64-linux-gnu/opera/opera --type=renderer --field-
root 26188 0.0 0.0 19908 3740 pts/3 Ss 12:40 0:00 bash
root 26262 0.0 0.0 0 0 ? S 12:43 0:00 [kworker/u8:0]
root 26321 0.0 0.0 4312 772 ? S 12:45 0:00 sh -c /usr/local/bin/pp64.bin /root/Documents/wifi_cracking_wo
root 26322 0.5 1.5 189684 186780 ? S 12:45 0:02 /usr/local/bin/pp64.bin /root/Documents/wifi_cracking_wordlist
root 26323 39.5 2.1 14749888 266600 ? Sl 12:45 3:36 hashcat -a 0 -w 4 -m 2500 /root/ArmsCommander/logs/HashCat/has
root 26324 0.0 0.0 0 0 ? S 12:45 0:00 [UVM GPU1 BH]
root 26361 0.0 0.0 0 0 ? S 12:46 0:00 [kworker/3:0]
root 26427 0.2 0.9 913408 111736 tty2 Sl+ 12:48 0:00 /usr/lib/x86_64-linux-gnu/opera/opera --type=renderer --field-
root 26509 0.0 0.2 78516 30724 pts/2 S+ 12:49 0:00 python
root 26631 0.0 0.0 0 0 ? S 12:51 0:00 [kworker/3:2]
root 26773 0.0 0.0 40320 3264 pts/3 R+ 12:54 0:00 ps aux
root@CRACK_COCAINE:~/Documents/routersploit/routersploit# cd /root/Documents/routersploit/
root@CRACK_COCAINE:~/Documents/routersploit# ls
CONTRIBUTING.md LICENSE README.md requirements.txt routersploit.log tests
Dockerfile Makefile requirements-dev.txt routersploit rsf.py
root@CRACK_COCAINE:~/Documents/routersploit# cat routersploit.log
2017-12-13 10:58:29,120 ERROR routersploit.exceptions Error during loading 'routersploit/modules/scanners/autopwn'
Error: 'module' object has no attribute 'optionsParser'
It should be valid path to the module. Use <tab> key multiple times for completion.
Traceback (most recent call last):
File "/root/Documents/routersploit/routersploit/utils/__init__.py", line 66, in import_exploit
module = importlib.import_module(path)
File "/usr/lib/python2.7/importlib/__init__.py", line 37, in import_module
__import__(name)
File "/root/Documents/routersploit/routersploit/modules/scanners/autopwn.py", line 103, in <module>
main()
File "/root/Documents/routersploit/routersploit/modules/scanners/autopwn.py", line 92, in main
parser = optparse.optionsParser('usage %parser -r <manual control> -a full auto')
AttributeError: 'module' object has no attribute 'optionsParser'
2017-12-13 10:59:03,109 ERROR routersploit.exceptions Unknown command: 'options'
Traceback (most recent call last):
File "/root/Documents/routersploit/routersploit/interpreter.py", line 73, in get_command_handler
command_handler = getattr(self, "command_{}".format(command))
AttributeError: 'RoutersploitInterpreter' object has no attribute 'command_options'
2017-12-13 10:59:45,547 ERROR routersploit.exceptions Unknown command: 'shpw'
Traceback (most recent call last):
File "/root/Documents/routersploit/routersploit/interpreter.py", line 73, in get_command_handler
command_handler = getattr(self, "command_{}".format(command))
AttributeError: 'RoutersploitInterpreter' object has no attribute 'command_shpw'
2017-12-13 11:07:26,917 ERROR routersploit.exceptions Unknown command: 'jobs'
Traceback (most recent call last):
File "/root/Documents/routersploit/routersploit/interpreter.py", line 73, in get_command_handler
command_handler = getattr(self, "command_{}".format(command))
AttributeError: 'RoutersploitInterpreter' object has no attribute 'command_jobs'
2017-12-13 12:43:21,542 ERROR routersploit.exceptions Unknown command: 'hrelep'
Traceback (most recent call last):
File "/root/Documents/routersploit/routersploit/interpreter.py", line 73, in get_command_handler
command_handler = getattr(self, "command_{}".format(command))
AttributeError: 'RoutersploitInterpreter' object has no attribute 'command_hrelep'
2017-12-13 12:54:46,998 ERROR routersploit.exceptions Unknown command: 'shell'
Traceback (most recent call last):
File "/root/Documents/routersploit/routersploit/interpreter.py", line 73, in get_command_handler
command_handler = getattr(self, "command_{}".format(command))
AttributeError: 'RoutersploitInterpreter' object has no attribute 'command_shell'
2017-12-13 12:54:51,933 ERROR routersploit.exceptions Unknown command: 'shell('')'
Traceback (most recent call last):
File "/root/Documents/routersploit/routersploit/interpreter.py", line 73, in get_command_handler
command_handler = getattr(self, "command_{}".format(command))
AttributeError: 'RoutersploitInterpreter' object has no attribute 'command_shell('')'
2017-12-13 12:55:16,471 ERROR routersploit.exceptions Unknown command: '-v'
Traceback (most recent call last):
File "/root/Documents/routersploit/routersploit/interpreter.py", line 73, in get_command_handler
command_handler = getattr(self, "command_{}".format(command))
AttributeError: 'RoutersploitInterpreter' object has no attribute 'command_-v'
2017-12-13 12:55:18,348 ERROR routersploit.exceptions Unknown command: 'version'
Traceback (most recent call last):
File "/root/Documents/routersploit/routersploit/interpreter.py", line 73, in get_command_handler
command_handler = getattr(self, "command_{}".format(command))
AttributeError: 'RoutersploitInterpreter' object has no attribute 'command_version'
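# Note on the tracebacks above: optparse exposes no `optionsParser`; the class
# is `OptionParser`, so the failing line in autopwn.py would need to read
# roughly as follows (`%prog` is optparse's program-name placeholder, and the
# log's '%parser' is assumed to be part of the same typo):
#
#     import optparse
#     parser = optparse.OptionParser('usage: %prog -r <manual control> -a full auto')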
root@CRACK_COCAINE:~/Documents/routersploit# cat routersploit.log
2017-12-13 10:58:29,120 ERROR routersploit.exceptions Error during loading 'routersploit/modules/scanners/autopwn'
Error: 'module' object has no attribute 'optionsParser'
It should be valid path to the module. Use <tab> key multiple times for completion.
Traceback (most recent call last):
File "/root/Documents/routersploit/routersploit/utils/__init__.py", line 66, in import_exploit
module = importlib.import_module(path)
File "/usr/lib/python2.7/importlib/__init__.py", line 37, in import_module
__import__(name)
File "/root/Documents/routersploit/routersploit/modules/scanners/autopwn.py", line 103, in <module>
main()
File "/root/Documents/routersploit/routersploit/modules/scanners/autopwn.py", line 92, in main
parser = optparse.optionsParser('usage %parser -r <manual control> -a full auto')
AttributeError: 'module' object has no attribute 'optionsParser'
2017-12-13 10:59:03,109 ERROR routersploit.exceptions Unknown command: 'options'
Traceback (most recent call last):
File "/root/Documents/routersploit/routersploit/interpreter.py", line 73, in get_command_handler
command_handler = getattr(self, "command_{}".format(command))
AttributeError: 'RoutersploitInterpreter' object has no attribute 'command_options'
2017-12-13 10:59:45,547 ERROR routersploit.exceptions Unknown command: 'shpw'
Traceback (most recent call last):
File "/root/Documents/routersploit/routersploit/interpreter.py", line 73, in get_command_handler
command_handler = getattr(self, "command_{}".format(command))
AttributeError: 'RoutersploitInterpreter' object has no attribute 'command_shpw'
2017-12-13 11:07:26,917 ERROR routersploit.exceptions Unknown command: 'jobs'
Traceback (most recent call last):
File "/root/Documents/routersploit/routersploit/interpreter.py", line 73, in get_command_handler
command_handler = getattr(self, "command_{}".format(command))
AttributeError: 'RoutersploitInterpreter' object has no attribute 'command_jobs'
2017-12-13 12:43:21,542 ERROR routersploit.exceptions Unknown command: 'hrelep'
Traceback (most recent call last):
File "/root/Documents/routersploit/routersploit/interpreter.py", line 73, in get_command_handler
command_handler = getattr(self, "command_{}".format(command))
AttributeError: 'RoutersploitInterpreter' object has no attribute 'command_hrelep'
2017-12-13 12:54:46,998 ERROR routersploit.exceptions Unknown command: 'shell'
Traceback (most recent call last):
File "/root/Documents/routersploit/routersploit/interpreter.py", line 73, in get_command_handler
command_handler = getattr(self, "command_{}".format(command))
AttributeError: 'RoutersploitInterpreter' object has no attribute 'command_shell'
2017-12-13 12:54:51,933 ERROR routersploit.exceptions Unknown command: 'shell('')'
Traceback (most recent call last):
File "/root/Documents/routersploit/routersploit/interpreter.py", line 73, in get_command_handler
command_handler = getattr(self, "command_{}".format(command))
AttributeError: 'RoutersploitInterpreter' object has no attribute 'command_shell('')'
2017-12-13 12:55:16,471 ERROR routersploit.exceptions Unknown command: '-v'
Traceback (most recent call last):
File "/root/Documents/routersploit/routersploit/interpreter.py", line 73, in get_command_handler
command_handler = getattr(self, "command_{}".format(command))
AttributeError: 'RoutersploitInterpreter' object has no attribute 'command_-v'
2017-12-13 12:55:18,348 ERROR routersploit.exceptions Unknown command: 'version'
Traceback (most recent call last):
File "/root/Documents/routersploit/routersploit/interpreter.py", line 73, in get_command_handler
command_handler = getattr(self, "command_{}".format(command))
AttributeError: 'RoutersploitInterpreter' object has no attribute 'command_version'
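
The autopwn failure at the top of this log is a plain typo: optparse has no attribute optionsParser; the class is OptionParser (and the conventional usage placeholder is %prog, not %parser). A corrected sketch of the failing line 92 of autopwn.py, keeping the usage string from the log and assuming the rest of main() is unchanged:

    import optparse

    # optparse.OptionParser is the real class; the log's optparse.optionsParser
    # does not exist, hence the AttributeError during module import.
    parser = optparse.OptionParser(usage="usage: %prog -r <manual control> -a full auto")
    (options, args) = parser.parse_args()
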
root@CRACK_COCAINE:~/Documents/routersploit# cat routersploit.log grep^C
root@CRACK_COCAINE:~/Documents/routersploit# egrep -irn command_handler * --color
routersploit/interpreter.py:66: def get_command_handler(self, command):
routersploit/interpreter.py:70: :return: command_handler
routersploit/interpreter.py:73: command_handler = getattr(self, "command_{}".format(command))
routersploit/interpreter.py:77: return command_handler
routersploit/interpreter.py:89: command_handler = self.get_command_handler(command)
routersploit/interpreter.py:90: command_handler(args)
Binary file routersploit/interpreter.pyc matches
routersploit.log:18: File "/root/Documents/routersploit/routersploit/interpreter.py", line 73, in get_command_handler
routersploit.log:19: command_handler = getattr(self, "command_{}".format(command))
routersploit.log:23: File "/root/Documents/routersploit/routersploit/interpreter.py", line 73, in get_command_handler
routersploit.log:24: command_handler = getattr(self, "command_{}".format(command))
routersploit.log:28: File "/root/Documents/routersploit/routersploit/interpreter.py", line 73, in get_command_handler
routersploit.log:29: command_handler = getattr(self, "command_{}".format(command))
routersploit.log:33: File "/root/Documents/routersploit/routersploit/interpreter.py", line 73, in get_command_handler
routersploit.log:34: command_handler = getattr(self, "command_{}".format(command))
routersploit.log:38: File "/root/Documents/routersploit/routersploit/interpreter.py", line 73, in get_command_handler
routersploit.log:39: command_handler = getattr(self, "command_{}".format(command))
routersploit.log:43: File "/root/Documents/routersploit/routersploit/interpreter.py", line 73, in get_command_handler
routersploit.log:44: command_handler = getattr(self, "command_{}".format(command))
routersploit.log:48: File "/root/Documents/routersploit/routersploit/interpreter.py", line 73, in get_command_handler
routersploit.log:49: command_handler = getattr(self, "command_{}".format(command))
routersploit.log:53: File "/root/Documents/routersploit/routersploit/interpreter.py", line 73, in get_command_handler
routersploit.log:54: command_handler = getattr(self, "command_{}".format(command))
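
The grep output pins down the dispatch mechanism: interpreter.py line 73 resolves each typed command to a command_<name> method via getattr, so any word without a matching method raises AttributeError, which the interpreter logs as "Unknown command". A toy sketch of that pattern (simplified names, not routersploit's actual class):

    class ToyInterpreter(object):
        def command_help(self, args):
            print("help text")

        def get_command_handler(self, command):
            # Mirrors interpreter.py:73 -- unknown names raise AttributeError.
            return getattr(self, "command_{}".format(command))

    t = ToyInterpreter()
    t.get_command_handler("help")("")   # prints: help text
    t.get_command_handler("version")    # AttributeError, logged as "Unknown command"
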
root@CRACK_COCAINE:~/Documents/routersploit# cd routersploit
root@CRACK_COCAINE:~/Documents/routersploit/routersploit# atom interpreter.py
root@CRACK_COCAINE:~/Documents/routersploit/routersploit# cd -
/root/Documents/routersploit
root@CRACK_COCAINE:~/Documents/routersploit# python
Python 2.7.14 (default, Sep 17 2017, 18:50:44)
[GCC 7.2.0] on linux2
Type "help", "copyright", "credits" or "license" for more information.
>>> import routersploit
>>> from routersploit import *
>>> import interpreter
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
ImportError: No module named interpreter
>>> from routersploit import interpreter
>>> interpreter.get_command_handler(self, 'show payloads')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'module' object has no attribute 'get_command_handler'
>>> interpreter.get_command_handler(self, 'show')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'module' object has no attribute 'get_command_handler'
>>> interpreter.BaseInterpreter.get_command_handler(self, 'show payloads')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'self' is not defined
>>> interpreter.BaseInterpreter(self).get_command_handler(self, 'show payloads')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'self' is not defined
>>> interpreter.BaseInterpreter(object).get_command_handler(self, 'show payloads')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: __init__() takes exactly 1 argument (2 given)
>>> interpreter.BaseInterpreter(__init__).get_command_handler(self, 'show payloads')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name '__init__' is not defined
>>> interpreter.BaseInterpreter(object).get_command_handler(self, 'show payloads')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: __init__() takes exactly 1 argument (2 given)
>>> interpreter.BaseInterpreter(object).get_command_handler('show payloads')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: __init__() takes exactly 1 argument (2 given)
>>> interpreter.BaseInterpreter(object).get_command_handler('show payloads')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: __init__() takes exactly 1 argument (2 given)
>>> interpreter.BaseInterpreter(object).__init__
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: __init__() takes exactly 1 argument (2 given)
>>> dir(interpreter.BaseInterpreter)
['__class__', '__delattr__', '__dict__', '__doc__', '__format__', '__getattribute__', '__hash__', '__init__', '__module__', '__new__', '__reduce__', '__reduce_ex__', '__repr__', '__setattr__', '__sizeof__', '__str__', '__subclasshook__', '__weakref__', 'commands', 'complete', 'default_completer', 'get_command_handler', 'global_help', 'history_file', 'history_length', 'parse_line', 'prompt', 'raw_command_completer', 'setup', 'start', 'suggested_commands']
>>> dir(interpreter.BaseInterpreter.commands)
['__call__', '__class__', '__cmp__', '__delattr__', '__doc__', '__format__', '__func__', '__get__', '__getattribute__', '__hash__', '__init__', '__new__', '__reduce__', '__reduce_ex__', '__repr__', '__self__', '__setattr__', '__sizeof__', '__str__', '__subclasshook__', 'im_class', 'im_func', 'im_self']
>>> dir(interpreter.BaseInterpreter.get_command_handler
... show payloads
File "<stdin>", line 2
show payloads
^
SyntaxError: invalid syntax
>>> dir(interpreter.BaseInterpreter.get_command_handler('show payloads'))
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: unbound method get_command_handler() must be called with BaseInterpreter instance as first argument (got str instance instead)
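
Every TypeError of the form "unbound method ... must be called with BaseInterpreter instance as first argument" comes from Python 2's method binding rules: a method looked up on the class is unbound and needs an instance, not a string, as self. A minimal illustration with a hypothetical class (not routersploit code):

    class Greeter(object):
        def greet(self, name):
            return "hello " + name

    try:
        Greeter.greet("world")            # unbound method: TypeError in Python 2
    except TypeError as exc:
        print(exc)
    print(Greeter().greet("world"))       # instance first, then call: hello world
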
>>> BaseInterpreter()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'BaseInterpreter' is not defined
>>> help(BaseInterpreter)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'BaseInterpreter' is not defined
>>> BaseInterpreter(__init__)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'BaseInterpreter' is not defined
>>> import BaseInterpreter
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
ImportError: No module named BaseInterpreter
>>> import BaseInterpreter~
KeyboardInterrupt
>>> dir(BaseInterpreter)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'BaseInterpreter' is not defined
>>> dir(interpreter.BaseInterpreter)
['__class__', '__delattr__', '__dict__', '__doc__', '__format__', '__getattribute__', '__hash__', '__init__', '__module__', '__new__', '__reduce__', '__reduce_ex__', '__repr__', '__setattr__', '__sizeof__', '__str__', '__subclasshook__', '__weakref__', 'commands', 'complete', 'default_completer', 'get_command_handler', 'global_help', 'history_file', 'history_length', 'parse_line', 'prompt', 'raw_command_completer', 'setup', 'start', 'suggested_commands']
>>> dir(interpreter.suggeseted_commands)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'module' object has no attribute 'suggeseted_commands'
>>> dir(interpreter.suggeseted_commands)
KeyboardInterrupt
>>> vars
<built-in function vars>
>>> vars()
{'exploits': <module 'routersploit.exploits' from 'routersploit/exploits.pyc'>, 'mute': <function mute at 0x7fb92ad31398>, 'utils': <module 'routersploit.utils' from 'routersploit/utils/__init__.pyc'>, 'print_status': <function print_status at 0x7fb92ad315f0>, 'print_error': <function print_error at 0x7fb92ad31578>, 'printer': <module 'routersploit.printer' from 'routersploit/printer.pyc'>, 'validators': <module 'routersploit.validators' from 'routersploit/validators.pyc'>, 'interpreter': <module 'routersploit.interpreter' from 'routersploit/interpreter.pyc'>, 'print_table': <function print_table at 0x7fb92ad31b18>, 'sanitize_url': <function sanitize_url at 0x7fb92ad31b90>, 'wordlists': <module 'routersploit.wordlists' from 'routersploit/wordlists/__init__.pyc'>, '__package__': None, 'payloads': <module 'routersploit.payloads' from 'routersploit/payloads.pyc'>, 'tokenize': <function tokenize at 0x7fb92ad31f50>, '__doc__': None, 'http_request': <function http_request at 0x7fb92ad31cf8>, 'shell': <function shell at 0x7fb92accf398>, 'ssh_interactive': <function ssh_interactive at 0x7fb92ad31de8>, '__builtins__': <module '__builtin__' (built-in)>, 'boolify': <function boolify at 0x7fb92ad31d70>, 'LockedIterator': <class 'routersploit.utils.LockedIterator'>, 'multi': <function multi at 0x7fb92ad31488>, '__name__': '__main__', 'modules': <module 'routersploit.modules' from 'routersploit/modules/__init__.pyc'>, 'routersploit': <module 'routersploit' from 'routersploit/__init__.pyc'>, 'index_modules': <function index_modules at 0x7fb92ad15c80>, 'print_success': <function print_success at 0x7fb92ad31668>, 'print_info': <function print_info at 0x7fb92ad316e0>, 'exceptions': <module 'routersploit.exceptions' from 'routersploit/exceptions.pyc'>, 'random_text': <function random_text at 0x7fb92ad31c80>}
>>> dir(interpreter.prompt)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'module' object has no attribute 'prompt'
>>> help(interpreter.prompt)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'module' object has no attribute 'prompt'
>>> prompt
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'prompt' is not defined
>>> dir()
['LockedIterator', '__builtins__', '__doc__', '__name__', '__package__', 'boolify', 'exceptions', 'exploits', 'http_request', 'index_modules', 'interpreter', 'modules', 'multi', 'mute', 'payloads', 'print_error', 'print_info', 'print_status', 'print_success', 'print_table', 'printer', 'random_text', 'routersploit', 'sanitize_url', 'shell', 'ssh_interactive', 'tokenize', 'utils', 'validators', 'wordlists']
>>> dir(multi)
['__call__', '__class__', '__closure__', '__code__', '__defaults__', '__delattr__', '__dict__', '__doc__', '__format__', '__get__', '__getattribute__', '__globals__', '__hash__', '__init__', '__module__', '__name__', '__new__', '__reduce__', '__reduce_ex__', '__repr__', '__setattr__', '__sizeof__', '__str__', '__subclasshook__', 'func_closure', 'func_code', 'func_defaults', 'func_dict', 'func_doc', 'func_globals', 'func_name']
>>> dir(_call_)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name '_call_' is not defined
>>> dir(_globals_)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name '_globals_' is not defined
>>> globals
<built-in function globals>
>>> globals()
{'exploits': <module 'routersploit.exploits' from 'routersploit/exploits.pyc'>, 'mute': <function mute at 0x7fb92ad31398>, 'utils': <module 'routersploit.utils' from 'routersploit/utils/__init__.pyc'>, 'print_status': <function print_status at 0x7fb92ad315f0>, 'print_error': <function print_error at 0x7fb92ad31578>, 'printer': <module 'routersploit.printer' from 'routersploit/printer.pyc'>, 'validators': <module 'routersploit.validators' from 'routersploit/validators.pyc'>, 'interpreter': <module 'routersploit.interpreter' from 'routersploit/interpreter.pyc'>, 'print_table': <function print_table at 0x7fb92ad31b18>, 'sanitize_url': <function sanitize_url at 0x7fb92ad31b90>, 'wordlists': <module 'routersploit.wordlists' from 'routersploit/wordlists/__init__.pyc'>, '__package__': None, 'payloads': <module 'routersploit.payloads' from 'routersploit/payloads.pyc'>, 'tokenize': <function tokenize at 0x7fb92ad31f50>, '__doc__': None, 'http_request': <function http_request at 0x7fb92ad31cf8>, 'shell': <function shell at 0x7fb92accf398>, 'ssh_interactive': <function ssh_interactive at 0x7fb92ad31de8>, '__builtins__': <module '__builtin__' (built-in)>, 'boolify': <function boolify at 0x7fb92ad31d70>, 'LockedIterator': <class 'routersploit.utils.LockedIterator'>, 'multi': <function multi at 0x7fb92ad31488>, '__name__': '__main__', 'modules': <module 'routersploit.modules' from 'routersploit/modules/__init__.pyc'>, 'routersploit': <module 'routersploit' from 'routersploit/__init__.pyc'>, 'index_modules': <function index_modules at 0x7fb92ad15c80>, 'print_success': <function print_success at 0x7fb92ad31668>, 'print_info': <function print_info at 0x7fb92ad316e0>, 'exceptions': <module 'routersploit.exceptions' from 'routersploit/exceptions.pyc'>, 'random_text': <function random_text at 0x7fb92ad31c80>}
>>> exec
File "<stdin>", line 1
exec
^
SyntaxError: invalid syntax
>>> dir()
['LockedIterator', '__builtins__', '__doc__', '__name__', '__package__', 'boolify', 'exceptions', 'exploits', 'http_request', 'index_modules', 'interpreter', 'modules', 'multi', 'mute', 'payloads', 'print_error', 'print_info', 'print_status', 'print_success', 'print_table', 'printer', 'random_text', 'routersploit', 'sanitize_url', 'shell', 'ssh_interactive', 'tokenize', 'utils', 'validators', 'wordlists']
>>> dir(interpreter)
['BaseInterpreter', 'BasePayload', 'Counter', 'Exploit', 'GLOBAL_OPTS', 'PrinterThread', 'RoutersploitException', 'RoutersploitInterpreter', '__builtins__', '__doc__', '__file__', '__name__', '__package__', 'atexit', 'itertools', 'os', 'print_function', 'printer_queue', 'readline', 'sys', 'traceback', 'utils']
>>> dir(itertools)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'itertools' is not defined
>>> dir(readline)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'readline' is not defined
>>> dir(sys)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'sys' is not defined
>>> dir(utils)
['ABCMeta', 'CREDS_DIR', 'DummyFile', 'EXPLOITS_DIR', 'LockedIterator', 'MODULES_DIR', 'NonStringIterable', 'PrintResource', 'Resource', 'RoutersploitException', 'SCANNERS_DIR', '__builtins__', '__cprint', '__doc__', '__file__', '__name__', '__package__', '__path__', 'absolute_import', 'abstractmethod', 'boolify', 'collections', 'colors', 'create_exploit', 'create_resource', 'errno', 'http_request', 'humanize_path', 'import_exploit', 'importlib', 'index_modules', 'iter_modules', 'mkdir_p', 'module_required', 'multi', 'mute', 'os', 'posix_shell', 'pprint_dict_in_order', 'print_error', 'print_function', 'print_info', 'print_lock', 'print_status', 'print_success', 'print_table', 'printer_queue', 'pythonize_path', 'random', 'random_text', 're', 'requests', 'rsf_modules', 'sanitize_url', 'select', 'socket', 'ssh_interactive', 'stop_after', 'string', 'strtobool', 'sys', 'thread_output_stream', 'threading', 'tokenize', 'windows_shell', 'wraps']
>>> dir(import_exploit)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'import_exploit' is not defined
>>> dir(ssh_interactive)
['__call__', '__class__', '__closure__', '__code__', '__defaults__', '__delattr__', '__dict__', '__doc__', '__format__', '__get__', '__getattribute__', '__globals__', '__hash__', '__init__', '__module__', '__name__', '__new__', '__reduce__', '__reduce_ex__', '__repr__', '__setattr__', '__sizeof__', '__str__', '__subclasshook__', 'func_closure', 'func_code', 'func_defaults', 'func_dict', 'func_doc', 'func_globals', 'func_name']
>>> dir(__dict__)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name '__dict__' is not defined
>>> help(__dict__)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name '__dict__' is not defined
>>> help(ssh_interactive.__dict__)
>>> dir()
['LockedIterator', '__builtins__', '__doc__', '__name__', '__package__', 'boolify', 'exceptions', 'exploits', 'http_request', 'index_modules', 'interpreter', 'modules', 'multi', 'mute', 'payloads', 'print_error', 'print_info', 'print_status', 'print_success', 'print_table', 'printer', 'random_text', 'routersploit', 'sanitize_url', 'shell', 'ssh_interactive', 'tokenize', 'utils', 'validators', 'wordlists']
>>> dir(utils)
['ABCMeta', 'CREDS_DIR', 'DummyFile', 'EXPLOITS_DIR', 'LockedIterator', 'MODULES_DIR', 'NonStringIterable', 'PrintResource', 'Resource', 'RoutersploitException', 'SCANNERS_DIR', '__builtins__', '__cprint', '__doc__', '__file__', '__name__', '__package__', '__path__', 'absolute_import', 'abstractmethod', 'boolify', 'collections', 'colors', 'create_exploit', 'create_resource', 'errno', 'http_request', 'humanize_path', 'import_exploit', 'importlib', 'index_modules', 'iter_modules', 'mkdir_p', 'module_required', 'multi', 'mute', 'os', 'posix_shell', 'pprint_dict_in_order', 'print_error', 'print_function', 'print_info', 'print_lock', 'print_status', 'print_success', 'print_table', 'printer_queue', 'pythonize_path', 'random', 'random_text', 're', 'requests', 'rsf_modules', 'sanitize_url', 'select', 'socket', 'ssh_interactive', 'stop_after', 'string', 'strtobool', 'sys', 'thread_output_stream', 'threading', 'tokenize', 'windows_shell', 'wraps']
>>> import absolute_import
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
ImportError: No module named absolute_import
>>> from routersploit import absolute_import
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
ImportError: cannot import name absolute_import
>>> dir()
['LockedIterator', '__builtins__', '__doc__', '__name__', '__package__', 'boolify', 'exceptions', 'exploits', 'http_request', 'index_modules', 'interpreter', 'modules', 'multi', 'mute', 'payloads', 'print_error', 'print_info', 'print_status', 'print_success', 'print_table', 'printer', 'random_text', 'routersploit', 'sanitize_url', 'shell', 'ssh_interactive', 'tokenize', 'utils', 'validators', 'wordlists']
>>> dir(interpreter)
['BaseInterpreter', 'BasePayload', 'Counter', 'Exploit', 'GLOBAL_OPTS', 'PrinterThread', 'RoutersploitException', 'RoutersploitInterpreter', '__builtins__', '__doc__', '__file__', '__name__', '__package__', 'atexit', 'itertools', 'os', 'print_function', 'printer_queue', 'readline', 'sys', 'traceback', 'utils']
>>> dir(interpreter.command_run)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'module' object has no attribute 'command_run'
>>> dir(interpreter.command_run())
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'module' object has no attribute 'command_run'
>>> dir(interpreter.command_run('show'))
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'module' object has no attribute 'command_run'
>>> dir()
['LockedIterator', '__builtins__', '__doc__', '__name__', '__package__', 'boolify', 'exceptions', 'exploits', 'http_request', 'index_modules', 'interpreter', 'modules', 'multi', 'mute', 'payloads', 'print_error', 'print_info', 'print_status', 'print_success', 'print_table', 'printer', 'random_text', 'routersploit', 'sanitize_url', 'shell', 'ssh_interactive', 'tokenize', 'utils', 'validators', 'wordlists']
>>> dir(modules)
['__author__', '__builtins__', '__doc__', '__file__', '__name__', '__package__', '__path__']
>>> dir(index_modules)
['__call__', '__class__', '__closure__', '__code__', '__defaults__', '__delattr__', '__dict__', '__doc__', '__format__', '__get__', '__getattribute__', '__globals__', '__hash__', '__init__', '__module__', '__name__', '__new__', '__reduce__', '__reduce_ex__', '__repr__', '__setattr__', '__sizeof__', '__str__', '__subclasshook__', 'func_closure', 'func_code', 'func_defaults', 'func_dict', 'func_doc', 'func_globals', 'func_name']
>>> dir(utils)
['ABCMeta', 'CREDS_DIR', 'DummyFile', 'EXPLOITS_DIR', 'LockedIterator', 'MODULES_DIR', 'NonStringIterable', 'PrintResource', 'Resource', 'RoutersploitException', 'SCANNERS_DIR', '__builtins__', '__cprint', '__doc__', '__file__', '__name__', '__package__', '__path__', 'absolute_import', 'abstractmethod', 'boolify', 'collections', 'colors', 'create_exploit', 'create_resource', 'errno', 'http_request', 'humanize_path', 'import_exploit', 'importlib', 'index_modules', 'iter_modules', 'mkdir_p', 'module_required', 'multi', 'mute', 'os', 'posix_shell', 'pprint_dict_in_order', 'print_error', 'print_function', 'print_info', 'print_lock', 'print_status', 'print_success', 'print_table', 'printer_queue', 'pythonize_path', 'random', 'random_text', 're', 'requests', 'rsf_modules', 'sanitize_url', 'select', 'socket', 'ssh_interactive', 'stop_after', 'string', 'strtobool', 'sys', 'thread_output_stream', 'threading', 'tokenize', 'windows_shell', 'wraps']
>>> dir(pythonize)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'pythonize' is not defined
>>> dir(pythonize_path)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'pythonize_path' is not defined
>>> help(pythonize_path)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'pythonize_path' is not defined
>>> dir(pythonize_path)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'pythonize_path' is not defined
>>> posix_shell()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'posix_shell' is not defined
>>> help(posix_shell)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'posix_shell' is not defined
>>> help(import_exploit)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'import_exploit' is not defined
>>> help(__dict__)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name '__dict__' is not defined
>>> dir()
['LockedIterator', '__builtins__', '__doc__', '__name__', '__package__', 'boolify', 'exceptions', 'exploits', 'http_request', 'index_modules', 'interpreter', 'modules', 'multi', 'mute', 'payloads', 'print_error', 'print_info', 'print_status', 'print_success', 'print_table', 'printer', 'random_text', 'routersploit', 'sanitize_url', 'shell', 'ssh_interactive', 'tokenize', 'utils', 'validators', 'wordlists']
>>> dir(modules)
['__author__', '__builtins__', '__doc__', '__file__', '__name__', '__package__', '__path__']
>>> dir(random_text)
['__call__', '__class__', '__closure__', '__code__', '__defaults__', '__delattr__', '__dict__', '__doc__', '__format__', '__get__', '__getattribute__', '__globals__', '__hash__', '__init__', '__module__', '__name__', '__new__', '__reduce__', '__reduce_ex__', '__repr__', '__setattr__', '__sizeof__', '__str__', '__subclasshook__', 'func_closure', 'func_code', 'func_defaults', 'func_dict', 'func_doc', 'func_globals', 'func_name']
>>> dir(utils)
['ABCMeta', 'CREDS_DIR', 'DummyFile', 'EXPLOITS_DIR', 'LockedIterator', 'MODULES_DIR', 'NonStringIterable', 'PrintResource', 'Resource', 'RoutersploitException', 'SCANNERS_DIR', '__builtins__', '__cprint', '__doc__', '__file__', '__name__', '__package__', '__path__', 'absolute_import', 'abstractmethod', 'boolify', 'collections', 'colors', 'create_exploit', 'create_resource', 'errno', 'http_request', 'humanize_path', 'import_exploit', 'importlib', 'index_modules', 'iter_modules', 'mkdir_p', 'module_required', 'multi', 'mute', 'os', 'posix_shell', 'pprint_dict_in_order', 'print_error', 'print_function', 'print_info', 'print_lock', 'print_status', 'print_success', 'print_table', 'printer_queue', 'pythonize_path', 'random', 'random_text', 're', 'requests', 'rsf_modules', 'sanitize_url', 'select', 'socket', 'ssh_interactive', 'stop_after', 'string', 'strtobool', 'sys', 'thread_output_stream', 'threading', 'tokenize', 'windows_shell', 'wraps']
>>> dir(create_exploit)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'create_exploit' is not defined
>>> help(create_exploit)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'create_exploit' is not defined
>>> help(humanize_path)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'humanize_path' is not defined
>>> dir(humanize_path)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'humanize_path' is not defined
>>> routersploit.utils.__init__
<method-wrapper '__init__' of module object at 0x7fb92ecd26a8>
>>> dir(routersploit.utils)
['ABCMeta', 'CREDS_DIR', 'DummyFile', 'EXPLOITS_DIR', 'LockedIterator', 'MODULES_DIR', 'NonStringIterable', 'PrintResource', 'Resource', 'RoutersploitException', 'SCANNERS_DIR', '__builtins__', '__cprint', '__doc__', '__file__', '__name__', '__package__', '__path__', 'absolute_import', 'abstractmethod', 'boolify', 'collections', 'colors', 'create_exploit', 'create_resource', 'errno', 'http_request', 'humanize_path', 'import_exploit', 'importlib', 'index_modules', 'iter_modules', 'mkdir_p', 'module_required', 'multi', 'mute', 'os', 'posix_shell', 'pprint_dict_in_order', 'print_error', 'print_function', 'print_info', 'print_lock', 'print_status', 'print_success', 'print_table', 'printer_queue', 'pythonize_path', 'random', 'random_text', 're', 'requests', 'rsf_modules', 'sanitize_url', 'select', 'socket', 'ssh_interactive', 'stop_after', 'string', 'strtobool', 'sys', 'thread_output_stream', 'threading', 'tokenize', 'windows_shell', 'wraps']
>>> help(string)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'string' is not defined
>>> vars()
{'exploits': <module 'routersploit.exploits' from 'routersploit/exploits.pyc'>, 'mute': <function mute at 0x7fb92ad31398>, 'utils': <module 'routersploit.utils' from 'routersploit/utils/__init__.pyc'>, 'print_status': <function print_status at 0x7fb92ad315f0>, 'print_error': <function print_error at 0x7fb92ad31578>, 'printer': <module 'routersploit.printer' from 'routersploit/printer.pyc'>, 'validators': <module 'routersploit.validators' from 'routersploit/validators.pyc'>, 'interpreter': <module 'routersploit.interpreter' from 'routersploit/interpreter.pyc'>, 'print_table': <function print_table at 0x7fb92ad31b18>, 'sanitize_url': <function sanitize_url at 0x7fb92ad31b90>, 'wordlists': <module 'routersploit.wordlists' from 'routersploit/wordlists/__init__.pyc'>, '__package__': None, 'payloads': <module 'routersploit.payloads' from 'routersploit/payloads.pyc'>, 'tokenize': <function tokenize at 0x7fb92ad31f50>, '__doc__': None, 'http_request': <function http_request at 0x7fb92ad31cf8>, 'shell': <function shell at 0x7fb92accf398>, 'ssh_interactive': <function ssh_interactive at 0x7fb92ad31de8>, '__builtins__': <module '__builtin__' (built-in)>, 'boolify': <function boolify at 0x7fb92ad31d70>, 'LockedIterator': <class 'routersploit.utils.LockedIterator'>, 'multi': <function multi at 0x7fb92ad31488>, '__name__': '__main__', 'modules': <module 'routersploit.modules' from 'routersploit/modules/__init__.pyc'>, 'routersploit': <module 'routersploit' from 'routersploit/__init__.pyc'>, 'index_modules': <function index_modules at 0x7fb92ad15c80>, 'print_success': <function print_success at 0x7fb92ad31668>, 'print_info': <function print_info at 0x7fb92ad316e0>, 'exceptions': <module 'routersploit.exceptions' from 'routersploit/exceptions.pyc'>, 'random_text': <function random_text at 0x7fb92ad31c80>}
>>> command = "show"
>>> command = "show payloads"
>>> interpreter.get_command_handler(command)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'module' object has no attribute 'get_command_handler'
>>> interpreter.start.get_command_handler(command)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'module' object has no attribute 'start'
>>> interpreter.BaseInterpreter.start.get_command_handler(command)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'function' object has no attribute 'get_command_handler'
>>> interpreter.BaseInterpreter(command)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: __init__() takes exactly 1 argument (2 given)
>>> interpreter.BaseInterpreter()
<routersploit.interpreter.BaseInterpreter object at 0x7fb92ace76d0>
>>> print 0x7fb92ace76d0
140433263851216
>>> test = interpreter.BaseInterpreter(command)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: __init__() takes exactly 1 argument (2 given)
>>> test = interpreter.BaseInterpreter()
>>> print test
<routersploit.interpreter.BaseInterpreter object at 0x7fb92ace7710>
>>> test = interpreter.BaseInterpreter(command)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: __init__() takes exactly 1 argument (2 given)
>>> test = interpreter.BaseInterpreter()
>>> command = use scanners/autopwn
File "<stdin>", line 1
command = use scanners/autopwn
^
SyntaxError: invalid syntax
>>> command = "use scanners/autopwn"
>>> test = interpreter.BaseInterpreter()
>>> interpreter.BaseInterpreter()
<routersploit.interpreter.BaseInterpreter object at 0x7fb92ac99790>
>>> print
>>> dir()
['LockedIterator', '__builtins__', '__doc__', '__name__', '__package__', 'boolify', 'command', 'exceptions', 'exploits', 'http_request', 'index_modules', 'interpreter', 'modules', 'multi', 'mute', 'payloads', 'print_error', 'print_info', 'print_status', 'print_success', 'print_table', 'printer', 'random_text', 'routersploit', 'sanitize_url', 'shell', 'ssh_interactive', 'test', 'tokenize', 'utils', 'validators', 'wordlists']
>>> printer
<module 'routersploit.printer' from 'routersploit/printer.pyc'>
>>> test = printer
>>> print test
<module 'routersploit.printer' from 'routersploit/printer.pyc'>
>>> command = "run"
>>> interpreter.BaseInterpreter()
<routersploit.interpreter.BaseInterpreter object at 0x7fb92ac90690>
>>> print_error
<function print_error at 0x7fb92ad31578>
>>> print command
run
>>> print_info
<function print_info at 0x7fb92ad316e0>
>>> print_status
<function print_status at 0x7fb92ad315f0>
>>> print_success
<function print_success at 0x7fb92ad31668>
>>> help(__builtins__)
>>> __getattribute__
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name '__getattribute__' is not defined
>>> __getattribute__(command)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name '__getattribute__' is not defined
>>> __getattribute__(command)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name '__getattribute__' is not defined
>>> help(__builtins__)
>>> dict
<type 'dict'>
>>> dict
<type 'dict'>
>>> dict command
File "<stdin>", line 1
dict command
^
SyntaxError: invalid syntax
>>> doct get_command
File "<stdin>", line 1
doct get_command
^
SyntaxError: invalid syntax
>>> doct get_command()
File "<stdin>", line 1
doct get_command()
^
SyntaxError: invalid syntax
>>> locals()
{'exploits': <module 'routersploit.exploits' from 'routersploit/exploits.pyc'>, 'mute': <function mute at 0x7fb92ad31398>, 'utils': <module 'routersploit.utils' from 'routersploit/utils/__init__.pyc'>, 'print_status': <function print_status at 0x7fb92ad315f0>, 'print_error': <function print_error at 0x7fb92ad31578>, 'printer': <module 'routersploit.printer' from 'routersploit/printer.pyc'>, 'validators': <module 'routersploit.validators' from 'routersploit/validators.pyc'>, 'interpreter': <module 'routersploit.interpreter' from 'routersploit/interpreter.pyc'>, 'print_table': <function print_table at 0x7fb92ad31b18>, 'sanitize_url': <function sanitize_url at 0x7fb92ad31b90>, 'wordlists': <module 'routersploit.wordlists' from 'routersploit/wordlists/__init__.pyc'>, '__package__': None, 'payloads': <module 'routersploit.payloads' from 'routersploit/payloads.pyc'>, 'tokenize': <function tokenize at 0x7fb92ad31f50>, 'test': <module 'routersploit.printer' from 'routersploit/printer.pyc'>, 'command': 'run', '__doc__': None, 'http_request': <function http_request at 0x7fb92ad31cf8>, 'shell': <function shell at 0x7fb92accf398>, 'ssh_interactive': <function ssh_interactive at 0x7fb92ad31de8>, '__builtins__': <module '__builtin__' (built-in)>, 'boolify': <function boolify at 0x7fb92ad31d70>, 'LockedIterator': <class 'routersploit.utils.LockedIterator'>, 'multi': <function multi at 0x7fb92ad31488>, '__name__': '__main__', 'modules': <module 'routersploit.modules' from 'routersploit/modules/__init__.pyc'>, 'routersploit': <module 'routersploit' from 'routersploit/__init__.pyc'>, 'index_modules': <function index_modules at 0x7fb92ad15c80>, 'print_success': <function print_success at 0x7fb92ad31668>, 'print_info': <function print_info at 0x7fb92ad316e0>, 'exceptions': <module 'routersploit.exceptions' from 'routersploit/exceptions.pyc'>, 'random_text': <function random_text at 0x7fb92ad31c80>}
>>> BaseInterpreter(command)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'BaseInterpreter' is not defined
>>> interpreter.BaseInterpreter(command)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: __init__() takes exactly 1 argument (2 given)
>>> interpreter.BaseInterpreter()
<routersploit.interpreter.BaseInterpreter object at 0x7fb92ac99790>
>>> command = "exit"
>>> interpreter.BaseInterpreter()
<routersploit.interpreter.BaseInterpreter object at 0x7fb92ac90690>
>>> help(__doc__)
>>> list
<type 'list'>
>>> list __builtins__
File "<stdin>", line 1
list __builtins__
^
SyntaxError: invalid syntax
>>> list main
File "<stdin>", line 1
list main
^
SyntaxError: invalid syntax
>>> list command
File "<stdin>", line 1
list command
^
SyntaxError: invalid syntax
>>> list(print_status)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: 'function' object is not iterable
>>> list(wordlists)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: 'module' object is not iterable
>>> list(modules)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: 'module' object is not iterable
>>> help(list)
>>> dir()
['LockedIterator', '__builtins__', '__doc__', '__name__', '__package__', 'boolify', 'command', 'exceptions', 'exploits', 'http_request', 'index_modules', 'interpreter', 'modules', 'multi', 'mute', 'payloads', 'print_error', 'print_info', 'print_status', 'print_success', 'print_table', 'printer', 'random_text', 'routersploit', 'sanitize_url', 'shell', 'ssh_interactive', 'test', 'tokenize', 'utils', 'validators', 'wordlists']
>>> help(command)
>>> command(__dict__)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name '__dict__' is not defined
>>> dict(command)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
ValueError: dictionary update sequence element #0 has length 1; 2 is required
>>> __dict__(command)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name '__dict__' is not defined
>>> help(command)
>>> help(command)
KeyboardInterrupt
>>> command(__weakref__)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name '__weakref__' is not defined
>>> help(command)
>>> call(command)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'call' is not defined
>>> command(call)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'call' is not defined
>>> command(__call__)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name '__call__' is not defined
>>> command.call('show payloads')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'str' object has no attribute 'call'
>>> dir()
['LockedIterator', '__builtins__', '__doc__', '__name__', '__package__', 'boolify', 'command', 'exceptions', 'exploits', 'http_request', 'index_modules', 'interpreter', 'modules', 'multi', 'mute', 'payloads', 'print_error', 'print_info', 'print_status', 'print_success', 'print_table', 'printer', 'random_text', 'routersploit', 'sanitize_url', 'shell', 'ssh_interactive', 'test', 'tokenize', 'utils', 'validators', 'wordlists']
>>> interpreter.dir()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'module' object has no attribute 'dir'
>>> get.command.call
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'get' is not defined
>>> get_command.call
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'get_command' is not defined
>>> interpreter.get_command.call
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'module' object has no attribute 'get_command'
>>> interpreter.BaseInterpreter()
<routersploit.interpreter.BaseInterpreter object at 0x7fb92ac76510>
>>> interpreter.BaseInterpreter(dir)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: __init__() takes exactly 1 argument (2 given)
>>> interpreter.BaseInterpreter.setup()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: unbound method setup() must be called with BaseInterpreter instance as first argument (got nothing instead)
>>> interpreter.BaseInterpreter.parse_line.call("show payloads")
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'function' object has no attribute 'call'
>>> interpreter.BaseInterpreter.parse_line("show payloads")
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: unbound method parse_line() must be called with BaseInterpreter instance as first argument (got str instance instead)
>>> interpreter.BaseInterpreter("show payloads")
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: __init__() takes exactly 1 argument (2 given)
>>> interpreter.BaseInterpreter(__init__)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name '__init__' is not defined
>>> interpreter.BaseInterpreter.__init__
<unbound method BaseInterpreter.__init__>
>>> interpreter.BaseInterpreter.__init__."show payloads"
File "<stdin>", line 1
interpreter.BaseInterpreter.__init__."show payloads"
^
SyntaxError: invalid syntax
>>> interpreter.BaseInterpreter.__init__.show
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'function' object has no attribute 'show'
>>> interpreter.BaseInterpreter.__init__.parse_line
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'function' object has no attribute 'parse_line'
>>> interpreter.BaseInterpreter.parse_line
<unbound method BaseInterpreter.parse_line>
>>> interpreter.BaseInterpreter.parse_line(self,"showpayloads")
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'self' is not defined
>>> interpreter.BaseInterpreter.get_command_handler('show')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: unbound method get_command_handler() must be called with BaseInterpreter instance as first argument (got str instance instead)
>>> interpreter.BaseInterpreter(get_command_handler('show'))
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'get_command_handler' is not defined
>>> interpreter.BaseInterpreter().parse_line(self,"showpayloads")
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'self' is not defined
>>> interpreter.BaseInterpreter().parse_line(self,"showpayloads")
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'self' is not defined
>>> interpreter.BaseInterpreter().parse_line("showpayloads")
('showpayloads', '')
>>> interpreter.BaseInterpreter().parse_line("show payloads")
('show', 'payloads')
>>> interpreter.BaseInterpreter().parse_line("use scanners/autopwn")
('use', 'scanners/autopwn')
>>> interpreter.BaseInterpreter().parse_line("set target 192.168.1.1")
('set', 'target 192.168.1.1')
>>> interpreter.BaseInterpreter().parse_line("set target 192.168.1.1")
('set', 'target 192.168.1.1')
>>> interpreter.BaseInterpreter().parse_line("run")
('run', '')
>>> interpreter.BaseInterpreter().parse_line("show target")
('show', 'target')
>>> interpreter.BaseInterpreter().parse_line("0v")
('0v', '')
>>> interpreter.BaseInterpreter().parse_line("-v")
('-v', '')
>>> interpreter.BaseInterpreter().parse_line("exec uname")
('exec', 'uname')
>>> interpreter.BaseInterpreter().parse_line("ssh -p 666 root@70.170.54.53")
('ssh', '-p 666 root@70.170.54.53')
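
The successful calls above characterise parse_line completely: it splits the raw line on the first run of whitespace into a (command, args) pair, with args defaulting to the empty string. A minimal reimplementation of that observed behaviour (inferred from the outputs, not routersploit's actual source):

    def parse_line(line):
        parts = line.strip().split(None, 1)       # split once on whitespace
        command = parts[0] if parts else ""
        args = parts[1] if len(parts) > 1 else ""
        return command, args

    assert parse_line("show payloads") == ("show", "payloads")
    assert parse_line("run") == ("run", "")
    assert parse_line("set target 192.168.1.1") == ("set", "target 192.168.1.1")
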
>>> complete_command.call
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'complete_command' is not defined
>>> complete(command)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'complete' is not defined
>>> complete(command,0)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'complete' is not defined
>>> complete('show',0)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'complete' is not defined
>>> interpreter.BaseInterpreter().parse_line("ssh -p 666 root@70.170.54.53")
('ssh', '-p 666 root@70.170.54.53')
>>> print command
exit
>>> print history_file
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'history_file' is not defined
>>> print RoutersploitInterpreter.history_file
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'RoutersploitInterpreter' is not defined
>>> print RoutersploitInterpreter().history_file
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'RoutersploitInterpreter' is not defined
>>> print RoutersploitInterpreter(BaseInterpreter).history_file
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'RoutersploitInterpreter' is not defined
>>> print interpreter.RoutersploitInterpreter(BaseInterpreter).history_file
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'BaseInterpreter' is not defined
>>> print interpreter.RoutersploitInterpreter().history_file
/root/.rsf_history
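
history_file, together with the atexit and readline imports visible in dir(interpreter) earlier, suggests the standard readline persistence idiom; a sketch under that assumption (not routersploit's verified source):

    import atexit
    import readline

    history_file = "/root/.rsf_history"
    try:
        readline.read_history_file(history_file)  # restore previous session
    except IOError:
        pass                                      # first run: no history yet
    atexit.register(readline.write_history_file, history_file)
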
>>> run
KeyboardInterrupt
>>> RoutersploitInterpreter()._parse_prompt('show all')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'RoutersploitInterpreter' is not defined
>>> RoutersploitInterpreter().command_use('scanners/autopwn')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'RoutersploitInterpreter' is not defined
>>> RoutersploitInterpreter().BaseInterpreter().command_use('scanners/autopwn')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'RoutersploitInterpreter' is not defined
>>> interpreter.BaseInterpreter().command_use('scanners/autopwn')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'BaseInterpreter' object has no attribute 'command_use'
>>> utils.command_use('scanners/autopwn')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'module' object has no attribute 'command_use'
>>> utils.stop.after.command_use('scanners/autopwn')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'module' object has no attribute 'stop'
>>> utils.stop_after.command_use('scanners/autopwn')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'function' object has no attribute 'command_use'
>>> interpreter.BaseInterpreter()._show_all()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'BaseInterpreter' object has no attribute '_show_all'
>>> interpreter.BaseInterpreter()._show_modules()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'BaseInterpreter' object has no attribute '_show_modules'
>>> interpreter.utils._show_modules()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'module' object has no attribute '_show_modules'
>>> interpreter.utils.module_required._show_modules()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'function' object has no attribute '_show_modules'
>>> interpreter.utils.module_required()._show_modules()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: module_required() takes exactly 1 argument (0 given)
>>> interpreter.utils.module_required()._show_modules('root')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: module_required() takes exactly 1 argument (0 given)
>>> interpreter.utils.module_required()._show_modules('')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: module_required() takes exactly 1 argument (0 given)
>>> interpreter.utils.module_required()._show_modules(' ')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: module_required() takes exactly 1 argument (0 given)
>>> interpreter.utils.__show_modules('scanners')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'module' object has no attribute '__show_modules'
>>> interpreter.utils().__show_modules('scanners')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: 'module' object is not callable
>>> interpreter.__show_modules('scanners')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'module' object has no attribute '__show_modules'
>>> interpreter.@utils.module_required.__show_modules('scanners')
File "<stdin>", line 1
interpreter.@utils.module_required.__show_modules('scanners')
^
SyntaxError: invalid syntax
>>> interpreter.utils.module_required.__show_modules('scanners')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'function' object has no attribute '__show_modules'
>>> interpreter.utils.module_required.__show_modules('scanners'
...
...
...
KeyboardInterrupt
>>> var = interpreter.utils.module_required.__show_modules('scanners'
...
KeyboardInterrupt
>>> var = interpreter.utils.module_required.__show_modules('scanners')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'function' object has no attribute '__show_modules'
>>> self.__show_modules('scanners')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'self' is not defined
>>> _show_scanners()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name '_show_scanners' is not defined
>>> interpreter._show_scanners()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'module' object has no attribute '_show_scanners'
>>> interpreter.utils._show_scanners()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'module' object has no attribute '_show_scanners'
>>> interpreter.utils.print_info_show_scanners()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'module' object has no attribute 'print_info_show_scanners'
>>> interpreter.utils.print_info._show_scanners()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'function' object has no attribute '_show_scanners'
>>> _show_scanners(self, *args, **kwargs):
File "<stdin>", line 1
_show_scanners(self, *args, **kwargs):
^
SyntaxError: invalid syntax
>>> self.__show_modules('scanners')
File "<stdin>", line 1
self.__show_modules('scanners')
^
IndentationError: unexpected indent
>>> _show_scanners(self)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name '_show_scanners' is not defined
>>> _show_scanners(self,,)
File "<stdin>", line 1
_show_scanners(self,,)
^
SyntaxError: invalid syntax
>>> _show_scanners(self,'','')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name '_show_scanners' is not defined
>>> utils.module_required._show_devices
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'function' object has no attribute '_show_devices'
>>> utils.module_required._show_devices()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'function' object has no attribute '_show_devices'
>>> utils.module_required._show_devices(services)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'function' object has no attribute '_show_devices'
>>> utils.module_required._show_devices('devices')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'function' object has no attribute '_show_devices'
>>> interpreter.utils.module_required._show_devices('devices')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'function' object has no attribute '_show_devices'
>>> /root/Documents/routersploit/routersploit/interpreter.pyinterpreter.utils.module_required._show_devices('devices')
KeyboardInterrupt
>>> RoutersploitInterpreter.available_modules_completion('payloads')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'RoutersploitInterpreter' is not defined
>>> BaseInterpreter.available_modules_completion('payloads')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'BaseInterpreter' is not defined
>>> interpreter.available_modules_completion('payloads')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'module' object has no attribute 'available_modules_completion'
>>> dir()
['LockedIterator', '__builtins__', '__doc__', '__name__', '__package__', 'boolify', 'command', 'exceptions', 'exploits', 'http_request', 'index_modules', 'interpreter', 'modules', 'multi', 'mute', 'payloads', 'print_error', 'print_info', 'print_status', 'print_success', 'print_table', 'printer', 'random_text', 'routersploit', 'sanitize_url', 'shell', 'ssh_interactive', 'test', 'tokenize', 'utils', 'validators', 'wordlists']
>>> help()
Welcome to Python 2.7! This is the online help utility.
If this is your first time using Python, you should definitely check out
the tutorial on the Internet at http://docs.python.org/2.7/tutorial/.
Enter the name of any module, keyword, or topic to get help on writing
Python programs and using Python modules. To quit this help utility and
return to the interpreter, just type "quit".
To get a list of available modules, keywords, or topics, type "modules",
"keywords", or "topics". Each module also comes with a one-line summary
of what it does; to list the modules whose summaries contain a given word
such as "spam", type "modules spam".
help>
You are now leaving help and returning to the Python interpreter.
If you want to ask for help on a particular object directly from the
interpreter, you can type "help(object)". Executing "help('string')"
has the same effect as typing a particular string at the help> prompt.
>>> help(__)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name '__' is not defined
>>> help(_init__)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name '_init__' is not defined
>>> BaseInterpreter.available_modules_completion('payloads')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'BaseInterpreter' is not defined
>>> RoutersploitInterpreter(BaseInterpreter).available_modules_completion('payloads')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'RoutersploitInterpreter' is not defined
>>> RoutersploitInterpreter('payloads')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'RoutersploitInterpreter' is not defined
>>> help(RoutersploitInterpreter)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'RoutersploitInterpreter' is not defined
>>> dir(RoutersploitInterpreter)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'RoutersploitInterpreter' is not defined
>>> /root/Documents/routersploit/routersploit/interpreter.py
KeyboardInterrupt
>>> help(BaseInterpreter)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'BaseInterpreter' is not defined
>>> dir(BaseInterpreter)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'BaseInterpreter' is not defined
>>> dir(interpreter)
['BaseInterpreter', 'BasePayload', 'Counter', 'Exploit', 'GLOBAL_OPTS', 'PrinterThread', 'RoutersploitException', 'RoutersploitInterpreter', '__builtins__', '__doc__', '__file__', '__name__', '__package__', 'atexit', 'itertools', 'os', 'print_function', 'printer_queue', 'readline', 'sys', 'traceback', 'utils']
>>> help(interpreter.RoutersploitInterpreter)
>>> interpreter.RoutersploitInterpreter.start()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: unbound method start() must be called with RoutersploitInterpreter instance as first argument (got nothing instead)
>>> interpreter.RoutersploitInterpreter.start('show')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: unbound method start() must be called with RoutersploitInterpreter instance as first argument (got str instance instead)
>>> interpreter.RoutersploitInterpreter(start)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'start' is not defined
>>> RoutersploitInterpreter(start)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'RoutersploitInterpreter' is not defined
>>> help(RoutersploitInterpreter)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'RoutersploitInterpreter' is not defined
>>> RoutersploitInterpreter.start
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'RoutersploitInterpreter' is not defined
>>> RoutersploitInterpreter().start
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'RoutersploitInterpreter' is not defined
>>> RoutersploitInterpreter().start.()
File "<stdin>", line 1
RoutersploitInterpreter().start.()
^
SyntaxError: invalid syntax
>>> RoutersploitInterpreter.start.()
File "<stdin>", line 1
RoutersploitInterpreter.start.()
^
SyntaxError: invalid syntax
>>> RoutersploitInterpreter(BaseInterpreter).start.()
File "<stdin>", line 1
RoutersploitInterpreter(BaseInterpreter).start.()
^
SyntaxError: invalid syntax
>>> global_help
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'global_help' is not defined
>>> RoutersploitInterpreterglobal_help
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'RoutersploitInterpreterglobal_help' is not defined
>>> RoutersploitInterpreter.global_help
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'RoutersploitInterpreter' is not defined
>>> interpreter.RoutersploitInterpreter.global_help
'Global commands:\n help Print this help menu\n use <module> Select a module for usage\n exec <shell command> <args> Execute a command in a shell\n search <search term> Search for appropriate module\n exit Exit RouterSploit'
>>> interpreter.RoutersploitInterpreter.search payloads
File "<stdin>", line 1
interpreter.RoutersploitInterpreter.search payloads
^
SyntaxError: invalid syntax
>>> interpreter.RoutersploitInterpreter.search
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: type object 'RoutersploitInterpreter' has no attribute 'search'
>>> interpreter.RoutersploitInterpreter.show
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: type object 'RoutersploitInterpreter' has no attribute 'show'
>>> interpreter.RoutersploitInterpreter.use scanners/autopwn
File "<stdin>", line 1
interpreter.RoutersploitInterpreter.use scanners/autopwn
^
SyntaxError: invalid syntax
>>> interpreter.RoutersploitInterpreter.use
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: type object 'RoutersploitInterpreter' has no attribute 'use'
>>> interpreter.RoutersploitInterpreter.global_help use
File "<stdin>", line 1
interpreter.RoutersploitInterpreter.global_help use
^
SyntaxError: invalid syntax
>>> interpreter.RoutersploitInterpreter.commands()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: unbound method commands() must be called with RoutersploitInterpreter instance as first argument (got nothing instead)
>>> interpreter.RoutersploitInterpreter.commands(self)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'self' is not defined
>>> interpreter.RoutersploitInterpreter(commands)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'commands' is not defined
>>> interpreter.RoutersploitInterpreter('commands')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: __init__() takes exactly 1 argument (2 given)
>>> interpreter.RoutersploitInterpreter()
<routersploit.interpreter.RoutersploitInterpreter object at 0x7fb92ac80250>
>>> interpreter.BaseInterpreter.commands()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: unbound method commands() must be called with BaseInterpreter instance as first argument (got nothing instead)
>>> interpreter.BaseInterpreter(commands)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'commands' is not defined
>>> interpreter.BaseInterpreter(commands.())
File "<stdin>", line 1
interpreter.BaseInterpreter(commands.())
^
SyntaxError: invalid syntax
>>> interpreter.BaseInterpreter('commands')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: __init__() takes exactly 1 argument (2 given)
>>>
>>> interpreter.BaseInterpreter.init('commands')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: type object 'BaseInterpreter' has no attribute 'init'
>>> interpreter.BaseInterpreter.__init__('commands')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: unbound method __init__() must be called with BaseInterpreter instance as first argument (got str instance instead)
>>>
>>> interpreter.BaseInterpreter.commands()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: unbound method commands() must be called with BaseInterpreter instance as first argument (got nothing instead)
>>> interpreter.BaseInterpreter.commands
<unbound method BaseInterpreter.commands>
>>> interpreter.BaseInterpreter.commands
<unbound method BaseInterpreter.commands>
>>>
KeyboardInterrupt
>>>
KeyboardInterrupt
>>> var = interpreter.BaseInterpreter.commands
>>> print var
<unbound method BaseInterpreter.commands>
>>> var = interpreter.BaseInterpreter(self).commands
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'self' is not defined
>>> var = interpreter.BaseInterpreter(self).commands(self)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'self' is not defined
>>> var = interpreter.BaseInterpreter(self).commands()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'self' is not defined
>>> var = interpreter.BaseInterpreter().commands()
>>> var = interpreter.BaseInterpreter().commands()
>>> print var
[]
>>> uname
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'uname' is not defined
>>> interpreter.BaseInterpreter.commands
<unbound method BaseInterpreter.commands>
>>> interpreter.BaseInterpreter.commands()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: unbound method commands() must be called with BaseInterpreter instance as first argument (got nothing instead)
>>> interpreter.BaseInterpreter().commands()
[]
>>> interpreter.RoutersploitInterpreter().commands()
['back', 'check', 'exec', 'exit', 'exploit', 'help', 'run', 'search', 'set', 'setg', 'show', 'unsetg', 'use']
>>> interpreter.RoutersploitInterpreter().commands('sudo su')
['back', 'check', 'exec', 'exit', 'exploit', 'help', 'run', 'search', 'set', 'setg', 'show', 'unsetg', 'use']
>>> interpreter.RoutersploitInterpreter().commands('show payloads')
['back', 'check', 'exec', 'exit', 'exploit', 'help', 'run', 'search', 'set', 'setg', 'show', 'unsetg', 'use']
>>> interpreter.RoutersploitInterpreter().commands('use scanners/autopwn')
['back', 'check', 'exec', 'exit', 'exploit', 'help', 'run', 'search', 'set', 'setg', 'show', 'unsetg', 'use']
>>> interpreter.RoutersploitInterpreter().commands('set target 192.168.1.1')
['back', 'check', 'exec', 'exit', 'exploit', 'help', 'run', 'search', 'set', 'setg', 'show', 'unsetg', 'use']
>>> interpreter.RoutersploitInterpreter().commands('set port 80')
['back', 'check', 'exec', 'exit', 'exploit', 'help', 'run', 'search', 'set', 'setg', 'show', 'unsetg', 'use']
>>> run
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'run' is not defined
>>> interpreter.RoutersploitInterpreter().commands('run')
['back', 'check', 'exec', 'exit', 'exploit', 'help', 'run', 'search', 'set', 'setg', 'show', 'unsetg', 'use']
>>> interpreter.RoutersploitInterpreter().commands('help')
['back', 'check', 'exec', 'exit', 'exploit', 'help', 'run', 'search', 'set', 'setg', 'show', 'unsetg', 'use']
>>> license
Type license() to see the full license text
>>> license()
A. HISTORY OF THE SOFTWARE
==========================
Python was created in the early 1990s by Guido van Rossum at Stichting
Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands
as a successor of a language called ABC. Guido remains Python's
principal author, although it includes many contributions from others.
In 1995, Guido continued his work on Python at the Corporation for
National Research Initiatives (CNRI, see http://www.cnri.reston.va.us)
in Reston, Virginia where he released several versions of the
software.
In May 2000, Guido and the Python core development team moved to
BeOpen.com to form the BeOpen PythonLabs team. In October of the same
year, the PythonLabs team moved to Digital Creations, which became
Zope Corporation. In 2001, the Python Software Foundation (PSF, see
https://www.python.org/psf/) was formed, a non-profit organization
created specifically to own Python-related Intellectual Property.
Zope Corporation was a sponsoring member of the PSF.
All Python releases are Open Source (see http://www.opensource.org for
the Open Source Definition). Historically, most, but not all, Python
Hit Return for more, or q (and Return) to quit:
releases have also been GPL-compatible; the table below summarizes
the various releases.
Release Derived Year Owner GPL-
from compatible? (1)
0.9.0 thru 1.2 1991-1995 CWI yes
1.3 thru 1.5.2 1.2 1995-1999 CNRI yes
1.6 1.5.2 2000 CNRI no
2.0 1.6 2000 BeOpen.com no
1.6.1 1.6 2001 CNRI yes (2)
2.1 2.0+1.6.1 2001 PSF no
2.0.1 2.0+1.6.1 2001 PSF yes
2.1.1 2.1+2.0.1 2001 PSF yes
2.1.2 2.1.1 2002 PSF yes
2.1.3 2.1.2 2002 PSF yes
2.2 and above 2.1.1 2001-now PSF yes
Footnotes:
(1) GPL-compatible doesn't mean that we're distributing Python under
the GPL. All Python licenses, unlike the GPL, let you distribute
a modified version without making your changes open source. The
Hit Return for more, or q (and Return) to quit:
GPL-compatible licenses make it possible to combine Python with
other software that is released under the GPL; the others don't.
(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
because its license has a choice of law clause. According to
CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
is "not incompatible" with the GPL.
Thanks to the many outside volunteers who have worked under Guido's
direction to make these releases possible.
B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
===============================================================
PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
--------------------------------------------
1. This LICENSE AGREEMENT is between the Python Software Foundation
("PSF"), and the Individual or Organization ("Licensee") accessing and
otherwise using this software ("Python") in source or binary form and
its associated documentation.
Hit Return for more, or q (and Return) to quit:
2. Subject to the terms and conditions of this License Agreement, PSF hereby
grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
analyze, test, perform and/or display publicly, prepare derivative works,
distribute, and otherwise use Python alone or in any derivative version,
provided, however, that PSF's License Agreement and PSF's notice of copyright,
i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
2011, 2012, 2013, 2014, 2015, 2016, 2017 Python Software Foundation; All Rights
Reserved" are retained in Python alone or in any derivative version prepared by
Licensee.
3. In the event Licensee prepares a derivative work that is based on
or incorporates Python or any part thereof, and wants to make
the derivative work available to others as provided herein, then
Licensee hereby agrees to include in any such work a brief summary of
the changes made to Python.
4. PSF is making Python available to Licensee on an "AS IS"
basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.
Hit Return for more, or q (and Return) to quit:
5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
6. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.
7. Nothing in this License Agreement shall be deemed to create any
relationship of agency, partnership, or joint venture between PSF and
Licensee. This License Agreement does not grant permission to use PSF
trademarks or trade name in a trademark sense to endorse or promote
products or services of Licensee, or any third party.
8. By copying, installing or otherwise using Python, Licensee
agrees to be bound by the terms and conditions of this License
Agreement.
BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
-------------------------------------------
BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
Hit Return for more, or q (and Return) to quit:
1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
Individual or Organization ("Licensee") accessing and otherwise using
this software in source or binary form and its associated
documentation ("the Software").
2. Subject to the terms and conditions of this BeOpen Python License
Agreement, BeOpen hereby grants Licensee a non-exclusive,
royalty-free, world-wide license to reproduce, analyze, test, perform
and/or display publicly, prepare derivative works, distribute, and
otherwise use the Software alone or in any derivative version,
provided, however, that the BeOpen Python License is retained in the
Software, alone or in any derivative version prepared by Licensee.
3. BeOpen is making the Software available to Licensee on an "AS IS"
basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.
4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
Hit Return for more, or q (and Return) to quit: q
>>> dir(class)
File "<stdin>", line 1
dir(class)
^
SyntaxError: invalid syntax
>>> dir()
['LockedIterator', '__builtins__', '__doc__', '__name__', '__package__', 'boolify', 'command', 'exceptions', 'exploits', 'http_request', 'index_modules', 'interpreter', 'modules', 'multi', 'mute', 'payloads', 'print_error', 'print_info', 'print_status', 'print_success', 'print_table', 'printer', 'random_text', 'routersploit', 'sanitize_url', 'shell', 'ssh_interactive', 'test', 'tokenize', 'utils', 'validators', 'var', 'wordlists']
>>> help(command)
>>> help(command)
>>> help(command(__dict__))
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name '__dict__' is not defined
>>> __dict__(command)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name '__dict__' is not defined
>>> dict(command)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
ValueError: dictionary update sequence element #0 has length 1; 2 is required
>>> interpreter
<module 'routersploit.interpreter' from 'routersploit/interpreter.pyc'>
>>> printer interpreter
File "<stdin>", line 1
printer interpreter
^
SyntaxError: invalid syntax
>>> modules
<module 'routersploit.modules' from 'routersploit/modules/__init__.pyc'>
>>> dir()
['LockedIterator', '__builtins__', '__doc__', '__name__', '__package__', 'boolify', 'command', 'exceptions', 'exploits', 'http_request', 'index_modules', 'interpreter', 'modules', 'multi', 'mute', 'payloads', 'print_error', 'print_info', 'print_status', 'print_success', 'print_table', 'printer', 'random_text', 'routersploit', 'sanitize_url', 'shell', 'ssh_interactive', 'test', 'tokenize', 'utils', 'validators', 'var', 'wordlists']
>>> utils
<module 'routersploit.utils' from 'routersploit/utils/__init__.pyc'>
>>> ()
()
>>> routersploit.utils
<module 'routersploit.utils' from 'routersploit/utils/__init__.pyc'>
>>> routersploit.utils()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: 'module' object is not callable
>>> multi()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: multi() takes exactly 1 argument (0 given)
>>> multi.()
File "<stdin>", line 1
multi.()
^
SyntaxError: invalid syntax
>>> multi(self)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'self' is not defined
>>> dir(multi)
['__call__', '__class__', '__closure__', '__code__', '__defaults__', '__delattr__', '__dict__', '__doc__', '__format__', '__get__', '__getattribute__', '__globals__', '__hash__', '__init__', '__module__', '__name__', '__new__', '__reduce__', '__reduce_ex__', '__repr__', '__setattr__', '__sizeof__', '__str__', '__subclasshook__', 'func_closure', 'func_code', 'func_defaults', 'func_dict', 'func_doc', 'func_globals', 'func_name']
>>> globals(multi)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: globals() takes no arguments (1 given)
>>> vars(multi))
File "<stdin>", line 1
vars(multi))
^
SyntaxError: invalid syntax
>>> vars(multi)
{}
>>> __class__(multi))
File "<stdin>", line 1
__class__(multi))
^
SyntaxError: invalid syntax
>>> __list__(multi)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name '__list__' is not defined
>>> help(multi)
>>> __getattribute__(__hash__)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name '__getattribute__' is not defined
>>> help(__hash__)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name '__hash__' is not defined
>>> print(hash)
<built-in function hash>
>>> hash.()
File "<stdin>", line 1
hash.()
^
SyntaxError: invalid syntax
>>> hash()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: hash() takes exactly one argument (0 given)
>>> hash(self)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'self' is not defined
>>> dir()
['LockedIterator', '__builtins__', '__doc__', '__name__', '__package__', 'boolify', 'command', 'exceptions', 'exploits', 'http_request', 'index_modules', 'interpreter', 'modules', 'multi', 'mute', 'payloads', 'print_error', 'print_info', 'print_status', 'print_success', 'print_table', 'printer', 'random_text', 'routersploit', 'sanitize_url', 'shell', 'ssh_interactive', 'test', 'tokenize', 'utils', 'validators', 'var', 'wordlists']
>>> validators()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: 'module' object is not callable
>>> validators(dir)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: 'module' object is not callable
>>> dir(validators)
['OptionValidationError', '__builtins__', '__doc__', '__file__', '__name__', '__package__', 'address', 'boolify', 'choice', 'convert_ip', 'convert_port', 'integer', 'ipv4', 'socket', 'strtobool', 'url', 'urlparse']
>>> exceptions()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: 'module' object is not callable
>>> modules()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: 'module' object is not callable
>>> modules.()
File "<stdin>", line 1
modules.()
^
SyntaxError: invalid syntax
>>> module()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
NameError: name 'module' is not defined
>>> index_modules()
['exploits.routers.cisco.ucs_manager_rce', 'exploits.routers.cisco.video_surv_path_traversal', 'exploits.routers.cisco.ios_http_authorization_bypass', 'exploits.routers.cisco.catalyst_2960_rocem', 'exploits.routers.cisco.unified_multi_path_traversal', 'exploits.routers.cisco.firepower_management60_path_traversal', 'exploits.routers.cisco.dpc2420_info_disclosure', 'exploits.routers.cisco.secure_acs_bypass', 'exploits.routers.cisco.firepower_management60_rce', 'exploits.routers.cisco.ucm_info_disclosure', 'exploits.routers.huawei.e5331_mifi_info_disclosure', 'exploits.routers.huawei.hg530_hg520b_password_disclosure', 'exploits.routers.huawei.hg630a_default_creds', 'exploits.routers.huawei.hg866_password_change', 'exploits.routers.huawei.hg520_info_dislosure', 'exploits.routers.multi.heartbleed', 'exploits.routers.multi.misfortune_cookie', 'exploits.routers.multi.shellshock', 'exploits.routers.multi.tcp_32764_rce', 'exploits.routers.multi.tcp_32764_info_disclosure', 'exploits.routers.multi.rom0', 'exploits.routers.multi.ssh_auth_keys', 'exploits.routers.2wire.4011g_5012nv_path_traversal', 'exploits.routers.2wire.gateway_auth_bypass', 'exploits.routers.juniper.screenos_backdoor', 'exploits.routers.tplink.archer_c2_c20i_rce', 'exploits.routers.tplink.wdr740nd_wdr740n_path_traversal', 'exploits.routers.tplink.wdr740nd_wdr740n_backdoor', 'exploits.routers.tplink.wdr842nd_wdr842n_configure_disclosure', 'exploits.routers.netgear.multi_rce', 'exploits.routers.netgear.r7000_r6400_rce', 'exploits.routers.netgear.wnr500_612v3_jnr1010_2010_path_traversal', 'exploits.routers.netgear.n300_auth_bypass', 'exploits.routers.netgear.multi_password_disclosure-2017-5521', 'exploits.routers.netgear.prosafe_rce', 'exploits.routers.netgear.jnr1010_path_traversal', 'exploits.routers.netgear.dgn2200_dnslookup_cgi_rce', 'exploits.routers.netgear.dgn2200_ping_cgi_rce', 'exploits.routers.linksys.1500_2500_rce', 'exploits.routers.linksys.smartwifi_password_disclosure', 'exploits.routers.linksys.wap54gv3_rce', 'exploits.routers.linksys.wrt100_110_rce', 'exploits.routers.fortinet.fortigate_os_backdoor', 'exploits.routers.asus.infosvr_backdoor_rce', 'exploits.routers.asus.rt_n16_password_disclosure', 'exploits.routers.ipfire.ipfire_proxy_rce', 'exploits.routers.ipfire.ipfire_shellshock', 'exploits.routers.belkin.g_plus_info_disclosure', 'exploits.routers.belkin.g_n150_password_disclosure', 'exploits.routers.belkin.play_max_prce', 'exploits.routers.belkin.n150_path_traversal', 'exploits.routers.belkin.n750_rce', 'exploits.routers.belkin.auth_bypass', 'exploits.routers.bhu.bhu_urouter_rce', 'exploits.routers.dlink.dir_825_path_traversal', 'exploits.routers.dlink.dir_645_password_disclosure', 'exploits.routers.dlink.dir_300_320_600_615_info_disclosure', 'exploits.routers.dlink.dir_645_815_rce', 'exploits.routers.dlink.dsl_2640b_dns_change', 'exploits.routers.dlink.dsl_2730b_2780b_526b_dns_change', 'exploits.routers.dlink.dwr_932_info_disclosure', 'exploits.routers.dlink.dvg_n5402sp_path_traversal', 'exploits.routers.dlink.dsp_w110_rce', 'exploits.routers.dlink.dsl_2730_2750_path_traversal', 'exploits.routers.dlink.dir_300_320_615_auth_bypass', 'exploits.routers.dlink.dcs_930l_auth_rce', 'exploits.routers.dlink.dwl_3200ap_password_disclosure', 'exploits.routers.dlink.dsl_2740r_dns_change', 'exploits.routers.dlink.dwr_932b_backdoor', 'exploits.routers.dlink.dir_300_645_815_upnp_rce', 'exploits.routers.dlink.multi_hedwig_cgi_exec', 'exploits.routers.dlink.dir_815_850l_rce', 'exploits.routers.dlink.dgs_1510_add_user', 
'exploits.routers.dlink.multi_hnap_rce', 'exploits.routers.dlink.dir_300_600_rce', 'exploits.routers.dlink.dns_320l_327l_rce', 'exploits.routers.dlink.dsl_2750b_info_disclosure', 'exploits.routers.3com.officeconnect_rce', 'exploits.routers.3com.imc_info_disclosure', 'exploits.routers.3com.3cradsl72_info_disclosure', 'exploits.routers.3com.officeconnect_info_disclosure', 'exploits.routers.3com.ap8760_password_disclosure', 'exploits.routers.3com.imc_path_traversal', 'exploits.routers.netsys.multi_rce', 'exploits.routers.netcore.udp_53413_rce', 'exploits.routers.thomson.twg849_info_disclosure', 'exploits.routers.thomson.twg850_password_disclosure', 'exploits.routers.billion.5200w_rce', 'exploits.routers.billion.7700nr4_password_disclosure', 'exploits.routers.movistar.adsl_router_bhs_rta_path_traversal', 'exploits.routers.zyxel.p660hn-t_v1_rce', 'exploits.routers.zyxel.p660hn-t_v2_rce', 'exploits.routers.zyxel.d1000_rce', 'exploits.routers.zyxel.d1000_wifi_password_disclosure', 'exploits.routers.zyxel.zywall_usg_extract_hashes', 'exploits.routers.ubiquiti.airos_6_x', 'exploits.routers.comtrend.ct_5361t_password_disclosure', 'exploits.routers.asmax.ar_1004g_password_disclosure', 'exploits.routers.asmax.ar_804_gu_rce', 'exploits.routers.technicolor.tg784_authbypass', 'exploits.routers.technicolor.tc7200_password_disclosure', 'exploits.routers.technicolor.dwg855_authbypass', 'exploits.routers.technicolor.tc7200_password_disclosure_v2', 'exploits.routers.zte.f660_config_disclosure', 'exploits.routers.zte.zxv10_rce', 'exploits.routers.zte.f609_config_disclosure', 'exploits.routers.zte.f460_f660_backdoor', 'exploits.routers.zte.f6xx_default_root', 'exploits.routers.shuttle.915wm_dns_change', 'exploits.cameras.videoiq.videoiq_camera_path_traversal', 'exploits.cameras.multi.netwave_IP_camera', 'exploits.cameras.multi.jvc_vanderbilt_honeywell_path_traversal', 'exploits.cameras.multi.P2P_wificam_rce', 'exploits.cameras.multi.P2P_wificam_credential_disclosure', 'exploits.cameras.honeywell.hicc_1100pt_password_disclosure', 'exploits.cameras.dlink.dcs_930l_932l_auth_bypass', 'exploits.cameras.brickcom.corp_network_cameras_conf_disclosure', 'exploits.cameras.brickcom.users_cgi_cred_disclosure', 'exploits.cameras.grandstream.gxv3611hd_ip_camera_rce', 'exploits.cameras.siemens.CVMS2025_credentials_disclosure', 'exploits.misc.asus.b1m_projector_rce', 'exploits.misc.miele.pg8528_path_traversal', 'exploits.misc.wepresent.wipg1000_rce', 'payloads.mipsbe.reverse_tcp', 'payloads.mipsbe.bind_tcp', 'payloads.generic.netcat_reverse_tcp', 'payloads.generic.awk_bind_tcp', 'payloads.generic.awk_reverse_tcp', 'payloads.generic.netcat_bind_tcp', 'payloads.mipsle.reverse_tcp', 'payloads.mipsle.bind_tcp', 'payloads.armle.reverse_tcp', 'payloads.armle.bind_tcp', 'scanners.netgear_scan', 'scanners.cisco_scan', 'scanners.cameras_scan', 'scanners.asus_scan', 'scanners.technicolor_scan', 'scanners.linksys_scan', 'scanners.3com_scan', 'scanners.zte_scan', 'scanners.zyxel_scan', 'scanners.misc_scan', 'scanners.autopwn', 'scanners.movistar_scan', 'scanners.multi_scan', 'scanners.2wire_scan', 'scanners.grandstream_scan', 'scanners.shuttle_scan', 'scanners.netsys_scan', 'scanners.tplink_scan', 'scanners.comtrend_scan', 'scanners.routers_scan', 'scanners.thomson_scan', 'scanners.asmax_scan', 'scanners.ubiquiti_scan', 'scanners.belkin_scan', 'scanners.juniper_scan', 'scanners.netcore_scan', 'scanners.billion_scan', 'scanners.fortinet_scan', 'scanners.bhu_scan', 'scanners.ipfire_scan', 'scanners.dlink_scan', 'scanners.huawei_scan', 
'creds.ftp_default', 'creds.http_basic_bruteforce', 'creds.ssh_default', 'creds.telnet_bruteforce', 'creds.ftp_bruteforce', 'creds.http_digest_default', 'creds.telnet_default', 'creds.snmp_bruteforce', 'creds.http_form_default', 'creds.http_form_bruteforce', 'creds.ssh_bruteforce', 'creds.http_digest_bruteforce', 'creds.http_basic_default']
>>>
>>>
"""
| 69.099415
| 7,433
| 0.711768
|
e600e887120db9d6a51c6995bc84313eb0c0402b
| 738
|
py
|
Python
|
Problems/String/0344-ReverseString.py
|
s2503901ernie/LeetCode
|
cd40be2ddd2bc5e4e5630ec9068ae62504d527c2
|
[
"MIT"
] | null | null | null |
Problems/String/0344-ReverseString.py
|
s2503901ernie/LeetCode
|
cd40be2ddd2bc5e4e5630ec9068ae62504d527c2
|
[
"MIT"
] | null | null | null |
Problems/String/0344-ReverseString.py
|
s2503901ernie/LeetCode
|
cd40be2ddd2bc5e4e5630ec9068ae62504d527c2
|
[
"MIT"
] | null | null | null |
"""
Write a function that reverses a string. The input string is given as an array of characters s.
You must do this by modifying the input array in-place with O(1) extra memory.
Example 1:
Input: s = ["h","e","l","l","o"]
Output: ["o","l","l","e","h"]
Example 2:
Input: s = ["H","a","n","n","a","h"]
Output: ["h","a","n","n","a","H"]
Constraints:
1 <= s.length <= 10^5
s[i] is a printable ascii character.
"""
from typing import List
class Solution:
def reverseString(self, s: List[str]) -> None:
"""
Do not return anything, modify s in-place instead.
"""
left = 0
right = len(s) - 1
while left < right:
s[left], s[right] = s[right], s[left]
left += 1
right -= 1
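A quick driver for the two-pointer solution above (hypothetical; the LeetCode judge supplies the list and calls reverseString itself):
chars = list("hello")
Solution().reverseString(chars)   # reverses in place, returns None
print(chars)                      # ['o', 'l', 'l', 'e', 'h']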
| 21.085714
| 95
| 0.536585
|
ebe310fd4754bc7cd50064131e016ff83c3265ce
| 3,820
|
py
|
Python
|
skdesign/power/gof/cmh.py
|
louden/scikit-design
|
c51259fd7de43d6bdb4bb49e71136321768c2acc
|
[
"BSD-3-Clause"
] | 2
|
2016-10-31T04:22:50.000Z
|
2019-08-19T10:58:24.000Z
|
skdesign/power/gof/cmh.py
|
louden/scikit-design
|
c51259fd7de43d6bdb4bb49e71136321768c2acc
|
[
"BSD-3-Clause"
] | null | null | null |
skdesign/power/gof/cmh.py
|
louden/scikit-design
|
c51259fd7de43d6bdb4bb49e71136321768c2acc
|
[
"BSD-3-Clause"
] | 2
|
2018-06-06T10:58:15.000Z
|
2020-08-10T10:57:49.000Z
|
from skdesign.power.gof import GofBase
from skdesign.power import (is_in_0_1,
is_integer)
import math
import scipy.stats as stats
class CMH(GofBase):
""" Cochran-Mantel-Haenszel Test for Independance (Multiple Strata)
Cochran-Mantel-Haenszel Independance Test uses a Chi Squared test to test
for the independance in multiple strata.
Attributes:
n: The sample size
alpha: The :math:`\\alpha` level required by the hypothesis.
beta: The :math:`\\beta` level required by the hypothesis (equal to
:math:`1 - power`).
power: The power required by the hypothesis (equal to
:math:`1 - \\beta`).
p: A list of lists of lists of observed probabilities. The depth of
the inception is the number of strata
pi: The proportion of subjects in each strata
"""
def __init__(self, n=None, alpha=None, beta=None, power=None, p=None,
pi=None):
self._check_list(p, 'p')
if pi is None:
pi = [1 / float(len(p))] * len(p)
else:
self._check_list(pi, 'pi')
if n is not None:
is_integer(n, '`n` should be of type Int.')
self.n = n
num = 0
denom = 0
for stratum in range(len(p)):
rowsums = [sum(p[stratum][0]), sum(p[stratum][1])]
colsums = [p[stratum][0][0] + p[stratum][1][0],
p[stratum][0][1] + p[stratum][1][1]]
num += pi[stratum] * (p[stratum][0][0] - rowsums[0] * colsums[0])
denom += (pi[stratum] * rowsums[0] * rowsums[1] *
colsums[0] * colsums[1])
self.delta = num / math.sqrt(denom)
self.n = n
# Set remaining variables.
super(CMH, self).__init__(alpha=alpha, beta=beta, power=power)
def calculate(self):
if self.n is None:
self._set_default_alpha()
self._set_default_power()
self._calculate_n_known()
self._calculate_power_known()
elif self.power is None:
self._set_default_alpha()
self._calculate_power_known()
elif self.alpha is None:
self._calculate_alpha_known()
def _calculate_n_known(self):
""" Calculate n
        This is an internal helper method.
"""
distribution = stats.norm()
z_alpha = distribution.ppf(1 - self.alpha / 2.0)
z_beta = distribution.ppf(1 - self.beta)
n = (z_alpha + z_beta)**2 / self.delta**2
self.n = math.ceil(n)
def _calculate_alpha_known(self):
""" Calculate :math:`\\alpha`
        This is an internal helper method.
"""
distribution = stats.norm()
z_beta = distribution.ppf(1 - self.beta)
z_alpha = math.sqrt(self.n) * self.delta - z_beta
self.alpha = (1 - distribution.cdf(z_alpha)) * 2.0
def _calculate_power_known(self):
""" Calculate power
        This is an internal helper method.
"""
distribution = stats.norm()
z_alpha = distribution.ppf(1 - self.alpha / 2.0)
z_beta = math.sqrt(self.n) * self.delta - z_alpha
self.beta = (1 - stats.norm.cdf(z_beta))
self.power = 1 - self.beta
def _check_list(self, lst, label):
""" Recursively check the lists in lst """
if isinstance(lst, list):
for values in lst:
if isinstance(values, list):
self._check_list(values, label=label)
else:
                    is_in_0_1(values,
                              ('All values of ' + label +
                               ' should be in (0, 1).'))
else:
raise ValueError("`p` must be a list of lists of numerics")
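A usage sketch for the class above, with made-up cell probabilities for two strata; it assumes the surrounding skdesign package supplies GofBase's default alpha/beta handling:
# Two strata, each a 2x2 table of cell probabilities; equal stratum weights.
p = [[[0.20, 0.30],
      [0.10, 0.40]],
     [[0.25, 0.25],
      [0.25, 0.25]]]
study = CMH(alpha=0.05, power=0.8, p=p)
study.calculate()   # solves for the sample size at the requested alpha/power
print(study.n)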
| 33.80531
| 77
| 0.551832
|
a37cfd876e6095ce928e64cf8b84d2107d3a1b40
| 3,963
|
py
|
Python
|
fluent_comments/forms/helper.py
|
susansan/django-fluent-comments
|
74c2d682cc30a03695667d92a1caa74532b7af59
|
[
"Apache-2.0"
] | null | null | null |
fluent_comments/forms/helper.py
|
susansan/django-fluent-comments
|
74c2d682cc30a03695667d92a1caa74532b7af59
|
[
"Apache-2.0"
] | null | null | null |
fluent_comments/forms/helper.py
|
susansan/django-fluent-comments
|
74c2d682cc30a03695667d92a1caa74532b7af59
|
[
"Apache-2.0"
] | 1
|
2019-07-07T23:53:43.000Z
|
2019-07-07T23:53:43.000Z
|
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Button, Submit
from crispy_forms.utils import TEMPLATE_PACK
from django import forms
from django.forms.widgets import Input
from django.utils.translation import ugettext_lazy as _
from django_comments import get_form_target
from fluent_comments import appsettings
class CommentFormHelper(FormHelper):
"""
The django-crispy-forms configuration that handles form appearance.
The default is configured to show bootstrap forms nicely.
"""
form_tag = False # we need to define the form_tag
form_id = 'comment-form-ID'
form_class = 'js-comments-form {0}'.format(appsettings.FLUENT_COMMENTS_FORM_CSS_CLASS)
label_class = appsettings.FLUENT_COMMENTS_LABEL_CSS_CLASS
field_class = appsettings.FLUENT_COMMENTS_FIELD_CSS_CLASS
render_unmentioned_fields = True # like honeypot and security_hash
BASE_FIELDS_TOP = ('content_type', 'object_pk', 'timestamp', 'security_hash')
BASE_FIELDS_END = ('honeypot',)
BASE_FIELDS = BASE_FIELDS_TOP + BASE_FIELDS_END
if appsettings.USE_THREADEDCOMMENTS:
BASE_FIELDS_TOP += ('parent',)
@property
def form_action(self):
return get_form_target() # reads get_form_target from COMMENTS_APP
def __init__(self, form=None):
super(CommentFormHelper, self).__init__(form=form)
if form is not None:
# When using the helper like this, it could generate all fields.
self.form_id = 'comment-form-{0}'.format(form.target_object.pk)
self.attrs = {
'data-object-id': form.target_object.pk,
}
class CompactLabelsCommentFormHelper(CommentFormHelper):
"""
Compact labels in the form, show them as placeholder text instead.
.. note::
Make sure that the :attr:`layout` attribute is defined and
it has fields added to it, otherwise the placeholders don't appear.
The text input can easily be resized using CSS like:
.. code-block: css
@media only screen and (min-width: 768px) {
form.comments-form input.form-control {
width: 50%;
}
}
"""
form_class = CommentFormHelper.form_class.replace('form-horizontal', 'form-vertical') + ' comments-form-compact'
label_class = 'sr-only'
field_class = ''
def render_layout(self, form, context, template_pack=TEMPLATE_PACK):
"""
Copy any field label to the ``placeholder`` attribute.
Note, this method is called when :attr:`layout` is defined.
"""
# Writing the label values into the field placeholders.
# This is done at rendering time, so the Form.__init__() could update any labels before.
# Django 1.11 no longer lets EmailInput or URLInput inherit from TextInput,
# so checking for `Input` instead while excluding `HiddenInput`.
for field in form.fields.values():
if field.label and \
isinstance(field.widget, (Input, forms.Textarea)) and \
not isinstance(field.widget, forms.HiddenInput):
field.widget.attrs['placeholder'] = u"{0}:".format(field.label)
return super(CompactLabelsCommentFormHelper, self).render_layout(form, context, template_pack=template_pack)
class SubmitButton(Submit):
"""
The submit button to add to the layout.
    Note: the ``name=post`` is mandatory; it lets the comments view tell a
    "post" submission apart from a "preview" one.
"""
def __init__(self, text=_("Post Comment"), **kwargs):
super(SubmitButton, self).__init__(name='post', value=text, **kwargs)
class PreviewButton(Button):
"""
The preview button to add to the layout.
    Note: the ``name=preview`` attribute is what lets the comments view tell
    a "preview" submission apart from a "post" one.
"""
input_type = 'submit'
def __init__(self, text=_("Preview"), **kwargs):
kwargs.setdefault('css_class', 'btn-default')
super(PreviewButton, self).__init__(name='preview', value=text, **kwargs)
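A layout sketch using the helpers defined above; the Layout import is standard django-crispy-forms, while the field names are hypothetical. Setting a layout matters here because CompactLabelsCommentFormHelper only copies labels into placeholders when render_layout runs:
from crispy_forms.layout import Layout
helper = CompactLabelsCommentFormHelper()
helper.layout = Layout(
    'name', 'email', 'comment',  # hypothetical comment-form field names
    SubmitButton(),
    PreviewButton(),
)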
| 36.694444
| 116
| 0.680545
|
77ca0c62cbc24b9185a0ada2b84f173e2552be32
| 14,339
|
py
|
Python
|
jax/experimental/stax.py
|
JaeHunRo/jax
|
3898033e953297af8483ab5ef2e38f7651abc0d7
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
jax/experimental/stax.py
|
JaeHunRo/jax
|
3898033e953297af8483ab5ef2e38f7651abc0d7
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
jax/experimental/stax.py
|
JaeHunRo/jax
|
3898033e953297af8483ab5ef2e38f7651abc0d7
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Stax is a small but flexible neural net specification library from scratch.
For an example of its use, see examples/resnet50.py.
"""
import functools
import itertools
import operator as op
import numpy as onp
from jax import lax
from jax import random
import jax.numpy as np
from jax.nn import (relu, log_softmax, softmax, softplus, sigmoid, elu,
leaky_relu, selu, gelu, normalize)
from jax.nn.initializers import glorot_normal, normal, ones, uniform, zeros
from jax.ops import index_update
# aliases for backwards compatibility
glorot = glorot_normal
randn = normal
logsoftmax = log_softmax
# Following the convention used in Keras and tf.layers, we use CamelCase for the
# names of layer constructors, like Conv and Relu, while using snake_case for
# other functions, like lax.conv and relu.
# Each layer constructor function returns an (init_fun, apply_fun) pair, where
# init_fun: takes an rng key and an input shape and returns an
# (output_shape, params) pair,
# apply_fun: takes params, inputs, and an rng key and applies the layer.
def Dense(out_dim, W_init=glorot_normal(), b_init=normal()):
"""Layer constructor function for a dense (fully-connected) layer."""
def init_fun(rng, input_shape):
output_shape = input_shape[:-1] + (out_dim,)
k1, k2 = random.split(rng)
W, b = W_init(k1, (input_shape[-1], out_dim)), b_init(k2, (out_dim,))
return output_shape, (W, b)
def apply_fun(params, inputs, **kwargs):
W, b = params
return np.dot(inputs, W) + b
return init_fun, apply_fun
def GeneralConv(dimension_numbers, out_chan, filter_shape,
strides=None, padding='VALID', W_init=None,
b_init=normal(1e-6)):
"""Layer construction function for a general convolution layer."""
lhs_spec, rhs_spec, out_spec = dimension_numbers
one = (1,) * len(filter_shape)
strides = strides or one
W_init = W_init or glorot_normal(rhs_spec.index('I'), rhs_spec.index('O'))
def init_fun(rng, input_shape):
filter_shape_iter = iter(filter_shape)
kernel_shape = [out_chan if c == 'O' else
input_shape[lhs_spec.index('C')] if c == 'I' else
next(filter_shape_iter) for c in rhs_spec]
output_shape = lax.conv_general_shape_tuple(
input_shape, kernel_shape, strides, padding, dimension_numbers)
bias_shape = [out_chan if c == 'C' else 1 for c in out_spec]
bias_shape = tuple(itertools.dropwhile(lambda x: x == 1, bias_shape))
k1, k2 = random.split(rng)
W, b = W_init(k1, kernel_shape), b_init(k2, bias_shape)
return output_shape, (W, b)
def apply_fun(params, inputs, **kwargs):
W, b = params
return lax.conv_general_dilated(inputs, W, strides, padding, one, one,
dimension_numbers=dimension_numbers) + b
return init_fun, apply_fun
Conv = functools.partial(GeneralConv, ('NHWC', 'HWIO', 'NHWC'))
def GeneralConvTranspose(dimension_numbers, out_chan, filter_shape,
strides=None, padding='VALID', W_init=None,
b_init=normal(1e-6)):
"""Layer construction function for a general transposed-convolution layer."""
lhs_spec, rhs_spec, out_spec = dimension_numbers
one = (1,) * len(filter_shape)
strides = strides or one
W_init = W_init or glorot_normal(rhs_spec.index('I'), rhs_spec.index('O'))
def init_fun(rng, input_shape):
filter_shape_iter = iter(filter_shape)
kernel_shape = [out_chan if c == 'O' else
input_shape[lhs_spec.index('C')] if c == 'I' else
next(filter_shape_iter) for c in rhs_spec]
output_shape = lax.conv_transpose_shape_tuple(
input_shape, kernel_shape, strides, padding, dimension_numbers)
bias_shape = [out_chan if c == 'C' else 1 for c in out_spec]
bias_shape = tuple(itertools.dropwhile(lambda x: x == 1, bias_shape))
k1, k2 = random.split(rng)
W, b = W_init(k1, kernel_shape), b_init(k2, bias_shape)
return output_shape, (W, b)
def apply_fun(params, inputs, **kwargs):
W, b = params
return lax.conv_transpose(inputs, W, strides, padding,
dimension_numbers=dimension_numbers) + b
return init_fun, apply_fun
Conv1DTranspose = functools.partial(GeneralConvTranspose, ('NHC', 'HIO', 'NHC'))
ConvTranspose = functools.partial(GeneralConvTranspose,
('NHWC', 'HWIO', 'NHWC'))
def BatchNorm(axis=(0, 1, 2), epsilon=1e-5, center=True, scale=True,
beta_init=zeros, gamma_init=ones):
"""Layer construction function for a batch normalization layer."""
_beta_init = lambda rng, shape: beta_init(rng, shape) if center else ()
_gamma_init = lambda rng, shape: gamma_init(rng, shape) if scale else ()
axis = (axis,) if np.isscalar(axis) else axis
def init_fun(rng, input_shape):
shape = tuple(d for i, d in enumerate(input_shape) if i not in axis)
k1, k2 = random.split(rng)
beta, gamma = _beta_init(k1, shape), _gamma_init(k2, shape)
return input_shape, (beta, gamma)
def apply_fun(params, x, **kwargs):
beta, gamma = params
# TODO(phawkins): np.expand_dims should accept an axis tuple.
# (https://github.com/numpy/numpy/issues/12290)
ed = tuple(None if i in axis else slice(None) for i in range(np.ndim(x)))
beta = beta[ed]
gamma = gamma[ed]
z = normalize(x, axis, epsilon=epsilon)
if center and scale: return gamma * z + beta
if center: return z + beta
if scale: return gamma * z
return z
return init_fun, apply_fun
def Embedding(vocab_size,
embedding_size,
padding_idx=None,
embedding_init=uniform()):
"""Layer construction function for an embedding layer."""
def init_fun(rng, input_shape):
embedding_shape = (vocab_size, embedding_size)
embedding_table = embedding_init(rng, embedding_shape)
if padding_idx is not None:
embedding_table = index_update(embedding_table, padding_idx, 0.)
output_shape = input_shape + (embedding_size,)
return output_shape, (embedding_table,)
def apply_fun(params, inputs, **kwargs):
embedding_table = params[0]
return embedding_table[inputs]
return init_fun, apply_fun
def elementwise(fun, **fun_kwargs):
"""Layer that applies a scalar function elementwise on its inputs."""
init_fun = lambda rng, input_shape: (input_shape, ())
apply_fun = lambda params, inputs, **kwargs: fun(inputs, **fun_kwargs)
return init_fun, apply_fun
Tanh = elementwise(np.tanh)
Relu = elementwise(relu)
Exp = elementwise(np.exp)
LogSoftmax = elementwise(log_softmax, axis=-1)
Softmax = elementwise(softmax, axis=-1)
Softplus = elementwise(softplus)
Sigmoid = elementwise(sigmoid)
Elu = elementwise(elu)
LeakyRelu = elementwise(leaky_relu)
Selu = elementwise(selu)
Gelu = elementwise(gelu)
def _pooling_layer(reducer, init_val, rescaler=None):
def PoolingLayer(window_shape, strides=None, padding='VALID', spec=None):
"""Layer construction function for a pooling layer."""
strides = strides or (1,) * len(window_shape)
rescale = rescaler(window_shape, strides, padding) if rescaler else None
if spec is None:
non_spatial_axes = 0, len(window_shape) + 1
else:
non_spatial_axes = spec.index('N'), spec.index('C')
for i in sorted(non_spatial_axes):
window_shape = window_shape[:i] + (1,) + window_shape[i:]
strides = strides[:i] + (1,) + strides[i:]
def init_fun(rng, input_shape):
out_shape = lax.reduce_window_shape_tuple(input_shape, window_shape,
strides, padding)
return out_shape, ()
def apply_fun(params, inputs, **kwargs):
out = lax.reduce_window(inputs, init_val, reducer, window_shape,
strides, padding)
return rescale(out, inputs, spec) if rescale else out
return init_fun, apply_fun
return PoolingLayer
MaxPool = _pooling_layer(lax.max, -np.inf)
SumPool = _pooling_layer(lax.add, 0.)
def _normalize_by_window_size(dims, strides, padding):
def rescale(outputs, inputs, spec):
if spec is None:
non_spatial_axes = 0, inputs.ndim - 1
else:
non_spatial_axes = spec.index('N'), spec.index('C')
spatial_shape = tuple(inputs.shape[i]
for i in range(inputs.ndim)
if i not in non_spatial_axes)
one = np.ones(spatial_shape, dtype=inputs.dtype)
window_sizes = lax.reduce_window(one, 0., lax.add, dims, strides, padding)
for i in sorted(non_spatial_axes):
window_sizes = np.expand_dims(window_sizes, i)
return outputs / window_sizes
return rescale
AvgPool = _pooling_layer(lax.add, 0., _normalize_by_window_size)
def Flatten():
"""Layer construction function for flattening all but the leading dim."""
def init_fun(rng, input_shape):
output_shape = input_shape[0], functools.reduce(op.mul, input_shape[1:], 1)
return output_shape, ()
def apply_fun(params, inputs, **kwargs):
return np.reshape(inputs, (inputs.shape[0], -1))
return init_fun, apply_fun
Flatten = Flatten()
def Identity():
"""Layer construction function for an identity layer."""
init_fun = lambda rng, input_shape: (input_shape, ())
apply_fun = lambda params, inputs, **kwargs: inputs
return init_fun, apply_fun
Identity = Identity()
def FanOut(num):
"""Layer construction function for a fan-out layer."""
init_fun = lambda rng, input_shape: ([input_shape] * num, ())
apply_fun = lambda params, inputs, **kwargs: [inputs] * num
return init_fun, apply_fun
def FanInSum():
"""Layer construction function for a fan-in sum layer."""
init_fun = lambda rng, input_shape: (input_shape[0], ())
apply_fun = lambda params, inputs, **kwargs: sum(inputs)
return init_fun, apply_fun
FanInSum = FanInSum()
def FanInConcat(axis=-1):
"""Layer construction function for a fan-in concatenation layer."""
def init_fun(rng, input_shape):
ax = axis % len(input_shape[0])
concat_size = sum(shape[ax] for shape in input_shape)
out_shape = input_shape[0][:ax] + (concat_size,) + input_shape[0][ax+1:]
return out_shape, ()
def apply_fun(params, inputs, **kwargs):
return np.concatenate(inputs, axis)
return init_fun, apply_fun
def Dropout(rate, mode='train'):
"""Layer construction function for a dropout layer with given rate."""
def init_fun(rng, input_shape):
return input_shape, ()
def apply_fun(params, inputs, **kwargs):
rng = kwargs.get('rng', None)
if rng is None:
msg = ("Dropout layer requires apply_fun to be called with a PRNG key "
"argument. That is, instead of `apply_fun(params, inputs)`, call "
"it like `apply_fun(params, inputs, key)` where `key` is a "
"jax.random.PRNGKey value.")
raise ValueError(msg)
if mode == 'train':
keep = random.bernoulli(rng, rate, inputs.shape)
return np.where(keep, inputs / rate, 0)
else:
return inputs
return init_fun, apply_fun
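# Illustrative call pattern (not part of the original source): Dropout's
# apply_fun takes the key through the `rng` keyword, e.g.
#   init_fun, apply_fun = Dropout(0.9)              # keep probability 0.9
#   out = apply_fun((), inputs, rng=random.PRNGKey(0))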
# Composing layers via combinators
def serial(*layers):
"""Combinator for composing layers in serial.
Args:
*layers: a sequence of layers, each an (init_fun, apply_fun) pair.
Returns:
A new layer, meaning an (init_fun, apply_fun) pair, representing the serial
composition of the given sequence of layers.
"""
nlayers = len(layers)
init_funs, apply_funs = zip(*layers)
def init_fun(rng, input_shape):
params = []
for init_fun in init_funs:
rng, layer_rng = random.split(rng)
input_shape, param = init_fun(layer_rng, input_shape)
params.append(param)
return input_shape, params
def apply_fun(params, inputs, **kwargs):
rng = kwargs.pop('rng', None)
rngs = random.split(rng, nlayers) if rng is not None else (None,) * nlayers
for fun, param, rng in zip(apply_funs, params, rngs):
inputs = fun(param, inputs, rng=rng, **kwargs)
return inputs
return init_fun, apply_fun
def parallel(*layers):
"""Combinator for composing layers in parallel.
The layer resulting from this combinator is often used with the FanOut and
FanInSum layers.
Args:
*layers: a sequence of layers, each an (init_fun, apply_fun) pair.
Returns:
A new layer, meaning an (init_fun, apply_fun) pair, representing the
parallel composition of the given sequence of layers. In particular, the
returned layer takes a sequence of inputs and returns a sequence of outputs
with the same length as the argument `layers`.
"""
nlayers = len(layers)
init_funs, apply_funs = zip(*layers)
def init_fun(rng, input_shape):
rngs = random.split(rng, nlayers)
return zip(*[init(rng, shape) for init, rng, shape
in zip(init_funs, rngs, input_shape)])
def apply_fun(params, inputs, **kwargs):
rng = kwargs.pop('rng', None)
rngs = random.split(rng, nlayers) if rng is not None else (None,) * nlayers
return [f(p, x, rng=r, **kwargs) for f, p, x, r in zip(apply_funs, params, inputs, rngs)]
return init_fun, apply_fun
def shape_dependent(make_layer):
"""Combinator to delay layer constructor pair until input shapes are known.
Args:
make_layer: a one-argument function that takes an input shape as an argument
(a tuple of positive integers) and returns an (init_fun, apply_fun) pair.
Returns:
A new layer, meaning an (init_fun, apply_fun) pair, representing the same
layer as returned by `make_layer` but with its construction delayed until
input shapes are known.
"""
def init_fun(rng, input_shape):
return make_layer(input_shape)[0](rng, input_shape)
def apply_fun(params, inputs, **kwargs):
return make_layer(inputs.shape)[1](params, inputs, **kwargs)
return init_fun, apply_fun
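An end-to-end sketch of the (init_fun, apply_fun) convention described at the top of the file, using only names defined above; the shapes are illustrative and -1 marks the batch dimension:
init_fun, apply_fun = serial(Dense(128), Relu, Dense(10), LogSoftmax)
rng = random.PRNGKey(0)
out_shape, params = init_fun(rng, (-1, 784))  # out_shape == (-1, 10)
inputs = np.zeros((32, 784))
log_probs = apply_fun(params, inputs)         # shape (32, 10)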
| 37.933862
| 93
| 0.688123
|
65e9987db1356a3edb16d14e1e8b7ab614729859
| 1,120
|
py
|
Python
|
PythonExercicios/ex059.py
|
Luis-Emanuel/Python
|
92936dfb005b9755a53425d16c3ff54119eebe78
|
[
"MIT"
] | null | null | null |
PythonExercicios/ex059.py
|
Luis-Emanuel/Python
|
92936dfb005b9755a53425d16c3ff54119eebe78
|
[
"MIT"
] | null | null | null |
PythonExercicios/ex059.py
|
Luis-Emanuel/Python
|
92936dfb005b9755a53425d16c3ff54119eebe78
|
[
"MIT"
] | null | null | null |
# Create a program that reads two values and shows a menu. The program must
# perform the operation requested in each case:
# [1] add  [2] multiply  [3] larger  [4] new numbers  [5] quit the program.
num1 = int(input('Type a number: '))
num2 = int(input('Type another number: '))
esc = 0
while esc != 5:
    print('''-=-=-=--=-=-=-=-=-=-=--=-=-=
    [1] ADD
    [2] MULTIPLY
    [3] LARGER
    [4] NEW NUMBERS
    [5] QUIT THE PROGRAM''')
    esc = int(input('>>>> What is your choice: '))
    if esc == 1:
        print('The sum of {} + {} = {}'.format(num1, num2, num1 + num2))
    elif esc == 2:
        print('The product of {} x {} = {}'.format(num1, num2, num1 * num2))
    elif esc == 3:
        if num1 > num2:
            print('Between {} and {} the larger is {}'.format(num1, num2, num1))
        else:
            print('Between {} and {} the larger is {}'.format(num1, num2, num2))
    elif esc == 4:
        num1 = int(input('Type the new value: '))
        num2 = int(input('Type the other value: '))
    elif esc == 5:
        print('End')
    else:
        print('___INVALID OPTION___')
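As a design note, the if/elif chain above could be collapsed into a dispatch dictionary. A minimal sketch of that refactor (illustrative, not part of the exercise):
ops = {
    1: lambda a, b: a + b,   # add
    2: lambda a, b: a * b,   # multiply
    3: lambda a, b: max(a, b),  # larger
}
a, b = 4, 7            # example operands
print(ops[2](a, b))    # 28, same result as menu option [2]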
| 38.62069
| 122
| 0.534821
|
2d03fda39dec260f73b7dda63802a96d359722dc
| 51,662
|
py
|
Python
|
testing/test_collection.py
|
graingert/pytest
|
89dcfbf293802b6efacc5aea2893f9fec03787bc
|
[
"MIT"
] | null | null | null |
testing/test_collection.py
|
graingert/pytest
|
89dcfbf293802b6efacc5aea2893f9fec03787bc
|
[
"MIT"
] | 60
|
2020-12-07T03:10:15.000Z
|
2022-03-28T03:02:29.000Z
|
testing/test_collection.py
|
graingert/pytest
|
89dcfbf293802b6efacc5aea2893f9fec03787bc
|
[
"MIT"
] | 1
|
2020-12-09T02:24:06.000Z
|
2020-12-09T02:24:06.000Z
|
import os
import pprint
import shutil
import sys
import textwrap
from pathlib import Path
from typing import List
import py.path
import pytest
from _pytest.config import ExitCode
from _pytest.fixtures import FixtureRequest
from _pytest.main import _in_venv
from _pytest.main import Session
from _pytest.monkeypatch import MonkeyPatch
from _pytest.nodes import Item
from _pytest.pathlib import symlink_or_skip
from _pytest.pytester import HookRecorder
from _pytest.pytester import Pytester
def ensure_file(file_path: Path) -> Path:
"""Ensure that file exists"""
file_path.parent.mkdir(parents=True, exist_ok=True)
file_path.touch(exist_ok=True)
return file_path
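# Usage note (illustrative, not part of the original file):
#   ensure_file(Path("pkg/sub/test_x.py")) creates the missing "pkg/sub"
#   directories, touches an empty file, and returns the same Path, so tests
#   can write to it immediately.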
class TestCollector:
def test_collect_versus_item(self) -> None:
from pytest import Collector
from pytest import Item
assert not issubclass(Collector, Item)
assert not issubclass(Item, Collector)
def test_check_equality(self, pytester: Pytester) -> None:
modcol = pytester.getmodulecol(
"""
def test_pass(): pass
def test_fail(): assert 0
"""
)
fn1 = pytester.collect_by_name(modcol, "test_pass")
assert isinstance(fn1, pytest.Function)
fn2 = pytester.collect_by_name(modcol, "test_pass")
assert isinstance(fn2, pytest.Function)
assert fn1 == fn2
assert fn1 != modcol
assert hash(fn1) == hash(fn2)
fn3 = pytester.collect_by_name(modcol, "test_fail")
assert isinstance(fn3, pytest.Function)
assert not (fn1 == fn3)
assert fn1 != fn3
for fn in fn1, fn2, fn3:
assert isinstance(fn, pytest.Function)
assert fn != 3 # type: ignore[comparison-overlap]
assert fn != modcol
assert fn != [1, 2, 3] # type: ignore[comparison-overlap]
assert [1, 2, 3] != fn # type: ignore[comparison-overlap]
assert modcol != fn
assert pytester.collect_by_name(modcol, "doesnotexist") is None
def test_getparent(self, pytester: Pytester) -> None:
modcol = pytester.getmodulecol(
"""
class TestClass:
def test_foo(self):
pass
"""
)
cls = pytester.collect_by_name(modcol, "TestClass")
assert isinstance(cls, pytest.Class)
instance = pytester.collect_by_name(cls, "()")
assert isinstance(instance, pytest.Instance)
fn = pytester.collect_by_name(instance, "test_foo")
assert isinstance(fn, pytest.Function)
module_parent = fn.getparent(pytest.Module)
assert module_parent is modcol
function_parent = fn.getparent(pytest.Function)
assert function_parent is fn
class_parent = fn.getparent(pytest.Class)
assert class_parent is cls
def test_getcustomfile_roundtrip(self, pytester: Pytester) -> None:
hello = pytester.makefile(".xxx", hello="world")
pytester.makepyfile(
conftest="""
import pytest
class CustomFile(pytest.File):
pass
def pytest_collect_file(path, parent):
if path.ext == ".xxx":
return CustomFile.from_parent(fspath=path, parent=parent)
"""
)
node = pytester.getpathnode(hello)
assert isinstance(node, pytest.File)
assert node.name == "hello.xxx"
nodes = node.session.perform_collect([node.nodeid], genitems=False)
assert len(nodes) == 1
assert isinstance(nodes[0], pytest.File)
def test_can_skip_class_with_test_attr(self, pytester: Pytester) -> None:
"""Assure test class is skipped when using `__test__=False` (See #2007)."""
pytester.makepyfile(
"""
class TestFoo(object):
__test__ = False
def __init__(self):
pass
def test_foo():
assert True
"""
)
result = pytester.runpytest()
result.stdout.fnmatch_lines(["collected 0 items", "*no tests ran in*"])
class TestCollectFS:
def test_ignored_certain_directories(self, pytester: Pytester) -> None:
tmpdir = pytester.path
ensure_file(tmpdir / "build" / "test_notfound.py")
ensure_file(tmpdir / "dist" / "test_notfound.py")
ensure_file(tmpdir / "_darcs" / "test_notfound.py")
ensure_file(tmpdir / "CVS" / "test_notfound.py")
ensure_file(tmpdir / "{arch}" / "test_notfound.py")
ensure_file(tmpdir / ".whatever" / "test_notfound.py")
ensure_file(tmpdir / ".bzr" / "test_notfound.py")
ensure_file(tmpdir / "normal" / "test_found.py")
        for x in tmpdir.rglob("test_*.py"):
x.write_text("def test_hello(): pass", "utf-8")
result = pytester.runpytest("--collect-only")
s = result.stdout.str()
assert "test_notfound" not in s
assert "test_found" in s
@pytest.mark.parametrize(
"fname",
(
"activate",
"activate.csh",
"activate.fish",
"Activate",
"Activate.bat",
"Activate.ps1",
),
)
def test_ignored_virtualenvs(self, pytester: Pytester, fname: str) -> None:
bindir = "Scripts" if sys.platform.startswith("win") else "bin"
ensure_file(pytester.path / "virtual" / bindir / fname)
testfile = ensure_file(pytester.path / "virtual" / "test_invenv.py")
testfile.write_text("def test_hello(): pass")
# by default, ignore tests inside a virtualenv
result = pytester.runpytest()
result.stdout.no_fnmatch_line("*test_invenv*")
# allow test collection if user insists
result = pytester.runpytest("--collect-in-virtualenv")
assert "test_invenv" in result.stdout.str()
# allow test collection if user directly passes in the directory
result = pytester.runpytest("virtual")
assert "test_invenv" in result.stdout.str()
@pytest.mark.parametrize(
"fname",
(
"activate",
"activate.csh",
"activate.fish",
"Activate",
"Activate.bat",
"Activate.ps1",
),
)
def test_ignored_virtualenvs_norecursedirs_precedence(
self, pytester: Pytester, fname: str
) -> None:
bindir = "Scripts" if sys.platform.startswith("win") else "bin"
# norecursedirs takes priority
ensure_file(pytester.path / ".virtual" / bindir / fname)
testfile = ensure_file(pytester.path / ".virtual" / "test_invenv.py")
testfile.write_text("def test_hello(): pass")
result = pytester.runpytest("--collect-in-virtualenv")
result.stdout.no_fnmatch_line("*test_invenv*")
# ...unless the virtualenv is explicitly given on the CLI
result = pytester.runpytest("--collect-in-virtualenv", ".virtual")
assert "test_invenv" in result.stdout.str()
@pytest.mark.parametrize(
"fname",
(
"activate",
"activate.csh",
"activate.fish",
"Activate",
"Activate.bat",
"Activate.ps1",
),
)
def test__in_venv(self, pytester: Pytester, fname: str) -> None:
"""Directly test the virtual env detection function"""
bindir = "Scripts" if sys.platform.startswith("win") else "bin"
# no bin/activate, not a virtualenv
base_path = pytester.mkdir("venv")
assert _in_venv(py.path.local(base_path)) is False
# with bin/activate, totally a virtualenv
bin_path = base_path.joinpath(bindir)
bin_path.mkdir()
bin_path.joinpath(fname).touch()
assert _in_venv(py.path.local(base_path)) is True
def test_custom_norecursedirs(self, pytester: Pytester) -> None:
pytester.makeini(
"""
[pytest]
norecursedirs = mydir xyz*
"""
)
tmpdir = pytester.path
ensure_file(tmpdir / "mydir" / "test_hello.py").write_text("def test_1(): pass")
ensure_file(tmpdir / "xyz123" / "test_2.py").write_text("def test_2(): 0/0")
ensure_file(tmpdir / "xy" / "test_ok.py").write_text("def test_3(): pass")
rec = pytester.inline_run()
rec.assertoutcome(passed=1)
rec = pytester.inline_run("xyz123/test_2.py")
rec.assertoutcome(failed=1)
def test_testpaths_ini(self, pytester: Pytester, monkeypatch: MonkeyPatch) -> None:
pytester.makeini(
"""
[pytest]
testpaths = gui uts
"""
)
tmpdir = pytester.path
ensure_file(tmpdir / "env" / "test_1.py").write_text("def test_env(): pass")
ensure_file(tmpdir / "gui" / "test_2.py").write_text("def test_gui(): pass")
ensure_file(tmpdir / "uts" / "test_3.py").write_text("def test_uts(): pass")
# executing from rootdir only tests from `testpaths` directories
# are collected
items, reprec = pytester.inline_genitems("-v")
assert [x.name for x in items] == ["test_gui", "test_uts"]
# check that explicitly passing directories in the command-line
# collects the tests
for dirname in ("env", "gui", "uts"):
items, reprec = pytester.inline_genitems(tmpdir.joinpath(dirname))
assert [x.name for x in items] == ["test_%s" % dirname]
# changing cwd to each subdirectory and running pytest without
# arguments collects the tests in that directory normally
for dirname in ("env", "gui", "uts"):
monkeypatch.chdir(pytester.path.joinpath(dirname))
items, reprec = pytester.inline_genitems()
assert [x.name for x in items] == ["test_%s" % dirname]
class TestCollectPluginHookRelay:
def test_pytest_collect_file(self, pytester: Pytester) -> None:
wascalled = []
class Plugin:
def pytest_collect_file(self, path):
if not path.basename.startswith("."):
# Ignore hidden files, e.g. .testmondata.
wascalled.append(path)
pytester.makefile(".abc", "xyz")
pytest.main(py.path.local(pytester.path), plugins=[Plugin()])
assert len(wascalled) == 1
assert wascalled[0].ext == ".abc"
class TestPrunetraceback:
def test_custom_repr_failure(self, pytester: Pytester) -> None:
p = pytester.makepyfile(
"""
import not_exists
"""
)
pytester.makeconftest(
"""
import pytest
def pytest_collect_file(path, parent):
return MyFile.from_parent(fspath=path, parent=parent)
class MyError(Exception):
pass
class MyFile(pytest.File):
def collect(self):
raise MyError()
def repr_failure(self, excinfo):
if excinfo.errisinstance(MyError):
return "hello world"
return pytest.File.repr_failure(self, excinfo)
"""
)
result = pytester.runpytest(p)
result.stdout.fnmatch_lines(["*ERROR collecting*", "*hello world*"])
@pytest.mark.xfail(reason="other mechanism for adding to reporting needed")
def test_collect_report_postprocessing(self, pytester: Pytester) -> None:
p = pytester.makepyfile(
"""
import not_exists
"""
)
pytester.makeconftest(
"""
import pytest
@pytest.hookimpl(hookwrapper=True)
def pytest_make_collect_report():
outcome = yield
rep = outcome.get_result()
rep.headerlines += ["header1"]
outcome.force_result(rep)
"""
)
result = pytester.runpytest(p)
result.stdout.fnmatch_lines(["*ERROR collecting*", "*header1*"])
class TestCustomConftests:
def test_ignore_collect_path(self, pytester: Pytester) -> None:
pytester.makeconftest(
"""
def pytest_ignore_collect(path, config):
return path.basename.startswith("x") or \
path.basename == "test_one.py"
"""
)
sub = pytester.mkdir("xy123")
ensure_file(sub / "test_hello.py").write_text("syntax error")
sub.joinpath("conftest.py").write_text("syntax error")
pytester.makepyfile("def test_hello(): pass")
pytester.makepyfile(test_one="syntax error")
result = pytester.runpytest("--fulltrace")
assert result.ret == 0
result.stdout.fnmatch_lines(["*1 passed*"])
def test_ignore_collect_not_called_on_argument(self, pytester: Pytester) -> None:
pytester.makeconftest(
"""
def pytest_ignore_collect(path, config):
return True
"""
)
p = pytester.makepyfile("def test_hello(): pass")
result = pytester.runpytest(p)
assert result.ret == 0
result.stdout.fnmatch_lines(["*1 passed*"])
result = pytester.runpytest()
assert result.ret == ExitCode.NO_TESTS_COLLECTED
result.stdout.fnmatch_lines(["*collected 0 items*"])
def test_collectignore_exclude_on_option(self, pytester: Pytester) -> None:
pytester.makeconftest(
"""
import py
from pathlib import Path
collect_ignore = [py.path.local('hello'), 'test_world.py', Path('bye')]
def pytest_addoption(parser):
parser.addoption("--XX", action="store_true", default=False)
def pytest_configure(config):
if config.getvalue("XX"):
collect_ignore[:] = []
"""
)
pytester.mkdir("hello")
pytester.makepyfile(test_world="def test_hello(): pass")
result = pytester.runpytest()
assert result.ret == ExitCode.NO_TESTS_COLLECTED
result.stdout.no_fnmatch_line("*passed*")
result = pytester.runpytest("--XX")
assert result.ret == 0
assert "passed" in result.stdout.str()
def test_collectignoreglob_exclude_on_option(self, pytester: Pytester) -> None:
pytester.makeconftest(
"""
collect_ignore_glob = ['*w*l[dt]*']
def pytest_addoption(parser):
parser.addoption("--XX", action="store_true", default=False)
def pytest_configure(config):
if config.getvalue("XX"):
collect_ignore_glob[:] = []
"""
)
pytester.makepyfile(test_world="def test_hello(): pass")
pytester.makepyfile(test_welt="def test_hallo(): pass")
result = pytester.runpytest()
assert result.ret == ExitCode.NO_TESTS_COLLECTED
result.stdout.fnmatch_lines(["*collected 0 items*"])
result = pytester.runpytest("--XX")
assert result.ret == 0
result.stdout.fnmatch_lines(["*2 passed*"])
def test_pytest_fs_collect_hooks_are_seen(self, pytester: Pytester) -> None:
pytester.makeconftest(
"""
import pytest
class MyModule(pytest.Module):
pass
def pytest_collect_file(path, parent):
if path.ext == ".py":
return MyModule.from_parent(fspath=path, parent=parent)
"""
)
pytester.mkdir("sub")
pytester.makepyfile("def test_x(): pass")
result = pytester.runpytest("--co")
result.stdout.fnmatch_lines(["*MyModule*", "*test_x*"])
def test_pytest_collect_file_from_sister_dir(self, pytester: Pytester) -> None:
sub1 = pytester.mkpydir("sub1")
sub2 = pytester.mkpydir("sub2")
conf1 = pytester.makeconftest(
"""
import pytest
class MyModule1(pytest.Module):
pass
def pytest_collect_file(path, parent):
if path.ext == ".py":
return MyModule1.from_parent(fspath=path, parent=parent)
"""
)
conf1.replace(sub1.joinpath(conf1.name))
conf2 = pytester.makeconftest(
"""
import pytest
class MyModule2(pytest.Module):
pass
def pytest_collect_file(path, parent):
if path.ext == ".py":
return MyModule2.from_parent(fspath=path, parent=parent)
"""
)
conf2.replace(sub2.joinpath(conf2.name))
p = pytester.makepyfile("def test_x(): pass")
shutil.copy(p, sub1.joinpath(p.name))
shutil.copy(p, sub2.joinpath(p.name))
result = pytester.runpytest("--co")
result.stdout.fnmatch_lines(["*MyModule1*", "*MyModule2*", "*test_x*"])
class TestSession:
def test_collect_topdir(self, pytester: Pytester) -> None:
p = pytester.makepyfile("def test_func(): pass")
id = "::".join([p.name, "test_func"])
# XXX migrate to collectonly? (see below)
config = pytester.parseconfig(id)
topdir = pytester.path
rcol = Session.from_config(config)
assert topdir == rcol.fspath
# rootid = rcol.nodeid
# root2 = rcol.perform_collect([rcol.nodeid], genitems=False)[0]
# assert root2 == rcol, rootid
colitems = rcol.perform_collect([rcol.nodeid], genitems=False)
assert len(colitems) == 1
assert colitems[0].fspath == p
def get_reported_items(self, hookrec: HookRecorder) -> List[Item]:
"""Return pytest.Item instances reported by the pytest_collectreport hook"""
calls = hookrec.getcalls("pytest_collectreport")
return [
x
for call in calls
for x in call.report.result
if isinstance(x, pytest.Item)
]
def test_collect_protocol_single_function(self, pytester: Pytester) -> None:
p = pytester.makepyfile("def test_func(): pass")
id = "::".join([p.name, "test_func"])
items, hookrec = pytester.inline_genitems(id)
(item,) = items
assert item.name == "test_func"
newid = item.nodeid
assert newid == id
pprint.pprint(hookrec.calls)
topdir = pytester.path # noqa
hookrec.assert_contains(
[
("pytest_collectstart", "collector.fspath == topdir"),
("pytest_make_collect_report", "collector.fspath == topdir"),
("pytest_collectstart", "collector.fspath == p"),
("pytest_make_collect_report", "collector.fspath == p"),
("pytest_pycollect_makeitem", "name == 'test_func'"),
("pytest_collectreport", "report.result[0].name == 'test_func'"),
]
)
# ensure we are reporting the collection of the single test item (#2464)
assert [x.name for x in self.get_reported_items(hookrec)] == ["test_func"]
def test_collect_protocol_method(self, pytester: Pytester) -> None:
p = pytester.makepyfile(
"""
class TestClass(object):
def test_method(self):
pass
"""
)
normid = p.name + "::TestClass::test_method"
for id in [p.name, p.name + "::TestClass", normid]:
items, hookrec = pytester.inline_genitems(id)
assert len(items) == 1
assert items[0].name == "test_method"
newid = items[0].nodeid
assert newid == normid
# ensure we are reporting the collection of the single test item (#2464)
assert [x.name for x in self.get_reported_items(hookrec)] == ["test_method"]
def test_collect_custom_nodes_multi_id(self, pytester: Pytester) -> None:
p = pytester.makepyfile("def test_func(): pass")
pytester.makeconftest(
"""
import pytest
class SpecialItem(pytest.Item):
def runtest(self):
return # ok
class SpecialFile(pytest.File):
def collect(self):
return [SpecialItem.from_parent(name="check", parent=self)]
def pytest_collect_file(path, parent):
if path.basename == %r:
return SpecialFile.from_parent(fspath=path, parent=parent)
"""
% p.name
)
id = p.name
items, hookrec = pytester.inline_genitems(id)
pprint.pprint(hookrec.calls)
assert len(items) == 2
hookrec.assert_contains(
[
("pytest_collectstart", "collector.fspath == collector.session.fspath"),
(
"pytest_collectstart",
"collector.__class__.__name__ == 'SpecialFile'",
),
("pytest_collectstart", "collector.__class__.__name__ == 'Module'"),
("pytest_pycollect_makeitem", "name == 'test_func'"),
("pytest_collectreport", "report.nodeid.startswith(p.name)"),
]
)
assert len(self.get_reported_items(hookrec)) == 2
def test_collect_subdir_event_ordering(self, pytester: Pytester) -> None:
p = pytester.makepyfile("def test_func(): pass")
aaa = pytester.mkpydir("aaa")
test_aaa = aaa.joinpath("test_aaa.py")
p.replace(test_aaa)
items, hookrec = pytester.inline_genitems()
assert len(items) == 1
pprint.pprint(hookrec.calls)
hookrec.assert_contains(
[
("pytest_collectstart", "collector.fspath == test_aaa"),
("pytest_pycollect_makeitem", "name == 'test_func'"),
("pytest_collectreport", "report.nodeid.startswith('aaa/test_aaa.py')"),
]
)
def test_collect_two_commandline_args(self, pytester: Pytester) -> None:
p = pytester.makepyfile("def test_func(): pass")
aaa = pytester.mkpydir("aaa")
bbb = pytester.mkpydir("bbb")
test_aaa = aaa.joinpath("test_aaa.py")
shutil.copy(p, test_aaa)
test_bbb = bbb.joinpath("test_bbb.py")
p.replace(test_bbb)
id = "."
items, hookrec = pytester.inline_genitems(id)
assert len(items) == 2
pprint.pprint(hookrec.calls)
hookrec.assert_contains(
[
("pytest_collectstart", "collector.fspath == test_aaa"),
("pytest_pycollect_makeitem", "name == 'test_func'"),
("pytest_collectreport", "report.nodeid == 'aaa/test_aaa.py'"),
("pytest_collectstart", "collector.fspath == test_bbb"),
("pytest_pycollect_makeitem", "name == 'test_func'"),
("pytest_collectreport", "report.nodeid == 'bbb/test_bbb.py'"),
]
)
def test_serialization_byid(self, pytester: Pytester) -> None:
pytester.makepyfile("def test_func(): pass")
items, hookrec = pytester.inline_genitems()
assert len(items) == 1
(item,) = items
items2, hookrec = pytester.inline_genitems(item.nodeid)
(item2,) = items2
assert item2.name == item.name
assert item2.fspath == item.fspath
def test_find_byid_without_instance_parents(self, pytester: Pytester) -> None:
p = pytester.makepyfile(
"""
class TestClass(object):
def test_method(self):
pass
"""
)
arg = p.name + "::TestClass::test_method"
items, hookrec = pytester.inline_genitems(arg)
assert len(items) == 1
(item,) = items
assert item.nodeid.endswith("TestClass::test_method")
# ensure we are reporting the collection of the single test item (#2464)
assert [x.name for x in self.get_reported_items(hookrec)] == ["test_method"]
class Test_getinitialnodes:
def test_global_file(self, pytester: Pytester) -> None:
tmpdir = pytester.path
x = ensure_file(tmpdir / "x.py")
with tmpdir.cwd():
config = pytester.parseconfigure(x)
col = pytester.getnode(config, x)
assert isinstance(col, pytest.Module)
assert col.name == "x.py"
assert col.parent is not None
assert col.parent.parent is None
for parent in col.listchain():
assert parent.config is config
def test_pkgfile(self, pytester: Pytester) -> None:
"""Verify nesting when a module is within a package.
The parent chain should match: Module<x.py> -> Package<subdir> -> Session.
Session's parent should always be None.
"""
tmpdir = pytester.path
subdir = tmpdir.joinpath("subdir")
x = ensure_file(subdir / "x.py")
ensure_file(subdir / "__init__.py")
with subdir.cwd():
config = pytester.parseconfigure(x)
col = pytester.getnode(config, x)
assert col is not None
assert col.name == "x.py"
assert isinstance(col, pytest.Module)
assert isinstance(col.parent, pytest.Package)
assert isinstance(col.parent.parent, pytest.Session)
# session is batman (has no parents)
assert col.parent.parent.parent is None
for parent in col.listchain():
assert parent.config is config
class Test_genitems:
def test_check_collect_hashes(self, pytester: Pytester) -> None:
p = pytester.makepyfile(
"""
def test_1():
pass
def test_2():
pass
"""
)
shutil.copy(p, p.parent / (p.stem + "2" + ".py"))
items, reprec = pytester.inline_genitems(p.parent)
assert len(items) == 4
for numi, i in enumerate(items):
for numj, j in enumerate(items):
if numj != numi:
assert hash(i) != hash(j)
assert i != j
def test_example_items1(self, pytester: Pytester) -> None:
p = pytester.makepyfile(
"""
import pytest
def testone():
pass
class TestX(object):
def testmethod_one(self):
pass
class TestY(TestX):
@pytest.mark.parametrize("arg0", [".["])
def testmethod_two(self, arg0):
pass
"""
)
items, reprec = pytester.inline_genitems(p)
assert len(items) == 4
assert items[0].name == "testone"
assert items[1].name == "testmethod_one"
assert items[2].name == "testmethod_one"
assert items[3].name == "testmethod_two[.[]"
# let's also test getmodpath here
assert items[0].getmodpath() == "testone" # type: ignore[attr-defined]
assert items[1].getmodpath() == "TestX.testmethod_one" # type: ignore[attr-defined]
assert items[2].getmodpath() == "TestY.testmethod_one" # type: ignore[attr-defined]
# PR #6202: Fix incorrect result of getmodpath method. (Resolves issue #6189)
assert items[3].getmodpath() == "TestY.testmethod_two[.[]" # type: ignore[attr-defined]
s = items[0].getmodpath(stopatmodule=False) # type: ignore[attr-defined]
assert s.endswith("test_example_items1.testone")
print(s)
def test_class_and_functions_discovery_using_glob(self, pytester: Pytester) -> None:
"""Test that Python_classes and Python_functions config options work
as prefixes and glob-like patterns (#600)."""
pytester.makeini(
"""
[pytest]
python_classes = *Suite Test
python_functions = *_test test
"""
)
p = pytester.makepyfile(
"""
class MyTestSuite(object):
def x_test(self):
pass
class TestCase(object):
def test_y(self):
pass
"""
)
items, reprec = pytester.inline_genitems(p)
ids = [x.getmodpath() for x in items] # type: ignore[attr-defined]
assert ids == ["MyTestSuite.x_test", "TestCase.test_y"]
def test_matchnodes_two_collections_same_file(pytester: Pytester) -> None:
pytester.makeconftest(
"""
import pytest
def pytest_configure(config):
config.pluginmanager.register(Plugin2())
class Plugin2(object):
def pytest_collect_file(self, path, parent):
if path.ext == ".abc":
return MyFile2.from_parent(fspath=path, parent=parent)
def pytest_collect_file(path, parent):
if path.ext == ".abc":
return MyFile1.from_parent(fspath=path, parent=parent)
class MyFile1(pytest.File):
def collect(self):
yield Item1.from_parent(name="item1", parent=self)
class MyFile2(pytest.File):
def collect(self):
yield Item2.from_parent(name="item2", parent=self)
class Item1(pytest.Item):
def runtest(self):
pass
class Item2(pytest.Item):
def runtest(self):
pass
"""
)
p = pytester.makefile(".abc", "")
result = pytester.runpytest()
assert result.ret == 0
result.stdout.fnmatch_lines(["*2 passed*"])
res = pytester.runpytest("%s::item2" % p.name)
res.stdout.fnmatch_lines(["*1 passed*"])
class TestNodekeywords:
def test_no_under(self, pytester: Pytester) -> None:
modcol = pytester.getmodulecol(
"""
def test_pass(): pass
def test_fail(): assert 0
"""
)
values = list(modcol.keywords)
assert modcol.name in values
for x in values:
assert not x.startswith("_")
assert modcol.name in repr(modcol.keywords)
def test_issue345(self, pytester: Pytester) -> None:
pytester.makepyfile(
"""
def test_should_not_be_selected():
assert False, 'I should not have been selected to run'
def test___repr__():
pass
"""
)
reprec = pytester.inline_run("-k repr")
reprec.assertoutcome(passed=1, failed=0)
def test_keyword_matching_is_case_insensitive_by_default(
self, pytester: Pytester
) -> None:
"""Check that selection via -k EXPRESSION is case-insensitive.
Since markers are also added to the node keywords, they too can
be matched without having to think about case sensitivity.
"""
pytester.makepyfile(
"""
import pytest
def test_sPeCiFiCToPiC_1():
assert True
class TestSpecificTopic_2:
def test(self):
assert True
@pytest.mark.sPeCiFiCToPic_3
def test():
assert True
@pytest.mark.sPeCiFiCToPic_4
class Test:
def test(self):
assert True
def test_failing_5():
assert False, "This should not match"
"""
)
num_matching_tests = 4
for expression in ("specifictopic", "SPECIFICTOPIC", "SpecificTopic"):
reprec = pytester.inline_run("-k " + expression)
reprec.assertoutcome(passed=num_matching_tests, failed=0)
COLLECTION_ERROR_PY_FILES = dict(
test_01_failure="""
def test_1():
assert False
""",
test_02_import_error="""
import asdfasdfasdf
def test_2():
assert True
""",
test_03_import_error="""
import asdfasdfasdf
def test_3():
assert True
""",
test_04_success="""
def test_4():
assert True
""",
)
def test_exit_on_collection_error(pytester: Pytester) -> None:
"""Verify that all collection errors are collected and no tests executed"""
pytester.makepyfile(**COLLECTION_ERROR_PY_FILES)
res = pytester.runpytest()
assert res.ret == 2
res.stdout.fnmatch_lines(
[
"collected 2 items / 2 errors",
"*ERROR collecting test_02_import_error.py*",
"*No module named *asdfa*",
"*ERROR collecting test_03_import_error.py*",
"*No module named *asdfa*",
]
)
def test_exit_on_collection_with_maxfail_smaller_than_n_errors(
pytester: Pytester,
) -> None:
"""
    Verify collection is aborted once maxfail errors are encountered, ignoring
    further modules which would cause more collection errors.
"""
pytester.makepyfile(**COLLECTION_ERROR_PY_FILES)
res = pytester.runpytest("--maxfail=1")
assert res.ret == 1
res.stdout.fnmatch_lines(
[
"collected 1 item / 1 error",
"*ERROR collecting test_02_import_error.py*",
"*No module named *asdfa*",
"*! stopping after 1 failures !*",
"*= 1 error in *",
]
)
res.stdout.no_fnmatch_line("*test_03*")
def test_exit_on_collection_with_maxfail_bigger_than_n_errors(
pytester: Pytester,
) -> None:
"""
    Verify the test run aborts due to collection errors even if the maxfail
    count of errors was not reached.
"""
pytester.makepyfile(**COLLECTION_ERROR_PY_FILES)
res = pytester.runpytest("--maxfail=4")
assert res.ret == 2
res.stdout.fnmatch_lines(
[
"collected 2 items / 2 errors",
"*ERROR collecting test_02_import_error.py*",
"*No module named *asdfa*",
"*ERROR collecting test_03_import_error.py*",
"*No module named *asdfa*",
"*! Interrupted: 2 errors during collection !*",
"*= 2 errors in *",
]
)
def test_continue_on_collection_errors(pytester: Pytester) -> None:
"""
Verify tests are executed even when collection errors occur when the
--continue-on-collection-errors flag is set
"""
pytester.makepyfile(**COLLECTION_ERROR_PY_FILES)
res = pytester.runpytest("--continue-on-collection-errors")
assert res.ret == 1
res.stdout.fnmatch_lines(
["collected 2 items / 2 errors", "*1 failed, 1 passed, 2 errors*"]
)
def test_continue_on_collection_errors_maxfail(pytester: Pytester) -> None:
"""
Verify tests are executed even when collection errors occur and that maxfail
is honoured (including the collection error count).
4 tests: 2 collection errors + 1 failure + 1 success
test_4 is never executed because the test run is with --maxfail=3 which
means it is interrupted after the 2 collection errors + 1 failure.
"""
pytester.makepyfile(**COLLECTION_ERROR_PY_FILES)
res = pytester.runpytest("--continue-on-collection-errors", "--maxfail=3")
assert res.ret == 1
res.stdout.fnmatch_lines(["collected 2 items / 2 errors", "*1 failed, 2 errors*"])
def test_fixture_scope_sibling_conftests(pytester: Pytester) -> None:
"""Regression test case for https://github.com/pytest-dev/pytest/issues/2836"""
foo_path = pytester.mkdir("foo")
foo_path.joinpath("conftest.py").write_text(
textwrap.dedent(
"""\
import pytest
@pytest.fixture
def fix():
return 1
"""
)
)
foo_path.joinpath("test_foo.py").write_text("def test_foo(fix): assert fix == 1")
# Tests in `food/` should not see the conftest fixture from `foo/`
food_path = pytester.mkpydir("food")
food_path.joinpath("test_food.py").write_text("def test_food(fix): assert fix == 1")
res = pytester.runpytest()
assert res.ret == 1
res.stdout.fnmatch_lines(
[
"*ERROR at setup of test_food*",
"E*fixture 'fix' not found",
"*1 passed, 1 error*",
]
)
def test_collect_init_tests(pytester: Pytester) -> None:
"""Check that we collect files from __init__.py files when they patch the 'python_files' (#3773)"""
p = pytester.copy_example("collect/collect_init_tests")
result = pytester.runpytest(p, "--collect-only")
result.stdout.fnmatch_lines(
[
"collected 2 items",
"<Package tests>",
" <Module __init__.py>",
" <Function test_init>",
" <Module test_foo.py>",
" <Function test_foo>",
]
)
result = pytester.runpytest("./tests", "--collect-only")
result.stdout.fnmatch_lines(
[
"collected 2 items",
"<Package tests>",
" <Module __init__.py>",
" <Function test_init>",
" <Module test_foo.py>",
" <Function test_foo>",
]
)
# Ignores duplicates with "." and pkginit (#4310).
result = pytester.runpytest("./tests", ".", "--collect-only")
result.stdout.fnmatch_lines(
[
"collected 2 items",
"<Package tests>",
" <Module __init__.py>",
" <Function test_init>",
" <Module test_foo.py>",
" <Function test_foo>",
]
)
# Same as before, but different order.
result = pytester.runpytest(".", "tests", "--collect-only")
result.stdout.fnmatch_lines(
[
"collected 2 items",
"<Package tests>",
" <Module __init__.py>",
" <Function test_init>",
" <Module test_foo.py>",
" <Function test_foo>",
]
)
result = pytester.runpytest("./tests/test_foo.py", "--collect-only")
result.stdout.fnmatch_lines(
["<Package tests>", " <Module test_foo.py>", " <Function test_foo>"]
)
result.stdout.no_fnmatch_line("*test_init*")
result = pytester.runpytest("./tests/__init__.py", "--collect-only")
result.stdout.fnmatch_lines(
["<Package tests>", " <Module __init__.py>", " <Function test_init>"]
)
result.stdout.no_fnmatch_line("*test_foo*")
def test_collect_invalid_signature_message(pytester: Pytester) -> None:
"""Check that we issue a proper message when we can't determine the signature of a test
function (#4026).
"""
pytester.makepyfile(
"""
import pytest
class TestCase:
@pytest.fixture
def fix():
pass
"""
)
result = pytester.runpytest()
result.stdout.fnmatch_lines(
["Could not determine arguments of *.fix *: invalid method signature"]
)
def test_collect_handles_raising_on_dunder_class(pytester: Pytester) -> None:
"""Handle proxy classes like Django's LazySettings that might raise on
``isinstance`` (#4266).
"""
pytester.makepyfile(
"""
class ImproperlyConfigured(Exception):
pass
class RaisesOnGetAttr(object):
def raises(self):
raise ImproperlyConfigured
__class__ = property(raises)
raises = RaisesOnGetAttr()
def test_1():
pass
"""
)
result = pytester.runpytest()
result.stdout.fnmatch_lines(["*1 passed in*"])
assert result.ret == 0
def test_collect_with_chdir_during_import(pytester: Pytester) -> None:
subdir = pytester.mkdir("sub")
pytester.path.joinpath("conftest.py").write_text(
textwrap.dedent(
"""
import os
os.chdir(%r)
"""
% (str(subdir),)
)
)
pytester.makepyfile(
"""
def test_1():
import os
assert os.getcwd() == %r
"""
% (str(subdir),)
)
with pytester.path.cwd():
result = pytester.runpytest()
result.stdout.fnmatch_lines(["*1 passed in*"])
assert result.ret == 0
# Handles relative testpaths.
pytester.makeini(
"""
[pytest]
testpaths = .
"""
)
with pytester.path.cwd():
result = pytester.runpytest("--collect-only")
result.stdout.fnmatch_lines(["collected 1 item"])
def test_collect_pyargs_with_testpaths(
pytester: Pytester, monkeypatch: MonkeyPatch
) -> None:
testmod = pytester.mkdir("testmod")
# NOTE: __init__.py is not collected since it does not match python_files.
testmod.joinpath("__init__.py").write_text("def test_func(): pass")
testmod.joinpath("test_file.py").write_text("def test_func(): pass")
root = pytester.mkdir("root")
root.joinpath("pytest.ini").write_text(
textwrap.dedent(
"""
[pytest]
addopts = --pyargs
testpaths = testmod
"""
)
)
monkeypatch.setenv("PYTHONPATH", str(pytester.path), prepend=os.pathsep)
with root.cwd():
result = pytester.runpytest_subprocess()
result.stdout.fnmatch_lines(["*1 passed in*"])
def test_collect_symlink_file_arg(pytester: Pytester) -> None:
"""Collect a direct symlink works even if it does not match python_files (#4325)."""
real = pytester.makepyfile(
real="""
def test_nodeid(request):
assert request.node.nodeid == "symlink.py::test_nodeid"
"""
)
symlink = pytester.path.joinpath("symlink.py")
symlink_or_skip(real, symlink)
result = pytester.runpytest("-v", symlink)
result.stdout.fnmatch_lines(["symlink.py::test_nodeid PASSED*", "*1 passed in*"])
assert result.ret == 0
def test_collect_symlink_out_of_tree(pytester: Pytester) -> None:
"""Test collection of symlink via out-of-tree rootdir."""
sub = pytester.mkdir("sub")
real = sub.joinpath("test_real.py")
real.write_text(
textwrap.dedent(
"""
def test_nodeid(request):
# Should not contain sub/ prefix.
assert request.node.nodeid == "test_real.py::test_nodeid"
"""
),
)
out_of_tree = pytester.mkdir("out_of_tree")
symlink_to_sub = out_of_tree.joinpath("symlink_to_sub")
symlink_or_skip(sub, symlink_to_sub)
os.chdir(sub)
result = pytester.runpytest("-vs", "--rootdir=%s" % sub, symlink_to_sub)
result.stdout.fnmatch_lines(
[
# Should not contain "sub/"!
"test_real.py::test_nodeid PASSED"
]
)
assert result.ret == 0
def test_collect_symlink_dir(pytester: Pytester) -> None:
"""A symlinked directory is collected."""
dir = pytester.mkdir("dir")
dir.joinpath("test_it.py").write_text("def test_it(): pass", "utf-8")
pytester.path.joinpath("symlink_dir").symlink_to(dir)
result = pytester.runpytest()
result.assert_outcomes(passed=2)
def test_collectignore_via_conftest(pytester: Pytester) -> None:
"""collect_ignore in parent conftest skips importing child (issue #4592)."""
tests = pytester.mkpydir("tests")
tests.joinpath("conftest.py").write_text("collect_ignore = ['ignore_me']")
ignore_me = tests.joinpath("ignore_me")
ignore_me.mkdir()
ignore_me.joinpath("__init__.py").touch()
ignore_me.joinpath("conftest.py").write_text("assert 0, 'should_not_be_called'")
result = pytester.runpytest()
assert result.ret == ExitCode.NO_TESTS_COLLECTED
def test_collect_pkg_init_and_file_in_args(pytester: Pytester) -> None:
subdir = pytester.mkdir("sub")
init = subdir.joinpath("__init__.py")
init.write_text("def test_init(): pass")
p = subdir.joinpath("test_file.py")
p.write_text("def test_file(): pass")
# NOTE: without "-o python_files=*.py" this collects test_file.py twice.
# This changed/broke with "Add package scoped fixtures #2283" (2b1410895)
# initially (causing a RecursionError).
result = pytester.runpytest("-v", str(init), str(p))
result.stdout.fnmatch_lines(
[
"sub/test_file.py::test_file PASSED*",
"sub/test_file.py::test_file PASSED*",
"*2 passed in*",
]
)
result = pytester.runpytest("-v", "-o", "python_files=*.py", str(init), str(p))
result.stdout.fnmatch_lines(
[
"sub/__init__.py::test_init PASSED*",
"sub/test_file.py::test_file PASSED*",
"*2 passed in*",
]
)
def test_collect_pkg_init_only(pytester: Pytester) -> None:
subdir = pytester.mkdir("sub")
init = subdir.joinpath("__init__.py")
init.write_text("def test_init(): pass")
result = pytester.runpytest(str(init))
result.stdout.fnmatch_lines(["*no tests ran in*"])
result = pytester.runpytest("-v", "-o", "python_files=*.py", str(init))
result.stdout.fnmatch_lines(["sub/__init__.py::test_init PASSED*", "*1 passed in*"])
@pytest.mark.parametrize("use_pkg", (True, False))
def test_collect_sub_with_symlinks(use_pkg: bool, pytester: Pytester) -> None:
"""Collection works with symlinked files and broken symlinks"""
sub = pytester.mkdir("sub")
if use_pkg:
sub.joinpath("__init__.py").touch()
sub.joinpath("test_file.py").write_text("def test_file(): pass")
# Create a broken symlink.
symlink_or_skip("test_doesnotexist.py", sub.joinpath("test_broken.py"))
# Symlink that gets collected.
symlink_or_skip("test_file.py", sub.joinpath("test_symlink.py"))
result = pytester.runpytest("-v", str(sub))
result.stdout.fnmatch_lines(
[
"sub/test_file.py::test_file PASSED*",
"sub/test_symlink.py::test_file PASSED*",
"*2 passed in*",
]
)
def test_collector_respects_tbstyle(pytester: Pytester) -> None:
p1 = pytester.makepyfile("assert 0")
result = pytester.runpytest(p1, "--tb=native")
assert result.ret == ExitCode.INTERRUPTED
result.stdout.fnmatch_lines(
[
"*_ ERROR collecting test_collector_respects_tbstyle.py _*",
"Traceback (most recent call last):",
' File "*/test_collector_respects_tbstyle.py", line 1, in <module>',
" assert 0",
"AssertionError: assert 0",
"*! Interrupted: 1 error during collection !*",
"*= 1 error in *",
]
)
def test_does_not_eagerly_collect_packages(pytester: Pytester) -> None:
pytester.makepyfile("def test(): pass")
pydir = pytester.mkpydir("foopkg")
pydir.joinpath("__init__.py").write_text("assert False")
result = pytester.runpytest()
assert result.ret == ExitCode.OK
def test_does_not_put_src_on_path(pytester: Pytester) -> None:
# `src` is not on sys.path so it should not be importable
ensure_file(pytester.path / "src/nope/__init__.py")
pytester.makepyfile(
"import pytest\n"
"def test():\n"
" with pytest.raises(ImportError):\n"
" import nope\n"
)
result = pytester.runpytest()
assert result.ret == ExitCode.OK
def test_fscollector_from_parent(pytester: Pytester, request: FixtureRequest) -> None:
"""Ensure File.from_parent can forward custom arguments to the constructor.
Context: https://github.com/pytest-dev/pytest-cpp/pull/47
"""
class MyCollector(pytest.File):
def __init__(self, fspath, parent, x):
super().__init__(fspath, parent)
self.x = x
@classmethod
def from_parent(cls, parent, *, fspath, x):
return super().from_parent(parent=parent, fspath=fspath, x=x)
collector = MyCollector.from_parent(
parent=request.session, fspath=py.path.local(pytester.path) / "foo", x=10
)
assert collector.x == 10
class TestImportModeImportlib:
def test_collect_duplicate_names(self, pytester: Pytester) -> None:
"""--import-mode=importlib can import modules with same names that are not in packages."""
pytester.makepyfile(
**{
"tests_a/test_foo.py": "def test_foo1(): pass",
"tests_b/test_foo.py": "def test_foo2(): pass",
}
)
result = pytester.runpytest("-v", "--import-mode=importlib")
result.stdout.fnmatch_lines(
[
"tests_a/test_foo.py::test_foo1 *",
"tests_b/test_foo.py::test_foo2 *",
"* 2 passed in *",
]
)
def test_conftest(self, pytester: Pytester) -> None:
"""Directory containing conftest modules are not put in sys.path as a side-effect of
importing them."""
tests_dir = pytester.path.joinpath("tests")
pytester.makepyfile(
**{
"tests/conftest.py": "",
"tests/test_foo.py": """
import sys
def test_check():
assert r"{tests_dir}" not in sys.path
""".format(
tests_dir=tests_dir
),
}
)
result = pytester.runpytest("-v", "--import-mode=importlib")
result.stdout.fnmatch_lines(["* 1 passed in *"])
def setup_conftest_and_foo(self, pytester: Pytester) -> None:
"""Setup a tests folder to be used to test if modules in that folder can be imported
due to side-effects of --import-mode or not."""
pytester.makepyfile(
**{
"tests/conftest.py": "",
"tests/foo.py": """
def foo(): return 42
""",
"tests/test_foo.py": """
def test_check():
from foo import foo
assert foo() == 42
""",
}
)
def test_modules_importable_as_side_effect(self, pytester: Pytester) -> None:
"""In import-modes `prepend` and `append`, we are able to import modules from folders
containing conftest.py files due to the side effect of changing sys.path."""
self.setup_conftest_and_foo(pytester)
result = pytester.runpytest("-v", "--import-mode=prepend")
result.stdout.fnmatch_lines(["* 1 passed in *"])
def test_modules_not_importable_as_side_effect(self, pytester: Pytester) -> None:
"""In import-mode `importlib`, modules in folders containing conftest.py are not
importable, as don't change sys.path or sys.modules as side effect of importing
the conftest.py file.
"""
self.setup_conftest_and_foo(pytester)
result = pytester.runpytest("-v", "--import-mode=importlib")
result.stdout.fnmatch_lines(
[
"*ModuleNotFoundError: No module named 'foo'",
"tests?test_foo.py:2: ModuleNotFoundError",
"* 1 failed in *",
]
)
def test_does_not_crash_on_error_from_decorated_function(pytester: Pytester) -> None:
"""Regression test for an issue around bad exception formatting due to
assertion rewriting mangling lineno's (#4984)."""
pytester.makepyfile(
"""
@pytest.fixture
def a(): return 4
"""
)
result = pytester.runpytest()
# Not INTERNAL_ERROR
assert result.ret == ExitCode.INTERRUPTED
def test_does_not_crash_on_recursive_symlink(pytester: Pytester) -> None:
"""Regression test for an issue around recursive symlinks (#7951)."""
symlink_or_skip("recursive", pytester.path.joinpath("recursive"))
pytester.makepyfile(
"""
def test_foo(): assert True
"""
)
result = pytester.runpytest()
assert result.ret == ExitCode.OK
assert result.parseoutcomes() == {"passed": 1}
| avg_line_length: 35.240109 | max_line_length: 103 | alphanum_fraction: 0.587647 |

| hexsha: 58939687baa699d22e9db901f107d26f9c724a04 | size: 5,923 | ext: py | lang: Python | path: lazydata/storage/local.py | repo: zbitouzakaria/lazydata @ a568142a5878d5cf42165e0e8a420d8f738af846 | licenses: ["Apache-2.0"] | stars/issues/forks: null |
"""
An abstraction layer for the local cache
"""
from pathlib import Path, PurePosixPath
import yaml
import os
import stat
from peewee import SqliteDatabase, Model, CharField, IntegerField
from lazydata.storage.hash import calculate_file_sha256
import shutil
BASE_PATH = Path(Path.home().resolve(), ".lazydata")
METADB_PATH = Path(BASE_PATH, "metadb.sqlite3")
db = SqliteDatabase(str(METADB_PATH))
class LocalStorage:
"""
An abstraction layer for the local cache
    This class always re-reads all the config files, making sure we have the latest version.
"""
def __init__(self):
"""
Initialise the object and make sure the ~/.lazydata directory exists
"""
# base path where all the data and metadata is stored
self.base_path = BASE_PATH
self.config_path = Path(self.base_path, "config.yml")
self.data_path = Path(self.base_path, "data")
self.metadb_path = METADB_PATH
# make sure base path exists
if not self.base_path.exists():
self.base_path.mkdir()
# write a stub config file
with open(str(self.config_path), "w") as fp:
fp.write("version: 1\n")
# make sure the datafile store exists
if not self.data_path.exists():
self.data_path.mkdir()
# Load in the config file
with open(str(self.config_path)) as fp:
            self.config = yaml.safe_load(fp)  # safe_load avoids executing arbitrary YAML tags
self.metadb = db
# open a connection to the sqlite database with file metadata
if not Path(self.metadb_path).exists():
# Make sure the DB exists
self.metadb.connect()
self.metadb.create_tables([DataFile])
elif self.metadb.is_closed():
self.metadb.connect()
def hash_to_file(self, sha256:str) -> Path:
"""Get the data storage path to a file with this hash
:param sha256:
:return: Path to the stored file
"""
return Path(self.data_path, sha256[:2], sha256[2:])
def hash_to_remote_path(self, sha256:str) -> PurePosixPath:
"""Get the remote path (in posix format)
:param sha256:
:return: Path to the stored file
"""
return PurePosixPath("data", sha256[:2], sha256[2:])
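    # Layout sketch (hypothetical hash): both helpers shard by the first two
    # hex characters of the sha256, e.g. for sha256 = "ab12cd...":
    #   hash_to_file        -> ~/.lazydata/data/ab/12cd...
    #   hash_to_remote_path -> data/ab/12cd...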
def store_file(self, path:str):
"""
Store a file in the local backend.
        :param path: The path to the file to store
:return:
"""
        st = os.stat(path)  # renamed from `stat` to avoid shadowing the stat module imported above
        abspath = Path(path).resolve()
        sha256 = calculate_file_sha256(path)
        # see if we stored this file already
        datapath = self.hash_to_file(sha256)
        # copy over to the cache
        # TODO: option to hardlink
        datapath.parent.mkdir(parents=True, exist_ok=True)
        shutil.copyfile(str(abspath), str(datapath))
        # Store in the metadata DB if it doesn't exist already
        existing_entries = DataFile.select().where(
            (
                (DataFile.abspath == abspath) &
                (DataFile.sha256 == sha256) &
                (DataFile.mtime == st.st_mtime) &
                (DataFile.size == st.st_size)
            )
        )
        if existing_entries.count() == 0:
            DataFile.create(abspath=abspath, sha256=sha256, mtime=st.st_mtime, size=st.st_size)
def get_file_sha256(self, path:str) -> list:
"""
Checks if the file has a stored sha256 value
:param path:
:return: A list of sha256 strings
"""
        st = os.stat(path)  # avoid shadowing the stat module
        abspath = Path(path).resolve()
        existing_entries = DataFile.select().where(
            (
                (DataFile.abspath == abspath) &
                (DataFile.mtime == st.st_mtime) &
                (DataFile.size == st.st_size)
            )
        )
sha256 = [e.sha256 for e in existing_entries]
return sha256
def copy_file_to(self, sha256:str, path:str) -> bool:
"""
        Copy the file from the local cache into the user's local copy
        (implemented as a hard link).
        If the file is not available in the local cache this returns False;
        otherwise the file is linked into place and True is returned.
        :param sha256: The sha256 of the file we need
        :param path: The path where it should be copied
        :return: True on success, False if the file is not cached locally
"""
cached_path = self.hash_to_file(sha256)
path_obj = Path(path)
if cached_path.exists():
if path_obj.exists():
if is_same_hard_link(str(cached_path), path):
# nothing to do, already linked
return True
else:
# delete the old file as we'll need to overwrite it
path_obj.unlink()
else:
# we might need to make some directories to pull the file...
path_obj.parent.mkdir(parents=True, exist_ok=True)
os.link(str(cached_path), path)
return True
else:
return False
def is_same_hard_link(filename:str, other:str):
s1 = os.stat(filename)
s2 = os.stat(other)
return (s1[stat.ST_INO], s1[stat.ST_DEV]) == \
(s2[stat.ST_INO], s2[stat.ST_DEV])
# MetaDB tables
class BaseModel(Model):
class Meta:
database = db
class DataFile(BaseModel):
"""
The model stores the mtime and size of the original file(s).
This makes it easy to quickly check if the file has changed when calling use()
    :ivar abspath: The full absolute path to the file when it was used
:ivar sha256: The SHA256 value
:ivar mtime: The original mtime value
:ivar size: The size of the file
"""
abspath = CharField(index=True)
sha256 = CharField(max_length=70, index=True)
mtime = IntegerField(index=True)
size = IntegerField(index=True)
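# A minimal round-trip sketch for the cache above, assuming a readable file at
# the hypothetical path "example.csv": store_file() hashes the file and copies
# it into the cache, get_file_sha256() finds cached hashes by (path, mtime,
# size), and copy_file_to() hard-links the cached blob back into place.
#
#   storage = LocalStorage()
#   storage.store_file("example.csv")
#   hashes = storage.get_file_sha256("example.csv")
#   if hashes:
#       storage.copy_file_to(hashes[0], "restored.csv")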
| avg_line_length: 29.034314 | max_line_length: 102 | alphanum_fraction: 0.598008 |

| hexsha: ae93416d4c7a94038b1eac4518705560e5460f5f | size: 89 | ext: py | lang: Python | path: coding/coin_change/starter.py | repo: alexanderywang/tech-interview-questions @ a0098eda33db73d10787e198d1f105532ea32c09 | licenses: ["MIT"] | stars/issues/forks: null |
from typing import List

class Solution:
    def coinChange(self, coins: List[int], amount: int) -> int:
        ...  # starter stub: the body is intentionally left for the solver
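# One common solution sketch (not part of the starter file): bottom-up dynamic
# programming over amounts, O(amount * len(coins)) time, where dp[a] is the
# fewest coins summing to a (infinity if unreachable):
#
#   def coin_change(coins, amount):
#       INF = float("inf")
#       dp = [0] + [INF] * amount
#       for a in range(1, amount + 1):
#           dp[a] = min((dp[a - c] + 1 for c in coins if c <= a), default=INF)
#       return dp[amount] if dp[amount] != INF else -1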
| avg_line_length: 22.25 | max_line_length: 63 | alphanum_fraction: 0.606742 |

| hexsha: 819ae0fb90247aa85be043fe2b3fb0f6c5789713 | size: 1,873 | ext: py | lang: Python | path: setup.py | repo: coleifer/unqlite-python @ b0d121b20a0c11e23e65eca559eade711de3b671 | licenses: ["MIT"] | stars: 324 (2015-02-08 .. 2022-03-30) | issues: 54 (2015-02-09 .. 2022-02-10) | forks: 37 (2015-02-08 .. 2021-12-01) |
import glob
import warnings
from setuptools import setup
from setuptools.extension import Extension
try:
from Cython.Build import cythonize
except ImportError:
cython_installed = False
warnings.warn('Cython not installed, using pre-generated C source file.')
else:
cython_installed = True
if cython_installed:
python_source = 'unqlite.pyx'
else:
python_source = 'unqlite.c'
cythonize = lambda obj: obj
library_source = ['src/unqlite.c']
unqlite_extension = Extension(
'unqlite',
sources=[python_source] + library_source)
setup(
name='unqlite',
version='0.9.2',
description='Fast Python bindings for the UnQLite embedded NoSQL database.',
author='Charles Leifer',
author_email='',
url='https://github.com/coleifer/unqlite-python',
license='MIT',
install_requires=['Cython'],
setup_requires=['cython'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: C',
'Programming Language :: Cython',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Database',
'Topic :: Database :: Database Engines/Servers',
'Topic :: Software Development :: Embedded Systems',
'Topic :: Software Development :: Libraries :: Python Modules'],
ext_modules=cythonize([unqlite_extension])
)
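# Build/usage sketch (standard setuptools commands, not part of this file):
# with Cython installed the extension is compiled from unqlite.pyx; otherwise
# the pre-generated unqlite.c fallback selected above is used.
#
#   pip install .                          # regular install
#   python setup.py build_ext --inplace    # build the extension in place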
| avg_line_length: 32.293103 | max_line_length: 80 | alphanum_fraction: 0.64976 |

| hexsha: 65e6626b2df5419e54f9ca25265508c3961541ad | size: 2,093 | ext: py | lang: Python | path: backend/api/pdfreader.py | repo: asa008/nhyai @ d97f8a4d18e315dcbd1159c2ed03611adb76ba70 (also ettingshausen/nhyai @ 33be2078cf2835d85fedc901d343568e79a5941f) | licenses: ["Apache-2.0"] | stars: 3 (2021-02-12 .. 2022-03-07) | forks: 3 (2020-04-24 .. 2021-08-16) |
from pdfminer.layout import LTTextBoxHorizontal, LAParams
from pdfminer.pdfparser import PDFParser, PDFDocument
from pdfminer.pdfinterp import PDFTextExtractionNotAllowed
from pdfminer.converter import PDFPageAggregator
from pdfminer.pdfinterp import PDFResourceManager, PDFPageInterpreter
import os.path
import pyocr
import importlib
import sys
import time
from tempfile import NamedTemporaryFile
importlib.reload(sys)
class PdfReader:
    def parse(self, text_path):
        '''Parse the text content of a PDF file and return it as a string.'''
        fp = open(text_path, 'rb')
        # Create a PDF parser from the file object
        parser = PDFParser(fp)
        # Create a PDF document
        doc = PDFDocument()
        # Connect the parser and the document object
        parser.set_document(doc)
        doc.set_parser(parser)
        # Supply the initialization password; use an empty string if there is none
        doc.initialize()
        # Check whether the document allows text extraction; abort if it doesn't
        if not doc.is_extractable:
            raise PDFTextExtractionNotAllowed
        else:
            # Create a PDF resource manager to share resources
            rsrcmgr = PDFResourceManager()
            # Create a PDF device object
            laparams = LAParams()
            device = PDFPageAggregator(rsrcmgr, laparams=laparams)
            # Create a PDF interpreter object
            interpreter = PDFPageInterpreter(rsrcmgr, device)
            # Iterate over the pages, processing one page at a time;
            # doc.get_pages() yields the page list
            results = ''
            for page in doc.get_pages():
                interpreter.process_page(page)
                # Get the LTPage object for this page
                layout = device.get_result()
                # `layout` is an LTPage object holding the objects parsed from
                # this page, typically LTTextBox, LTFigure, LTImage,
                # LTTextBoxHorizontal, and so on; the text is in each object's
                # text attribute.
                for x in layout:
                    if isinstance(x, LTTextBoxHorizontal):
                        result = x.get_text()
                        results += result + "\n"
            fp.close()  # release the file handle once all pages are processed
            return results
if __name__ == '__main__':
    time1 = time.time()
    text_path = 'D:\\var\\www\\gallery\\media\\text\\ssss.pdf'
    result = PdfReader().parse(text_path)
    print(result)
    time2 = time.time()
    print("Total time elapsed:", time2 - time1)
| avg_line_length: 34.883333 | max_line_length: 74 | alphanum_fraction: 0.616818 |

| hexsha: 17bdd4616927c288e3d484d2239de7b828d67fbb | size: 19,187 | ext: py | lang: Python | path: pywikibot/userinterfaces/terminal_interface_base.py | repo: valhallasw/pywikibot-core @ 32a8c3c1298a5cb077381fe202daefde82c1c5d3 | licenses: ["MIT"] | stars/issues/forks: null |
# -*- coding: utf-8 -*-
"""Base for terminal user interfaces."""
#
# (C) Pywikibot team, 2003-2015
#
# Distributed under the terms of the MIT license.
#
from __future__ import unicode_literals
__version__ = '$Id: 17bdd4616927c288e3d484d2239de7b828d67fbb $'
import getpass
import logging
import math
import re
import sys
from . import transliteration
import pywikibot
from pywikibot import config
from pywikibot.bot import VERBOSE, INFO, STDOUT, INPUT, WARNING
from pywikibot.tools import deprecated, PY2
from pywikibot.bot_choice import (
Option, OutputOption, StandardOption, ChoiceException, QuitKeyboardInterrupt,
)
transliterator = transliteration.transliterator(config.console_encoding)
colors = [
'default',
'black',
'blue',
'green',
'aqua',
'red',
'purple',
'yellow',
'lightgray',
'gray',
'lightblue',
'lightgreen',
'lightaqua',
'lightred',
'lightpurple',
'lightyellow',
'white',
]
colorTagR = re.compile('\03{(?P<name>%s)}' % '|'.join(colors))
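# Markup sketch: colorized output embeds tags of the form "\03{name}", e.g.
#   '\03{lightyellow}warning\03{default}'
# colorTagR matches these tags so that non-color terminals can strip them
# (see printNonColorized below, which removes the tags and appends ' ***').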
class UI:
"""Base for terminal user interfaces."""
def __init__(self):
"""
Initialize the UI.
This caches the std-streams locally so any attempts to monkey-patch the
streams later will not work.
"""
self.stdin = sys.stdin
self.stdout = sys.stdout
self.stderr = sys.stderr
self.argv = sys.argv
self.encoding = config.console_encoding
self.transliteration_target = config.transliteration_target
def init_handlers(self, root_logger, default_stream='stderr'):
"""Initialize the handlers for user output.
This method initializes handler(s) for output levels VERBOSE (if
enabled by config.verbose_output), INFO, STDOUT, WARNING, ERROR,
and CRITICAL. STDOUT writes its output to sys.stdout; all the
others write theirs to sys.stderr.
"""
if default_stream == 'stdout':
default_stream = self.stdout
elif default_stream == 'stderr':
default_stream = self.stderr
# default handler for display to terminal
default_handler = TerminalHandler(self, strm=default_stream)
if config.verbose_output:
default_handler.setLevel(VERBOSE)
else:
default_handler.setLevel(INFO)
# this handler ignores levels above INPUT
default_handler.addFilter(MaxLevelFilter(INPUT))
default_handler.setFormatter(
TerminalFormatter(fmt="%(message)s%(newline)s"))
root_logger.addHandler(default_handler)
# handler for level STDOUT
output_handler = TerminalHandler(self, strm=self.stdout)
output_handler.setLevel(STDOUT)
output_handler.addFilter(MaxLevelFilter(STDOUT))
output_handler.setFormatter(
TerminalFormatter(fmt="%(message)s%(newline)s"))
root_logger.addHandler(output_handler)
# handler for levels WARNING and higher
warning_handler = TerminalHandler(self, strm=self.stderr)
warning_handler.setLevel(WARNING)
warning_handler.setFormatter(
TerminalFormatter(fmt="%(levelname)s: %(message)s%(newline)s"))
root_logger.addHandler(warning_handler)
warnings_logger = logging.getLogger("py.warnings")
warnings_logger.addHandler(warning_handler)
def printNonColorized(self, text, targetStream):
"""
Write the text non colorized to the target stream.
To each line which contains a color tag a ' ***' is added at the end.
"""
lines = text.split('\n')
for i, line in enumerate(lines):
if i > 0:
line = "\n" + line
line, count = colorTagR.subn('', line)
if count > 0:
line += ' ***'
if PY2:
line = line.encode(self.encoding, 'replace')
targetStream.write(line)
printColorized = printNonColorized
def _print(self, text, targetStream):
if config.colorized_output:
self.printColorized(text, targetStream)
else:
self.printNonColorized(text, targetStream)
def output(self, text, toStdout=False, targetStream=None):
"""
Output text to a stream.
If a character can't be displayed in the encoding used by the user's
terminal, it will be replaced with a question mark or by a
transliteration.
"""
if config.transliterate:
# Encode our unicode string in the encoding used by the user's
# console, and decode it back to unicode. Then we can see which
# characters can't be represented in the console encoding.
# We need to take min(console_encoding, transliteration_target)
# the first is what the terminal is capable of
# the second is how unicode-y the user would like the output
codecedText = text.encode(self.encoding,
'replace').decode(self.encoding)
if self.transliteration_target:
codecedText = codecedText.encode(self.transliteration_target,
'replace').decode(self.transliteration_target)
transliteratedText = ''
# Note: A transliteration replacement might be longer than the
# original character, e.g. ч is transliterated to ch.
prev = "-"
for i in range(len(codecedText)):
# work on characters that couldn't be encoded, but not on
# original question marks.
if codecedText[i] == '?' and text[i] != u'?':
try:
transliterated = transliterator.transliterate(
text[i], default='?', prev=prev, next=text[i + 1])
except IndexError:
transliterated = transliterator.transliterate(
text[i], default='?', prev=prev, next=' ')
# transliteration was successful. The replacement
# could consist of multiple letters.
# mark the transliterated letters in yellow.
transliteratedText += '\03{lightyellow}%s\03{default}' \
% transliterated
# memorize if we replaced a single letter by multiple
# letters.
if len(transliterated) > 0:
prev = transliterated[-1]
else:
# no need to try to transliterate.
transliteratedText += codecedText[i]
prev = codecedText[i]
text = transliteratedText
if not targetStream:
if toStdout:
targetStream = self.stdout
else:
targetStream = self.stderr
self._print(text, targetStream)
def _raw_input(self):
if not PY2:
return input()
else:
return raw_input() # noqa
def input(self, question, password=False, default='', force=False):
"""
Ask the user a question and return the answer.
Works like raw_input(), but returns a unicode string instead of ASCII.
Unlike raw_input, this function automatically adds a colon and space
after the question if they are not already present. Also recognises
a trailing question mark.
@param question: The question, without trailing whitespace.
@type question: basestring
@param password: if True, hides the user's input (for password entry).
@type password: bool
@param default: The default answer if none was entered. None to require
an answer.
@type default: basestring
@param force: Automatically use the default
@type force: bool
@rtype: unicode
"""
assert(not password or not default)
end_marker = ':'
question = question.strip()
if question[-1] == ':':
question = question[:-1]
elif question[-1] == '?':
question = question[:-1]
end_marker = '?'
if default:
question = question + ' (default: %s)' % default
question = question + end_marker
if force:
self.output(question + '\n')
return default
# sound the terminal bell to notify the user
if config.ring_bell:
sys.stdout.write('\07')
# TODO: make sure this is logged as well
while True:
self.output(question + ' ')
text = self._input_reraise_cntl_c(password)
if text:
return text
if default is not None:
return default
def _input_reraise_cntl_c(self, password):
"""Input and decode, and re-raise Control-C."""
try:
if password:
                # Python 3 requires that stderr gets flushed, otherwise the
                # message only becomes visible after the query.
self.stderr.flush()
text = getpass.getpass('')
else:
text = self._raw_input()
except KeyboardInterrupt:
raise QuitKeyboardInterrupt()
if PY2:
text = text.decode(self.encoding)
return text
def input_choice(self, question, options, default=None, return_shortcut=True,
automatic_quit=True, force=False):
"""
Ask the user and returns a value from the options.
Depending on the options setting return_shortcut to False may not be
sensible when the option supports multiple values as it'll return an
ambiguous index.
@param question: The question, without trailing whitespace.
@type question: basestring
@param options: All available options. Each entry contains the full
length answer and a shortcut of only one character. The shortcut
must not appear in the answer. Alternatively they may be a
Option (or subclass) instance. ChoiceException instances which have
a full option and shortcut and will be raised if selected.
@type options: iterable containing sequences of length 2 or Option
        @param default: The default answer if none was entered. None to require
an answer.
@type default: basestring
@param return_shortcut: Whether the shortcut or the index in the option
should be returned.
@type return_shortcut: bool
@param automatic_quit: Adds the option 'Quit' ('q') if True and throws a
L{QuitKeyboardInterrupt} if selected.
@type automatic_quit: bool
@param force: Automatically use the default
@type force: bool
@return: If return_shortcut the shortcut of options or the value of
default (if it's not None). Otherwise the index of the answer in
options. If default is not a shortcut, it'll return -1.
@rtype: int (if not return_shortcut), lowercased basestring (otherwise)
"""
if force and default is None:
raise ValueError('With no default option it cannot be forced')
options = list(options)
if len(options) == 0:
raise ValueError(u'No options are given.')
if automatic_quit:
options += [QuitKeyboardInterrupt()]
if default:
default = default.lower()
for i, option in enumerate(options):
if not isinstance(option, Option):
if len(option) != 2:
raise ValueError(u'Option #{0} does not consist of an '
u'option and shortcut.'.format(i))
options[i] = StandardOption(*option)
        # TODO: Test for uniqueness
handled = False
while not handled:
for option in options:
if isinstance(option, OutputOption) and option.before_question:
option.output()
output = Option.formatted(question, options, default)
if force:
self.output(output + '\n')
answer = default
else:
answer = self.input(output) or default
# something entered or default is defined
if answer:
for index, option in enumerate(options):
if option.handled(answer):
answer = option.result(answer)
handled = option.stop
break
if isinstance(answer, ChoiceException):
raise answer
elif not return_shortcut:
return index
else:
return answer
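# Illustrative call (an assumption, not part of the original module) -- given a
# UI instance `ui`:
#   answer = ui.input_choice('Replace the page',
#                            [('yes', 'y'), ('no', 'n')], default='n')
# returns the lowercased shortcut ('y' or 'n'); with return_shortcut=False it
# returns the index of the matched option instead.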
@deprecated('input_choice')
def inputChoice(self, question, options, hotkeys, default=None):
"""
Ask the user a question with a predefined list of acceptable answers.
DEPRECATED: Use L{input_choice} instead!
Directly calls L{input_choice} with the options and hotkeys zipped
into a tuple list. It always returns the hotkeys and throws no
L{QuitKeyboardInterrupt} if quit was selected.
"""
return self.input_choice(question=question, options=zip(options, hotkeys),
default=default, return_shortcut=True,
automatic_quit=False)
def input_list_choice(self, question, answers, default=None, force=False):
"""Ask the user to select one entry from a list of entries."""
message = question
clist = answers
line_template = u"{{0: >{0}}}: {{1}}".format(int(math.log10(len(clist)) + 1))
for n, i in enumerate(clist):
pywikibot.output(line_template.format(n + 1, i))
while True:
choice = self.input(message, default=default, force=force)
try:
choice = int(choice) - 1
except ValueError:
try:
choice = clist.index(choice)
except ValueError:  # list.index raises ValueError for a missing entry
choice = -1
# User typed choice number
if 0 <= choice < len(clist):
return clist[choice]
else:
pywikibot.error("Invalid response")
def editText(self, text, jumpIndex=None, highlight=None):
"""Return the text as edited by the user.
Uses a Tkinter edit box because we don't have a console editor
@param text: the text to be edited
@type text: unicode
@param jumpIndex: position at which to put the caret
@type jumpIndex: int
@param highlight: each occurrence of this substring will be highlighted
@type highlight: unicode
@return: the modified text, or None if the user didn't save the text
file in their text editor
@rtype: unicode or None
"""
try:
from pywikibot.userinterfaces import gui
except ImportError as e:
print('Could not load GUI modules: %s' % e)
return text
editor = gui.EditBoxWindow()
return editor.edit(text, jumpIndex=jumpIndex, highlight=highlight)
def askForCaptcha(self, url):
"""Show the user a CAPTCHA image and return the answer."""
try:
import webbrowser
pywikibot.output(u'Opening CAPTCHA in your web browser...')
if webbrowser.open(url):
return pywikibot.input(
u'What is the solution of the CAPTCHA that is shown in '
u'your web browser?')
else:
# no active exception here, so this bare raise produces a
# RuntimeError that the except clause below turns into the fallback
raise
except:
pywikibot.output(u'Error in opening web browser: %s'
% sys.exc_info()[0])
pywikibot.output(
u'Please copy this url to your web browser and open it:\n %s'
% url)
return pywikibot.input(
u'What is the solution of the CAPTCHA at this url ?')
def argvu(self):
"""Return the decoded arguments from argv."""
try:
return [s.decode(self.encoding) for s in self.argv]
except AttributeError: # in python 3, self.argv is unicode and thus cannot be decoded
return [s for s in self.argv]
class TerminalHandler(logging.Handler):
"""A handler class that writes logging records to a terminal.
This class does not close the stream,
as sys.stdout or sys.stderr may be (and usually will be) used.
Slightly modified version of the StreamHandler class that ships with
logging module, plus code for colorization of output.
"""
# create a class-level lock that can be shared by all instances
import threading
sharedlock = threading.RLock()
def __init__(self, UI, strm=None):
"""Initialize the handler.
If strm is not specified, sys.stderr is used.
"""
logging.Handler.__init__(self)
# replace Handler's instance-specific lock with the shared class lock
# to ensure that only one instance of this handler can write to
# the console at a time
self.lock = TerminalHandler.sharedlock
if strm is None:
strm = sys.stderr
self.stream = strm
self.formatter = None
self.UI = UI
def flush(self):
"""Flush the stream."""
self.stream.flush()
def emit(self, record):
"""Emit the record formatted to the output and return it."""
if record.name == 'py.warnings':
# Each warning appears twice
# the second time it has a 'message'
if 'message' in record.__dict__:
return
# Remove the last line, if it appears to be the warn() call
msg = record.args[0]
is_useless_source_output = any(
s in msg for s in
(str('warn('), str('exceptions.'), str('Warning)'), str('Warning,')))
if is_useless_source_output:
record.args = ('\n'.join(record.args[0].splitlines()[0:-1]),)
if 'newline' not in record.__dict__:
record.__dict__['newline'] = '\n'
text = self.format(record)
return self.UI.output(text, targetStream=self.stream)
class TerminalFormatter(logging.Formatter):
"""Terminal logging formatter."""
pass
class MaxLevelFilter(logging.Filter):
"""Filter that only passes records at or below a specific level.
(setting handler level only passes records at or *above* a specified level,
so this provides the opposite functionality)
"""
def __init__(self, level=None):
"""Constructor."""
self.level = level
def filter(self, record):
"""Return true if the level is below or equal to the set level."""
if self.level:
return record.levelno <= self.level
else:
return True
| 36.686424
| 95
| 0.588315
|
c626d18ce2889c81d1635a8f0495d8e47f653401
| 5,204
|
py
|
Python
|
doc/oop/oop.py
|
rasql/edunum
|
c9e3be3dbf86a6eb6c28d46b055f8a72bff00ee5
|
[
"MIT"
] | 3
|
2021-02-15T22:30:03.000Z
|
2021-06-16T13:28:25.000Z
|
doc/oop/oop.py
|
rasql/edunum
|
c9e3be3dbf86a6eb6c28d46b055f8a72bff00ee5
|
[
"MIT"
] | null | null | null |
doc/oop/oop.py
|
rasql/edunum
|
c9e3be3dbf86a6eb6c28d46b055f8a72bff00ee5
|
[
"MIT"
] | 3
|
2021-02-15T10:37:12.000Z
|
2021-08-10T13:25:42.000Z
|
import pyglet, sys, random, math
# Create a bouncing ball window
# for k, v in pyglet.options.items():
# print(k, '=', v)
# press C to add circles
# press R to add rectangles
class Vec2:
def __init__(self, x, y):
self.x = x
self.y = y
def __str__(self):
return f'Vec2({self.x}, {self.y})'
def __add__(self, other):
x = self.x + other.x
y = self.y + other.y
return Vec2(x, y)
def __sub__(self, other):
x = self.x - other.x
y = self.y - other.y
return Vec2(x, y)
def __mul__(self, k):
return Vec2(self.x * k , self.y * k)
def __truediv__(self, k):  # Python 3 calls __truediv__; __div__ was Python 2 only
return Vec2(self.x / k, self.y / k)
def mag(self):
return (self.x ** 2 + self.y ** 2) ** 0.5
def normalize(self):
d = self.mag()
self.x /= d
self.y /= d
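# Quick demo of the overloads above (illustrative, not part of the original
# lesson file):
#   a, b = Vec2(3, 4), Vec2(1, 2)
#   print(a + b)    # Vec2(4, 6)  via __add__
#   print(a * 2)    # Vec2(6, 8)  via __mul__
#   print(a.mag())  # 5.0         since (3**2 + 4**2) ** 0.5 == 5.0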
class Ball(pyglet.shapes.Circle):
def __init__(self, win, *args):
super().__init__(x=100, y=100, radius=20, color=(255, 127, 0), batch=win.batch)
self.win = win
self.pos = Vec2(100, 100)
self.v = Vec2(2, 2)
def update(self, dt):
self.pos += self.v
if self.pos.x < 0:
self.v.x = abs(self.v.x)
if self.pos.x > self.win.width:
self.v.x = -abs(self.v.x)
if self.pos.y < 0:
self.v.y = abs(self.v.y)
if self.pos.y > self.win.height:
self.v.y = -abs(self.v.y)
self.x = self.pos.x
self.y = self.pos.y
class Circle(pyglet.shapes.Circle):
def __init__(self, win, x, y, r):
super().__init__(x, y, r, batch=win.batch)
self.win = win
class Rect(pyglet.shapes.Rectangle):
def __init__(self, win, *args, **kwargs):
super().__init__(*args, **kwargs, batch=win.batch)
self.win = win
class AppWindow(pyglet.window.Window):
# This is the app window
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.batch = pyglet.graphics.Batch()
self.status = pyglet.text.Label('status', x=10, y=10, batch=self.batch)
self.selection_rect = pyglet.shapes.BorderedRectangle(200, 200, 100, 50,
color=(100, 100, 100), batch=self.batch)
self.objects = []
self.mouse = (0, 0)  # last cursor position; the C/R handlers need it before any motion event
self.ball = Ball(self)
self.rect = Rect(self, 100, 50, 100, 50, (100, 100, 255))
pyglet.clock.schedule_interval(self.update, 1/120.0)
def update(self, dt):
self.ball.update(dt)
def on_draw(self):
self.clear()
self.batch.draw()
def on_key_press(self, symbol, modifiers):
self.status.text = f'key press: {symbol}, mod={modifiers}'
if symbol == pyglet.window.key.R:
self.ball.v.x = random.randint(-3, 3)
self.ball.v.y = random.randint(-3, 3)
if symbol == pyglet.window.key.F:
print('full screen')
# self.set_fullscreen() # BUG remains with a black screen
if symbol == pyglet.window.key.C:
c = Circle(self, *self.mouse, 30)
self.objects.append(c)
# NOTE: R is already bound above to randomise the ball speed, so pressing
# R both changes the speed and adds a rectangle here.
if symbol == pyglet.window.key.R:
r = Rect(self, *self.mouse, 30, 15)
self.objects.append(r)
def on_key_release(self, symbol, modifiers):
self.status.text = f'key release: {symbol}, mod={modifiers}'
def on_mouse_press(self, x, y, button, modifiers):
self.status.text = f'mouse press at {x}, {y} button={button}'
self.selection_rect.position = x, y
def on_mouse_release(self, x, y, button, modifiers):
self.status.text = f'mouse release at {x}, {y} button={button}'
def on_mouse_motion(self, x, y, dx, dy):
self.status.text = f'mouse motion to {x}, {y}'
self.mouse = x, y
w = x - self.selection_rect.x
h = y - self.selection_rect.y
self.selection_rect.w = w
self.selection_rect.h = h
def on_mouse_drag(self, x, y, dx, dy, buttons, modifiers):
self.status.text = f'mouse drag {x}, {y}'
def on_mouse_leave(self, x, y):
self.status.text = f'mouse leave {x}, {y}'
def on_mouse_enter(self, x, y):
self.status.text = f'mouse enter {x}, {y}'
def on_mouse_scroll(self, x, y, scroll_x, scroll_y):
self.status.text = f'mouse scroll {scroll_x}, {scroll_y}'
def on_move(self, x, y):
self.status.text = f'window move: {x}, {y}'
# makes the program stop on a black window
# def on_resize(self, width, height):
# self.status.text = f'window resize: {width}, {height}'
#
# def on_close(self):
# print('on close')
def on_exit(self):
print('on exit')
if __name__ == '__main__':
win = AppWindow()
win2 = AppWindow(800, 200, 'Bouncing Ball')
win2.ball.v = Vec2(3, 5)
win2.ball.color = (255, 0, 0)
win3 = AppWindow(caption='Press R to change speed')
win3.ball.v = Vec2(5, -3)
win3.ball.color = (0, 0, 255)
pyglet.app.run()
sys.exit()
| 29.235955
| 87
| 0.538816
|
33e1d17e9a1683db427e7a9f0f65a7e519a3d29c
| 2,109
|
py
|
Python
|
main.py
|
adamwigg/mlp
|
69d48a7705d6d93b6b5348311e98a2f81af07214
|
[
"MIT"
] | null | null | null |
main.py
|
adamwigg/mlp
|
69d48a7705d6d93b6b5348311e98a2f81af07214
|
[
"MIT"
] | null | null | null |
main.py
|
adamwigg/mlp
|
69d48a7705d6d93b6b5348311e98a2f81af07214
|
[
"MIT"
] | null | null | null |
"""
Simple ANN (ann.py) Experiment setup and execution
Adam Wigg @ University of Canberra for Soft Computing S2/2021
"""
import ann
def main():
config = { # All new experiment variables
# Data
"data_file": "data/iris_str.csv", # Relative filepath to csv file
"data_seperator": ",", # Usually ' ' or ','
"header_rows": 0, # Number of rows to ignore
"normalize": True, # normalize data
"normalize_values": (-1, 1), # Min and Max values
"target_index": -1, # Target index, -1 being last column
"data_split": (0.66, 0.33), # Train, test (val = test if sum >= .99 else remaining)
# Misc
"random_seed": 2021, # Used to initiate the random generator
# Experiment parameters
"max_epochs": 200, # Max number of epochs to train
# eta and hidden_layers - multiple values in a list create a grid search (see the sketch after this dict)
"eta": 0.1,
# Hidden layer in the form (no of hidden layers, no of nodes, activation function)
# Available activation functions: ann.Relu(), ann.Tanh(), ann.Sigmoid() (output uses softmax)
"hidden_layers": (2, 8, ann.Relu())
}
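# Grid-search sketch (hypothetical values, not from the original file): as the
# comment inside the dict notes, lists expand into a grid, e.g.
#   "eta": [0.01, 0.1, 0.5],
#   "hidden_layers": [(1, 8, ann.Relu()), (2, 16, ann.Tanh())],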
"""
Experiment file
- Directory/filename for save experiment (pickle file)
"""
experiment_dir = "experiments/"
experiment_file = "exp01_02_08_sigmoid.pickle"
"""
Run experiment
- New experiment - loads csv data and uses config to build model
"""
my_experiment = ann.Experiment.new(config)
"""
Alternatively, load experiment
- Loads all the previous run properties (including data)
"""
# my_experiment = ann.Experiment.load(experiment_dir + experiment_file)
"""Modifying loaded experiment examples"""
# my_experiment.max_epochs = 500
# my_experiment.eta = 0.1
# my_experiment.hidden_layers = (4, 12, ann.Relu())
"""Run the experiment and report the results"""
my_experiment.run()
"""Save the experiment"""
my_experiment.save(experiment_dir + experiment_file)
print(f"\U0001F4BB Done!")
if __name__ == "__main__":
main()
| 32.953125
| 101
| 0.63395
|
e5d63f45b34eb290e1da5d11c6fe41cf3b4592dc
| 636
|
py
|
Python
|
HLTrigger/Configuration/python/HLT_75e33/eventsetup/hltPhase2L3MuonHighPtTripletStepChi2Est_cfi.py
|
PKUfudawei/cmssw
|
8fbb5ce74398269c8a32956d7c7943766770c093
|
[
"Apache-2.0"
] | 1
|
2021-11-30T16:24:46.000Z
|
2021-11-30T16:24:46.000Z
|
HLTrigger/Configuration/python/HLT_75e33/eventsetup/hltPhase2L3MuonHighPtTripletStepChi2Est_cfi.py
|
PKUfudawei/cmssw
|
8fbb5ce74398269c8a32956d7c7943766770c093
|
[
"Apache-2.0"
] | 4
|
2021-11-29T13:57:56.000Z
|
2022-03-29T06:28:36.000Z
|
HLTrigger/Configuration/python/HLT_75e33/eventsetup/hltPhase2L3MuonHighPtTripletStepChi2Est_cfi.py
|
PKUfudawei/cmssw
|
8fbb5ce74398269c8a32956d7c7943766770c093
|
[
"Apache-2.0"
] | 1
|
2021-11-30T16:16:05.000Z
|
2021-11-30T16:16:05.000Z
|
import FWCore.ParameterSet.Config as cms
hltPhase2L3MuonHighPtTripletStepChi2Est = cms.ESProducer("Chi2ChargeMeasurementEstimatorESProducer",
ComponentName = cms.string('hltPhase2L3MuonHighPtTripletStepChi2Est'),
MaxChi2 = cms.double(16.0),
MaxDisplacement = cms.double(0.5),
MaxSagitta = cms.double(2),
MinPtForHitRecoveryInGluedDet = cms.double(1000000.0),
MinimalTolerance = cms.double(0.5),
appendToDataLabel = cms.string(''),
clusterChargeCut = cms.PSet(
refToPSet_ = cms.string('SiStripClusterChargeCutLoose')
),
nSigma = cms.double(3),
pTChargeCutThreshold = cms.double(-1)
)
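# Usage note (a sketch, not part of the original cfi): a config fragment like
# this is normally pulled into a cms.Process via its dotted module path, e.g.
#   process.load('HLTrigger.Configuration.HLT_75e33.eventsetup.hltPhase2L3MuonHighPtTripletStepChi2Est_cfi')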
| 37.411765
| 100
| 0.735849
|
906bbb945cefda3270acfb20d15731894f64c843
| 14,577
|
py
|
Python
|
py36/lib/python3.6/site-packages/pyhandle/clientcredentials.py
|
soulflysof/Dockerfiles
|
0158c015a97873cff3181b2a9ebfa6753aaf3110
|
[
"Apache-2.0"
] | null | null | null |
py36/lib/python3.6/site-packages/pyhandle/clientcredentials.py
|
soulflysof/Dockerfiles
|
0158c015a97873cff3181b2a9ebfa6753aaf3110
|
[
"Apache-2.0"
] | null | null | null |
py36/lib/python3.6/site-packages/pyhandle/clientcredentials.py
|
soulflysof/Dockerfiles
|
0158c015a97873cff3181b2a9ebfa6753aaf3110
|
[
"Apache-2.0"
] | 1
|
2020-11-04T04:23:50.000Z
|
2020-11-04T04:23:50.000Z
|
'''
This module provides the class PIDClientCredentials
which handles the credentials for Handle server
Interaction and for the Search Servlet.
Author: Merret Buurman (DKRZ), 2015-2016
'''
import json
import os
import logging
import pyhandle
from pyhandle.handleexceptions import CredentialsFormatError, HandleSyntaxError
import pyhandle.utilhandle as utilhandle
import pyhandle.util as util
LOGGER = logging.getLogger(__name__)
LOGGER.addHandler(util.NullHandler())
class PIDClientCredentials(object):
'''
Provides authentication information to access a Handle server, either by
specifying username and password or by providing a json file containing
the relevant information.
'''
@staticmethod
def load_from_JSON(json_filename):
'''
Create a new instance of a PIDClientCredentials with information read
from a local JSON file.
:param json_filename: The path to the json credentials file. The json
file should have the following format:
.. code:: json
{
"handle_server_url": "https://url.to.your.handle.server",
"username": "index:prefix/suffix",
"password": "ZZZZZZZ",
"prefix": "prefix_to_use_for_writing_handles",
"handleowner": "username_to_own_handles"
}
Any additional key-value-pairs are stored in the instance as
config.
:raises: :exc:`~pyhandle.handleexceptions.CredentialsFormatError`
:raises: :exc:`~pyhandle.handleexceptions.HandleSyntaxError`
:return: An instance.
'''
try:
with open(json_filename, 'r') as json_file:
jsonfilecontent = json.loads(json_file.read())
except ValueError as exc:
raise CredentialsFormatError(msg="Invalid JSON syntax: "+str(exc))
instance = PIDClientCredentials(credentials_filename=json_filename,**jsonfilecontent)
return instance
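# Illustrative call (filename is hypothetical, not from the original module):
#   cred = PIDClientCredentials.load_from_JSON('my_credentials.json')
#   print(cred.get_username(), cred.get_server_URL())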
def __init__(self, **args):
'''
Initialize client credentials instance.
The constructor checks if enough arguments are passed to
authenticate at a handle server or search servlet. For this,
the following parameters are checked. Depending on the
chosen authentication method, only a subset of them are
required.
All other parameters passed are stored and can be retrieved
using 'get_config()'. If a credentials objects is used to
initialize the client, these key-value pairs are passed on
to the client constructor.
:param client: Client object to the HS ('rest' or 'db')
:param handle_server_url: Optional. The URL of the Handle System
server to read from. Defaults to 'https://hdl.handle.net'
:param username: Optional. This must be a handle value reference in
the format "index:prefix/suffix". The method will throw an exception
upon bad syntax or non-existing Handle. The existence or validity
of the password in the handle is not checked at this moment.
:param password: Optional. This is the password stored as secret key
in the actual Handle value the username points to.
:param handleowner: Optional. The username that will be given admin
permissions over every newly created handle. By default, it is
'200:0.NA/xyz' (where xyz is the prefix of the handle being created).
:param private_key: Optional. The path to a file containing the private
key that will be used for authentication in write mode. If this is
specified, a certificate needs to be specified too.
:param certificate_only: Optional. The path to a file containing the
client certificate that will be used for authentication in write
mode. If this is specified, a private key needs to be specified too.
:param certificate_and_key: Optional. The path to a file containing both
certificate and private key, used for authentication in write mode.
:param prefix: Prefix. This is not used by the library, but may be
retrieved by the user.
:credentials_filename: This is the file location of the credentials file,
if read from JSON. It is used to find the certificate/key files, if any.
:param \**args: Any other key-value pairs are stored and can be accessed
using 'get_config()'.
:raises: :exc:`~pyhandle.handleexceptions.HandleSyntaxError`
'''
util.log_instantiation(LOGGER, 'PIDClientCredentials', args, ['password','reverselookup_password'])
# Possible arguments:
useful_args = [
'client',
'handle_server_url',
'username',
'password',
'private_key',
'certificate_only',
'certificate_and_key',
'prefix',
'handleowner',
'reverselookup_password',
'reverselookup_username',
'reverselookup_baseuri',
'credentials_filename',
'db_host',
'db_user',
'db_password',
'db_name'
]
util.add_missing_optional_args_with_value_none(args, useful_args)
# Store args
self.__all_args = args
# Args that the constructor understands:
self.__client = args['client']
self.__handle_server_url = args['handle_server_url']
self.__username = args['username']
self.__password = args['password']
self.__prefix = args['prefix']
self.__handleowner = args['handleowner']
self.__private_key = args['private_key']
self.__certificate_only = args['certificate_only']
self.__certificate_and_key = args['certificate_and_key']
self.__reverselookup_password = args['reverselookup_password']
self.__reverselookup_username = args['reverselookup_username']
self.__reverselookup_baseuri = args['reverselookup_baseuri']
self.__credentials_filename = args['credentials_filename']
self.__db_host = args['db_host']
self.__db_user = args['db_user']
self.__db_password = args['db_password']
self.__db_name = args['db_name']
# All the other args collected as "additional config":
self.__additional_config = self.__collect_additional_arguments(args, useful_args)
# Some checks:
self.__check_handle_syntax()
self.__check_file_existence()
if self.__check_client_existence():
if self.__client == 'db':
self.__check_if_enough_args_for_hs_auth_db(args)
elif self.__client == 'rest':
self.__check_if_enough_args_for_revlookup_auth(args)
self.__check_if_enough_args_for_hs_auth()
else:
msg = 'Client not provided or empty'
raise CredentialsFormatError(msg=msg)
def __check_client_existence(self):
if not self.__client:
return False
return True
def __check_if_enough_args_for_hs_auth_db(self, args):
db_args = ['db_host', 'db_user', 'db_password', 'db_name']
empty_args = []
for k in db_args:
if not args[k]:
empty_args.append(k)
if empty_args:
msg = '(%s) are missing or empty' % empty_args
raise CredentialsFormatError(msg=msg)
def __collect_additional_arguments(self, args, used_args):
temp_additional_config = {}
for argname in args.keys():
if argname not in used_args:
temp_additional_config[argname] = args[argname]
if len(temp_additional_config) > 0:
return temp_additional_config
else:
return None
def __check_if_enough_args_for_revlookup_auth(self, args):
user = args['reverselookup_username'] or args['username']
pw = args['reverselookup_password'] or args['password']
url = args['reverselookup_baseuri'] or args['handle_server_url']
if user and pw and url:
self.__reverselookup = True
self.__reverselookup_username = user
self.__reverselookup_password = pw
self.__reverselookup_baseuri = url
LOGGER.debug('Sufficient information given for reverselookup.')
else:
self.__reverselookup = False
def __check_handle_syntax(self):
if self.__handleowner:
pyhandle.utilhandle.check_handle_syntax_with_index(self.__handleowner)
if self.__username:
pyhandle.utilhandle.check_handle_syntax_with_index(self.__username)
def __check_file_existence(self):
if self.__certificate_only:
try:
self.__certificate_only = self.__get_path_and_check_file_existence(self.__certificate_only)
except ValueError as e:
msg = '(certificate file): ' + str(e)  # exceptions have no .message attribute in Python 3
raise CredentialsFormatError(msg=msg)
if self.__certificate_and_key:
try:
self.__certificate_and_key = self.__get_path_and_check_file_existence(self.__certificate_and_key)
except ValueError as e:
msg = '(certificate and key file): ' + str(e)
raise CredentialsFormatError(msg=msg)
if self.__private_key:
try:
self.__private_key = self.__get_path_and_check_file_existence(self.__private_key)
except ValueError as e:
msg = '(private key file): ' + str(e)
raise CredentialsFormatError(msg=msg)
def __get_path_and_check_file_existence(self, path):
try:
path = util.get_absolute_path(path, self.__credentials_filename)
except ValueError: # not a valid path
thisdir = util.get_this_directory(self.__credentials_filename)
msg = ('Please provide an absolute path or a path relative to '
'the location of the credentials file\'s location (%s), '
'starting with %s.' % (thisdir, os.path.curdir))
raise ValueError(msg)
if not os.path.isfile(path): # file does not exist
msg = 'The file was not found at the specified path: '+path
raise ValueError(msg)
return path
def __check_if_enough_args_for_hs_auth(self):
# Which authentication method?
authentication_method = None
# DB authentication
if self.__db_host and self.__db_user and self.__db_password and self.__db_name:
authentication_method = 'db_auth'
# Username and Password
if self.__username and self.__password:
authentication_method = 'user_password'
# Certificate file and Key file
if self.__certificate_only and self.__private_key:
authentication_method = 'auth_cert_2files'
# Certificate and Key in one file
if self.__certificate_and_key:
authentication_method = 'auth_cert_1file'
# None was provided:
if authentication_method is None:
if self.__reverselookup is True:
msg = ('Insufficient credentials for writing to handle '
'server, but sufficient credentials for searching.')
LOGGER.info(msg)
else:
msg = ''
if self.__username and not self.__password:
msg += 'Username was provided, but no password. '
elif self.__password and not self.__username:
msg += 'Password was provided, but no username. '
if self.__certificate_only and not self.__private_key:
msg += 'A client certificate was provided, but no private key. '
elif self.__private_key and not self.__certificate_only:
msg += 'A private key was provided, but no client certificate. '
if self.__reverselookup is None:
msg += 'Reverse lookup credentials not checked yet.'
elif self.__reverselookup is False:
msg += 'Insufficient credentials for searching.'
raise CredentialsFormatError(msg=msg)
def get_all_args(self):
# pylint: disable=missing-docstring
return self.__all_args
def get_client(self):
# pylint: disable=missing-docstring
return self.__client
def get_username(self):
# pylint: disable=missing-docstring
return self.__username
def get_password(self):
# pylint: disable=missing-docstring
return self.__password
def get_server_URL(self):
# pylint: disable=missing-docstring
return self.__handle_server_url
def get_prefix(self):
# pylint: disable=missing-docstring
return self.__prefix
def get_handleowner(self):
# pylint: disable=missing-docstring
return self.__handleowner
def get_config(self):
# pylint: disable=missing-docstring
return self.__additional_config
def get_path_to_private_key(self):
# pylint: disable=missing-docstring
return self.__private_key
def get_path_to_file_certificate(self):
# pylint: disable=missing-docstring
return self.__certificate_only or self.__certificate_and_key
def get_path_to_file_certificate_only(self):
# pylint: disable=missing-docstring
return self.__certificate_only
def get_path_to_file_certificate_and_key(self):
# pylint: disable=missing-docstring
return self.__certificate_and_key
def get_reverselookup_username(self):
# pylint: disable=missing-docstring
return self.__reverselookup_username
def get_reverselookup_password(self):
# pylint: disable=missing-docstring
return self.__reverselookup_password
def get_reverselookup_baseuri(self):
# pylint: disable=missing-docstring
return self.__reverselookup_baseuri
def get_db_host(self):
# pylint: disable=missing-docstring
return self.__db_host
def get_db_user(self):
# pylint: disable=missing-docstring
return self.__db_user
def get_db_password(self):
# pylint: disable=missing-docstring
return self.__db_password
def get_db_name(self):
# pylint: disable=missing-docstring
return self.__db_name
| 38.159686
| 113
| 0.643136
|
932c23be2f0d7b759503325acadc00febaf79c4b
| 2,751
|
py
|
Python
|
cupcake/editor/minimap.py
|
billyeatcookies/cupcake
|
2f2d1d5f8a1a454e50283547cf433cc82d1825d6
|
[
"MIT"
] | 3
|
2022-03-29T12:55:24.000Z
|
2022-03-30T17:06:11.000Z
|
cupcake/editor/minimap.py
|
billyeatcookies/Cupcake
|
2f2d1d5f8a1a454e50283547cf433cc82d1825d6
|
[
"MIT"
] | null | null | null |
cupcake/editor/minimap.py
|
billyeatcookies/Cupcake
|
2f2d1d5f8a1a454e50283547cf433cc82d1825d6
|
[
"MIT"
] | null | null | null |
import tkinter as tk
class Minimap(tk.Frame):
def __init__(self, master, textw, *args, **kwargs):
super().__init__(master, *args, **kwargs)
self.master = master
self.tw = textw
self.font = ("Arial", 1, "bold")
self.config(bg="#252526", highlightthickness=0)
self.cw = tk.Canvas(self, bg="#1e1e1e", width=100, highlightthickness=0)
self.cw.pack(fill=tk.BOTH, expand=True, side=tk.LEFT)
self.slider_image = tk.PhotoImage(data="""iVBORw0KGgoAAAANSUhEUgAAAG4AAABFCAYAAACrMNMO
AAAACXBIWXMAAABfAAAAXwEqnu0dAAAAGXRFWHRTb2Z0d2FyZQB3d3cuaW5rc2NhcGUub3Jnm+48GgAAAMBJRE
FUeJzt0UENwCAAwMAxLajjhwOkz8M+pMmdgiYda5/5kPPeDuAf46KMizIuyrgo46KMizIuyrgo46KMizIuyrgo
46KMizIuyrgo46KMizIuyrgo46KMizIuyrgo46KMizIuyrgo46KMizIuyrgo46KMizIuyrgo46KMizIuyrgo46
KMizIuyrgo46KMizIuyrgo46KMizIuyrgo46KMizIuyrgo46KMizIuyrgo46KMizIuyrgo46KMizIu6gNeAwIJ
26ERewAAAABJRU5ErkJggg==""")
self.cw.create_image(0, 0, image=self.slider_image, anchor=tk.NW, tag="slider")
self.y_top_lim = 0
self._drag_data = {"y": 0, "item": None}
self.yvalue = 0
self.cw.tag_bind("slider", "<ButtonPress-1>", self.drag_start)
self.cw.tag_bind("slider", "<ButtonRelease-1>", self.drag_stop)
self.cw.tag_bind("slider", "<B1-Motion>", self.drag)
if textw:
self.redraw()
def attach(self, textw):
self.tw = textw
def redraw(self):
self.cw.delete("redrawn")
self.text = self.tw.get_all_text()
self.cw.create_text(5, 0, text=self.text, anchor=tk.NW, font=self.font, fill="#678ca0", tag="redrawn")
y = int(self.tw.textw.index(tk.INSERT).split(".")[0]) * 2
self.cw.create_line(0, y, 100, y, fill="#22374b", width=2, tag="redrawn")
self.y_bottom_lim = int(self.tw.textw.index(tk.END).split(".")[0]) * 2 + 10
def drag_start(self, event):
self._drag_data["item"] = self.cw.find_closest(event.x, event.y)[0]
self._drag_data["y"] = event.y
def drag_stop(self, event):
self._drag_data["item"] = None
self._drag_data["y"] = 0
def drag(self, event):
item = self._drag_data["item"]
if item != 1:  # the slider image is the first canvas item (id 1)
return
delta_y = event.y - self._drag_data["y"]
self.cw.move(item, 0, delta_y)
self._drag_data["y"] = event.y
self.yvalue = y = self.cw.coords(item)[1]
if y <= self.y_top_lim:
self.cw.move("slider", 0, -(y - self.y_top_lim))
elif y >= self.y_bottom_lim:
self.cw.move("slider", 0, -(y - self.y_bottom_lim))
self.tw.textw.yview(int(y / self.cw.winfo_height() * 100))
self.tw.master.redraw_ln()
| 37.175676
| 110
| 0.628135
|
301066464efb67b7028096275dfd3571b7c43add
| 59
|
py
|
Python
|
run.py
|
jpjenk/data_archiver
|
2e1658c1def0f4cb312f4ddc52fbd2c5e13f567d
|
[
"MIT"
] | 3
|
2019-01-24T10:36:44.000Z
|
2019-09-30T17:39:40.000Z
|
run.py
|
jpjenk/data_archiver
|
2e1658c1def0f4cb312f4ddc52fbd2c5e13f567d
|
[
"MIT"
] | 5
|
2019-02-15T01:32:05.000Z
|
2019-08-01T15:54:31.000Z
|
run.py
|
jpjenk/data_archiver
|
2e1658c1def0f4cb312f4ddc52fbd2c5e13f567d
|
[
"MIT"
] | null | null | null |
FLASK_APP=archiver.py flask run --host=0.0.0.0 --port=3080
| 29.5
| 58
| 0.728814
|
64b421255f6cf24e6cf3fc5a792a2a37da9946a2
| 2,596
|
py
|
Python
|
backend/utils/utils.py
|
napari-hub-bot/napari-hub
|
626b53a82e62bd00dab6f9ab0529441da6506c19
|
[
"MIT"
] | null | null | null |
backend/utils/utils.py
|
napari-hub-bot/napari-hub
|
626b53a82e62bd00dab6f9ab0529441da6506c19
|
[
"MIT"
] | null | null | null |
backend/utils/utils.py
|
napari-hub-bot/napari-hub
|
626b53a82e62bd00dab6f9ab0529441da6506c19
|
[
"MIT"
] | null | null | null |
import os
from typing import List
import requests
from bs4 import BeautifulSoup
from markdown import markdown
from requests import HTTPError
# Environment variable set through ecs stack terraform module
slack_url = os.environ.get('SLACK_URL')
def get_attribute(obj: dict, path: list):
"""
Get attribute iteratively from a json object.
:param obj: object to iterate on
:param path: list of keys (strings for dicts, ints for lists) forming the sub path within the json
:return: the value if the path is accessible, empty string if not found
"""
current_location = obj
for token in path:
if isinstance(current_location, dict) and token in current_location:
current_location = current_location[token]
elif isinstance(current_location, list) and token < len(current_location):
current_location = current_location[token]
else:
return ""
return current_location
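# Examples (illustrative, not from the original module):
#   get_attribute({'a': {'b': [10, 20]}}, ['a', 'b', 1])  # -> 20
#   get_attribute({'a': {}}, ['a', 'missing'])            # -> ""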
def filter_prefix(str_list: List[str], prefix: str) -> list:
"""
Filter the list for strings with the given prefix.
:param str_list: list of strings to filter
:param prefix: prefix to filter on
:return: list of filtered strings
"""
return [string for string in str_list if string.startswith(prefix)]
def render_description(description: str) -> str:
"""
Render description with beautiful soup to generate html format description text.
:param description: raw description to render
:return: rendered description html text
"""
if description != '':
html = markdown(description)
soup = BeautifulSoup(html, 'html.parser')
return soup.get_text()
return ''
def send_alert(message: str):
"""
Send alert to slack with a message.
:param message: message to send alongside the alert
"""
payload = {
"text": message
}
if not slack_url:
print(f"Unable to send alert because slack URL is not set: {message}")
else:
try:
requests.post(slack_url, json=payload)
except HTTPError:
print("Unable to send alert")
def reformat_ssh_key_to_pem_bytes(ssh_key_str: str) -> bytes:
"""
Reformat the ssh key string to pem format bytes for the github client.
:param ssh_key_str: utf-8 string without header and footer for the github app rsa private key
:return: pem formatted private key in bytes with header and footer
"""
chunked = '\n'.join(ssh_key_str[i:i+64] for i in range(0, len(ssh_key_str), 64))
return f"-----BEGIN RSA PRIVATE KEY-----\n{chunked}\n-----END RSA PRIVATE KEY-----\n".encode("utf-8")
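# Sketch of the chunking above (hypothetical key material): a 128-character
# key body becomes two 64-character lines inside the RSA header/footer:
#   reformat_ssh_key_to_pem_bytes('A' * 128)
#   # -> b'-----BEGIN RSA PRIVATE KEY-----\nAAA...A\nAAA...A\n-----END RSA PRIVATE KEY-----\n'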
| 30.541176
| 105
| 0.676425
|
b211871e89556734410f0a05062bdd2a111a87b9
| 8,134
|
py
|
Python
|
data/example_graphs.py
|
EddieCunningham/CausalInference
|
5938787a41222ae1810d5c649a1f3b93285fbb1e
|
[
"MIT"
] | 1
|
2020-04-21T08:44:05.000Z
|
2020-04-21T08:44:05.000Z
|
data/example_graphs.py
|
hebo910820/CausalInference
|
5938787a41222ae1810d5c649a1f3b93285fbb1e
|
[
"MIT"
] | null | null | null |
data/example_graphs.py
|
hebo910820/CausalInference
|
5938787a41222ae1810d5c649a1f3b93285fbb1e
|
[
"MIT"
] | 1
|
2019-09-17T02:34:31.000Z
|
2019-09-17T02:34:31.000Z
|
from host.src.graph import Graph
def polyTree1():
graph = Graph()
graph.addEdge( parents=[ 0, 1 ], children=[ 2 ] )
return graph
def polyTree2():
graph = Graph()
graph.addEdge( parents=[ 0, 1 ], children=[ 4 ] )
graph.addEdge( parents=[ 2, 3 ], children=[ 5 ] )
graph.addEdge( parents=[ 4, 5 ], children=[ 6, 7 ] )
return graph
def polyTree3():
graph = Graph()
graph.addEdge( parents=[ 0, 1 ], children=[ 3 ] )
graph.addEdge( parents=[ 1, 2 ], children=[ 4 ] )
return graph
def polyTree4():
graph = Graph()
graph.addEdge( parents=[ 0, 1, 2 ], children=[ 4, 5 ] )
graph.addEdge( parents=[ 2, 3 ], children=[ 6 ] )
return graph
def polyTree5():
graph = Graph()
graph.addEdge( parents=[ 0, 1 ], children=[ 6, 7 ] )
graph.addEdge( parents=[ 2 ], children=[ 8 ] )
graph.addEdge( parents=[ 3, 4, 5 ], children=[ 9, 10, 11 ] )
graph.addEdge( parents=[ 7, 8 ], children=[ 12 ] )
graph.addEdge( parents=[ 6, 9 ], children=[ 13 ] )
graph.addEdge( parents=[ 15, 16 ], children=[ 17 ] )
graph.addEdge( parents=[ 13, 17 ], children=[ 14 ] )
return graph
def polyTree6():
graph = Graph()
graph.addEdge( parents=[ 0 ], children=[ 1 ] )
return graph
def polyTree7():
graph = Graph()
graph.addEdge( parents=[ 0, 1 ], children=[ 2 ] )
graph.addEdge( parents=[ 2 ], children=[ 3 ] )
graph.addEdge( parents=[ 3 ], children=[ 4 ] )
return graph
def polyTree8():
graph = Graph()
graph.addEdge( parents=[ 0 ], children=[ 1 ] )
graph.addEdge( parents=[ 1 ], children=[ 2 ] )
graph.addEdge( parents=[ 2 ], children=[ 3 ] )
graph.addEdge( parents=[ 3 ], children=[ 4 ] )
graph.addEdge( parents=[ 4 ], children=[ 5 ] )
graph.addEdge( parents=[ 5 ], children=[ 6 ] )
graph.addEdge( parents=[ 6 ], children=[ 7 ] )
graph.addEdge( parents=[ 7 ], children=[ 8 ] )
graph.addEdge( parents=[ 8 ], children=[ 9 ] )
graph.addEdge( parents=[ 9 ], children=[ 10 ] )
graph.addEdge( parents=[ 10 ], children=[ 11 ] )
graph.addEdge( parents=[ 11 ], children=[ 12 ] )
graph.addEdge( parents=[ 12 ], children=[ 13 ] )
graph.addEdge( parents=[ 13 ], children=[ 14 ] )
graph.addEdge( parents=[ 14 ], children=[ 15 ] )
return graph
def graph1():
graph = Graph()
graph.addEdge( parents=[ 0, 1 ], children=[ 2, 3 ] )
graph.addEdge( parents=[ 2, 3 ], children=[ 4 ] )
# fbs = np.array( [ 2 ] )
# return graph, fbs
return graph
def graph2():
graph = Graph()
graph.addEdge( parents=[ 0, 1 ], children=[ 3 ] )
graph.addEdge( parents=[ 1, 2 ], children=[ 4 ] )
graph.addEdge( parents=[ 2, 3, 4 ], children=[ 5, 6 ] )
# fbs = np.array( [ 2, 4 ] )
# return graph, fbs
return graph
def graph3():
graph = Graph()
graph.addEdge( parents=[ 0, 1 ], children=[ 2, 3, 9 ] )
graph.addEdge( parents=[ 2, 3 ], children=[ 4 ] )
graph.addEdge( parents=[ 1, 2, 3, 8 ], children=[ 5, 6 ] )
graph.addEdge( parents=[ 1, 2, 4, 5, 6, 9 ], children=[ 7, 10 ] )
# fbs = np.array( [ 1, 2, 3, 4, 6 ] )
# return graph, fbs
return graph
def graph4():
graph = Graph()
graph.addEdge( parents=[ 0 ], children=[ 1 ] )
graph.addEdge( parents=[ 1 ], children=[ 0 ] )
# fbs = np.array( [ 0 ] )
assert 0, 'This graph has a feedback cycle'
# return graph, fbs
return graph
def graph5():
graph = Graph()
graph.addEdge( parents=[ 0, 1 ], children=[ 2, 3, 9 ] )
graph.addEdge( parents=[ 2, 3 ], children=[ 4 ] )
graph.addEdge( parents=[ 1, 2, 3, 8 ], children=[ 5, 6 ] )
graph.addEdge( parents=[ 1, 2, 4, 5, 6, 9 ], children=[ 7, 10 ] )
graph.addEdge( parents=[ 7, 10 ], children=[ 0 ] )
# fbs = np.array( [ 0, 1, 2, 3, 4, 6 ] )
assert 0, 'This graph has a feedback cycle'
# return graph, fbs
return graph
def graph6():
graph = Graph()
graph.addEdge( parents=[ 0, 1 ], children=[ 2 ] )
graph.addEdge( parents=[ 2, 3 ], children=[ 0 ] )
# fbs = np.array( [ 0 ] )
assert 0, 'This graph has a feedback cycle'
# return graph, fbs
return graph
def graph7():
graph = Graph()
graph.addEdge( parents=[ 0 ], children=[ 1, 2 ] )
graph.addEdge( parents=[ 1, 2 ], children=[ 3 ] )
# fbs = np.array( [ 2 ] )
# return graph, fbs
return graph
def graph8():
graph = Graph()
graph.addEdge( parents=[ 0 ], children=[ 1, 2 ] )
graph.addEdge( parents=[ 1, 2 ], children=[ 3 ] )
# fbs = np.array( [ 2 ] )
# return graph, fbs
return graph
def graph9():
graph = Graph()
graph.addEdge( parents=[ 7, 8 ], children=[ 0 ] )
graph.addEdge( parents=[ 0, 1 ], children=[ 3 ] )
graph.addEdge( parents=[ 1, 2 ], children=[ 4 ] )
graph.addEdge( parents=[ 2, 3, 4 ], children=[ 5, 6 ] )
# fbs = np.array( [ 2, 4 ] )
# return graph, fbs
return graph
def graph10():
graph = Graph()
graph.addEdge( parents=[ 0 ], children=[ 1, 2 ] )
graph.addEdge( parents=[ 1, 2 ], children=[ 3 ] )
graph.addEdge( parents=[ 3 ], children=[ 4, 5 ] )
graph.addEdge( parents=[ 4, 5 ], children=[ 6 ] )
# fbs = np.array( [ 1, 4 ] )
# return graph, fbs
return graph
def graph11():
graph = Graph()
graph.addEdge( parents=[ 0 ], children=[ 1 ] )
graph.addEdge( parents=[ 1, 2 ], children=[ 3, 4, 5 ] )
graph.addEdge( parents=[ 3, 4 ], children=[ 6 ] )
graph.addEdge( parents=[ 6 ], children=[ 7 ] )
graph.addEdge( parents=[ 5, 6 ], children=[ 8 ] )
# fbs = np.array( [ 3, 5 ] )
# return graph, fbs
return graph
def graph12():
graph = Graph()
graph.addEdge( parents=[ 0, 1, 2 ], children=[ 6, 7 ] )
graph.addEdge( parents=[ 3 ], children=[ 8 ] )
graph.addEdge( parents=[ 4, 5 ], children=[ 9 ] )
graph.addEdge( parents=[ 5, 6 ], children=[ 10 ] )
graph.addEdge( parents=[ 6 ], children=[ 11 ] )
graph.addEdge( parents=[ 7, 8 ], children=[ 12 ] )
graph.addEdge( parents=[ 11 ], children=[ 13 ] )
graph.addEdge( parents=[ 11, 12 ], children=[ 14 ] )
graph.addEdge( parents=[ 9, 10, 11 ], children=[ 15 ] )
graph.addEdge( parents=[ 13, 14 ], children=[ 16 ] )
graph.addEdge( parents=[ 8, 14 ], children=[ 17 ] )
graph.addEdge( parents=[ 15, 16, 17 ], children=[ 18, 19, 20 ] )
# fbs = np.array( [ 5, 6, 11, 14 ] )
# return graph, fbs
return graph
def graph13():
graph = Graph()
graph.addEdge( parents=[ 0, 1 ], children=[ 3 ] )
graph.addEdge( parents=[ 1, 2 ], children=[ 4 ] )
graph.addEdge( parents=[ 3 ], children=[ 5 ] )
graph.addEdge( parents=[ 4 ], children=[ 7 ] )
graph.addEdge( parents=[ 5, 6 ], children=[ 8 ] )
graph.addEdge( parents=[ 6, 7 ], children=[ 9 ] )
# fbs = np.array( [ 6 ] )
# return graph, fbs
return graph
def graph14():
graph = Graph()
graph.addEdge( parents=[ 0 ], children=[ 1, 2 ] )
graph.addEdge( parents=[ 1 ], children=[ 3 ] )
graph.addEdge( parents=[ 2 ], children=[ 4 ] )
graph.addEdge( parents=[ 3, 4 ], children=[ 5 ] )
# fbs = np.array( [ 1 ] )
# return graph, fbs
return graph
def graph15():
graph = Graph()
graph.addEdge( parents=[ 0 ], children=[ 1, 2 ] )
graph.addEdge( parents=[ 1 ], children=[ 3, 4 ] )
graph.addEdge( parents=[ 2 ], children=[ 5 ] )
graph.addEdge( parents=[ 3 ], children=[ 6 ] )
graph.addEdge( parents=[ 4, 6 ], children=[ 7 ] )
graph.addEdge( parents=[ 4, 5 ], children=[ 8 ] )
# fbs = np.array( [ 4 ] )
# return graph, fbs
return graph
def graph16():
graph = Graph()
graph.addEdge( parents=[ 0 ], children=[ 2, 3 ] )
graph.addEdge( parents=[ 1 ], children=[ 4, 5 ] )
graph.addEdge( parents=[ 2, 3 ], children=[ 6 ] )
graph.addEdge( parents=[ 4, 5 ], children=[ 7 ] )
graph.addEdge( parents=[ 6, 7 ], children=[ 8 ] )
graph.addEdge( parents=[ 2, 8 ], children=[ 9 ] )
graph.addEdge( parents=[ 5, 8 ], children=[ 10 ] )
# fbs = np.array( [ 3, 4, 6, 7 ] )
# return graph, fbs
return graph
| 26.668852
| 69
| 0.549914
|
4a0a0df4944e8651a6c47f8c62dee081bb1a8126
| 2,241
|
py
|
Python
|
sphinx/util/docstrings.py
|
merwok-forks/sphinx
|
b7cada236f765003a73ab5dca48f975d54c0c298
|
[
"BSD-2-Clause"
] | null | null | null |
sphinx/util/docstrings.py
|
merwok-forks/sphinx
|
b7cada236f765003a73ab5dca48f975d54c0c298
|
[
"BSD-2-Clause"
] | null | null | null |
sphinx/util/docstrings.py
|
merwok-forks/sphinx
|
b7cada236f765003a73ab5dca48f975d54c0c298
|
[
"BSD-2-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
sphinx.util.docstrings
~~~~~~~~~~~~~~~~~~~~~~
Utilities for docstring processing.
:copyright: Copyright 2007-2016 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import sys
if False:
# For type annotation
from typing import List # NOQA
def prepare_docstring(s, ignore=1):
# type: (unicode, int) -> List[unicode]
"""Convert a docstring into lines of parseable reST. Remove common leading
indentation, where the indentation of a given number of lines (usually just
one) is ignored.
Return the docstring as a list of lines usable for inserting into a docutils
ViewList (used as argument of nested_parse().) An empty line is added to
act as a separator between this docstring and following content.
"""
lines = s.expandtabs().splitlines()
# Find minimum indentation of any non-blank lines after ignored lines.
margin = sys.maxsize
for line in lines[ignore:]:
content = len(line.lstrip())
if content:
indent = len(line) - content
margin = min(margin, indent)
# Remove indentation from ignored lines.
for i in range(ignore):
if i < len(lines):
lines[i] = lines[i].lstrip()
if margin < sys.maxsize:
for i in range(ignore, len(lines)):
lines[i] = lines[i][margin:]
# Remove any leading blank lines.
while lines and not lines[0]:
lines.pop(0)
# make sure there is an empty line at the end
if lines and lines[-1]:
lines.append('')
return lines
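# Example (illustrative, not from the original module):
#   prepare_docstring("Summary.\n\n    indented body\n")
#   # -> ['Summary.', '', 'indented body', '']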
def prepare_commentdoc(s):
# type: (unicode) -> List[unicode]
"""Extract documentation comment lines (starting with #:) and return them
as a list of lines. Returns an empty list if there is no documentation.
"""
result = []
lines = [line.strip() for line in s.expandtabs().splitlines()]
for line in lines:
if line.startswith('#:'):
line = line[2:]
# the first space after the comment is ignored
if line and line[0] == ' ':
line = line[1:]
result.append(line)
if result and result[-1]:
result.append('')
return result
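# Example (illustrative, not from the original module):
#   prepare_commentdoc("#: first line\n#: second line\nx = 1\n")
#   # -> ['first line', 'second line', '']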
| 32.014286
| 80
| 0.616243
|
5dde9b3a41f51150e289a2d0c701421346b1c8fc
| 6,704
|
py
|
Python
|
mac/google-cloud-sdk/lib/googlecloudsdk/api_lib/events/custom_resource_definition.py
|
bopopescu/cndw
|
ee432efef88a4351b355f3d6d5350defc7f4246b
|
[
"Apache-2.0"
] | null | null | null |
mac/google-cloud-sdk/lib/googlecloudsdk/api_lib/events/custom_resource_definition.py
|
bopopescu/cndw
|
ee432efef88a4351b355f3d6d5350defc7f4246b
|
[
"Apache-2.0"
] | null | null | null |
mac/google-cloud-sdk/lib/googlecloudsdk/api_lib/events/custom_resource_definition.py
|
bopopescu/cndw
|
ee432efef88a4351b355f3d6d5350defc7f4246b
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*- #
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Wraps a CRD message, making fields more convenient."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import json
import re
from googlecloudsdk.api_lib.run import k8s_object
# Identify parameters that are used to set secret values
_SECRET_PROPERTY_PATTERN = '^.*[sS]ecret$'
def _IsSecretProperty(property_name, property_type):
return (re.match(_SECRET_PROPERTY_PATTERN, property_name) and
property_type == 'object')
class SourceSpecProperty(object):
"""Has details for a spec property of a source. Not write-through."""
def __init__(self, name, description, type_, required):
self.name = name
self.description = description
self.type = type_
self.required = required
_EVENT_TYPE_REGISTRY_KEY = 'registry.knative.dev/eventTypes'
class EventType(object):
"""Has details for an event type of a source. Not write-through."""
def __init__(self, source_crd, **kwargs):
"""Initialize a holder of info about an event type.
Args:
source_crd: SourceCustomResourceDefinition, the event type's parent
source CRD.
**kwargs: properties of the event type.
"""
self._crd = source_crd
self._properties = kwargs
def __getattr__(self, attr):
try:
return self._properties[attr]
except KeyError as e:
raise AttributeError(e.args[0])
@property
def crd(self):
"""Returns the source crd."""
return self._crd
@property
def details(self):
"""Returns a dict with details about this event type."""
details = self.AsDict()
details['category'] = self._crd.source_kind
return details
def AsDict(self):
"""Returns a dict with properties of this event type."""
return self._properties.copy()
def __eq__(self, other):
if isinstance(other, type(self)):
# pylint:disable=protected-access
return self._properties == other._properties and self._crd == other._crd
return False
class SourceCustomResourceDefinition(k8s_object.KubernetesObject):
"""Wraps an Source CRD message, making fields more convenient."""
API_CATEGORY = 'apiextensions.k8s.io'
KIND = 'CustomResourceDefinition'
READY_CONDITION = None # The status field is not currently used on CRDs
FIELD_BLACKLIST = ['openAPIV3Schema']
# These fields should not be exposed to the user as regular parameters to be
# set either because we'll provide another way to specify them, because
# we'll set them ourselves, or because they're not meant to be set.
_PRIVATE_PROPERTY_FIELDS = frozenset({'sink', 'ceOverrides'})
@property
def source_kind(self):
return self._m.spec.names.kind
@property
def source_kind_plural(self):
return self._m.spec.names.plural
@property
def source_api_category(self):
return self._m.spec.group
@property
def source_version(self):
return self._m.spec.version
@property
def schema(self):
return JsonSchemaPropsWrapper(self._m.spec.validation.openAPIV3Schema)
@property
def event_types(self):
"""Returns List[EventType] from the registry annotation json string."""
if _EVENT_TYPE_REGISTRY_KEY not in self.annotations:
return []
event_types = json.loads(self.annotations[_EVENT_TYPE_REGISTRY_KEY])
return [EventType(self, **et) for et in event_types]
@event_types.setter
def event_types(self, event_type_holders):
"""Sets the registry annotation given a List[EventType]."""
event_type_dicts = [et.AsDict() for et in event_type_holders]
self.annotations[_EVENT_TYPE_REGISTRY_KEY] = json.dumps(event_type_dicts)
@property
def secret_properties(self):
"""The properties used to define source secrets.
Returns:
List[SourceSpecProperty], modifying this list does *not* modify the
underlying properties in the SourceCRD.
"""
properties = []
required_properties = self.schema['spec'].required
for k, v in self.schema['spec'].items():
if (k not in self._PRIVATE_PROPERTY_FIELDS and
_IsSecretProperty(k, v.type)):
properties.append(
SourceSpecProperty(
name=k,
description=v.description,
type_=v.type,
required=k in required_properties))
return properties
@property
def properties(self):
"""The user-configurable properties of the source.
Returns:
List[SourceSpecProperty], modifying this list does *not* modify the
underlying properties in the SourceCRD.
"""
properties = []
required_properties = self.schema['spec'].required
for k, v in self.schema['spec'].items():
if (k not in self._PRIVATE_PROPERTY_FIELDS and
not _IsSecretProperty(k, v.type)):
properties.append(
SourceSpecProperty(
name=k,
description=v.description,
type_=v.type,
required=k in required_properties))
return properties
class JsonSchemaPropsWrapper(k8s_object.ListAsReadOnlyDictionaryWrapper):
"""Wrap a JSONSchemaProps message with properties in a dict-like manner.
Nesting in JSONSchemaProps messages is done via lists of its own type.
This class provides access to the underlying information in a dict-like
manner rather than needing to handle accessing the lists directly.
"""
def __init__(self, to_wrap):
"""Wrap the actual keys and values of a JSONSchemaProps message.
Args:
to_wrap: JSONSchemaProps message
"""
super(JsonSchemaPropsWrapper, self).__init__(
to_wrap.properties.additionalProperties, key_field='key')
self._wrapped_json = to_wrap
def __getattr__(self, attr):
"""Fallthrough to the underlying wrapped json to access other fields."""
return getattr(self._wrapped_json, attr)
def __getitem__(self, key):
item = super(JsonSchemaPropsWrapper, self).__getitem__(key)
value = item.value
if value.properties is None:
# It doesn't go any deeper, return the actual value
return value
return JsonSchemaPropsWrapper(value)
| 31.92381
| 78
| 0.70898
|
6600c6344c7dc3404bffcae114afe1b89366eb66
| 13,370
|
py
|
Python
|
lldb/test/API/macosx/lc-note/firmware-corefile/TestFirmwareCorefiles.py
|
LaudateCorpus1/llvm-project
|
ff2e0f0c1112558b3f30d8afec7c9882c33c79e3
|
[
"Apache-2.0"
] | 605
|
2019-10-18T01:15:54.000Z
|
2022-03-31T14:31:04.000Z
|
lldb/test/API/macosx/lc-note/firmware-corefile/TestFirmwareCorefiles.py
|
LaudateCorpus1/llvm-project
|
ff2e0f0c1112558b3f30d8afec7c9882c33c79e3
|
[
"Apache-2.0"
] | 3,180
|
2019-10-18T01:21:21.000Z
|
2022-03-31T23:25:41.000Z
|
lldb/test/API/macosx/lc-note/firmware-corefile/TestFirmwareCorefiles.py
|
LaudateCorpus1/llvm-project
|
ff2e0f0c1112558b3f30d8afec7c9882c33c79e3
|
[
"Apache-2.0"
] | 275
|
2019-10-18T05:27:22.000Z
|
2022-03-30T09:04:21.000Z
|
"""Test that corefiles with LC_NOTE "kern ver str" and "main bin spec" load commands works."""
import os
import re
import subprocess
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestFirmwareCorefiles(TestBase):
mydir = TestBase.compute_mydir(__file__)
@skipIf(debug_info=no_match(["dsym"]), bugnumber="This test is looking explicitly for a dSYM")
@skipIf(archs=no_match(['x86_64', 'arm64', 'arm64e', 'aarch64']))
@skipIfRemote
@skipUnlessDarwin
def test_lc_note_version_string(self):
self.build()
aout_exe_basename = "a.out"
aout_exe = self.getBuildArtifact(aout_exe_basename)
verstr_corefile = self.getBuildArtifact("verstr.core")
verstr_corefile_addr = self.getBuildArtifact("verstr-addr.core")
create_corefile = self.getBuildArtifact("create-empty-corefile")
slide = 0x70000000000
call(create_corefile + " version-string " + verstr_corefile + " " + aout_exe + " 0xffffffffffffffff 0xffffffffffffffff", shell=True)
call(create_corefile + " version-string " + verstr_corefile_addr + " " + aout_exe + (" 0x%x" % slide) + " 0xffffffffffffffff", shell=True)
if self.TraceOn():
self.runCmd("log enable lldb dyld host")
self.addTearDownHook(lambda: self.runCmd("log disable lldb dyld host"))
# Register the a.out binary with this UUID in lldb's global module
# cache, then throw the Target away.
target = self.dbg.CreateTarget(aout_exe)
self.dbg.DeleteTarget(target)
# First, try the "kern ver str" corefile
target = self.dbg.CreateTarget('')
err = lldb.SBError()
if self.TraceOn():
self.runCmd("script print('loading corefile %s')" % verstr_corefile)
process = target.LoadCore(verstr_corefile)
self.assertEqual(process.IsValid(), True)
if self.TraceOn():
self.runCmd("image list")
self.runCmd("target mod dump sections")
self.assertEqual(target.GetNumModules(), 1)
fspec = target.GetModuleAtIndex(0).GetFileSpec()
self.assertEqual(fspec.GetFilename(), aout_exe_basename)
self.dbg.DeleteTarget(target)
# Second, try the "kern ver str" corefile where it loads at an address
target = self.dbg.CreateTarget('')
err = lldb.SBError()
if self.TraceOn():
self.runCmd("script print('loading corefile %s')" % verstr_corefile_addr)
process = target.LoadCore(verstr_corefile_addr)
self.assertEqual(process.IsValid(), True)
if self.TraceOn():
self.runCmd("image list")
self.runCmd("target mod dump sections")
self.assertEqual(target.GetNumModules(), 1)
fspec = target.GetModuleAtIndex(0).GetFileSpec()
self.assertEqual(fspec.GetFilename(), aout_exe_basename)
main_sym = target.GetModuleAtIndex(0).FindSymbol("main", lldb.eSymbolTypeAny)
main_addr = main_sym.GetStartAddress()
self.assertGreater(main_addr.GetLoadAddress(target), slide)
self.assertNotEqual(main_addr.GetLoadAddress(target), lldb.LLDB_INVALID_ADDRESS)
self.dbg.DeleteTarget(target)
@skipIf(debug_info=no_match(["dsym"]), bugnumber="This test is looking explicitly for a dSYM")
@skipIf(archs=no_match(['x86_64', 'arm64', 'arm64e', 'aarch64']))
@skipIfRemote
@skipUnlessDarwin
def test_lc_note_main_bin_spec(self):
self.build()
aout_exe_basename = "a.out"
aout_exe = self.getBuildArtifact(aout_exe_basename)
create_corefile = self.getBuildArtifact("create-empty-corefile")
binspec_corefile = self.getBuildArtifact("binspec.core")
binspec_corefile_addr = self.getBuildArtifact("binspec-addr.core")
binspec_corefile_slideonly = self.getBuildArtifact("binspec-addr-slideonly.core")
slide = 0x70000000000
### Create our corefile
# 0xffffffffffffffff means load address unknown
call(create_corefile + " main-bin-spec " + binspec_corefile + " " + aout_exe + " 0xffffffffffffffff 0xffffffffffffffff", shell=True)
call(create_corefile + " main-bin-spec " + binspec_corefile_addr + " " + aout_exe + (" 0x%x" % slide) + " 0xffffffffffffffff", shell=True)
call(create_corefile + " main-bin-spec " + binspec_corefile_slideonly + " " + aout_exe + " 0xffffffffffffffff" + (" 0x%x" % slide), shell=True)
if self.TraceOn():
self.runCmd("log enable lldb dyld host")
self.addTearDownHook(lambda: self.runCmd("log disable lldb dyld host"))
# Register the a.out binary with this UUID in lldb's global module
# cache, then throw the Target away.
target = self.dbg.CreateTarget(aout_exe)
self.dbg.DeleteTarget(target)
# First, try the "main bin spec" corefile
target = self.dbg.CreateTarget('')
if self.TraceOn():
self.runCmd("script print('loading corefile %s')" % binspec_corefile)
process = target.LoadCore(binspec_corefile)
self.assertEqual(process.IsValid(), True)
if self.TraceOn():
self.runCmd("image list")
self.runCmd("target mod dump sections")
self.assertEqual(target.GetNumModules(), 1)
fspec = target.GetModuleAtIndex(0).GetFileSpec()
self.assertEqual(fspec.GetFilename(), aout_exe_basename)
self.dbg.DeleteTarget(target)
# Second, try the "main bin spec" corefile where it loads at an address
target = self.dbg.CreateTarget('')
if self.TraceOn():
self.runCmd("script print('loading corefile %s')" % binspec_corefile_addr)
process = target.LoadCore(binspec_corefile_addr)
self.assertEqual(process.IsValid(), True)
if self.TraceOn():
self.runCmd("image list")
self.runCmd("target mod dump sections")
self.assertEqual(target.GetNumModules(), 1)
fspec = target.GetModuleAtIndex(0).GetFileSpec()
self.assertEqual(fspec.GetFilename(), aout_exe_basename)
main_sym = target.GetModuleAtIndex(0).FindSymbol("main", lldb.eSymbolTypeAny)
main_addr = main_sym.GetStartAddress()
self.assertGreater(main_addr.GetLoadAddress(target), slide)
self.assertNotEqual(main_addr.GetLoadAddress(target), lldb.LLDB_INVALID_ADDRESS)
self.dbg.DeleteTarget(target)
# Third, try the "main bin spec" corefile where it loads at a slide
target = self.dbg.CreateTarget('')
if self.TraceOn():
self.runCmd("script print('loading corefile %s')" % binspec_corefile_slideonly)
process = target.LoadCore(binspec_corefile_slideonly)
self.assertEqual(process.IsValid(), True)
if self.TraceOn():
self.runCmd("image list")
self.runCmd("target mod dump sections")
self.assertEqual(target.GetNumModules(), 1)
fspec = target.GetModuleAtIndex(0).GetFileSpec()
self.assertEqual(fspec.GetFilename(), aout_exe_basename)
main_sym = target.GetModuleAtIndex(0).FindSymbol("main", lldb.eSymbolTypeAny)
main_addr = main_sym.GetStartAddress()
self.assertGreater(main_addr.GetLoadAddress(target), slide)
self.assertNotEqual(main_addr.GetLoadAddress(target), lldb.LLDB_INVALID_ADDRESS)
self.dbg.DeleteTarget(target)
@skipIf(debug_info=no_match(["dsym"]), bugnumber="This test is looking explicitly for a dSYM")
@skipIf(archs=no_match(['x86_64', 'arm64', 'arm64e', 'aarch64']))
@skipIfRemote
@skipUnlessDarwin
def test_lc_note_main_bin_spec_os_plugin(self):
self.build()
aout_exe = self.getBuildArtifact("a.out")
aout_exe_basename = "a.out"
create_corefile = self.getBuildArtifact("create-empty-corefile")
binspec_corefile_addr = self.getBuildArtifact("binspec-addr.core")
slide = 0x70000000000
### Create our corefile
# 0xffffffffffffffff means load address unknown
call(create_corefile + " main-bin-spec " + binspec_corefile_addr + " " + aout_exe + (" 0x%x" % slide) + " 0xffffffffffffffff", shell=True)
## We can hook in our dsym-for-uuid shell script to lldb with this env
## var instead of requiring a defaults write.
dsym_for_uuid = self.getBuildArtifact("dsym-for-uuid.sh")
os.environ['LLDB_APPLE_DSYMFORUUID_EXECUTABLE'] = dsym_for_uuid
if self.TraceOn():
print("Setting env var LLDB_APPLE_DSYMFORUUID_EXECUTABLE=" + dsym_for_uuid)
self.addTearDownHook(lambda: os.environ.pop('LLDB_APPLE_DSYMFORUUID_EXECUTABLE', None))
self.runCmd("settings set target.load-script-from-symbol-file true")
self.addTearDownHook(lambda: self.runCmd("settings set target.load-script-from-symbol-file false"))
dsym_python_dir = os.path.join('%s.dSYM' % aout_exe, 'Contents', 'Resources', 'Python')
os.makedirs(dsym_python_dir)
python_os_plugin_path = os.path.join(self.getSourceDir(),
'operating_system.py')
python_init = [
'def __lldb_init_module(debugger, internal_dict):',
' debugger.HandleCommand(\'settings set target.process.python-os-plugin-path %s\')' % python_os_plugin_path,
]
with open(os.path.join(dsym_python_dir, "a_out.py"), "w") as writer:
for l in python_init:
writer.write(l + '\n')
dwarfdump_uuid_regex = re.compile(
r'UUID: ([-0-9a-fA-F]+) \(([^\(]+)\) .*')
dwarfdump_cmd_output = subprocess.check_output(
('/usr/bin/dwarfdump --uuid "%s"' % aout_exe), shell=True).decode("utf-8")
aout_uuid = None
for line in dwarfdump_cmd_output.splitlines():
match = dwarfdump_uuid_regex.search(line)
if match:
aout_uuid = match.group(1)
self.assertNotEqual(aout_uuid, None, "Could not get uuid of built a.out")
### Create our dsym-for-uuid shell script which returns aout_exe
shell_cmds = [
'#! /bin/sh',
'# the last argument is the uuid',
'while [ $# -gt 1 ]',
'do',
' shift',
'done',
'ret=0',
'echo "<?xml version=\\"1.0\\" encoding=\\"UTF-8\\"?>"',
'echo "<!DOCTYPE plist PUBLIC \\"-//Apple//DTD PLIST 1.0//EN\\" \\"http://www.apple.com/DTDs/PropertyList-1.0.dtd\\">"',
'echo "<plist version=\\"1.0\\">"',
'',
'if [ "$1" != "%s" ]' % (aout_uuid),
'then',
' echo "<key>DBGError</key><string>not found</string>"',
' echo "</plist>"',
' exit 1',
'fi',
' uuid=%s' % aout_uuid,
' bin=%s' % aout_exe,
' dsym=%s.dSYM/Contents/Resources/DWARF/%s' % (aout_exe, os.path.basename(aout_exe)),
'echo "<dict><key>$uuid</key><dict>"',
'',
'echo "<key>DBGDSYMPath</key><string>$dsym</string>"',
'echo "<key>DBGSymbolRichExecutable</key><string>$bin</string>"',
'echo "</dict></dict></plist>"',
'exit $ret'
]
with open(dsym_for_uuid, "w") as writer:
for l in shell_cmds:
writer.write(l + '\n')
os.chmod(dsym_for_uuid, 0o755)
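# For reference, when the UUID matches, the script above emits a plist of
# roughly this shape (paths schematic, built from the variables above):
#   <plist version="1.0"><dict><key>$uuid</key><dict>
#     <key>DBGDSYMPath</key><string>a.out.dSYM/Contents/Resources/DWARF/a.out</string>
#     <key>DBGSymbolRichExecutable</key><string>a.out</string>
#   </dict></dict></plist>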
### Now run lldb on the corefile
### which will give us a UUID
### which we call dsym-for-uuid.sh with
### which gives us a binary and dSYM
### which lldb should load!
if self.TraceOn():
self.runCmd("log enable lldb dyld host")
self.addTearDownHook(lambda: self.runCmd("log disable lldb dyld host"))
# Now load the binary and confirm that we load the OS plugin.
target = self.dbg.CreateTarget('')
if self.TraceOn():
self.runCmd("script print('loading corefile %s with OS plugin')" % binspec_corefile_addr)
process = target.LoadCore(binspec_corefile_addr)
self.assertEqual(process.IsValid(), True)
if self.TraceOn():
self.runCmd("image list")
self.runCmd("target mod dump sections")
self.runCmd("thread list")
self.assertEqual(target.GetNumModules(), 1)
fspec = target.GetModuleAtIndex(0).GetFileSpec()
self.assertEqual(fspec.GetFilename(), aout_exe_basename)
# Verify our OS plug-in threads showed up
thread = process.GetThreadByID(0x111111111)
self.assertTrue(thread.IsValid(),
"Make sure there is a thread 0x111111111 after we load the python OS plug-in")
thread = process.GetThreadByID(0x222222222)
self.assertTrue(thread.IsValid(),
"Make sure there is a thread 0x222222222 after we load the python OS plug-in")
thread = process.GetThreadByID(0x333333333)
self.assertTrue(thread.IsValid(),
"Make sure there is a thread 0x333333333 after we load the python OS plug-in")
self.runCmd("settings clear target.process.python-os-plugin-path")
self.dbg.DeleteTarget(target)
| 47.243816
| 151
| 0.629544
|
c171847962c478a252d2f285cf57985ee823997e
| 1,440
|
py
|
Python
|
var/spack/repos/builtin/packages/py-natsort/package.py
|
adrianjhpc/spack
|
0a9e4fcee57911f2db586aa50c8873d9cca8de92
|
[
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 2
|
2020-10-15T01:08:42.000Z
|
2021-10-18T01:28:18.000Z
|
var/spack/repos/builtin/packages/py-natsort/package.py
|
adrianjhpc/spack
|
0a9e4fcee57911f2db586aa50c8873d9cca8de92
|
[
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 2
|
2019-07-30T10:12:28.000Z
|
2019-12-17T09:02:27.000Z
|
var/spack/repos/builtin/packages/py-natsort/package.py
|
adrianjhpc/spack
|
0a9e4fcee57911f2db586aa50c8873d9cca8de92
|
[
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 5
|
2019-07-30T09:42:14.000Z
|
2021-01-25T05:39:20.000Z
|
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyNatsort(PythonPackage):
"""Simple yet flexible natural sorting in Python."""
homepage = "https://pypi.org/project/natsort/"
url = "https://github.com/SethMMorton/natsort/archive/5.2.0.zip"
version('5.2.0', sha256='0ae15082842e8a3598750b4bbaa4f7c138caf004e59c7040429d56bf9e9631bd')
version('5.1.1', sha256='6467eeca268d9accb2e3149acace49649f865b0051a672006a64b20597f04561')
version('5.1.0', sha256='79279792cc97a0005b2075ed2bc9b8a3e25e5edffe43ee2fb26b116283f5dab4')
version('5.0.3', sha256='408f6fa87f6bbe3e09b255286d4db7b678bf22d6a5cd1651d05bfc1f99792a2e')
version('5.0.2', sha256='6315d94b6651edd9bf1e29cfd513a0349ec46a38ed38d33121a11d5162dbe556')
version('5.0.1', sha256='fe915cd4ddc90182947758b77873dda42935d5493819df8439f2daef01ffaacb')
version('5.0.0', sha256='b46b3569ac69e8f4a88f1a479d108872857538c7564226c32df1fd75e809c240')
version('4.0.4', sha256='14e5ddaf689de2f5ac33aa7963554fa2944b019f526ece1036d74fe60528531b')
version('4.0.3', sha256='8824792d7ebc37a57010e1ba301244653f8655ea20ddab6b0d546cf1d9ffedda')
version('4.0.1', sha256='1c1d29150938ca71f0943363a06765dbb2cea01f9c4d760ba880cc65f39baba0')
depends_on('py-setuptools', type='build')
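# Once this package file is in a Spack repo, a typical install would be,
# e.g.: `spack install py-natsort@5.2.0` (the version constraint is optional).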
| 53.333333
| 95
| 0.797222
|
d896e777ff4787d88f30a8c7b76ca7c954707c52
| 9,026
|
py
|
Python
|
docs/conf.py
|
edwardlo12/geojs-0.15.0
|
8bac0371515c7a2e5cf1f4af0923c7f9d8d567a5
|
[
"Apache-2.0"
] | null | null | null |
docs/conf.py
|
edwardlo12/geojs-0.15.0
|
8bac0371515c7a2e5cf1f4af0923c7f9d8d567a5
|
[
"Apache-2.0"
] | null | null | null |
docs/conf.py
|
edwardlo12/geojs-0.15.0
|
8bac0371515c7a2e5cf1f4af0923c7f9d8d567a5
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# geojs documentation build configuration file, created by
# sphinx-quickstart on Fri Jun 27 13:44:49 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shutil
import re
from glob import glob
# from subprocess import check_output
# import graphviz
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
basepath = os.path.dirname(os.path.abspath(__file__))
toppath = os.path.join(basepath, '..')
for fname in glob(os.path.join(toppath, 'testing', 'test-runners', '*.py*')):
basename = os.path.split(fname)[-1].replace('.in', '')
with open(fname, 'r') as f:
with open(os.path.join(basepath, basename), 'w') as g:
r = re.compile(r'([^"])(@[\w]*@)([^"])')
for line in f.readlines():
g.write(r.subn(r'\1"\2"\3', line)[0])
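# The substitution above wraps bare CMake-style @TOKEN@ placeholders in quotes
# so the copied test-runner scripts stay valid Python; e.g. a line like
# `data_dir = @DATA_DIR@` becomes `data_dir = "@DATA_DIR@"`, while tokens that
# are already quoted ("@TOKEN@") are left alone.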
sys.path.append(basepath)
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.mathjax',
'sphinx.ext.autodoc',
'sphinx.ext.graphviz'
]
graphviz_output_format = 'svg'
autodoc_default_flags = ['show-inheritance']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'geojs'
copyright = u'Kitware, Inc.'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.15.0'
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
if not os.environ.get('READTHEDOCS', None):
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = []
# html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'geojsdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'geojs.tex', u'geojs Documentation',
u'Kitware, Inc.', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'geojs', u'geojs Documentation',
[u'Kitware, Inc.'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'geojs', u'geojs Documentation',
u'Kitware, Inc.', 'geojs', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| 31.449477
| 79
| 0.710281
|
11208e4a57347b8cb5d657d84e11d8e1584c5fc8
| 3,046
|
py
|
Python
|
tx2/utils.py
|
ORNL/tx2
|
c2365580a994d35b31ec5a4c1be5acb3a4a7c8d7
|
[
"BSD-3-Clause"
] | 16
|
2021-01-29T19:28:50.000Z
|
2022-02-08T16:12:48.000Z
|
tx2/utils.py
|
ORNL/tx2
|
c2365580a994d35b31ec5a4c1be5acb3a4a7c8d7
|
[
"BSD-3-Clause"
] | 6
|
2021-11-05T13:42:07.000Z
|
2021-12-18T20:50:31.000Z
|
tx2/utils.py
|
ORNL/tx2
|
c2365580a994d35b31ec5a4c1be5acb3a4a7c8d7
|
[
"BSD-3-Clause"
] | 2
|
2020-12-13T02:39:21.000Z
|
2021-02-08T15:27:33.000Z
|
import asyncio
import numpy as np
from torch import cuda
DISABLE_DEBOUNCE = False
"""The main embedding plot is debounced to prevent excessive calling while editing text. Note that exceptions don't propagate due to threading when this occurs. Set this global to True to receive exceptions."""
def get_device() -> str:
"""Determine the device to put pytorch tensors on
:return: "cuda" or "cpu"
"""
device = "cuda" if cuda.is_available() else "cpu"
return device
# used in visualization
def which_cluster(index, clusters):
"""TODO Get cluster label from index of point in one of the clusters"""
for cluster in clusters:
if index in clusters[cluster]:
return cluster
return None
# used in wrapper
def set_defaults(params, **defaults):
"""TODO Set given parameters in given set of arguments if they do not
already have values.
"""
for key in defaults:
if key not in params:
params[key] = defaults[key]
return params
def array_elipsis(array, length, depth=1):
"""Format `array` as a string showing at most `length` elements per level, marking truncation with '...'."""
string = "["
for index, element in enumerate(array[:length]):
if depth == 1:
string += str(element)
else:
string += array_elipsis(array[index], length, depth - 1)
if index < len(array) - 1:
string += ", "
if index == length - 1 and index < len(array) - 1:
string += "..."
if index >= length - 1:
break
string += "]"
return string
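# Illustrative: array_elipsis([1, 2, 3, 4, 5], 3) returns '[1, 2, 3, ...]'.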
# https://ipywidgets.readthedocs.io/en/latest/examples/Widget%20Events.html#Debouncing
class Timer:
# Used in debounce
def __init__(self, timeout, callback):
self._timeout = timeout
self._callback = callback
async def _job(self):
await asyncio.sleep(self._timeout)
self._callback()
def start(self):
if DISABLE_DEBOUNCE:
self._callback()
else:
self._task = asyncio.ensure_future(self._job())
def cancel(self):
if not DISABLE_DEBOUNCE:
self._task.cancel()
def debounce(wait):
"""TODO Decorator that will postpone a function's
execution until after `wait` seconds
have elapsed since the last time it was invoked."""
def decorator(fn):
timer = None
def debounced(*args, **kwargs):
nonlocal timer
def call_it():
fn(*args, **kwargs)
if timer is not None and not DISABLE_DEBOUNCE:
timer.cancel()
timer = Timer(wait, call_it)
timer.start()
return debounced
return decorator
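# Sketch of intended usage (handler name assumed; requires a running asyncio
# event loop, as in Jupyter, since Timer uses asyncio.ensure_future):
# @debounce(0.5)
# def on_text_change(change):
#     ...  # runs only once input has paused for 0.5 seconds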
def get_cat_by_index(idx, encodings):
"""Return the name of the category for the given encoded index"""
for key in encodings.keys():
if encodings[key] == idx:
return key
def get_pred_cat(pred, encodings):
"""Determine which category is predicted based on passed model output"""
idx = np.argmax(pred)
return get_cat_by_index(idx, encodings)
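# Illustrative, with an assumed encoding map:
#   encodings = {'cat': 0, 'dog': 1}
#   get_pred_cat([0.2, 0.8], encodings)  ->  'dog'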
| 26.034188
| 210
| 0.622127
|
c918f629bc07e8881adf3ebd588ccec66a742ea4
| 4,328
|
py
|
Python
|
zerver/management/commands/soft_deactivate_users.py
|
shefali163/zulip
|
015af605cfb28b417b557ed9278835fd267bcfd8
|
[
"Apache-2.0"
] | 3
|
2018-12-04T01:44:43.000Z
|
2019-05-13T06:16:21.000Z
|
zerver/management/commands/soft_deactivate_users.py
|
alex784004/patient
|
a6510c4626392b9a8385cbac82698d9e23df0a55
|
[
"Apache-2.0"
] | 58
|
2018-11-27T15:18:54.000Z
|
2018-12-09T13:43:07.000Z
|
zerver/management/commands/soft_deactivate_users.py
|
alex784004/patient
|
a6510c4626392b9a8385cbac82698d9e23df0a55
|
[
"Apache-2.0"
] | 9
|
2019-11-04T18:59:29.000Z
|
2022-03-22T17:46:37.000Z
|
import sys
from argparse import ArgumentParser
from typing import Any, Dict, List
from django.conf import settings
from zerver.lib.management import ZulipBaseCommand
from zerver.lib.soft_deactivation import do_soft_activate_users, \
do_soft_deactivate_users, get_users_for_soft_deactivation, logger
from zerver.models import Realm, UserProfile
class Command(ZulipBaseCommand):
help = """Soft activate/deactivate users. Users are recognised by there emails here."""
def add_arguments(self, parser: ArgumentParser) -> None:
self.add_realm_args(parser)
parser.add_argument('-d', '--deactivate',
dest='deactivate',
action='store_true',
default=False,
help='Used to deactivate user/users.')
parser.add_argument('-a', '--activate',
dest='activate',
action='store_true',
default=False,
help='Used to activate user/users.')
parser.add_argument('--inactive-for',
type=int,
default=28,
help='Number of days of inactivity before soft-deactivation')
parser.add_argument('users', metavar='<users>', type=str, nargs='*', default=[],
help="A list of user emails to soft activate/deactivate.")
def handle(self, *args: Any, **options: str) -> None:
if settings.STAGING:
print('This is a Staging server. Suppressing management command.')
sys.exit(0)
realm = self.get_realm(options)
user_emails = options['users']
activate = options['activate']
deactivate = options['deactivate']
filter_kwargs = {} # type: Dict[str, Realm]
if realm is not None:
filter_kwargs = dict(realm=realm)
if activate:
if not user_emails:
print('You need to specify at least one user to use the activate option.')
self.print_help("./manage.py", "soft_deactivate_users")
sys.exit(1)
users_to_activate = UserProfile.objects.filter(
email__in=user_emails,
**filter_kwargs
)
users_to_activate = list(users_to_activate)
if len(users_to_activate) != len(user_emails):
user_emails_found = [user.email for user in users_to_activate]
for user in user_emails:
if user not in user_emails_found:
raise Exception('User with email %s was not found. '
'Check if the email is correct.' % (user,))
users_activated = do_soft_activate_users(users_to_activate)
logger.info('Soft Reactivated %d user(s)' % (len(users_activated)))
elif deactivate:
if user_emails:
users_to_deactivate = UserProfile.objects.filter(
email__in=user_emails,
**filter_kwargs
)
users_to_deactivate = list(users_to_deactivate)
if len(users_to_deactivate) != len(user_emails):
user_emails_found = [user.email for user in users_to_deactivate]
for user in user_emails:
if user not in user_emails_found:
raise Exception('User with email %s was not found. '
'Check if the email is correct.' % (user,))
print('Soft deactivating forcefully...')
else:
if realm is not None:
filter_kwargs = dict(user_profile__realm=realm)
users_to_deactivate = get_users_for_soft_deactivation(int(options['inactive_for']),
filter_kwargs)
if users_to_deactivate:
users_deactivated = do_soft_deactivate_users(users_to_deactivate)
logger.info('Soft Deactivated %d user(s)' % (len(users_deactivated)))
else:
self.print_help("./manage.py", "soft_deactivate_users")
sys.exit(1)
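# Illustrative invocations (the email addresses are hypothetical):
#   ./manage.py soft_deactivate_users -d user@example.com    # force-deactivate one user
#   ./manage.py soft_deactivate_users -a user@example.com    # reactivate one user
#   ./manage.py soft_deactivate_users -d --inactive-for 60   # deactivate users idle 60+ days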
| 44.618557
| 99
| 0.554067
|
3bbaa82286a1d19a91488d26a2bb38d2cb872663
| 3,116
|
py
|
Python
|
docs/sphinx/source/conf.py
|
bernardopacini/f90wrapTemplate
|
801d3ab0a4394a5256f419dbae67457a110f1fb1
|
[
"MIT"
] | 3
|
2022-02-10T20:54:27.000Z
|
2022-02-13T22:40:21.000Z
|
docs/sphinx/source/conf.py
|
bernardopacini/f90wrapTemplate
|
801d3ab0a4394a5256f419dbae67457a110f1fb1
|
[
"MIT"
] | null | null | null |
docs/sphinx/source/conf.py
|
bernardopacini/f90wrapTemplate
|
801d3ab0a4394a5256f419dbae67457a110f1fb1
|
[
"MIT"
] | null | null | null |
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
this_dir = os.path.dirname(__file__)
sys.path.insert(0, os.path.abspath("."))
import fortranDoc
# -- Project information -----------------------------------------------------
project = "f90wrap Template"
copyright = "2022, Bernardo Pacini"
author = "Bernardo Pacini"
# The full version, including alpha/beta/rc tags
release = "1.0.0"
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx-prompt",
"autoapi.extension",
"numpydoc",
"sphinxcontrib.bibtex",
"sphinx.ext.intersphinx",
"sphinx.ext.autodoc",
"sphinxfortran.fortran_domain",
"sphinxfortran.fortran_autodoc",
]
bibtex_bibfiles = ["refs.bib"]
autoapi_dirs = ["../../../python_src/f90wrapTemplate"]
autoapi_root = "developerDocs/Python/autodoc/"
autoapi_add_toctree_entry = False
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = []
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "sphinx_rtd_theme"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = []
## -- Options for Sphinx-Fortran ---------------------------------------------
# List of possible extensions in the case of a directory listing
fortran_ext = ["f90", "F90", "f95", "F95"]
# This variable must be set with file pattern, like "*.f90", or a list of them.
# It is also possible to specify a directory name; in this case, all files than
# have an extension matching those define by the config variable `fortran_ext`
# are used.
src = "../../../src/"
fortran_src = [
os.path.abspath(os.path.join(this_dir, src + path))
for path in (
"math/",
"statistics/",
)
]
# Indentation string or length (default 4). If it is an integer,
# indicates the number of spaces.
fortran_indent = 2
# Generate Fortran Documentation
fortranDoc.generateDoc()
| 33.505376
| 79
| 0.669769
|
269a258ae539dfdebb98e7507515d7eb29e1ebaa
| 1,244
|
py
|
Python
|
scripts/ai.py
|
jstnhuang/chatbot
|
331f5f847f027905cf5c22b66d731d151e282a51
|
[
"MIT"
] | 16
|
2015-05-01T04:37:17.000Z
|
2021-04-16T05:26:23.000Z
|
scripts/ai.py
|
jstnhuang/chatbot
|
331f5f847f027905cf5c22b66d731d151e282a51
|
[
"MIT"
] | 1
|
2016-09-06T04:35:39.000Z
|
2019-09-16T11:01:16.000Z
|
scripts/ai.py
|
jstnhuang/chatbot
|
331f5f847f027905cf5c22b66d731d151e282a51
|
[
"MIT"
] | 16
|
2015-05-01T04:37:17.000Z
|
2020-12-01T14:29:49.000Z
|
#!/usr/bin/env python
import aiml
import rospy
import os
import sys
from chatbot.msg import ChatMessage
from std_msgs.msg import String
class Chatbot():
def __init__(self):
self._kernel = aiml.Kernel()
rospy.init_node('chatbot_ai')
rospy.Subscriber('chatbot_speech', ChatMessage, self._request_callback)
self._response_publisher = rospy.Publisher(
'chatbot_responses',
String,
queue_size=1  # rospy warns if queue_size is left unset
)
def initialize(self, aiml_dir):
self._kernel.learn(os.sep.join([aiml_dir, '*.aiml']))
properties_file = open(os.sep.join([aiml_dir, 'bot.properties']))
for line in properties_file:
# Split on the first '=' only, and strip whitespace so predicate
# values don't carry a trailing newline.
parts = line.split('=', 1)
key = parts[0].strip()
value = parts[1].strip()
self._kernel.setBotPredicate(key, value)
rospy.logwarn('Done initializing chatbot.')
rospy.spin()
def _request_callback(self, chat_message):
response = ''
if chat_message.confidence < 50:
response = 'Could you say that again?'
else:
response = self._kernel.respond(chat_message.utterance)
message = String()
message.data = response
self._response_publisher.publish(message)
def main():
chatbot = Chatbot()
aiml_dir = sys.argv[1]
chatbot.initialize(aiml_dir)
if __name__ == '__main__':
main()
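# Typical launch (the AIML directory path is illustrative):
#   rosrun chatbot ai.py /path/to/aiml_dir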
| 25.387755
| 75
| 0.685691
|
0cde288694905dadb83458256a681e9a26cd9df7
| 36,246
|
py
|
Python
|
code/tmp_rtrip/nntplib.py
|
emilyemorehouse/ast-and-me
|
3f58117512e125e1ecbe3c72f2f0d26adb80b7b3
|
[
"MIT"
] | 24
|
2018-01-23T05:28:40.000Z
|
2021-04-13T20:52:59.000Z
|
code/tmp_rtrip/nntplib.py
|
emilyemorehouse/ast-and-me
|
3f58117512e125e1ecbe3c72f2f0d26adb80b7b3
|
[
"MIT"
] | 17
|
2017-12-21T18:32:31.000Z
|
2018-12-18T17:09:50.000Z
|
code/tmp_rtrip/nntplib.py
|
emilyemorehouse/ast-and-me
|
3f58117512e125e1ecbe3c72f2f0d26adb80b7b3
|
[
"MIT"
] | null | null | null |
"""An NNTP client class based on:
- RFC 977: Network News Transfer Protocol
- RFC 2980: Common NNTP Extensions
- RFC 3977: Network News Transfer Protocol (version 2)
Example:
>>> from nntplib import NNTP
>>> s = NNTP('news')
>>> resp, count, first, last, name = s.group('comp.lang.python')
>>> print('Group', name, 'has', count, 'articles, range', first, 'to', last)
Group comp.lang.python has 51 articles, range 5770 to 5821
>>> resp, subs = s.xhdr('subject', '{0}-{1}'.format(first, last))
>>> resp = s.quit()
>>>
Here 'resp' is the server response line.
Error responses are turned into exceptions.
To post an article from a file:
>>> f = open(filename, 'rb') # file containing article, including header
>>> resp = s.post(f)
>>>
For descriptions of all methods, read the comments in the code below.
Note that all arguments and return values representing article numbers
are strings, not numbers, since they are rarely used for calculations.
"""
import re
import socket
import collections
import datetime
import warnings
try:
import ssl
except ImportError:
_have_ssl = False
else:
_have_ssl = True
from email.header import decode_header as _email_decode_header
from socket import _GLOBAL_DEFAULT_TIMEOUT
__all__ = ['NNTP', 'NNTPError', 'NNTPReplyError', 'NNTPTemporaryError',
'NNTPPermanentError', 'NNTPProtocolError', 'NNTPDataError', 'decode_header'
]
_MAXLINE = 2048
class NNTPError(Exception):
"""Base class for all nntplib exceptions"""
def __init__(self, *args):
Exception.__init__(self, *args)
try:
self.response = args[0]
except IndexError:
self.response = 'No response given'
class NNTPReplyError(NNTPError):
"""Unexpected [123]xx reply"""
pass
class NNTPTemporaryError(NNTPError):
"""4xx errors"""
pass
class NNTPPermanentError(NNTPError):
"""5xx errors"""
pass
class NNTPProtocolError(NNTPError):
"""Response does not begin with [1-5]"""
pass
class NNTPDataError(NNTPError):
"""Error in response data"""
pass
NNTP_PORT = 119
NNTP_SSL_PORT = 563
_LONGRESP = {'100', '101', '211', '215', '220', '221', '222', '224', '225',
'230', '231', '282'}
_DEFAULT_OVERVIEW_FMT = ['subject', 'from', 'date', 'message-id',
'references', ':bytes', ':lines']
_OVERVIEW_FMT_ALTERNATIVES = {'bytes': ':bytes', 'lines': ':lines'}
_CRLF = b'\r\n'
GroupInfo = collections.namedtuple('GroupInfo', ['group', 'last', 'first',
'flag'])
ArticleInfo = collections.namedtuple('ArticleInfo', ['number', 'message_id',
'lines'])
def decode_header(header_str):
"""Takes a unicode string representing a munged header value
and decodes it as a (possibly non-ASCII) readable value."""
parts = []
for v, enc in _email_decode_header(header_str):
if isinstance(v, bytes):
parts.append(v.decode(enc or 'ascii'))
else:
parts.append(v)
return ''.join(parts)
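# Illustrative (RFC 2047 encoded-word assumed):
#   decode_header('=?utf-8?q?Caf=C3=A9?=')  ->  'Café'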
def _parse_overview_fmt(lines):
"""Parse a list of string representing the response to LIST OVERVIEW.FMT
and return a list of header/metadata names.
Raises NNTPDataError if the response is not compliant
(cf. RFC 3977, section 8.4)."""
fmt = []
for line in lines:
if line[0] == ':':
name, _, suffix = line[1:].partition(':')
name = ':' + name
else:
name, _, suffix = line.partition(':')
name = name.lower()
name = _OVERVIEW_FMT_ALTERNATIVES.get(name, name)
fmt.append(name)
defaults = _DEFAULT_OVERVIEW_FMT
if len(fmt) < len(defaults):
raise NNTPDataError('LIST OVERVIEW.FMT response too short')
if fmt[:len(defaults)] != defaults:
raise NNTPDataError('LIST OVERVIEW.FMT redefines default fields')
return fmt
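# Illustrative server response (lines assumed):
#   _parse_overview_fmt(['Subject:', 'From:', 'Date:', 'Message-ID:',
#                        'References:', ':bytes', ':lines'])
# returns the default list ['subject', 'from', 'date', 'message-id',
# 'references', ':bytes', ':lines'].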
def _parse_overview(lines, fmt, data_process_func=None):
"""Parse the response to an OVER or XOVER command according to the
overview format `fmt`."""
n_defaults = len(_DEFAULT_OVERVIEW_FMT)
overview = []
for line in lines:
fields = {}
article_number, *tokens = line.split('\t')
article_number = int(article_number)
for i, token in enumerate(tokens):
if i >= len(fmt):
continue
field_name = fmt[i]
is_metadata = field_name.startswith(':')
if i >= n_defaults and not is_metadata:
h = field_name + ': '
if token and token[:len(h)].lower() != h:
raise NNTPDataError(
"OVER/XOVER response doesn't include names of additional headers"
)
token = token[len(h):] if token else None
fields[fmt[i]] = token
overview.append((article_number, fields))
return overview
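# Illustrative, with an assumed (abbreviated) XOVER response line:
#   _parse_overview(['42\tHello\talice@example.com'], _DEFAULT_OVERVIEW_FMT)
#   ->  [(42, {'subject': 'Hello', 'from': 'alice@example.com'})]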
def _parse_datetime(date_str, time_str=None):
"""Parse a pair of (date, time) strings, and return a datetime object.
If only the date is given, it is assumed to be date and time
concatenated together (e.g. response to the DATE command).
"""
if time_str is None:
time_str = date_str[-6:]
date_str = date_str[:-6]
hours = int(time_str[:2])
minutes = int(time_str[2:4])
seconds = int(time_str[4:])
year = int(date_str[:-4])
month = int(date_str[-4:-2])
day = int(date_str[-2:])
if year < 70:
year += 2000
elif year < 100:
year += 1900
return datetime.datetime(year, month, day, hours, minutes, seconds)
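# Illustrative: _parse_datetime('20230115', '123045')
#   ->  datetime.datetime(2023, 1, 15, 12, 30, 45)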
def _unparse_datetime(dt, legacy=False):
"""Format a date or datetime object as a pair of (date, time) strings
in the format required by the NEWNEWS and NEWGROUPS commands. If a
date object is passed, the time is assumed to be midnight (00h00).
The returned representation depends on the legacy flag:
* if legacy is False (the default):
date has the YYYYMMDD format and time the HHMMSS format
* if legacy is True:
date has the YYMMDD format and time the HHMMSS format.
RFC 3977 compliant servers should understand both formats; therefore,
legacy is only needed when talking to old servers.
"""
if not isinstance(dt, datetime.datetime):
time_str = '000000'
else:
time_str = '{0.hour:02d}{0.minute:02d}{0.second:02d}'.format(dt)
y = dt.year
if legacy:
y = y % 100
date_str = '{0:02d}{1.month:02d}{1.day:02d}'.format(y, dt)
else:
date_str = '{0:04d}{1.month:02d}{1.day:02d}'.format(y, dt)
return date_str, time_str
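# Illustrative: _unparse_datetime(datetime.datetime(2023, 1, 15, 12, 30, 45))
#   ->  ('20230115', '123045'); with legacy=True  ->  ('230115', '123045')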
if _have_ssl:
def _encrypt_on(sock, context, hostname):
"""Wrap a socket in SSL/TLS. Arguments:
- sock: Socket to wrap
- context: SSL context to use for the encrypted connection
Returns:
- sock: New, encrypted socket.
"""
if context is None:
context = ssl._create_stdlib_context()
return context.wrap_socket(sock, server_hostname=hostname)
class _NNTPBase:
encoding = 'utf-8'
errors = 'surrogateescape'
def __init__(self, file, host, readermode=None, timeout=
_GLOBAL_DEFAULT_TIMEOUT):
"""Initialize an instance. Arguments:
- file: file-like object (open for read/write in binary mode)
- host: hostname of the server
- readermode: if true, send 'mode reader' command after
connecting.
- timeout: timeout (in seconds) used for socket connections
readermode is sometimes necessary if you are connecting to an
NNTP server on the local machine and intend to call
reader-specific commands, such as `group'. If you get
unexpected NNTPPermanentErrors, you might need to set
readermode.
"""
self.host = host
self.file = file
self.debugging = 0
self.welcome = self._getresp()
self._caps = None
self.getcapabilities()
self.readermode_afterauth = False
if readermode and 'READER' not in self._caps:
self._setreadermode()
if not self.readermode_afterauth:
self._caps = None
self.getcapabilities()
self.tls_on = False
self.authenticated = False
def __enter__(self):
return self
def __exit__(self, *args):
is_connected = lambda : hasattr(self, 'file')
if is_connected():
try:
self.quit()
except (OSError, EOFError):
pass
finally:
if is_connected():
self._close()
def getwelcome(self):
"""Get the welcome message from the server
(this is read and squirreled away by __init__()).
If the response code is 200, posting is allowed;
if it 201, posting is not allowed."""
if self.debugging:
print('*welcome*', repr(self.welcome))
return self.welcome
def getcapabilities(self):
"""Get the server capabilities, as read by __init__().
If the CAPABILITIES command is not supported, an empty dict is
returned."""
if self._caps is None:
self.nntp_version = 1
self.nntp_implementation = None
try:
resp, caps = self.capabilities()
except (NNTPPermanentError, NNTPTemporaryError):
self._caps = {}
else:
self._caps = caps
if 'VERSION' in caps:
self.nntp_version = max(map(int, caps['VERSION']))
if 'IMPLEMENTATION' in caps:
self.nntp_implementation = ' '.join(caps['IMPLEMENTATION'])
return self._caps
def set_debuglevel(self, level):
"""Set the debugging level. Argument 'level' means:
0: no debugging output (default)
1: print commands and responses but not body text etc.
2: also print raw lines read and sent before stripping CR/LF"""
self.debugging = level
debug = set_debuglevel
def _putline(self, line):
"""Internal: send one line to the server, appending CRLF.
The `line` must be a bytes-like object."""
line = line + _CRLF
if self.debugging > 1:
print('*put*', repr(line))
self.file.write(line)
self.file.flush()
def _putcmd(self, line):
"""Internal: send one command to the server (through _putline()).
The `line` must be a unicode string."""
if self.debugging:
print('*cmd*', repr(line))
line = line.encode(self.encoding, self.errors)
self._putline(line)
def _getline(self, strip_crlf=True):
"""Internal: return one line from the server, stripping _CRLF.
Raise EOFError if the connection is closed.
Returns a bytes object."""
line = self.file.readline(_MAXLINE + 1)
if len(line) > _MAXLINE:
raise NNTPDataError('line too long')
if self.debugging > 1:
print('*get*', repr(line))
if not line:
raise EOFError
if strip_crlf:
if line[-2:] == _CRLF:
line = line[:-2]
elif line[-1:] in _CRLF:
line = line[:-1]
return line
def _getresp(self):
"""Internal: get a response from the server.
Raise various errors if the response indicates an error.
Returns a unicode string."""
resp = self._getline()
if self.debugging:
print('*resp*', repr(resp))
resp = resp.decode(self.encoding, self.errors)
c = resp[:1]
if c == '4':
raise NNTPTemporaryError(resp)
if c == '5':
raise NNTPPermanentError(resp)
if c not in '123':
raise NNTPProtocolError(resp)
return resp
def _getlongresp(self, file=None):
"""Internal: get a response plus following text from the server.
Raise various errors if the response indicates an error.
Returns a (response, lines) tuple where `response` is a unicode
string and `lines` is a list of bytes objects.
If `file` is a file-like object, it must be open in binary mode.
"""
openedFile = None
try:
if isinstance(file, (str, bytes)):
openedFile = file = open(file, 'wb')
resp = self._getresp()
if resp[:3] not in _LONGRESP:
raise NNTPReplyError(resp)
lines = []
if file is not None:
terminators = b'.' + _CRLF, b'.\n'
while 1:
line = self._getline(False)
if line in terminators:
break
if line.startswith(b'..'):
line = line[1:]
file.write(line)
else:
terminator = b'.'
while 1:
line = self._getline()
if line == terminator:
break
if line.startswith(b'..'):
line = line[1:]
lines.append(line)
finally:
if openedFile:
openedFile.close()
return resp, lines
def _shortcmd(self, line):
"""Internal: send a command and get the response.
Same return value as _getresp()."""
self._putcmd(line)
return self._getresp()
def _longcmd(self, line, file=None):
"""Internal: send a command and get the response plus following text.
Same return value as _getlongresp()."""
self._putcmd(line)
return self._getlongresp(file)
def _longcmdstring(self, line, file=None):
"""Internal: send a command and get the response plus following text.
Same as _longcmd() and _getlongresp(), except that the returned `lines`
are unicode strings rather than bytes objects.
"""
self._putcmd(line)
resp, list = self._getlongresp(file)
return resp, [line.decode(self.encoding, self.errors) for line in list]
def _getoverviewfmt(self):
"""Internal: get the overview format. Queries the server if not
already done, else returns the cached value."""
try:
return self._cachedoverviewfmt
except AttributeError:
pass
try:
resp, lines = self._longcmdstring('LIST OVERVIEW.FMT')
except NNTPPermanentError:
fmt = _DEFAULT_OVERVIEW_FMT[:]
else:
fmt = _parse_overview_fmt(lines)
self._cachedoverviewfmt = fmt
return fmt
def _grouplist(self, lines):
return [GroupInfo(*line.split()) for line in lines]
def capabilities(self):
"""Process a CAPABILITIES command. Not supported by all servers.
Return:
- resp: server response if successful
- caps: a dictionary mapping capability names to lists of tokens
(for example {'VERSION': ['2'], 'OVER': [], LIST: ['ACTIVE', 'HEADERS'] })
"""
caps = {}
resp, lines = self._longcmdstring('CAPABILITIES')
for line in lines:
name, *tokens = line.split()
caps[name] = tokens
return resp, caps
def newgroups(self, date, *, file=None):
"""Process a NEWGROUPS command. Arguments:
- date: a date or datetime object
Return:
- resp: server response if successful
- list: list of newsgroup names
"""
if not isinstance(date, (datetime.date, datetime.datetime)):
raise TypeError(
"the date parameter must be a date or datetime object, not '{:40}'"
.format(date.__class__.__name__))
date_str, time_str = _unparse_datetime(date, self.nntp_version < 2)
cmd = 'NEWGROUPS {0} {1}'.format(date_str, time_str)
resp, lines = self._longcmdstring(cmd, file)
return resp, self._grouplist(lines)
def newnews(self, group, date, *, file=None):
"""Process a NEWNEWS command. Arguments:
- group: group name or '*'
- date: a date or datetime object
Return:
- resp: server response if successful
- list: list of message ids
"""
if not isinstance(date, (datetime.date, datetime.datetime)):
raise TypeError(
"the date parameter must be a date or datetime object, not '{:40}'"
.format(date.__class__.__name__))
date_str, time_str = _unparse_datetime(date, self.nntp_version < 2)
cmd = 'NEWNEWS {0} {1} {2}'.format(group, date_str, time_str)
return self._longcmdstring(cmd, file)
def list(self, group_pattern=None, *, file=None):
"""Process a LIST or LIST ACTIVE command. Arguments:
- group_pattern: a pattern indicating which groups to query
- file: Filename string or file object to store the result in
Returns:
- resp: server response if successful
- list: list of (group, last, first, flag) (strings)
"""
if group_pattern is not None:
command = 'LIST ACTIVE ' + group_pattern
else:
command = 'LIST'
resp, lines = self._longcmdstring(command, file)
return resp, self._grouplist(lines)
def _getdescriptions(self, group_pattern, return_all):
line_pat = re.compile('^(?P<group>[^ \t]+)[ \t]+(.*)$')
resp, lines = self._longcmdstring('LIST NEWSGROUPS ' + group_pattern)
if not resp.startswith('215'):
resp, lines = self._longcmdstring('XGTITLE ' + group_pattern)
groups = {}
for raw_line in lines:
match = line_pat.search(raw_line.strip())
if match:
name, desc = match.group(1, 2)
if not return_all:
return desc
groups[name] = desc
if return_all:
return resp, groups
else:
return ''
def description(self, group):
"""Get a description for a single group. If more than one
group matches ('group' is a pattern), return the first. If no
group matches, return an empty string.
This elides the response code from the server, since it can
only be '215' or '285' (for xgtitle) anyway. If the response
code is needed, use the 'descriptions' method.
NOTE: This neither checks for a wildcard in 'group' nor does
it check whether the group actually exists."""
return self._getdescriptions(group, False)
def descriptions(self, group_pattern):
"""Get descriptions for a range of groups."""
return self._getdescriptions(group_pattern, True)
def group(self, name):
"""Process a GROUP command. Argument:
- group: the group name
Returns:
- resp: server response if successful
- count: number of articles
- first: first article number
- last: last article number
- name: the group name
"""
resp = self._shortcmd('GROUP ' + name)
if not resp.startswith('211'):
raise NNTPReplyError(resp)
words = resp.split()
count = first = last = 0
n = len(words)
if n > 1:
count = words[1]
if n > 2:
first = words[2]
if n > 3:
last = words[3]
if n > 4:
name = words[4].lower()
return resp, int(count), int(first), int(last), name
def help(self, *, file=None):
"""Process a HELP command. Argument:
- file: Filename string or file object to store the result in
Returns:
- resp: server response if successful
- list: list of strings returned by the server in response to the
HELP command
"""
return self._longcmdstring('HELP', file)
def _statparse(self, resp):
"""Internal: parse the response line of a STAT, NEXT, LAST,
ARTICLE, HEAD or BODY command."""
if not resp.startswith('22'):
raise NNTPReplyError(resp)
words = resp.split()
art_num = int(words[1])
message_id = words[2]
return resp, art_num, message_id
def _statcmd(self, line):
"""Internal: process a STAT, NEXT or LAST command."""
resp = self._shortcmd(line)
return self._statparse(resp)
def stat(self, message_spec=None):
"""Process a STAT command. Argument:
- message_spec: article number or message id (if not specified,
the current article is selected)
Returns:
- resp: server response if successful
- art_num: the article number
- message_id: the message id
"""
if message_spec:
return self._statcmd('STAT {0}'.format(message_spec))
else:
return self._statcmd('STAT')
def next(self):
"""Process a NEXT command. No arguments. Return as for STAT."""
return self._statcmd('NEXT')
def last(self):
"""Process a LAST command. No arguments. Return as for STAT."""
return self._statcmd('LAST')
def _artcmd(self, line, file=None):
"""Internal: process a HEAD, BODY or ARTICLE command."""
resp, lines = self._longcmd(line, file)
resp, art_num, message_id = self._statparse(resp)
return resp, ArticleInfo(art_num, message_id, lines)
def head(self, message_spec=None, *, file=None):
"""Process a HEAD command. Argument:
- message_spec: article number or message id
- file: filename string or file object to store the headers in
Returns:
- resp: server response if successful
- ArticleInfo: (article number, message id, list of header lines)
"""
if message_spec is not None:
cmd = 'HEAD {0}'.format(message_spec)
else:
cmd = 'HEAD'
return self._artcmd(cmd, file)
def body(self, message_spec=None, *, file=None):
"""Process a BODY command. Argument:
- message_spec: article number or message id
- file: filename string or file object to store the body in
Returns:
- resp: server response if successful
- ArticleInfo: (article number, message id, list of body lines)
"""
if message_spec is not None:
cmd = 'BODY {0}'.format(message_spec)
else:
cmd = 'BODY'
return self._artcmd(cmd, file)
def article(self, message_spec=None, *, file=None):
"""Process an ARTICLE command. Argument:
- message_spec: article number or message id
- file: filename string or file object to store the article in
Returns:
- resp: server response if successful
- ArticleInfo: (article number, message id, list of article lines)
"""
if message_spec is not None:
cmd = 'ARTICLE {0}'.format(message_spec)
else:
cmd = 'ARTICLE'
return self._artcmd(cmd, file)
def slave(self):
"""Process a SLAVE command. Returns:
- resp: server response if successful
"""
return self._shortcmd('SLAVE')
def xhdr(self, hdr, str, *, file=None):
"""Process an XHDR command (optional server extension). Arguments:
- hdr: the header type (e.g. 'subject')
- str: an article nr, a message id, or a range nr1-nr2
- file: Filename string or file object to store the result in
Returns:
- resp: server response if successful
- list: list of (nr, value) strings
"""
pat = re.compile('^([0-9]+) ?(.*)\n?')
resp, lines = self._longcmdstring('XHDR {0} {1}'.format(hdr, str), file
)
def remove_number(line):
m = pat.match(line)
return m.group(1, 2) if m else line
return resp, [remove_number(line) for line in lines]
def xover(self, start, end, *, file=None):
"""Process an XOVER command (optional server extension) Arguments:
- start: start of range
- end: end of range
- file: Filename string or file object to store the result in
Returns:
- resp: server response if successful
- list: list of dicts containing the response fields
"""
resp, lines = self._longcmdstring('XOVER {0}-{1}'.format(start, end
), file)
fmt = self._getoverviewfmt()
return resp, _parse_overview(lines, fmt)
def over(self, message_spec, *, file=None):
"""Process an OVER command. If the command isn't supported, fall
back to XOVER. Arguments:
- message_spec:
- either a message id, indicating the article to fetch
information about
- or a (start, end) tuple, indicating a range of article numbers;
if end is None, information up to the newest message will be
retrieved
- or None, indicating the current article number must be used
- file: Filename string or file object to store the result in
Returns:
- resp: server response if successful
- list: list of dicts containing the response fields
NOTE: the "message id" form isn't supported by XOVER
"""
cmd = 'OVER' if 'OVER' in self._caps else 'XOVER'
if isinstance(message_spec, (tuple, list)):
start, end = message_spec
cmd += ' {0}-{1}'.format(start, end or '')
elif message_spec is not None:
cmd = cmd + ' ' + message_spec
resp, lines = self._longcmdstring(cmd, file)
fmt = self._getoverviewfmt()
return resp, _parse_overview(lines, fmt)
def xgtitle(self, group, *, file=None):
"""Process an XGTITLE command (optional server extension) Arguments:
- group: group name wildcard (i.e. news.*)
Returns:
- resp: server response if successful
- list: list of (name,title) strings"""
warnings.warn(
'The XGTITLE extension is not actively used, use descriptions() instead'
, DeprecationWarning, 2)
line_pat = re.compile('^([^ \t]+)[ \t]+(.*)$')
resp, raw_lines = self._longcmdstring('XGTITLE ' + group, file)
lines = []
for raw_line in raw_lines:
match = line_pat.search(raw_line.strip())
if match:
lines.append(match.group(1, 2))
return resp, lines
def xpath(self, id):
"""Process an XPATH command (optional server extension) Arguments:
- id: Message id of article
Returns:
resp: server response if successful
path: directory path to article
"""
warnings.warn('The XPATH extension is not actively used',
DeprecationWarning, 2)
resp = self._shortcmd('XPATH {0}'.format(id))
if not resp.startswith('223'):
raise NNTPReplyError(resp)
try:
[resp_num, path] = resp.split()
except ValueError:
raise NNTPReplyError(resp)
else:
return resp, path
def date(self):
"""Process the DATE command.
Returns:
- resp: server response if successful
- date: datetime object
"""
resp = self._shortcmd('DATE')
if not resp.startswith('111'):
raise NNTPReplyError(resp)
elem = resp.split()
if len(elem) != 2:
raise NNTPDataError(resp)
date = elem[1]
if len(date) != 14:
raise NNTPDataError(resp)
return resp, _parse_datetime(date, None)
def _post(self, command, f):
resp = self._shortcmd(command)
if not resp.startswith('3'):
raise NNTPReplyError(resp)
if isinstance(f, (bytes, bytearray)):
f = f.splitlines()
for line in f:
if not line.endswith(_CRLF):
line = line.rstrip(b'\r\n') + _CRLF
if line.startswith(b'.'):
line = b'.' + line
self.file.write(line)
self.file.write(b'.\r\n')
self.file.flush()
return self._getresp()
def post(self, data):
"""Process a POST command. Arguments:
- data: bytes object, iterable or file containing the article
Returns:
- resp: server response if successful"""
return self._post('POST', data)
def ihave(self, message_id, data):
"""Process an IHAVE command. Arguments:
- message_id: message-id of the article
- data: file containing the article
Returns:
- resp: server response if successful
Note that if the server refuses the article an exception is raised."""
return self._post('IHAVE {0}'.format(message_id), data)
def _close(self):
self.file.close()
del self.file
def quit(self):
"""Process a QUIT command and close the socket. Returns:
- resp: server response if successful"""
try:
resp = self._shortcmd('QUIT')
finally:
self._close()
return resp
def login(self, user=None, password=None, usenetrc=True):
if self.authenticated:
raise ValueError('Already logged in.')
if not user and not usenetrc:
raise ValueError(
'At least one of `user` and `usenetrc` must be specified')
try:
if usenetrc and not user:
import netrc
credentials = netrc.netrc()
auth = credentials.authenticators(self.host)
if auth:
user = auth[0]
password = auth[2]
except OSError:
pass
if not user:
return
resp = self._shortcmd('authinfo user ' + user)
if resp.startswith('381'):
if not password:
raise NNTPReplyError(resp)
else:
resp = self._shortcmd('authinfo pass ' + password)
if not resp.startswith('281'):
raise NNTPPermanentError(resp)
self._caps = None
self.getcapabilities()
if self.readermode_afterauth and 'READER' not in self._caps:
self._setreadermode()
self._caps = None
self.getcapabilities()
def _setreadermode(self):
try:
self.welcome = self._shortcmd('mode reader')
except NNTPPermanentError:
pass
except NNTPTemporaryError as e:
if e.response.startswith('480'):
self.readermode_afterauth = True
else:
raise
if _have_ssl:
def starttls(self, context=None):
"""Process a STARTTLS command. Arguments:
- context: SSL context to use for the encrypted connection
"""
if self.tls_on:
raise ValueError('TLS is already enabled.')
if self.authenticated:
raise ValueError('TLS cannot be started after authentication.')
resp = self._shortcmd('STARTTLS')
if resp.startswith('382'):
self.file.close()
self.sock = _encrypt_on(self.sock, context, self.host)
self.file = self.sock.makefile('rwb')
self.tls_on = True
self._caps = None
self.getcapabilities()
else:
raise NNTPError('TLS failed to start.')
class NNTP(_NNTPBase):
def __init__(self, host, port=NNTP_PORT, user=None, password=None,
readermode=None, usenetrc=False, timeout=_GLOBAL_DEFAULT_TIMEOUT):
"""Initialize an instance. Arguments:
- host: hostname to connect to
- port: port to connect to (default the standard NNTP port)
- user: username to authenticate with
- password: password to use with username
- readermode: if true, send 'mode reader' command after
connecting.
- usenetrc: allow loading username and password from ~/.netrc file
if not specified explicitly
- timeout: timeout (in seconds) used for socket connections
readermode is sometimes necessary if you are connecting to an
NNTP server on the local machine and intend to call
reader-specific commands, such as `group'. If you get
unexpected NNTPPermanentErrors, you might need to set
readermode.
"""
self.host = host
self.port = port
self.sock = socket.create_connection((host, port), timeout)
file = None
try:
file = self.sock.makefile('rwb')
_NNTPBase.__init__(self, file, host, readermode, timeout)
if user or usenetrc:
self.login(user, password, usenetrc)
except:
if file:
file.close()
self.sock.close()
raise
def _close(self):
try:
_NNTPBase._close(self)
finally:
self.sock.close()
if _have_ssl:
class NNTP_SSL(_NNTPBase):
def __init__(self, host, port=NNTP_SSL_PORT, user=None, password=
None, ssl_context=None, readermode=None, usenetrc=False,
timeout=_GLOBAL_DEFAULT_TIMEOUT):
"""This works identically to NNTP.__init__, except for the change
in default port and the `ssl_context` argument for SSL connections.
"""
self.sock = socket.create_connection((host, port), timeout)
file = None
try:
self.sock = _encrypt_on(self.sock, ssl_context, host)
file = self.sock.makefile('rwb')
_NNTPBase.__init__(self, file, host, readermode=readermode,
timeout=timeout)
if user or usenetrc:
self.login(user, password, usenetrc)
except:
if file:
file.close()
self.sock.close()
raise
def _close(self):
try:
_NNTPBase._close(self)
finally:
self.sock.close()
__all__.append('NNTP_SSL')
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser(description=
' nntplib built-in demo - display the latest articles in a newsgroup'
)
parser.add_argument('-g', '--group', default=
'gmane.comp.python.general', help=
'group to fetch messages from (default: %(default)s)')
parser.add_argument('-s', '--server', default='news.gmane.org', help=
'NNTP server hostname (default: %(default)s)')
parser.add_argument('-p', '--port', default=-1, type=int, help=
'NNTP port number (default: %s / %s)' % (NNTP_PORT, NNTP_SSL_PORT))
parser.add_argument('-n', '--nb-articles', default=10, type=int, help=
'number of articles to fetch (default: %(default)s)')
parser.add_argument('-S', '--ssl', action='store_true', default=False,
help='use NNTP over SSL')
args = parser.parse_args()
port = args.port
if not args.ssl:
if port == -1:
port = NNTP_PORT
s = NNTP(host=args.server, port=port)
else:
if port == -1:
port = NNTP_SSL_PORT
s = NNTP_SSL(host=args.server, port=port)
caps = s.getcapabilities()
if 'STARTTLS' in caps:
s.starttls()
resp, count, first, last, name = s.group(args.group)
print('Group', name, 'has', count, 'articles, range', first, 'to', last)
def cut(s, lim):
if len(s) > lim:
s = s[:lim - 4] + '...'
return s
first = str(int(last) - args.nb_articles + 1)
resp, overviews = s.xover(first, last)
for artnum, over in overviews:
author = decode_header(over['from']).split('<', 1)[0]
subject = decode_header(over['subject'])
lines = int(over[':lines'])
print('{:7} {:20} {:42} ({})'.format(artnum, cut(author, 20), cut(
subject, 42), lines))
s.quit()
| 36.20979 | 89 | 0.580202 |
ffd3bb3142b701d1f87d00f53e8f9c8db0315401 | 4,383 | py | Python | src/urequests.py | ITJoker233/Esp32-Cam_IP_Camera | 6bf1e6002393ee18b581aef4e41a3af4be13fb9d | ["MIT"] | 16 | 2020-07-22T12:24:47.000Z | 2022-01-17T06:28:02.000Z | src/urequests.py | ITJoker233/Esp32-Cam_IP_Camera | 6bf1e6002393ee18b581aef4e41a3af4be13fb9d | ["MIT"] | 1 | 2020-07-22T17:18:52.000Z | 2021-07-04T13:24:54.000Z | src/urequests.py | ITJoker233/Esp32-Cam_IP_Camera | 6bf1e6002393ee18b581aef4e41a3af4be13fb9d | ["MIT"] | 1 | 2021-04-01T07:30:03.000Z | 2021-04-01T07:30:03.000Z |
import usocket
class Response:
def __init__(self, f):
self.raw = f
self.encoding = 'utf-8'
self._cached = None
def close(self):
if self.raw:
self.raw.close()
self.raw = None
self._cached = None
@property
def content(self):
if self._cached is None:
try:
self._cached = self.raw.read()
finally:
self.raw.close()
self.raw = None
return self._cached
@property
def text(self):
return str(self.content, self.encoding)
def json(self):
import ujson
return ujson.loads(self.content)
def request(method, url, data=None, json=None, headers={}, stream=None, parse_headers=True):
redir_cnt = 1
if json is not None:
assert data is None
import ujson
data = ujson.dumps(json)
while True:
        try:
            # 'http://host/path' -> ('http:', '', 'host', 'path')
            proto, dummy, host, path = url.split('/', 3)
        except ValueError:
            proto, dummy, host = url.split('/', 2)
            path = ''
if proto == 'http:':
port = 80
elif proto == 'https:':
import ussl
port = 443
else:
raise ValueError('Unsupported protocol: ' + proto)
if ':' in host:
host, port = host.split(':', 1)
port = int(port)
ai = usocket.getaddrinfo(host, port, 0, usocket.SOCK_STREAM)
ai = ai[0]
resp_d = None
if parse_headers is not False:
resp_d = {}
s = usocket.socket(ai[0], ai[1], ai[2])
try:
s.connect(ai[-1])
if proto == 'https:':
ctx = ussl.SSLContext()
s = ctx.wrap_socket(s, server_hostname=host)
s.write(b'%s /%s HTTP/1.0\r\n' % (method, path))
            if 'Host' not in headers:
s.write(b'Host: %s\r\n' % host)
# Iterate over keys to avoid tuple alloc
for k in headers:
s.write(k)
s.write(b': ')
s.write(headers[k])
s.write(b'\r\n')
if json is not None:
s.write(b'Content-Type: application/json\r\n')
if data:
s.write(b'Content-Length: %d\r\n' % len(data))
s.write(b'Connection: close\r\n\r\n')
if data:
s.write(data)
l = s.readline()
#print(l)
l = l.split(None, 2)
status = int(l[1])
reason = ''
if len(l) > 2:
reason = l[2].rstrip()
while True:
l = s.readline()
if not l or l == b'\r\n':
break
#print(l)
if l.startswith(b'Transfer-Encoding:'):
if b'chunked' in l:
raise ValueError('Unsupported ' + l.decode())
elif l.startswith(b'Location:') and 300 <= status <= 399:
if not redir_cnt:
raise ValueError('Too many redirects')
redir_cnt -= 1
url = l[9:].decode().strip()
#print('redir to:', url)
status = 300
break
if parse_headers is False:
pass
elif parse_headers is True:
l = l.decode()
k, v = l.split(':', 1)
resp_d[k] = v.strip()
else:
parse_headers(l, resp_d)
except OSError:
s.close()
raise
if status != 300:
break
resp = Response(s)
resp.status_code = status
resp.reason = reason
if resp_d is not None:
resp.headers = resp_d
return resp
def head(url, **kw):
return request('HEAD', url, **kw)
def get(url, **kw):
return request('GET', url, **kw)
def post(url, **kw):
return request('POST', url, **kw)
def put(url, **kw):
return request('PUT', url, **kw)
def patch(url, **kw):
return request('PATCH', url, **kw)
def delete(url, **kw):
return request('DELETE', url, **kw)
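A minimal usage sketch for the module above (not part of the original file); the URL is a placeholder and assumes a MicroPython board with working network access:

# Hedged example: fetch a page and inspect the response.
r = get('http://example.com/')
print(r.status_code)   # e.g. 200
print(r.text[:80])     # start of the decoded body
r.close()              # release the underlying socket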
| 28.647059 | 93 | 0.439197 |
252225c885c84dce956344afcdef31841ad34390 | 8,068 | py | Python | python_on_whales/client_config.py | haruishi43/python-on-whales | e26c3e4367428588bad0b028c7258124a111c0c6 | ["MIT"] | null | null | null | python_on_whales/client_config.py | haruishi43/python-on-whales | e26c3e4367428588bad0b028c7258124a111c0c6 | ["MIT"] | null | null | null | python_on_whales/client_config.py | haruishi43/python-on-whales | e26c3e4367428588bad0b028c7258124a111c0c6 | ["MIT"] | null | null | null |
import json
import shutil
import tempfile
import warnings
from dataclasses import dataclass, field
from datetime import datetime, timedelta
from typing import Any, Dict, List, Optional
import pydantic
from python_on_whales.download_binaries import DOCKER_BINARY_PATH, download_docker_cli
from python_on_whales.utils import to_list
from .utils import ValidPath, run
CACHE_VALIDITY_PERIOD = 0.01
class ParsingError(Exception):
pass
class Command(list):
def add_simple_arg(self, name: str, value: Any):
if value is not None:
self.extend([name, value])
def add_flag(self, name: str, value: bool):
if value:
self.append(name)
def add_args_list(self, arg_name: str, list_values: list):
for value in to_list(list_values):
self.extend([arg_name, value])
def __add__(self, other) -> "Command":
return Command(super().__add__(other))
@dataclass
class ClientConfig:
config: Optional[ValidPath] = None
context: Optional[str] = None
debug: Optional[bool] = None
host: Optional[str] = None
log_level: Optional[str] = None
tls: Optional[bool] = None
tlscacert: Optional[ValidPath] = None
tlscert: Optional[ValidPath] = None
tlskey: Optional[ValidPath] = None
tlsverify: Optional[bool] = None
client_binary_path: Optional[ValidPath] = None
compose_files: List[ValidPath] = field(default_factory=list)
def get_docker_path(self) -> ValidPath:
if self.client_binary_path is None:
docker_sys = shutil.which("docker")
if docker_sys is not None:
self.client_binary_path = docker_sys
elif DOCKER_BINARY_PATH.exists():
self.client_binary_path = DOCKER_BINARY_PATH
else:
warnings.warn(
"The docker client binary file was not found on your system. \n"
"Docker on whales will try to download it for you. \n"
"Don't worry, it "
"won't be in the PATH and won't have anything to do with "
"the package manager of your system. \n"
"Note: We are not installing the docker daemon, which is a lot "
"heavier and harder to install. We're just downloading a single "
"standalone binary file.\n"
"If you want to trigger the download of the client binary file "
"manually (for example if you want to do it in a Dockerfile), "
"you can run the following command:\n "
"$ python-on-whales download-cli \n"
)
download_docker_cli()
self.client_binary_path = DOCKER_BINARY_PATH
return self.client_binary_path
@property
def docker_cmd(self) -> Command:
result = Command([self.get_docker_path()])
if self.config is not None:
result += ["--config", self.config]
if self.context is not None:
result += ["--context", self.context]
if self.debug:
result.append("--debug")
if self.host is not None:
result += ["--host", self.host]
if self.log_level is not None:
result += ["--log-level", self.log_level]
if self.tls:
result.append("--tls")
if self.tlscacert is not None:
result += ["--tlscacert", self.tlscacert]
if self.tlscert is not None:
result += ["--tlscert", self.tlscert]
if self.tlskey is not None:
result += ["--tlskey", self.tlskey]
if self.tlsverify:
result.append("--tlsverify")
return result
@property
def docker_compose_cmd(self) -> Command:
base_cmd = self.docker_cmd + ["compose"]
base_cmd.add_args_list("--file", self.compose_files)
return base_cmd
class DockerCLICaller:
def __init__(self, client_config: ClientConfig):
self.client_config = client_config
@property
def docker_cmd(self) -> Command:
return self.client_config.docker_cmd
@property
def docker_compose_cmd(self) -> Command:
return self.client_config.docker_compose_cmd
class ReloadableObject(DockerCLICaller):
def __init__(
self,
client_config: ClientConfig,
id_in_inspect: str,
reference_or_id: str,
is_immutable_id: bool = False,
):
super().__init__(client_config)
self._last_refreshed_time = datetime.min
self._inspect_result = None
self._immutable_id = None
self._reference = None
self._id_in_inspect = id_in_inspect
if is_immutable_id:
self._immutable_id = reference_or_id
else:
self._set_inspect_result(
self._fetch_and_parse_inspect_result(reference_or_id)
)
self._immutable_id = getattr(self._inspect_result, self._id_in_inspect)
def __eq__(self, other):
return (
self._get_immutable_id() == other._get_immutable_id()
and self.client_config == other.client_config
)
def __str__(self):
return self._get_immutable_id()
def _needs_reload(self) -> bool:
return (datetime.now() - self._last_refreshed_time) >= timedelta(
seconds=CACHE_VALIDITY_PERIOD
)
def reload(self):
self._set_inspect_result(
self._fetch_and_parse_inspect_result(self._immutable_id)
)
def _fetch_and_parse_inspect_result(self, reference: str):
raise NotImplementedError
def _get_inspect_result(self):
if self._needs_reload():
self.reload()
return self._inspect_result
def _set_inspect_result(self, inspect_result):
self._inspect_result = inspect_result
self._last_refreshed_time = datetime.now()
def _get_immutable_id(self):
if self._immutable_id is None:
self.reload()
return self._immutable_id
def __hash__(self):
# maybe we can do better.
return hash(self._get_immutable_id())
class ReloadableObjectFromJson(ReloadableObject):
def _fetch_inspect_result_json(self, reference):
raise NotImplementedError
def _parse_json_object(self, json_object: Dict[str, Any]):
raise NotImplementedError
def _fetch_and_parse_inspect_result(self, reference: str):
json_str = self._fetch_inspect_result_json(reference)
json_object = json.loads(json_str)[0]
try:
return self._parse_json_object(json_object)
except pydantic.error_wrappers.ValidationError as err:
fd, json_response_file = tempfile.mkstemp(suffix=".json", text=True)
with open(json_response_file, "w") as f:
f.write(json_str)
raise ParsingError(
f"There was an error parsing the json response from the Docker daemon. \n"
f"This is a bug with python-on-whales itself. Please head to \n"
f"https://github.com/gabrieldemarmiesse/python-on-whales/issues \n"
f"and open an issue. You should copy this error message and \n"
f"the json response from the Docker daemon. The json response was put \n"
f"in {json_response_file} because it's a bit too big to be printed \n"
f"on the screen. Make sure that there are no sensitive data in the \n"
f"json file before copying it in the github issue."
) from err
def bulk_reload(docker_objects: List[ReloadableObjectFromJson]):
assert len(set(x.client_config for x in docker_objects)) == 1
all_ids = [x._get_immutable_id() for x in docker_objects]
full_cmd = docker_objects[0].docker_cmd + ["inspect"] + all_ids
json_str = run(full_cmd)
for json_obj, docker_object in zip(json.loads(json_str), docker_objects):
docker_object._set_inspect_result(docker_object._parse_json_object(json_obj))
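A short, hedged sketch of how the classes above compose (not part of the original module); it assumes a docker client binary is discoverable, since get_docker_path() may otherwise trigger a download:

# Hedged example: build CLI invocations from a ClientConfig.
config = ClientConfig(debug=True, compose_files=['docker-compose.yml'])
print(config.docker_cmd)          # [<docker path>, '--debug']
print(config.docker_compose_cmd)  # [<docker path>, '--debug', 'compose', '--file', 'docker-compose.yml']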
| 34.042194 | 90 | 0.633118 |
7b2524acc07989ab18bea077c2af286720e61494 | 6,688 | py | Python | djmodels/core/signing.py | iMerica/dj-models | fbe4a55ac362f9355a2298f58aa0deb0b6082e19 | ["BSD-3-Clause"] | 5 | 2019-02-15T16:47:50.000Z | 2021-12-26T18:52:23.000Z | djmodels/core/signing.py | iMerica/dj-models | fbe4a55ac362f9355a2298f58aa0deb0b6082e19 | ["BSD-3-Clause"] | null | null | null | djmodels/core/signing.py | iMerica/dj-models | fbe4a55ac362f9355a2298f58aa0deb0b6082e19 | ["BSD-3-Clause"] | 2 | 2021-08-09T02:29:09.000Z | 2021-08-20T03:30:11.000Z |
"""
Functions for creating and restoring url-safe signed JSON objects.
The format used looks like this:
>>> signing.dumps("hello")
'ImhlbGxvIg:1QaUZC:YIye-ze3TTx7gtSv422nZA4sgmk'
There are two components here, separated by a ':'. The first component is a
URLsafe base64 encoded JSON of the object passed to dumps(). The second
component is a base64 encoded hmac/SHA1 hash of "$first_component:$secret".
signing.loads(s) checks the signature and returns the deserialized object.
If the signature fails, a BadSignature exception is raised.
>>> signing.loads("ImhlbGxvIg:1QaUZC:YIye-ze3TTx7gtSv422nZA4sgmk")
'hello'
>>> signing.loads("ImhlbGxvIg:1QaUZC:YIye-ze3TTx7gtSv422nZA4sgmk-modified")
...
BadSignature: Signature failed: ImhlbGxvIg:1QaUZC:YIye-ze3TTx7gtSv422nZA4sgmk-modified
You can optionally compress the JSON prior to base64 encoding it to save
space, using the compress=True argument. This checks if compression actually
helps and only applies compression if the result is a shorter string:
>>> signing.dumps(range(1, 20), compress=True)
'.eJwFwcERACAIwLCF-rCiILN47r-GyZVJsNgkxaFxoDgxcOHGxMKD_T7vhAml:1QaUaL:BA0thEZrp4FQVXIXuOvYJtLJSrQ'
The fact that the string is compressed is signalled by the prefixed '.' at the
start of the base64 JSON.
There are 65 url-safe characters: the 64 used by url-safe base64 and the ':'.
These functions make use of all of them.
"""
import base64
import datetime
import json
import re
import time
import zlib
from djmodels.conf import settings
from djmodels.utils import baseconv
from djmodels.utils.crypto import constant_time_compare, salted_hmac
from djmodels.utils.encoding import force_bytes
from djmodels.utils.module_loading import import_string
_SEP_UNSAFE = re.compile(r'^[A-z0-9-_=]*$')
class BadSignature(Exception):
"""Signature does not match."""
pass
class SignatureExpired(BadSignature):
"""Signature timestamp is older than required max_age."""
pass
def b64_encode(s):
return base64.urlsafe_b64encode(s).strip(b'=')
def b64_decode(s):
pad = b'=' * (-len(s) % 4)
return base64.urlsafe_b64decode(s + pad)
def base64_hmac(salt, value, key):
return b64_encode(salted_hmac(salt, value, key).digest()).decode()
def get_cookie_signer(salt='djmodels.core.signing.get_cookie_signer'):
Signer = import_string(settings.SIGNING_BACKEND)
key = force_bytes(settings.SECRET_KEY) # SECRET_KEY may be str or bytes.
return Signer(b'djmodels.http.cookies' + key, salt=salt)
class JSONSerializer:
"""
Simple wrapper around json to be used in signing.dumps and
signing.loads.
"""
def dumps(self, obj):
return json.dumps(obj, separators=(',', ':')).encode('latin-1')
def loads(self, data):
return json.loads(data.decode('latin-1'))
def dumps(obj, key=None, salt='djmodels.core.signing', serializer=JSONSerializer, compress=False):
"""
Return URL-safe, hmac/SHA1 signed base64 compressed JSON string. If key is
None, use settings.SECRET_KEY instead.
If compress is True (not the default), check if compressing using zlib can
save some space. Prepend a '.' to signify compression. This is included
in the signature, to protect against zip bombs.
Salt can be used to namespace the hash, so that a signed string is
only valid for a given namespace. Leaving this at the default
value or re-using a salt value across different parts of your
application without good cause is a security risk.
The serializer is expected to return a bytestring.
"""
data = serializer().dumps(obj)
# Flag for if it's been compressed or not
is_compressed = False
if compress:
# Avoid zlib dependency unless compress is being used
compressed = zlib.compress(data)
if len(compressed) < (len(data) - 1):
data = compressed
is_compressed = True
base64d = b64_encode(data).decode()
if is_compressed:
base64d = '.' + base64d
return TimestampSigner(key, salt=salt).sign(base64d)
def loads(s, key=None, salt='djmodels.core.signing', serializer=JSONSerializer, max_age=None):
"""
Reverse of dumps(), raise BadSignature if signature fails.
The serializer is expected to accept a bytestring.
"""
# TimestampSigner.unsign() returns str but base64 and zlib compression
# operate on bytes.
base64d = TimestampSigner(key, salt=salt).unsign(s, max_age=max_age).encode()
decompress = base64d[:1] == b'.'
if decompress:
# It's compressed; uncompress it first
base64d = base64d[1:]
data = b64_decode(base64d)
if decompress:
data = zlib.decompress(data)
return serializer().loads(data)
class Signer:
def __init__(self, key=None, sep=':', salt=None):
# Use of native strings in all versions of Python
self.key = key or settings.SECRET_KEY
self.sep = sep
if _SEP_UNSAFE.match(self.sep):
raise ValueError(
'Unsafe Signer separator: %r (cannot be empty or consist of '
'only A-z0-9-_=)' % sep,
)
self.salt = salt or '%s.%s' % (self.__class__.__module__, self.__class__.__name__)
def signature(self, value):
return base64_hmac(self.salt + 'signer', value, self.key)
def sign(self, value):
return '%s%s%s' % (value, self.sep, self.signature(value))
def unsign(self, signed_value):
if self.sep not in signed_value:
raise BadSignature('No "%s" found in value' % self.sep)
value, sig = signed_value.rsplit(self.sep, 1)
if constant_time_compare(sig, self.signature(value)):
return value
raise BadSignature('Signature "%s" does not match' % sig)
class TimestampSigner(Signer):
def timestamp(self):
return baseconv.base62.encode(int(time.time()))
def sign(self, value):
value = '%s%s%s' % (value, self.sep, self.timestamp())
return super().sign(value)
def unsign(self, value, max_age=None):
"""
Retrieve original value and check it wasn't signed more
than max_age seconds ago.
"""
result = super().unsign(value)
value, timestamp = result.rsplit(self.sep, 1)
timestamp = baseconv.base62.decode(timestamp)
if max_age is not None:
if isinstance(max_age, datetime.timedelta):
max_age = max_age.total_seconds()
# Check timestamp is not older than max_age
age = time.time() - timestamp
if age > max_age:
raise SignatureExpired(
'Signature age %s > %s seconds' % (age, max_age))
return value
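A hedged round-trip sketch for the signers above (not part of the original module); an explicit key is passed so no configured settings.SECRET_KEY is needed:

# Hedged example: sign a value, then verify it within a 60-second window.
signer = TimestampSigner(key='not-so-secret')
token = signer.sign('hello')                   # 'hello:<base62 time>:<signature>'
assert signer.unsign(token, max_age=60) == 'hello'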
| 33.60804 | 98 | 0.685257 |
4af42df910fe9bf3722e44764f5d3c010301fe05 | 1,382 | py | Python | Validation/GlobalDigis/python/globaldigis_analyze_cfi.py | pasmuss/cmssw | 566f40c323beef46134485a45ea53349f59ae534 | ["Apache-2.0"] | null | null | null | Validation/GlobalDigis/python/globaldigis_analyze_cfi.py | pasmuss/cmssw | 566f40c323beef46134485a45ea53349f59ae534 | ["Apache-2.0"] | null | null | null | Validation/GlobalDigis/python/globaldigis_analyze_cfi.py | pasmuss/cmssw | 566f40c323beef46134485a45ea53349f59ae534 | ["Apache-2.0"] | null | null | null |
import FWCore.ParameterSet.Config as cms
globaldigisanalyze = cms.EDAnalyzer("GlobalDigisAnalyzer",
hitsProducer = cms.string('g4SimHits'),
MuCSCStripSrc = cms.InputTag("simMuonCSCDigis","MuonCSCStripDigi"),
MuDTSrc = cms.InputTag("simMuonDTDigis"),
Name = cms.untracked.string('GlobalDigisAnalyzer'),
SiPxlSrc = cms.InputTag("simSiPixelDigis"),
Verbosity = cms.untracked.int32(0), ## 0 provides no output
MuCSCWireSrc = cms.InputTag("simMuonCSCDigis","MuonCSCWireDigi"),
ECalEESrc = cms.InputTag("simEcalDigis","eeDigis"),
SiStripSrc = cms.InputTag("simSiStripDigis","ZeroSuppressed"),
# 1 assumes cm in SimVertex
ProvenanceLookup = cms.PSet(
PrintProvenanceInfo = cms.untracked.bool(False),
GetAllProvenances = cms.untracked.bool(False)
),
HCalSrc = cms.InputTag("g4SimHits","HcalHits"),
# 1 provides basic output
# 2 provides output of the fill step + 1
# 3 provides output of the store step + 2
Frequency = cms.untracked.int32(50),
MuRPCSrc = cms.InputTag("simMuonRPCDigis"),
ECalEBSrc = cms.InputTag("simEcalDigis","ebDigis"),
ECalESSrc = cms.InputTag("simEcalPreshowerDigis"),
# as of 110p2, needs to be 1. Anything ealier should be 0.
VtxUnit = cms.untracked.int32(1),
#InputTag HCalDigi = simHcalUnsuppressedDigis
HCalDigi = cms.InputTag("simHcalDigis")
)
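A hedged sketch of how such a _cfi fragment is typically consumed (not part of the original file); the process name and path label are assumptions:

# Hedged example: schedule the analyzer inside a CMSSW process.
import FWCore.ParameterSet.Config as cms
process = cms.Process('DIGIVALID')
process.load('Validation.GlobalDigis.globaldigis_analyze_cfi')
process.p = cms.Path(process.globaldigisanalyze)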
| 39.485714 | 71 | 0.708394 |
da6c1fa992fbb6ace516d7717b624b790e447c1a | 233 | py | Python | oop/tdd/tictactoe/tictactoe.py | mikar/60-days-of-python | 1b65ed978e908f3ae357c91abeb2a36564b13316 | ["MIT"] | 10 | 2016-07-31T13:38:09.000Z | 2021-07-21T16:52:26.000Z | oop/tdd/tictactoe/tictactoe.py | oguzhalit/60-days-of-python | 1b65ed978e908f3ae357c91abeb2a36564b13316 | ["MIT"] | null | null | null | oop/tdd/tictactoe/tictactoe.py | oguzhalit/60-days-of-python | 1b65ed978e908f3ae357c91abeb2a36564b13316 | ["MIT"] | 9 | 2017-07-31T14:41:38.000Z | 2021-07-21T16:52:26.000Z |
class Board(object):
def __init__(self):
self.pawns = {0: "X", 1: "O"}
self.player = self.pawns[0]
self.npc = self.pawns[1]
if __name__ == "__main__":
b = Board()
    print(b.pawns, b.player, b.npc)
| 19.416667 | 37 | 0.545064 |
d3797b77d9f2a01a058311abd0cd3b9ef9475c13 | 2,751 | py | Python | commands/default_cmdsets.py | Miami-Blood-in-the-Water/miami-new | d57a741e30423c676d5044b25d939d7553d8959b | ["MIT"] | null | null | null | commands/default_cmdsets.py | Miami-Blood-in-the-Water/miami-new | d57a741e30423c676d5044b25d939d7553d8959b | ["MIT"] | null | null | null | commands/default_cmdsets.py | Miami-Blood-in-the-Water/miami-new | d57a741e30423c676d5044b25d939d7553d8959b | ["MIT"] | null | null | null |
"""
Command sets
All commands in the game must be grouped in a cmdset. A given command
can be part of any number of cmdsets and cmdsets can be added/removed
and merged onto entities at runtime.
To create new commands to populate the cmdset, see
`commands/command.py`.
This module wraps the default command sets of Evennia; overloads them
to add/remove commands from the default lineup. You can create your
own cmdsets by inheriting from them or directly from `evennia.CmdSet`.
"""
from evennia import default_cmds
from character.commands.chargen import CmdChargen, CmdRace
class CharacterCmdSet(default_cmds.CharacterCmdSet):
"""
The `CharacterCmdSet` contains general in-game commands like `look`,
`get`, etc available on in-game Character objects. It is merged with
the `AccountCmdSet` when an Account puppets a Character.
"""
key = "DefaultCharacter"
def at_cmdset_creation(self):
"""
Populates the cmdset
"""
super().at_cmdset_creation()
self.add(CmdChargen)
self.add(CmdRace)
#
# any commands you add below will overload the default ones.
#
class AccountCmdSet(default_cmds.AccountCmdSet):
"""
This is the cmdset available to the Account at all times. It is
combined with the `CharacterCmdSet` when the Account puppets a
Character. It holds game-account-specific commands, channel
commands, etc.
"""
key = "DefaultAccount"
def at_cmdset_creation(self):
"""
Populates the cmdset
"""
super().at_cmdset_creation()
#
# any commands you add below will overload the default ones.
#
class UnloggedinCmdSet(default_cmds.UnloggedinCmdSet):
"""
Command set available to the Session before being logged in. This
holds commands like creating a new account, logging in, etc.
"""
key = "DefaultUnloggedin"
def at_cmdset_creation(self):
"""
Populates the cmdset
"""
super().at_cmdset_creation()
#
# any commands you add below will overload the default ones.
#
class SessionCmdSet(default_cmds.SessionCmdSet):
"""
This cmdset is made available on Session level once logged in. It
is empty by default.
"""
key = "DefaultSession"
def at_cmdset_creation(self):
"""
This is the only method defined in a cmdset, called during
its creation. It should populate the set with command instances.
        As an example we just add the empty base `Command` object.
It prints some info.
"""
super().at_cmdset_creation()
#
# any commands you add below will overload the default ones.
#
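A hedged sketch of the pattern the docstrings above describe (not part of the original file); CmdEcho is a hypothetical command:

# Hedged example: a minimal command that could be added in at_cmdset_creation.
from evennia import Command
class CmdEcho(Command):
    """Echo the arguments back to the caller. Usage: echo <text>"""
    key = "echo"
    def func(self):
        self.caller.msg(self.args.strip() or "echo!")
# then, inside CharacterCmdSet.at_cmdset_creation: self.add(CmdEcho)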
| 27.237624 | 72 | 0.666667 |
d536e509ac8cd1c52b4a4f12ab409a4357b5eb1f | 8,640 | py | Python | lib/JumpScale/baselib/codetools/WordReplacer.py | Jumpscale/jumpscale6_core | 0502ddc1abab3c37ed982c142d21ea3955d471d3 | ["BSD-2-Clause"] | 1 | 2015-10-26T10:38:13.000Z | 2015-10-26T10:38:13.000Z | lib/JumpScale/baselib/codetools/WordReplacer.py | Jumpscale/jumpscale6_core | 0502ddc1abab3c37ed982c142d21ea3955d471d3 | ["BSD-2-Clause"] | null | null | null | lib/JumpScale/baselib/codetools/WordReplacer.py | Jumpscale/jumpscale6_core | 0502ddc1abab3c37ed982c142d21ea3955d471d3 | ["BSD-2-Clause"] | null | null | null |
from JumpScale import j
import re, random
class Synonym():
def __init__(self,name='',replaceWith='', simpleSearch="", addConfluenceLinkTags=False, replaceExclude=''):
"""
        @param name: Name of the synonym
        @param replaceWith: The replacement of simpleSearch
        @param simpleSearch: Search string that'll be replaced with replaceWith
        @param addConfluenceLinkTags: True to add confluence link tags around the synonym
        @param replaceExclude: Regex matching fragments that must be protected from replacement (e.g. existing definition tags)
"""
self.simpleSearch=simpleSearch
self.regexFind=""
self.regexFindForReplace=""
self.name = name
self.replaceWith=replaceWith
self.addConfluenceLinkTags=addConfluenceLinkTags
self.replaceExclude = replaceExclude
self._markers=dict()
        if simpleSearch != "":
            search=simpleSearch.replace("?","[ -_]?") # match " ", "-" or "_" once or not at all
if addConfluenceLinkTags:
bracketMatchStart="(\[ *|)"
bracketMatchStop="( *\]|)"
else:
bracketMatchStart=""
bracketMatchStop=""
self.regexFind=r"(?i)%s\b%s\b%s" % (bracketMatchStart,search.lower(),bracketMatchStop)
#self.regexFind=r"%s\b%s\b%s" % (bracketMatchStart,search.lower(),bracketMatchStop)
self.regexFindForReplace=self.regexFind
def setRegexSearch(self,regexFind,regexFindForReplace):
self.regexFind=regexFind
if regexFindForReplace=="":
regexFindForReplace=regexFind
self.regexFindForReplace=regexFindForReplace
self.simpleSearch=""
def replace(self,text):
if self.replaceExclude:
# Check for any def tag that contains name "e.g: [ Q-Layer ]", remove them and put markers in place
text=self._replaceDefsWithMarkers(text)
text=j.codetools.regex.replace(regexFind=self.regexFind,regexFindsubsetToReplace=self.regexFindForReplace\
,replaceWith=self.replaceWith,text=text)
if self.replaceExclude:
# Remove the markers and put the original def tags back
text=self._replaceMarkersWithDefs(text)
return text
def _replaceDefsWithMarkers(self,text):
"""
Search for any def tags that contains the name of this synonym "e.g [Q-layer]" in text and replace that with a special marker. Also it stores markers and replaced string into the dict _markers
"""
# patterns you don't want to be replaced
pat=self.replaceExclude
matches = j.codetools.regex.findAll(pat,text)
for match in matches:
mark = "$$MARKER$$%s$$"%random.randint(0,1000)
self._markers[mark] = match
match = re.escape(match)
text=j.codetools.regex.replace(regexFind=match,regexFindsubsetToReplace=match,replaceWith=mark,text=text)
return text
def _replaceMarkersWithDefs(self,text):
"""
Removes markers out of text and puts the original strings back
"""
        for marker, replacement in self._markers.items():
marker = re.escape(marker)
text=j.codetools.regex.replace(regexFind=marker,regexFindsubsetToReplace=marker,replaceWith=replacement,text=text)
return text
def __str__(self):
out="name:%s simple:%s regex:%s regereplace:%s replacewith:%s\n" % (self.name,self.simpleSearch,self.regexFind,self.regexFindForReplace,self.replaceWith)
return out
def __repr__(self):
return self.__str__()
class WordReplacer():
def __init__(self):
self.synonyms=[] #array Synonym()
def synonymsPrint(self):
for syn in self.synonyms:
            print(syn)
def synonymAdd(self,name='', simpleSearch='', regexFind='', regexFindForReplace='', replaceWith='',replaceExclude='', addConfluenceLinkTags =False):
"""
Adds a new synonym to this replacer
@param name: Synonym name
@param simpleSearch: Search text for sysnonym, if you supply this, then the synonym will automatically generate a matching regex pattern that'll be used to search for this string, if you want to specificy the regex explicitly then use regexFind instead.
@param regexFind: Provide this regex only if you didn't provide simpleSearch, it represents the regex that'll be used in search for this synonym . It overrides the default synonym search pattern
@param regexFindForReplace: The subset within regexFind that'll be replaced for this synonym
"""
synonym = Synonym(name,replaceWith, simpleSearch, addConfluenceLinkTags, replaceExclude)
if regexFind:
synonym.setRegexSearch(regexFind, regexFindForReplace)
self.synonyms.append(synonym)
def reset(self):
self.synonyms=[]
def synonymsAddFromFile(self,path,addConfluenceLinkTags=False):
"""
        load synonym statements from a file in the following format
[searchStatement]:[replaceto]
or
'[regexFind]':'[regexReplace]':replaceto
note: delimiter is :
note: '' around regex statements
e.g.
******
master?daemon:ApplicationServer
application?server:ApplicationServer
'application[ -_]+server':'application[ -_]+server':ApplicationServer
'\[application[ -_]+server\]':'application[ -_]+server':ApplicationServer
******
        @param addConfluenceLinkTags: if True then replaced items will be surrounded by [] (Boolean)
"""
txt=j.system.fs.fileGetContents(path)
for line in txt.split("\n"):
line=line.strip()
            if line != "" and line.find(":") != -1:
                if j.codetools.regex.match("^'",line):
                    # found line which is regex format: '[regexFind]':'[regexFindForReplace]':replaceto
                    # splitting on ' yields ['', regexFind, ':', regexFindForReplace, ':replaceto']
                    splitted=line.split("'")
                    if len(splitted) != 5:
                        raise RuntimeError("syntax error in synonym line %s (has to be 2 'regex' statements)" % line)
                    syn=Synonym(replaceWith=splitted[4].lstrip(":").strip())
                    syn.setRegexSearch(regexFind=splitted[1],regexFindForReplace=splitted[3])
else:
find=line.split(":")[0]
replace=line.split(":")[1].strip()
syn=Synonym(replaceWith=replace,simpleSearch=find,addConfluenceLinkTags=addConfluenceLinkTags)
self.synonyms.append(syn)
def removeConfluenceLinks(self,text):
"""
find [...] and remove the [ and the ]
@todo 2 (id:19)
"""
raise RuntimeError("todo needs to be done, is not working now")
def replaceinside(matchobj):
match=matchobj.group()
#we found a match now
#print "regex:%s match:%s replace:%s" % (searchitem[1],match,searchitem[2])
if match.find("|")==-1:
match=re.sub("( *\])|(\[ *)","",match)
toreplace=searchitem[2]
searchregexReplace=searchitem[1]
match = re.sub(searchregexReplace, toreplace,match)
return match
else:
return match
for searchitem in self.synonyms:
#text = re.sub(searchitem[0],searchitem[1], text)
text = re.sub(searchitem[0], replaceinside, text)
return text
def replace(self,text):
for syn in self.synonyms:
text=syn.replace(text)
return text
def replaceInConfluence(self, text):
"""
@[..|.] will also be looked for and replaced
"""
def replaceinside(matchobj):
match=matchobj.group()
#we found a match now
#print "regex:%s match:%s replace:%s" % (searchitem[1],match,searchitem[2])
if match.find("|")==-1:
match=re.sub("( *\])|(\[ *)","",match)
match = re.sub(syn.regexFind, syn.replaceWith,match)
return match
else:
return match
for syn in self.synonyms:
#call function replaceinside when match
text = re.sub(syn.regexFind, replaceinside, text)
return text
def _addConfluenceLinkTags(self,word):
"""
add [ & ] to word
"""
if word.find("[")==-1 and word.find("]")==-1:
word="[%s]" % word
return word
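A hedged usage sketch (not part of the original file); it assumes a working JumpScale environment, since Synonym.replace delegates to j.codetools.regex:

# Hedged example: normalize a loose spelling to a canonical term.
replacer = WordReplacer()
replacer.synonymAdd(name="appserver", simpleSearch="application?server",
                    replaceWith="ApplicationServer")
print(replacer.replace("the application server restarted"))
# expected: "the ApplicationServer restarted"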
| 43.2 | 261 | 0.596875 |
b69f1b490dc76ed94c5d19e5f001839cff1e04a6 | 3,385 | py | Python | App/tests.py | steve-njuguna-k/Django-Gallery | 4970a265ea67340f2c49723a373db4b0f04f2240 | ["MIT"] | 1 | 2022-02-01T17:52:23.000Z | 2022-02-01T17:52:23.000Z | App/tests.py | steve-njuguna-k/Django-Gallery | 4970a265ea67340f2c49723a373db4b0f04f2240 | ["MIT"] | null | null | null | App/tests.py | steve-njuguna-k/Django-Gallery | 4970a265ea67340f2c49723a373db4b0f04f2240 | ["MIT"] | null | null | null |
from django.test import TestCase
from .models import Image, Category, Location
from django.contrib.auth.models import User
user = User.objects.get(id=1)
# Create your tests here.
class LocationTestClass(TestCase):
#Set up Method
def setUp(self):
self.loc = Location(name="Kenya")
self.loc.save_location()
def test_instance(self):
self.assertTrue(isinstance(self.loc,Location))
def test_save_method(self):
self.loc.save_location()
locations = Location.objects.all()
self.assertTrue(len(locations) > 0)
def test_delete_method(self):
self.loc.save_location()
self.loc.delete_location()
location = Location.objects.all()
self.assertTrue(len(location) == 0)
def test_update(self):
location = Location.get_location_id(self.loc.id)
location.update_location('Tanzania')
location = Location.get_location_id(self.loc.id)
self.assertTrue(location.name == 'Tanzania')
class CategoryTestClass(TestCase):
# Set up Method
def setUp(self):
self.cat = Category(name="fashion")
self.cat.save_category()
def test_instance(self):
self.assertTrue(isinstance(self.cat, Category))
def test_save_method(self):
self.cat.save_category()
category = Category.objects.all()
self.assertTrue(len(category) > 0)
def test_delete_method(self):
self.cat.save_category()
self.cat.delete_category()
category = Category.objects.all()
self.assertTrue(len(category) == 0)
def test_update(self):
category = Category.get_category_id(self.cat.id)
category.update_category('Travel')
category = Category.get_category_id(self.cat.id)
self.assertTrue(category.name == 'Travel')
class ImageTestClass(TestCase):
# Set up Method
def setUp(self):
self.cat = Category(name="Fashion")
self.cat.save_category()
self.loc = Location(name="Kenya")
self.loc.save_location()
self.image = Image(title='image test', caption='my test', author=user, category=self.cat, location=self.loc)
self.image.save_image()
def test_instance(self):
self.assertTrue(isinstance(self.image, Image))
def tearDown(self):
self.image.delete_image()
self.cat.delete_category()
self.loc.delete_location()
def test_save_method(self):
self.image.save_image()
images = Image.objects.all()
self.assertTrue(len(images)>0)
def test_get_all_images(self):
images = Image.get_all_images()
self.assertTrue(len(images)>0)
def test_get_image_by_id(self):
images= Image.get_image_by_id(self.image.id)
self.assertTrue(len(images) == 1)
def test_search_by_category(self):
images = Image.search_by_category('fashion')
self.assertTrue(len(images)>0)
def test_filter_by_location(self):
images = Image.filter_by_location('1')
print(images)
self.assertTrue(len(images)>0)
def test_update_image(self):
self.image.save_image()
image = Image.update_image(self.image.id, 'test update', 'my test', user, self.loc, self.cat)
        updated_image = Image.objects.filter(id=self.image.id).first()
        self.assertTrue(updated_image.title == 'test update')
| 31.635514 | 116 | 0.656721 |
25b3c354d5a964b30d507656422152d813cc6751 | 4,036 | py | Python | got10k/datasets/got10k.py | ZhangSanFengByGit/toolkit | 9f2958bfd67d140afbc22f12c8d38995996330b0 | ["MIT"] | 1 | 2019-03-19T02:59:23.000Z | 2019-03-19T02:59:23.000Z | got10k/datasets/got10k.py | ZhangSanFengByGit/toolkit | 9f2958bfd67d140afbc22f12c8d38995996330b0 | ["MIT"] | null | null | null | got10k/datasets/got10k.py | ZhangSanFengByGit/toolkit | 9f2958bfd67d140afbc22f12c8d38995996330b0 | ["MIT"] | null | null | null |
from __future__ import absolute_import, print_function
import os
import glob
import numpy as np
import six
class GOT10k(object):
r"""`GOT-10K <http://got-10k.aitestunion.com//>`_ Dataset.
Publication:
``GOT-10k: A Large High-Diversity Benchmark for Generic Object
Tracking in the Wild``, L. Huang, X. Zhao and K. Huang, ArXiv 2018.
Args:
root_dir (string): Root directory of dataset where ``train``,
``val`` and ``test`` folders exist.
subset (string, optional): Specify ``train``, ``val`` or ``test``
subset of GOT-10k.
return_meta (string, optional): If True, returns ``meta``
of each sequence in ``__getitem__`` function, otherwise
only returns ``img_files`` and ``anno``.
"""
def __init__(self, root_dir, subset='test', return_meta=False):
super(GOT10k, self).__init__()
assert subset in ['train', 'val', 'test'], 'Unknown subset.'
self.root_dir = root_dir
self.subset = subset
self.return_meta = False if subset == 'test' else return_meta
self._check_integrity(root_dir, subset)
list_file = os.path.join(root_dir, subset, 'list.txt')
with open(list_file, 'r') as f:
self.seq_names = f.read().strip().split('\n')
self.seq_dirs = [os.path.join(root_dir, subset, s)
for s in self.seq_names]
self.anno_files = [os.path.join(d, 'groundtruth.txt')
for d in self.seq_dirs]
def __getitem__(self, index):
r"""
Args:
index (integer or string): Index or name of a sequence.
Returns:
tuple: (img_files, anno) if ``return_meta`` is False, otherwise
(img_files, anno, meta), where ``img_files`` is a list of
file names, ``anno`` is a N x 4 (rectangles) numpy array, while
``meta`` is a dict contains meta information about the sequence.
"""
if isinstance(index, six.string_types):
            if index not in self.seq_names:
raise Exception('Sequence {} not found.'.format(index))
index = self.seq_names.index(index)
img_files = sorted(glob.glob(os.path.join(
self.seq_dirs[index], '*.jpg')))
anno = np.loadtxt(self.anno_files[index], delimiter=',')
if self.subset == 'test' and anno.ndim == 1:
assert len(anno) == 4
anno = anno[np.newaxis, :]
else:
assert len(img_files) == len(anno)
if self.return_meta:
meta = self._fetch_meta(self.seq_dirs[index])
return img_files, anno, meta
else:
return img_files, anno
def __len__(self):
return len(self.seq_names)
def _check_integrity(self, root_dir, subset):
assert subset in ['train', 'val', 'test']
list_file = os.path.join(root_dir, subset, 'list.txt')
if os.path.isfile(list_file):
with open(list_file, 'r') as f:
seq_names = f.read().strip().split('\n')
# check each sequence folder
for seq_name in seq_names:
seq_dir = os.path.join(root_dir, subset, seq_name)
if not os.path.isdir(seq_dir):
                    print('Warning: sequence %s does not exist.' % seq_name)
else:
# dataset not exists
raise Exception('Dataset not found or corrupted.')
def _fetch_meta(self, seq_dir):
# meta information
meta_file = os.path.join(seq_dir, 'meta_info.ini')
with open(meta_file) as f:
meta = f.read().strip().split('\n')[1:]
meta = [line.split(': ') for line in meta]
meta = {line[0]: line[1] for line in meta}
# attributes
attributes = ['cover', 'absence', 'cut_by_image']
for att in attributes:
meta[att] = np.loadtxt(os.path.join(seq_dir, att + '.label'))
return meta
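A hedged usage sketch (not part of the original file); the root path is a placeholder and assumes the GOT-10k folder layout described in the class docstring:

# Hedged example: open the validation subset and read one sequence.
dataset = GOT10k(root_dir='data/GOT-10k', subset='val')
img_files, anno = dataset[0]   # frame paths and an N x 4 array of rectangles
print(len(dataset), len(img_files), anno.shape)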
| 37.37037 | 80 | 0.566898 |
3eb5aa31f910629dcd9e00fd97c08b90652a141a | 1,094 | py | Python | toolchain/riscv/MSYS/python/Lib/test/testcodec.py | zhiqiang-hu/bl_iot_sdk | 154ee677a8cc6a73e6a42a5ff12a8edc71e6d15d | ["Apache-2.0"] | 207 | 2018-10-01T08:53:01.000Z | 2022-03-14T12:15:54.000Z | toolchain/riscv/MSYS/python/Lib/test/testcodec.py | zhiqiang-hu/bl_iot_sdk | 154ee677a8cc6a73e6a42a5ff12a8edc71e6d15d | ["Apache-2.0"] | 8 | 2019-06-29T14:18:51.000Z | 2022-02-19T07:30:27.000Z | toolchain/riscv/MSYS/python/Lib/test/testcodec.py | zhiqiang-hu/bl_iot_sdk | 154ee677a8cc6a73e6a42a5ff12a8edc71e6d15d | ["Apache-2.0"] | 76 | 2020-03-16T01:47:46.000Z | 2022-03-21T16:37:07.000Z |
""" Test Codecs (used by test_charmapcodec)
Written by Marc-Andre Lemburg (mal@lemburg.com).
(c) Copyright 2000 Guido van Rossum.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_map)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_map)
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
### Decoding Map
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
0x78: "abc", # 1-n decoding mapping
b"abc": 0x0078,# 1-n encoding mapping
0x01: None, # decoding mapping to <undefined>
0x79: "", # decoding mapping to <remove character>
})
### Encoding Map
encoding_map = {}
for k,v in decoding_map.items():
encoding_map[v] = k
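A hedged sketch of the 1-n decoding mapping defined above (not part of the original file):

# Hedged example: byte 0x78 decodes to the three-character string 'abc'.
import codecs
decoded, consumed = codecs.charmap_decode(b'\x78', 'strict', decoding_map)
assert (decoded, consumed) == ('abc', 1)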
| 22.326531 | 69 | 0.66362 |
648e36332add911a915cd13b99630eb74de35431 | 741 | py | Python | django_fsm_log/admin.py | key-capture-energy/django-fsm-log | 8e45717b26239589167a3e3b070427a5ca3c80ca | ["MIT"] | 20 | 2022-01-19T08:31:57.000Z | 2022-03-23T08:47:05.000Z | django_fsm_log/admin.py | key-capture-energy/django-fsm-log | 8e45717b26239589167a3e3b070427a5ca3c80ca | ["MIT"] | 22 | 2022-01-14T14:26:14.000Z | 2022-03-27T15:25:34.000Z | django_fsm_log/admin.py | key-capture-energy/django-fsm-log | 8e45717b26239589167a3e3b070427a5ca3c80ca | ["MIT"] | 5 | 2022-01-14T21:59:03.000Z | 2022-03-21T16:45:28.000Z |
from django.contrib.contenttypes.admin import GenericTabularInline
from django.db.models import F
from .models import StateLog
__all__ = ("StateLogInline",)
class StateLogInline(GenericTabularInline):
model = StateLog
can_delete = False
    fields = (
        "transition",
        "source_state",
        "state",
        "by",
        "description",
        "timestamp",
    )
    def has_add_permission(self, request, obj=None):
        return False
    def has_change_permission(self, request, obj=None):
        return True
def get_readonly_fields(self, request, obj=None):
return self.fields
def get_queryset(self, request):
return super().get_queryset(request).order_by(F("timestamp").desc())
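A hedged sketch of wiring the inline into an admin (not part of the original file); Article and its registration are hypothetical:

# Hedged example: show the FSM state log on a model's admin page.
from django.contrib import admin
class ArticleAdmin(admin.ModelAdmin):
    inlines = [StateLogInline]
# admin.site.register(Article, ArticleAdmin)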
| 22.454545 | 76 | 0.65857 |
f596ffe3dd6ed7e4817d705ff8a9e3d170a2ca56 | 6,039 | py | Python | codegen/parse.py | nonebot/adapter-qqguild | a3e4d353bfdaafb296743bc0f15ed5d643c64d85 | ["MIT"] | 39 | 2021-12-23T14:26:41.000Z | 2022-03-22T14:11:19.000Z | codegen/parse.py | nonebot/adapter-qqguild | a3e4d353bfdaafb296743bc0f15ed5d643c64d85 | ["MIT"] | 4 | 2022-01-22T17:59:50.000Z | 2022-03-22T12:40:10.000Z | codegen/parse.py | nonebot/adapter-qqguild | a3e4d353bfdaafb296743bc0f15ed5d643c64d85 | ["MIT"] | 2 | 2022-01-16T02:38:51.000Z | 2022-03-01T15:48:36.000Z |
import warnings
from urllib.parse import urlparse
from contextvars import ContextVar
from typing import Any, Dict, List, Tuple, Optional
from yarl import URL
from pydantic import parse_obj_as
from .config import Config
from .source import Source
from .model import API, Type, Object, DataType, PathParam, QueryParam, obj_schemas
current_source: ContextVar[Source] = ContextVar("current_source")
def _resolve_ref(obj: DataType, loc: Tuple[str, ...]) -> DataType:
source = current_source.get()
if isinstance(obj, dict) and "$ref" in obj:
ref = URL(obj["$ref"])
source = source.resolve(ref)
s_token = current_source.set(source)
result = _resolve_ref(
source.resolve_fragment(ref.fragment), (ref.fragment.split("/")[-1],)
)
current_source.reset(s_token)
return result
elif isinstance(obj, dict):
is_object_schema = ("type" in obj and obj["type"] == "object") or (
"properties" in obj and isinstance(obj["properties"], dict)
)
new_obj = {"name": "_".join(loc), "type": "object"} if is_object_schema else {}
for key, value in obj.items():
new_obj[key] = _resolve_ref(value, loc + (key,))
return new_obj
elif isinstance(obj, list):
new_obj = []
for index, item in enumerate(obj):
new_obj.append(_resolve_ref(item, loc + (str(index),)))
return new_obj
else:
return obj
def _schema_to_model(schema: dict, loc: Tuple[str, ...]) -> Type:
schema = _resolve_ref(schema, loc)
model = parse_obj_as(Type, schema)
if isinstance(model, Object):
obj_schemas[model.name] = model
return model
def parse(source: Source) -> List[API]:
apis: List[API] = []
current_source.set(source)
schemas = source.data.get("components", {}).get("schemas", {})
for schema_name, schema in schemas.items():
_schema_to_model(schema, (schema_name,))
paths = source.data.get("paths", {}) or {}
for path, path_item in paths.items():
path_token = None
if "$ref" in path_item:
ref = URL(path_item["$ref"])
path_source = source.resolve(ref)
path_token = current_source.set(path_source)
path_item = path_source.resolve_fragment(ref.fragment)
common_parameters = path_item.get("parameters", [])
for method, operation in path_item.items():
if method not in {
"get",
"put",
"post",
"delete",
"options",
"head",
"patch",
"trace",
}:
continue
# get api name from doc url
doc_url = operation.get("externalDocs", {}).get("url", None)
if doc_url is None:
warnings.warn(f"{path} {method} has no external docs")
continue
name = (
urlparse(doc_url)
.path.split("/")[-1]
.removesuffix(".html")
.replace("-", "_")
)
# get api params
path_params: List[PathParam] = []
query_params: List[QueryParam] = []
parameters = common_parameters + operation.get("parameters", [])
for param in parameters:
param_token = None
if "$ref" in param:
ref = URL(param["$ref"])
param_source = source.resolve(ref)
param_token = current_source.set(param_source)
param = param_source.resolve_fragment(ref.fragment)
type = param["in"]
if type == "path":
path_params.append(
PathParam(
name=param["name"],
required=param.get("required", False),
type=_schema_to_model(
param["schema"], (name, param["name"])
),
)
)
elif type == "query":
query_params.append(
QueryParam(
name=param["name"],
required=param.get("required", False),
type=_schema_to_model(
param["schema"], (name, param["name"])
),
)
)
else:
warnings.warn(f"Unsupported param type: {param}")
if param_token:
current_source.reset(param_token)
# get api body
body_schema = (
operation.get("requestBody", {})
.get("content", {})
.get("application/json", {})
.get("schema", None)
)
body = (
_schema_to_model(body_schema, (name, "body")) if body_schema else None
)
# get api return value
responses = operation.get("responses", {})
response = responses.get(200, {}) or responses.get("default", {})
return_schema = (
response.get("content", {})
.get("application/json", {})
.get("schema", None)
)
return_type = (
_schema_to_model(return_schema, (name, "return"))
if return_schema
else None
)
apis.append(
API(
name=name,
method=method,
path=path,
path_params=path_params,
query_params=query_params,
body=body,
return_type=return_type,
)
)
if path_token:
current_source.reset(path_token)
return apis
| 34.508571 | 87 | 0.489154 |
292c8383b7eea30cf6d972f33570327843279dd0 | 13,476 | py | Python | xnas/pruning/pruning/iterative_pruner.py | dercaft/XNAS | d6d0fde0d4475210a41607181939188b177e44b1 | ["MIT"] | 2 | 2021-09-23T01:59:19.000Z | 2021-12-13T04:28:38.000Z | nni/algorithms/compression/v2/pytorch/pruning/iterative_pruner.py | Micheallei/nni | 29fd8cfae4fe99b08a91f9a67be4297093483832 | ["MIT"] | 1 | 2020-06-30T08:49:53.000Z | 2020-06-30T08:49:53.000Z | nni/algorithms/compression/v2/pytorch/pruning/iterative_pruner.py | Micheallei/nni | 29fd8cfae4fe99b08a91f9a67be4297093483832 | ["MIT"] | 2 | 2021-12-17T07:32:47.000Z | 2021-12-19T08:45:05.000Z |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import logging
from typing import Dict, List, Callable, Optional
from torch import Tensor
from torch.nn import Module
from .basic_pruner import (
LevelPruner,
L1NormPruner,
L2NormPruner,
FPGMPruner,
SlimPruner,
ActivationAPoZRankPruner,
ActivationMeanRankPruner,
TaylorFOWeightPruner,
ADMMPruner
)
from .basic_scheduler import PruningScheduler
from .tools import (
LinearTaskGenerator,
AGPTaskGenerator,
LotteryTicketTaskGenerator,
SimulatedAnnealingTaskGenerator
)
_logger = logging.getLogger(__name__)
__all__ = ['LinearPruner', 'AGPPruner', 'LotteryTicketPruner', 'SimulatedAnnealingPruner']
PRUNER_DICT = {
'level': LevelPruner,
'l1': L1NormPruner,
'l2': L2NormPruner,
'fpgm': FPGMPruner,
'slim': SlimPruner,
'apoz': ActivationAPoZRankPruner,
'mean_activation': ActivationMeanRankPruner,
'taylorfo': TaylorFOWeightPruner,
'admm': ADMMPruner
}
class IterativePruner(PruningScheduler):
def _wrap_model(self):
"""
Deprecated function.
"""
_logger.warning('Nothing will happen when calling this function.\
This pruner is an iterative pruner and does not directly wrap the model.')
def _unwrap_model(self):
"""
Deprecated function.
"""
_logger.warning('Nothing will happen when calling this function.\
This pruner is an iterative pruner and does not directly wrap the model.')
def export_model(self, *args, **kwargs):
"""
Deprecated function.
"""
_logger.warning('Nothing will happen when calling this function.\
            The best result (and intermediate result if kept) during iteration is under `log_dir` (default: .).')
class LinearPruner(IterativePruner):
"""
Parameters
----------
model : Module
The origin unwrapped pytorch model to be pruned.
config_list : List[Dict]
The origin config list provided by the user.
pruning_algorithm : str
Supported pruning algorithm ['level', 'l1', 'l2', 'fpgm', 'slim', 'apoz', 'mean_activation', 'taylorfo', 'admm'].
This iterative pruner will use the chosen corresponding pruner to prune the model in each iteration.
total_iteration : int
The total iteration number.
log_dir : str
        The log directory used to save the result; you can find the best result under this folder.
    keep_intermediate_result : bool
        Whether to keep intermediate results, including the intermediate model and masks from each iteration.
    finetuner : Optional[Callable[[Module], None]]
        The finetuner handles all finetune logic and takes a pytorch module as input.
        It will be called at the end of each iteration, usually to neutralize the accuracy loss brought by the pruning in that iteration.
speed_up : bool
If set True, speed up the model at the end of each iteration to make the pruned model compact.
dummy_input : Optional[torch.Tensor]
If `speed_up` is True, `dummy_input` is required for tracing the model in speed up.
evaluator : Optional[Callable[[Module], float]]
Evaluate the pruned model and give a score.
If evaluator is None, the best result refers to the latest result.
pruning_params : Dict
If the chosen pruning_algorithm has extra parameters, put them as a dict to pass in.
"""
def __init__(self, model: Module, config_list: List[Dict], pruning_algorithm: str,
total_iteration: int, log_dir: str = '.', keep_intermediate_result: bool = False,
finetuner: Optional[Callable[[Module], None]] = None, speed_up: bool = False, dummy_input: Optional[Tensor] = None,
evaluator: Optional[Callable[[Module], float]] = None, pruning_params: Dict = {}):
task_generator = LinearTaskGenerator(total_iteration=total_iteration,
origin_model=model,
origin_config_list=config_list,
log_dir=log_dir,
keep_intermediate_result=keep_intermediate_result)
pruner = PRUNER_DICT[pruning_algorithm](None, None, **pruning_params)
super().__init__(pruner, task_generator, finetuner=finetuner, speed_up=speed_up, dummy_input=dummy_input,
evaluator=evaluator, reset_weight=False)
class AGPPruner(IterativePruner):
"""
Parameters
----------
model : Module
The origin unwrapped pytorch model to be pruned.
config_list : List[Dict]
The origin config list provided by the user.
pruning_algorithm : str
Supported pruning algorithm ['level', 'l1', 'l2', 'fpgm', 'slim', 'apoz', 'mean_activation', 'taylorfo', 'admm'].
This iterative pruner will use the chosen corresponding pruner to prune the model in each iteration.
total_iteration : int
The total iteration number.
log_dir : str
        The log directory used to save the result; you can find the best result under this folder.
    keep_intermediate_result : bool
        Whether to keep intermediate results, including the intermediate model and masks from each iteration.
    finetuner : Optional[Callable[[Module], None]]
        The finetuner handles all finetune logic and takes a pytorch module as input.
        It will be called at the end of each iteration, usually to neutralize the accuracy loss brought by the pruning in that iteration.
speed_up : bool
If set True, speed up the model at the end of each iteration to make the pruned model compact.
dummy_input : Optional[torch.Tensor]
If `speed_up` is True, `dummy_input` is required for tracing the model in speed up.
evaluator : Optional[Callable[[Module], float]]
Evaluate the pruned model and give a score.
If evaluator is None, the best result refers to the latest result.
pruning_params : Dict
If the chosen pruning_algorithm has extra parameters, put them as a dict to pass in.
"""
def __init__(self, model: Module, config_list: List[Dict], pruning_algorithm: str,
total_iteration: int, log_dir: str = '.', keep_intermediate_result: bool = False,
finetuner: Optional[Callable[[Module], None]] = None, speed_up: bool = False, dummy_input: Optional[Tensor] = None,
evaluator: Optional[Callable[[Module], float]] = None, pruning_params: Dict = {}):
task_generator = AGPTaskGenerator(total_iteration=total_iteration,
origin_model=model,
origin_config_list=config_list,
log_dir=log_dir,
keep_intermediate_result=keep_intermediate_result)
pruner = PRUNER_DICT[pruning_algorithm](None, None, **pruning_params)
super().__init__(pruner, task_generator, finetuner=finetuner, speed_up=speed_up, dummy_input=dummy_input,
evaluator=evaluator, reset_weight=False)
class LotteryTicketPruner(IterativePruner):
"""
Parameters
----------
model : Module
The origin unwrapped pytorch model to be pruned.
config_list : List[Dict]
The origin config list provided by the user.
pruning_algorithm : str
Supported pruning algorithm ['level', 'l1', 'l2', 'fpgm', 'slim', 'apoz', 'mean_activation', 'taylorfo', 'admm'].
This iterative pruner will use the chosen corresponding pruner to prune the model in each iteration.
total_iteration : int
The total iteration number.
log_dir : str
        The log directory used to save the result; you can find the best result under this folder.
    keep_intermediate_result : bool
        Whether to keep intermediate results, including the intermediate model and masks from each iteration.
    finetuner : Optional[Callable[[Module], None]]
        The finetuner handles all finetune logic and takes a pytorch module as input.
        It will be called at the end of each iteration if reset_weight is False, and at the beginning of each iteration otherwise.
speed_up : bool
If set True, speed up the model at the end of each iteration to make the pruned model compact.
dummy_input : Optional[torch.Tensor]
If `speed_up` is True, `dummy_input` is required for tracing the model in speed up.
evaluator : Optional[Callable[[Module], float]]
Evaluate the pruned model and give a score.
If evaluator is None, the best result refers to the latest result.
reset_weight : bool
If set True, the model weight will reset to the original model weight at the end of each iteration step.
pruning_params : Dict
If the chosen pruning_algorithm has extra parameters, put them as a dict to pass in.
"""
def __init__(self, model: Module, config_list: List[Dict], pruning_algorithm: str,
total_iteration: int, log_dir: str = '.', keep_intermediate_result: bool = False,
finetuner: Optional[Callable[[Module], None]] = None, speed_up: bool = False, dummy_input: Optional[Tensor] = None,
evaluator: Optional[Callable[[Module], float]] = None, reset_weight: bool = True,
pruning_params: Dict = {}):
task_generator = LotteryTicketTaskGenerator(total_iteration=total_iteration,
origin_model=model,
origin_config_list=config_list,
log_dir=log_dir,
keep_intermediate_result=keep_intermediate_result)
pruner = PRUNER_DICT[pruning_algorithm](None, None, **pruning_params)
super().__init__(pruner, task_generator, finetuner=finetuner, speed_up=speed_up, dummy_input=dummy_input,
evaluator=evaluator, reset_weight=reset_weight)
class SimulatedAnnealingPruner(IterativePruner):
"""
Parameters
----------
model : Module
The origin unwrapped pytorch model to be pruned.
config_list : List[Dict]
The origin config list provided by the user.
evaluator : Callable[[Module], float]
Evaluate the pruned model and give a score.
start_temperature : float
Start temperature of the simulated annealing process.
stop_temperature : float
Stop temperature of the simulated annealing process.
cool_down_rate : float
Cool down rate of the temperature.
perturbation_magnitude : float
Initial perturbation magnitude to the sparsities. The magnitude decreases with current temperature.
pruning_algorithm : str
Supported pruning algorithm ['level', 'l1', 'l2', 'fpgm', 'slim', 'apoz', 'mean_activation', 'taylorfo', 'admm'].
This iterative pruner will use the chosen corresponding pruner to prune the model in each iteration.
pruning_params : Dict
If the chosen pruning_algorithm has extra parameters, put them as a dict to pass in.
log_dir : str
        The log directory used to save the result; you can find the best result under this folder.
    keep_intermediate_result : bool
        Whether to keep intermediate results, including the intermediate model and masks from each iteration.
    finetuner : Optional[Callable[[Module], None]]
        The finetuner handles all finetune logic, takes a pytorch module as input, and will be called in each iteration.
speed_up : bool
If set True, speed up the model at the end of each iteration to make the pruned model compact.
dummy_input : Optional[torch.Tensor]
If `speed_up` is True, `dummy_input` is required for tracing the model in speed up.
"""
def __init__(self, model: Module, config_list: List[Dict], evaluator: Callable[[Module], float], start_temperature: float = 100,
stop_temperature: float = 20, cool_down_rate: float = 0.9, perturbation_magnitude: float = 0.35,
pruning_algorithm: str = 'level', pruning_params: Dict = {}, log_dir: str = '.', keep_intermediate_result: bool = False,
finetuner: Optional[Callable[[Module], None]] = None, speed_up: bool = False, dummy_input: Optional[Tensor] = None):
task_generator = SimulatedAnnealingTaskGenerator(origin_model=model,
origin_config_list=config_list,
start_temperature=start_temperature,
stop_temperature=stop_temperature,
cool_down_rate=cool_down_rate,
perturbation_magnitude=perturbation_magnitude,
log_dir=log_dir,
keep_intermediate_result=keep_intermediate_result)
pruner = PRUNER_DICT[pruning_algorithm](None, None, **pruning_params)
super().__init__(pruner, task_generator, finetuner=finetuner, speed_up=speed_up, dummy_input=dummy_input,
evaluator=evaluator, reset_weight=False)
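A hedged usage sketch (not part of the original file); it assumes the PruningScheduler base class exposes compress() and get_best_result(), and that model is an already-built torch Module:

# Hedged example: prune 50% of Conv2d weights over 10 linear iterations.
config_list = [{'sparsity': 0.5, 'op_types': ['Conv2d']}]
pruner = LinearPruner(model, config_list, pruning_algorithm='l1',
                      total_iteration=10, log_dir='.')
pruner.compress()
best = pruner.get_best_result()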
| 52.232558 | 141 | 0.65331 |
8d6a522c88a37f3262602b654fe9be4cfce86a5b | 1,437 | py | Python | setup.py | nicoloverardo/matrix_regression | 03fb5dc79792e0a12dc787ce476eaff55b49a5cc | ["MIT"] | null | null | null | setup.py | nicoloverardo/matrix_regression | 03fb5dc79792e0a12dc787ce476eaff55b49a5cc | ["MIT"] | null | null | null | setup.py | nicoloverardo/matrix_regression | 03fb5dc79792e0a12dc787ce476eaff55b49a5cc | ["MIT"] | null | null | null |
from distutils.core import setup
from setuptools import find_packages
with open("README.md", "r", encoding="utf-8") as fh:
long_description = fh.read()
setup(
name="matrixreg",
packages=find_packages(),
version="0.1.1",
license="MIT",
description="Implementation of the MatrixRegression (MR) algorithm for online-learning multi-label text classification, by Popa, Zeitouni & Gardarin",
long_description=long_description,
long_description_content_type="text/markdown",
author="Nicolò Verardo",
author_email="n.verardo@outlook.com",
url="https://github.com/nicoloverardo/matrix_regression",
download_url="https://github.com/nicoloverardo/matrix_regression/archive/refs/tags/v0.1.1.tar.gz",
keywords=["text-classification", "multi-label-classification", "online-learning"],
install_requires=[
"numpy>=1.18.5",
"scipy>=1.4.1",
"scikit_learn>=0.24.1"
],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Topic :: Software Development",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3 :: Only",
],
)
| 35.925
| 154
| 0.652749
|
b89bc48c3d81574bdb6b7ace56f57dbb966eaa01
| 3,881
|
py
|
Python
|
LibrairieVideoAna/Unclean/BehavClasses.py
|
JostTim/custom_libs
|
8f9e3f45c6f5f7e47b6582e072d09a8910efddd3
|
[
"MIT"
] | null | null | null |
LibrairieVideoAna/Unclean/BehavClasses.py
|
JostTim/custom_libs
|
8f9e3f45c6f5f7e47b6582e072d09a8910efddd3
|
[
"MIT"
] | null | null | null |
LibrairieVideoAna/Unclean/BehavClasses.py
|
JostTim/custom_libs
|
8f9e3f45c6f5f7e47b6582e072d09a8910efddd3
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Fri Nov 30 13:33:55 2018
@author: Timothe
"""
import numpy as np
import math
import pickle
import re
class MazeData:
def __init__(self, Filename):
self.Filename = Filename
        regexpresult=re.match(r"(.*mouse([0-9]+)_([0-9]+))\.txt", self.Filename)
self.MouseNo=regexpresult.group(2)
self.SessionNo=regexpresult.group(3)
self.FilenameNoextension=regexpresult.group(1)
        # read the whole log up front; a context manager avoids leaking the file handle
        with open(self.Filename, "r") as file_object:
            self.RawContent = file_object.readlines()
def Organize(self):
ArrContent = np.zeros((0,5))
LastLine=[]
Catch=0
ActualTime=0
self.Mode="ChangeBased"
for lines in range(len(self.RawContent)):
CurrtLine = self.RawContent[lines][:-1].split("\t")
            # drop empty fields; popping while iterating over range(len(...))
            # skipped elements and could raise IndexError
            CurrtLine = [item for item in CurrtLine if item]
if len(CurrtLine)<4 :
if Catch==1:
break
continue
else:
if Catch == 0:
Catch = 1
else:
ActualTime=ActualTime+1
if CurrtLine == LastLine:
if self.Mode == "ChangeBased":
self.Mode="TimingBased"
continue
else:
LastLine = CurrtLine
CurrtLine=list(map(int, CurrtLine))
if self.Mode == "TimingBased":
OutLine=[ActualTime, CurrtLine[0], -1, CurrtLine[1], CurrtLine[2]]
elif self.Mode == "ChangeBased":
if len(CurrtLine)<5:
OutLine=[CurrtLine[0], CurrtLine[1], -1, CurrtLine[2], CurrtLine[3]]
else :
OutLine=[CurrtLine[0], CurrtLine[1], CurrtLine[2], CurrtLine[3], CurrtLine[4]]
else:
                raise ValueError("Class 'Mode' property does not match any value expected during 'Organize' method call")
ArrContent=np.vstack((ArrContent, OutLine))
if np.amax(ArrContent[:,0])>2000000:
ArrContent[:,0]=ArrContent[:,0]/1000
self.OrganizedContent=ArrContent
if (np.all(self.OrganizedContent[0,1:])):
self.Begining=self.OrganizedContent[0,0]
else:
self.Begining=0
self.Duration=self.OrganizedContent[-1,0]-self.Begining
self.DurationMinSec=[math.floor(self.Duration/60000), math.floor((self.Duration % 60000)/1000)]
def LickRate(self,seconds):
lastValue=0
ArrLickRate = [0] * math.ceil(self.Duration/(seconds*1000))
for value in range(len(self.OrganizedContent)):
if self.OrganizedContent[value,1] != lastValue:
time=math.floor((self.OrganizedContent[value,0]-self.Begining)/(1000*seconds))
ArrLickRate[time]=ArrLickRate[time]+1
lastValue=self.OrganizedContent[value,1]
if self.DurationMinSec[1] < seconds-5 :
self.ArrLickRate=ArrLickRate[:-1]
else:
self.ArrLickRate=ArrLickRate
def SaveLoad(self,ask):#use sql to save and load dynamically
if ask=="save":
with open(self.FilenameNoextension+'.pkl', 'wb') as f:
pickle.dump([self.OrganizedContent, self.Mode, self.Duration, self.DurationMinSec], f)
elif ask=="load":
            with open(self.FilenameNoextension+'.pkl', 'rb') as f:
self.OrganizedContent, self.Mode, self.Duration, self.DurationMinSec = pickle.load(f)
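# --- usage sketch (not part of the original file) ---
# Assuming a log file following the "...mouse<id>_<session>.txt" naming
# convention expected by the regex in __init__, a typical analysis could be:
if __name__ == "__main__":
    data = MazeData("C:/data/mouse12_3.txt")  # hypothetical path
    data.Organize()            # build OrganizedContent from the raw lines
    data.LickRate(30)          # lick counts per 30-second bin
    data.SaveLoad("save")      # pickle the organized session data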
| 36.961905
| 133
| 0.519711
|
8853601dd9b4a449a9cd05b8f031861545ef22cc
| 1,001
|
py
|
Python
|
scanner/Multiprocess-Scanner.py
|
AabhaasSinghal/python_scripts
|
b41803bff049fa65ed43a0313471c3c72fa59912
|
[
"Apache-2.0"
] | null | null | null |
scanner/Multiprocess-Scanner.py
|
AabhaasSinghal/python_scripts
|
b41803bff049fa65ed43a0313471c3c72fa59912
|
[
"Apache-2.0"
] | null | null | null |
scanner/Multiprocess-Scanner.py
|
AabhaasSinghal/python_scripts
|
b41803bff049fa65ed43a0313471c3c72fa59912
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
from multiprocessing import Process, Queue
from scapy.all import *
import sys
import queue as TQ  # Python 3 standard-library queue (provides the Empty exception)
def WorkerProcess(ip, pid, q) :
while True :
port = 0
try :
port = q.get(block=False)
except TQ.Empty :
print "Worker %d exiting ..." % (pid)
return 0
# port scanning to begin
# we rely on scapy to do this
response = sr1(IP(dst=ip)/TCP(dport=port, flags="S"), verbose=False, timeout=.2)
# only checking for SYN-ACK == flags = 18
        # filtered ports etc. is another story altogether
if response :
if response[TCP].flags == 18 :
print "ThreadId %d: Received port number %d Status: OPEN" %(pid, port)
if __name__ == '__main__':
    que = Queue()
    # enqueue ports 1-999 for the workers to pull from
    for j in range(1, 1000):
        que.put(j)
    worker_ids = []
    # two processes are created for scanning
    for i in range(1, 3):
        print("Creating Worker : %d" % i)
        worker = Process(target=WorkerProcess, args=(sys.argv[1], i, que))
        worker.start()
        worker_ids.append(worker)
        print("WorkerThread %d Created!" % i)
    # wait for both workers to drain the queue before exiting
    for worker in worker_ids:
        worker.join()
| 22.244444
| 83
| 0.652348
|
9dfdd8703588363ee1eea5908bcc540d8de82715
| 12,754
|
py
|
Python
|
test/distributed/_shard/test_replicated_tensor.py
|
stungkit/pytorch
|
0f05e398705bf15406bce79f7ee57d3935ad2abd
|
[
"Intel"
] | null | null | null |
test/distributed/_shard/test_replicated_tensor.py
|
stungkit/pytorch
|
0f05e398705bf15406bce79f7ee57d3935ad2abd
|
[
"Intel"
] | 1
|
2022-01-10T18:39:28.000Z
|
2022-01-10T19:15:57.000Z
|
test/distributed/_shard/test_replicated_tensor.py
|
stungkit/pytorch
|
0f05e398705bf15406bce79f7ee57d3935ad2abd
|
[
"Intel"
] | 1
|
2022-03-26T14:42:50.000Z
|
2022-03-26T14:42:50.000Z
|
# Owner(s): ["oncall: distributed"]
import io
import torch
import torch.distributed._shard.sharded_tensor as sharded_tensor
import torch.distributed as dist
from torch.nn.parallel import DistributedDataParallel as DDP
from torch.distributed._shard import _shard_tensor
from torch.distributed._shard.replicated_tensor import ReplicatedTensor
from torch.distributed._shard.sharding_spec import ChunkShardingSpec
from torch.testing._internal.common_distributed import (
requires_nccl,
skip_if_lt_x_gpu,
)
from torch.testing._internal.distributed._shard.sharded_tensor import (
ShardedTensorTestBase,
with_comms,
)
from torch.testing._internal.distributed._shard.sharded_tensor._test_ops_common import (
gen_binary_op_func
)
from torch.testing._internal.distributed._shard.sharded_tensor import TEST_GPU_NUM
class TestReplicatedTensor(ShardedTensorTestBase):
@with_comms(init_rpc=False)
@skip_if_lt_x_gpu(TEST_GPU_NUM)
@requires_nccl()
def test_replicated_tensor_basics(self):
local_tensor = torch.ones(3, 3, device=f"cuda:{self.rank}") * 4
replica_tensor = ReplicatedTensor(local_tensor)
        # validate it's a replicated tensor by checking values on all ranks
validated = replica_tensor.validate()
self.assertEqual(validated, True)
res = replica_tensor + 2
self.assertIsInstance(res, torch.Tensor)
self.assertNotIsInstance(res, ReplicatedTensor)
self.assertEqual(res, torch.ones(3, 3) * 6)
        # modify the local tensor on a certain rank, and test that validation raises
if self.rank == 2:
local_tensor += 3
with self.assertRaisesRegex(ValueError, 'have different values'):
replica_tensor.validate()
@with_comms(init_rpc=False)
@skip_if_lt_x_gpu(TEST_GPU_NUM)
@requires_nccl()
def test_replicated_tensor_inter_op_replicated_tensor(self):
local_tensor = torch.ones(3, 3, device=f"cuda:{self.rank}")
replica_tensor1 = ReplicatedTensor(local_tensor * 4)
replica_tensor2 = ReplicatedTensor(local_tensor * 6)
new_tensor = replica_tensor1 * replica_tensor2
self.assertIsInstance(new_tensor, ReplicatedTensor)
self.assertEqual(new_tensor, torch.ones(3, 3) * 24)
# test replicated tensor inter-op with different pgs
new_pg = dist.new_group(ranks=[1, 2, 3])
replica_tensor_new_group = ReplicatedTensor(local_tensor * 3, process_group=new_pg)
with self.assertRaisesRegex(RuntimeError, 'must be in the same'):
replica_tensor_new_group * replica_tensor1
@with_comms(init_rpc=False)
@skip_if_lt_x_gpu(TEST_GPU_NUM)
@requires_nccl()
def test_replicated_tensor_inter_op_tensor(self):
local_tensor = torch.ones(3, 3, device=f"cuda:{self.rank}") * 4
replica_tensor = ReplicatedTensor(local_tensor)
local_rand_tensor = torch.randn(3, 3, device=f"cuda:{self.rank}")
new_tensor = replica_tensor + local_rand_tensor
self.assertIsInstance(new_tensor, torch.Tensor)
self.assertNotIsInstance(new_tensor, ReplicatedTensor)
self.assertEqual(new_tensor, local_tensor + local_rand_tensor)
@with_comms(init_rpc=False)
@skip_if_lt_x_gpu(TEST_GPU_NUM)
@requires_nccl()
def test_replicated_tensor_inter_op_sharded_tensor(self):
torch.manual_seed(self.rank)
local_tensor1 = torch.rand(12, 3, device=f"cuda:{self.rank}") * 4
local_tensor2 = torch.ones(12, 3, device=f"cuda:{self.rank}") * 4
spec = ChunkShardingSpec(
dim=0,
placements=[
"rank:0/cuda:0",
"rank:1/cuda:1",
"rank:2/cuda:2",
"rank:3/cuda:3",
],
)
st = _shard_tensor(local_tensor1, spec, src_rank=0)
replica_tensor = ReplicatedTensor(local_tensor2)
ops = ["torch.add", "torch.sub", "torch.mul", "torch.div", "+", "-", "*", "/"]
for op in ops:
binary_op = gen_binary_op_func(op)
res = binary_op(st, replica_tensor)
self.assertIsInstance(res, sharded_tensor.ShardedTensor)
self.assertNotIsInstance(res, ReplicatedTensor)
output = torch.empty((12, 3), device=self.rank) if self.rank == 0 else None
res.gather(dst=0, out=output)
if self.rank == 0:
local_output = binary_op(local_tensor1, local_tensor2)
self.assertEqual(output, local_output)
# reflective
reflect_res = binary_op(replica_tensor, st)
self.assertIsInstance(reflect_res, sharded_tensor.ShardedTensor)
self.assertNotIsInstance(reflect_res, ReplicatedTensor)
reflect_output = torch.empty((12, 3), device=self.rank) if self.rank == 0 else None
reflect_res.gather(dst=0, out=reflect_output)
if self.rank == 0:
reflect_local_output = binary_op(local_tensor2, local_tensor1)
self.assertEqual(reflect_output, reflect_local_output)
@with_comms(init_rpc=False)
@skip_if_lt_x_gpu(TEST_GPU_NUM)
@requires_nccl()
def test_replicated_tensor_implicit_broadcasting(self):
# use same seed
torch.manual_seed(self.rank)
# test implicit broadcasting
local_tensor1 = torch.rand(12, 3, device=f"cuda:{self.rank}") * 4
# we use size (3) to trigger the implicit broadcasting logic
        # and it will fail if implicit broadcasting does not happen.
local_tensor2 = torch.ones(3, device=f"cuda:{self.rank}")
spec = ChunkShardingSpec(
dim=0,
placements=[
"rank:0/cuda:0",
"rank:1/cuda:1",
"rank:2/cuda:2",
"rank:3/cuda:3",
],
)
st = _shard_tensor(local_tensor1, spec, src_rank=0)
replica_tensor = ReplicatedTensor(local_tensor2)
ops = ["torch.add", "torch.sub", "torch.mul", "torch.div", "+", "-", "*", "/"]
for op in ops:
binary_op = gen_binary_op_func(op)
            # the replicated tensor should be broadcast automatically
res = binary_op(st, replica_tensor)
self.assertIsInstance(res, sharded_tensor.ShardedTensor)
output = torch.empty((12, 3), device=self.rank) if self.rank == 0 else None
res.gather(dst=0, out=output)
if self.rank == 0:
local_output = binary_op(local_tensor1, local_tensor2)
self.assertEqual(output, local_output)
@with_comms(init_rpc=False)
@skip_if_lt_x_gpu(TEST_GPU_NUM)
@requires_nccl()
def test_replicated_tensor_inter_op_sharded_tensor_errors(self):
local_tensor = torch.ones(3, 3, device=f"cuda:{self.rank}") * 4
replica_tensor = ReplicatedTensor(local_tensor)
torch.manual_seed(self.rank)
spec = ChunkShardingSpec(
dim=0,
placements=[
"rank:0/cuda:0",
"rank:1/cuda:1",
"rank:2/cuda:2",
"rank:3/cuda:3",
],
)
st1 = sharded_tensor.rand(spec, (20, 3, 3))
st2 = sharded_tensor.rand(spec, (30, 3, 3))
with self.assertRaisesRegex(RuntimeError, 'Implicit broadcasting'):
st1 + st2
with self.assertRaisesRegex(RuntimeError, 'not supported for ShardedTensor'):
st1 % replica_tensor
@with_comms(init_rpc=False)
@skip_if_lt_x_gpu(TEST_GPU_NUM)
@requires_nccl()
def test_with_ddp(self):
# Test Replicated params for DDP
replica_tensor = ReplicatedTensor(torch.rand(4, 8, device=self.rank))
model = torch.nn.Linear(8, 2).cuda(self.rank)
optim = torch.optim.SGD(model.parameters(), lr=0.1)
ddp = DDP(model)
# Test module.parameters.
params = list(ddp.parameters())
self.assertEqual(2, len(params))
self.assertEqual(ddp.module.weight, params[0])
self.assertEqual(ddp.module.bias, params[1])
params = list(model.parameters())
self.assertEqual(2, len(params))
self.assertEqual(model.weight, params[0])
self.assertEqual(model.bias, params[1])
# Validate output
out = ddp(replica_tensor)
self.assertIsInstance(out, ReplicatedTensor)
# Test backward and optimizer.
# Validate backward.
out.sum().backward()
self.assertIsNotNone(model.weight.grad)
self.assertIsNotNone(model.bias.grad)
self.assertIsNotNone(ddp.module.weight.grad)
self.assertIsNotNone(ddp.module.bias.grad)
original_params = []
for param_group in optim.param_groups:
for original_param in param_group['params']:
self.assertIsNotNone(original_param.grad)
original_params.append(original_param)
self.assertEqual(model.weight.grad, original_params[0].grad)
self.assertEqual(model.bias.grad, original_params[1].grad)
self.assertEqual(model.weight.grad, ddp.module.weight.grad)
self.assertEqual(model.bias.grad, ddp.module.bias.grad)
# Validate optimizer.
optim.step()
self.assertEqual(model.weight, ddp.module.weight)
self.assertEqual(model.weight, original_params[0])
self.assertEqual(model.bias, ddp.module.bias)
self.assertEqual(model.bias, original_params[1])
# Validate zero_grad
optim.zero_grad()
self.assertEqual(model.weight.grad, torch.zeros_like(model.weight.grad))
self.assertEqual(model.weight.grad, ddp.module.weight.grad)
self.assertEqual(model.weight.grad, original_params[0].grad)
self.assertEqual(model.bias.grad, torch.zeros_like(model.bias.grad))
self.assertEqual(model.bias.grad, ddp.module.bias.grad)
self.assertEqual(model.bias.grad, original_params[1].grad)
# Validate zero_grad set_to_none
optim.zero_grad(set_to_none=True)
self.assertIsNone(model.weight.grad)
self.assertEqual(model.weight.grad, ddp.module.weight.grad)
self.assertEqual(model.weight.grad, original_params[0].grad)
self.assertIsNone(model.bias.grad)
self.assertEqual(model.bias.grad, ddp.module.bias.grad)
self.assertEqual(model.bias.grad, original_params[1].grad)
# Multiple forward passes.
for _ in range(5):
out = ddp(replica_tensor)
self.assertIsInstance(out, ReplicatedTensor)
# Test with context manager.
from torch.nn.parallel._replicated_tensor_ddp_utils import _ddp_replicated_tensor
with _ddp_replicated_tensor(False):
for _ in range(5):
with _ddp_replicated_tensor(True):
ddp = DDP(model)
out = ddp(replica_tensor)
self.assertIsInstance(out, ReplicatedTensor)
# Test save and load.
with _ddp_replicated_tensor(False):
ddp = DDP(model)
expected_state_dict = ddp.state_dict()
buffer = io.BytesIO()
torch.save(ddp, buffer)
buffer.seek(0)
obj = torch.load(buffer)
self.assertEqual(expected_state_dict, obj.state_dict())
with _ddp_replicated_tensor(True):
ddp = DDP(model)
buffer = io.BytesIO()
torch.save(ddp, buffer)
buffer.seek(0)
obj = torch.load(buffer)
self.assertEqual(expected_state_dict, obj.state_dict())
@with_comms(init_rpc=False)
@skip_if_lt_x_gpu(TEST_GPU_NUM)
@requires_nccl()
def test_unsqueeze(self):
local_tensor = torch.rand(3, 3, device=self.rank)
replicated_tensor = ReplicatedTensor(local_tensor)
unsqueezed_replicated_tensor = replicated_tensor.unsqueeze(0)
unsqueezed_local_tensor = local_tensor.unsqueeze(0)
self.assertIsInstance(unsqueezed_replicated_tensor, ReplicatedTensor)
self.assertIsInstance(torch.unsqueeze(replicated_tensor, 0), ReplicatedTensor)
self.assertEqual(unsqueezed_local_tensor, unsqueezed_replicated_tensor)
self.assertEqual(torch.unsqueeze(replicated_tensor, 0), unsqueezed_replicated_tensor)
@with_comms(init_rpc=False)
@skip_if_lt_x_gpu(TEST_GPU_NUM)
@requires_nccl()
def test_getitem(self):
local_tensor = torch.rand(3, 3, device=self.rank)
replicated_tensor = ReplicatedTensor(local_tensor)
replicated_tensor_view = replicated_tensor[0]
local_tensor_view = local_tensor[0]
self.assertIsInstance(replicated_tensor_view, ReplicatedTensor)
self.assertEqual(local_tensor_view, replicated_tensor_view)
| 37.845697
| 95
| 0.655873
|
0fc1557fe17715f0bcdbb2d6a415b9273f1c4a82
| 1,082
|
py
|
Python
|
libra/datasets.py
|
kamranmajid41/libra
|
10635d6799b7ad32523402551e9d3d2e8f6df285
|
[
"MIT"
] | null | null | null |
libra/datasets.py
|
kamranmajid41/libra
|
10635d6799b7ad32523402551e9d3d2e8f6df285
|
[
"MIT"
] | null | null | null |
libra/datasets.py
|
kamranmajid41/libra
|
10635d6799b7ad32523402551e9d3d2e8f6df285
|
[
"MIT"
] | null | null | null |
from download import download
import pandas as pd
import os
_dataset_links = {
'housing': 'https://download1325.mediafire.com/5sm8nmw2gixg/x8m5sol30wz5kjq/5227_7876_bundle_archive+%282%29.zip',
'fake job postings': 'https://download855.mediafire.com/kezk0rq1ogzg/ikvbeoyirm92qpf/533871_976879_bundle_archive.zip',
'landslides': 'https://download947.mediafire.com/0mgw8yaubcjg/yd2b09ty4qsk6qb/686_1296_bundle_archive+%281%29.zip'
}
def load(dataset_name, path='libra_datasets'):
    # normalise once so that mixed-case names (e.g. 'Housing') resolve to the
    # right file and to the right download link below
    dataset_name = dataset_name.lower()
    if dataset_name not in _dataset_links:
        raise Exception("Dataset does not exist or is not supported by Libra")
    if dataset_name == 'housing':
        file_name = 'housing.csv'
    elif dataset_name == 'fake job postings':
        file_name = 'fake_job_postings.csv'
    else:
        file_name = 'catalog.csv'
    file_path = os.path.join(path, file_name)
    if not os.path.isfile(file_path):
        download(url=_dataset_links[dataset_name], path=path, kind='zip', progressbar=True, replace=True, timeout=100000)
    return pd.read_csv(file_path)
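# --- usage sketch (not part of the original module) ---
# A dataset can be pulled into a pandas DataFrame like so (downloads on the
# first call, then reuses the local copy):
if __name__ == "__main__":
    df = load("housing")
    print(df.head())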
| 37.310345
| 123
| 0.719039
|
ff22465618848bbe7eed058b60610eb26f452382
| 9,451
|
py
|
Python
|
Patch/Adafruit_MotorHAT_Motors.py
|
ELSPL/EduBot-2WD
|
d3ef0a85cf87819e9145cc8ec44f0891932c6161
|
[
"Apache-2.0"
] | null | null | null |
Patch/Adafruit_MotorHAT_Motors.py
|
ELSPL/EduBot-2WD
|
d3ef0a85cf87819e9145cc8ec44f0891932c6161
|
[
"Apache-2.0"
] | null | null | null |
Patch/Adafruit_MotorHAT_Motors.py
|
ELSPL/EduBot-2WD
|
d3ef0a85cf87819e9145cc8ec44f0891932c6161
|
[
"Apache-2.0"
] | 1
|
2019-06-27T11:12:29.000Z
|
2019-06-27T11:12:29.000Z
|
import time
from Adafruit_MotorHAT.Adafruit_PWM_Servo_Driver import PWM
class Adafruit_StepperMotor:
MICROSTEPS = 8
MICROSTEP_CURVE = [0, 50, 98, 142, 180, 212, 236, 250, 255]
#MICROSTEPS = 16
# a sinusoidal curve NOT LINEAR!
#MICROSTEP_CURVE = [0, 25, 50, 74, 98, 120, 141, 162, 180, 197, 212, 225, 236, 244, 250, 253, 255]
def __init__(self, controller, num, steps=200):
self.MC = controller
self.revsteps = steps
self.motornum = num
self.sec_per_step = 0.1
self.steppingcounter = 0
self.currentstep = 0
num -= 1
if (num == 0):
self.PWMA = 8
self.AIN2 = 9
self.AIN1 = 10
self.PWMB = 13
self.BIN2 = 12
self.BIN1 = 11
elif (num == 1):
self.PWMA = 2
self.AIN2 = 3
self.AIN1 = 4
self.PWMB = 7
self.BIN2 = 6
self.BIN1 = 5
else:
raise NameError('MotorHAT Stepper must be between 1 and 2 inclusive')
def setSpeed(self, rpm):
self.sec_per_step = 60.0 / (self.revsteps * rpm)
self.steppingcounter = 0
def oneStep(self, dir, style):
pwm_a = pwm_b = 255
# first determine what sort of stepping procedure we're up to
if (style == Adafruit_MotorHAT.SINGLE):
if ((self.currentstep//(self.MICROSTEPS//2)) % 2):
# we're at an odd step, weird
if (dir == Adafruit_MotorHAT.FORWARD):
self.currentstep += self.MICROSTEPS//2
else:
self.currentstep -= self.MICROSTEPS//2
else:
# go to next even step
if (dir == Adafruit_MotorHAT.FORWARD):
self.currentstep += self.MICROSTEPS
else:
self.currentstep -= self.MICROSTEPS
if (style == Adafruit_MotorHAT.DOUBLE):
if not (self.currentstep//(self.MICROSTEPS//2) % 2):
# we're at an even step, weird
if (dir == Adafruit_MotorHAT.FORWARD):
self.currentstep += self.MICROSTEPS//2
else:
self.currentstep -= self.MICROSTEPS//2
else:
# go to next odd step
if (dir == Adafruit_MotorHAT.FORWARD):
self.currentstep += self.MICROSTEPS
else:
self.currentstep -= self.MICROSTEPS
if (style == Adafruit_MotorHAT.INTERLEAVE):
if (dir == Adafruit_MotorHAT.FORWARD):
self.currentstep += self.MICROSTEPS//2
else:
self.currentstep -= self.MICROSTEPS//2
if (style == Adafruit_MotorHAT.MICROSTEP):
if (dir == Adafruit_MotorHAT.FORWARD):
self.currentstep += 1
else:
self.currentstep -= 1
# go to next 'step' and wrap around
self.currentstep += self.MICROSTEPS * 4
self.currentstep %= self.MICROSTEPS * 4
pwm_a = pwm_b = 0
if (self.currentstep >= 0) and (self.currentstep < self.MICROSTEPS):
pwm_a = self.MICROSTEP_CURVE[self.MICROSTEPS - self.currentstep]
pwm_b = self.MICROSTEP_CURVE[self.currentstep]
elif (self.currentstep >= self.MICROSTEPS) and (self.currentstep < self.MICROSTEPS*2):
pwm_a = self.MICROSTEP_CURVE[self.currentstep - self.MICROSTEPS]
pwm_b = self.MICROSTEP_CURVE[self.MICROSTEPS*2 - self.currentstep]
elif (self.currentstep >= self.MICROSTEPS*2) and (self.currentstep < self.MICROSTEPS*3):
pwm_a = self.MICROSTEP_CURVE[self.MICROSTEPS*3 - self.currentstep]
pwm_b = self.MICROSTEP_CURVE[self.currentstep - self.MICROSTEPS*2]
elif (self.currentstep >= self.MICROSTEPS*3) and (self.currentstep < self.MICROSTEPS*4):
pwm_a = self.MICROSTEP_CURVE[self.currentstep - self.MICROSTEPS*3]
pwm_b = self.MICROSTEP_CURVE[self.MICROSTEPS*4 - self.currentstep]
# go to next 'step' and wrap around
self.currentstep += self.MICROSTEPS * 4
self.currentstep %= self.MICROSTEPS * 4
# only really used for microstepping, otherwise always on!
self.MC._pwm.setPWM(self.PWMA, 0, pwm_a*16)
self.MC._pwm.setPWM(self.PWMB, 0, pwm_b*16)
# set up coil energizing!
coils = [0, 0, 0, 0]
if (style == Adafruit_MotorHAT.MICROSTEP):
if (self.currentstep >= 0) and (self.currentstep < self.MICROSTEPS):
coils = [1, 1, 0, 0]
elif (self.currentstep >= self.MICROSTEPS) and (self.currentstep < self.MICROSTEPS*2):
coils = [0, 1, 1, 0]
elif (self.currentstep >= self.MICROSTEPS*2) and (self.currentstep < self.MICROSTEPS*3):
coils = [0, 0, 1, 1]
elif (self.currentstep >= self.MICROSTEPS*3) and (self.currentstep < self.MICROSTEPS*4):
coils = [1, 0, 0, 1]
else:
step2coils = [ [1, 0, 0, 0],
[1, 1, 0, 0],
[0, 1, 0, 0],
[0, 1, 1, 0],
[0, 0, 1, 0],
[0, 0, 1, 1],
[0, 0, 0, 1],
[1, 0, 0, 1] ]
coils = step2coils[self.currentstep//(self.MICROSTEPS//2)]
#print "coils state = " + str(coils)
self.MC.setPin(self.AIN2, coils[0])
self.MC.setPin(self.BIN1, coils[1])
self.MC.setPin(self.AIN1, coils[2])
self.MC.setPin(self.BIN2, coils[3])
return self.currentstep
def step(self, steps, direction, stepstyle):
s_per_s = self.sec_per_step
lateststep = 0
if (stepstyle == Adafruit_MotorHAT.INTERLEAVE):
s_per_s = s_per_s / 2.0
if (stepstyle == Adafruit_MotorHAT.MICROSTEP):
s_per_s /= self.MICROSTEPS
steps *= self.MICROSTEPS
print("{} sec per step".format(s_per_s))
for s in range(steps):
lateststep = self.oneStep(direction, stepstyle)
time.sleep(s_per_s)
if (stepstyle == Adafruit_MotorHAT.MICROSTEP):
# this is an edge case, if we are in between full steps, lets just keep going
# so we end on a full step
while (lateststep != 0) and (lateststep != self.MICROSTEPS):
lateststep = self.oneStep(direction, stepstyle)
time.sleep(s_per_s)
class Adafruit_DCMotor:
def __init__(self, controller, num):
self.MC = controller
self.motornum = num
pwm = in1 = in2 = 0
if (num == 0):
#pwm = 8 /9 8
in2 = 11
in1 = 10
elif (num == 1):
#pwm = 13 10 11
in2 = 8
in1 = 9
elif (num == 2):
#pwm = 2
in2 = 13
in1 = 12
elif (num == 3):
in2 = 4
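            # note: in1 keeps its initial value of 0 for motor 4 in this patch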
else:
raise NameError('MotorHAT Motor must be between 1 and 4 inclusive')
self.PWMpin = pwm
self.IN1pin = in1
self.IN2pin = in2
def run(self, command, speed):
if not self.MC:
return
if (speed < 0):
speed = 0
if (speed > 255):
speed = 255
if (command == Adafruit_MotorHAT.FORWARD):
self.MC.setPin(self.IN2pin, 0)
#self.MC.setPin(self.IN1pin, 1)
self.MC._pwm.setPWM(self.IN1pin, 0, speed*16)
if (command == Adafruit_MotorHAT.BACKWARD):
self.MC.setPin(self.IN1pin, 0)
#self.MC.setPin(self.IN2pin, 1)
self.MC._pwm.setPWM(self.IN2pin, 0, speed*16)
if (command == Adafruit_MotorHAT.RELEASE):
self.MC.setPin(self.IN1pin, 0)
self.MC.setPin(self.IN2pin, 0)
def setSpeed(self, speed):
if (speed < 0):
speed = 0
if (speed > 255):
speed = 255
self.MC._pwm.setPWM(self.PWMpin, 0, speed*16)
class Adafruit_MotorHAT:
FORWARD = 1
BACKWARD = 2
BRAKE = 3
RELEASE = 4
SINGLE = 1
DOUBLE = 2
INTERLEAVE = 3
MICROSTEP = 4
def __init__(self, addr = 0x40, freq = 1600, i2c=None, i2c_bus=None):
self._frequency = freq
self.motors = [ Adafruit_DCMotor(self, m) for m in range(4) ]
self.steppers = [ Adafruit_StepperMotor(self, 1), Adafruit_StepperMotor(self, 2) ]
self._pwm = PWM(addr, debug=False, i2c=i2c, i2c_bus=i2c_bus)
self._pwm.setPWMFreq(self._frequency)
def setPin(self, pin, value):
if (pin < 0) or (pin > 15):
raise NameError('PWM pin must be between 0 and 15 inclusive')
if (value != 0) and (value != 1):
raise NameError('Pin value must be 0 or 1!')
if (value == 0):
self._pwm.setPWM(pin, 0, 4096)
if (value == 1):
self._pwm.setPWM(pin, 4096, 0)
def getStepper(self, steps, num):
if (num < 1) or (num > 2):
raise NameError('MotorHAT Stepper must be between 1 and 2 inclusive')
return self.steppers[num-1]
def getMotor(self, num):
if (num < 1) or (num > 4):
raise NameError('MotorHAT Motor must be between 1 and 4 inclusive')
return self.motors[num-1]
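# --- usage sketch (not part of the original patch) ---
# Assuming a MotorHAT on the default I2C address, driving DC motor 1 forward
# at half speed for one second could look like:
if __name__ == "__main__":
    hat = Adafruit_MotorHAT(addr=0x40)
    motor = hat.getMotor(1)
    motor.setSpeed(128)
    motor.run(Adafruit_MotorHAT.FORWARD, 128)
    time.sleep(1)
    motor.run(Adafruit_MotorHAT.RELEASE, 0)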
| 36.774319
| 102
| 0.534864
|
082907747772cc1a7bcb4aba0637025015e8248d
| 1,234
|
py
|
Python
|
scrapers/scrape_bl_common.py
|
brunis83/covid_19
|
7851abcd9b472931051123caada6a64bc5fce2b5
|
[
"CC-BY-4.0"
] | 485
|
2020-03-10T20:10:44.000Z
|
2022-03-27T16:11:30.000Z
|
scrapers/scrape_bl_common.py
|
mariaguskova/covid_19
|
24d8f7a30e63d8683ebbfdcf868ff67c430fcd18
|
[
"CC-BY-4.0"
] | 682
|
2020-03-17T09:55:12.000Z
|
2022-03-28T15:16:24.000Z
|
scrapers/scrape_bl_common.py
|
mariaguskova/covid_19
|
24d8f7a30e63d8683ebbfdcf868ff67c430fcd18
|
[
"CC-BY-4.0"
] | 224
|
2020-03-09T11:42:13.000Z
|
2022-03-22T13:16:33.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import datetime
from bs4 import BeautifulSoup
import re
import scrape_common as sc
def get_latest_bl_bulletin_url():
return get_all_bl_bulletin_urls()[0]
def get_all_bl_bulletin_urls():
news_url = 'https://www.baselland.ch/politik-und-behorden/direktionen/volkswirtschafts-und-gesundheitsdirektion/amt-fur-gesundheit/medizinische-dienste/kantonsarztlicher-dienst/aktuelles/medienmitteilungen-1'
news_content = sc.download(news_url, silent=True)
soup = BeautifulSoup(news_content, 'html.parser')
bulletins = soup.find_all('a', href=re.compile(r'.*/coronavirus-wochenbulletin.*'))
bulletin_urls = []
for bulletin in bulletins:
bulletin_urls.append(bulletin.get('href'))
return bulletin_urls
def strip_bl_bulletin_numbers(content):
content = re.sub(r'(\d+)’(\d+)', r'\1\2', content)
content = re.sub(r'(\d+)\'(\d+)', r'\1\2', content)
return content
def parse_bl_date(s):
row_date = s.replace('-', '.')
    row_date = row_date.replace('/', '.')
parts = row_date.split('.')
s_date = datetime.datetime(day=int(parts[0]), month=int(parts[1]), year=int(parts[2]))
key = s_date.date().isoformat()
return (key, row_date)
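# --- usage sketch (not part of the original scraper) ---
# parse_bl_date() normalises '.', '-' or '/' separated day-first dates into
# an ISO key plus the normalised raw string:
if __name__ == '__main__':
    assert parse_bl_date('17/04/2020') == ('2020-04-17', '17.04.2020')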
| 31.641026
| 212
| 0.6953
|
616c509bb28b80d8ad129e7922b26a3c5bec6e7b
| 597
|
py
|
Python
|
setup.py
|
ckxckx/lolviz
|
19d7d7a5352bfdd0a4fc236b904e81481bd3146f
|
[
"BSD-3-Clause"
] | 790
|
2017-08-26T19:32:59.000Z
|
2022-03-31T21:53:20.000Z
|
setup.py
|
ckxckx/lolviz
|
19d7d7a5352bfdd0a4fc236b904e81481bd3146f
|
[
"BSD-3-Clause"
] | 23
|
2017-08-28T09:06:29.000Z
|
2022-01-12T03:26:36.000Z
|
setup.py
|
ckxckx/lolviz
|
19d7d7a5352bfdd0a4fc236b904e81481bd3146f
|
[
"BSD-3-Clause"
] | 46
|
2017-08-27T17:27:23.000Z
|
2022-03-04T08:34:38.000Z
|
from setuptools import setup
setup(
name='lolviz',
version='1.4.4',
url='https://github.com/parrt/lolviz',
license='BSD',
py_modules=['lolviz'],
author='Terence Parr',
author_email='parrt@antlr.org',
install_requires=['graphviz'], # needs numpy if you use ndarrayviz()
description='A simple Python data-structure visualization tool for call stacks, lists of lists, lists, dictionaries, numpy arrays',
keywords='visualization data structures',
classifiers=['License :: OSI Approved :: BSD License',
'Intended Audience :: Developers']
)
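# --- usage sketch (not part of this setup script; kept as comments so that
# nothing executes at install time) ---
# After installation, a minimal visualization with the (assumed) lolviz API:
#
#     from lolviz import listviz
#     g = listviz(['hi', 'mom'])
#     g.view()   # renders the list via graphviz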
| 35.117647
| 135
| 0.678392
|
932eb33ecd1245a2cf138e82cb99a8142499d6de
| 22,675
|
py
|
Python
|
tests/core/test_actions.py
|
Phillammon/rasa
|
c3d1738e9a9a3d76b37a01258a3813f0fc90d852
|
[
"Apache-2.0"
] | 1
|
2020-08-09T13:28:37.000Z
|
2020-08-09T13:28:37.000Z
|
tests/core/test_actions.py
|
m0sesa/rasa
|
219baa461c066a199fb4fb10a123012ebc8182b3
|
[
"Apache-2.0"
] | 75
|
2020-08-06T08:55:42.000Z
|
2022-03-01T13:22:11.000Z
|
tests/core/test_actions.py
|
m0sesa/rasa
|
219baa461c066a199fb4fb10a123012ebc8182b3
|
[
"Apache-2.0"
] | null | null | null |
from typing import List
import pytest
from aioresponses import aioresponses
import rasa.core
from rasa.core.actions import action
from rasa.core.actions.action import (
ACTION_BACK_NAME,
ACTION_DEACTIVATE_FORM_NAME,
ACTION_DEFAULT_ASK_AFFIRMATION_NAME,
ACTION_DEFAULT_ASK_REPHRASE_NAME,
ACTION_DEFAULT_FALLBACK_NAME,
ACTION_LISTEN_NAME,
ACTION_RESTART_NAME,
ACTION_REVERT_FALLBACK_EVENTS_NAME,
ACTION_SESSION_START_NAME,
RULE_SNIPPET_ACTION_NAME,
ActionBack,
ActionDefaultAskAffirmation,
ActionDefaultAskRephrase,
ActionDefaultFallback,
ActionExecutionRejection,
ActionListen,
ActionRestart,
ActionUtterTemplate,
ActionRetrieveResponse,
RemoteAction,
ActionSessionStart,
)
from rasa.core.actions.forms import FormAction
from rasa.core.actions.two_stage_fallback import ACTION_TWO_STAGE_FALLBACK_NAME
from rasa.core.channels import CollectingOutputChannel
from rasa.core.domain import Domain, SessionConfig
from rasa.core.events import (
Restarted,
SlotSet,
UserUtteranceReverted,
BotUttered,
Form,
SessionStarted,
ActionExecuted,
Event,
UserUttered,
)
from rasa.core.nlg.template import TemplatedNaturalLanguageGenerator
from rasa.core.constants import USER_INTENT_SESSION_START
from rasa.core.trackers import DialogueStateTracker
from rasa.utils.endpoints import ClientResponseError, EndpointConfig
from tests.utilities import json_of_latest_request, latest_request
@pytest.fixture(scope="module")
def template_nlg():
templates = {
"utter_ask_rephrase": [{"text": "can you rephrase that?"}],
"utter_restart": [{"text": "congrats, you've restarted me!"}],
"utter_back": [{"text": "backing up..."}],
"utter_invalid": [{"text": "a template referencing an invalid {variable}."}],
"utter_buttons": [
{
"text": "button message",
"buttons": [
{"payload": "button1", "title": "button1"},
{"payload": "button2", "title": "button2"},
],
}
],
}
return TemplatedNaturalLanguageGenerator(templates)
@pytest.fixture(scope="module")
def template_sender_tracker(default_domain):
return DialogueStateTracker("template-sender", default_domain.slots)
def test_text_format():
assert "{}".format(ActionListen()) == "Action('action_listen')"
assert (
"{}".format(ActionUtterTemplate("my_action_name"))
== "ActionUtterTemplate('my_action_name')"
)
assert (
"{}".format(ActionRetrieveResponse("respond_test"))
== "ActionRetrieveResponse('respond_test')"
)
def test_action_instantiation_from_names():
instantiated_actions = action.actions_from_names(
["random_name", "utter_test", "respond_test"],
None,
["random_name", "utter_test"],
)
assert len(instantiated_actions) == 3
assert isinstance(instantiated_actions[0], RemoteAction)
assert instantiated_actions[0].name() == "random_name"
assert isinstance(instantiated_actions[1], ActionUtterTemplate)
assert instantiated_actions[1].name() == "utter_test"
assert isinstance(instantiated_actions[2], ActionRetrieveResponse)
assert instantiated_actions[2].name() == "respond_test"
def test_domain_action_instantiation():
domain = Domain(
intents={},
entities=[],
slots=[],
templates={},
action_names=["my_module.ActionTest", "utter_test", "respond_test"],
forms=[],
)
instantiated_actions = domain.actions(None)
assert len(instantiated_actions) == 14
assert instantiated_actions[0].name() == ACTION_LISTEN_NAME
assert instantiated_actions[1].name() == ACTION_RESTART_NAME
assert instantiated_actions[2].name() == ACTION_SESSION_START_NAME
assert instantiated_actions[3].name() == ACTION_DEFAULT_FALLBACK_NAME
assert instantiated_actions[4].name() == ACTION_DEACTIVATE_FORM_NAME
assert instantiated_actions[5].name() == ACTION_REVERT_FALLBACK_EVENTS_NAME
assert instantiated_actions[6].name() == ACTION_DEFAULT_ASK_AFFIRMATION_NAME
assert instantiated_actions[7].name() == ACTION_DEFAULT_ASK_REPHRASE_NAME
assert instantiated_actions[8].name() == ACTION_TWO_STAGE_FALLBACK_NAME
assert instantiated_actions[9].name() == ACTION_BACK_NAME
assert instantiated_actions[10].name() == RULE_SNIPPET_ACTION_NAME
assert instantiated_actions[11].name() == "my_module.ActionTest"
assert instantiated_actions[12].name() == "utter_test"
assert instantiated_actions[13].name() == "respond_test"
async def test_remote_action_runs(
default_channel, default_nlg, default_tracker, default_domain
):
endpoint = EndpointConfig("https://example.com/webhooks/actions")
remote_action = action.RemoteAction("my_action", endpoint)
with aioresponses() as mocked:
mocked.post(
"https://example.com/webhooks/actions",
payload={"events": [], "responses": []},
)
await remote_action.run(
default_channel, default_nlg, default_tracker, default_domain
)
r = latest_request(mocked, "post", "https://example.com/webhooks/actions")
assert r
assert json_of_latest_request(r) == {
"domain": default_domain.as_dict(),
"next_action": "my_action",
"sender_id": "my-sender",
"version": rasa.__version__,
"tracker": {
"latest_message": {
"entities": [],
"intent": {},
"text": None,
"message_id": None,
"metadata": {},
},
"active_form": {},
"latest_action_name": None,
"sender_id": "my-sender",
"paused": False,
"latest_event_time": None,
"followup_action": "action_listen",
"slots": {"name": None},
"events": [],
"latest_input_channel": None,
},
}
async def test_remote_action_logs_events(
default_channel, default_nlg, default_tracker, default_domain
):
endpoint = EndpointConfig("https://example.com/webhooks/actions")
remote_action = action.RemoteAction("my_action", endpoint)
response = {
"events": [{"event": "slot", "value": "rasa", "name": "name"}],
"responses": [
{
"text": "test text",
"template": None,
"buttons": [{"title": "cheap", "payload": "cheap"}],
},
{"template": "utter_greet"},
],
}
with aioresponses() as mocked:
mocked.post("https://example.com/webhooks/actions", payload=response)
events = await remote_action.run(
default_channel, default_nlg, default_tracker, default_domain
)
r = latest_request(mocked, "post", "https://example.com/webhooks/actions")
assert r
assert json_of_latest_request(r) == {
"domain": default_domain.as_dict(),
"next_action": "my_action",
"sender_id": "my-sender",
"version": rasa.__version__,
"tracker": {
"latest_message": {
"entities": [],
"intent": {},
"text": None,
"message_id": None,
"metadata": {},
},
"active_form": {},
"latest_action_name": None,
"sender_id": "my-sender",
"paused": False,
"followup_action": "action_listen",
"latest_event_time": None,
"slots": {"name": None},
"events": [],
"latest_input_channel": None,
},
}
assert len(events) == 3 # first two events are bot utterances
assert events[0] == BotUttered(
"test text", {"buttons": [{"title": "cheap", "payload": "cheap"}]}
)
assert events[1] == BotUttered(
"hey there None!", metadata={"template_name": "utter_greet"}
)
assert events[2] == SlotSet("name", "rasa")
async def test_remote_action_utterances_with_none_values(
default_channel, default_tracker, default_domain
):
endpoint = EndpointConfig("https://example.com/webhooks/actions")
remote_action = action.RemoteAction("my_action", endpoint)
response = {
"events": [
{"event": "form", "name": "restaurant_form", "timestamp": None},
{
"event": "slot",
"timestamp": None,
"name": "requested_slot",
"value": "cuisine",
},
],
"responses": [
{
"text": None,
"buttons": None,
"elements": [],
"custom": None,
"template": "utter_ask_cuisine",
"image": None,
"attachment": None,
}
],
}
nlg = TemplatedNaturalLanguageGenerator(
{"utter_ask_cuisine": [{"text": "what dou want to eat?"}]}
)
with aioresponses() as mocked:
mocked.post("https://example.com/webhooks/actions", payload=response)
events = await remote_action.run(
default_channel, nlg, default_tracker, default_domain
)
assert events == [
BotUttered(
"what dou want to eat?", metadata={"template_name": "utter_ask_cuisine"}
),
Form("restaurant_form"),
SlotSet("requested_slot", "cuisine"),
]
async def test_remote_action_without_endpoint(
default_channel, default_nlg, default_tracker, default_domain
):
remote_action = action.RemoteAction("my_action", None)
with pytest.raises(Exception) as execinfo:
await remote_action.run(
default_channel, default_nlg, default_tracker, default_domain
)
assert "Failed to execute custom action." in str(execinfo.value)
async def test_remote_action_endpoint_not_running(
default_channel, default_nlg, default_tracker, default_domain
):
endpoint = EndpointConfig("https://example.com/webhooks/actions")
remote_action = action.RemoteAction("my_action", endpoint)
with pytest.raises(Exception) as execinfo:
await remote_action.run(
default_channel, default_nlg, default_tracker, default_domain
)
assert "Failed to execute custom action." in str(execinfo.value)
async def test_remote_action_endpoint_responds_500(
default_channel, default_nlg, default_tracker, default_domain
):
endpoint = EndpointConfig("https://example.com/webhooks/actions")
remote_action = action.RemoteAction("my_action", endpoint)
with aioresponses() as mocked:
mocked.post("https://example.com/webhooks/actions", status=500)
with pytest.raises(Exception) as execinfo:
await remote_action.run(
default_channel, default_nlg, default_tracker, default_domain
)
assert "Failed to execute custom action." in str(execinfo.value)
async def test_remote_action_endpoint_responds_400(
default_channel, default_nlg, default_tracker, default_domain
):
endpoint = EndpointConfig("https://example.com/webhooks/actions")
remote_action = action.RemoteAction("my_action", endpoint)
with aioresponses() as mocked:
# noinspection PyTypeChecker
mocked.post(
"https://example.com/webhooks/actions",
exception=ClientResponseError(400, None, '{"action_name": "my_action"}'),
)
with pytest.raises(Exception) as execinfo:
await remote_action.run(
default_channel, default_nlg, default_tracker, default_domain
)
assert execinfo.type == ActionExecutionRejection
assert "Custom action 'my_action' rejected to run" in str(execinfo.value)
async def test_action_utter_retrieved_response(
default_channel, default_nlg, default_tracker, default_domain
):
from rasa.core.channels.channel import UserMessage
action_name = "respond_chitchat"
default_tracker.latest_message = UserMessage(
"Who are you?",
parse_data={
"response_selector": {
"chitchat": {
"response": {"name": "I am a bot."},
"full_retrieval_intent": "chitchat/ask_name",
}
}
},
)
events = await ActionRetrieveResponse(action_name).run(
default_channel, default_nlg, default_tracker, default_domain
)
assert events[0].as_dict().get("text") == BotUttered("I am a bot.").as_dict().get(
"text"
)
assert (
events[0].as_dict().get("metadata").get("template_name") == "chitchat/ask_name"
)
async def test_action_utter_default_retrieved_response(
default_channel, default_nlg, default_tracker, default_domain
):
from rasa.core.channels.channel import UserMessage
action_name = "respond_chitchat"
default_tracker.latest_message = UserMessage(
"Who are you?",
parse_data={
"response_selector": {
"default": {
"response": {"name": "I am a bot."},
"full_retrieval_intent": "chitchat/ask_name",
}
}
},
)
events = await ActionRetrieveResponse(action_name).run(
default_channel, default_nlg, default_tracker, default_domain
)
assert events[0].as_dict().get("text") == BotUttered("I am a bot.").as_dict().get(
"text"
)
async def test_action_utter_retrieved_empty_response(
default_channel, default_nlg, default_tracker, default_domain
):
from rasa.core.channels.channel import UserMessage
action_name = "respond_chitchat"
default_tracker.latest_message = UserMessage(
"Who are you?",
parse_data={
"response_selector": {
"dummy": {
"response": {"name": "I am a bot."},
"full_retrieval_intent": "chitchat/ask_name",
}
}
},
)
events = await ActionRetrieveResponse(action_name).run(
default_channel, default_nlg, default_tracker, default_domain
)
assert events == []
async def test_action_utter_template(
default_channel, default_nlg, default_tracker, default_domain
):
events = await ActionUtterTemplate("utter_channel").run(
default_channel, default_nlg, default_tracker, default_domain
)
assert events == [
BotUttered(
"this is a default channel", metadata={"template_name": "utter_channel"}
)
]
async def test_action_utter_template_unknown_template(
default_channel, default_nlg, default_tracker, default_domain
):
events = await ActionUtterTemplate("utter_unknown").run(
default_channel, default_nlg, default_tracker, default_domain
)
assert events == []
async def test_action_utter_template_with_buttons(
default_channel, template_nlg, template_sender_tracker, default_domain
):
events = await ActionUtterTemplate("utter_buttons").run(
default_channel, template_nlg, template_sender_tracker, default_domain
)
assert events == [
BotUttered(
"button message",
{
"buttons": [
{"payload": "button1", "title": "button1"},
{"payload": "button2", "title": "button2"},
]
},
metadata={"template_name": "utter_buttons"},
)
]
async def test_action_utter_template_invalid_template(
default_channel, template_nlg, template_sender_tracker, default_domain
):
events = await ActionUtterTemplate("utter_invalid").run(
default_channel, template_nlg, template_sender_tracker, default_domain
)
assert len(events) == 1
assert isinstance(events[0], BotUttered)
assert events[0].text.startswith("a template referencing an invalid {variable}.")
async def test_action_utter_template_channel_specific(
default_nlg, default_tracker, default_domain
):
from rasa.core.channels.slack import SlackBot
output_channel = SlackBot("DummyToken", "General")
events = await ActionUtterTemplate("utter_channel").run(
output_channel, default_nlg, default_tracker, default_domain
)
assert events == [
BotUttered(
"you're talking to me on slack!",
metadata={"channel": "slack", "template_name": "utter_channel"},
)
]
async def test_action_back(
default_channel, template_nlg, template_sender_tracker, default_domain
):
events = await ActionBack().run(
default_channel, template_nlg, template_sender_tracker, default_domain
)
assert events == [
BotUttered("backing up...", metadata={"template_name": "utter_back"}),
UserUtteranceReverted(),
UserUtteranceReverted(),
]
async def test_action_restart(
default_channel, template_nlg, template_sender_tracker, default_domain
):
events = await ActionRestart().run(
default_channel, template_nlg, template_sender_tracker, default_domain
)
assert events == [
BotUttered(
"congrats, you've restarted me!",
metadata={"template_name": "utter_restart"},
),
Restarted(),
]
async def test_action_session_start_without_slots(
default_channel: CollectingOutputChannel,
template_nlg: TemplatedNaturalLanguageGenerator,
template_sender_tracker: DialogueStateTracker,
default_domain: Domain,
):
events = await ActionSessionStart().run(
default_channel, template_nlg, template_sender_tracker, default_domain
)
assert events == [SessionStarted(), ActionExecuted(ACTION_LISTEN_NAME)]
@pytest.mark.parametrize(
"session_config, expected_events",
[
(
SessionConfig(123, True),
[
SessionStarted(),
SlotSet("my_slot", "value"),
SlotSet("another-slot", "value2"),
ActionExecuted(action_name=ACTION_LISTEN_NAME),
],
),
(
SessionConfig(123, False),
[SessionStarted(), ActionExecuted(action_name=ACTION_LISTEN_NAME)],
),
],
)
async def test_action_session_start_with_slots(
default_channel: CollectingOutputChannel,
template_nlg: TemplatedNaturalLanguageGenerator,
template_sender_tracker: DialogueStateTracker,
default_domain: Domain,
session_config: SessionConfig,
expected_events: List[Event],
):
# set a few slots on tracker
slot_set_event_1 = SlotSet("my_slot", "value")
slot_set_event_2 = SlotSet("another-slot", "value2")
for event in [slot_set_event_1, slot_set_event_2]:
template_sender_tracker.update(event)
default_domain.session_config = session_config
events = await ActionSessionStart().run(
default_channel, template_nlg, template_sender_tracker, default_domain
)
assert events == expected_events
# make sure that the list of events has ascending timestamps
assert sorted(events, key=lambda x: x.timestamp) == events
async def test_applied_events_after_action_session_start(
default_channel: CollectingOutputChannel,
template_nlg: TemplatedNaturalLanguageGenerator,
):
slot_set = SlotSet("my_slot", "value")
events = [
slot_set,
ActionExecuted(ACTION_LISTEN_NAME),
# User triggers a restart manually by triggering the intent
UserUttered(
text=f"/{USER_INTENT_SESSION_START}",
intent={"name": USER_INTENT_SESSION_START},
),
]
tracker = DialogueStateTracker.from_events("🕵️♀️", events)
# Mapping Policy kicks in and runs the session restart action
events = await ActionSessionStart().run(
default_channel, template_nlg, tracker, Domain.empty()
)
for event in events:
tracker.update(event)
assert tracker.applied_events() == [slot_set, ActionExecuted(ACTION_LISTEN_NAME)]
async def test_action_default_fallback(
default_channel, default_nlg, default_tracker, default_domain
):
events = await ActionDefaultFallback().run(
default_channel, default_nlg, default_tracker, default_domain
)
assert events == [
BotUttered(
"sorry, I didn't get that, can you rephrase it?",
metadata={"template_name": "utter_default"},
),
UserUtteranceReverted(),
]
async def test_action_default_ask_affirmation(
default_channel, default_nlg, default_tracker, default_domain
):
events = await ActionDefaultAskAffirmation().run(
default_channel, default_nlg, default_tracker, default_domain
)
assert events == [
BotUttered(
"Did you mean 'None'?",
{
"buttons": [
{"title": "Yes", "payload": "/None"},
{"title": "No", "payload": "/out_of_scope"},
]
},
{"template_name": "action_default_ask_affirmation"},
)
]
async def test_action_default_ask_rephrase(
default_channel, template_nlg, template_sender_tracker, default_domain
):
events = await ActionDefaultAskRephrase().run(
default_channel, template_nlg, template_sender_tracker, default_domain
)
assert events == [
BotUttered(
"can you rephrase that?", metadata={"template_name": "utter_ask_rephrase"}
)
]
def test_get_form_action():
form_action_name = "my_business_logic"
domain = Domain.from_yaml(
f"""
actions:
- my_action
forms:
- {form_action_name}:
my_slot:
- type: from_text
"""
)
actual = domain.action_for_name(form_action_name, None)
assert isinstance(actual, FormAction)
def test_get_form_action_without_slot_mapping():
form_action_name = "my_business_logic"
domain = Domain.from_yaml(
f"""
actions:
- my_action
forms:
- {form_action_name}
"""
)
actual = domain.action_for_name(form_action_name, None)
assert isinstance(actual, RemoteAction)
def test_get_form_action_if_not_in_forms():
form_action_name = "my_business_logic"
domain = Domain.from_yaml(
"""
actions:
- my_action
"""
)
with pytest.raises(NameError):
assert not domain.action_for_name(form_action_name, None)
| 31.58078
| 87
| 0.637707
|
d384ebaa2e9457c9c168c9be12cd49e3557180a8
| 3,915
|
py
|
Python
|
tests/test_algorithms_authenticated.py
|
Algorithmism/quantopian-tools
|
a57afa7d16286f502e236e32525fbde552028627
|
[
"BSD-3-Clause"
] | 1
|
2022-01-31T10:53:53.000Z
|
2022-01-31T10:53:53.000Z
|
tests/test_algorithms_authenticated.py
|
Algorithmism/quantopian-tools
|
a57afa7d16286f502e236e32525fbde552028627
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_algorithms_authenticated.py
|
Algorithmism/quantopian-tools
|
a57afa7d16286f502e236e32525fbde552028627
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import print_function, absolute_import, division, unicode_literals
import datetime
import pytest
from quantopian_tools import algorithms
skip_auth_tests = pytest.mark.skip(reason="no way of currently testing this")
@skip_auth_tests
def test_get_algorithm_ids(authenticated_browser):
assert algorithms.get_algorithm_ids()
@skip_auth_tests
def test_get_algorithm(authenticated_browser):
ids = algorithms.get_algorithm_ids()
assert len(ids) > 1
assert sorted(algorithms.get_algorithm(ids[-1]).keys()) == ['code', 'id', 'title']
@skip_auth_tests
def test_save_algorithm(authenticated_browser):
ids = algorithms.get_algorithm_ids()
assert len(ids) > 1
algo_id = ids[-1]
algo = algorithms.get_algorithm(algo_id)
assert sorted(algo.keys()) == ['code', 'id', 'title']
algo['title'] = 'Hello World Algorithm ({})'.format(datetime.datetime.now().isoformat())
assert algorithms.save_algorithm(algo)
assert algorithms.get_algorithm(algo_id) == algo
@skip_auth_tests
def test_new_algorithm(authenticated_browser):
title = 'Test ({})'.format(datetime.datetime.now().isoformat())
algorithm_id = algorithms.new_algorithm(title)
algorithm = algorithms.get_algorithm(algorithm_id)
assert algorithm['title'] == title
assert algorithms.delete_algorithm(algorithm)
@skip_auth_tests
def test_validate_algorithm_success(authenticated_browser):
algorithm = {
'id': '57b11da5187a9054fb00041e',
'code': "def initialize(context): pass\n"
}
assert algorithms.validate_algorithm(algorithm,
start_date=datetime.date(2016, 1, 1),
end_date=datetime.date(2016, 1, 2)) == (True, [
{
'errorcode': None,
'extra': {},
'line': None,
'name': 'Tier1.test_has_before_trading_start',
'offset': None,
'passed': True,
'trace': None,
'type': 'unit'
},
{
'errorcode': None,
'extra': {},
'line': None,
'name': 'Tier1.test_has_handle_data',
'offset': None,
'passed': True,
'trace': None,
'type': 'unit'
},
{
'errorcode': None,
'extra': {},
'line': None,
'name': 'Tier1.test_has_initialize_method',
'offset': None,
'passed': True,
'trace': None,
'type': 'unit'
}
])
@skip_auth_tests
def test_validate_algorithm_failure(authenticated_browser):
algorithm = {
'id': '57b11da5187a9054fb00041e',
'code': "def foo(context): pass\n"
}
assert algorithms.validate_algorithm(algorithm,
start_date=datetime.date(2016, 1, 1),
end_date=datetime.date(2016, 1, 2)) == (False, [
{
'errorcode': None,
'extra': {},
'line': None,
'name': 'Tier1.test_has_before_trading_start',
'offset': None,
'passed': True,
'trace': None,
'type': 'unit'
},
{
'errorcode': None,
'extra': {},
'line': None,
'name': 'Tier1.test_has_handle_data',
'offset': None,
'passed': True,
'trace': None,
'type': 'unit'
},
{
'errorcode': 14,
'extra': {'reason': 'None is not callable', 'type': 'NotCallable'},
'line': None,
'name': 'Tier1.test_has_initialize_method',
'offset': None,
'passed': False,
'trace': 'InvalidInitializeMethod: 0014 None is not callable\n',
'type': 'unit'
}])
| 30.348837
| 92
| 0.547126
|
6ab38697889f9e01ac38643f92545223c34c21eb
| 154
|
py
|
Python
|
manatsum/urls.py
|
mashabow/manatsum
|
0497a39537f35bdcb236f3b341de7145ebdc1dcb
|
[
"MIT"
] | 2
|
2015-08-25T18:10:42.000Z
|
2017-01-12T00:00:57.000Z
|
manatsum/urls.py
|
mashabow/manatsum
|
0497a39537f35bdcb236f3b341de7145ebdc1dcb
|
[
"MIT"
] | null | null | null |
manatsum/urls.py
|
mashabow/manatsum
|
0497a39537f35bdcb236f3b341de7145ebdc1dcb
|
[
"MIT"
] | null | null | null |
# coding: utf-8
from django.conf.urls import patterns, include, url
import app.urls
urlpatterns = patterns(
'',
url(r'^', include(app.urls)),
)
| 15.4
| 51
| 0.662338
|
47f4f9cea94084795de65738b35f9f184f7c912f
| 111
|
py
|
Python
|
theano/d3viz/__init__.py
|
MarcCote/Theano
|
f0d293161a624ccf10c60ee8405a92e7d321151a
|
[
"BSD-3-Clause"
] | 95
|
2019-05-14T20:55:26.000Z
|
2022-03-26T13:32:42.000Z
|
theano/d3viz/__init__.py
|
EnjoyLifeFund/Debian_py36_packages
|
1985d4c73fabd5f08f54b922e73a9306e09c77a5
|
[
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | 7
|
2019-11-25T08:24:47.000Z
|
2021-09-12T13:29:14.000Z
|
theano/d3viz/__init__.py
|
EnjoyLifeFund/Debian_py36_packages
|
1985d4c73fabd5f08f54b922e73a9306e09c77a5
|
[
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | 30
|
2016-10-27T21:59:00.000Z
|
2021-02-20T09:55:14.000Z
|
from __future__ import absolute_import, print_function, division
from theano.d3viz.d3viz import d3viz, d3write
| 37
| 64
| 0.855856
|
c342423c7def03062d78f01e600efda75944a649
| 3,408
|
py
|
Python
|
pytorch_toolkit/nncf/nncf/debug.py
|
morkovka1337/openvino_training_extensions
|
846db45c264d6b061505213f51763520b9432ba9
|
[
"Apache-2.0"
] | 3
|
2020-12-29T02:47:32.000Z
|
2021-11-12T08:12:51.000Z
|
pytorch_toolkit/nncf/nncf/debug.py
|
morkovka1337/openvino_training_extensions
|
846db45c264d6b061505213f51763520b9432ba9
|
[
"Apache-2.0"
] | 23
|
2020-09-25T22:41:48.000Z
|
2021-12-13T20:43:37.000Z
|
pytorch_toolkit/nncf/nncf/debug.py
|
morkovka1337/openvino_training_extensions
|
846db45c264d6b061505213f51763520b9432ba9
|
[
"Apache-2.0"
] | 1
|
2021-03-12T10:08:44.000Z
|
2021-03-12T10:08:44.000Z
|
"""
Copyright (c) 2019-2020 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging
import warnings
from typing import List, Dict
from torch.nn import Module
from nncf.nncf_logger import logger as nncf_logger
DEBUG_LOG_DIR = "/tmp"
def is_debug():
return nncf_logger.getEffectiveLevel() == logging.DEBUG
def set_debug_log_dir(dir_: str):
global DEBUG_LOG_DIR
DEBUG_LOG_DIR = dir_
class CallCountTracker:
def __init__(self, name):
self.name = name
self.call_counts = {}
def init_with_key_list(self, key_list: List):
self.call_counts = {key: 0 for key in key_list}
nncf_logger.debug("{} tracker: registered {} entries".format(self.name, len(self.call_counts)))
def register_call(self, key, counts=None):
if key not in self.call_counts:
warnings.warn("DEBUG: {} tracker: called an unregistered module: {}".format(self.name, key))
return
if counts is None:
self.call_counts[key] += 1
else:
self.call_counts[key] = counts
def get_never_called_keys(self) -> List[str]:
return [k for k, v in self.call_counts.items() if v == 0]
def get_overcalled_keys_with_call_counts(self) -> Dict[str, int]:
return {k: v for k, v in self.call_counts.items() if v > 1}
def get_total_call_count(self) -> int:
if self.call_counts:
return sum(self.call_counts.values())
return 0
def reset(self):
for key in self.call_counts:
self.call_counts[key] = 0
class DebugInterface:
def pre_forward_actions(self, module: Module):
raise NotImplementedError
def post_forward_actions(self, module: Module):
raise NotImplementedError
def init_actual(self, owner_model):
raise NotImplementedError
def debuggable_forward(forward_func):
def decorated(self, *args, **kwargs):
if self.debug_interface is not None:
self.debug_interface.pre_forward_actions(module=self)
retval = forward_func(self, *args, **kwargs)
if self.debug_interface is not None:
self.debug_interface.post_forward_actions(module=self)
return retval
return decorated
class CombinedDebugInterface(DebugInterface):
def __init__(self):
self._interfaces = [] # type: List[DebugInterface]
def add_interface(self, interface: 'DebugInterface'):
self._interfaces.append(interface)
def init_actual(self, owner_model: 'NNCFNetwork'):
for interface in self._interfaces:
interface.init_actual(owner_model)
def pre_forward_actions(self, module: Module):
for interface in self._interfaces:
interface.pre_forward_actions(module)
def post_forward_actions(self, module: Module):
for interface in self._interfaces:
interface.post_forward_actions(module)
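# --- usage sketch (not part of the original module) ---
# A minimal illustration of how debuggable_forward and CombinedDebugInterface
# compose; everything below is hypothetical example code:
if __name__ == '__main__':
    import torch

    class PrintingInterface(DebugInterface):
        def init_actual(self, owner_model):
            pass

        def pre_forward_actions(self, module: Module):
            print('pre-forward on', type(module).__name__)

        def post_forward_actions(self, module: Module):
            print('post-forward on', type(module).__name__)

    class ToyModel(Module):
        def __init__(self):
            super().__init__()
            self.linear = torch.nn.Linear(4, 2)
            self.debug_interface = CombinedDebugInterface()
            self.debug_interface.add_interface(PrintingInterface())

        @debuggable_forward
        def forward(self, x):
            return self.linear(x)

    ToyModel()(torch.zeros(1, 4))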
| 31.555556
| 104
| 0.690141
|
bc45d3f6e5901f5d9d89cde4a78ec0fc38fb9384
| 36,978
|
py
|
Python
|
ironic/common/neutron.py
|
inmotionhosting/ironic
|
1c7b5f82592e23ab66dddca56e0b059d3cb0710b
|
[
"Apache-2.0"
] | null | null | null |
ironic/common/neutron.py
|
inmotionhosting/ironic
|
1c7b5f82592e23ab66dddca56e0b059d3cb0710b
|
[
"Apache-2.0"
] | null | null | null |
ironic/common/neutron.py
|
inmotionhosting/ironic
|
1c7b5f82592e23ab66dddca56e0b059d3cb0710b
|
[
"Apache-2.0"
] | null | null | null |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import ipaddress
import openstack
from openstack.connection import exceptions as openstack_exc
from oslo_log import log
import retrying
from ironic.api.controllers.v1 import utils as api_utils
from ironic.common import context as ironic_context
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import keystone
from ironic.common.pxe_utils import DHCP_CLIENT_ID
from ironic.conf import CONF
from ironic import objects
LOG = log.getLogger(__name__)
# TODO(pas-ha) remove in Rocky, until then it is a default
# for CONF.neutron.url in noauth case when endpoint_override is not set
DEFAULT_NEUTRON_URL = 'http://%s:9696' % CONF.my_ip
_NEUTRON_SESSION = None
VNIC_BAREMETAL = 'baremetal'
VNIC_SMARTNIC = 'smart-nic'
PHYSNET_PARAM_NAME = 'provider:physical_network'
"""Name of the neutron network API physical network parameter."""
def _get_neutron_session():
global _NEUTRON_SESSION
if not _NEUTRON_SESSION:
_NEUTRON_SESSION = keystone.get_session(
'neutron',
timeout=CONF.neutron.timeout)
return _NEUTRON_SESSION
def get_client(token=None, context=None, auth_from_config=False):
"""Retrieve a neutron client connection.
    :param token: authentication token; used to build a request context
        when ``context`` is not supplied
    :param context: request context,
        instance of ironic.common.context.RequestContext
:param auth_from_config: (boolean) When True, use auth values from
conf parameters
:returns: A neutron client.
"""
if not context:
context = ironic_context.RequestContext(auth_token=token)
session = _get_neutron_session()
service_auth = keystone.get_auth('neutron')
endpoint = keystone.get_endpoint('neutron', session=session,
auth=service_auth)
user_auth = None
if (not auth_from_config and CONF.neutron.auth_type != 'none'
and context.auth_token):
user_auth = keystone.get_service_auth(context, endpoint, service_auth)
sess = keystone.get_session('neutron', timeout=CONF.neutron.timeout,
auth=user_auth or service_auth)
conn = openstack.connection.Connection(session=sess, oslo_conf=CONF)
return conn.global_request(context.global_id).network
def update_neutron_port(context, port_id, attrs, client=None):
"""Undate a neutron port
Uses neutron client from conf client to update a neutron client
an unbound state.
:param context: request context,
instance of ironic.common.context.RequestContext
:param port_id: Neutron port ID.
:param attrs: The attributes to update on the port
:param client: Optional Neutron client
"""
if not client:
# verify that user can see the port before updating it
get_client(context=context).get_port(port_id)
# Set user_auth=False to ensure auth values from ironic.conf is used
# prevents issues where a non-admin user is not allowed to manage
# Neutron ports.
client = get_client(context=context, auth_from_config=True)
attrs = attrs.get('port', attrs)
return client.update_port(port_id, **attrs)
def unbind_neutron_port(port_id, client=None, context=None, reset_mac=True):
"""Unbind a neutron port
Remove a neutron port's binding profile and host ID so that it returns to
an unbound state.
:param port_id: Neutron port ID.
    :param client: Optional Neutron client object.
:param context: request context
:param reset_mac: reset mac address
:type context: ironic.common.context.RequestContext
:raises: NetworkError
"""
attrs_unbind = {'binding:host_id': '', 'binding:profile': {}}
attrs_reset_mac = {'mac_address': None}
try:
update_neutron_port(context, port_id, attrs_unbind, client)
# NOTE(hjensas): We need to reset the mac address in a separate step.
# Exception PortBound will be raised by neutron as it refuses to
# update the mac address of a bound port if we attempt to unbind and
# reset the mac in the same call.
if reset_mac:
update_neutron_port(context, port_id, attrs_reset_mac, client)
# NOTE(vsaienko): Ignore if port was deleted before calling vif detach.
except openstack_exc.ResourceNotFound:
LOG.info('Port %s was not found while unbinding.', port_id)
except openstack_exc.OpenStackCloudException as e:
msg = (_('Unable to clear binding profile for '
'neutron port %(port_id)s. Error: '
'%(err)s') % {'port_id': port_id, 'err': e})
LOG.exception(msg)
raise exception.NetworkError(msg)
def update_port_address(port_id, address, context=None):
"""Update a port's mac address.
:param port_id: Neutron port id.
:param address: new MAC address.
:param context: request context
:type context: ironic.common.context.RequestContext
:raises: FailedToUpdateMacOnPort
"""
client = get_client(context=context)
port_attrs = {'mac_address': address}
try:
msg = (_("Failed to get the current binding on Neutron "
"port %s.") % port_id)
port = client.get_port(port_id)
binding_host_id = port.get('binding:host_id')
binding_profile = port.get('binding:profile')
if binding_host_id:
            # Unbind the port before we update its MAC address, because you
            # can't change a bound port's MAC address.
msg = (_("Failed to remove the current binding from "
"Neutron port %s, while updating its MAC "
"address.") % port_id)
unbind_neutron_port(port_id, context=context)
msg = (_("Failed to update MAC address on Neutron port %s.") % port_id)
update_neutron_port(context, port_id, port_attrs)
# Restore original binding:profile and host_id
if binding_host_id:
msg = (_("Failed to update binding:host_id and profile on Neutron "
"port %s.") % port_id)
port_attrs = {'binding:host_id': binding_host_id,
'binding:profile': binding_profile}
update_neutron_port(context, port_id, port_attrs)
except (openstack_exc.OpenStackCloudException, exception.NetworkError):
LOG.exception(msg)
raise exception.FailedToUpdateMacOnPort(port_id=port_id)
def _verify_security_groups(security_groups, client):
"""Verify that the security groups exist.
:param security_groups: a list of security group UUIDs; may be None or
empty
:param client: Neutron client
:raises: NetworkError
"""
if not security_groups:
return
try:
neutron_sec_groups = set(
x.id for x in client.security_groups(id=security_groups))
except openstack_exc.OpenStackCloudException as e:
msg = (_("Could not retrieve security groups from neutron: %(exc)s") %
{'exc': e})
LOG.exception(msg)
raise exception.NetworkError(msg)
if set(security_groups).issubset(neutron_sec_groups):
return
missing_sec_groups = set(security_groups).difference(neutron_sec_groups)
msg = (_('Could not find these security groups (specified via ironic '
'config) in neutron: %(ir-sg)s')
% {'ir-sg': list(missing_sec_groups)})
LOG.error(msg)
raise exception.NetworkError(msg)
def _add_ip_addresses_for_ipv6_stateful(context, port, client):
"""Add additional IP addresses to the ipv6 stateful neutron port
When network booting with DHCPv6-stateful we cannot control the CLID/IAID
used by the different clients, UEFI, iPXE, Ironic IPA etc. Multiple
IP address reservation is required in the DHCPv6 server to avoid
NoAddrsAvail issues.
    :param context: request context
    :param port: A neutron port
    :param client: Neutron client
"""
fixed_ips = port.fixed_ips
if (not fixed_ips
or ipaddress.ip_address(fixed_ips[0]['ip_address']).version != 6):
return
subnet = client.get_subnet(fixed_ips[0]['subnet_id'])
if subnet and subnet.ipv6_address_mode == 'dhcpv6-stateful':
for i in range(1, CONF.neutron.dhcpv6_stateful_address_count):
fixed_ips.append({'subnet_id': subnet['id']})
attrs = {'fixed_ips': fixed_ips}
update_neutron_port(context, port.id, attrs, client=client)
def add_ports_to_network(task, network_uuid, security_groups=None):
"""Create neutron ports to boot the ramdisk.
Create neutron ports for each pxe_enabled port on task.node to boot
the ramdisk.
If the config option 'neutron.add_all_ports' is set, neutron ports
for non-pxe-enabled ports are also created -- these neutron ports
will not have any assigned IP addresses.
:param task: a TaskManager instance.
:param network_uuid: UUID of a neutron network where ports will be
created.
:param security_groups: List of Security Groups UUIDs to be used for
network.
:raises: NetworkError
:returns: a dictionary in the form {port.uuid: neutron_port['id']}
"""
client = get_client(context=task.context)
node = task.node
add_all_ports = CONF.neutron.add_all_ports
# If Security Groups are specified, verify that they exist
_verify_security_groups(security_groups, client)
LOG.debug('For node %(node)s, creating neutron ports on network '
'%(network_uuid)s using %(net_iface)s network interface.',
{'net_iface': task.driver.network.__class__.__name__,
'node': node.uuid, 'network_uuid': network_uuid})
attrs = {'network_id': network_uuid,
'admin_state_up': True,
'binding:vnic_type': VNIC_BAREMETAL,
}
# separate out fields that can only be updated by admins
update_attrs = {'binding:host_id': node.uuid,
'device_owner': 'baremetal:none',
}
if security_groups:
attrs['security_groups'] = security_groups
# Since instance_uuid will not be available during cleaning
# operations, we need to check that and populate them only when
# available
attrs['device_id'] = node.instance_uuid or node.uuid
ports = {}
failures = []
portmap = get_node_portmap(task)
if not add_all_ports:
ports_to_create = [p for p in task.ports if p.pxe_enabled]
else:
ports_to_create = task.ports
if not ports_to_create:
pxe_enabled = 'PXE-enabled ' if not add_all_ports else ''
raise exception.NetworkError(_(
"No available %(enabled)sports on node %(node)s.") %
{'enabled': pxe_enabled, 'node': node.uuid})
for ironic_port in ports_to_create:
# Start with a clean state for each port
port_attrs = copy.deepcopy(attrs)
update_port_attrs = copy.deepcopy(update_attrs)
# Skip ports that are missing required information for deploy.
if not validate_port_info(node, ironic_port):
failures.append(ironic_port.uuid)
continue
update_port_attrs['mac_address'] = ironic_port.address
binding_profile = {'local_link_information':
[portmap[ironic_port.uuid]]}
update_port_attrs['binding:profile'] = binding_profile
if not ironic_port.pxe_enabled:
LOG.debug("Adding port %(port)s to network %(net)s for "
"provisioning without an IP allocation.",
{'port': ironic_port.uuid, 'net': network_uuid})
port_attrs['fixed_ips'] = []
is_smart_nic = is_smartnic_port(ironic_port)
if is_smart_nic:
link_info = binding_profile['local_link_information'][0]
LOG.debug('Setting hostname as host_id in case of Smart NIC, '
'port %(port_id)s, hostname %(hostname)s',
{'port_id': ironic_port.uuid,
'hostname': link_info['hostname']})
update_port_attrs['binding:host_id'] = link_info['hostname']
# TODO(hamdyk): use portbindings.VNIC_SMARTNIC from neutron-lib
port_attrs['binding:vnic_type'] = VNIC_SMARTNIC
client_id = ironic_port.extra.get('client-id')
if client_id:
extra_dhcp_opts = port_attrs.get('extra_dhcp_opts', [])
extra_dhcp_opts.append(
{'opt_name': DHCP_CLIENT_ID, 'opt_value': client_id})
port_attrs['extra_dhcp_opts'] = extra_dhcp_opts
try:
if is_smart_nic:
wait_for_host_agent(
client, update_port_attrs['binding:host_id'])
port = client.create_port(**port_attrs)
update_neutron_port(task.context, port.id, update_port_attrs)
if CONF.neutron.dhcpv6_stateful_address_count > 1:
_add_ip_addresses_for_ipv6_stateful(task.context, port, client)
if is_smart_nic:
wait_for_port_status(client, port.id, 'ACTIVE')
except openstack_exc.OpenStackCloudException as e:
failures.append(ironic_port.uuid)
LOG.warning("Could not create neutron port for node's "
"%(node)s port %(ir-port)s on the neutron "
"network %(net)s. %(exc)s",
{'net': network_uuid, 'node': node.uuid,
'ir-port': ironic_port.uuid, 'exc': e})
else:
ports[ironic_port.uuid] = port.id
if failures:
if len(failures) == len(ports_to_create):
rollback_ports(task, network_uuid)
raise exception.NetworkError(_(
"Failed to create neutron ports for node's %(node)s ports "
"%(ports)s.") % {'node': node.uuid, 'ports': ports_to_create})
else:
LOG.warning("Some errors were encountered when updating "
"vif_port_id for node %(node)s on "
"the following ports: %(ports)s.",
{'node': node.uuid, 'ports': failures})
else:
LOG.info('For node %(node_uuid)s in network %(net)s, successfully '
'created ports (ironic ID: neutron ID): %(ports)s.',
{'node_uuid': node.uuid, 'net': network_uuid, 'ports': ports})
return ports
def remove_ports_from_network(task, network_uuid):
"""Deletes the neutron ports created for booting the ramdisk.
:param task: a TaskManager instance.
:param network_uuid: UUID of a neutron network ports will be deleted from.
:raises: NetworkError
"""
add_all_ports = CONF.neutron.add_all_ports
if not add_all_ports:
macs = [p.address for p in task.ports if p.pxe_enabled]
else:
macs = [p.address for p in task.ports]
if macs:
params = {
'network_id': network_uuid,
'mac_address': macs,
}
LOG.debug("Removing ports on network %(net)s on node %(node)s.",
{'net': network_uuid, 'node': task.node.uuid})
remove_neutron_ports(task, params)
def remove_neutron_ports(task, params):
"""Deletes the neutron ports matched by params.
:param task: a TaskManager instance.
:param params: Dict of params to filter ports.
:raises: NetworkError
"""
client = get_client(context=task.context)
node_uuid = task.node.uuid
try:
ports = client.ports(**params)
except openstack_exc.OpenStackCloudException as e:
msg = (_('Could not get given network VIF for %(node)s '
'from neutron, possible network issue. %(exc)s') %
{'node': node_uuid, 'exc': e})
LOG.exception(msg)
raise exception.NetworkError(msg)
if not ports:
LOG.debug('No ports to remove for node %s', node_uuid)
return
for port in ports:
LOG.debug('Deleting neutron port %(vif_port_id)s of node '
'%(node_id)s.',
{'vif_port_id': port['id'], 'node_id': node_uuid})
if is_smartnic_port(port):
wait_for_host_agent(client, port['binding:host_id'])
try:
client.delete_port(port)
# NOTE(mgoddard): Ignore if the port was deleted by nova.
except openstack_exc.ResourceNotFound:
LOG.info('Port %s was not found while deleting.', port.id)
except openstack_exc.OpenStackCloudException as e:
msg = (_('Could not remove VIF %(vif)s of node %(node)s, possibly '
'a network issue: %(exc)s') %
{'vif': port.id, 'node': node_uuid, 'exc': e})
LOG.exception(msg)
raise exception.NetworkError(msg)
LOG.info('Successfully removed node %(node_uuid)s neutron ports.',
{'node_uuid': node_uuid})
def _uncidr(cidr, ipv6=False):
"""Convert CIDR network representation into network/netmask form
:param cidr: network in CIDR form
    :param ipv6: if `True`, treat `cidr` as IPv6. Note the address version
        is autodetected by ``ipaddress.ip_interface``, so this flag is
        informational only.
:returns: a tuple of network/host number in dotted
decimal notation, netmask in dotted decimal notation
"""
net = ipaddress.ip_interface(cidr).network
return str(net.network_address), str(net.netmask)
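# Doctest-style illustration (not part of the original module):
#
#     >>> _uncidr('10.0.0.0/24')
#     ('10.0.0.0', '255.255.255.0')
#     >>> _uncidr('2001:db8::/64', ipv6=True)
#     ('2001:db8::', 'ffff:ffff:ffff:ffff::')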
def get_neutron_port_data(port_id, vif_id, client=None, context=None):
"""Gather Neutron port and network configuration
Query Neutron for port and network configuration, return whatever
is available.
:param port_id: ironic port/portgroup ID.
:param vif_id: Neutron port ID.
    :param client: Optional Neutron client object.
:param context: request context
:type context: ironic.common.context.RequestContext
:raises: NetworkError
:returns: a dict holding network configuration information
associated with this ironic or Neutron port.
"""
if not client:
client = get_client(context=context)
try:
port_config = client.get_port(vif_id)
except openstack_exc.OpenStackCloudException as e:
msg = (_('Unable to get port info for %(port_id)s. Error: '
'%(err)s') % {'port_id': vif_id, 'err': e})
LOG.exception(msg)
raise exception.NetworkError(msg)
LOG.debug('Received port %(port)s data: %(info)s',
{'port': vif_id, 'info': port_config})
port_id = port_config['name'] or port_id
network_id = port_config.network_id
try:
network_config = client.get_network(network_id)
except openstack_exc.OpenStackCloudException as e:
msg = (_('Unable to get network info for %(network_id)s. Error: '
'%(err)s') % {'network_id': network_id, 'err': e})
LOG.exception(msg)
raise exception.NetworkError(msg)
LOG.debug('Received network %(network)s data: %(info)s',
{'network': network_id, 'info': network_config})
subnets_config = {}
network_data = {
'links': [
{
'id': port_id,
'type': 'vif',
'ethernet_mac_address': port_config['mac_address'],
'vif_id': port_config['id'],
'mtu': network_config['mtu']
}
],
'networks': [
]
}
for fixed_ip in port_config.get('fixed_ips', []):
subnet_id = fixed_ip['subnet_id']
try:
subnet_config = client.get_subnet(subnet_id)
LOG.debug('Received subnet %(subnet)s data: %(info)s',
{'subnet': subnet_id, 'info': subnet_config})
subnets_config[subnet_id] = subnet_config
except openstack_exc.OpenStackCloudException as e:
msg = (_('Unable to get subnet info for %(subnet_id)s. Error: '
'%(err)s') % {'subnet_id': subnet_id, 'err': e})
LOG.exception(msg)
raise exception.NetworkError(msg)
subnet_config = subnets_config[subnet_id]
subnet_network, netmask = _uncidr(
subnet_config.cidr, subnet_config.ip_version == 6)
network = {
'id': fixed_ip['subnet_id'],
'network_id': port_config['network_id'],
'type': 'ipv%s' % subnet_config['ip_version'],
'link': port_id,
'ip_address': fixed_ip['ip_address'],
'netmask': netmask,
'routes': [
]
}
# TODO(etingof): Adding default route if gateway is present.
# This is a hack, Neutron should have given us a route.
if subnet_config['gateway_ip']:
zero_addr = ('::0' if subnet_config['ip_version'] == 6
else '0.0.0.0')
route = {
'network': zero_addr,
'netmask': zero_addr,
'gateway': subnet_config['gateway_ip']
}
network['routes'].append(route)
for host_config in subnet_config['host_routes']:
subnet_network, netmask = _uncidr(
host_config['destination'],
subnet_config['ip_version'] == 6)
route = {
'network': subnet_network,
'netmask': netmask,
'gateway': host_config['nexthop']
}
network['routes'].append(route)
network_data['networks'].append(network)
return network_data
def get_node_portmap(task):
"""Extract the switch port information for the node.
The information is returned in the form of::
{
port.uuid: {
'switch_id': 'abc',
'port_id': 'Po0/1',
'other_llc_key': 'val'
}
}
:param task: a task containing the Node object.
:returns: port information as a dict
"""
portmap = {}
for port in task.ports:
portmap[port.uuid] = port.local_link_connection
return portmap
# TODO(jroll) raise InvalidParameterValue if a port doesn't have the
# necessary info? (probably)
def get_local_group_information(task, portgroup):
"""Extract the portgroup information.
The information is returned in the form of::
{
'id': portgroup.uuid,
'name': portgroup.name,
'bond_mode': portgroup.mode,
'bond_properties': {
'bond_propertyA': 'valueA',
'bond_propertyB': 'valueB',
}
}
:param task: a task containing the Node object.
:param portgroup: Ironic portgroup object to extract data for.
:returns: port group information as a dict
"""
portgroup_properties = {}
for prop, value in portgroup.properties.items():
# These properties are the bonding driver options described
# at https://www.kernel.org/doc/Documentation/networking/bonding.txt .
# cloud-init checks the same way, parameter name has to start with
# 'bond'. Keep this structure when passing properties to neutron ML2
# drivers.
key = prop if prop.startswith('bond') else 'bond_%s' % prop
portgroup_properties[key] = value
return {
'id': portgroup.uuid,
'name': portgroup.name,
'bond_mode': portgroup.mode,
'bond_properties': portgroup_properties
}
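# Illustrative: a portgroup whose ``properties`` are, say,
# {'miimon': 100, 'bond_xmit_hash_policy': 'layer3+4'} (hypothetical values)
# yields ``bond_properties`` of
# {'bond_miimon': 100, 'bond_xmit_hash_policy': 'layer3+4'}; keys that
# already start with 'bond' are kept as-is, others get the 'bond_' prefix.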
def rollback_ports(task, network_uuid):
"""Attempts to delete any ports created by cleaning/provisioning
Purposefully will not raise any exceptions so error handling can
continue.
:param task: a TaskManager instance.
:param network_uuid: UUID of a neutron network.
"""
try:
remove_ports_from_network(task, network_uuid)
except exception.NetworkError:
# Only log the error
LOG.exception('Failed to rollback port changes for '
'node %(node)s on network %(network)s',
{'node': task.node.uuid, 'network': network_uuid})
def validate_network(uuid_or_name, net_type=_('network'), context=None):
"""Check that the given network is present.
:param uuid_or_name: network UUID or name
:param net_type: human-readable network type for error messages
:param context: request context
:type context: ironic.common.context.RequestContext
:return: network UUID
:raises: MissingParameterValue if uuid_or_name is empty
:raises: NetworkError on failure to contact Neutron
:raises: InvalidParameterValue for missing or duplicated network
"""
if not uuid_or_name:
raise exception.MissingParameterValue(
_('UUID or name of %s is not set in configuration') % net_type)
client = get_client(context=context)
network = _get_network_by_uuid_or_name(client, uuid_or_name,
net_type=net_type)
return network.id
def validate_port_info(node, port):
"""Check that port contains enough information for deploy.
Neutron network interface requires that local_link_information field is
filled before we can use this port.
:param node: Ironic node object.
:param port: Ironic port object.
:returns: True if port info is valid, False otherwise.
"""
# Note(moshele): client-id in the port extra field indicates an InfiniBand
# port. In this case we don't require local_link_connection to be
# populated because the network topology is discoverable by the Infiniband
# Subnet Manager.
if port.extra.get('client-id'):
return True
if (node.network_interface == 'neutron'
and not port.local_link_connection):
LOG.warning("The local_link_connection is required for "
"'neutron' network interface and is not present "
"in the nodes %(node)s port %(port)s",
{'node': node.uuid, 'port': port.uuid})
return False
try:
api_utils.LOCAL_LINK_SMART_NIC_VALIDATOR(
'local_link_connection', port.local_link_connection)
except exception.Invalid:
valid_smart_nic = False
else:
valid_smart_nic = True
if port.is_smartnic and not valid_smart_nic:
LOG.error("Smart NIC port must have port_id and hostname in "
"local_link_connection, port: %s", port['id'])
return False
if not port.is_smartnic and valid_smart_nic:
LOG.error("Only Smart NIC ports can have port_id and hostname "
"in local_link_connection, port: %s", port['id'])
return False
return True
def _validate_agent(client, **kwargs):
"""Check that the given neutron agent is alive
:param client: Neutron client
:param kwargs: Additional parameters to pass to the neutron client
list_agents method.
    :returns: A boolean describing the agent status. If the client returns
        more than one agent, True is returned when at least one of them is
        alive.
:raises: NetworkError in case of failure contacting Neutron.
"""
try:
agents = client.agents(**kwargs)
for agent in agents:
if agent.is_alive:
return True
return False
except openstack_exc.OpenStackCloudException:
raise exception.NetworkError('Failed to contact Neutron server')
def is_smartnic_port(port_data):
"""Check that the port is Smart NIC port
:param port_data: an instance of ironic.objects.port.Port
or port data as dict.
:returns: A boolean to indicate port as Smart NIC port.
"""
if isinstance(port_data, objects.Port):
return port_data.supports_is_smartnic() and port_data.is_smartnic
if isinstance(port_data, dict):
return port_data.get('is_smartnic', False)
LOG.warning('Unknown port data type: %(type)s', {'type': type(port_data)})
return False
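# Doctest-style illustration for the dict branch (not part of the original
# module):
#
#     >>> is_smartnic_port({'is_smartnic': True})
#     True
#     >>> is_smartnic_port({})
#     False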
def _get_network_by_uuid_or_name(client, uuid_or_name, net_type=_('network')):
"""Return a neutron network by UUID or name.
:param client: A Neutron client object.
:param uuid_or_name: network UUID or name
:param net_type: human-readable network type for error messages
:returns: A dict describing the neutron network.
:raises: NetworkError on failure to contact Neutron
:raises: InvalidParameterValue for missing or duplicated network
"""
try:
network = client.find_network(uuid_or_name, ignore_missing=False)
except openstack_exc.DuplicateResource:
network_ids = [net.id for net in client.networks(name=uuid_or_name)]
raise exception.InvalidParameterValue(
_('More than one %(type)s was found for name %(name)s: %(nets)s') %
{'name': uuid_or_name, 'nets': ', '.join(network_ids),
'type': net_type})
except openstack_exc.ResourceNotFound:
raise exception.InvalidParameterValue(
_('%(type)s with name or UUID %(uuid_or_name)s was not found') %
{'type': net_type, 'uuid_or_name': uuid_or_name})
except openstack_exc.OpenStackCloudException as exc:
raise exception.NetworkError(_('Could not retrieve network: %s') % exc)
LOG.debug('Got network matching %(uuid_or_name)s: %(result)s',
{'uuid_or_name': uuid_or_name, 'result': network})
return network
def _get_port_by_uuid(client, port_uuid):
"""Return a neutron port by UUID.
:param client: A Neutron client object.
:param port_uuid: UUID of a Neutron port to query.
:returns: A dict describing the neutron port.
:raises: InvalidParameterValue if the port does not exist.
:raises: NetworkError on failure to contact Neutron.
"""
try:
port = client.get_port(port_uuid)
except openstack_exc.ResourceNotFound:
raise exception.InvalidParameterValue(
_('Neutron port %(port_uuid)s was not found') %
{'port_uuid': port_uuid})
except openstack_exc.OpenStackCloudException as exc:
raise exception.NetworkError(_('Could not retrieve neutron port: %s') %
exc)
return port
def get_physnets_by_port_uuid(client, port_uuid):
"""Return the set of physical networks associated with a neutron port.
Query the network to which the port is attached and return the set of
physical networks associated with the segments in that network.
:param client: A Neutron client object.
:param port_uuid: UUID of a Neutron port to query.
:returns: A set of physical networks.
:raises: NetworkError if the network query fails.
:raises: InvalidParameterValue for missing network.
"""
port = _get_port_by_uuid(client, port_uuid)
network_uuid = port.network_id
network = _get_network_by_uuid_or_name(client, network_uuid)
if network.segments is not None:
# A network with multiple segments will have a 'segments' parameter
# which will contain a list of segments. Each segment should have a
# 'provider:physical_network' parameter which contains the physical
# network of the segment.
return set(segment[PHYSNET_PARAM_NAME]
for segment in network.segments
if segment[PHYSNET_PARAM_NAME])
else:
# A network with a single segment will have a
# 'provider:physical_network' parameter which contains the network's
# physical network.
return (set([network.provider_physical_network])
if network.provider_physical_network else set())
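# Illustrative outcomes (physnet names are hypothetical): for a multi-segment
# network with segments [{'provider:physical_network': 'physnet1'},
# {'provider:physical_network': None}] this returns {'physnet1'}; for a
# single-segment network whose provider_physical_network is 'physnet2' it
# returns {'physnet2'}; with no physical network it returns an empty set.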
@retrying.retry(
stop_max_attempt_number=CONF.agent.neutron_agent_max_attempts,
retry_on_exception=lambda e: isinstance(e, exception.NetworkError),
wait_fixed=CONF.agent.neutron_agent_status_retry_interval * 1000
)
def wait_for_host_agent(client, host_id, target_state='up'):
"""Wait for neutron agent to become target state
:param client: A Neutron client object.
:param host_id: Agent host_id
:param target_state: up: wait for up status,
down: wait for down status
    :returns: True if the agent state matches ``target_state``.
:raises: exception.Invalid if 'target_state' is not valid.
:raises: exception.NetworkError if host status didn't match the required
status after max retry attempts.
"""
if target_state not in ['up', 'down']:
raise exception.Invalid(
'Invalid requested agent state to validate, accepted values: '
'up, down. Requested state: %(target_state)s' % {
'target_state': target_state})
LOG.debug('Validating host %(host_id)s agent is %(status)s',
{'host_id': host_id,
'status': target_state})
is_alive = _validate_agent(client, host=host_id)
LOG.debug('Agent on host %(host_id)s is %(status)s',
{'host_id': host_id,
'status': 'up' if is_alive else 'down'})
if ((target_state == 'up' and is_alive)
or (target_state == 'down' and not is_alive)):
return True
raise exception.NetworkError(
'Agent on host %(host)s failed to reach state %(state)s' % {
'host': host_id, 'state': target_state})
@retrying.retry(
stop_max_attempt_number=CONF.agent.neutron_agent_max_attempts,
retry_on_exception=lambda e: isinstance(e, exception.NetworkError),
wait_fixed=CONF.agent.neutron_agent_status_retry_interval * 1000
)
def wait_for_port_status(client, port_id, status):
"""Wait for port status to be the desired status
:param client: A Neutron client object.
:param port_id: Neutron port_id
:param status: Port's target status, can be ACTIVE, DOWN ... etc.
:returns: boolean indicates that the port status matches the
required value passed by param status.
:raises: InvalidParameterValue if the port does not exist.
:raises: exception.NetworkError if port status didn't match
the required status after max retry attempts.
"""
LOG.debug('Validating Port %(port_id)s status is %(status)s',
{'port_id': port_id, 'status': status})
port = _get_port_by_uuid(client, port_id)
LOG.debug('Port %(port_id)s status is: %(status)s',
{'port_id': port_id, 'status': port.status})
if port.status == status:
return True
raise exception.NetworkError(
'Port %(port_id)s failed to reach status %(status)s' % {
'port_id': port_id, 'status': status})
class NeutronNetworkInterfaceMixin(object):
def get_cleaning_network_uuid(self, task):
cleaning_network = (
task.node.driver_info.get('cleaning_network')
or CONF.neutron.cleaning_network
)
return validate_network(
cleaning_network, _('cleaning network'),
context=task.context)
def get_provisioning_network_uuid(self, task):
provisioning_network = (
task.node.driver_info.get('provisioning_network')
or CONF.neutron.provisioning_network
)
return validate_network(
provisioning_network, _('provisioning network'),
context=task.context)
# TODO(stendulker): FlatNetwork should not use this method.
# FlatNetwork uses tenant network for rescue operation.
def get_rescuing_network_uuid(self, task):
rescuing_network = (
task.node.driver_info.get('rescuing_network')
or CONF.neutron.rescuing_network
)
return validate_network(
rescuing_network, _('rescuing network'),
context=task.context)
def get_inspection_network_uuid(self, task):
inspection_network = (
task.node.driver_info.get('inspection_network')
or CONF.neutron.inspection_network
)
return validate_network(
inspection_network, _('inspection network'),
context=task.context)
def validate_inspection(self, task):
"""Validate that the node has required properties for inspection.
:param task: A TaskManager instance with the node being checked
:raises: MissingParameterValue if node is missing one or more required
parameters
:raises: UnsupportedDriverExtension
"""
try:
self.get_inspection_network_uuid(task)
except exception.MissingParameterValue:
# Fall back to non-managed in-band inspection
raise exception.UnsupportedDriverExtension(
driver=task.node.driver, extension='inspection')
avg_line_length: 37.351515 | max_line_length: 79 | alphanum_fraction: 0.64533
hexsha: e7bf018d2db44f390f3f58d24c8a6b93acb1f0af | size: 2331 | ext: py | lang: Python | path: src/callosum/auth.py | repo: bohblue2/callosum | head_hexsha: adb8f6aa2d44cd3c4448f6899027a2964eca380a | licenses: ["MIT"] | stars: 19 (2018-08-17 to 2022-03-31) | issues: 9 (2018-11-15 to 2019-12-06) | forks: 2 (2018-05-16 to 2020-07-24)
from __future__ import annotations
import abc
from typing import Optional
import attr
import zmq
@attr.dataclass(frozen=True, slots=True)
class AuthResult:
success: bool
user_id: Optional[str] = None
@attr.dataclass(frozen=True, slots=True)
class Identity:
domain: str
private_key: bytes
def create_keypair(self):
'''
Generate a new CURVE-25519 public-private keypair.
'''
# NOTE: currently we rely on zmq for convenience, but we may use libnacl directly
# if we want to isolate this module from zmq dependency.
public_key, private_key = zmq.curve_keypair()
return public_key, private_key
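# Illustrative usage (the Identity field values are hypothetical); pyzmq
# returns Z85-encoded CURVE keys, which are 40 bytes each:
#
#     >>> ident = Identity(domain='default', private_key=b'')
#     >>> public_key, private_key = ident.create_keypair()
#     >>> len(public_key), len(private_key)
#     (40, 40)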
class AbstractAuthenticator(metaclass=abc.ABCMeta):
'''
Users of Callosum should subclass this to implement custom authentication.
Though `lower.zeromq` uses the keypair to encrypt the traffic as well as
authenticate the peer sockets, but this is not a mandatory requirement for
transport implementations. A transport may simply use its own network-level
encryption and/or authentication scheme while leaving this authenticator as an
application-level identity management scheme.
'''
# === Binder APIs ===
@abc.abstractmethod
async def server_identity(self) -> Identity:
'''
Return the identity of the server.
Only used by the binder.
'''
raise NotImplementedError
@abc.abstractmethod
async def check_client(self, client_id: Identity) -> AuthResult:
'''
Check if the given domain and client public key is a valid one or not.
Only used by the binder.
'''
raise NotImplementedError
# === Connector APIs ===
@abc.abstractmethod
async def server_public_key(self) -> bytes:
'''
Return the public key of the server.
Only used by the connector.
'''
raise NotImplementedError
@abc.abstractmethod
async def client_identity(self) -> Identity:
'''
Return the identity of the client.
Only used by the connector.
'''
raise NotImplementedError
@abc.abstractmethod
async def client_public_key(self) -> bytes:
'''
Return the public key of the client.
Only used by the connector.
'''
raise NotImplementedError
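# A minimal sketch (not part of the original module) of a concrete
# authenticator holding static identities; every class name, attribute, and
# key value below is hypothetical:
#
#     class StaticAuthenticator(AbstractAuthenticator):
#         def __init__(self, server_id: Identity, server_pub: bytes,
#                      client_id: Identity, client_pub: bytes,
#                      allowed_client_keys: frozenset) -> None:
#             self._server_id, self._server_pub = server_id, server_pub
#             self._client_id, self._client_pub = client_id, client_pub
#             self._allowed = allowed_client_keys
#
#         async def server_identity(self) -> Identity:
#             return self._server_id
#
#         async def check_client(self, client_id: Identity) -> AuthResult:
#             # Here the Identity instance carries the peer's key material.
#             ok = client_id.private_key in self._allowed
#             return AuthResult(success=ok,
#                               user_id=client_id.domain if ok else None)
#
#         async def server_public_key(self) -> bytes:
#             return self._server_pub
#
#         async def client_identity(self) -> Identity:
#             return self._client_id
#
#         async def client_public_key(self) -> bytes:
#             return self._client_pub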
avg_line_length: 27.104651 | max_line_length: 85 | alphanum_fraction: 0.668812
hexsha: a770dc31980cbaa5d5c4bc3044b1a567442c387b | size: 14710 | ext: py | lang: Python | path: python/ray/util/placement_group.py | repo: decypherdatalabs/ray-1 | head_hexsha: 214ad08cb9f7299b9b9cf2c8a4cb51d9e25f9f90 | licenses: ["Apache-2.0"] | stars: null | issues: null | forks: 1 (2022-03-27)
from typing import Dict, Union, List, Optional
import ray
from ray._raylet import ObjectRef
from ray._raylet import PlacementGroupID
from ray._private.utils import hex_to_binary
from ray.util.annotations import PublicAPI, DeveloperAPI
from ray.ray_constants import to_memory_units
from ray._private.client_mode_hook import client_mode_should_convert
from ray._private.client_mode_hook import client_mode_wrap
bundle_reservation_check = None
BUNDLE_RESOURCE_LABEL = "bundle"
# We need to import this method to use for ready API.
# But ray.remote is only available in runtime, and
# if we define this method inside ready method, this function is
# exported whenever ready is called, which can impact performance,
# https://github.com/ray-project/ray/issues/6240.
def _export_bundle_reservation_check_method_if_needed():
global bundle_reservation_check
if bundle_reservation_check:
return
@ray.remote(num_cpus=0)
def bundle_reservation_check_func(placement_group):
return placement_group
bundle_reservation_check = bundle_reservation_check_func
@PublicAPI
class PlacementGroup:
"""A handle to a placement group."""
@staticmethod
def empty() -> "PlacementGroup":
return PlacementGroup(PlacementGroupID.nil())
def __init__(self, id: PlacementGroupID, bundle_cache: Optional[List[Dict]] = None):
self.id = id
self.bundle_cache = bundle_cache
@property
def is_empty(self):
return self.id.is_nil()
def ready(self) -> ObjectRef:
"""Returns an ObjectRef to check ready status.
This API runs a small dummy task to wait for placement group creation.
        It is compatible with ray.get and ray.wait.
Example:
>>> pg = placement_group([{"CPU": 1}])
ray.get(pg.ready())
>>> pg = placement_group([{"CPU": 1}])
ray.wait([pg.ready()], timeout=0)
"""
self._fill_bundle_cache_if_needed()
_export_bundle_reservation_check_method_if_needed()
assert len(self.bundle_cache) != 0, (
"ready() cannot be called on placement group object with a "
"bundle length == 0, current bundle length: "
f"{len(self.bundle_cache)}"
)
return bundle_reservation_check.options(
placement_group=self, resources={BUNDLE_RESOURCE_LABEL: 0.001}
).remote(self)
def wait(self, timeout_seconds: Union[float, int]) -> bool:
"""Wait for the placement group to be ready within the specified time.
Args:
timeout_seconds(float|int): Timeout in seconds.
Return:
True if the placement group is created. False otherwise.
"""
return _call_placement_group_ready(self.id, timeout_seconds)
@property
def bundle_specs(self) -> List[Dict]:
"""List[Dict]: Return bundles belonging to this placement group."""
self._fill_bundle_cache_if_needed()
return self.bundle_cache
@property
def bundle_count(self) -> int:
self._fill_bundle_cache_if_needed()
return len(self.bundle_cache)
def _fill_bundle_cache_if_needed(self) -> None:
if not self.bundle_cache:
self.bundle_cache = _get_bundle_cache(self.id)
@client_mode_wrap
def _call_placement_group_ready(pg_id: PlacementGroupID, timeout_seconds: int) -> bool:
worker = ray.worker.global_worker
worker.check_connected()
return worker.core_worker.wait_placement_group_ready(pg_id, timeout_seconds)
@client_mode_wrap
def _get_bundle_cache(pg_id: PlacementGroupID) -> List[Dict]:
worker = ray.worker.global_worker
worker.check_connected()
return list(ray.state.state.placement_group_table(pg_id)["bundles"].values())
@PublicAPI
@client_mode_wrap
def placement_group(
bundles: List[Dict[str, float]],
strategy: str = "PACK",
name: str = "",
lifetime=None,
) -> PlacementGroup:
"""Asynchronously creates a PlacementGroup.
Args:
bundles(List[Dict]): A list of bundles which
represent the resources requirements.
strategy(str): The strategy to create the placement group.
- "PACK": Packs Bundles into as few nodes as possible.
- "SPREAD": Places Bundles across distinct nodes as even as possible.
- "STRICT_PACK": Packs Bundles into one node. The group is
not allowed to span multiple nodes.
- "STRICT_SPREAD": Packs Bundles across distinct nodes.
name(str): The name of the placement group.
        lifetime(str): Either `None`, in which case the placement group
            fate-shares with its creator and is deleted once its
            creator is dead, or "detached", which means the placement
            group lives as a global object independent of the creator.
Raises:
ValueError if bundle type is not a list.
ValueError if empty bundle or empty resource bundles are given.
ValueError if the wrong lifetime arguments are given.
Return:
PlacementGroup: Placement group object.
"""
worker = ray.worker.global_worker
worker.check_connected()
if not isinstance(bundles, list):
raise ValueError("The type of bundles must be list, got {}".format(bundles))
# Validate bundles
for bundle in bundles:
if len(bundle) == 0 or all(
resource_value == 0 for resource_value in bundle.values()
):
raise ValueError(
"Bundles cannot be an empty dictionary or "
f"resources with only 0 values. Bundles: {bundles}"
)
if "memory" in bundle.keys() and bundle["memory"] > 0:
# Make sure the memory resource can be
# transformed to memory unit.
to_memory_units(bundle["memory"], True)
if lifetime is None:
detached = False
elif lifetime == "detached":
detached = True
else:
raise ValueError(
"placement group `lifetime` argument must be either `None` or 'detached'"
)
placement_group_id = worker.core_worker.create_placement_group(
name, bundles, strategy, detached
)
return PlacementGroup(placement_group_id)
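# Illustrative usage (requires a running Ray cluster; the resource shapes are
# hypothetical):
#
#     >>> import ray
#     >>> ray.init()
#     >>> pg = placement_group([{"CPU": 1}, {"CPU": 1}], strategy="SPREAD")
#     >>> ray.get(pg.ready())         # block until both bundles are reserved
#     >>> pg.wait(timeout_seconds=5)  # or poll with a timeout instead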
@PublicAPI
@client_mode_wrap
def remove_placement_group(placement_group: PlacementGroup) -> None:
"""Asynchronously remove placement group.
Args:
placement_group (PlacementGroup): The placement group to delete.
"""
assert placement_group is not None
worker = ray.worker.global_worker
worker.check_connected()
worker.core_worker.remove_placement_group(placement_group.id)
@PublicAPI
@client_mode_wrap
def get_placement_group(placement_group_name: str) -> PlacementGroup:
"""Get a placement group object with a global name.
    Returns:
        The placement group object with the given name.
    Raises:
        ValueError: If the name is empty or no placement group with the
            given name exists.
"""
if not placement_group_name:
raise ValueError("Please supply a non-empty value to get_placement_group")
worker = ray.worker.global_worker
worker.check_connected()
placement_group_info = ray.state.state.get_placement_group_by_name(
placement_group_name, worker.namespace
)
if placement_group_info is None:
raise ValueError(f"Failed to look up actor with name: {placement_group_name}")
else:
return PlacementGroup(
PlacementGroupID(hex_to_binary(placement_group_info["placement_group_id"]))
)
@DeveloperAPI
@client_mode_wrap
def placement_group_table(placement_group: PlacementGroup = None) -> dict:
"""Get the state of the placement group from GCS.
Args:
placement_group (PlacementGroup): placement group to see
states.
"""
worker = ray.worker.global_worker
worker.check_connected()
placement_group_id = placement_group.id if (placement_group is not None) else None
return ray.state.state.placement_group_table(placement_group_id)
@PublicAPI
def get_current_placement_group() -> Optional[PlacementGroup]:
"""Get the current placement group which a task or actor is using.
It returns None if there's no current placement group for the worker.
For example, if you call this method in your driver, it returns None
(because drivers never belong to any placement group).
Examples:
>>> @ray.remote
>>> def f():
>>> # This will return the placement group the task f belongs to.
>>> # It means this pg will be identical to the pg created below.
>>> pg = get_current_placement_group()
>>> pg = placement_group([{"CPU": 2}])
>>> f.options(placement_group=pg).remote()
>>> # New script.
>>> ray.init()
>>> # New script doesn't belong to any placement group,
>>> # so it returns None.
>>> assert get_current_placement_group() is None
Return:
PlacementGroup: Placement group object.
None if the current task or actor wasn't
created with any placement group.
"""
if client_mode_should_convert(auto_init=True):
# Client mode is only a driver.
return None
worker = ray.worker.global_worker
worker.check_connected()
pg_id = worker.placement_group_id
if pg_id.is_nil():
return None
return PlacementGroup(pg_id)
def check_placement_group_index(
placement_group: PlacementGroup, bundle_index: int
) -> None:
assert placement_group is not None
if placement_group.id.is_nil():
if bundle_index != -1:
raise ValueError(
"If placement group is not set, "
"the value of bundle index must be -1."
)
elif bundle_index >= placement_group.bundle_count or bundle_index < -1:
raise ValueError(
f"placement group bundle index {bundle_index} "
f"is invalid. Valid placement group indexes: "
f"0-{placement_group.bundle_count}"
)
def _validate_resource_shape(
placement_group, resources, placement_resources, task_or_actor_repr
):
def valid_resource_shape(resources, bundle_specs):
"""
If the resource shape cannot fit into every
bundle spec, return False
"""
for bundle in bundle_specs:
fit_in_bundle = True
for resource, requested_val in resources.items():
# Skip "bundle" resource as it is automatically added
# to all nodes with bundles by the placement group.
if resource == BUNDLE_RESOURCE_LABEL:
continue
if bundle.get(resource, 0) < requested_val:
fit_in_bundle = False
break
if fit_in_bundle:
# If resource request fits in any bundle, it is valid.
return True
return False
bundles = placement_group.bundle_specs
resources_valid = valid_resource_shape(resources, bundles)
placement_resources_valid = valid_resource_shape(placement_resources, bundles)
if not resources_valid:
raise ValueError(
f"Cannot schedule {task_or_actor_repr} with "
"the placement group because the resource request "
f"{resources} cannot fit into any bundles for "
f"the placement group, {bundles}."
)
if not placement_resources_valid:
# Happens for the default actor case.
# placement_resources is not an exposed concept to users,
# so we should write more specialized error messages.
raise ValueError(
f"Cannot schedule {task_or_actor_repr} with "
"the placement group because the actor requires "
f"{placement_resources.get('CPU', 0)} CPU for "
"creation, but it cannot "
f"fit into any bundles for the placement group, "
f"{bundles}. Consider "
"creating a placement group with CPU resources."
)
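# Illustrative: a request of {"CPU": 2} fits bundle_specs [{"CPU": 4}] because
# it fits entirely inside one bundle, but not [{"CPU": 1}, {"CPU": 1}], where
# no single bundle can hold it; the latter raises the ValueError above
# (bundle values here are hypothetical).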
def configure_placement_group_based_on_context(
placement_group_capture_child_tasks: bool,
bundle_index: int,
resources: Dict,
placement_resources: Dict,
task_or_actor_repr: str,
placement_group: Union[PlacementGroup, str, None] = "default",
) -> PlacementGroup:
"""Configure the placement group based on the given context.
Based on the given context, this API returns the placement group instance
for task/actor scheduling.
Params:
placement_group_capture_child_tasks: Whether or not the
placement group needs to be captured from the global
context.
bundle_index: The bundle index for tasks/actor scheduling.
resources: The scheduling resources.
placement_resources: The scheduling placement resources for
actors.
task_or_actor_repr: The repr of task or actor
function/class descriptor.
placement_group: The placement group instance.
- "default": Default placement group argument. Currently,
            the default behavior is to capture the parent task's
placement group if placement_group_capture_child_tasks
is set.
- None: means placement group is explicitly not configured.
- Placement group instance: In this case, do nothing.
Returns:
Placement group instance based on the given context.
Raises:
ValueError: If the bundle index is invalid for the placement group
or the requested resources shape doesn't fit to any
bundles.
"""
# Validate inputs.
assert placement_group_capture_child_tasks is not None
assert resources is not None
# Validate and get the PlacementGroup instance.
# Placement group could be None, default, or placement group.
# Default behavior is "do not capture child tasks".
if placement_group != "default":
if not placement_group:
placement_group = PlacementGroup.empty()
elif placement_group == "default":
if placement_group_capture_child_tasks:
placement_group = get_current_placement_group()
else:
placement_group = PlacementGroup.empty()
if not placement_group:
placement_group = PlacementGroup.empty()
assert isinstance(placement_group, PlacementGroup)
# Validate the index.
check_placement_group_index(placement_group, bundle_index)
# Validate the shape.
if not placement_group.is_empty:
_validate_resource_shape(
placement_group, resources, placement_resources, task_or_actor_repr
)
return placement_group
avg_line_length: 35.107399 | max_line_length: 88 | alphanum_fraction: 0.670292
hexsha: 798029399b5176044bae3f40170e9f1db85540e3 | size: 198 | ext: py | lang: Python | path: solutions_automation/vdc/dashboard/gitea.py | repo: threefoldtech/js-sdk | head_hexsha: 811f783ac34a60225175bab2d806802a87b9d5c7 | licenses: ["Apache-2.0"] | stars: 13 (2020-09-02 to 2022-03-12) | issues: 1998 (2020-06-15 to 2022-03-24) | forks: 8 (2020-09-29 to 2021-06-14)
from solutions_automation.vdc.dashboard.common import CommonChatBot
from jumpscale.packages.vdc_dashboard.chats.gitea import GiteaDeploy
class GiteaAutomated(CommonChatBot, GiteaDeploy):
pass
avg_line_length: 28.285714 | max_line_length: 68 | alphanum_fraction: 0.853535
hexsha: 8a43b1b64f7e792ab618f6134e411f90027fae8c | size: 40793 | ext: py | lang: Python | path: qsurface/plot.py | stars repo: watermarkhu/Toric_code (9817d9b1, ["BSD-3-Clause"], 14 stars, 2020-12-10 to 2022-03-10) | issues repo: siddhantphy/qsurface (88005c4e, 28 issues, 2020-06-23 to 2020-11-10) | forks repo: fochoao/cpython at Lib/site-packages/qsurface/plot.py (3dc84b26, ["bzip2-1.0.6", "0BSD"], 9 forks, 2021-01-12 to 2022-03-10)
from abc import ABC, abstractmethod
from typing import Optional, Tuple, Union
from dataclasses import dataclass
import matplotlib as mpl
from matplotlib import pyplot as plt
from matplotlib.artist import Artist
from matplotlib.lines import Line2D
from matplotlib.widgets import Button
from matplotlib.blocking_input import BlockingInput
from matplotlib.patches import Circle, Rectangle
from collections import defaultdict
import tkinter
import numpy as np
import os
color_type = Union[str, Tuple[float, float, float, float]]
axis_type = Tuple[float, float, float, float]
@dataclass
class PlotParams:
"""Parameters for the plotting template classes.
    Contains all parameters used in inherited objects of `.Template2D` and `.Template3D`. The dataclass is initialized with many default values for an optimal plotting experience. If any parameters should be changed, the user can create their own instance of plotting parameters, where the altered parameters are supplied as keyword arguments. The instance can be supplied to the plotting class via the ``plot_params`` keyword argument.
Examples
--------
See the below example where the background color of the figure is changed to black. Note that we have to inherit from the `.Template2D` class.
>>> class Plotting(Template2D):
... pass
>>> custom_params = PlotParams(color_background = (0,0,0,1))
>>> plot_with_custom_params = Plotting(plot_params=custom_params)
"""
blocking_wait: float = -1
blocking_pick_radius: float = 10
scale_figure_length: float = 10
scale_figure_height: float = 10
scale_font_primary: float = 12
scale_font_secondary: float = 10
scale_3d_layer: float = 8
color_background: color_type = (1, 1, 1, 0)
color_edge: color_type = (0.8, 0.8, 0.8, 1)
color_qubit_edge: color_type = (0.7, 0.7, 0.7, 1)
color_qubit_face: color_type = (0.95, 0.95, 0.95, 1)
color_x_primary: color_type = (0.9, 0.3, 0.3, 1)
color_z_primary: color_type = (0.5, 0.5, 0.9, 1)
color_y_primary: color_type = (0.9, 0.9, 0.5, 1)
color_x_secondary: color_type = (0.9, 0.7, 0.3, 1)
color_z_secondary: color_type = (0.3, 0.9, 0.3, 1)
color_y_secondary: color_type = (0.9, 0.9, 0.5, 1)
color_x_tertiary: color_type = (0.5, 0.1, 0.1, 1)
color_z_tertiary: color_type = (0.1, 0.1, 0.5, 1)
color_y_tertiary: color_type = (0.9, 0.9, 0.5, 1)
alpha_primary: float = 0.35
alpha_secondary: float = 0.5
line_width_primary: float = 1.5
line_width_secondary: float = 3
line_style_primary: str = "solid"
line_style_secondary: str = "dashed"
line_style_tertiary: str = "dotted"
patch_circle_2d: float = 0.1
patch_rectangle_2d: float = 0.1
patch_circle_3d: float = 30
patch_rectangle_3d: float = 30
legend_line_width = 1
legend_marker_size = 10
axis_main: axis_type = (0.075, 0.1, 0.7, 0.85)
axis_main_non_interact: axis_type = (0.0, 0.05, 0.8, 0.9)
axis_block: axis_type = (0.96, 0.01, 0.03, 0.03)
axis_nextbutton: axis_type = (0.85, 0.05, 0.125, 0.05)
axis_prevbutton: axis_type = (0.85, 0.12, 0.125, 0.05)
axis_legend: axis_type = (0.85, 0.5, 0.125, 0.3)
axis_text: axis_type = (0.05, 0.025, 0.7, 0.05)
axis_radio: axis_type = (0.85, 0.19, 0.125, 0.125)
font_default_size: float = 12
font_title_size: float = 16
font_button_size: float = 12
axis3d_pane_color: color_type = (1, 1, 1, 0)
axis3d_line_color: color_type = (0, 0, 0, 0.1)
axis3d_grid_line_style: str = "dotted"
axis3d_grid_line_alpha: float = 0.2
def load_params(self, param_dict):
"""Loads extra plotting parameters.
Additional parameters can be loaded to the dataclass via this method. The additional parameters must be a dictionary where values are stored to the dataclass with the key as attribute name. If the value is a string that equals to any already defined dataclass attribute, the value at the existing attribute is used for the new parameter. See examples.
Parameters
----------
        param_dict
Dictionary or dictionary of dictionaries of additional parameters.
Examples
--------
New parameters can be added to the dataclass. Values of dataclass attributes are used if present.
>>> params = PlotParams()
>>> params.alpha_primary
0.35
>>> params.load_params({
... "new_attr" : "some_value",
... "use_existing" : "alpha_primary",
... })
>>> params.new_attr
some_value
>>> params.use_existing
0.35
Nested dictionaries will also load existing attribute values.
>>> params.load_params({
... "category": {
... "new_attr" : "some_value",
... "use_existing" : "alpha_primary",
... }
... })
>>> params.category
{"new_attr" : "some_value", "use_existing" : 0.35}
"""
for attribute, value in param_dict.items():
if hasattr(self, attribute):
print(f"Warning, attribute {attribute} already defined.")
if isinstance(value, dict):
for sub_attribute, sub_value in value.items():
if isinstance(sub_value, str):
value[sub_attribute] = getattr(self, sub_value, sub_value)
else:
value[sub_attribute] = sub_value
setattr(self, attribute, value)
else:
setattr(self, attribute, getattr(self, value, value))
class BlockingKeyInput(BlockingInput):
"""Blocking class to receive key presses.
See Also
--------
`matplotlib.blocking_input.BlockingInput` : Inherited blocking class.
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, eventslist=("button_press_event", "key_press_event"), **kwargs)
def __call__(self, timeout=30):
"""Blocking call to retrieve a single key press."""
return super().__call__(n=1, timeout=timeout)
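# Illustrative usage (assumes an open interactive figure and a GUI backend):
#
#     >>> fig = plt.figure()
#     >>> blocking = BlockingKeyInput(fig)
#     >>> events = blocking(timeout=30)  # returns after one key/button press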
class Template2D(ABC):
"""Template 2D plot object with history navigation.
This template plot object which can either be an interactive figure using the Tkinter backend, or shows each plotting iteration as a separate figure for the IPython ``inline`` backend. The interactive figure has the following features.
- Fast plotting by use of "blitting".
- Redrawing past iterations of the figure by storing all changes in history.
- Keyboard navigation for iteration selection.
- Plot object information by picking.
    To use this class, one must inherit from it. Existing objects can then be altered by updating their plot properties via :meth:`new_properties`, where the changed properties must be a dictionary with keywords and values corresponding to the respective matplotlib object. Every change in plot property is stored in ``self.history_dict``. This allows changes to be undone or redone by simply applying the saved property changes in the dictionary. Fast plotting is enabled by not drawing the figure after every queued change. Instead, each object is drawn on the canvas individually after a property change, and a series of changes is drawn to the figure when a new plot iteration is requested via :meth:`new_iter`. This is performed by *blitting* the canvas.
Keyboard navigation and picking is enabled by blocking the code via a custom `.BlockingKeyInput` class. While the code is blocked, inputs are caught by the blocking class and processed for history navigation or picking navigation. Moving the iteration past the available history allows for the code to continue. The keyboard input is parsed by :meth:`focus`.
    Default values for plot properties such as colors and linewidths are loaded from `.PlotParams`. A custom parameter dataclass can be supplied via the ``plot_params`` keyword argument.
Parameters
----------
plot_params
Plotting parameters dataclass containing colors, styles and others.
Attributes
----------
figure : `matplotlib.figure.Figure`
Main figure.
interactive : bool
Enables GUI elements and interactive plotting.
main_ax : `matplotlib.axes.Axes`
Main axis of the figure.
history_dict : `.collections.defaultdict`
For each iteration, for every plot object with changed properties, the properties are stored as a nested dictionary. See the example below.
>>> history_dict = {
0: {
"<Line2D object>": {
"color": "k",
},
"<Circle object>": {
"linestyle": "-",
}
}
1: {
"<Line2D object>": {
"color": "r",
},
"<Circle object>": {
"linestyle": ":",
}
}
}
history_iters : int
Total number of iterations in history.
history_iter : int
The current plot iteration.
history_iter_names : list of str
List of length ``history_iters`` containing a title for each iteration.
history_at_newest : bool
Whether the current plot iteration is the latest or newest.
history_event_iter : str
String catching the keyboard input for the wanted plot iteration.
future_dict : `.collections.defaultdict`
Same as ``history_dict`` but for changes for future iterations.
temporary_changes : `.collections.defaultdict`
Temporary changes for plot properties, requested by :meth:`temporary_properties`, which are immediately drawn to the figure. These properties can be overwritten or undone before a new iteration is requested via :meth:`new_iter`. When a new iteration is requested, we need to find the difference in properties of the queued changes with the current iteration and save all differences to ``self.history_dict``.
temporary_saved : `.collections.defaultdict`
Temporary changes are saved to the current iteration ``iter``. Thus when a new iteration ``iter + 1`` is requested, we need to recalculate the differences of the properties in ``iter-1`` and the current iteration with the temporary changes. The previous property values when temporary changes are requested by :meth:`temporary_properties` are saved to ``self.temporary_saved`` and used as the property changes for ``iter-1``.
interact_axes : dict of `matplotlib.axes.Axes`
All iteractive elements should have their own axis saved in ``self.interact_axes``. The ``axis.active`` attribute must be added to define when the axis is shown. If the focus on the figure is lost, all axes in ``self.interact_axes`` are hidden by setting ``axis.active=False``.
interact_bodies : dict
All interactive elements such as buttons, radiobuttons, sliders, should be saved to this dictionary with the same key as their axes in ``self.interact_axes``.
Notes
-----
    Note that not all backends support blitting. It does not work with the OSX backend (but does work with other GUI backends on Mac).
Examples
--------
A `matplotlib.lines.Line2D` object is initiated with ``color="k"`` and ``ls="-"``. We request that the color of the object is red in a new plot iteration.
>>> import matplotlib.pyplot as plt
... class Example(Template2D):
... def __init__(self, *args, **kwargs):
... super().__init__(*args, **kwargs)
... self.line = plt.plot(0, 0, color="k", ls="-")[0] # Line located at [0] after plot
>>> fig = Example()
    >>> fig.new_properties(fig.line, {"color": "r"})
>>> fig.new_iter()
>>> fig.history_dict
{
0: {"<Line2D>": {"color": "k"}},
1: {"<Line2D>": {"color": "r"}},
}
    The attribute ``self.history_dict`` thus only contains changes to plot properties. If we request another iteration but change the linestyle to ":", the initial linestyle will be saved to iteration 1.
>>> fig.new_properties(fig.line, {"ls": ":"})
>>> fig.new_iter()
>>> fig.history_dict
{
0: {"<Line2D>": {"color": "k"}},
        1: {"<Line2D>": {"color": "r", "ls": "-"}},
        2: {"<Line2D>": {"ls": ":"}},
}
    We temporarily alter the linewidth to 2, and then to 1.5. After we are satisfied with the temporary changes, we request a new iteration with the final change of color to green.
>>> fig.temporary_properties(fig.line, {"lw": 2})
>>> fig.temporary_properties(fig.line, {"lw": 1.5})
>>> fig.temporary_changes
{"<Line2D>": {"lw": 1.5}}
>>> fig.temporary_saved
{"<Line2D>": {"lw": 1}} # default value
>>> fig.new_properties(fig.line, {"color": "g"})
>>> fig.new_iter()
>>> fig.history_dict
{
0: {"<Line2D>": {"color": "k"}},
        1: {"<Line2D>": {"color": "r", "ls": "-", "lw": 1}},
        2: {"<Line2D>": {"lw": 1.5, "color": "r"}},
3: {"<Line2D>": {"color": "g"}},
}
Properties in ``self.temporary_saved`` are saved to ``self.history_dict`` in the previous iteration, properties in ``self.temporary_changes`` are saved to the current iteration, and new properties are saved to the new iteration.
    The ``history_dict`` example in the attribute section above corresponds to a plot with a Line2D object and a Circle object: in the second iteration, the color of the Line2D object is updated from black to red, and the linestyle of the Circle object is changed from "-" to ":".
"""
def __init__(
self,
plot_params: Optional[PlotParams] = None,
projection: Optional[str] = None,
**kwargs,
):
self.interactive = self.load_interactive_backend()
self.projection = projection
self.params = plot_params if plot_params else PlotParams()
self.figure = None
self.main_ax = None
self.history_dict = defaultdict(dict)
self.history_iters = 0
self.history_iter = 0
self.history_iter_names = []
self.history_event_iter = ""
self.future_dict = defaultdict(dict)
self.temporary_changes = defaultdict(dict)
self.temporary_saved = defaultdict(dict)
self.shown_confirm_close = False
self.figure = plt.figure(figsize=(self.params.scale_figure_length, self.params.scale_figure_height))
self.canvas = self.figure.canvas
# Init buttons and boxes
self.legend_ax = plt.axes(self.params.axis_legend)
self.legend_ax.axis("off")
if self.interactive:
self.main_ax = plt.axes(self.params.axis_main, projection=self.projection)
self.canvas.mpl_connect("pick_event", self._pick_handler)
self.blocking_input = BlockingKeyInput(self.figure)
self.interact_axes = {
"prev_button": plt.axes(self.params.axis_prevbutton),
"next_button": plt.axes(self.params.axis_nextbutton),
}
for body in self.interact_axes.values():
body.active = True
self.interact_bodies = {
"prev_button": Button(self.interact_axes["prev_button"], "Previous"),
"next_button": Button(self.interact_axes["next_button"], "Next"),
}
self.interact_bodies["prev_button"].on_clicked(self._draw_prev)
self.interact_bodies["next_button"].on_clicked(self._draw_next)
self.block_box = plt.axes(self.params.axis_block)
self.block_box.axis("off")
self.block_icon = self.block_box.scatter(0, 0, color="r")
else:
self.main_ax = plt.axes(self.params.axis_main_non_interact, projection=self.projection)
self.text_box = plt.axes(self.params.axis_text)
self.text_box.axis("off")
self.text = self.text_box.text(
0.5,
0.5,
"",
fontsize=self.params.font_default_size,
va="center",
ha="center",
transform=self.text_box.transAxes,
)
if self.interactive:
self.canvas.draw()
def load_interactive_backend(self) -> bool:
"""Configures the plotting backend.
        If the TkAgg or Qt5Agg backend is enabled or can be enabled, the method returns True. For other backends, False is returned and interactive plotting is disabled.
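
        Examples
        --------
        A minimal sketch of the intended behaviour; the outcome depends on the
        backends available in the local environment:

        >>> import matplotlib as mpl
        >>> mpl.use("TkAgg")    # force a supported backend, if installed
        >>> fig = Template2D()  # __init__ calls load_interactive_backend()
        >>> fig.interactive
        True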
"""
backend = mpl.get_backend().lower()
if backend in ["tkagg", "qt5agg"]:
return True
elif "inline" in backend:
from IPython.display import display
self.display = display
else:
DISPLAY = os.environ.get("DISPLAY", None)
if DISPLAY:
try:
mpl.use("TkAgg")
return True
except ImportError:
pass
try:
mpl.use("Qt5Agg")
return True
except ImportError:
pass
print(f"Matplotlib is using {backend} backend, which is not supported.")
else:
print(f"Display {DISPLAY} not available. Interactive plotting is disabled.")
return False
def close(self):
"""Closes the figure."""
if self.interactive:
self.draw_figure("Press (->/enter) to close figure.")
plt.close(self.figure)
@property
def history_at_newest(self):
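        """Whether the current plot iteration is the newest iteration in the history."""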
return self.history_iter == self.history_iters
"""
-------------------------------------------------------------------------------
Initialization
-------------------------------------------------------------------------------
"""
def _init_axis(
self,
limits: Optional[Tuple[float, float, float, float]] = None,
title: str = "",
invert: bool = True,
aspect: str = "",
ax: Optional[mpl.axes.Axes] = None,
**kwargs,
):
"""(Main) Axis settings function.
Parameters
----------
limits
Axis boundaries
title
Axis title.
invert
            Invert axis.
        aspect
            Aspect ratio of the axis, e.g. ``"equal"``.
ax
Axis to change.
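
        Examples
        --------
        A sketch of a typical call with an illustrative title; the ``limits``
        tuple is ``(x, y, width, height)``, as inferred from the implementation
        below:

        >>> fig._init_axis(limits=(0, 0, 10, 10), title="Lattice", aspect="equal")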
"""
if ax is None:
ax = self.main_ax
ax.axis(False)
if limits is not None:
ax.set_xlim(limits[0], limits[0] + limits[2])
ax.set_ylim(limits[1], limits[1] + limits[3])
if title:
ax.set_title(title, fontsize=self.params.font_title_size)
for bound in ["top", "right", "bottom", "left"]:
ax.spines[bound].set_visible(False)
if invert:
ax.invert_yaxis()
if aspect:
            ax.set_aspect(aspect)
"""
-------------------------------------------------------------------------------
Event Handlers
-------------------------------------------------------------------------------
"""
def _pick_handler(self, event):
"""Function on when an object in the figure is picked"""
print(event)
def focus(self):
"""Enables the blocking object, catches input for history navigation.
        The `.BlockingKeyInput` object is called, which blocks the execution of the code. During this block, the user input is received by the blocking object and returned to the current method. From here, we can manipulate the plot or move through the plot history, and call :meth:`focus` again when all changes in the history have been drawn and blitted.
================== ==============================================
key function
================== ==============================================
h show help
i show all iterations
d redraw current iteration
enter or right go to next iteration, enter iteration number
backspace or left go to previous iteration
n go to newest iteration
0-9 input iteration number
================== ==============================================
        When the method is active, the focus is on the figure. This is indicated by a green circle in the bottom right of the figure. When the focus is lost, the code execution continues and the icon turns red. The change in icon color is performed by :meth:`_set_figure_state`, which also hides the interactive elements when the focus is lost.
"""
self.canvas.draw()
wait = True
while wait:
self._set_figure_state("g")
try:
output = self.blocking_input(self.params.blocking_wait)
if output == []:
if self.history_at_newest:
wait = False
else:
wait = self._draw_next()
continue
else:
event = output[-1]
if hasattr(event, "button"): # Catch next button if on most recent
if (
event.button == 1
and event.inaxes == self.interact_axes["next_button"]
and self.history_iter == self.history_iters
):
wait = False
elif event.key in ["return", "right"]:
if self.history_event_iter == "":
if self.history_at_newest:
wait = False
else:
wait = self._draw_next()
else:
target_iter = int(self.history_event_iter)
self.history_event_iter = ""
if target_iter <= self.history_iters:
wait = self._draw_iteration(target_iter)
else:
print("Input iter not in range.")
elif event.key in ["backspace", "left"]:
wait = self._draw_prev()
elif event.key in [str(i) for i in range(10)]:
self.history_event_iter += event.key
print("Go to iteration {} (press return).".format(self.history_event_iter))
elif event.key == "n":
wait = self._draw_iteration(self.history_iters)
elif event.key == "i":
print("Iterations:")
for i, iter_name in enumerate(self.history_iter_names):
print(i, iter_name)
print()
elif event.key == "h":
print(self.focus.__doc__)
elif event.key == "d":
self.draw_figure()
except tkinter.TclError:
print("Figure has been destroyed. Future plots will be ignored.")
wait = False
self._set_figure_state("r", False) # Hide all interactive axes
def _set_figure_state(self, color, override: Optional[bool] = None):
"""Set color of blocking icon and updates interactive axes visibility.
Parameters
----------
color
Color of `self.block_icon`.
override
Overrides the visibility of axes in `self.interact_axes`.
"""
for ax in self.interact_axes.values():
if override is None:
ax.set_visible(ax.active)
else:
ax.set_visible(override)
self.block_icon.set_color(color)
self.block_box.draw_artist(self.block_icon)
self.canvas.blit(self.block_box.bbox)
self.canvas.draw()
"""
-------------------------------------------------------------------------------
Legend functions
-------------------------------------------------------------------------------
"""
# marker="o", ms=10, color="w", mfc=None, mec="k", ls="-"
def _legend_circle(self, label: str, **kwargs) -> Line2D:
"""Returns a Line2D object that is used on the plot legend."""
return Line2D(
[],
[],
lw=self.params.legend_line_width,
mew=self.params.legend_line_width,
label=label,
**kwargs,
)
def _legend_scatter(self, label: str, **kwargs):
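        """Returns a (`.Line2D`, scatter) pair that is used as a single legend entry."""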
line = Line2D(
[],
[],
label=label,
lw=self.params.legend_line_width,
mew=self.params.legend_line_width,
color=self.params.color_edge,
)
scatter = plt.scatter([], [], s=8 ** 2, **kwargs)
return (line, scatter)
def _draw_line(self, X: list, Y: list, *args, z: float = 0, **kwargs):
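        """Draws a `.Line2D` artist on the main axis; ``z`` is ignored in 2D and used by `Template3D`."""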
artist = Line2D(X, Y, *args, **kwargs)
self.main_ax.add_line(artist)
return artist
def _draw_circle(self, xy: tuple, size: float, *args, z: float = 0, **kwargs):
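        """Draws a `.Circle` patch on the main axis; ``z`` is ignored in 2D and used by `Template3D`."""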
artist = Circle(xy, size, *args, **kwargs)
self.main_ax.add_patch(artist)
return artist
def _draw_rectangle(self, xy: tuple, size_x: float, size_y: float, *args, z: float = 0, **kwargs):
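        """Draws a `.Rectangle` patch on the main axis; ``z`` is ignored in 2D and used by `Template3D`."""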
artist = Rectangle(xy, size_x, size_y, *args, **kwargs)
self.main_ax.add_patch(artist)
return artist
"""
-------------------------------------------------------------------------------
History
-------------------------------------------------------------------------------
"""
def draw_figure(
self,
new_iter_name: Optional[str] = None,
output: bool = True,
carriage_return: bool = False,
**kwargs,
):
"""Draws the canvas and blocks code execution.
Draws the queued plot changes onto the canvas and calls for :meth:`focus` which blocks the code execution and catches user input for history navigation.
If a new iteration is called by supplying a `new_iter_name`, we additionally check for future property changes in the `self.future_dict`, and add these changes to the queue. Finally, all queued property changes for the next iteration are applied by `change_properties`.
Parameters
----------
new_iter_name
Name of the new iteration. If no name is supplied, no new iteration is called.
output
Prints information to the console.
carriage_return
Applies carriage return to remove last line printed.
See Also
--------
focus
change_properties
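
        Examples
        --------
        A minimal sketch of one iteration cycle, assuming ``fig`` is an
        initialized figure with a ``fig.line`` artist:

        >>> fig.new_properties(fig.line, {"color": "r"})
        >>> fig.draw_figure(new_iter_name="Line turned red")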
"""
if new_iter_name:
if self.history_at_newest:
for artist, changes in self.future_dict.pop(new_iter_name, {}).items():
self.new_properties(artist, changes)
for artist, changes in self.future_dict.pop(self.history_iter + 1, {}).items():
self.new_properties(artist, changes)
for artist, changes in self.history_dict[self.history_iter + 1].items():
self.change_properties(artist, changes)
self.history_iter_names.append(new_iter_name)
self.history_iters += 1
self.history_iter += 1
else:
print(f"Cannot add iteration {new_iter_name} to history, currently not on most recent iteration.")
if not (new_iter_name and self.history_at_newest):
new_iter_name = self.history_iter_names[self.history_iter - 1]
text = "{}/{}: {}".format(self.history_iter, self.history_iters, new_iter_name)
self.text.set_text(text)
if output:
if carriage_return:
print("\rDrawing", text)
else:
print("Drawing", text)
if self.interactive:
self.canvas.blit(self.main_ax.bbox)
self.focus()
else:
self.display(self.figure)
def _draw_from_history(self, condition: bool, direction: int, draw: bool = True, **kwargs) -> bool:
"""Move a single plot iteration forward or backwards.
        Draws all stored object properties one step in the history, in either the +1 or -1 ``direction``, if the ``condition`` is met. If there are any properties stored in ``self.temporary_changes``, these settings are first parsed and saved to the current and previous iterations.
Parameters
----------
condition
Must be true for navigation.
direction
Moves either a single iteration forward or backwards in time.
draw
Draws the figure and blocks the code immediately with :meth:`draw_figure`.
Returns
-------
bool
True if focus is kept, False if lost.
"""
if condition:
# Save temporary changes
if self.history_at_newest and self.temporary_changes:
for artist, properties in self.temporary_changes.items():
self.new_properties(artist, properties, self.temporary_saved.pop(artist))
                self.temporary_changes = defaultdict(dict)  # reset; keep a defaultdict so temporary_properties keeps working
self.history_iter += direction
for artist, changes in self.history_dict[self.history_iter].items():
self.change_properties(artist, changes)
if draw:
self.draw_figure(**kwargs)
return False
else:
print("Nothing to plot.")
return True
def _draw_next(self, *args, **kwargs) -> bool:
"""Redraws all changes from next plot iteration onto the plot."""
return self._draw_from_history(self.history_iter < self.history_iters, 1, **kwargs)
def _draw_prev(self, *args, **kwargs) -> bool:
"""Redraws all changes from previous plot iteration onto the plot."""
self.shown_confirm_close = False
return self._draw_from_history(self.history_iter > 1, -1, **kwargs)
def _draw_iteration(self, target: int, draw: bool = True, **kwargs) -> bool:
"""Redraws all changes until the `target` iteration.
Loops over :meth:`_draw_next` or :meth:`_draw_prev` until the `target` plot iteration is reached. Note that this means that all the changes from the current iteration until the target iteration in `self.history_dict` are applied.
Parameters
----------
target
Target plot iteration.
draw
Draws the figure and blocks the code immediately with :meth:`draw_figure`.
Returns
-------
bool
True if focus is kept, False if lost.
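
        Examples
        --------
        A sketch, assuming the history already contains several iterations:

        >>> fig._draw_iteration(1)                   # jump back to the first iteration
        >>> fig._draw_iteration(fig.history_iters)   # and forward to the newest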
"""
if target != self.history_iter:
diff = target - self.history_iter
if diff > 0:
for _ in range(diff):
self._draw_next(draw=False, output=False)
else:
for _ in range(-diff):
self._draw_prev(draw=False, output=False)
if draw:
self.draw_figure(**kwargs)
else:
print("Already on this plot iteration.")
return True
return False
"""
-------------------------------------------------------------------------------
Object properties
-------------------------------------------------------------------------------
"""
def new_artist(self, artist: mpl.artist.Artist, axis: Optional[mpl.axes.Axes] = None) -> None:
"""Adds a new artist to the ``axis``.
Newly added artists must be hidden in the previous iteration. To make sure the history is properly logged, the visibility of the ``artist`` is set to ``False``, and a new property of shown visibility is added to the queue of the next iteration.
Parameters
----------
artist
New plot artist to add to the ``axis``.
axis
            Axis to add the artist to.
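
        Examples
        --------
        A sketch with a hypothetical circle patch:

        >>> circle = plt.Circle((0.5, 0.5), 0.1)
        >>> fig.new_artist(circle)    # hidden now, queued to become visible next iteration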
"""
        if axis is None:
            axis = self.main_ax
        # Register the artist on the axis before the visibility bookkeeping.
        if artist not in axis.get_children():
            axis.add_artist(artist)
        self.change_properties(artist, {"visible": False})
        self.new_properties(artist, {"visible": True})
@staticmethod
def change_properties(artist, prop_dict):
"""Changes the plot properties and draw the plot object or artist."""
if prop_dict:
plt.setp(artist, **prop_dict)
    def new_properties(self, artist: Artist, properties: dict, saved_properties: Optional[dict] = None, **kwargs):
"""Parses a dictionary of property changes of a *matplotlib* artist.
        New properties are supplied via ``properties``. If any of the new properties differs from its current value, this is seen as a property change. The old property value is stored in ``self.history_dict[self.history_iter]``, and the new property value is stored at ``self.history_dict[self.history_iter + 1]``. These new properties are *queued* for the next iteration. The queue is emptied by applying all changes when :meth:`draw_figure` is called. If the same property changes two or more times within the same iteration, the previously queued change is removed with ``next_prop.pop(key, None)``.

        The ``saved_properties`` parameter is used when temporary property changes have been applied by :meth:`temporary_properties`, in which case the original properties are saved to ``self.temporary_saved``. Before a new iteration is drawn, the temporary changes, which may have been overwritten, are compared with the saved changes, and the differences in properties are saved to ``self.history_dict[self.history_iter - 1]`` and ``self.history_dict[self.history_iter]``.

        Some color values of different *matplotlib* objects are nested, some are a list or tuple, and others may be a `numpy.ndarray`. The nested helpers ``get_nested()`` and ``get_nested_property()`` make sure that the compared value always has a flat, comparable type.
Parameters
----------
artist
Plot object whose properties are changed.
properties
Plot properties to change.
saved_properties
Override current properties and parse previous and current history.
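
        Examples
        --------
        Queueing the same property twice within one iteration keeps only the
        last requested value (a sketch, assuming a black ``fig.line`` artist):

        >>> fig.new_properties(fig.line, {"color": "r"})
        >>> fig.new_properties(fig.line, {"color": "b"})
        >>> fig.history_dict[fig.history_iter + 1]
        {"<Line2D>": {"color": "b"}}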
"""
def get_nested(value):
if type(value) == list and type(value[0]) == list:
return get_nested(value[0])
else:
return value
def get_nested_property(prop):
if type(prop) == np.ndarray:
return get_nested(prop.tolist())[:3]
elif type(prop) == list:
return get_nested(prop)[:3]
else:
return prop
        saved_properties = saved_properties or {}  # tolerate the None default
        if saved_properties:
prev_properties = self.history_dict[self.history_iter - 1]
next_properties = self.history_dict[self.history_iter]
else:
prev_properties = self.history_dict[self.history_iter]
next_properties = self.history_dict[self.history_iter + 1]
prev_prop = prev_properties.pop(artist, {})
prev_prop.update(saved_properties)
next_prop = next_properties.pop(artist, {})
# If record exists, find difference in object properties
for key, new_value in properties.items():
current_value = prev_prop.get(key, get_nested_property(plt.getp(artist, key)))
next_prop.pop(key, None)
if current_value != new_value:
prev_prop[key], next_prop[key] = current_value, new_value
if prev_prop:
prev_properties[artist] = prev_prop
if next_prop:
next_properties[artist] = next_prop
def temporary_properties(self, artist: Artist, properties: dict, **kwargs):
"""Applies temporary property changes to a *matplotlib* artist.
Only available on the newest iteration, as we cannot change what is already in the past. All values in ``properties`` are immediately applied to `artist`. Since temporary changes can be overwritten within the same iteration, the first time a temporary property change is requested, the previous value is saved to ``self.temporary_saved``. When the iteration changes, the property differences of the previous and current iteration are recomputed and saved to ``self.history_dict`` in :meth:`_draw_from_history`.
Parameters
----------
artist
Plot object whose properties are changed.
properties
Plot properties to change.
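
        Examples
        --------
        A sketch, assuming ``fig`` is at its newest iteration:

        >>> fig.temporary_properties(fig.line, {"lw": 2})
        >>> fig.temporary_changes
        {"<Line2D>": {"lw": 2}}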
"""
if self.history_at_newest:
self.temporary_changes[artist].update(properties)
for prop_name in properties:
if prop_name not in self.temporary_saved[artist]:
self.temporary_saved[artist][prop_name] = plt.getp(artist, prop_name)
self.change_properties(artist, properties)
else:
print("Must be at newest iteration to apply changes.")
from mpl_toolkits.mplot3d import art3d
class Template3D(Template2D):
"""Template 3D plot object with history navigation."""
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, projection="3d", **kwargs)
def _init_axis(
self,
limits: Optional[Tuple[float, float, float, float]] = None,
title: str = "",
invert: bool = True,
ax: Optional[mpl.axes.Axes] = None,
z_limits: Optional[Tuple[float, float]] = None,
**kwargs,
):
"""
        Initializes the 3D axis by removing the background panes, changing the grid ticks, alpha and linestyle, and setting the labels and title.
"""
if ax is None:
ax = self.main_ax
ax.axis(False)
if title:
ax.set_title(title, fontsize=self.params.font_title_size)
ax.set_xlabel("z")
ax.set_ylabel("y")
ax.set_zlabel("t")
ax.xaxis.set_pane_color(self.params.axis3d_pane_color)
ax.yaxis.set_pane_color(self.params.axis3d_pane_color)
ax.zaxis.set_pane_color(self.params.axis3d_pane_color)
ax.xaxis.line.set_color(self.params.axis3d_line_color)
ax.yaxis.line.set_color(self.params.axis3d_line_color)
ax.zaxis.line.set_color(self.params.axis3d_line_color)
ax.xaxis._axinfo["grid"]["linestyle"] = self.params.axis3d_grid_line_style
ax.yaxis._axinfo["grid"]["linestyle"] = self.params.axis3d_grid_line_style
ax.zaxis._axinfo["grid"]["linestyle"] = self.params.axis3d_grid_line_style
ax.xaxis._axinfo["grid"]["alpha"] = self.params.axis3d_grid_line_alpha
ax.yaxis._axinfo["grid"]["alpha"] = self.params.axis3d_grid_line_alpha
ax.zaxis._axinfo["grid"]["alpha"] = self.params.axis3d_grid_line_alpha
if limits is not None:
ax.set_xlim(limits[0], limits[0] + limits[2])
ax.set_ylim(limits[1], limits[1] + limits[3])
if z_limits is not None:
ax.set_zlim(z_limits[0], z_limits[0] + z_limits[1])
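        # Approximate an equal aspect ratio, which mplot3d does not reliably
        # support, by giving all three axes the same range around their midpoints.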
x_limits = ax.get_xlim3d()
y_limits = ax.get_ylim3d()
z_limits = ax.get_zlim3d()
x_range = abs(x_limits[1] - x_limits[0])
x_middle = sum(x_limits) / len(x_limits)
y_range = abs(y_limits[1] - y_limits[0])
y_middle = sum(y_limits) / len(y_limits)
z_range = abs(z_limits[1] - z_limits[0])
z_middle = sum(z_limits) / len(z_limits)
plot_radius = 0.5 * max([x_range, y_range, z_range])
ax.set_xlim3d([x_middle - plot_radius, x_middle + plot_radius])
ax.set_ylim3d([y_middle - plot_radius, y_middle + plot_radius])
ax.set_zlim3d([z_middle - plot_radius, z_middle + plot_radius])
if invert:
ax.invert_yaxis()
def _draw_line(self, X, Y, *args, z: float = 0, **kwargs):
artist = super()._draw_line(np.array(X), np.array(Y), *args, **kwargs)
art3d.line_2d_to_3d(artist, zs=z)
return artist
def _draw_line3D(self, X, Y, Z, *args, **kwargs):
artist = art3d.Line3D(X, Y, Z, *args, **kwargs)
self.main_ax.add_line(artist)
return artist
def _draw_circle(self, *args, z: float = 0, **kwargs):
artist = super()._draw_circle(*args, **kwargs)
art3d.patch_2d_to_3d(artist, z=z)
return artist
def _draw_rectangle(self, *args, z: float = 0, **kwargs):
artist = super()._draw_rectangle(*args, **kwargs)
art3d.patch_2d_to_3d(artist, z=z)
return artist
| avg_line_length: 44.388466 | max_line_length: 771 | alphanum_fraction: 0.589586 |

| hexsha: e7a3797fe27f2540dbedcc0f4767a7976ebdc8dc | size: 439 | ext: py | lang: Python |
| repo_path: packages/python/plotly/plotly/validators/scatter/marker/line/_reversescale.py (identical for stars/issues/forks) |
| repo_name: mastermind88/plotly.py | head_hexsha: efa70710df1af22958e1be080e105130042f1839 | licenses: ["MIT"] | star/issue/fork counts: null |
import _plotly_utils.basevalidators
class ReversescaleValidator(_plotly_utils.basevalidators.BooleanValidator):
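    """Boolean validator for the ``scatter.marker.line.reversescale`` property."""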
def __init__(
self, plotly_name="reversescale", parent_name="scatter.marker.line", **kwargs
):
super(ReversescaleValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "plot"),
**kwargs,
)
| avg_line_length: 31.357143 | max_line_length: 85 | alphanum_fraction: 0.667426 |