repo_name stringlengths 6 97 | path stringlengths 3 341 | text stringlengths 8 1.02M |
|---|---|---|
iafisher/precommit | setup.py | import os
from setuptools import find_packages, setup

# Resolve paths relative to this file so the build works from any CWD.
d = os.path.dirname(os.path.realpath(__file__))
# BUG FIX: pin the encoding — README.md is UTF-8 Markdown, and the default
# locale encoding can fail on non-UTF-8 platforms (e.g. Windows cp1252).
with open(os.path.join(d, "README.md"), "r", encoding="utf-8") as f:
    long_description = f.read()

setup(
    name="iafisher-precommit",
    version="0.1",
    description="Manage git pre-commit hooks",
    long_description=long_description,
    long_description_content_type="text/markdown",
    license="MIT",
    author="<NAME>",
    author_email="<EMAIL>",
    entry_points={"console_scripts": ["precommit = precommitlib.main:main"]},
    packages=find_packages(exclude=["tests"]),
    package_data={"": ["precommit.py.template"]},
    project_urls={"Source": "https://github.com/iafisher/precommit"},
)
|
iafisher/precommit | precommitlib/main.py | """
The command-line interface to the precommit tool.
Most of the tool's implementation lives in lib.py, and the definitions of the pre-commit
checks live in checks.py.
Author: <NAME> (<EMAIL>)
Version: May 2020
"""
import importlib.util
import os
import pkg_resources
import shutil
import stat
import subprocess
import sys
from collections import namedtuple
from . import utils
from .lib import Checklist, Precommit
def main() -> None:
    """Entry point: parse the command line and dispatch to a subcommand."""
    args = parse_args(sys.argv[1:])
    configure_globals(args)
    chdir_to_git_root()

    # `help` (or --help anywhere) wins over every other subcommand.
    if args.subcommand == "help" or args.flags["--help"]:
        main_help(args)
        return

    dispatch = {"init": main_init, "fix": main_fix}
    dispatch.get(args.subcommand, main_check)(args)
def main_init(args):
    """Create precommit.py from the bundled template and install the git hook."""
    force = args.flags["--force"]
    if not force and os.path.exists("precommit.py"):
        utils.error("precommit.py already exists. Re-run with --force to overwrite it.")

    hookpath = os.path.join(".git", "hooks", "pre-commit")
    if not force and os.path.exists(hookpath):
        utils.error(f"{hookpath} already exists. Re-run with --force to overwrite it.")

    # Courtesy of https://setuptools.readthedocs.io/en/latest/pkg_resources.html
    template_path = pkg_resources.resource_filename(__name__, "precommit.py.template")
    shutil.copyfile(template_path, "precommit.py")

    with open(hookpath, "w", encoding="utf-8") as f:
        f.write("#!/bin/sh\n\nprecommit --all\n")

    # Make the hook executable by everyone.
    mode = os.stat(hookpath).st_mode
    os.chmod(hookpath, mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
def main_fix(args):
    """Apply any available fixes for problems found by the checks."""
    get_precommit(args).fix()
def main_help(args):
    """Print the usage message to standard output."""
    sys.stdout.write(HELP + "\n")
def main_check(args):
    """Run the pre-commit checks; exit with status 1 if any problem is found."""
    if get_precommit(args).check():
        sys.exit(1)
def chdir_to_git_root():
    """
    Change the working directory to the root of the enclosing git repository.

    Exits with an error message when not inside a git repository.
    """
    gitroot = subprocess.run(
        ["git", "rev-parse", "--show-toplevel"],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    if gitroot.returncode != 0:
        utils.error("must be in git repository.")
    # BUG FIX: decode as UTF-8 instead of ASCII so repositories living under
    # non-ASCII paths don't crash with UnicodeDecodeError.
    os.chdir(gitroot.stdout.decode("utf-8").strip())
# The recognized subcommand names.
SUBCOMMANDS = {"init", "fix", "help", "check"}
# Short flag spellings mapped to their canonical long forms.
SHORT_FLAGS = {"-f": "--force", "-h": "--help", "-w": "--working"}
# Each long flag mapped to the set of subcommands it applies to.
# An empty set means the flag is valid for every subcommand.
FLAGS = {
    "--color": set(),
    "--no-color": set(),
    "--help": set(),
    "--verbose": {"fix", "check"},
    "--all": {"fix", "check"},
    "--force": {"init"},
    "--working": {"fix", "check"},
}
# Parsed command line: the subcommand name, remaining positional arguments,
# and a dict mapping each long flag to a boolean.
Args = namedtuple("Args", ["subcommand", "positional", "flags"])
def parse_args(args):
    """
    Parses the argument list into an `Args` object.

    Exits the program with an error message if the arguments are invalid.
    """
    positional = []
    flags = {}
    force_positional = False
    # BUG FIX: iterate over the `args` parameter rather than `sys.argv[1:]`,
    # so the function actually honors the list it was given (the two happened
    # to coincide because main() passes sys.argv[1:], masking the bug).
    for arg in args:
        if arg == "--":
            # Everything after a bare "--" is positional, even if it
            # starts with a dash.
            force_positional = True
            continue
        elif not force_positional and arg.startswith("-"):
            # Normalize short flags (e.g. -f) to their long form (--force).
            flags[SHORT_FLAGS.get(arg, arg)] = True
        else:
            positional.append(arg)

    if positional:
        subcommand = positional[0]
        positional = positional[1:]
    else:
        # No subcommand given: default to running the checks.
        subcommand = "check"

    args = Args(subcommand=subcommand, flags=flags, positional=positional)
    errormsg = check_args(args)
    if errormsg:
        utils.error(errormsg)

    # Default every known flag to False so later lookups never raise KeyError.
    for flag in FLAGS:
        if flag not in args.flags:
            args.flags[flag] = False

    return args
def check_args(args):
    """
    Checks that the command-line arguments are valid.

    Returns an error-message string, or None when the arguments are valid.
    """
    if args.positional:
        return "precommit does not take positional arguments"
    if args.subcommand not in SUBCOMMANDS:
        return f"unknown subcommand: {args.subcommand}"
    if "--no-color" in args.flags and "--color" in args.flags:
        return "--color and --no-color are incompatible"

    for flag in args.flags:
        if flag not in FLAGS:
            return f"unknown flag: {flag}"
        # An empty set in FLAGS means the flag is valid for any subcommand.
        allowed = FLAGS[flag]
        if allowed and args.subcommand not in allowed:
            return f"flag {flag} not valid for {args.subcommand} subcommand"
    return None
def configure_globals(args):
    """
    Configure global settings based on the command-line arguments.
    """
    if args.flags["--color"]:
        # An explicit --color overrides the environment and terminal checks.
        no_color = False
    elif args.flags["--no-color"]:
        no_color = True
    else:
        # Honor the NO_COLOR convention and disable color when stdout is
        # not a terminal (e.g. piped output).
        no_color = "NO_COLOR" in os.environ or not sys.stdout.isatty()

    if no_color:
        utils.turn_off_colors()

    utils.VERBOSE = args.flags["--verbose"]
def get_precommit(args):
    """
    Loads the user's precommit.py from the repository root and returns a
    `Precommit` object built from the checklist it defines.

    Exits with an error message when precommit.py is missing or unimportable.
    """
    path = os.path.join(os.getcwd(), "precommit.py")
    try:
        # Courtesy of https://stackoverflow.com/questions/67631/
        # We could add the current directory to `sys.path` and use a regular import
        # statement, but then if there's no `precommit.py` file in the right place, but
        # there is one somewhere else on `sys.path`, Python will import that module
        # instead and the user will be very confused (#28). The technique below
        # guarantees that an exception will be raised if there is no `precommit.py` in
        # the expected place.
        spec = importlib.util.spec_from_file_location("precommit", path)
        precommit_module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(precommit_module)
    except (FileNotFoundError, ImportError):
        utils.error(
            "could not find precommit.py. You can create it with 'precommit init'."
        )
    else:
        # Call the user's code to initialize the checklist.
        checklist = Checklist()
        precommit_module.init(checklist)
        precommit = Precommit(
            checklist._checks,
            check_all=args.flags["--all"],
            working=args.flags["--working"],
        )
        return precommit
# Usage text printed by the `help` subcommand and the --help flag.
# BUG FIX: the description of `init` was truncated ("Initialize"); completed
# it to describe what main_init actually does.
HELP = """\
precommit: simple git pre-commit hook management.
Usage: precommit [flags] [subcommand]
Subcommands:
If left blank, subcommand defaults to 'check'.
check Check for precommit failures.
fix Apply any available fixes for problems that 'check' finds.
init Initialize precommit: create precommit.py and install the git pre-commit hook.
help Display a help message and exit.
Flags:
--all Run all pre-commit checks, including slow ones.
--color Turn on colorized output, overriding any environment settings.
--no-color Turn off colorized output.
--verbose Emit verbose output.
-w, --working Run on unstaged as well as staged changes.
-h, --help Display a help message and exit.
Written by <NAME>. http://github.com/iafisher/precommit"""
|
iafisher/precommit | precommitlib/utils.py | <reponame>iafisher/precommit
"""
Utility functions and constants for the precommit tool.
Author: <NAME> (<EMAIL>)
Version: May 2020
"""
import sys
# A global flag indicating whether --verbose was passed.
# Overwritten at startup by main.configure_globals().
VERBOSE = False
def plural(n: int, word: str, suffix: str = "s") -> str:
    """Returns the numeral and the proper plural form of the word."""
    ending = "" if n == 1 else suffix
    return f"{n} {word}{ending}"
def error(message: str) -> None:
    """
    Prints an error message to stderr and exits the program with status 1.
    """
    sys.stderr.write(f"Error: {message}\n")
    sys.exit(1)
def turn_off_colors() -> None:
    """Turns off colored output globally for the program."""
    # Flip the module-level flag that _colored() consults.
    global _NO_COLOR
    _NO_COLOR = True
def red(text: str) -> str:
    """Returns a string that will display as red using ANSI color codes."""
    return _colored(text, _COLOR_RED)


def blue(text: str) -> str:
    """Returns a string that will display as blue using ANSI color codes."""
    return _colored(text, _COLOR_BLUE)


def green(text: str) -> str:
    """Returns a string that will display as green using ANSI color codes."""
    return _colored(text, _COLOR_GREEN)


def yellow(text: str) -> str:
    """Returns a string that will display as yellow using ANSI color codes."""
    return _colored(text, _COLOR_YELLOW)


def _colored(text: str, color: str) -> str:
    """Wraps `text` in an ANSI escape sequence, unless colors are disabled."""
    return f"\033[{color}m{text}\033[{_COLOR_RESET}m" if not _NO_COLOR else text
# ANSI SGR codes for the bright foreground colors used above.
_COLOR_RED = "91"
_COLOR_BLUE = "94"
_COLOR_GREEN = "92"
_COLOR_YELLOW = "93"
# Resets all terminal text attributes.
_COLOR_RESET = "0"
# Global kill switch for colored output; set by turn_off_colors().
_NO_COLOR = False
|
mhrous/Graduation_Project | server/test.py |
# Quick sanity check of Python set membership.
s = set(range(1, 7))
print(1 in s)
mhrous/Graduation_Project | server/Data/math_and_physic_constants.py | <filename>server/Data/math_and_physic_constants.py
def add_constant(obj, _type='math'):
    """Store a math or physics constant. Not yet implemented."""
    # TODO: persist `obj` under the given `_type` category.
    pass
def get_constant(my_filter):
    """Look up stored constants matching `my_filter`. Not yet implemented."""
    pass
|
mhrous/Graduation_Project | server/Data/lows.py | def add_low(obj):
    """Store a law object. Not yet implemented."""
    pass
def get_low(my_filter):
    """Look up stored laws matching `my_filter`. Not yet implemented."""
    pass
|
mhrous/Graduation_Project | server/Data/DB/connect.py | <reponame>mhrous/Graduation_Project
from pymongo import MongoClient
from constants import CONSTANTS
def get_db():
    """Connect to MongoDB and return the configured database handle."""
    client = MongoClient(CONSTANTS['DB_URL'])
    db = client[CONSTANTS['DB_NAME']]
    return db


# NOTE: this connects at import time, so importing this module requires a
# reachable MongoDB instance at CONSTANTS['DB_URL'].
DB = get_db()
|
mhrous/Graduation_Project | server/Data/problem.py | <filename>server/Data/problem.py
from hashlib import md5
# On-disk locations for reviewed and unreviewed problem files.
path_checked_file = './DB/Problems/Checked'
path_not_checked_file = './DB/Problems/Not Checked'
# In-memory caches of problems, keyed by the hashed file name
# produced by get_file_name().
checked_problem = {}
not_checked_problem = {}
def get_file_name(text):
    """Derive a stable JSON file name from problem text via an MD5 digest."""
    normalized = text.replace('\n', ' ').strip()
    digest = md5(normalized.encode('utf8')).hexdigest()
    return digest + '.json'
def add_problem(obj):
    """
    Register a new problem, keyed by a hash of its text.

    Returns None when a problem with the same text already exists.
    """
    file_name = get_file_name(obj['text'])
    # BUG FIX: the original checked membership in `all_problem`, a global that
    # is never defined anywhere (guaranteed NameError at runtime). Check both
    # in-memory caches instead.
    if file_name in checked_problem or file_name in not_checked_problem:
        return None
    # TODO: persist the new problem under `file_name`.
    pass
def get_problem(my_filter):
    """Look up stored problems matching `my_filter`. Not yet implemented."""
    pass
|
mhrous/Graduation_Project | server/server.py | <gh_stars>0
from flask import Flask, jsonify
from constants import CONSTANTS
# Flask application; serves the pre-built frontend from ./build.
app = Flask(__name__, static_folder='build')


@app.route('/', methods=['GET'])
def get_tasks():
    """Placeholder endpoint: returns a static JSON payload."""
    return jsonify({'tasks': 'hi'})


if __name__ == '__main__':
    # The port comes from CONSTANTS (PORT env var, default 3001).
    app.run(port=CONSTANTS['PORT'])
|
mhrous/Graduation_Project | server/constants.py | <gh_stars>0
import os
# Application-wide configuration values.
CONSTANTS = {
    # BUG FIX: environment variables are strings, so PORT was a str when the
    # env var was set and an int otherwise; coerce to int for a stable type
    # (Flask's app.run(port=...) expects an integer).
    "PORT": int(os.environ.get("PORT", 3001)),
    "DB_URL": "mongodb://localhost:27017/",
    "DB_NAME": "graduation-project",
}
TrabajoGradoMotorBusqueda/Ontologia | Scripts/ontologia.py | from owlready2 import *
# Add the folder containing the ontology so it is resolved locally
# instead of being fetched from the internet.
onto_path.append("../Data")
# Load the ontology by IRI (or by a direct path to the .owl file).
ontologia = get_ontology(
    "http://www.semanticweb.org/OntologiaInvestigacionPrueba").load()
with ontologia:
    # NOTE: under owlready2, data-property values on individuals are
    # list-valued, which is why every setter below wraps its argument in a
    # one-element list and every getter returns that list unmodified.

    # CLASS: RESEARCH GROUP
    class Grupo_investigacion(Thing):
        def get_id_grupo_investigacion(self):
            return self.id_grupo_investigacion
        def set_id_grupo_investigacion(self,id_grupo_investigacion):
            self.id_grupo_investigacion = [id_grupo_investigacion]
        def get_nombre_grupo_investigacion(self):
            return self.nombre_grupo_investigacion
        def set_nombre_grupo_investigacion(self,nombre_grupo_investigacion):
            self.nombre_grupo_investigacion = [nombre_grupo_investigacion]
        def get_clasificacion_grupo_investigacion(self):
            return self.clasificacion_grupo_investigacion
        def set_clasificacion_grupo_investigacion(self,clasificacion_grupo_investigacion):
            self.clasificacion_grupo_investigacion = [clasificacion_grupo_investigacion]
        def get_area_grupo_investigacion(self):
            return self.area_grupo_investigacion
        def set_area_grupo_investigacion(self,area_grupo_investigacion):
            self.area_grupo_investigacion = [area_grupo_investigacion]
        def get_correo_grupo_investigacion(self):
            return self.correo_grupo_investigacion
        def set_correo_grupo_investigacion(self,correo_grupo_investigacion):
            self.correo_grupo_investigacion = [correo_grupo_investigacion]
        # Object-property helpers: each appends a related individual.
        def relation_gi_tiene_li(self,li):
            self.gi_tiene_li.append(li)
        def relation_gi_tiene_investigador(self,investigador):
            self.gi_tiene_investigador.append(investigador)
        def relation_gi_tiene_docente(self,docente):
            self.gi_tiene_docente.append(docente)
        def relation_gi_tiene_estudiante(self,estudiante):
            self.gi_tiene_estudiante.append(estudiante)

    # SUBCLASS: RESEARCH LINE
    class Linea_investigacion(Grupo_investigacion):
        def get_id_linea_investigacion(self):
            return self.id_linea_investigacion
        def set_id_linea_investigacion(self, id_linea_investigacion):
            self.id_linea_investigacion = [id_linea_investigacion]
        def get_nombre_linea_investigacion(self):
            return self.nombre_linea_investigacion
        def set_nombre_linea_investigacion(self, nombre_linea_investigacion):
            self.nombre_linea_investigacion = [nombre_linea_investigacion]
        def relation_li_tiene_pi(self,pi):
            self.li_tiene_pi.append(pi)

    # CLASS: RESEARCHER
    class Investigador(Thing):
        def get_id_investigador(self):
            return self.id_investigador
        def set_id_investigador(self,id_investigador):
            self.id_investigador = [id_investigador]
        def get_nombres_investigador(self):
            return self.nombres_investigador
        def set_nombres_investigador(self,nombres_investigador):
            self.nombres_investigador = [nombres_investigador]
        def get_apellidos_investigador(self):
            return self.apellidos_investigador
        def set_apellidos_investigador(self,apellidos_investigador):
            self.apellidos_investigador = [apellidos_investigador]
        def get_codigo_investigador(self):
            return self.codigo_investigador
        def set_codigo_investigador(self,codigo_investigador):
            self.codigo_investigador = [codigo_investigador]
        def get_cedula_investigador(self):
            return self.cedula_investigador
        def set_cedula_investigador(self,cedula_investigador):
            self.cedula_investigador = [cedula_investigador]
        def get_correo_investigador(self):
            return self.correo_investigador
        def set_correo_investigador(self,correo_investigador):
            self.correo_investigador = [correo_investigador]
        def relation_investigador_es_docente(self,docente):
            self.investigador_es_docente.append(docente)
        def relation_investigador_es_estudiante(self,estudiante):
            self.investigador_es_estudiante.append(estudiante)
        def relation_investigador_es_ie(self,ie):
            self.investigador_es_ie.append(ie)

    # SUBCLASS: TEACHING RESEARCHER
    class Docente(Investigador):
        def get_id_docente(self):
            return self.id_docente
        def set_id_docente(self, id_docente):
            self.id_docente = [id_docente]
        def relation_docente_es_autor_pi(self,pi):
            self.docente_es_autor_pi.append(pi)
        def relation_docente_asesora_pi(self,pi):
            self.docente_asesora_pi.append(pi)

    # SUBCLASS: STUDENT RESEARCHER
    class Estudiante(Investigador):
        def get_id_estudiante(self):
            return self.id_estudiante
        def set_id_estudiante(self, id_estudiante):
            self.id_estudiante = [id_estudiante]
        def relation_estudiante_es_autor_pi(self,pi):
            self.estudiante_es_autor_pi.append(pi)

    # SUBCLASS: EXTERNAL RESEARCHER
    class Investigador_externo(Investigador):
        def get_id_investigador_externo(self):
            return self.id_investigador_externo
        def set_id_investigador_externo(self, id_investigador_externo):
            self.id_investigador_externo = [id_investigador_externo]
        def relation_ie_es_autor_pi(self,pi):
            self.ie_es_autor_pi.append(pi)

    # CLASS: WORD
    class Palabra(Thing):
        def get_id_palabra(self):
            return self.id_palabra
        def set_id_palabra(self,id_palabra):
            self.id_palabra = [id_palabra]
        def get_descripcion_palabra(self):
            return self.descripcion_palabra
        def set_descripcion_palabra(self, descripcion_palabra):
            # Accumulates descriptions instead of overwriting them.
            if (len(self.descripcion_palabra) > 0):
                self.descripcion_palabra.append(descripcion_palabra)
            else:
                self.descripcion_palabra = [descripcion_palabra]
        def get_lema_palabra(self):
            return self.lema_palabra
        def set_lema_palabra(self,lema_palabra):
            self.lema_palabra = [lema_palabra]
        def get_tipo_palabra(self):
            return self.tipo_palabra
        def set_tipo_palabra(self,tipo_palabra):
            self.tipo_palabra = [tipo_palabra]
        def get_concepto_palabra(self):
            return self.concepto_palabra
        def set_concepto_palabra(self,concepto_palabra):
            self.concepto_palabra = [concepto_palabra]
        def relation_palabra_sinonimo_palabra(self,palabra):
            self.palabra_sinonimo_palabra.append(palabra)
        def relation_palabra_conecta_palabra(self,palabra):
            self.palabra_conecta_palabra.append(palabra)

    # CLASS: RESEARCH PROJECT
    class Proyecto_investigacion(Thing):
        def get_id_proyecto_investigacion(self):
            return self.id_proyecto_investigacion
        def set_id_proyecto_investigacion(self, id_proyecto_investigacion):
            self.id_proyecto_investigacion = [id_proyecto_investigacion]
        def get_titulo_proyecto_investigacion(self):
            return self.titulo_proyecto_investigacion
        def set_titulo_proyecto_investigacion(self, titulo_proyecto_investigacion):
            self.titulo_proyecto_investigacion = [titulo_proyecto_investigacion]
        def get_resumen_proyecto_investigacion(self):
            return self.resumen_proyecto_investigacion
        def set_resumen_proyecto_investigacion(self, resumen_proyecto_investigacion):
            self.resumen_proyecto_investigacion = [resumen_proyecto_investigacion]
        def get_estado_proyecto_investigacion(self):
            return self.estado_proyecto_investigacion
        def set_estado_proyecto_investigacion(self, estado_proyecto_investigacion):
            self.estado_proyecto_investigacion = [estado_proyecto_investigacion]
        def get_tipo_proyecto_investigacion(self):
            return self.tipo_proyecto_investigacion
        def set_tipo_proyecto_investigacion(self, tipo_proyecto_investigacion):
            self.tipo_proyecto_investigacion = [tipo_proyecto_investigacion]
        def get_palabras_clave(self):
            return self.palabras_clave
        def set_palabras_clave(self, palabra_clave):
            # Accumulates keywords instead of overwriting them.
            if(len(self.palabras_clave) > 0):
                self.palabras_clave.append(palabra_clave)
            else:
                self.palabras_clave = [palabra_clave]
        def relation_pi_tiene_palabra(self,palabra):
            self.pi_tiene_palabra.append(palabra)
        # TODO: keyword handling is still incomplete (the original carried a
        # repeated "FALTAN PALABRAS CLAVE OJO" warning here).

    # CLASS: UNIVERSITY
    class Universidad(Thing):
        def get_id_universidad(self):
            return self.id_universidad
        def set_id_universidad(self, id_universidad):
            self.id_universidad = [id_universidad]
        def get_nombre_universidad(self):
            return self.nombre_universidad
        def set_nombre_universidad(self, nombre_universidad):
            self.nombre_universidad = [nombre_universidad]
        def relation_universidad_tiene_facultad(self,facultad):
            self.universidad_tiene_facultad.append(facultad)
        def relation_universidad_tiene_viis(self,viis):
            self.universidad_tiene_viis.append(viis)

    # SUBCLASS: FACULTY
    class Facultad(Universidad):
        def get_id_facultad(self):
            return self.id_facultad
        def set_id_facultad(self, id_facultad):
            self.id_facultad = [id_facultad]
        def get_nombre_facultad(self):
            return self.nombre_facultad
        def set_nombre_facultad(self, nombre_facultad):
            self.nombre_facultad = [nombre_facultad]
        def relation_facultad_tiene_departamento(self,departamento):
            self.facultad_tiene_departamento.append(departamento)

    # SUBCLASS: DEPARTMENT
    class Departamento(Facultad):
        def get_id_departamento(self):
            return self.id_departamento
        def set_id_departamento(self, id_departamento):
            self.id_departamento = [id_departamento]
        def get_nombre_departamento(self):
            return self.nombre_departamento
        def set_nombre_departamento(self, nombre_departamento):
            self.nombre_departamento = [nombre_departamento]
        def relation_departamento_tiene_programa(self,programa):
            self.departamento_tiene_programa.append(programa)
        def relation_departamento_tiene_gi(self,gi):
            self.departamento_tiene_gi.append(gi)

    # SUBCLASS: ACADEMIC PROGRAM
    class Programa(Departamento):
        def get_id_programa(self):
            return self.id_programa
        def set_id_programa(self, id_programa):
            self.id_programa = [id_programa]
        def get_nombre_programa(self):
            return self.nombre_programa
        def set_nombre_programa(self, nombre_programa):
            self.nombre_programa = [nombre_programa]
        def relation_programa_tiene_estudiante(self,estudiante):
            self.programa_tiene_estudiante.append(estudiante)
        def relation_programa_tiene_docente(self,docente):
            self.programa_tiene_docente.append(docente)

    # CLASS: VIIS (research vice-rectorate)
    class VIIS(Thing):
        def get_id_VIIS(self):
            return self.id_VIIS
        def set_id_VIIS(self, id_VIIS):
            self.id_VIIS = [id_VIIS]
        def get_nombre_VIIS(self):
            return self.nombre_VIIS
        def set_nombre_VIIS(self, nombre_VIIS):
            self.nombre_VIIS = [nombre_VIIS]
        def relation_viis_tiene_convocatoria(self,convocatoria):
            self.viis_tiene_convocatoria.append(convocatoria)
        def relation_viis_tiene_investigador(self,investigador):
            self.viis_tiene_investigador.append(investigador)
        def relation_viis_adscribe_gi(self,gi):
            self.viis_adscribe_gi.append(gi)
        def relation_viis_tiene_pi(self,pi):
            self.viis_tiene_pi.append(pi)

    # SUBCLASS: CALL FOR PROJECTS
    class Convocatoria(VIIS):
        def get_id_convocatoria(self):
            return self.id_convocatoria
        def set_id_convocatoria(self, id_convocatoria):
            self.id_convocatoria = [id_convocatoria]
        def get_nombre_convocatoria(self):
            return self.nombre_convocatoria
        def set_nombre_convocatoria(self, nombre_convocatoria):
            self.nombre_convocatoria = [nombre_convocatoria]
        def get_tipo_convocatoria(self):
            return self.tipo_convocatoria
        def set_tipo_convocatoria(self, tipo_convocatoria):
            self.tipo_convocatoria = [tipo_convocatoria]
        def get_anio_convocatoria(self):
            return self.anio_convocatoria
        def set_anio_convocatoria(self, anio_convocatoria):
            self.anio_convocatoria = [anio_convocatoria]
        def relation_convocatoria_tiene_pi(self,pi):
            self.convocatoria_tiene_pi.append(pi)
        def relation_convocatoria_dirigida_investigador(self,investigador):
            self.convocatoria_dirigida_investigador.append(investigador)

    # Direct relation (example, disabled):
    # class pi_tiene_palabra (ObjectProperty):
    #     domain = [Proyecto_investigacion]
    #     range = [Palabra]
    # Inverse relation (example, disabled):
    # class palabra_describe_pi (ObjectProperty):
    #     domain = [Palabra]
    #     range = [Proyecto_investigacion]
    #     inverse_property = pi_tiene_palabra
|
TrabajoGradoMotorBusqueda/Ontologia | Scripts/ClasesOPDP.py | #############################################CREACION DE CLASES#################################################
# TODO: import/load the ontology first — `ontologia` is not defined in this
# file (it comes from Scripts/ontologia.py).
with ontologia:
    # NOTE(review): `ontologia` is undefined in this file; presumably
    # Scripts/ontologia.py must be executed first — confirm.
    # NOTE: owlready2 stores data-property values as lists, hence the
    # one-element-list wrapping in every setter below.

    # CLASS: RESEARCH GROUP
    class Grupo_investigacion(Thing):
        def get_id_grupo_investigacion(self):
            return self.id_grupo_investigacion
        def set_id_grupo_investigacion(self,id_grupo_investigacion):
            self.id_grupo_investigacion = [id_grupo_investigacion]
        def get_nombre_grupo_investigacion(self):
            return self.nombre_grupo_investigacion
        def set_nombre_grupo_investigacion(self,nombre_grupo_investigacion):
            self.nombre_grupo_investigacion = [nombre_grupo_investigacion]
        def get_clasificacion_grupo_investigacion(self):
            return self.clasificacion_grupo_investigacion
        def set_clasificacion_grupo_investigacion(self,clasificacion_grupo_investigacion):
            self.clasificacion_grupo_investigacion = [clasificacion_grupo_investigacion]
        def get_area_grupo_investigacion(self):
            return self.area_grupo_investigacion
        def set_area_grupo_investigacion(self,area_grupo_investigacion):
            self.area_grupo_investigacion = [area_grupo_investigacion]
        def get_correo_grupo_investigacion(self):
            return self.correo_grupo_investigacion
        def set_correo_grupo_investigacion(self,correo_grupo_investigacion):
            self.correo_grupo_investigacion = [correo_grupo_investigacion]
        def relation_gi_tiene_li(self,li):
            self.gi_tiene_li.append(li)
        def relation_gi_tiene_investigador(self,investigador):
            self.gi_tiene_investigador.append(investigador)
        def relation_gi_tiene_docente(self,docente):
            self.gi_tiene_docente.append(docente)
        def relation_gi_tiene_estudiante(self,estudiante):
            self.gi_tiene_estudiante.append(estudiante)

    # SUBCLASS: RESEARCH LINE
    class Linea_investigacion(Grupo_investigacion):
        def get_id_linea_investigacion(self):
            return self.id_linea_investigacion
        def set_id_linea_investigacion(self, id_linea_investigacion):
            self.id_linea_investigacion = [id_linea_investigacion]
        def get_nombre_linea_investigacion(self):
            return self.nombre_linea_investigacion
        def set_nombre_linea_investigacion(self, nombre_linea_investigacion):
            self.nombre_linea_investigacion = [nombre_linea_investigacion]
        def relation_li_tiene_pi(self,pi):
            self.li_tiene_pi.append(pi)

    # CLASS: RESEARCHER
    class Investigador(Thing):
        def get_id_investigador(self):
            return self.id_investigador
        def set_id_investigador(self,id_investigador):
            self.id_investigador = [id_investigador]
        def get_nombres_investigador(self):
            return self.nombres_investigador
        def set_nombres_investigador(self,nombres_investigador):
            self.nombres_investigador = [nombres_investigador]
        def get_apellidos_investigador(self):
            return self.apellidos_investigador
        def set_apellidos_investigador(self,apellidos_investigador):
            self.apellidos_investigador = [apellidos_investigador]
        def get_codigo_investigador(self):
            return self.codigo_investigador
        def set_codigo_investigador(self,codigo_investigador):
            self.codigo_investigador = [codigo_investigador]
        def get_cedula_investigador(self):
            return self.cedula_investigador
        def set_cedula_investigador(self,cedula_investigador):
            self.cedula_investigador = [cedula_investigador]
        def get_correo_investigador(self):
            return self.correo_investigador
        def set_correo_investigador(self,correo_investigador):
            self.correo_investigador = [correo_investigador]
        def relation_investigador_es_docente(self,docente):
            self.investigador_es_docente.append(docente)
        def relation_investigador_es_estudiante(self,estudiante):
            self.investigador_es_estudiante.append(estudiante)
        def relation_investigador_es_ie(self,ie):
            self.investigador_es_ie.append(ie)

    # SUBCLASS: TEACHING RESEARCHER
    class Docente(Investigador):
        def get_id_docente(self):
            return self.id_docente
        def set_id_docente(self, id_docente):
            self.id_docente = [id_docente]
        def relation_docente_es_autor_pi(self,pi):
            self.docente_es_autor_pi.append(pi)
        def relation_docente_asesora_pi(self,pi):
            self.docente_asesora_pi.append(pi)

    # SUBCLASS: STUDENT RESEARCHER
    class Estudiante(Investigador):
        def get_id_estudiante(self):
            return self.id_estudiante
        def set_id_estudiante(self, id_estudiante):
            self.id_estudiante = [id_estudiante]
        def relation_estudiante_es_autor_pi(self,pi):
            self.estudiante_es_autor_pi.append(pi)

    # SUBCLASS: EXTERNAL RESEARCHER
    class Investigador_externo(Investigador):
        def get_id_investigador_externo(self):
            return self.id_investigador_externo
        def set_id_investigador_externo(self, id_investigador_externo):
            self.id_investigador_externo = [id_investigador_externo]
        def relation_ie_es_autor_pi(self,pi):
            self.ie_es_autor_pi.append(pi)

    # CLASS: WORD
    class Palabra(Thing):
        def get_id_palabra(self):
            return self.id_palabra
        def set_id_palabra(self,id_palabra):
            self.id_palabra = [id_palabra]
        def get_descripcion_palabra(self):
            return self.descripcion_palabra
        def set_descripcion_palabra(self,descripcion_palabra):
            # NOTE(review): unlike Scripts/ontologia.py, this setter
            # overwrites instead of accumulating — confirm which is intended.
            self.descripcion_palabra = [descripcion_palabra]
        def get_lema_palabra(self):
            return self.lema_palabra
        def set_lema_palabra(self,lema_palabra):
            self.lema_palabra = [lema_palabra]
        def get_tipo_palabra(self):
            return self.tipo_palabra
        def set_tipo_palabra(self,tipo_palabra):
            self.tipo_palabra = [tipo_palabra]
        def get_concepto_palabra(self):
            return self.concepto_palabra
        def set_concepto_palabra(self,concepto_palabra):
            self.concepto_palabra = [concepto_palabra]
        def relation_palabra_sinonimo_palabra(self,palabra):
            self.palabra_sinonimo_palabra.append(palabra)
        def relation_palabra_conecta_palabra(self,palabra):
            self.palabra_conecta_palabra.append(palabra)

    # CLASS: RESEARCH PROJECT
    class Proyecto_investigacion(Thing):
        def get_id_proyecto_investigacion(self):
            return self.id_proyecto_investigacion
        def set_id_proyecto_investigacion(self, id_proyecto_investigacion):
            self.id_proyecto_investigacion = [id_proyecto_investigacion]
        def get_titulo_proyecto_investigacion(self):
            return self.titulo_proyecto_investigacion
        def set_titulo_proyecto_investigacion(self, titulo_proyecto_investigacion):
            self.titulo_proyecto_investigacion = [titulo_proyecto_investigacion]
        def get_resumen_proyecto_investigacion(self):
            return self.resumen_proyecto_investigacion
        def set_resumen_proyecto_investigacion(self, resumen_proyecto_investigacion):
            self.resumen_proyecto_investigacion = [resumen_proyecto_investigacion]
        def get_estado_proyecto_investigacion(self):
            return self.estado_proyecto_investigacion
        def set_estado_proyecto_investigacion(self, estado_proyecto_investigacion):
            self.estado_proyecto_investigacion = [estado_proyecto_investigacion]
        def get_tipo_proyecto_investigacion(self):
            return self.tipo_proyecto_investigacion
        def set_tipo_proyecto_investigacion(self, tipo_proyecto_investigacion):
            self.tipo_proyecto_investigacion = [tipo_proyecto_investigacion]
        def get_palabras_clave(self):
            return self.palabras_clave
        def set_palabras_clave(self, palabras_clave):
            # NOTE(review): unlike Scripts/ontologia.py, this setter
            # overwrites instead of accumulating — confirm which is intended.
            self.palabras_clave = [palabras_clave]
        def relation_pi_tiene_palabra(self,palabra):
            self.pi_tiene_palabra.append(palabra)
        # TODO: keyword handling is still incomplete (the original carried a
        # repeated "FALTAN PALABRAS CLAVE OJO" warning here).

    # CLASS: UNIVERSITY
    class Universidad(Thing):
        def get_id_universidad(self):
            return self.id_universidad
        def set_id_universidad(self, id_universidad):
            self.id_universidad = [id_universidad]
        def get_nombre_universidad(self):
            return self.nombre_universidad
        def set_nombre_universidad(self, nombre_universidad):
            self.nombre_universidad = [nombre_universidad]
        def relation_universidad_tiene_facultad(self,facultad):
            self.universidad_tiene_facultad.append(facultad)
        def relation_universidad_tiene_viis(self,viis):
            self.universidad_tiene_viis.append(viis)

    # SUBCLASS: FACULTY
    class Facultad(Universidad):
        def get_id_facultad(self):
            return self.id_facultad
        def set_id_facultad(self, id_facultad):
            self.id_facultad = [id_facultad]
        def get_nombre_facultad(self):
            return self.nombre_facultad
        def set_nombre_facultad(self, nombre_facultad):
            self.nombre_facultad = [nombre_facultad]
        def relation_facultad_tiene_departamento(self,departamento):
            self.facultad_tiene_departamento.append(departamento)

    # SUBCLASS: DEPARTMENT
    class Departamento(Facultad):
        def get_id_departamento(self):
            return self.id_departamento
        def set_id_departamento(self, id_departamento):
            self.id_departamento = [id_departamento]
        def get_nombre_departamento(self):
            return self.nombre_departamento
        def set_nombre_departamento(self, nombre_departamento):
            self.nombre_departamento = [nombre_departamento]
        def relation_departamento_tiene_programa(self,programa):
            self.departamento_tiene_programa.append(programa)
        def relation_departamento_tiene_gi(self,gi):
            self.departamento_tiene_gi.append(gi)

    # SUBCLASS: ACADEMIC PROGRAM
    class Programa(Departamento):
        def get_id_programa(self):
            return self.id_programa
        def set_id_programa(self, id_programa):
            self.id_programa = [id_programa]
        def get_nombre_programa(self):
            return self.nombre_programa
        def set_nombre_programa(self, nombre_programa):
            self.nombre_programa = [nombre_programa]
        def relation_programa_tiene_estudiante(self,estudiante):
            self.programa_tiene_estudiante.append(estudiante)
        def relation_programa_tiene_docente(self,docente):
            self.programa_tiene_docente.append(docente)

    # CLASS: VIIS (research vice-rectorate)
    class VIIS(Thing):
        def get_id_VIIS(self):
            return self.id_VIIS
        def set_id_VIIS(self, id_VIIS):
            self.id_VIIS = [id_VIIS]
        def get_nombre_VIIS(self):
            return self.nombre_VIIS
        def set_nombre_VIIS(self, nombre_VIIS):
            self.nombre_VIIS = [nombre_VIIS]
        def relation_viis_tiene_convocatoria(self,convocatoria):
            self.viis_tiene_convocatoria.append(convocatoria)
        def relation_viis_tiene_investigador(self,investigador):
            self.viis_tiene_investigador.append(investigador)
        def relation_viis_adscribe_gi(self,gi):
            self.viis_adscribe_gi.append(gi)
        def relation_viis_tiene_pi(self,pi):
            self.viis_tiene_pi.append(pi)

    # SUBCLASS: CALL FOR PROJECTS
    class Convocatoria(VIIS):
        def get_id_convocatoria(self):
            return self.id_convocatoria
        def set_id_convocatoria(self, id_convocatoria):
            self.id_convocatoria = [id_convocatoria]
        def get_nombre_convocatoria(self):
            return self.nombre_convocatoria
        def set_nombre_convocatoria(self, nombre_convocatoria):
            self.nombre_convocatoria = [nombre_convocatoria]
        def get_tipo_convocatoria(self):
            return self.tipo_convocatoria
        def set_tipo_convocatoria(self, tipo_convocatoria):
            self.tipo_convocatoria = [tipo_convocatoria]
        def get_anio_convocatoria(self):
            return self.anio_convocatoria
        def set_anio_convocatoria(self, anio_convocatoria):
            self.anio_convocatoria = [anio_convocatoria]
        def relation_convocatoria_tiene_pi(self,pi):
            self.convocatoria_tiene_pi.append(pi)
        def relation_convocatoria_dirigida_investigador(self,investigador):
            self.convocatoria_dirigida_investigador.append(investigador)
#############################################DATA PROPERTY CREATION#################################################
#######################PARA SNIPPET#########################################
# with ontologia:
# #Clase $1
# class atributo (DataProperty):
# domain = [$1]
# range = [str]
######################################START###################################
with ontologia:
    # Data property (attribute) declarations for each ontology class.
    # Each property declares its `domain` class and literal `range`.
    # NOTE: `range` intentionally shadows the builtin inside these
    # owlready2 class bodies — that is the library's declaration syntax.
    # Universidad
    class id_universidad (DataProperty):
        domain = [Universidad]
        range = [int]
    # Universidad
    class nombre_universidad (DataProperty):
        domain = [Universidad]
        range = [str]
    # Facultad
    class id_facultad (DataProperty):
        domain = [Facultad]
        range = [int]
    # Facultad
    class nombre_facultad (DataProperty):
        domain = [Facultad]
        range = [str]
    # Departamento
    class id_departamento (DataProperty):
        domain = [Departamento]
        range = [int]
    # Departamento
    class nombre_departamento (DataProperty):
        domain = [Departamento]
        range = [str]
    # Programa
    class id_programa (DataProperty):
        domain = [Programa]
        range = [int]
    # Programa
    class nombre_programa (DataProperty):
        domain = [Programa]
        range = [str]
    # Docente
    class id_docente (DataProperty):
        domain = [Docente]
        range = [int]
    # Estudiante
    class id_estudiante (DataProperty):
        domain = [Estudiante]
        range = [int]
    # Grupo_investigacion
    class id_grupo_investigacion (DataProperty):
        domain = [Grupo_investigacion]
        range = [int]
    # Grupo_investigacion
    class nombre_grupo_investigacion (DataProperty):
        domain = [Grupo_investigacion]
        range = [str]
    # Grupo_investigacion
    class clasificacion_grupo_investigacion (DataProperty):
        domain = [Grupo_investigacion]
        range = [str]
    # Grupo_investigacion
    class area_grupo_investigacion (DataProperty):
        domain = [Grupo_investigacion]
        range = [str]
    # Grupo_investigacion
    class correo_grupo_investigacion (DataProperty):
        domain = [Grupo_investigacion]
        range = [str]
    # Linea_investigacion
    class id_linea_investigacion (DataProperty):
        domain = [Linea_investigacion]
        range = [int]
    # Linea_investigacion
    class nombre_linea_investigacion(DataProperty):
        domain = [Linea_investigacion]
        range = [str]
    # VIIS — NOTE(review): range is str while the other id_* properties
    # are int; confirm this asymmetry is intentional.
    class id_VIIS(DataProperty):
        domain = [VIIS]
        range = [str]
    # VIIS
    class nombre_VIIS(DataProperty):
        domain = [VIIS]
        range = [str]
    # Convocatoria
    class id_convocatoria(DataProperty):
        domain = [Convocatoria]
        range = [int]
    # Convocatoria
    class nombre_convocatoria(DataProperty):
        domain = [Convocatoria]
        range = [str]
    # Convocatoria
    class anio_convocatoria(DataProperty):
        domain = [Convocatoria]
        range = [int]
    # Convocatoria
    class tipo_convocatoria(DataProperty):
        domain = [Convocatoria]
        range = [str]
    # Proyecto_investigacion
    class id_proyecto_investigacion (DataProperty):
        domain = [Proyecto_investigacion]
        range = [int]
    # Proyecto_investigacion
    class titulo_proyecto_investigacion (DataProperty):
        domain = [Proyecto_investigacion]
        range = [str]
    # Proyecto_investigacion
    class resumen_proyecto_investigacion (DataProperty):
        domain = [Proyecto_investigacion]
        range = [str]
    # Proyecto_investigacion — five fixed keyword slots
    class palabra_clave1 (DataProperty):
        domain = [Proyecto_investigacion]
        range = [str]
    # Proyecto_investigacion
    class palabra_clave2 (DataProperty):
        domain = [Proyecto_investigacion]
        range = [str]
    # Proyecto_investigacion
    class palabra_clave3 (DataProperty):
        domain = [Proyecto_investigacion]
        range = [str]
    # Proyecto_investigacion
    class palabra_clave4 (DataProperty):
        domain = [Proyecto_investigacion]
        range = [str]
    # Proyecto_investigacion
    class palabra_clave5 (DataProperty):
        domain = [Proyecto_investigacion]
        range = [str]
    # Proyecto_investigacion
    class estado_proyecto_investigacion (DataProperty):
        domain = [Proyecto_investigacion]
        range = [str]
    # Proyecto_investigacion
    class tipo_proyecto_investigacion (DataProperty):
        domain = [Proyecto_investigacion]
        range = [str]
    # Investigador
    class id_investigador (DataProperty):
        domain = [Investigador]
        range = [int]
    # Investigador
    class nombres_investigador (DataProperty):
        domain = [Investigador]
        range = [str]
    # Investigador
    class apellidos_investigador (DataProperty):
        domain = [Investigador]
        range = [str]
    # Investigador
    class codigo_investigador (DataProperty):
        domain = [Investigador]
        range = [str]
    # Investigador
    class cedula_investigador (DataProperty):
        domain = [Investigador]
        range = [str]
    # Investigador
    class correo_investigador (DataProperty):
        domain = [Investigador]
        range = [str]
    # Palabra
    class id_palabra(DataProperty):
        domain = [Palabra]
        range = [int]
    # Palabra
    class descripcion_palabra (DataProperty):
        domain = [Palabra]
        range = [str]
    # Palabra
    class lema_palabra (DataProperty):
        domain = [Palabra]
        range = [str]
    # Palabra
    class tipo_palabra (DataProperty):
        domain = [Palabra]
        range = [str]
    # Palabra
    class concepto_palabra (DataProperty):
        domain = [Palabra]
        range = [str]
    # Investigador_externo — NOTE(review): str id, unlike most id_*
    # properties (int); confirm intentional.
    class id_investigador_externo(DataProperty):
        domain = [Investigador_externo]
        range = [str]
#############################################OBJECT PROPERTY CREATION#################################################
#######################PARA SNIPPET#########################################
# with ontologia:
# class $1 (ObjectProperty):
# domain = [$2]
# range = [$3]
# inverse_property = $4
# class $4 (ObjectProperty):
# domain = [$3]
# range = [$2]
# inverse_property = $1
# #Metodo $2
# def relation_$1(self,$5)
# self.$1.append($5)
# #Metodo $3
# def relation_$4(self,$6)
# self.$4.append($6)
######################################START###################################
"""
with ontologia:
class universidad_tiene_facultad (ObjectProperty):
domain = [Universidad]
range = [Facultad]
inverse_property = facultad_pertenece_universidad
class facultad_pertenece_universidad (ObjectProperty):
domain = [Facultad]
range = [Universidad]
inverse_property = universidad_tiene_facultad
#Metodo Universidad
def relation_universidad_tiene_facultad(self,facultad):
self.universidad_tiene_facultad.append(facultad)
#Metodo Facultad
def relation_facultad_pertenece_universidad(self,universidad):
self.facultad_pertenece_universidad.append(universidad)
#OP
class universidad_tiene_viis (ObjectProperty):
domain = [Universidad]
range = [VIIS]
inverse_property = viis_pertenece_universidad
class viis_pertenece_universidad (ObjectProperty):
domain = [VIIS]
range = [Universidad]
inverse_property = universidad_tiene_viis
#Metodo Universidad
def relation_universidad_tiene_viis(self,viis):
self.universidad_tiene_viis.append(viis)
#Metodo VIIS
def relation_viis_pertenece_universidad(self,universidad):
self.viis_pertenece_universidad.append(universidad)
#OP
class facultad_tiene_departamento (ObjectProperty):
domain = [Facultad]
range = [Departamento]
inverse_property = departamento_pertenece_facultad
class departamento_pertenece_facultad (ObjectProperty):
domain = [Departamento]
range = [Facultad]
inverse_property = facultad_tiene_departamento
#Metodo Facultad
def relation_facultad_tiene_departamento(self,departamento):
self.facultad_tiene_departamento.append(departamento)
#Metodo Departamento
def relation_departamento_pertenece_facultad(self,facultad):
self.departamento_pertenece_facultad.append(facultad)
#OP
class departamento_tiene_programa (ObjectProperty):
domain = [Departamento]
range = [Programa]
inverse_property = programa_pertenece_departamento
class programa_pertenece_departamento (ObjectProperty):
domain = [Programa]
range = [Departamento]
inverse_property = departamento_tiene_programa
#Metodo Departamento
def relation_departamento_tiene_programa(self,programa):
self.departamento_tiene_programa.append(programa)
#Metodo Programa
def relation_programa_pertenece_departamento(self,departamento):
self.programa_pertenece_departamento.append(departamento)
#OP
class departamento_tiene_gi (ObjectProperty):
domain = [Departamento]
range = [Grupo_investigacion]
inverse_property = gi_pertenece_departamento
class gi_pertenece_departamento (ObjectProperty):
domain = [Grupo_investigacion]
range = [Departamento]
inverse_property = departamento_tiene_gi
#Metodo Departamento
def relation_departamento_tiene_gi(self,gi):
self.departamento_tiene_gi.append(gi)
#Metodo Grupo_investigacion
def relation_gi_pertenece_departamento(self,departamento):
self.gi_pertenece_departamento.append(departamento)
#OP
class programa_tiene_estudiante (ObjectProperty):
domain = [Programa]
range = [Estudiante]
inverse_property = estudiante_pertenece_programa
class estudiante_pertenece_programa (ObjectProperty):
domain = [Estudiante]
range = [Programa]
inverse_property = programa_tiene_estudiante
#Metodo Programa
def relation_programa_tiene_estudiante(self,estudiante):
self.programa_tiene_estudiante.append(estudiante)
#Metodo Estudiante
def relation_estudiante_pertenece_programa(self,programa):
self.estudiante_pertenece_programa.append(programa)
#OP
class programa_tiene_docente (ObjectProperty):
domain = [Programa]
range = [Docente]
inverse_property = docente_pertenece_programa
class docente_pertenece_programa (ObjectProperty):
domain = [Docente]
range = [Programa]
inverse_property = programa_tiene_docente
#Metodo Programa
def relation_programa_tiene_docente(self,docente):
self.programa_tiene_docente.append(docente)
#Metodo Docente
def relation_docente_pertenece_programa(self,programa):
self.docente_pertenece_programa.append(programa)
#OP
class gi_tiene_li (ObjectProperty):
domain = [Grupo_investigacion]
range = [Linea_investigacion]
inverse_property = li_pertenece_gi
class li_pertenece_gi (ObjectProperty):
domain = [Linea_investigacion]
range = [Grupo_investigacion]
inverse_property = gi_tiene_li
#Metodo Grupo_investigacion
def relation_gi_tiene_li(self,li):
self.gi_tiene_li.append(li)
#Metodo Linea_investigacion
def relation_li_pertenece_gi(self,gi):
self.li_pertenece_gi.append(gi)
#OP
class gi_tiene_investigador (ObjectProperty):
domain = [Grupo_investigacion]
range = [Investigador]
inverse_property = investigador_pertenece_gi
class investigador_pertenece_gi (ObjectProperty):
domain = [Investigador]
range = [Grupo_investigacion]
inverse_property = gi_tiene_investigador
#Metodo Grupo_investigacion
def relation_gi_tiene_investigador(self,investigador):
self.gi_tiene_investigador.append(investigador)
#Metodo Investigador
def relation_investigador_pertenece_gi(self,gi):
self.investigador_pertenece_gi.append(gi)
#OP
class gi_tiene_docente (ObjectProperty):
domain = [Grupo_investigacion]
range = [Docente]
inverse_property = docente_pertenece_gi
class docente_pertenece_gi (ObjectProperty):
domain = [Docente]
range = [Grupo_investigacion]
inverse_property = gi_tiene_docente
#Metodo Grupo_investigacion
def relation_gi_tiene_docente(self,docente):
self.gi_tiene_docente.append(docente)
#Metodo Docente
def relation_docente_pertenece_gi(self,gi):
self.docente_pertenece_gi.append(gi)
#OP
class gi_tiene_estudiante (ObjectProperty):
domain = [Grupo_investigacion]
range = [Estudiante]
inverse_property = estudiante_pertenece_gi
class estudiante_pertenece_gi (ObjectProperty):
domain = [Estudiante]
range = [Grupo_investigacion]
inverse_property = gi_tiene_estudiante
#Metodo Grupo_investigacion
def relation_gi_tiene_estudiante(self,estudiante):
self.gi_tiene_estudiante.append(estudiante)
#Metodo Estudiante
def relation_estudiante_pertenece_gi(self,gi):
self.estudiante_pertenece_gi.append(gi)
#OP
class li_tiene_pi (ObjectProperty):
domain = [Linea_investigacion]
range = [Proyecto_investigacion]
inverse_property = pi_pertenece_li
class pi_pertenece_li (ObjectProperty):
domain = [Proyecto_investigacion]
range = [Linea_investigacion]
inverse_property = li_tiene_pi
#Metodo Linea_investigacion
def relation_li_tiene_pi(self,pi):
self.li_tiene_pi.append(pi)
#Metodo Proyecto_investigacion
def relation_pi_pertenece_li(self,li):
self.pi_pertenece_li.append(li)
#OP
class viis_tiene_convocatoria (ObjectProperty):
domain = [VIIS]
range = [Convocatoria]
inverse_property = convocatoria_pertenece_viis
class convocatoria_pertenece_viis (ObjectProperty):
domain = [Convocatoria]
range = [VIIS]
inverse_property = viis_tiene_convocatoria
#Metodo VIIS
def relation_viis_tiene_convocatoria(self,convocatoria):
self.viis_tiene_convocatoria.append(convocatoria)
#Metodo Convocatoria
def relation_convocatoria_pertenece_viis(self,viis):
self.convocatoria_pertenece_viis.append(viis)
#OP
class viis_tiene_investigador (ObjectProperty):
domain = [VIIS]
range = [Investigador]
inverse_property = investigador_pertenece_viis
class investigador_pertenece_viis (ObjectProperty):
domain = [Investigador]
range = [VIIS]
inverse_property = viis_tiene_investigador
#Metodo VIIS
def relation_viis_tiene_investigador(self,investigador):
self.viis_tiene_investigador.append(investigador)
#Metodo Investigador
def relation_investigador_pertenece_viis(self,viis):
self.investigador_pertenece_viis.append(viis)
#OP
class viis_adscribe_gi (ObjectProperty):
domain = [VIIS]
range = [Grupo_investigacion]
inverse_property = gi_esta_adscrito_viis
class gi_esta_adscrito_viis (ObjectProperty):
domain = [Grupo_investigacion]
range = [VIIS]
inverse_property = viis_adscribe_gi
#Metodo VIIS
def relation_viis_adscribe_gi(self,gi):
self.viis_adscribe_gi.append(gi)
#Metodo Grupo_investigacion
def relation_gi_esta_adscrito_viis(self,viis):
self.gi_esta_adscrito_viis.append(viis)
#OP
class viis_tiene_pi (ObjectProperty):
domain = [VIIS]
range = [Proyecto_investigacion]
inverse_property = pi_pertenece_viis
class pi_pertenece_viis (ObjectProperty):
domain = [Proyecto_investigacion]
range = [VIIS]
inverse_property = viis_tiene_pi
#Metodo VIIS
def relation_viis_tiene_pi(self,pi):
self.viis_tiene_pi.append(pi)
#Metodo Proyecto_investigacion
def relation_pi_pertenece_viis(self,viis):
self.pi_pertenece_viis.append(viis)
#OP
class convocatoria_tiene_pi (ObjectProperty):
domain = [Convocatoria]
range = [Proyecto_investigacion]
inverse_property = pi_pertenece_convocatoria
class pi_pertenece_convocatoria (ObjectProperty):
domain = [Proyecto_investigacion]
range = [Convocatoria]
inverse_property = convocatoria_tiene_pi
#Metodo Convocatoria
def relation_convocatoria_tiene_pi(self,pi):
self.convocatoria_tiene_pi.append(pi)
#Metodo Proyecto_investigacion
def relation_pi_pertenece_convocatoria(self,convocatoria):
self.pi_pertenece_convocatoria.append(convocatoria)
#OP
class convocatoria_dirigida_investigador (ObjectProperty):
domain = [Convocatoria]
range = [Investigador]
inverse_property = investigador_se_encuentra_convocatoria
class investigador_se_encuentra_convocatoria (ObjectProperty):
domain = [Investigador]
range = [Convocatoria]
inverse_property = convocatoria_dirigida_investigador
#Metodo Convocatoria
def relation_convocatoria_dirigida_investigador(self,investigador):
self.convocatoria_dirigida_investigador.append(investigador)
#Metodo Investigador
def relation_investigador_se_encuentra_convocatoria(self,convocatoria):
self.investigador_se_encuentra_convocatoria.append(convocatoria)
#OP
class estudiante_es_autor_pi (ObjectProperty):
domain = [Estudiante]
range = [Proyecto_investigacion]
inverse_property = pi_tiene_autor_estudiante
class pi_tiene_autor_estudiante (ObjectProperty):
domain = [Proyecto_investigacion]
range = [Estudiante]
inverse_property = estudiante_es_autor_pi
#Metodo Estudiante
def relation_estudiante_es_autor_pi(self,pi):
self.estudiante_es_autor_pi.append(pi)
#Metodo Proyecto_investigacion
def relation_pi_tiene_autor_estudiante(self,estudiante):
self.pi_tiene_autor_estudiante.append(estudiante)
#OP
class docente_es_autor_pi (ObjectProperty):
domain = [Docente]
range = [Proyecto_investigacion]
inverse_property = pi_tiene_autor_docente
class pi_tiene_autor_docente (ObjectProperty):
domain = [Proyecto_investigacion]
range = [Docente]
inverse_property = docente_es_autor_pi
#Metodo Docente
def relation_docente_es_autor_pi(self,pi):
self.docente_es_autor_pi.append(pi)
#Metodo Proyecto_investigacion
def relation_pi_tiene_autor_docente(self,docente):
self.pi_tiene_autor_docente.append(docente)
#OP
class docente_asesora_pi (ObjectProperty):
domain = [Docente]
range = [Proyecto_investigacion]
inverse_property = pi_es_asesorado_docente
class pi_es_asesorado_docente (ObjectProperty):
domain = [Proyecto_investigacion]
range = [Docente]
inverse_property = docente_asesora_pi
#Metodo Docente
def relation_docente_asesora_pi(self,pi):
self.docente_asesora_pi.append(pi)
#Metodo Proyecto_investigacion
def relation_pi_es_asesorado_docente(self,docente):
self.pi_es_asesorado_docente.append(docente)
#OP
class ie_es_autor_pi(ObjectProperty):
domain = [Investigador_externo]
range = [Proyecto_investigacion]
inverse_propety = pi_tiene_autor_ie
class pi_tiene_autor_ie(ObjectProperty):
domain = [Proyecto_investigacion]
range = [Investigador_externo]
inverse_propety = ie_es_autor_pi
#Metodo Investigador_externo
def relation_ie_es_autor_pi(self,pi):
self.ie_es_autor_pi.append(pi)
#Metodo Proyecto_investigación
def relation_pi_tiene_autor_ie(self,ie):
self.pi_tiene_autor_ie.append(ie)
#OP
class investigador_es_docente (ObjectProperty):
domain = [Investigador]
range = [Docente]
inverse_property = docente_es_investigador
class docente_es_investigador (ObjectProperty):
domain = [Docente]
range = [Investigador]
inverse_property = investigador_es_docente
#Metodo Investigador
def relation_investigador_es_docente(self,docente):
self.investigador_es_docente.append(docente)
#Metodo Docente
def relation_docente_es_investigador(self,investigador):
self.docente_es_investigador.append(investigador)
#OP
class investigador_es_estudiante (ObjectProperty):
domain = [Investigador]
range = [Estudiante]
inverse_property = estudiante_es_investigador
class estudiante_es_investigador (ObjectProperty):
domain = [Estudiante]
range = [Investigador]
inverse_property = investigador_es_estudiante
#Metodo Investigador
def relation_investigador_es_estudiante(self,estudiante):
self.investigador_es_estudiante.append(estudiante)
#Metodo Estudiante
def relation_estudiante_es_investigador(self,investigador):
self.estudiante_es_investigador.append(investigador)
#OP
class investigador_es_ie (ObjectProperty):
domain = [Investigador]
range = [Investigador_externo]
inverse_property = ie_es_investigador
class ie_es_investigador (ObjectProperty):
domain = [Investigador_externo]
range = [Investigador]
inverse_property = investigador_es_ie
#Metodo Investigador
def relation_investigador_es_ie(self,ie):
self.investigador_es_ie.append(ie)
#Metodo Investigador_externo
def relation_ie_es_investigador(self,investigador):
self.ie_es_investigador.append(investigador)
#OP
class palabra_sinonimo_palabra (ObjectProperty):
domain = [Palabra]
range = [Palabra]
inverse_property = palabra_sinonimo_palabra
#Metodo Palabra
def relation_palabra_sinonimo_palabra(self,palabra):
self.palabra_sinonimo_palabra.append(palabra)
#OP
class palabra_conecta_palabra (ObjectProperty):
domain = [Palabra]
range = [Palabra]
inverse_property = palabra_conecta_palabra
#Metodo Palabra
def relation_palabra_conecta_palabra(self,palabra):
self.palabra_conecta_palabra.append(palabra)
###SE ESTA PROBANDO POR AHORA CON ESTE OP
#OP
class pi_tiene_palabra(ObjectProperty):
domain = [Proyecto_investigacion]
range = [Palabra]
class palabra_describe_pi(ObjectProperty):
domain = [Palabra]
range = [Proyecto_investigacion]
inverse_property = pi_tiene_palabra
#Metodo Proyecto_investigacion
def relation_pi_tiene_palabra(self,palabra):
self.pi_tiene_palabra.append(palabra)
#Metodo Palabra
def relation_palabra_describe_pi(self,pi):
self.palabra_describe_pi.append(pi)
""" |
bmorris3/asteroid | asteroid/models/zenodo.py | import os
import json
import requests
from io import BufferedReader, BytesIO
import torch
class Zenodo(object):
    """ Facilitate Zenodo's REST API.
    Args:
        api_key (str): Access token generated to upload depositions.
        use_sandbox (bool): Whether to use the sandbox (default: True)
            Note that `api_key` tokens are different in sandbox.
    Methods (all methods return the requests response):
        create_new_deposition
        change_metadata_in_deposition,
        upload_new_file_to_deposition
        publish_deposition
        get_deposition
        remove_deposition
        remove_all_depositions
    Notes:
        A Zenodo record is something that is public and cannot be deleted.
        A Zenodo deposit has not yet been published, is private and can be
        deleted.
    """
    def __init__(self, api_key, use_sandbox=True):
        self.use_sandbox = use_sandbox
        if use_sandbox is True:
            self.zenodo_address = 'https://sandbox.zenodo.org'
        else:
            self.zenodo_address = 'https://zenodo.org'
        self.api_key = api_key
        # Auth-only header, for endpoints that take form data / files.
        self.auth_header = {'Authorization': f"Bearer {self.api_key}"}
        # JSON headers, for endpoints that take a JSON body.
        self.headers = {"Content-Type": "application/json",
                        'Authorization': f"Bearer {self.api_key}"}

    def create_new_deposition(self, metadata=None):
        """ Creates a new deposition.
        Args:
            metadata (dict, optional): Metadata dict to upload on the new
                deposition.
        """
        r = requests.post(
            f'{self.zenodo_address}/api/deposit/depositions',
            json={}, headers=self.headers
        )
        if r.status_code != 201:
            print("Creation failed (status code: {})".format(r.status_code))
            return r
        if metadata is None:
            # Bug fix: with the default metadata=None, the original fell
            # through to the "Could not interpret metadata type" error
            # branch. No metadata was requested, so just return.
            return r
        if isinstance(metadata, dict):
            return self.change_metadata_in_deposition(r.json()["id"], metadata)
        print(f"Could not interpret metadata type ({type(metadata)}), "
              "expected dict")
        return r

    def change_metadata_in_deposition(self, dep_id, metadata):
        """ Set or replace metadata in given deposition
        Args:
            dep_id (int): deposition id. You can get it with
                `r = create_new_deposition(); dep_id = r.json()['id']`
            metadata (dict): Metadata dict.
        Examples:
            metadata = {
                'title': 'My first upload',
                'upload_type': 'poster',
                'description': 'This is my first upload',
                'creators': [{'name': '<NAME>',
                              'affiliation': 'Zenodo'}]
            }
        """
        data = {"metadata": metadata}
        r = requests.put(
            f'{self.zenodo_address}/api/deposit/depositions/{dep_id}',
            data=json.dumps(data), headers=self.headers
        )
        return r

    def upload_new_file_to_deposition(self, dep_id, file, name=None):
        """ Upload one file to existing deposition.
        Args:
            dep_id (int): deposition id. You can get it with
                `r = create_new_deposition(); dep_id = r.json()['id']`
            file (str or io.BufferedReader): path to a file, or already opened
                file (path prefered).
            name (str, optional): name given to the uploaded file.
                Defaults to the path.
        (More: https://developers.zenodo.org/#deposition-files)
        """
        # Track whether we opened the file ourselves so we can close it
        # afterwards (the original leaked the file handle). Caller-provided
        # readers are left open: closing them is the caller's business.
        opened_here = False
        if isinstance(file, BufferedReader):
            files = {'file': file}
            filename = name if name else "Unknown"
        elif isinstance(file, str):
            if os.path.isfile(file):
                # This is a file, read it
                files = {'file': open(os.path.expanduser(file), 'rb')}
                opened_here = True
                filename = name if name else os.path.basename(file)
            else:
                # This is a string, convert to BytesIO
                files = {'file': BytesIO(bytes(file, 'utf-8'))}
                filename = name if name else "Unknown"
        else:
            raise ValueError('Unknown file format , expected str or Bytes ')
        data = {"name": filename}
        print("Submitting Data: {} and Files: {}".format(data, files))
        try:
            r = requests.post(
                f'{self.zenodo_address}/api/deposit/depositions/{dep_id}/files',
                headers=self.auth_header, data=data,
                files=files
            )
        finally:
            if opened_here:
                files['file'].close()
        print("Zenodo received : {}".format(r.content))
        return r

    def publish_deposition(self, dep_id):  # pragma: no cover (Cannot publish)
        """ Publish given deposition (Cannot be deleted) !
        Args:
            dep_id (int): deposition id. You can get it with
                `r = create_new_deposition(); dep_id = r.json()['id']`
        """
        r = requests.post(
            f'{self.zenodo_address}/api/deposit/depositions/{dep_id}/actions/publish',
            headers=self.headers
        )
        return r

    def get_deposition(self, dep_id=-1):
        """ Get deposition by deposition id. Get all dep_id is -1 (default)."""
        if dep_id > -1:
            print(f"Get deposition {dep_id} from Zenodo")
            r = requests.get(
                f"{self.zenodo_address}/api/deposit/depositions/{dep_id}",
                headers=self.headers
            )
        else:
            print("Get all depositions from Zenodo")
            r = requests.get(
                f"{self.zenodo_address}/api/deposit/depositions",
                headers=self.headers
            )
        print("Get Depositions: Status Code: {}".format(r.status_code))
        return r

    def remove_deposition(self, dep_id):
        """ Remove deposition with deposition id `dep_id`"""
        print(f'Delete deposition number {dep_id}')
        r = requests.delete(
            f'{self.zenodo_address}/api/deposit/depositions/{dep_id}',
            headers=self.auth_header
        )
        return r

    def remove_all_depositions(self):
        """ Removes all unpublished deposition (not records)."""
        all_depositions = self.get_deposition()
        for dep in all_depositions.json():
            self.remove_deposition(dep["id"])
# Probably remove that.
# sandbox_asteroid_url = 'https://sandbox.zenodo.org/deposit/new?c=asteroid-models'
# zenodo_asteroid_url = 'https://zenodo.org/deposit/new?c=asteroid-models'
class AsteroidZenodo(Zenodo):
    """Zenodo client specialized for sharing Asteroid models."""
    # Keys a serialized model dict must contain before it can be shared.
    # Currently empty, so any loadable model passes validation.
    REQUIRED_KEYS = []

    def share_model(self, model_path):
        """Load and validate the serialized model at `model_path`.

        Args:
            model_path (str): Path to a model file loadable by `torch.load`.

        Raises:
            ValueError: If any key in REQUIRED_KEYS is missing from the model.
        """
        # Load model_path
        model = torch.load(model_path)
        # Assert all keys are there (`k in model` is the idiomatic dict
        # membership test; the original used `k in model.keys()`).
        if not all(k in model for k in self.REQUIRED_KEYS):
            missing = [k for k in self.REQUIRED_KEYS if k not in model]
            raise ValueError(f"Expected all keys {self.REQUIRED_KEYS} but "
                             f"{missing} were missing.")
|
bmorris3/asteroid | asteroid/data/musdb18_dataset.py | <filename>asteroid/data/musdb18_dataset.py
from pathlib import Path
import torch.utils.data
import random
import torch
import tqdm
import soundfile as sf
class MUSDB18Dataset(torch.utils.data.Dataset):
"""MUSDB18 music separation dataset
The dataset consists of 150 full lengths music tracks (~10h duration) of
different genres along with their isolated stems:
`drums`, `bass`, `vocals` and `others`.
Out-of-the-box, asteroid does only support MUSDB18-HQ which comes as
uncompressed WAV files. To use the MUSDB18, please convert it to WAV first:
MUSDB18 HQ: https://zenodo.org/record/3338373
MUSDB18 https://zenodo.org/record/1117372
Note: The datasets are hosted on Zenodo and require that users
request access, since the tracks can only be used for
academic purposes. We manually check this requests.
This dataset asssumes music tracks in (sub)folders where each folder
has a fixed number of sources (defaults to 4). For each track, a list
of `sources` and a common `suffix` can be specified.
A linear mix is performed on the fly by summing up the sources
Due to the fact that all tracks comprise the exact same set
of sources, random track mixing can be used can be used,
where sources from different tracks are mixed together.
Folder Structure:
train/1/vocals.wav ---------------\
train/1/drums.wav -----------------+--> input (mix), output[target]
train/1/bass.wav ------------------|
train/1/other.wav ----------------/
Args:
root (str): Root path of dataset
sources (:obj:`list` of :obj:`str`, optional): List of source names.
Defaults to MUSDB18 4 stem scenario: `vocals`, `drums`, `bass`, `other`.
suffix (str, optional): Filename suffix, defaults to `.wav`.
split (str, optional): Dataset subfolder, defaults to `train`.
subset (:obj:`list` of :obj:`str`, optional): Selects a specific of
list of tracks to be loaded, defaults to `None` (loads all tracks).
segment (float, optional): Duration of segments in seconds,
defaults to ``None`` which loads the full-length audio tracks.
samples_per_track (int, optional):
Number of samples yielded from each track, can be used to increase
dataset size, defaults to `1`.
random_segments (boolean, optional): Enables random offset for track segments.
random_track_mix boolean: enables mixing of random sources from
different tracks to assemble mix.
source_augmentations (:obj:`list` of `obj`:`callable`):
list of augmentation function names,
defaults to no-op augmentations (input = output)
sample_rate (int, optional): Samplerate of files in dataset.
Attributes:
root (str): Root path of dataset
sources (:obj:`list` of :obj:`str`, optional): List of source names.
Defaults to MUSDB18 4 stem scenario: `vocals`, `drums`, `bass`, `other`.
suffix (str, optional): Filename suffix, defaults to `.wav`.
split (str, optional): Dataset subfolder, defaults to `train`.
subset (:obj:`list` of :obj:`str`, optional): Selects a specific of
list of tracks to be loaded, defaults to `None` (loads all tracks).
segment (float, optional): Duration of segments in seconds,
defaults to ``None`` which loads the full-length audio tracks.
samples_per_track (int, optional):
Number of samples yielded from each track, can be used to increase
dataset size, defaults to `1`.
random_segments (boolean, optional): Enables random offset for track segments.
random_track_mix boolean: enables mixing of random sources from
different tracks to assemble mix.
source_augmentations (:obj:`list` of `obj`:`callable`):
list of augmentation function names,
defaults to no-op augmentations (input = output)
sample_rate (int, optional): Samplerate of files in dataset.
tracks (:obj:`list` of :obj:`Dict`): List of track metadata
"""
dataset_name = 'MUSDB18'
def __init__(self,
root,
sources=['vocals', 'bass', 'drums', 'other'],
suffix='.wav',
split='train',
subset=None,
segment=None,
samples_per_track=1,
random_segments=False,
random_track_mix=False,
source_augmentations=lambda audio: audio,
sample_rate=44100):
self.root = Path(root).expanduser()
self.split = split
self.sample_rate = sample_rate
self.segment = segment
self.random_track_mix = random_track_mix
self.random_segments = random_segments
self.source_augmentations = source_augmentations
self.sources = sources
self.suffix = suffix
self.subset = subset
self.samples_per_track = samples_per_track
self.tracks = list(self.get_tracks())
if not self.tracks:
raise RuntimeError("No tracks found.")
def __getitem__(self, index):
# assemble the mixture of target and interferers
audio_sources = {}
# get track_id
track_id = index // self.samples_per_track
if self.random_segments:
start = random.uniform(
0, self.tracks[track_id]['min_duration'] - self.segment
)
else:
start = 0
# load sources
for source in self.sources:
# optionally select a random track for each source
if self.random_track_mix:
# load a different track
track_id = random.choice(range(len(self.tracks)))
if self.random_segments:
start = random.uniform(
0, self.tracks[track_id]['min_duration'] - self.segment
)
# loads the full track duration
start_sample = int(start * self.sample_rate)
# check if dur is none
if self.segment:
# stop in soundfile is calc in samples, not seconds
stop_sample = start_sample + int(
self.segment * self.sample_rate
)
else:
# set to None for reading complete file
stop_sample = None
# load actual audio
audio, _ = sf.read(
Path(
self.tracks[track_id]['path'] / source
).with_suffix(self.suffix),
always_2d=True,
start=start_sample,
stop=stop_sample
)
# convert to torch tensor
audio = torch.tensor(audio.T, dtype=torch.float)
# apply source-wise augmentations
audio = self.source_augmentations(audio)
audio_sources[source] = audio
# apply linear mix over source index=0
audio_mix = torch.stack(list(audio_sources.values())).sum(0)
return audio_mix, audio_sources
def __len__(self):
return len(self.tracks) * self.samples_per_track
    def get_tracks(self):
        """Loads input and output tracks.

        Yields:
            dict with keys 'path' (track directory) and 'min_duration'
            (shortest source duration in seconds, or None when not
            segmenting).

        Tracks are skipped — with a console message — when a requested
        source file is missing, when any source's sample rate differs from
        `self.sample_rate`, or when the track is shorter than `self.segment`.
        """
        p = Path(self.root, self.split)
        for track_path in tqdm.tqdm(p.iterdir()):
            if track_path.is_dir():
                if self.subset and track_path.stem not in self.subset:
                    # skip this track
                    continue
                source_paths = [
                    track_path / (s + self.suffix) for s in self.sources
                ]
                if not all(sp.exists() for sp in source_paths):
                    print(
                        "Exclude track due to non-existing source",
                        track_path
                    )
                    continue
                # get metadata without reading the audio payload
                infos = list(map(sf.info, source_paths))
                if not all(
                    i.samplerate == self.sample_rate for i in infos
                ):
                    print(
                        "Exclude track due to different sample rate ",
                        track_path
                    )
                    continue
                if self.segment is not None:
                    # get minimum duration of track across its sources
                    min_duration = min(i.duration for i in infos)
                    if min_duration > self.segment:
                        yield({
                            'path': track_path,
                            'min_duration': min_duration
                        })
                else:
                    yield({
                        'path': track_path,
                        'min_duration': None
                    })
def get_infos(self):
""" Get dataset infos (for publishing models).
Returns:
dict, dataset infos with keys `dataset`, `task` and `licences`.
"""
infos = dict()
infos['dataset'] = self.dataset_name
infos['task'] = 'enhancement'
infos['licenses'] = [musdb_license]
return infos
# Placeholder for the MUSDB license metadata attached to published models.
musdb_license = {}
|
bmorris3/asteroid | asteroid/__init__.py | <filename>asteroid/__init__.py
import pathlib
from .utils import deprecation_utils, torch_utils
from .models import ConvTasNet, DPRNNTasNet
project_root = str(pathlib.Path(__file__).expanduser().absolute().parent.parent)
__version__ = '0.2.1'
|
bmorris3/asteroid | tests/models/publish_test.py | import os
import json
import shutil
from asteroid.models import save_publishable, upload_publishable, ConvTasNet
from asteroid.data import WhamDataset
def populate_wham_dir(path):
    """Create a minimal fake WHAM metadata directory for tests.

    Writes one empty JSON metadata file per WHAM source/mixture under
    `path`, creating the directory if needed.

    Args:
        path: directory to populate (created with exist_ok=True).
    """
    wham_files = ['s1', 's2', 'noise', 'mix_single', 'mix_clean', 'mix_both']
    os.makedirs(path, exist_ok=True)
    for source in wham_files:
        json_file = os.path.join(path, source + '.json')
        # Fix: the original wrapped the already-joined path in a second,
        # redundant os.path.join call.
        with open(json_file, 'w') as f:
            json.dump(dict(), f)
def test_upload():
    """Smoke-test save_publishable on a tiny model; the actual Zenodo
    upload path is intentionally disabled (requires credentials/network)."""
    # Make dirs
    os.makedirs('tmp/publish_dir', exist_ok=True)
    populate_wham_dir('tmp/wham')
    # Dataset and NN — deliberately tiny hyperparameters to keep this fast
    train_set = WhamDataset('tmp/wham', task='sep_clean')
    model = ConvTasNet(n_src=2, n_repeats=2, n_blocks=2, bn_chan=16,
                       hid_chan=4, skip_chan=8, n_filters=32)
    # Save publishable
    model_conf = model.serialize()
    model_conf.update(train_set.get_infos())
    save_publishable('tmp/publish_dir', model_conf, metrics={}, train_conf={})
    # Upload is skipped on purpose; flip to True only for manual sandbox runs.
    if False:
        # Upload
        zen, current = upload_publishable(
            'tmp/publish_dir',
            uploader="<NAME>",
            affiliation="INRIA",
            use_sandbox=True,
            unit_test=True,  # Remove this argument and monkeypatch `input()`
        )
        # Assert metadata is correct
        meta = current.json()['metadata']
        assert meta['creators'][0]['name'] == "<NAME>"
        assert meta['creators'][0]['affiliation'] == "INRIA"
        assert 'asteroid-models' in [d['identifier'] for d in meta['communities']]
        # Clean up
        zen.remove_deposition(current.json()['id'])
    shutil.rmtree('tmp/wham')
if __name__ == '__main__':
test_upload() |
UND-ARC/IPCam | tools/autoaspfinder.py | # coding: utf-8
import sys
import os
def extract_asps(lines):
    """Return, for each line mentioning 'asp', its first 'asp'-containing
    token, unwrapped from surrounding single quotes when present.

    Args:
        lines: iterable of already-stripped text lines.

    Returns:
        list of str: one extracted asp path/URL fragment per matching line.
    """
    asps = []
    for line in lines:
        if 'asp' not in line:
            continue
        # First whitespace-separated token that mentions 'asp'...
        token = next(part for part in line.split(' ') if 'asp' in part)
        # ...then the first quote-delimited piece of that token that does,
        # which strips wrapping single quotes like 'login.asp?x=1'.
        asp = next(sub for sub in token.split("'") if 'asp' in sub)
        asps.append(asp)
    return asps


if __name__ == '__main__':
    # Guarded so the module is importable (and testable) without side effects.
    filename = sys.argv[1]
    with open(filename, 'r') as f:
        stripped_lines = [line.strip() for line in f.readlines()]
    for asp in extract_asps(stripped_lines):
        print(asp)
|
iakoul17/runwayML | test_video.py | """Test pre-trained RGB model on a single video.
Date: 01/15/18
Authors: <NAME> and <NAME>
This script accepts an mp4 video as the command line argument --video_file
and averages ResNet50 (trained on Moments) predictions on num_segment equally
spaced frames (extracted using ffmpeg).
Alternatively, one may instead provide the path to a directory containing
video frames saved as jpgs, which are sorted and forwarded through the model.
ResNet50 trained on Moments is used to predict the action for each frame,
and these class probabilities are averaged to produce a video-level prediction.
Optionally, one can generate a new video --rendered_output from the frames
used to make the prediction with the predicted category in the top-left corner.
"""
import os
import runway
from runway.data_types import number, text, image
from example_model import ExampleModel
#import moviepy.editor as mpy
import torch.optim
import torch.nn.parallel
from torch.nn import functional as F
from pytube import YouTube #for youtube videos
import cv2 # for capturing videos
import math # for mathematical operations
import numpy as np
import matplotlib.pyplot as plt # for plotting the images
import pandas as pd
from PIL import Image, ImageStat
import models
from utils import extract_frames, load_frames, render_frames
# Options: working directories for the downloaded video and extracted frames.
videoPath = './tmp'
imgPath = './tmp/frames'
os.makedirs(videoPath, exist_ok=True)
os.makedirs(imgPath, exist_ok=True)
# Model architecture plus a hard-coded clip window and demo YouTube video id.
arch = 'resnet3d50'
start = 10
end = 30
video_hash = 'Y6m6DYJ7RW8'
# Load model
@runway.setup
def setup():
    """Runway setup hook: load the pretrained model once at startup."""
    return models.load_model(arch)

# Get dataset categories (index -> label mapping used for predictions)
categories = models.load_categories()
# Load the video frame transform applied to every extracted frame
transform = models.load_transform()
@runway.command('classify', inputs={ 'video': text() }, outputs={ 'label': text() })
def classify(model, input):
    """Runway command: classify the action shown in a YouTube video.

    Downloads the video identified by ``input['video']``, samples
    ``num_segments`` frames, averages per-frame class probabilities, and
    returns the top predicted category label.

    Args:
        model: the model returned by ``setup``.
        input: runway input dict with key 'video' (a YouTube video id).

    Returns:
        str: highest-probability category label.
    """
    # NOTE(review): `start`, `end` and `videoPath` are module-level constants;
    # the start/end query params shape the embed URL, not the downloaded file.
    yt = YouTube('https://youtube.com/embed/%s?start=%d&end=%d' % (input['video'], start, end))
    video = yt.streams.all()[0]
    video_file = video.download(videoPath)
    num_segments = 16
    print('Extracting frames using ffmpeg...')
    frames = extract_frames(video_file, num_segments)
    # Stack per-frame transforms into one batch tensor. Renamed from `input`
    # (which shadowed the builtin AND clobbered the command's input dict).
    # Assumes transform() yields (C, H, W) tensors -- TODO confirm.
    clip = torch.stack([transform(frame) for frame in frames], 1).unsqueeze(0)
    # Average class probabilities over frames for a video-level prediction.
    with torch.no_grad():
        logits = model(clip)
        h_x = F.softmax(logits, 1).mean(dim=0)
        probs, idx = h_x.sort(0, True)
    # Output the top prediction.
    return categories[idx[0]]
""" print('RESULT ON ' + video_file)
for i in range(0, 5):
print('{:.3f} -> {}'.format(probs[i], categories[idx[i]])) """
# Render output frames with prediction text.
""" prediction = categories[idx[0]]
rendered_frames = render_frames(frames, prediction)
clip = mpy.ImageSequenceClip(rendered_frames, fps=4)
clip.write_videofile(args.rendered_output) """
|
fxyu/pygraphviz-1.3.1-win64Patch | pygraphviz/tests/test_clear.py | <reponame>fxyu/pygraphviz-1.3.1-win64Patch
# -*- coding: utf-8 -*-
from nose.tools import *
import pygraphviz as pgv
def test_del():
    """Deleting a node by name empties the graph."""
    A = pgv.AGraph()
    A.add_node(1,foo='bar')
    # NOTE(review): this test exercises delete_node, not clear(); the
    # infinite-loop remark applies to the clear() tests below.
    A.delete_node('1')
    assert_equal(len(A), 0)

def test_clear_node_with_attributes():
    """clear() removes nodes along with their attributes."""
    A = pgv.AGraph()
    A.add_node(1,foo='bar')
    # Regression guard: after the Python 3 port, clear() sometimes looped forever.
    A.clear()
    assert_equal(len(A), 0)
    assert_equal(A.nodes(), [])
    assert_equal(A.node_attr.keys(), [])

def test_clear_graph_attributes():
    """clear() also removes graph-level attributes."""
    A = pgv.AGraph()
    A.add_node(1,foo='bar')
    A.graph_attr.update(landscape='true',ranksep='0.1')
    # Regression guard: after the Python 3 port, clear() sometimes looped forever.
    A.clear()
    assert_equal(len(A), 0)
    assert_equal(A.nodes(), [])
    assert_equal(A.node_attr.keys(), [])
    assert_equal(A.graph_attr.keys(), [])
fxyu/pygraphviz-1.3.1-win64Patch | pygraphviz/version.py | <gh_stars>0
"""
Version information for PyGraphviz, created during installation.
Do not add this file to the repository.
"""
__version__ = '1.3.1'
# Presumably filled in by the build when available; None in dev checkouts.
__revision__ = None
# Timestamp recorded at installation time (see module docstring).
__date__ = 'Sun Sep 6 07:49:58 2015'
|
fxyu/pygraphviz-1.3.1-win64Patch | pygraphviz/graphviz.py | # This file was automatically generated by SWIG (http://www.swig.org).
# Version 3.0.2
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info
if version_info >= (2,6,0):
    def swig_import_helper():
        # Locate and load the compiled _graphviz extension that sits next to
        # this module, restricting the search to this package's directory.
        from os.path import dirname
        import imp  # NOTE(review): `imp` is deprecated on Python 3 (generated code)
        fp = None
        try:
            fp, pathname, description = imp.find_module('_graphviz', [dirname(__file__)])
        except ImportError:
            # Fall back to a regular import from sys.path.
            import _graphviz
            return _graphviz
        if fp is not None:
            try:
                _mod = imp.load_module('_graphviz', fp, pathname, description)
            finally:
                fp.close()
            return _mod
    _graphviz = swig_import_helper()
    del swig_import_helper
else:
    import _graphviz
del version_info
try:
    _swig_property = property
except NameError:
    pass # Python < 2.2 doesn't have 'property'.

def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
    # Attribute setter for SWIG proxy classes: 'thisown'/'this' are routed to
    # the underlying C object; other names go through the class's swig setter
    # table, then (non-static case only) to the instance __dict__.
    if (name == "thisown"): return self.this.own(value)
    if (name == "this"):
        if type(value).__name__ == 'SwigPyObject':
            self.__dict__[name] = value
            return
    method = class_type.__swig_setmethods__.get(name,None)
    if method: return method(self,value)
    if (not static):
        self.__dict__[name] = value
    else:
        raise AttributeError("You cannot add attributes to %s" % self)

def _swig_setattr(self,class_type,name,value):
    # Dynamic variant: unknown attributes are stored on the instance.
    return _swig_setattr_nondynamic(self,class_type,name,value,0)

def _swig_getattr(self,class_type,name):
    # Attribute getter: 'thisown' reflects C-object ownership; other names
    # are looked up in the class's swig getter table.
    if (name == "thisown"): return self.this.own()
    method = class_type.__swig_getmethods__.get(name,None)
    if method: return method(self)
    raise AttributeError(name)
def _swig_repr(self):
try: strthis = "proxy of " + self.this.__repr__()
except: strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
try:
    # New-style class base (always available on modern Pythons).
    _object = object
    _newclass = 1
except AttributeError:
    # Pre-2.2 Python without new-style classes.
    class _object : pass
    _newclass = 0
def agopen(*args):
return _graphviz.agopen(*args)
agopen = _graphviz.agopen
def agraphnew(name, strict=False, directed=False):
    """Create a new empty graph named `name`.

    The (strict, directed) flag pair selects one of cgraph's four
    graph-kind descriptors.
    """
    if strict:
        desc = cvar.Agstrictdirected if directed else cvar.Agstrictundirected
    else:
        desc = cvar.Agdirected if directed else cvar.Agundirected
    return _graphviz.agopen(name, desc, None)
def agclose(*args):
return _graphviz.agclose(*args)
agclose = _graphviz.agclose
def agread(*args):
return _graphviz.agread(*args)
agread = _graphviz.agread
def agwrite(*args):
return _graphviz.agwrite(*args)
agwrite = _graphviz.agwrite
def agisundirected(*args):
return _graphviz.agisundirected(*args)
agisundirected = _graphviz.agisundirected
def agisdirected(*args):
return _graphviz.agisdirected(*args)
agisdirected = _graphviz.agisdirected
def agisstrict(*args):
return _graphviz.agisstrict(*args)
agisstrict = _graphviz.agisstrict
def agnode(*args):
return _graphviz.agnode(*args)
agnode = _graphviz.agnode
def agidnode(*args):
return _graphviz.agidnode(*args)
agidnode = _graphviz.agidnode
def agsubnode(*args):
return _graphviz.agsubnode(*args)
agsubnode = _graphviz.agsubnode
def agfstnode(*args):
return _graphviz.agfstnode(*args)
agfstnode = _graphviz.agfstnode
def agnxtnode(*args):
return _graphviz.agnxtnode(*args)
agnxtnode = _graphviz.agnxtnode
def aglstnode(*args):
return _graphviz.aglstnode(*args)
aglstnode = _graphviz.aglstnode
def agprvnode(*args):
return _graphviz.agprvnode(*args)
agprvnode = _graphviz.agprvnode
def agedge(*args):
return _graphviz.agedge(*args)
agedge = _graphviz.agedge
def agidedge(*args):
return _graphviz.agidedge(*args)
agidedge = _graphviz.agidedge
def agsubedge(*args):
return _graphviz.agsubedge(*args)
agsubedge = _graphviz.agsubedge
def agfstin(*args):
return _graphviz.agfstin(*args)
agfstin = _graphviz.agfstin
def agnxtin(*args):
return _graphviz.agnxtin(*args)
agnxtin = _graphviz.agnxtin
def agfstout(*args):
return _graphviz.agfstout(*args)
agfstout = _graphviz.agfstout
def agnxtout(*args):
return _graphviz.agnxtout(*args)
agnxtout = _graphviz.agnxtout
def agfstedge(*args):
return _graphviz.agfstedge(*args)
agfstedge = _graphviz.agfstedge
def agnxtedge(*args):
return _graphviz.agnxtedge(*args)
agnxtedge = _graphviz.agnxtedge
def aghead(*args):
return _graphviz.aghead(*args)
aghead = _graphviz.aghead
def agtail(*args):
return _graphviz.agtail(*args)
agtail = _graphviz.agtail
def agattr(*args):
return _graphviz.agattr(*args)
agattr = _graphviz.agattr
def agattrsym(*args):
return _graphviz.agattrsym(*args)
agattrsym = _graphviz.agattrsym
def agnxtattr(*args):
return _graphviz.agnxtattr(*args)
agnxtattr = _graphviz.agnxtattr
def agget(*args):
return _graphviz.agget(*args)
agget = _graphviz.agget
def agxget(*args):
return _graphviz.agxget(*args)
agxget = _graphviz.agxget
def agset(*args):
return _graphviz.agset(*args)
agset = _graphviz.agset
def agxset(*args):
return _graphviz.agxset(*args)
agxset = _graphviz.agxset
def agsafeset(*args):
return _graphviz.agsafeset(*args)
agsafeset = _graphviz.agsafeset
def agattrname(*args):
return _graphviz.agattrname(*args)
agattrname = _graphviz.agattrname
def agattrdefval(*args):
return _graphviz.agattrdefval(*args)
agattrdefval = _graphviz.agattrdefval
def agsafeset_label(*args):
return _graphviz.agsafeset_label(*args)
agsafeset_label = _graphviz.agsafeset_label
def agattr_label(*args):
return _graphviz.agattr_label(*args)
agattr_label = _graphviz.agattr_label
def agsubg(*args):
return _graphviz.agsubg(*args)
agsubg = _graphviz.agsubg
def agfstsubg(*args):
return _graphviz.agfstsubg(*args)
agfstsubg = _graphviz.agfstsubg
def agnxtsubg(*args):
return _graphviz.agnxtsubg(*args)
agnxtsubg = _graphviz.agnxtsubg
def agparent(*args):
return _graphviz.agparent(*args)
agparent = _graphviz.agparent
def agroot(*args):
return _graphviz.agroot(*args)
agroot = _graphviz.agroot
def agdelsubg(*args):
return _graphviz.agdelsubg(*args)
agdelsubg = _graphviz.agdelsubg
def agnnodes(*args):
return _graphviz.agnnodes(*args)
agnnodes = _graphviz.agnnodes
def agnedges(*args):
return _graphviz.agnedges(*args)
agnedges = _graphviz.agnedges
def agdegree(*args):
return _graphviz.agdegree(*args)
agdegree = _graphviz.agdegree
def agraphof(*args):
return _graphviz.agraphof(*args)
agraphof = _graphviz.agraphof
def agnameof(*args):
return _graphviz.agnameof(*args)
agnameof = _graphviz.agnameof
def agdelnode(*args):
return _graphviz.agdelnode(*args)
agdelnode = _graphviz.agdelnode
def agdeledge(*args):
return _graphviz.agdeledge(*args)
agdeledge = _graphviz.agdeledge
def agnameof(handle):
    """Return the name of a graph/node/edge, or None when it is anonymous.

    Hand-written override of the generated wrapper: cgraph reports
    anonymous objects as b'' or names starting with b'%'; normalize both
    (and a missing name) to None.
    """
    name = _graphviz.agnameof(handle)
    if name is None or name == b'' or name.startswith(b'%'):
        return None
    return name
AGRAPH = _graphviz.AGRAPH
AGNODE = _graphviz.AGNODE
AGOUTEDGE = _graphviz.AGOUTEDGE
AGINEDGE = _graphviz.AGINEDGE
AGEDGE = _graphviz.AGEDGE
# This file is compatible with both classic and new-style classes.
cvar = _graphviz.cvar
Agdirected = cvar.Agdirected
Agstrictdirected = cvar.Agstrictdirected
Agundirected = cvar.Agundirected
Agstrictundirected = cvar.Agstrictundirected
|
fxyu/pygraphviz-1.3.1-win64Patch | pygraphviz/tests/test_readwrite.py | # -*- coding: utf-8 -*-
from nose.tools import *
import pygraphviz as pgv
def test_readwrite():
    """Builds a path graph; the write/read round-trip is still disabled (FIXME)."""
    A = pgv.AGraph(name='test graph')
    A.add_path([1,2,3,4,5,6,7,8,9,10])
    #FIXME — round-trip assertions below never ran; re-enable with tempfile.
    # >>> (fd,fname)=tempfile.mkstemp()
    # A.write(fname)
    # A.read(fname)
    # assert_equal(B, AGraph(fname))
    # assert_true(B == A)
    # assert_false(B is A)
    # >>> os.close(fd)
    # >>> os.unlink(fname)
|
fxyu/pygraphviz-1.3.1-win64Patch | pygraphviz/tests/test_html.py | # -*- coding: utf-8 -*-
from nose.tools import *
import pygraphviz as pgv
from os import linesep
def test_html():
G = pgv.AGraph(label='<Hello<BR/>Graph>')
G.add_node('a', label='<Hello<BR/>Node>')
s = G.add_subgraph('b', label='<Hello<BR/>Subgraph>')
s.add_node('sa', label='<Hello<BR/>Subgraph Node b>')
G.add_edge('a','b', label='<Hello<BR/>Edge>')
assert_equal(G.string().expandtabs(2),
"""strict graph {
graph [label=<Hello<BR/>Graph>];
node [label="\\N"];
{
graph [label=<Hello<BR/>Subgraph>];
sa [label=<Hello<BR/>Subgraph Node b>];
}
a [label=<Hello<BR/>Node>];
a -- b [label=<Hello<BR/>Edge>];
}
""".replace('\n', linesep))
|
tullurivijay/Machine_Learning_basics | data_preprocessing.py | # Data Preprocessing Template
# Data preprocessing template: load -> impute -> encode -> split -> scale.

# Importing the libraries
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd

# Importing the dataset
dataset = pd.read_csv('Data.csv')
X = dataset.iloc[:, :-1].values
y = dataset.iloc[:, 3].values

# Taking care of missing data: replace NaNs in the numeric columns (1:3)
# with the per-column mean.
from sklearn.impute import SimpleImputer
imp_mean = SimpleImputer(missing_values=np.nan, strategy='mean')
X[:, 1:3] = imp_mean.fit_transform(X[:, 1:3])
print(X[:, 1:3])

# Encoding categorical data. Plain label encoding would impose a spurious
# order on countries (France=0 < Germany=1 < Spain=2), so the country
# column (index 0) is one-hot (dummy) encoded instead.
# BUG FIX: OneHotEncoder(categorical_features=[0]) was removed from modern
# scikit-learn; ColumnTransformer is the supported way to encode one column.
from sklearn.compose import ColumnTransformer
from sklearn.preprocessing import LabelEncoder, OneHotEncoder
ct = ColumnTransformer(
    [('country', OneHotEncoder(), [0])], remainder='passthrough'
)
X = ct.fit_transform(X)

# The target is categorical too, but as the dependent variable it only
# needs label encoding (no dummy columns).
y = LabelEncoder().fit_transform(y)

# Splitting the dataset into the Training set and Test set
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.2, random_state=0
)

# Feature scaling: standardize so large-range features (salary) don't
# dominate small-range ones (age). Fit on the training set only, then apply
# the same transform to the test set to avoid leakage.
from sklearn.preprocessing import StandardScaler
sc_X = StandardScaler()
X_train = sc_X.fit_transform(X_train)
X_test = sc_X.transform(X_test)
# y is already a 0/1 label vector, so it is left unscaled.
|
rakibulislam01/Tweetme | tweets/api/serializers.py | from django.utils.timesince import timesince
from rest_framework import serializers
from accounts.api.serializers import UserDisplaySerializer
from tweets.models import Tweet
class TweetModelSerializer(serializers.ModelSerializer):
    """Serialize a Tweet with its author and two formatted timestamps."""

    user = UserDisplaySerializer(read_only=True)
    date_display = serializers.SerializerMethodField()
    timesince = serializers.SerializerMethodField()

    class Meta:
        model = Tweet
        fields = [
            'user',
            'content',
            'timestamp',
            'date_display',
            'timesince',
        ]

    def get_date_display(self, obj):
        """Human-readable timestamp, e.g. "Jul 23, 2019 at 11:49 AM".

        BUG FIX: the original format was "%I: %M %p", which rendered a
        stray space inside the time ("11: 49 AM").
        """
        return obj.timestamp.strftime("%b %d, %Y at %I:%M %p")

    def get_timesince(self, obj):
        """Elapsed time since the tweet was posted (e.g. "3 days")."""
        return timesince(obj.timestamp)
|
rakibulislam01/Tweetme | hashtags/models.py | from django.db import models
from django.urls import reverse_lazy
from tweets.models import Tweet
class HashTag(models.Model):
    """A hashtag referenced by tweets (stored without the leading '#')."""

    tag = models.CharField(max_length=120)
    timestamp = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        return self.tag

    def get_absolute_url(self):
        return reverse_lazy("hashtag", kwargs={"hashtag": self.tag})

    def get_tweets(self):
        """Every tweet whose content mentions this hashtag."""
        needle = "#" + self.tag
        return Tweet.objects.filter(content__icontains=needle)
|
rakibulislam01/Tweetme | tweets/urls.py | from django.urls import path
from django.views.generic.base import RedirectView
from .views import (
TweetListView,
TweetDetailView,
TweetCreateView,
TweetUpdateView,
TweetDeleteView
) # tweet_detail_view, tweet_list_view
app_name = 'tweets'

urlpatterns = [
    # /search — list view; supports optional ?q= filtering.
    path('search', TweetListView.as_view(), name='list'),
    # Bare prefix redirects to the site root.
    path('', RedirectView.as_view(url="/")),
    path('create/', TweetCreateView.as_view(), name='create'),
    path('<int:pk>/', TweetDetailView.as_view(), name='detail'),
    path('<int:pk>/update/', TweetUpdateView.as_view(), name='update'),
    path('<int:pk>/delete/', TweetDeleteView.as_view(), name='delete'),
]
|
rakibulislam01/Tweetme | tweets/api/views.py | from django.db.models import Q
from rest_framework import generics
from rest_framework import permissions
from tweets.models import Tweet
from .serializers import TweetModelSerializer
from .pagination import StandardResultPagination
class TweetCreateAPIView(generics.CreateAPIView):
    """API endpoint creating a tweet on behalf of the authenticated user."""
    serializer_class = TweetModelSerializer
    permission_classes = [permissions.IsAuthenticated]

    def perform_create(self, serializer):
        # Attach the requesting user as the tweet's author.
        serializer.save(user=self.request.user)
class TweetListAPIView(generics.ListAPIView):
    """Paginated feed: the user's own tweets plus those of followed users."""
    serializer_class = TweetModelSerializer
    pagination_class = StandardResultPagination

    def get_queryset(self):
        # Feed = tweets from followed users plus own tweets, newest first.
        im_following = self.request.user.profile.get_following()
        qs1 = Tweet.objects.filter(user__in=im_following)
        qs2 = Tweet.objects.filter(user=self.request.user)
        qs = (qs1 | qs2).distinct().order_by("-timestamp")
        # Optional ?q= search over content and author username.
        query = self.request.GET.get('q', None)
        if query is not None:
            qs = qs.filter(
                Q(content__icontains=query) |
                Q(user__username__icontains=query)
            )
        return qs
|
rakibulislam01/Tweetme | accounts/migrations/0004_auto_20190723_1149.py | # Generated by Django 2.2.2 on 2019-07-23 05:49
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('accounts', '0003_auto_20190721_1551'),
]
operations = [
migrations.AlterModelManagers(
name='userprofile',
managers=[
],
),
]
|
rakibulislam01/Tweetme | tweets/api/urls.py | from django.conf.urls import url
from django.urls import path
from django.views.generic.base import RedirectView
from .views import (TweetListAPIView,
TweetCreateAPIView)
app_name = 'tweets-api'

urlpatterns = [
    # NOTE(review): `url()` is deprecated and these patterns are unanchored
    # regexes — 'list' matches any URL containing "list". Consider path().
    url('list', TweetListAPIView.as_view(), name='list'), # api/tweet/
    url('create', TweetCreateAPIView.as_view(), name='create') # api/tweet/
]
|
rakibulislam01/Tweetme | accounts/urls.py | from django.urls import path
from django.views.generic.base import RedirectView
from .views import (
UserDetailView,
UserFollowView
) # tweet_detail_view, tweet_list_view
app_name = 'profiles'

urlpatterns = [
    # Public profile page for <username>.
    path('<username>/', UserDetailView.as_view(), name='detail'),
    # Follow action for <username> — handled by UserFollowView.
    path('<username>/follow', UserFollowView.as_view(), name='follow'),
]
|
rakibulislam01/Tweetme | hashtags/admin.py | from django.contrib import admin
from .models import HashTag
admin.site.register(HashTag)
|
rakibulislam01/Tweetme | accounts/models.py | <reponame>rakibulislam01/Tweetme<gh_stars>0
from django.conf import settings
from django.db import models
from django.db.models.signals import post_save
from django.urls import reverse_lazy
class UserProfileManager(models.Manager):
    """Manager for UserProfile adding follow/unfollow helpers."""

    use_for_related_fields = True

    def all(self):
        """All profiles, excluding the related instance's own user when
        used as a related manager."""
        qs = self.get_queryset().all()
        try:
            if self.instance:
                qs = qs.exclude(user=self.instance)
        except AttributeError:
            # Fix: was a bare `except: pass`. `self.instance` only exists
            # when this runs as a related manager; otherwise return the
            # unfiltered queryset.
            pass
        return qs

    def toggle_follow(self, user, to_toggle_user):
        """Follow or unfollow `to_toggle_user` for `user`.

        Returns:
            bool: True if `user` is now following `to_toggle_user`.
        """
        user_profile, _ = UserProfile.objects.get_or_create(user=user)
        if to_toggle_user in user_profile.following.all():
            user_profile.following.remove(to_toggle_user)
            added = False
        else:
            user_profile.following.add(to_toggle_user)
            added = True
        return added

    def is_following(self, user, followed_by_user):
        """Return True if `user` follows `followed_by_user`."""
        user_profile, created = UserProfile.objects.get_or_create(user=user)
        if created:
            # A freshly created profile cannot be following anyone yet.
            return False
        if followed_by_user in user_profile.following.all():
            return True
        return False
class UserProfile(models.Model):
    """Per-user profile storing the set of users this user follows."""

    # BUG FIX: on_delete must be a callable such as models.CASCADE; the
    # original passed `True`, which fails when the related user is deleted.
    user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, related_name='profile')
    following = models.ManyToManyField(settings.AUTH_USER_MODEL, blank=True, related_name='followed_by')

    objects = UserProfileManager()

    def __str__(self):
        # NOTE(review): shows the follow count, not a name — confirm intended.
        return str(self.following.all().count())

    def get_following(self):
        """Users this profile follows, excluding the profile owner."""
        users = self.following.all()
        return users.exclude(username=self.user.username)

    def get_follow_url(self):
        return reverse_lazy("profiles:follow", kwargs={"username": self.user.username})

    def get_absolute_url(self):
        return reverse_lazy("profiles:detail", kwargs={"username": self.user.username})
def post_save_user_receiver(sender, instance, created, *args, **kwargs):
    """Ensure a UserProfile exists for every newly created user."""
    if created:
        # Fix: dropped the unused `new_profile` binding.
        UserProfile.objects.get_or_create(user=instance)


post_save.connect(post_save_user_receiver, sender=settings.AUTH_USER_MODEL)
|
rakibulislam01/Tweetme | tweets/views.py | from django.contrib.auth.mixins import LoginRequiredMixin
from django.db.models import Q
# from django.shortcuts import get_object_or_404
from django.urls import reverse_lazy
from django.views.generic import DetailView, ListView, CreateView, UpdateView, DeleteView
from .forms import TweetModelForm
from .mixins import FormUserNeededMixin, UserOwnerMixin
from .models import Tweet
class TweetCreateView(LoginRequiredMixin, FormUserNeededMixin, CreateView):
    """Create a tweet; FormUserNeededMixin attaches request.user to the form."""
    # queryset = Tweet.objects.all()
    form_class = TweetModelForm
    template_name = 'tweets/create_view.html'
    # success_url = '/tweet/create'
    login_url = '/admin/'
    # fields = ['user', 'content']


class TweetDetailView(DetailView):
    """Display a single tweet looked up by primary key."""
    queryset = Tweet.objects.all()

    # def get_object(self):
    #     pk = self.kwargs.get("pk")
    #     obj = get_object_or_404(Tweet, pk=pk)
    #     return obj


class TweetDeleteView(LoginRequiredMixin, DeleteView):
    """Delete a tweet, then redirect to the list view."""
    model = Tweet
    # NOTE(review): namespace "tweet" (singular) must match the project
    # URLconf include; this app's urls.py declares app_name = 'tweets'.
    success_url = reverse_lazy("tweet:list")


class TweetUpdateView(LoginRequiredMixin, UserOwnerMixin, UpdateView):
    """Edit a tweet; UserOwnerMixin restricts editing to the tweet's author."""
    queryset = Tweet.objects.all()
    form_class = TweetModelForm
    template_name = 'tweets/update_view.html'
    # success_url = "/tweet/"
class TweetListView(ListView):
    """List tweets, optionally filtered by the ?q= search parameter."""

    def get_queryset(self):
        # Match the query against tweet content or the author's username.
        qs = Tweet.objects.all()
        query = self.request.GET.get('q', None)
        if query is not None:
            qs = qs.filter(
                Q(content__icontains=query) |
                Q(user__username__icontains=query)
            )
        return qs

    def get_context_data(self, *args, **kwargs):
        # Expose an inline create form so the list page can post new tweets.
        context = super().get_context_data(*args, **kwargs)
        context['create_form'] = TweetModelForm()
        context['create_url'] = reverse_lazy("tweet:create")
        return context
# =====================# Function Base view #===============#
# def tweet_detail_view(request, id=1):
# obj = Tweet.objects.get(id=id)
# context = {
# "object": obj
# }
# return render(request, 'tweets/detail_view.html', context)
#
#
# def tweet_list_view(request):
# queryset = Tweet.objects.all()
# context = {
# "object_list": queryset
# }
# return render(request, 'tweets/list_view.html', context)
# def tweet_create_view(request):
# form = TweetModelForm(request.POST or None)
# if form.is_valid():
# instance = form.save(commit=False)
# instance.user = request.user
# instance.save()
# context = {
# "form": form
# }
# return render(request, 'tweet/create_view.html', context)
|
rakibulislam01/Tweetme | tweets/api/pagination.py | from rest_framework import pagination
class StandardResultPagination(pagination.PageNumberPagination):
    """Page-number pagination: 10 items per page by default, client-tunable
    via ?page_size= up to a hard cap of 1000."""
    page_size = 10
    page_size_query_param = 'page_size'
    max_page_size = 1000
|
rakibulislam01/Tweetme | tweets/migrations/0005_auto_20190719_0108.py | <filename>tweets/migrations/0005_auto_20190719_0108.py
# Generated by Django 2.2.2 on 2019-07-18 19:08
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('tweets', '0004_auto_20190529_1942'),
]
operations = [
migrations.AlterModelOptions(
name='tweet',
options={'ordering': ['-timestamp']},
),
]
|
rakibulislam01/Tweetme | accounts/migrations/0003_auto_20190721_1551.py | <reponame>rakibulislam01/Tweetme
# Generated by Django 2.2.2 on 2019-07-21 09:51
from django.db import migrations
import django.db.models.manager
class Migration(migrations.Migration):
dependencies = [
('accounts', '0002_auto_20190720_2006'),
]
operations = [
migrations.AlterModelManagers(
name='userprofile',
managers=[
('object', django.db.models.manager.Manager()),
],
),
]
|
rakibulislam01/Tweetme | tweets/models.py | <filename>tweets/models.py
# from django.contrib.auth.models import User
from django.conf import settings
from django.db import models
from django.urls import reverse
from .validators import validate_content
class Tweet(models.Model):
    """A single 140-character tweet posted by a user."""

    # Author; tweets are deleted along with the user account.
    user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
    # Body text; validate_content enforces project-specific content rules.
    content = models.CharField(max_length=140, validators=[validate_content])
    # Refreshed on every save.
    updated = models.DateTimeField(auto_now=True)
    # Set once at creation.
    timestamp = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        return str(self.content)

    def get_absolute_url(self):
        # NOTE(review): namespace "tweet" (singular) must match the URLconf
        # include; the app's urls.py declares app_name = 'tweets'.
        return reverse("tweet:detail", kwargs={"pk": self.pk})

    class Meta:
        # Newest tweets first.
        ordering = ['-timestamp']
# def clean(self, *args, **kwargs):
# content = self.content
# if content == "":
# raise ValidationError("Conn't be blank")
# return super(Tweet, self).clean(*args, **kwargs)
|
memeshack/lolgit | hangups/test/test_channel.py | <filename>hangups/test/test_channel.py<gh_stars>1-10
"""Tests for channel data parsing."""
import pytest
from hangups import channel
@pytest.mark.parametrize('input_,expected', [
    (b'79\n[[0,["c","98803CAAD92268E8","",8]\n]\n,[1,[{"gsid":"7tCoFHumSL-IT6BHpCaxLA"}]]\n]\n',
     ('98803CAAD92268E8', '7tCoFHumSL-IT6BHpCaxLA')),
])
def test_parse_sid_response(input_, expected):
    """Parsing the SID response yields the (sid, gsessionid) pair."""
    assert channel._parse_sid_response(input_) == expected


@pytest.mark.parametrize('input_,expected', [
    # '€' is 3 bytes in UTF-8.
    ('€€'.encode()[:6], '€€'),
    ('€€'.encode()[:5], '€'),
    ('€€'.encode()[:4], '€'),
    ('€€'.encode()[:3], '€'),
    ('€€'.encode()[:2], ''),
    ('€€'.encode()[:1], ''),
    ('€€'.encode()[:0], ''),
])
def test_best_effort_decode(input_, expected):
    """Truncated UTF-8 decodes only the complete leading characters."""
    assert channel._best_effort_decode(input_) == expected


def test_simple():
    """Two complete length-prefixed submissions parse from one feed."""
    p = channel.PushDataParser()
    assert list(p.get_submissions('10\n01234567893\nabc'.encode())) == [
        '0123456789',
        'abc',
    ]


def test_truncated_message():
    """A submission shorter than its declared length is withheld."""
    p = channel.PushDataParser()
    assert list(p.get_submissions('12\n012345678'.encode())) == []


def test_truncated_length():
    """A length prefix missing its trailing newline yields nothing yet."""
    p = channel.PushDataParser()
    assert list(p.get_submissions('13'.encode())) == []


def test_malformed_length():
    """Documents current behavior when a length line is corrupt."""
    p = channel.PushDataParser()
    # TODO: could detect errors like these with some extra work
    assert list(p.get_submissions('11\n0123456789\n5e\n"abc"'.encode())) == [
        '0123456789\n'
    ]


def test_incremental():
    """Bytes may arrive one fragment at a time across multiple calls."""
    p = channel.PushDataParser()
    assert list(p.get_submissions(''.encode())) == []
    assert list(p.get_submissions('5'.encode())) == []
    assert list(p.get_submissions('\n'.encode())) == []
    assert list(p.get_submissions('abc'.encode())) == []
    assert list(p.get_submissions('de'.encode())) == ['abcde']
    assert list(p.get_submissions(''.encode())) == []


def test_unicode():
    """Submission lengths count UTF-16 code units, not characters."""
    p = channel.PushDataParser()
    # smile is actually 2 code units
    assert list(p.get_submissions('3\na😀'.encode())) == ['a😀']


def test_split_characters():
    """A multi-byte character split across feeds decodes once complete."""
    p = channel.PushDataParser()
    assert list(p.get_submissions(b'1\n\xe2\x82')) == []
    assert list(p.get_submissions(b'\xac')) == ['€']
|
memeshack/lolgit | hangups/__init__.py | <reponame>memeshack/lolgit<filename>hangups/__init__.py
from .version import __version__
from .schemas import (TypingStatus, FocusStatus, FocusDevice, SegmentType,
MembershipChangeType, ConversationType,
OffTheRecordStatus)
from .client import Client
from .user import UserList, build_user_list
from .conversation import ConversationList
from .auth import get_auth, get_auth_stdin, GoogleAuthError
from .exceptions import HangupsError, NetworkError
from .conversation_event import (ChatMessageSegment, ConversationEvent,
ChatMessageEvent, RenameEvent,
MembershipChangeEvent)
|
memeshack/lolgit | hangups/javascript.py | <filename>hangups/javascript.py
"""Parser for a subset of JavaScript written with purplex.
Parses a broader subset of JavaScript than just JSON, needed for parsing some
API responses. This is only as complete as necessary to parse the responses
we're getting.
"""
import logging
import purplex
logger = logging.getLogger(__name__)
def loads(string):
    """Parse simple JavaScript types from string into Python types.

    Raises ValueError if parsing fails.
    """
    try:
        return _PARSER.parse(string)
    except purplex.exception.PurplexError as e:
        # Chain the original lexer/parser error explicitly (PEP 3134) so
        # the underlying cause is preserved in the traceback.
        raise ValueError('Failed to load JavaScript: {}'.format(e)) from e
# Map from a JavaScript escape character (the character following the
# backslash) to its unescaped replacement; consulted by _unescape_string.
_ESCAPES = {
    'b': '\b',
    't': '\t',
    'n': '\n',
    'v': '\v',
    'f': '\f',
    'r': '\r',
    '"': '"',
    "'": "'",
    '\\': '\\',
    # Unicode escapes are a special case: _unescape_string handles 'u'
    # before consulting this table, so this entry's value is unused.
    'u': '',
}
def _unescape_string(s):
"""Unescape JavaScript escape sequences."""
chars = list(s)
unescaped_chars = []
while len(chars) > 0:
c = chars.pop(0)
if c != '\\':
unescaped_chars.append(c)
else:
try:
c = chars.pop(0)
except IndexError:
raise ValueError('Reached end of string literal '
'prematurely: {}'.format(s))
if c == 'u':
# One character can be formed from multiple contiguous \u
# escape sequences.
char_hex = ''
while True:
try:
char_hex += ''.join([chars.pop(0) for _ in range(4)])
except IndexError:
raise ValueError('Reached end of string literal '
'prematurely: {}'.format(s))
if len(chars) > 1 and ''.join(chars[0:2]) == r'\u':
chars.pop(0)
chars.pop(0)
else:
break
try:
char = bytes.fromhex(char_hex).decode('utf-16be')
except (ValueError, UnicodeDecodeError) as e:
logger.warning('Failed to decode unicode escape: {}'
.format(e))
char = ''
unescaped_chars.extend(char)
else:
try:
unescaped_chars.append(_ESCAPES[c])
except KeyError:
# Mimic browser engines by ignoring the backslash if it
# forms an invalid escape sequence.
logger.warning('Ignoring invalid escape sequence: \\{}'
.format(c))
unescaped_chars.append(c)
return "".join(unescaped_chars)
class JavaScriptLexer(purplex.Lexer):
    """Lexer for a subset of JavaScript."""
    # TODO: Negative integers
    INTEGER = purplex.TokenDef(r'\d+')
    FLOAT = purplex.TokenDef(r'[-+]?\d*[.]\d+')
    NULL = purplex.TokenDef(r'null')
    TRUE = purplex.TokenDef(r'true')
    FALSE = purplex.TokenDef(r'false')
    LIST_START = purplex.TokenDef(r'\[')
    LIST_END = purplex.TokenDef(r'\]')
    OBJECT_START = purplex.TokenDef(r'\{')
    OBJECT_END = purplex.TokenDef(r'\}')
    COMMA = purplex.TokenDef(r',')
    COLON = purplex.TokenDef(r':')
    # Single- or double-quoted string literal: inside the quotes, any
    # non-backslash/non-quote character or a backslash escape is allowed;
    # *? keeps the match non-greedy so adjacent strings lex separately.
    STRING = purplex.TokenDef(
        '(\'(([^\\\\\'])|(\\\\.))*?\')|("(([^\\\\"])|(\\\\.))*?")'
    )
    # TODO more unquoted keys are allowed
    KEY = purplex.TokenDef(r'[a-zA-Z0-9_$]+')
    WHITESPACE = purplex.TokenDef(r'[\s\n]+', ignore=True)
class JavaScriptParser(purplex.Parser):
    """Parser for a subset of JavaScript.

    Grammar rules are attached via purplex decorators; the rule string
    names the production and the method builds the Python value.
    Note elided list items (e.g. [1,,2]) parse as None.
    """
    # pylint: disable=C0111,R0201,W0613,R0913
    LEXER = JavaScriptLexer
    START = 'e'
    PRECEDENCE = ()
    @purplex.attach('listitems : e')
    def listitems_1(self, child):
        return [child]
    @purplex.attach('listitems : e COMMA listitems')
    def listitems_2(self, child, comma, rest_of_list):
        return [child] + rest_of_list
    @purplex.attach('listitems : COMMA listitems')
    def listitems_3(self, comma, rest_of_list):
        # A leading comma is an elided item, which becomes None.
        return [None] + rest_of_list
    @purplex.attach('listitems : ')
    def listitems_4(self):
        return []
    @purplex.attach('e : LIST_START listitems LIST_END')
    def list(self, *children):
        return children[1]
    @purplex.attach('objectkey : e')
    @purplex.attach('objectkey : KEY')
    def objectkey(self, key):
        # TODO not everything can be a key
        return key
    @purplex.attach('objectitems : ')
    def objectitems_1(self):
        return {}
    @purplex.attach('objectitems : objectkey COLON e')
    def objectitems_2(self, key, colon, val):
        return {key: val}
    @purplex.attach('objectitems : objectkey COLON e COMMA objectitems')
    def objectitems_3(self, key, colon, val, comma, otheritems):
        d = dict(otheritems)
        d[key] = val
        return d
    @purplex.attach('e : OBJECT_START objectitems OBJECT_END')
    def object(self, start, objectitems, end):
        return objectitems
    @purplex.attach('e : INTEGER')
    def number(self, num):
        return int(num)
    @purplex.attach('e : FLOAT')
    def float_number(self, num):
        return float(num)
    @purplex.attach('e : NULL')
    def null(self, t):
        return None
    @purplex.attach('e : TRUE')
    def true(self, t):
        return True
    @purplex.attach('e : FALSE')
    def false(self, t):
        return False
    @purplex.attach('e : STRING')
    def string(self, s):
        # Strip the surrounding quotes before unescaping.
        return _unescape_string(s[1:-1])
# instantiate the parser at module-load time for better performance
_PARSER = JavaScriptParser()
|
memeshack/lolgit | setup.py | from setuptools import setup
from setuptools.command.test import test as TestCommand
import os
import sys
# Find __version__ without an import that would require the package's
# dependencies to be installed. Use a context manager so the file handle
# is closed promptly instead of being leaked.
with open(os.path.join(os.path.dirname(__file__), 'hangups/version.py')) as f:
    exec(f.read())
class PyTest(TestCommand):
    """setuptools command that runs the test suite with pytest.

    Registered below via cmdclass={'test': PyTest}.
    """
    def finalize_options(self):
        TestCommand.finalize_options(self)
        self.test_args = []
        self.test_suite = True
    def run_tests(self):
        # Deferred import: pytest is only needed when tests actually run.
        import pytest
        errno = pytest.main(self.test_args)
        # Propagate pytest's exit code so CI sees failures.
        sys.exit(errno)
# Read the long description for PyPI. Specify UTF-8 explicitly so the
# build does not depend on the platform's default text encoding.
with open('README.rst', encoding='utf-8') as f:
    readme = f.read()
# Package metadata and dependencies; dependencies are pinned to exact
# versions that are known to work.
setup(
    name='hangups',
    version=__version__,
    description=('the first third-party instant messaging client for Google '
                 'Hangouts'),
    long_description=readme,
    url='https://github.com/tdryer/hangups',
    author='<NAME>',
    author_email='<EMAIL>',
    license='MIT',
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Intended Audience :: Developers',
        'Natural Language :: English',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
    ],
    packages=['hangups', 'hangups.ui'],
    install_requires=[
        'ConfigArgParse==0.9.3',
        'aiohttp==0.15.1',
        'appdirs==1.4.0',
        'purplex==0.2.4',
        'readlike>=0.1',
        'requests==2.6.0',
        'ReParser>=1.4',
        # use forked urwid until there's a 1.3 release with colour bugfix
        'hangups-urwid==1.2.2-dev',
        # backport enum for python3.3:
        'enum34==1.0.4',
        # backport asyncio for python3.3:
        'asyncio==3.4.3',
    ],
    tests_require=[
        'pytest',
    ],
    cmdclass={'test': PyTest},
    entry_points={
        'console_scripts': [
            'hangups=hangups.ui.__main__:main',
        ],
    },
)
|
memeshack/lolgit | hangups/conversation.py | """Conversation objects."""
import asyncio
import logging
from hangups import (parsers, event, user, conversation_event, exceptions,
schemas)
logger = logging.getLogger(__name__)
class Conversation(object):
    """Wrapper around Client for working with a single chat conversation."""
    def __init__(self, client, user_list, client_conversation,
                 client_events=()):
        """Initialize a new Conversation.

        client_events is an optional iterable of ClientEvents used to
        pre-populate the conversation. The default is an immutable tuple
        (rather than the original mutable []) to avoid the shared mutable
        default argument pitfall.
        """
        self._client = client  # Client
        self._user_list = user_list  # UserList
        self._conversation = client_conversation  # ClientConversation
        self._events = []  # [ConversationEvent]
        self._events_dict = {}  # {event_id: ConversationEvent}
        self._send_message_lock = asyncio.Lock()
        for event_ in client_events:
            self.add_event(event_)
        # Event fired when a user starts or stops typing with arguments
        # (typing_message).
        self.on_typing = event.Event('Conversation.on_typing')
        # Event fired when a new ConversationEvent arrives with arguments
        # (ConversationEvent).
        self.on_event = event.Event('Conversation.on_event')
        # Event fired when a watermark (read timestamp) is updated with
        # arguments (WatermarkNotification).
        self.on_watermark_notification = event.Event(
            'Conversation.on_watermark_notification'
        )
        self.on_watermark_notification.add_observer(
            self._on_watermark_notification
        )
    def _on_watermark_notification(self, notif):
        """Update the conversations latest_read_timestamp."""
        # Only the user's own watermark affects latest_read_timestamp.
        if self.get_user(notif.user_id).is_self:
            logger.info('latest_read_timestamp for {} updated to {}'
                        .format(self.id_, notif.read_timestamp))
            self_conversation_state = (
                self._conversation.self_conversation_state
            )
            self_conversation_state.self_read_state.latest_read_timestamp = (
                parsers.to_timestamp(notif.read_timestamp)
            )
    def update_conversation(self, client_conversation):
        """Update the internal ClientConversation."""
        # When latest_read_timestamp is 0, this seems to indicate no change
        # from the previous value. Work around this by saving and restoring
        # the previous value.
        old_timestamp = self.latest_read_timestamp
        self._conversation = client_conversation
        if parsers.to_timestamp(self.latest_read_timestamp) == 0:
            self_conversation_state = (
                self._conversation.self_conversation_state
            )
            self_conversation_state.self_read_state.latest_read_timestamp = (
                parsers.to_timestamp(old_timestamp)
            )
    @staticmethod
    def _wrap_event(event_):
        """Wrap ClientEvent in ConversationEvent subclass."""
        if event_.chat_message is not None:
            return conversation_event.ChatMessageEvent(event_)
        elif event_.conversation_rename is not None:
            return conversation_event.RenameEvent(event_)
        elif event_.membership_change is not None:
            return conversation_event.MembershipChangeEvent(event_)
        else:
            return conversation_event.ConversationEvent(event_)
    def add_event(self, event_):
        """Add a ClientEvent to the Conversation.

        Returns an instance of ConversationEvent or subclass.
        """
        conv_event = self._wrap_event(event_)
        self._events.append(conv_event)
        self._events_dict[conv_event.id_] = conv_event
        return conv_event
    def get_user(self, user_id):
        """Return the User instance with the given UserID."""
        return self._user_list.get_user(user_id)
    @asyncio.coroutine
    def send_message(self, segments, image_file=None, image_id=None):
        """Send a message to this conversation.

        A per-conversation lock is acquired to ensure that messages are sent
        in the correct order when this method is called multiple times
        asynchronously.

        segments is a list of ChatMessageSegments to include in the message.
        image_file is an optional file-like object containing an image to be
        attached to the message.
        image_id is an optional ID of an image to be attached to the message
        (if you specify both image_file and image_id together, image_file
        takes precedence and supplied image_id will be ignored)

        Raises hangups.NetworkError if the message can not be sent.
        """
        with (yield from self._send_message_lock):
            # Send messages with OTR status matching the conversation's status.
            otr_status = (schemas.OffTheRecordStatus.OFF_THE_RECORD
                          if self.is_off_the_record
                          else schemas.OffTheRecordStatus.ON_THE_RECORD)
            if image_file:
                try:
                    image_id = yield from self._client.upload_image(image_file)
                except exceptions.NetworkError as e:
                    logger.warning('Failed to upload image: {}'.format(e))
                    raise
            try:
                yield from self._client.sendchatmessage(
                    self.id_, [seg.serialize() for seg in segments],
                    image_id=image_id, otr_status=otr_status
                )
            except exceptions.NetworkError as e:
                logger.warning('Failed to send message: {}'.format(e))
                raise
    @asyncio.coroutine
    def leave(self):
        """Leave conversation.

        Raises hangups.NetworkError if conversation cannot be left.
        """
        try:
            # Group conversations are left by removing ourselves; one-to-one
            # conversations are deleted instead.
            if self._conversation.type_ == schemas.ConversationType.GROUP:
                yield from self._client.removeuser(self.id_)
            else:
                yield from self._client.deleteconversation(self.id_)
        except exceptions.NetworkError as e:
            logger.warning('Failed to leave conversation: {}'.format(e))
            raise
    @asyncio.coroutine
    def rename(self, name):
        """Rename the conversation.

        Hangouts only officially supports renaming group conversations, so
        custom names for one-to-one conversations may or may not appear in all
        first party clients.

        Raises hangups.NetworkError if conversation cannot be renamed.
        """
        yield from self._client.setchatname(self.id_, name)
    @asyncio.coroutine
    def set_notification_level(self, level):
        """Set the notification level of the conversation.

        Pass schemas.ClientNotificationLevel.QUIET to disable notifications,
        or schemas.ClientNotificationLevel.RING to enable them.

        Raises hangups.NetworkError if the request fails.
        """
        yield from self._client.setconversationnotificationlevel(self.id_,
                                                                 level)
    @asyncio.coroutine
    def set_typing(self, typing=schemas.TypingStatus.TYPING):
        """Set typing status.

        TODO: Add rate-limiting to avoid unnecessary requests.

        Raises hangups.NetworkError if typing status cannot be set.
        """
        try:
            yield from self._client.settyping(self.id_, typing)
        except exceptions.NetworkError as e:
            logger.warning('Failed to set typing status: {}'.format(e))
            raise
    @asyncio.coroutine
    def update_read_timestamp(self, read_timestamp=None):
        """Update the timestamp of the latest event which has been read.

        By default, the timestamp of the newest event is used.

        This method will avoid making an API request if it will have no
        effect.

        Raises hangups.NetworkError if the timestamp can not be updated.
        """
        if read_timestamp is None:
            read_timestamp = self.events[-1].timestamp
        if read_timestamp > self.latest_read_timestamp:
            logger.info(
                'Setting {} latest_read_timestamp from {} to {}'
                .format(self.id_, self.latest_read_timestamp, read_timestamp)
            )
            # Prevent duplicate requests by updating the conversation now.
            state = self._conversation.self_conversation_state
            state.self_read_state.latest_read_timestamp = (
                parsers.to_timestamp(read_timestamp)
            )
            try:
                yield from self._client.updatewatermark(self.id_,
                                                        read_timestamp)
            except exceptions.NetworkError as e:
                logger.warning('Failed to update read timestamp: {}'.format(e))
                raise
    @asyncio.coroutine
    def get_events(self, event_id=None, max_events=50):
        """Return list of ConversationEvents ordered newest-first.

        If event_id is specified, return events preceeding this event.

        This method will make an API request to load historical events if
        necessary. If the beginning of the conversation is reached, an empty
        list will be returned.

        Raises KeyError if event_id does not correspond to a known event.

        Raises hangups.NetworkError if the events could not be requested.
        """
        if event_id is None:
            # If no event_id is provided, return the newest events in this
            # conversation.
            conv_events = self._events[-1 * max_events:]
        else:
            # If event_id is provided, return the events we have that are
            # older, or request older events if event_id corresponds to the
            # oldest event we have.
            conv_event = self.get_event(event_id)
            if self._events[0].id_ != event_id:
                conv_events = self._events[self._events.index(conv_event) + 1:]
            else:
                logger.info('Loading events for conversation {} before {}'
                            .format(self.id_, conv_event.timestamp))
                res = yield from self._client.getconversation(
                    self.id_, conv_event.timestamp, max_events
                )
                conv_events = [self._wrap_event(client_event) for client_event
                               in res.conversation_state.event]
                logger.info('Loaded {} events for conversation {}'
                            .format(len(conv_events), self.id_))
                # Prepend the newly-loaded events so self._events stays
                # sorted oldest-to-newest.
                for conv_event in reversed(conv_events):
                    self._events.insert(0, conv_event)
                    self._events_dict[conv_event.id_] = conv_event
        return conv_events
    def next_event(self, event_id, prev=False):
        """Return ConversationEvent following the event with given event_id.

        If prev is True, return the previous event rather than the following
        one.

        Raises KeyError if no such ConversationEvent is known.

        Return None if there is no following event.
        """
        i = self.events.index(self._events_dict[event_id])
        if prev and i > 0:
            return self.events[i - 1]
        elif not prev and i + 1 < len(self.events):
            return self.events[i + 1]
        else:
            return None
    def get_event(self, event_id):
        """Return ConversationEvent with the given event_id.

        Raises KeyError if no such ConversationEvent is known.
        """
        return self._events_dict[event_id]
    @property
    def id_(self):
        """The conversation's ID."""
        return self._conversation.conversation_id.id_
    @property
    def users(self):
        """User instances of the conversation's current participants."""
        return [self._user_list.get_user(user.UserID(chat_id=part.id_.chat_id,
                                                     gaia_id=part.id_.gaia_id))
                for part in self._conversation.participant_data]
    @property
    def name(self):
        """The conversation's custom name, or None if it doesn't have one."""
        return self._conversation.name
    @property
    def last_modified(self):
        """datetime timestamp of when the conversation was last modified."""
        timestamp = self._conversation.self_conversation_state.sort_timestamp
        # timestamp can be None for some reason when there is an ongoing video
        # hangout
        if timestamp is None:
            timestamp = 0
        return parsers.from_timestamp(timestamp)
    @property
    def latest_read_timestamp(self):
        """datetime timestamp of the last read ConversationEvent."""
        timestamp = (self._conversation.self_conversation_state.
                     self_read_state.latest_read_timestamp)
        return parsers.from_timestamp(timestamp)
    @property
    def events(self):
        """The list of ConversationEvents, sorted oldest to newest."""
        return list(self._events)
    @property
    def unread_events(self):
        """List of ConversationEvents that are unread.

        Events are sorted oldest to newest.

        Note that some Hangouts clients don't update the read timestamp for
        certain event types, such as membership changes, so this method may
        return more unread events than these clients will show. There's also a
        delay between sending a message and the user's own message being
        considered read.
        """
        return [conv_event for conv_event in self._events
                if conv_event.timestamp > self.latest_read_timestamp]
    @property
    def is_archived(self):
        """True if this conversation has been archived."""
        return (schemas.ClientConversationView.ARCHIVED_VIEW in
                self._conversation.self_conversation_state.view)
    @property
    def is_quiet(self):
        """True if notification level for this conversation is quiet."""
        level = self._conversation.self_conversation_state.notification_level
        return level == schemas.ClientNotificationLevel.QUIET
    @property
    def is_off_the_record(self):
        """True if conversation is off the record (history is disabled)."""
        status = self._conversation.otr_status
        return status == schemas.OffTheRecordStatus.OFF_THE_RECORD
class ConversationList(object):
    """Wrapper around Client that maintains a list of Conversations."""
    def __init__(self, client, conv_states, user_list, sync_timestamp):
        self._client = client  # Client
        self._conv_dict = {}  # {conv_id: Conversation}
        self._sync_timestamp = sync_timestamp  # datetime
        self._user_list = user_list  # UserList
        # Initialize the list of conversations from Client's list of
        # ClientConversationStates.
        for conv_state in conv_states:
            self.add_conversation(conv_state.conversation, conv_state.event)
        self._client.on_state_update.add_observer(self._on_state_update)
        self._client.on_connect.add_observer(self._sync)
        self._client.on_reconnect.add_observer(self._sync)
        # Event fired when a new ConversationEvent arrives with arguments
        # (ConversationEvent).
        self.on_event = event.Event('ConversationList.on_event')
        # Event fired when a user starts or stops typing with arguments
        # (typing_message).
        self.on_typing = event.Event('ConversationList.on_typing')
        # Event fired when a watermark (read timestamp) is updated with
        # arguments (WatermarkNotification).
        self.on_watermark_notification = event.Event(
            'ConversationList.on_watermark_notification'
        )
    def get_all(self, include_archived=False):
        """Return list of all Conversations.

        If include_archived is False, do not return any archived
        conversations.
        """
        return [conv for conv in self._conv_dict.values()
                if not conv.is_archived or include_archived]
    def get(self, conv_id):
        """Return a Conversation from its ID.

        Raises KeyError if the conversation ID is invalid.
        """
        return self._conv_dict[conv_id]
    def add_conversation(self, client_conversation, client_events=()):
        """Add new conversation from ClientConversation.

        client_events is an optional iterable of ClientEvents. The default
        is an immutable tuple (rather than the original mutable []) to
        avoid the shared mutable default argument pitfall.
        """
        conv_id = client_conversation.conversation_id.id_
        logger.info('Adding new conversation: {}'.format(conv_id))
        conv = Conversation(
            self._client, self._user_list,
            client_conversation, client_events
        )
        self._conv_dict[conv_id] = conv
        return conv
    @asyncio.coroutine
    def leave_conversation(self, conv_id):
        """Leave conversation and remove it from ConversationList"""
        logger.info('Leaving conversation: {}'.format(conv_id))
        yield from self._conv_dict[conv_id].leave()
        del self._conv_dict[conv_id]
    @asyncio.coroutine
    def _on_state_update(self, state_update):
        """Receive a ClientStateUpdate and fan out to Conversations."""
        if state_update.client_conversation is not None:
            self._handle_client_conversation(state_update.client_conversation)
        if state_update.typing_notification is not None:
            yield from self._handle_set_typing_notification(
                state_update.typing_notification
            )
        if state_update.watermark_notification is not None:
            yield from self._handle_watermark_notification(
                state_update.watermark_notification
            )
        if state_update.event_notification is not None:
            yield from self._on_client_event(
                state_update.event_notification.event
            )
    @asyncio.coroutine
    def _on_client_event(self, event_):
        """Receive a ClientEvent and fan out to Conversations."""
        self._sync_timestamp = parsers.from_timestamp(event_.timestamp)
        try:
            conv = self._conv_dict[event_.conversation_id.id_]
        except KeyError:
            logger.warning('Received ClientEvent for unknown conversation {}'
                           .format(event_.conversation_id.id_))
        else:
            conv_event = conv.add_event(event_)
            # Fire the list-level event before the per-conversation event.
            yield from self.on_event.fire(conv_event)
            yield from conv.on_event.fire(conv_event)
    def _handle_client_conversation(self, client_conversation):
        """Receive ClientConversation and create or update the conversation."""
        conv_id = client_conversation.conversation_id.id_
        conv = self._conv_dict.get(conv_id, None)
        if conv is not None:
            conv.update_conversation(client_conversation)
        else:
            self.add_conversation(client_conversation)
    @asyncio.coroutine
    def _handle_set_typing_notification(self, set_typing_notification):
        """Receive ClientSetTypingNotification and update the conversation."""
        conv_id = set_typing_notification.conversation_id.id_
        conv = self._conv_dict.get(conv_id, None)
        if conv is not None:
            res = parsers.parse_typing_status_message(set_typing_notification)
            yield from self.on_typing.fire(res)
            yield from conv.on_typing.fire(res)
        else:
            logger.warning('Received ClientSetTypingNotification for '
                           'unknown conversation {}'.format(conv_id))
    @asyncio.coroutine
    def _handle_watermark_notification(self, watermark_notification):
        """Receive ClientWatermarkNotification and update the conversation."""
        conv_id = watermark_notification.conversation_id.id_
        conv = self._conv_dict.get(conv_id, None)
        if conv is not None:
            res = parsers.parse_watermark_notification(watermark_notification)
            yield from self.on_watermark_notification.fire(res)
            yield from conv.on_watermark_notification.fire(res)
        else:
            logger.warning('Received ClientWatermarkNotification for '
                           'unknown conversation {}'.format(conv_id))
    @asyncio.coroutine
    def _sync(self, initial_data=None):
        """Sync conversation state and events that could have been missed."""
        logger.info('Syncing events since {}'.format(self._sync_timestamp))
        try:
            res = yield from self._client.syncallnewevents(
                self._sync_timestamp
            )
        except exceptions.NetworkError as e:
            logger.warning('Failed to sync events, some events may be lost: {}'
                           .format(e))
        else:
            for conv_state in res.conversation_state:
                conv_id = conv_state.conversation_id.id_
                conv = self._conv_dict.get(conv_id, None)
                if conv is not None:
                    conv.update_conversation(conv_state.conversation)
                    for event_ in conv_state.event:
                        timestamp = parsers.from_timestamp(event_.timestamp)
                        if timestamp > self._sync_timestamp:
                            # This updates the sync_timestamp for us, as well
                            # as triggering events.
                            yield from self._on_client_event(event_)
                else:
                    self.add_conversation(conv_state.conversation,
                                          conv_state.event)
|
memeshack/lolgit | hangups/pblite.py | """A parser for the pblite serialization format.
pblite (sometimes called "protojson") is a way of encoding Protocol Buffer
messages to arrays. Google uses this in Hangouts because JavaScript handles
arrays better than bytes.
This module allows parsing lists together with a schema to produce
programmer-friendly objects. The conversation from not-quite-json strings to
lists can be done using hangups.javascript.
See:
https://code.google.com/p/google-protorpc/source/browse/python/protorpc/
protojson.py
TODO: Serialization code is currently unused and doesn't have any tests.
"""
import itertools
import types
class Field(object):
    """An untyped field, corresponding to a primitive type."""

    def __init__(self, is_optional=False):
        self._is_optional = is_optional

    def parse(self, input_):
        """Return input_ unchanged after validating it.

        Raises ValueError if the input is None and the Field is not optional.
        """
        if input_ is None and not self._is_optional:
            raise ValueError('Field is not optional')
        return input_

    def serialize(self, input_):
        """Return input_ unchanged after validating it.

        Raises ValueError if the input is None and the Field is not optional.
        """
        return self.parse(input_)
class EnumField(object):
    """An enumeration field.

    Corresponds to a specified set of constants defined by the given Enum.
    EnumFields are always required, but an enum may contain None as a value.
    """

    def __init__(self, enum):
        self._enum_cls = enum

    def parse(self, input_):
        """Convert input_ into a member of the enum.

        Raises ValueError if the input is not an option in the enum.
        """
        return self._enum_cls(input_)

    def serialize(self, input_):
        """Convert input_ into the underlying value of its enum member.

        Raises ValueError if the input is not an option in the enum.
        """
        return self._enum_cls(input_).value
class RepeatedField(object):
    """A field which may be repeated any number of times.

    Corresponds to a list.
    """

    def __init__(self, field, is_optional=False):
        self._field = field
        self._is_optional = is_optional

    def parse(self, input_, serialize=False):
        """Parse the message.

        Raises ValueError if the input is None and the RepeatedField is not
        optional, or if the input is not a list.
        """
        if input_ is None:
            if self._is_optional:
                return None
            raise ValueError('RepeatedField is not optional')
        if not isinstance(input_, list):
            raise ValueError('RepeatedField expected list but got {}'
                             .format(type(input_)))
        # Dispatch each item to the inner field's serializer or parser.
        convert = self._field.serialize if serialize else self._field.parse
        items = []
        for element in input_:
            try:
                items.append(convert(element))
            except ValueError as e:
                raise ValueError('RepeatedField item: {}'.format(e))
        return items

    def serialize(self, input_):
        """Serialize the message.

        Raises ValueError if the input is None and the RepeatedField is not
        optional, or if the input is not a list.
        """
        return self.parse(input_, serialize=True)
class Message(object):
    """A field consisting of a collection of fields paired with a name.

    Corresponds to an object (SimpleNamespace).

    The input may be shorter than the number of fields and the trailing
    fields will be assigned None. The input may be longer than the number of
    fields and the trailing input items will be ignored. Fields with name
    None will cause the corresponding input item to be optional and ignored.
    """

    def __init__(self, *args, is_optional=False):
        self._name_field_pairs = args
        self._is_optional = is_optional

    def parse(self, input_):
        """Parse the message.

        Raises ValueError if the input is None and the Message is not
        optional, or if any of the contained Fields fail to parse.
        """
        if input_ is None:
            if self._is_optional:
                return None
            raise ValueError('Message is not optional')
        if not isinstance(input_, list):
            raise ValueError('Message expected list but got {}'
                             .format(type(input_)))
        result = types.SimpleNamespace()
        for index, (name, field) in enumerate(self._name_field_pairs):
            # Missing trailing items are treated as None.
            item = input_[index] if index < len(input_) else None
            if name is None:
                continue
            try:
                value = field.parse(item)
            except ValueError as e:
                raise ValueError("Message field '{}': {}".format(name, e))
            setattr(result, name, value)
        return result

    def serialize(self, input_):
        """Serialize the message.

        Raises ValueError if the input is None and the Message is not
        optional, or if any of the contained Fields fail to parse.
        """
        if input_ is None:
            if self._is_optional:
                return None
            raise ValueError('Message is not optional')
        if not isinstance(input_, types.SimpleNamespace):
            raise ValueError(
                'Message expected types.SimpleNamespace but got {}'
                .format(type(input_))
            )
        serialized = []
        for name, field in self._name_field_pairs:
            if name is None:
                # Unnamed fields serialize as placeholder Nones.
                serialized.append(None)
            else:
                serialized.append(field.serialize(getattr(input_, name)))
        return serialized
|
memeshack/lolgit | hangups/test/test_javascript.py | <filename>hangups/test/test_javascript.py
"""Tests for the JavaScript parser."""
import pytest
from hangups import javascript
# Each pair is (javascript_source, expected_python_value).
@pytest.mark.parametrize('input_,expected', [
    # simple types
    ('12', 12),
    ('null', None),
    ('true', True),
    ('false', False),
    # floats
    ('123.0', 123.0),
    ('-123.0', -123.0),
    ('.123', 0.123),
    ('-.123', -0.123),
    # lists (including elided items, which parse as None)
    ('[ ]', []),
    ('[12]', [12]),
    ('[1, null, true, false, []]', [1, None, True, False, []]),
    ('[1,,2]', [1, None, 2]),
    ('[1,,,2]', [1, None, None, 2]),
    ('[,1]', [None, 1]),
    ('[,,1]', [None, None, 1]),
    ('[1,]', [1]),
    ('[1,,]', [1, None]),
    # strings
    ('\'\'', ''),
    ('""', ''),
    ('\'f\'', 'f'),
    ('"f"', 'f'),
    ('\'foo\'', 'foo'),
    ('"foo"', 'foo'),
    ('[["foo","bar"],,,1232]', [['foo', 'bar'], None, None, 1232]),
    ('"😀"', '😀'),
    # string escape sequences
    (r'"\b"', '\b'),
    (r'"\t"', '\t'),
    (r'"\n"', '\n'),
    (r'"\v"', '\v'),
    (r'"\f"', '\f'),
    (r'"\r"', '\r'),
    (r'"\\"', '\\'),
    (r'"\""', '"'),
    (r"'\''", "'"),
    (r'"a\u003db"', 'a=b'),
    (r'"\ud83d\ude1c"', '😜'),
    (r'"a\ud83d\ude1cb"', 'a😜b'),
    (r'"a\ud83d\ude1c\ud83d\ude1cb"', 'a😜😜b'),
    # invalid string escape sequences (backslash dropped / char dropped)
    (r'"\a"', 'a'),
    (r'"a\uzzzzb"', 'ab'),
    (r'"\ud83d\uffff"', ''),
    # objects
    ('{ }', {}),
    ('{"foo": 1}', {'foo': 1}),
    ('{"foo": 1, "bar": 2}', {'foo': 1, 'bar': 2}),
    ('{foo: 1}', {'foo': 1}),
    (r'"[\"foo\"]"', '["foo"]'),
])
def test_loads(input_, expected):
    """Test loading JS from a string."""
    assert javascript.loads(input_) == expected
def test_loads_lex_error():
    """Input that cannot be lexed raises ValueError."""
    bad_input = '{""": 1}'
    with pytest.raises(ValueError):
        javascript.loads(bad_input)
def test_loads_parse_error():
    """Input that lexes but cannot be parsed raises ValueError."""
    bad_input = '{"foo": 1}}'
    with pytest.raises(ValueError):
        javascript.loads(bad_input)
|
memeshack/lolgit | hangups/conversation_event.py | <reponame>memeshack/lolgit
"""ConversationEvent base class and subclasses.
These classes are wrappers for ClientEvent instances from the API. Parsing is
done through property methods, which prefer logging warnings to raising
exceptions.
"""
import logging
import re
from hangups import parsers, message_parser, user, schemas
logger = logging.getLogger(__name__)
chat_message_parser = message_parser.ChatMessageParser()
class ConversationEvent(object):
    """An event which becomes part of the permanent record of a conversation.

    This corresponds to ClientEvent in the API.

    This is the base class for such events.
    """

    def __init__(self, client_event):
        self._event = client_event

    @property
    def timestamp(self):
        """A timestamp of when the event occurred."""
        raw_timestamp = self._event.timestamp
        return parsers.from_timestamp(raw_timestamp)

    @property
    def user_id(self):
        """A UserID indicating who created the event."""
        sender = self._event.sender_id
        return user.UserID(chat_id=sender.chat_id, gaia_id=sender.gaia_id)

    @property
    def conversation_id(self):
        """The ID of the conversation the event belongs to."""
        return self._event.conversation_id.id_

    @property
    def id_(self):
        """The ID of the ConversationEvent."""
        return self._event.event_id
class ChatMessageSegment(object):
    """A segment of a chat message."""

    def __init__(self, text, segment_type=None,
                 is_bold=False, is_italic=False, is_strikethrough=False,
                 is_underline=False, link_target=None):
        """Create a new chat message segment."""
        # Infer the segment type when none is given: a link target implies
        # LINK, otherwise plain TEXT.
        if segment_type is not None:
            inferred_type = segment_type
        elif link_target is not None:
            inferred_type = schemas.SegmentType.LINK
        else:
            inferred_type = schemas.SegmentType.TEXT
        self.type_ = inferred_type
        self.text = text
        self.is_bold = is_bold
        self.is_italic = is_italic
        self.is_strikethrough = is_strikethrough
        self.is_underline = is_underline
        self.link_target = link_target

    @staticmethod
    def from_str(text):
        """Generate ChatMessageSegment list parsed from a string.

        This method handles automatically finding line breaks, URLs and
        parsing simple formatting markup (simplified Markdown and HTML).
        """
        return [ChatMessageSegment(parsed.text, **parsed.params)
                for parsed in chat_message_parser.parse(text)]

    @staticmethod
    def deserialize(segment):
        """Create a chat message segment from a parsed MESSAGE_SEGMENT."""
        formatting = segment.formatting
        link_data = segment.link_data
        # The formatting options are optional.
        return ChatMessageSegment(
            segment.text,
            segment_type=segment.type_,
            is_bold=formatting is not None and bool(formatting.bold),
            is_italic=formatting is not None and bool(formatting.italic),
            is_strikethrough=(formatting is not None and
                              bool(formatting.strikethrough)),
            is_underline=(formatting is not None and
                          bool(formatting.underline)),
            link_target=None if link_data is None else link_data.link_target,
        )

    def serialize(self):
        """Serialize the segment to pblite."""
        formatting_flags = [int(self.is_bold), int(self.is_italic),
                            int(self.is_strikethrough), int(self.is_underline)]
        return [self.type_.value, self.text, formatting_flags,
                [self.link_target]]
class ChatMessageEvent(ConversationEvent):
    """An event containing a chat message.

    Corresponds to ClientChatMessage in the API.
    """

    @property
    def text(self):
        """A textual representation of the message."""
        lines = ['']
        for segment in self.segments:
            # TEXT and LINK segments both contribute their raw text.
            if segment.type_ in (schemas.SegmentType.TEXT,
                                 schemas.SegmentType.LINK):
                lines[-1] += segment.text
            elif segment.type_ == schemas.SegmentType.LINE_BREAK:
                lines.append('')
            else:
                logger.warning('Ignoring unknown chat message segment type: {}'
                               .format(segment.type_))
        lines.extend(self.attachments)
        return '\n'.join(lines)

    @property
    def segments(self):
        """List of ChatMessageSegments in the message."""
        raw_segments = self._event.chat_message.message_content.segment
        # raw_segments may be None because the field is optional.
        if raw_segments is None:
            return []
        return [ChatMessageSegment.deserialize(seg) for seg in raw_segments]

    @property
    def attachments(self):
        """Attachments in the message."""
        raw_attachments = self._event.chat_message.message_content.attachment
        if raw_attachments is None:
            raw_attachments = []
        results = []
        for attachment in raw_attachments:
            embed = attachment.embed_item
            if embed.type_ == [249]:  # PLUS_PHOTO
                # Image messages contain no message segments and thus no
                # automatic textual fallback, so dig the URL out of the
                # embed data (indices are reverse-engineered).
                try:
                    results.append(embed.data['27639957'][0][3])
                except (KeyError, TypeError, IndexError):
                    logger.warning(
                        'Failed to parse PLUS_PHOTO attachment: {}'
                        .format(attachment)
                    )
            elif embed.type_ == [340, 335, 0]:
                pass  # Google Maps URL that's already in the text.
            else:
                logger.warning('Ignoring unknown chat message attachment: {}'
                               .format(attachment))
        return results
class RenameEvent(ConversationEvent):
    """An event that renames a conversation.

    Corresponds to ClientConversationRename in the API.
    """

    @property
    def new_name(self):
        """The conversation's new name.

        An empty string if the conversation's name was cleared.
        """
        # Read directly from the underlying protocol event.
        return self._event.conversation_rename.new_name

    @property
    def old_name(self):
        """The conversation's old name.

        An empty string if the conversation had no previous name.
        """
        return self._event.conversation_rename.old_name
class MembershipChangeEvent(ConversationEvent):
    """An event that adds or removes a conversation participant.

    Corresponds to ClientMembershipChange in the API.
    """

    @property
    def type_(self):
        """The membership change type (MembershipChangeType)."""
        return self._event.membership_change.type_

    @property
    def participant_ids(self):
        """Return the UserIDs involved in the membership change.

        Multiple users may be added to a conversation at the same time.
        """
        # Convert each raw participant id into the library's UserID type.
        return [user.UserID(chat_id=id_.chat_id, gaia_id=id_.gaia_id)
                for id_ in self._event.membership_change.participant_ids]
|
memeshack/lolgit | hangups/event.py | """Simple event observer system supporting asyncio.
Observers must be removed to avoid memory leaks.
"""
import asyncio
import logging
logger = logging.getLogger(__name__)


class Event(object):
    """Event that tracks a list of observer callbacks to notify when fired."""

    def __init__(self, name):
        """Create a new Event with a name."""
        self._name = str(name)
        self._observers = []

    def add_observer(self, callback):
        """Add an event observer callback.

        callback may be a coroutine function or a plain function.

        Raises ValueError if the callback has already been added.
        """
        if callback in self._observers:
            raise ValueError('{} is already an observer of {}'
                             .format(callback, self))
        self._observers.append(callback)

    def remove_observer(self, callback):
        """Remove an event observer callback.

        Raises ValueError if the callback is not an event observer.
        """
        if callback not in self._observers:
            raise ValueError('{} is not an observer of {}'
                             .format(callback, self))
        self._observers.remove(callback)

    async def fire(self, *args, **kwargs):
        """Call all observer callbacks with the same arguments.

        Fix: ``@asyncio.coroutine`` was removed in Python 3.11, so this is
        now a native coroutine.  Whether to await is decided by inspecting
        the *returned* object instead of the callback, which also handles
        coroutine functions hidden behind wrappers (e.g. functools.partial)
        that ``iscoroutinefunction`` would miss.
        """
        logger.debug('Fired {}'.format(self))
        for observer in self._observers:
            result = observer(*args, **kwargs)
            if asyncio.iscoroutine(result):
                await result

    def __repr__(self):
        return 'Event(\'{}\')'.format(self._name)
|
memeshack/lolgit | hangups/test/test_pblite.py | <gh_stars>1-10
"""Tests for hangups.pblite."""
import enum
import pytest
import types
from hangups import pblite
##############################################################################
# Fixtures
##############################################################################
class Colour(enum.Enum):
    # Sample enum used to exercise EnumField parsing below.
    RED = 1
    BLUE = 2


# Module-level fixture instances shared by the tests below.
field = pblite.Field()  # required scalar field
optional_field = pblite.Field(is_optional=True)
enum_field = pblite.EnumField(Colour)
repeated_field = pblite.RepeatedField(pblite.Field())
optional_repeated_field = pblite.RepeatedField(pblite.Field(),
                                               is_optional=True)
# Message with a named field, an ignored (unnamed) field, and an
# optional named field, matched positionally against the input list.
message = pblite.Message(
    ('item', pblite.Field()),
    (None, pblite.Field()),
    ('count', pblite.Field(is_optional=True)),
)
##############################################################################
# Tests
##############################################################################
def test_field():
    assert field.parse("test") == "test"


def test_field_none():
    with pytest.raises(ValueError) as e:
        field.parse(None)
    assert e.value.args[0] == 'Field is not optional'


def test_optional_field_none():
    # Fix: identity comparison with None (PEP 8) instead of ``== None``.
    assert optional_field.parse(None) is None


def test_enum_field():
    assert enum_field.parse(2) == Colour.BLUE


def test_enum_field_invalid():
    with pytest.raises(ValueError) as e:
        enum_field.parse(None)
    assert e.value.args[0] == 'None is not a valid Colour'


def test_repeated_field():
    assert repeated_field.parse([1, 2, 3]) == [1, 2, 3]


def test_repeated_field_item_error():
    with pytest.raises(ValueError) as e:
        repeated_field.parse([1, None, 3])
    assert e.value.args[0] == 'RepeatedField item: Field is not optional'


def test_repeated_field_none():
    with pytest.raises(ValueError) as e:
        repeated_field.parse(None)
    assert e.value.args[0] == 'RepeatedField is not optional'


def test_repeated_field_not_list():
    with pytest.raises(ValueError) as e:
        repeated_field.parse(123)
    assert e.value.args[0] == ('RepeatedField expected list but got '
                               '<class \'int\'>')


def test_optional_repeated_field_none():
    # Fix: ``is None`` instead of ``== None``.
    assert optional_repeated_field.parse(None) is None


def test_message():
    assert (message.parse(['rose', None, 1]).__dict__ ==
            types.SimpleNamespace(item='rose', count=1).__dict__)


def test_message_extra_field():
    # Trailing unknown fields are ignored by the parser.
    assert (message.parse(['rose', None, 1, 100]).__dict__ ==
            types.SimpleNamespace(item='rose', count=1).__dict__)


def test_message_missing_optional_field():
    assert (message.parse(['rose', None]).__dict__ ==
            types.SimpleNamespace(item='rose', count=None).__dict__)


def test_message_missing_field():
    with pytest.raises(ValueError) as e:
        message.parse([])
    assert e.value.args[0] == 'Message field \'item\': Field is not optional'


def test_message_not_list():
    with pytest.raises(ValueError) as e:
        message.parse(123)
    assert e.value.args[0] == ('Message expected list but got '
                               '<class \'int\'>')
|
simonqiang/gftest | app/main/api/__init__.py | __author__ = '608502920'
|
simonqiang/gftest | app/main/__init__.py | from flask import Blueprint
main = Blueprint('main', __name__)
# Imported after the blueprint exists to avoid a circular import:
# views/errors import ``main`` from this module.
from . import views, errors
from flask_restful import Api
from .api.gfcard import GiftCard
# REST API mounted on the blueprint.
api = Api(main)
api.add_resource(GiftCard, '/giftcard/request')
from apscheduler.schedulers.background import BackgroundScheduler
from .jobs.giftCardSchedule import GiftCardSchedule
# Background job that refills the Redis serial pools every 3 seconds.
# NOTE(review): the scheduler starts at import time, so every process
# importing this package runs its own copy — confirm that is intended.
giftCArdSchedule = GiftCardSchedule()
scheduler = BackgroundScheduler()
scheduler.add_job(giftCArdSchedule.test, 'interval', seconds=3)
scheduler.start()
|
simonqiang/gftest | app/models.py | import datetime
from . import db
from sqlalchemy.dialects.mysql import BIGINT, INTEGER
from sqlalchemy import DateTime, func, text
class Role(db.Model):
    """A user role; one role has many users."""

    __tablename__ = 'roles'

    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64), unique=True)
    # One-to-many: User rows point back here via users.role_id.
    users = db.relationship('User', backref='role', lazy='dynamic')

    def __repr__(self):
        return '<Role %r>' % self.name
class User(db.Model):
    """An application user."""

    __tablename__ = 'users'

    id = db.Column(BIGINT(display_width=20, unsigned=True), primary_key=True)
    username = db.Column(db.String(64), unique=True, index=True)
    # NOTE(review): 'fristname' looks like a typo for 'firstname', but the
    # attribute/column name is part of the schema — renaming needs a
    # migration and a coordinated code change, so it is kept as-is here.
    fristname = db.Column(db.String(64), unique=True, index=True)
    lastname = db.Column(db.String(64), unique=True, index=True)
    middlename = db.Column(db.String(64), unique=True, index=True)
    role_id = db.Column(db.Integer, db.ForeignKey('roles.id'))

    def __repr__(self):
        return '<User %r>' % self.username
class CardDenomination(db.Model):
    """A gift card denomination (point/amount tier)."""

    __tablename__ = 'card_denomination'

    id = db.Column(INTEGER(unsigned=True, display_width=11), primary_key=True, autoincrement=True, nullable=False)
    # Public code clients send to request this denomination.
    code = db.Column(db.String(8, collation='utf8_unicode_ci'), unique=True, index=True, nullable=False)
    point = db.Column(db.Integer, nullable=False)
    amount = db.Column(db.Integer, nullable=False)
    description = db.Column(db.String(255, collation='utf8_unicode_ci'), nullable=True, default=None)
    # Prefix used when generating card serial numbers.
    serialCode = db.Column(db.String(8, collation='utf8_unicode_ci'), unique=True, index=True, nullable=False)
    # Prefix used when generating card SKU codes.
    skuCode = db.Column(db.String(8, collation='utf8_unicode_ci'), unique=True, index=True, nullable=False)

    def __repr__(self):
        return '<card_denomination_code %r>' % self.code
class CardGiftCard(db.Model):
    """A generated gift card (serial + PIN) tied to a denomination."""

    __tablename__ = 'card_giftcard'

    id = db.Column(BIGINT(unsigned=True, display_width=20), primary_key=True, autoincrement=True, nullable=False)
    gfSerial = db.Column(db.String(20, collation='utf8_unicode_ci'), unique=True, index=True, nullable=False)
    gfPin = db.Column(db.String(20, collation='utf8_unicode_ci'), unique=True, index=True, nullable=False)
    # Fix: pass the callable, not its result — ``utcnow()`` would be
    # evaluated once at import time, stamping every row with that moment.
    gfCreate_dt = db.Column(db.DateTime, default=datetime.datetime.utcnow, nullable=False)
    gfConsumed_dt = db.Column(DateTime, default=func.now(), nullable=False)
    gfReference = db.Column(db.String(64, collation='utf8_unicode_ci'), nullable=False, index=True)
    # Single-character status flag (e.g. 'X' when freshly generated).
    gfStatus = db.Column(db.String(1, collation='utf8_unicode_ci'), nullable=False, index=True)
    gfSKU = db.Column(db.String(32, collation='utf8_unicode_ci'), nullable=False)
    gfOrderID = db.Column(db.String(64, collation='utf8_unicode_ci'), nullable=True, default=text("NULL"), index=True)
    gfDenom_id = db.Column(INTEGER(unsigned=True, display_width=11), db.ForeignKey('card_denomination.id'), index=True, nullable=False,)

    def __repr__(self):
        return '<card_gfcard_SKU %r>' % self.gfSKU
|
simonqiang/gftest | application.py | <reponame>simonqiang/gftest
import os, logging
from app import create_app
from logging.handlers import RotatingFileHandler
# Build the app for the configured environment ('default' when
# FLASK_CONFIG is unset).
app = create_app(os.getenv('FLASK_CONFIG') or 'default')
# Rotating log file: up to 6 backups of ~20 KB each.
handler = RotatingFileHandler('giftcard.log', maxBytes=20000, backupCount=6)
handler.setLevel(logging.INFO)
app.logger.addHandler(handler)
if __name__ == '__main__':
    app.run()
|
simonqiang/gftest | app/main/api/gfcard.py | from flask_restful import Resource, request
from ..common.utils import Utils
from ..common.GiftCardCodeHelper import GiftCardCodeHelper
from app import db
from manage import app
import sys, traceback
from config import Config
class GiftCard(Resource):
    """REST endpoint that generates and persists gift cards."""

    def post(self):
        """Validate the JSON request, then generate gift cards.

        Expects a body with ``denomination_code``, ``count``, ``hash``,
        ``sku`` and ``reference``.  Validation failures are reported with
        HTTP 200 and a descriptive message (existing API contract, kept
        for backward compatibility).
        """
        # Fix: pre-bind the names used in the except-branch log line so a
        # failure before they are assigned cannot raise NameError there.
        serialCode = None
        reference_value = None
        try:
            json_data = request.get_json(force=True)
            denomination_code = Utils.validateParameter(json_data['denomination_code'])
            count = Utils.validateParameter(json_data['count'])
            # Local renamed from ``hash`` to avoid shadowing the builtin.
            request_hash = Utils.validateParameter(json_data['hash'])
            sku = Utils.validateParameter(json_data['sku'])
            reference_value = Utils.validateParameter(json_data['reference'])
            # validate count
            if not Utils.isInteger(count):
                return 'invalid count', 200
            # validate hash (md5 of code + count + secret)
            if not Utils.validate_hash(request_hash, denomination_code, count,
                                       Config.GIFTCARD_SECRET_KEY):
                return 'invalid hash code', 200
            giftCardCodeHelper = GiftCardCodeHelper()
            # query the serial/SKU prefixes for this denomination
            serialCode = giftCardCodeHelper.get_denomination_serialCode(denomination_code)
            skuCode = giftCardCodeHelper.get_denomination_skuCode(denomination_code)
            if not serialCode or not skuCode:
                return 'invalid denomination code', 200
            if not reference_value:
                # Fix: return a status code like every other validation
                # path (the original returned the bare string).
                # NOTE(review): message spelling kept for API compatibility.
                return 'invlidate reference', 200
            if not Utils.isInteger(sku):
                return 'invalide sku', 200
            count = int(count)
            sku = int(sku)
            app.logger.info('serial_code : ' + serialCode + ' reference : ' + reference_value)
            giftCardCodeHelper.retrive_and_insert_to_db(count, sku, serialCode, skuCode, reference_value)
        except Exception:
            # Fix: bare ``except:`` would also swallow SystemExit and
            # KeyboardInterrupt; lazy %-style args keep logging safe even
            # when the values are still None.
            app.logger.error('serial_code : %s reference : %s',
                             serialCode, reference_value)
            db.session.close()
            traceback.print_exc()
            return {'error ': str(sys.exc_info()[0])}
        return {'reference': reference_value, 'status': 200}
|
simonqiang/gftest | tests/main/common/testredishelper.py | <filename>tests/main/common/testredishelper.py
import unittest
from app.main.common.RedisHelper import RedisHelper
from app.main.jobs.giftCardSchedule import GiftCardSchedule
class RedisHelperTestCase(unittest.TestCase):
    """Exercises RedisHelper against a live Redis instance."""

    def test_redis_srandmember(self):
        helper = RedisHelper()
        sampled = helper.redis_srandmember('liuqiang-sets', 4)
        print(sampled)

    def test_redis_sadd_spop(self):
        helper = RedisHelper()
        # Members "1".."8", built in one expression instead of appends.
        members = [str(number) for number in range(1, 9)]
        helper = RedisHelper()
        helper.redis_sadd('liuqiang-sets', members)
        self.assertEqual(8, helper.redis_scard('liuqiang-sets'))
        print(helper.redis_scard('liuqiang-sets'))
        popped = helper.redis_spop_str('liuqiang-sets', 4)
        print(popped)
        self.assertEqual(4, len(popped))
        popped = helper.redis_spop_str('liuqiang-sets', 4)
        print(popped)
        self.assertEqual(4, len(popped))

    def test_redis_incr_get(self):
        helper = RedisHelper()
        helper.redis_set('index1', 0)
        self.assertEqual(10, helper.redis_inc('index1', 10))
        self.assertEqual(10, int(helper.redis_get('index1')))

    def test_get_flag(self):
        helper = RedisHelper()
        if helper.redis_get('liuqiang_flag'):
            print("there is something")
        else:
            print("there is None")

    def test_set_flag(self):
        # Set the flag with a 5-second expiry.
        RedisHelper().redis_set_expire('liuqiang_flag', 1, 5)

    def test_schedule(self):
        GiftCardSchedule().test()


if __name__ == '__main__':
    unittest.main()
|
simonqiang/gftest | manage.py | #!/usr/bin/env python
import os, logging
from app import create_app, db
from app.models import User, Role, CardDenomination, CardGiftCard
from flask.ext.script import Manager, Shell
from flask.ext.migrate import Migrate, MigrateCommand
from logging.handlers import RotatingFileHandler
# Build the app from FLASK_CONFIG (falls back to 'default').
# NOTE(review): the flask.ext.* imports above were removed in Flask 1.0,
# so this script requires a pre-1.0 Flask — confirm pinned versions.
app = create_app(os.getenv('FLASK_CONFIG') or 'default')
manager = Manager(app)
migrate = Migrate(app, db)
# Rotating log file: up to 6 backups of ~20 KB each.
handler = RotatingFileHandler('giftcard.log', maxBytes=20000, backupCount=6)
handler.setLevel(logging.INFO)
app.logger.addHandler(handler)
def make_shell_context():
    # Objects pre-imported into the ``python manage.py shell`` session.
    return dict(app=app, db=db, User=User, Role=Role, CardDenomination=CardDenomination, CardGiftCard=CardGiftCard)
manager.add_command("shell", Shell(make_context=make_shell_context))
manager.add_command('db', MigrateCommand)
@manager.command
def test():
    """Run the unit tests."""
    import unittest
    tests = unittest.TestLoader().discover('tests')
    unittest.TextTestRunner(verbosity=2).run(tests)
if __name__ == '__main__':
    manager.run()
|
simonqiang/gftest | app/main/common/RedisHelper.py | <reponame>simonqiang/gftest<filename>app/main/common/RedisHelper.py<gh_stars>0
import redis
from manage import app
class RedisHelper():
    """Thin wrapper around redis-py using the Flask app's REDIS_* config.

    Each call builds its own client (preserving the original behavior of
    reading the config at call time); redis-py clients pool connections
    internally, so this is acceptable at the current call volume.
    """

    def _client(self):
        # Single place to build a client from app config — deduplicates the
        # StrictRedis construction previously copy-pasted into every method.
        return redis.StrictRedis(host=app.config['REDIS_HOST'],
                                 port=app.config['REDIS_PORT'])

    def redis_lpush_list(self, name_list, list):
        """Push every element of *list* onto the left of *name_list*.

        NOTE(review): the parameter named ``list`` shadows the builtin;
        kept to preserve the keyword interface for existing callers.
        """
        pipe = self._client().pipeline()
        pipe.lpush(name_list, *list)
        pipe.execute()

    def redis_rpop(self, name_list):
        """Pop from the right end of *name_list*, decoded as UTF-8."""
        return str(self._client().rpop(name_list), encoding='UTF-8')

    def redis_sadd(self, name_sets, list):
        """Add every element of *list* to the set *name_sets*."""
        self._client().sadd(name_sets, *list)

    def redis_srandmember(self, name_sets, count):
        """Return *count* random members without removing them."""
        return self._client().srandmember(name_sets, count)

    def redis_spop_str(self, name_sets, count):
        """Pop *count* members and return them decoded as UTF-8 strings."""
        mylist = self.redis_spop(name_sets, count)
        # convert byte to string
        for i in range(len(mylist)):
            mylist[i] = str(mylist[i], encoding='UTF-8')
        return mylist

    def redis_spop(self, name_sets, count):
        """Pop *count* members via one pipeline; returns raw bytes values."""
        pipe = self._client().pipeline()
        for index in range(count):
            pipe.spop(name_sets)
        return pipe.execute()

    def redis_scard(self, name_sets):
        """Return the cardinality of the set as an int."""
        return int(self._client().scard(name_sets))

    def redis_delete(self, name_index):
        """Delete the key *name_index*."""
        self._client().delete(name_index)

    def redis_set(self, name_index, value):
        """Set *name_index* to *value*."""
        self._client().set(name_index, value)

    def redis_get_str(self, name_index):
        """Get *name_index* decoded as UTF-8 (raises if the key is absent)."""
        return str(self._client().get(name_index), encoding='UTF-8')

    def redis_get(self, name_index):
        """Get *name_index* as raw bytes, or None when absent."""
        return self._client().get(name_index)

    def redis_set_expire(self, name_index, value, expire):
        """Set *name_index* to *value* with a TTL of *expire* seconds."""
        self._client().set(name_index, value, expire)

    def redis_inc(self, name_index, amount):
        """Increment *name_index* by *amount* and return the new value."""
        return self._client().incr(name_index, amount)
|
simonqiang/gftest | tests/main/common/testgiftcardcodegenerator.py | import unittest, random
from app.main.common.GiftCardCodeHelper import GiftCardCodeHelper
from app.main.common.RedisHelper import RedisHelper
class GiftCardCodeGeneratorHelperTestCase(unittest.TestCase):
    """Smoke tests for GiftCardCodeHelper's code generators."""

    def test_generate_pin(self):
        helper = GiftCardCodeHelper()
        for _ in range(10000):
            print(helper.generate_pin())

    def test_generate_serial(self):
        helper = GiftCardCodeHelper()
        start = 1
        print(helper.generate_serial(100, start, 'UPGC1S'))
        start += 100
        print(helper.generate_serial(100, start, 'UPGC1S'))
|
simonqiang/gftest | app/__init__.py | <reponame>simonqiang/gftest<filename>app/__init__.py
from flask import Flask
from flask.ext.bootstrap import Bootstrap
from flask.ext.mail import Mail
from flask.ext.moment import Moment
from flask.ext.sqlalchemy import SQLAlchemy
from config import config
from flask.ext.redis import Redis
# Flask extension singletons, bound to an app inside create_app().
bootstrap = Bootstrap()
mail = Mail()
moment = Moment()
db = SQLAlchemy()
redis1 = Redis()
def create_app(config_name):
    """Application factory: build and configure a Flask app.

    config_name selects an entry from ``config.config``
    (e.g. 'development', 'default').
    """
    app = Flask(__name__)
    app.config.from_object(config[config_name])
    config[config_name].init_app(app)
    # NOTE(review): hard-coded Redis settings override anything the
    # selected config object provides — confirm this is intentional.
    app.config['REDIS_HOST'] = 'localhost'
    app.config['REDIS_PORT'] = 6379
    app.config['REDIS_DB'] = 0
    bootstrap.init_app(app)
    mail.init_app(app)
    moment.init_app(app)
    db.init_app(app)
    redis1.init_app(app)
    # Imported here (not at module top) to avoid a circular import with
    # modules that import ``db`` from this package.
    from .main import main as main_blueprint
    # from .main.common import common
    app.register_blueprint(main_blueprint)
    # app.register_blueprint(common)
    return app
|
simonqiang/gftest | app/main/jobs/giftCardSchedule.py | <filename>app/main/jobs/giftCardSchedule.py
from config import Config
from app.main.common.GiftCardCodeHelper import GiftCardCodeHelper
from ..common.RedisHelper import RedisHelper
import sys, traceback
class GiftCardSchedule(object):
    """Background job that keeps per-denomination serial pools filled."""

    def test(self):
        """Refill each Redis serial pool that has run low.

        A short-lived Redis flag acts as a crude lock so only one worker
        refills at a time.
        """
        print("running jobs")
        giftCardCodeHelper = GiftCardCodeHelper()
        redisHelper = RedisHelper()
        flag_key = giftCardCodeHelper.giftcard_prefix + giftCardCodeHelper.giftcard_flag
        if redisHelper.redis_get(flag_key):
            return
        redisHelper.redis_set_expire(flag_key, '1', 400)
        for serial_code in Config.SERIAL_CODES:
            try:
                # Check whether this denomination still has enough serials.
                list_key = (giftCardCodeHelper.giftcard_prefix + serial_code
                            + giftCardCodeHelper.giftcard_list)
                current_count = redisHelper.redis_scard(list_key)
                if current_count > giftCardCodeHelper.giftcard_min_count:
                    continue
                index_key = (giftCardCodeHelper.giftcard_prefix + serial_code
                             + giftCardCodeHelper.giftcard_serial_index)
                index = redisHelper.redis_get(index_key)
                # Fix: redis_get returns bytes (or None) — the original
                # compared ``bytes + int`` below, raising TypeError
                # whenever the index key already existed.
                index = int(index) if index else 1
                # Wrap around before the 8-digit serial suffix overflows.
                # NOTE(review): the Redis counter itself is not reset on
                # wraparound, only the local start index — confirm.
                if (index + giftCardCodeHelper.giftcard_generation_count
                        > giftCardCodeHelper.giftcard_generation_max):
                    index = 1
                redisHelper.redis_inc(
                    index_key, giftCardCodeHelper.giftcard_generation_count)
                # insert new serial in redis
                print('insert new serial in reids')
                serial_list = giftCardCodeHelper.generate_serial(
                    index, giftCardCodeHelper.giftcard_generation_count,
                    serial_code)
                redisHelper.redis_sadd(list_key, serial_list)
            except Exception:
                # Release the lock before reporting the failure.
                redisHelper.redis_delete(flag_key)
                e = sys.exc_info()[0]
                traceback.print_exc()
                return {'error ': str(e)}
        redisHelper.redis_delete(flag_key)
|
simonqiang/gftest | tests/main/common/testutils.py | <filename>tests/main/common/testutils.py
import unittest, datetime
from app.main.common.utils import Utils
from app import create_app, db
from app.models import CardGiftCard, CardDenomination
class UtilsTestCase(unittest.TestCase):
    """Tests for Utils helpers; DB-backed tests use the dev database."""

    def setUp(self):
        # Push an app context so db/model queries work inside tests.
        self.app = create_app('development')
        self.app_context = self.app.app_context()
        self.app_context.push()

    def tearDown(self):
        db.session.remove()
        self.app_context.pop()

    def test_validateParameter(self):
        self.assertIsNone(Utils.validateParameter(""))
        self.assertIsNone(Utils.validateParameter(None))
        print(Utils.validateParameter('money') + 'dfdfdfdf')
        self.assertEqual('money', Utils.validateParameter('money'))

    def test_isInteger(self):
        self.assertTrue(Utils.isInteger('1254'))
        self.assertFalse(Utils.isInteger('1254.343'))
        self.assertFalse(Utils.isInteger('1254dfdf'))
        self.assertFalse(Utils.isInteger(None))

    def test_validate_hash(self):
        self.assertTrue(Utils.validate_hash('f40b6cfbb93ce812fa9f92c2827543b7', "c74501e2-3910-4d8e-9192-98f67bdefe69", "A1dy1gV6Ri12H13B", ""))

    def test_getDenomiation(self):
        # Fix: assertEquals is a deprecated alias removed in Python 3.12;
        # use assertEqual.
        # NOTE(review): Utils.getDenominationId is not defined in the
        # visible Utils class — confirm this test can actually pass.
        self.assertEqual(1, Utils.getDenominationId('10'))

    def test_generate_uuid(self):
        tempuuid = Utils.generate_uuid()
        print(tempuuid)
        self.assertIsNotNone(tempuuid)

    def test_redis_add(self):
        Utils.redis_add()

    def test_mode_giftcard(self):
        # Insert two fully-populated gift card rows.
        giftcard = CardGiftCard()
        giftcard.gfSerial = 'serial1'
        giftcard.gfPin = 'pin1'
        giftcard.gfConsumed_dt = datetime.datetime.now()
        giftcard.gfConsumed_dt = datetime.datetime.now()
        giftcard.gfReference = 'reference1'
        giftcard.gfStatus = 'X'
        giftcard.gfSKU = 'SKU1'
        giftcard.gfDenom_id = 1
        giftcard2 = CardGiftCard()
        giftcard2.gfSerial = 'serial2'
        giftcard2.gfPin = 'pin2'
        giftcard2.gfConsumed_dt = datetime.datetime.now()
        giftcard2.gfConsumed_dt = datetime.datetime.now()
        giftcard2.gfReference = 'reference2'
        giftcard2.gfStatus = 'X'
        giftcard2.gfSKU = 'SKU2'
        giftcard2.gfDenom_id = 1
        db.session().add(giftcard)
        db.session().add(giftcard2)
        db.session().commit()

    def test_mode_giftcard_select(self):
        sku_list = CardGiftCard.query.filter_by(gfDenom_id=1).all()
        print(sku_list[0].gfSerial)

    def test_mode_cardDenimination(self):
        cardDenom = CardDenomination.query.filter_by(serialCode='UPGC1S').all()
        print(cardDenom[0].id)

    def test_generate_sku_prefix(self):
        print(Utils.generate_skuCode_prefix('UP010'))

    def test_query_demonation(self):
        # Exercises repeated denomination lookups around an insert.
        for i in range(10):
            print(Utils.getCardDenomination('UP500'))
        tempvalue = '27'
        giftcard = CardGiftCard()
        giftcard.gfSerial = tempvalue
        giftcard.gfPin = tempvalue
        giftcard.gfConsumed_dt = datetime.datetime.utcnow()
        giftcard.gfCreate_dt = datetime.datetime.utcnow()
        giftcard.gfReference = tempvalue
        giftcard.gfStatus = 'X'
        giftcard.gfSKU = tempvalue
        giftcard.gfDenom_id = 6
        db.session.add(giftcard)
        db.session.commit()
        # db.session.close()
        for i in range(100):
            print(Utils.getCardDenomination('UP500'))


if __name__ == '__main__':
    unittest.main()
simonqiang/gftest | app/main/common/utils.py | import hashlib, uuid
from app.models import CardDenomination
from time import strftime, gmtime
import redis, datetime
from app import redis1
class Utils():
    """Stateless helper functions used by the gift card API."""

    @staticmethod
    def validateParameter(param):
        """Return *param* unchanged if truthy, otherwise None."""
        return param if param else None

    @staticmethod
    def isInteger(param):
        """Return True if *param* is a non-None string of digits."""
        # Fix: identity comparison with None (PEP 8) and a direct boolean
        # expression instead of an if/else returning True/False.
        return param is not None and param.isdigit()

    @staticmethod
    def getCardDenomination(param):
        """Look up a CardDenomination row by its public code, or None."""
        denomination = CardDenomination.query.filter_by(code=param).first()
        return denomination

    @staticmethod
    def validate_hash(hash, denomination_code, count, secret):
        """Check an MD5 request signature: md5(code + count + secret)."""
        m = hashlib.md5()
        m.update(denomination_code.encode())
        m.update(count.encode())
        m.update(secret.encode())
        # Fix: removed the debug print of the expected digest, which
        # leaked signature material to stdout/logs.
        return hash == m.hexdigest()

    @staticmethod
    def hdm5_hash(denomination_code, count, secret):
        """Return the raw MD5 digest of code + count + secret.

        NOTE(review): name looks like a typo for ``md5_hash``; kept for
        interface compatibility with existing callers.
        """
        m = hashlib.md5()
        m.update(denomination_code.encode())
        m.update(count.encode())
        m.update(secret.encode())
        return m.digest()

    @staticmethod
    def generate_uuid():
        """Return a random UUID4 as a string."""
        return str(uuid.uuid4())

    @staticmethod
    def redis_add():
        """Smoke-test helper writing one value to a local Redis."""
        redisinstance = redis.StrictRedis()
        redisinstance.set('foo', 'bar')
        print(str(redisinstance.get('foo')))

    @staticmethod
    def generate_skuCode_prefix(skuCode):
        """Prefix *skuCode* with the current UTC year+month as YYMM."""
        # Fix: removed an unused ``now`` local left over from an earlier
        # implementation.
        return strftime('%y%m', gmtime()) + skuCode
|
simonqiang/gftest | app/main/common/GiftCardCodeHelper.py | import random, datetime
from .RedisHelper import RedisHelper
from app.models import CardDenomination, CardGiftCard
from app import db
from .utils import Utils
from config import Config
class GiftCardCodeHelper():
    """Generates gift card serials/PINs and persists issued cards."""

    # Redis key fragments; full keys look like
    # ``giftcard_prefix-<serialCode>-giftcard_serial_list`` etc.
    giftcard_prefix = 'giftcard_prefix-'
    giftcard_serial_index = '-giftcard_serial_index'
    giftcard_list = '-giftcard_serial_list'
    giftcard_sets = '-giftcard_serial_sets'
    giftcard_flag = '-giftcard_serial_flag'
    giftcard_min_count = Config.GIFTCARD_MIN_COUNT
    giftcard_generation_count = Config.GIFTCARD_GENERATION_COUNT
    giftcard_generation_max = Config.GIFTCARD_GENERATION_MAX

    def generate_serial(self, start, count, serialCode):
        """Return *count* serials: serialCode + zero-padded 8-digit index."""
        serial_list = []
        for i in range(count):
            serial_list.append(serialCode + str(start + i).zfill(8))
        return serial_list

    def generate_pin(self):
        """Return a 16-digit PIN: a random permutation of 1..8 twice.

        NOTE(review): random.shuffle is not cryptographically secure and
        the digit multiset is fixed — consider the ``secrets`` module for
        production PINs.
        """
        pinList = ['1', '2', '3', '4', '5', '6', '7', '8', '1', '2', '3', '4', '5', '6', '7', '8']
        random.shuffle(pinList)
        return ''.join(pinList)

    def insert_new_serial_into_redis(self, start, count, serialCode):
        """Advance the serial index and add freshly generated serials."""
        redisHelper = RedisHelper()
        # Fix: arguments were swapped — RedisHelper.redis_inc takes
        # (name_index, amount), so the key and the increment were reversed.
        redisHelper.redis_inc(self.giftcard_prefix + serialCode + self.giftcard_serial_index, count)
        # generate and insert the serial list into redis
        serial_list = self.generate_serial(start, count, serialCode)
        redisHelper.redis_sadd(self.giftcard_prefix + serialCode + self.giftcard_list, serial_list)

    def get_denomination_id(self, serialCode):
        """Return the denomination id for *serialCode*, or None."""
        denomination = CardDenomination.query.filter_by(serialCode=serialCode).all()
        if denomination and len(denomination) >= 1:
            return denomination[0].id

    def get_denomination_serialCode(self, code):
        """Return the serial prefix for denomination *code*, or None."""
        denomination = db.session.query(CardDenomination).filter_by(code=code).all()
        if denomination and len(denomination) >= 1:
            return denomination[0].serialCode

    def get_denomination_skuCode(self, code):
        """Return the SKU prefix for denomination *code*, or None."""
        denomination = CardDenomination.query.filter_by(code=code).all()
        if denomination and len(denomination) >= 1:
            return denomination[0].skuCode

    def retrive_and_insert_to_db(self, serial_count, sku_start, serialCode, skuCode, reference):
        """Pop serials from Redis and persist them as gift card rows.

        NOTE(review): method name keeps the original 'retrive' spelling
        for callers; gfConsumed_dt uses the MySQL zero-date sentinel.
        """
        reddisHelper = RedisHelper()
        serial_list = reddisHelper.redis_spop_str(self.giftcard_prefix + serialCode + self.giftcard_list, serial_count)
        denomination_id = self.get_denomination_id(serialCode)
        sku_prefix = Utils.generate_skuCode_prefix(skuCode)
        current_time = datetime.datetime.utcnow()
        sku_count = 0
        for serial in serial_list:
            giftcard = CardGiftCard()
            giftcard.gfSerial = serial
            giftcard.gfPin = self.generate_pin()
            giftcard.gfConsumed_dt = '0000-00-00 00:00:00'
            giftcard.gfCreate_dt = current_time
            giftcard.gfReference = reference
            giftcard.gfStatus = 'X'
            giftcard.gfSKU = sku_prefix + str(sku_start + sku_count).zfill(6)
            giftcard.gfDenom_id = denomination_id
            sku_count += 1
            db.session.add(giftcard)
        db.session.commit()
        db.session.close()
|
LeeSeongJinCa/NIKE-THE-DRAW-Calendar | parser/run.py | import json
from pprint import pprint
from time import sleep as delay
from get_drawable_items import get_drawable_items
from get_calendar_for_item import get_calendar_for_item
from render_row_from_item import render_row_from_item, render_table_header
from inject_result_to_readme import inject_result_to_readme
if __name__ == '__main__':
    # Scrape the list of upcoming THE DRAW items from nike.com.
    drawable_items = get_drawable_items()
    delay(0.1)  # be polite between requests
    markdown = ''
    for item in drawable_items:
        # Fetch each item's draw schedule from its detail page.
        calendar = get_calendar_for_item(item['href'])
        item['calendar'] = calendar
        delay(0.25)
        rendered_markdown = render_row_from_item(item)
        markdown += rendered_markdown + '\n'
    if len(drawable_items):
        markdown = render_table_header() + markdown
    else:
        markdown = '**현재 진행중인 THE DRAW가 없습니다!**\n'
    # Splice the rendered table into README.md between the markers.
    inject_result_to_readme(markdown)
    pprint(drawable_items)
    print('👟 Updated Draws!')
    # Also persist the scraped data for the web app's mock data.
    with open('../app/src/mockups/sneakers.json', 'w') as product_file:
        json.dump(drawable_items, product_file, ensure_ascii=False, indent=2)
        product_file.write('\n')
|
LeeSeongJinCa/NIKE-THE-DRAW-Calendar | parser/api.py | import requests
from bs4 import BeautifulSoup
ROOT_URL = 'https://www.nike.com'

# Desktop Chrome User-Agent; nike.com may serve different markup to
# unidentified clients.
_HEADERS = {
    'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) '
                  'AppleWebKit/537.36 (KHTML, like Gecko) '
                  'Chrome/85.0.4183.121 Safari/537.36',
}


def get_request(path='/kr'):
    """GET a nike.com path and return the parsed BeautifulSoup document.

    Fix: added a request timeout — requests.get without one can hang the
    scraper forever on a stalled connection.
    """
    response = requests.get(ROOT_URL + path, headers=_HEADERS, timeout=30)
    soup = BeautifulSoup(response.text, 'html.parser')
    return soup
|
LeeSeongJinCa/NIKE-THE-DRAW-Calendar | parser/get_calendar_for_item.py | import api
def get_calendar_for_item(item_href):
    """Scrape the draw-schedule paragraphs from an item's detail page."""
    soup = api.get_request(item_href)
    paragraphs = soup.find_all('p', class_='draw-info')
    calendar = []
    for paragraph in paragraphs:
        calendar.append(paragraph.text)
    return calendar
|
LeeSeongJinCa/NIKE-THE-DRAW-Calendar | parser/render_row_from_item.py | <filename>parser/render_row_from_item.py
from api import ROOT_URL
def render_table_header():
    """Return the two-line markdown table header (photo/name/schedule)."""
    header_rows = (
        '| 사진 | 제품명 | 응모 일정 |',
        '| --- | ---- | ------- |',
        '',
    )
    return '\n'.join(header_rows)
def render_column_from_calendar(item_calendar):
    """Join schedule lines with <br />, bolding only the first line."""
    rendered_lines = []
    for index, line in enumerate(item_calendar):
        if index == 0:
            rendered_lines.append(f'<strong>{line}</strong>')
        else:
            rendered_lines.append(line)
    return '<br />'.join(rendered_lines)
def render_row_from_item(item):
    """Render one markdown table row (image | linked title | schedule)."""
    calendar_cell = render_column_from_calendar(item['calendar'])
    item_url = ROOT_URL + item['href']
    return (
        f'| <img src="{item["image"]}" width="256" /> '
        f'| <a href="{item_url}"><strong>{item["title"]}</strong><br /></a>'
        f' "{item["theme"]}" '
        f'| {calendar_cell} |'
    )
|
LeeSeongJinCa/NIKE-THE-DRAW-Calendar | parser/get_drawable_items.py | <reponame>LeeSeongJinCa/NIKE-THE-DRAW-Calendar
import api
def get_drawable_items():
soup = api.get_request(
'/kr/launch?type=upcoming&activeDate=date-filter:AFTER')
launch_items = soup.find_all('div', class_='product-card')
# print(launch_items)
drawable_items = []
for launch_item in launch_items:
soldout_button = launch_item.find('a', class_='ncss-btn-primary-dark')
print(soldout_button)
if (soldout_button and soldout_button.text.strip() == 'THE DRAW 진행예정'):
launch_item_information = launch_item.find(
'a', class_='comingsoon')
launch_item_image = launch_item.find('img', class_='img-component')
drawable_items.append({
'title': launch_item_information.attrs['title'],
'theme': launch_item_image.attrs['alt'],
'image': launch_item_image.attrs['data-src'],
'href': launch_item_information.attrs['href'],
})
return drawable_items
|
LeeSeongJinCa/NIKE-THE-DRAW-Calendar | parser/inject_result_to_readme.py | <reponame>LeeSeongJinCa/NIKE-THE-DRAW-Calendar
from datetime import datetime
from render_row_from_item import render_table_header
# Markers in README.md between which the generated calendar is injected.
START_TAG = '<!-- DRAW CALENDAR: START -->\n'
END_TAG = '\n<!-- DRAW CALENDAR: END -->'
# README path relative to the parser/ working directory this script runs in.
README_PATH = '../README.md'
def get_timestamp():
    """Return today's date formatted as 'YYYY.MM.DD'."""
    return f"{datetime.today():%Y.%m.%d}"
def inject_result_to_readme(injected_content):
    """Rewrite README.md, replacing the span between the calendar markers.

    Everything between START_TAG and END_TAG is replaced by an update
    banner (with today's date) followed by *injected_content*; the rest of
    the README is preserved untouched.
    """
    with open(README_PATH) as readme:
        readme_content = readme.read()
    start = readme_content.find(START_TAG)
    end = readme_content.find(END_TAG)
    banner = f'> 👟 Updated Draws at {get_timestamp()}‼️\n\n'
    updated = (
        readme_content[0:start]
        + START_TAG
        + '\n'
        + banner
        + injected_content
        + readme_content[end:]
    )
    with open(README_PATH, 'w') as readme:
        readme.write(updated)
|
czetech/tecoroute-proxy | src/tecoroute_proxy/_cli.py | <reponame>czetech/tecoroute-proxy<gh_stars>0
from argparse import ArgumentParser, Namespace
from asyncio import Event, get_event_loop, run, set_event_loop_policy
from logging import DEBUG, INFO, basicConfig
from signal import SIGINT, SIGTERM
from ._misc import CONTROL, HOST, ORIGIN, PORT, dist
from ._server import ProxyServer
# Use uvloop's faster event loop implementation when it is installed;
# silently fall back to the stdlib asyncio loop otherwise.
try:
    from uvloop import EventLoopPolicy
except ImportError:
    pass
else:
    set_event_loop_policy(EventLoopPolicy())
async def _main(args: Namespace) -> None:
    """Configure logging, run the proxy server until SIGINT/SIGTERM arrives."""
    basicConfig(level=DEBUG if args.verbose else INFO)
    server = ProxyServer(args.host, args.port, args.control, args.origin)
    await server.start()
    shutdown = Event()
    loop = get_event_loop()
    # Either termination signal simply sets the event and lets us exit cleanly.
    for sig in (SIGINT, SIGTERM):
        loop.add_signal_handler(sig, shutdown.set)
    await shutdown.wait()
    await server.stop()
def cli() -> None:
    """Run the command-line interface."""
    parser = ArgumentParser(
        prog=dist.entry_points[0].name, description=dist.metadata["Summary"]
    )
    # (flags, add_argument keyword arguments) — one entry per option.
    option_table = (
        (
            ("-H", "--host"),
            {"default": HOST, "help": "host to listen on, all interfaces if not set"},
        ),
        (
            ("-p", "--port"),
            {"default": PORT, "type": int, "help": "port to listen on"},
        ),
        (
            ("-c", "--control"),
            {"default": CONTROL, "help": "control path"},
        ),
        (
            ("-o", "--origin"),
            {"default": ORIGIN, "help": "TecoRoute service URL"},
        ),
        (
            ("-v", "--verbose"),
            {"action": "store_true", "help": "verbose mode"},
        ),
        (
            ("--version",),
            {"action": "version", "version": f"TecoRoute Proxy {dist.version}"},
        ),
    )
    for flags, kwargs in option_table:
        parser.add_argument(*flags, **kwargs)
    run(_main(parser.parse_args()))
|
czetech/tecoroute-proxy | src/tecoroute_proxy/__init__.py | """TecoRoute Proxy library.
Example of asynchronous server startup:
.. code-block:: python
from tecoroute_proxy import ProxyServer
async def proxy_server_start():
server = ProxyServer(port=8080)
await server.start()
"""
from ._cli import cli
from ._request import ProxyRequest
from ._server import ProxyServer
__all__ = ["ProxyServer", "ProxyRequest", "cli"]
|
czetech/tecoroute-proxy | src/tecoroute_proxy/_request.py | <reponame>czetech/tecoroute-proxy
from __future__ import annotations
from asyncio import sleep
from base64 import b64decode, b64encode
from binascii import Error as BinasciiError
from hashlib import sha1
from http.cookies import Morsel, SimpleCookie
from json import JSONDecodeError, dumps, loads
from logging import DEBUG, WARNING
from random import choices
from string import ascii_lowercase, digits
from types import TracebackType
from typing import Any, Optional, Union
from xml.etree import ElementTree
from zlib import compress, decompress
from zlib import error as zlib_error
from aiohttp import (
ClientConnectionError,
ClientPayloadError,
ClientResponse,
ClientResponseError,
ClientSession,
)
from aiohttp.web import HTTPServiceUnavailable, Request, Response
from yarl import URL
from ._misc import HTTP_NAME, ORIGIN, logger
class ProxyRequest:
    """Proxy request.

    Wraps one user HTTP request: forwards it to the TecoRoute service and
    converts the upstream reply back into an aiohttp :class:`Response`.
    Session state (TecoRoute cookies plus saved credentials) is persisted
    in a single compressed, base64-encoded browser cookie.

    :param request: User request.
    :param origin: TecoRoute service URL.
    """

    _cookie_name = "tecoroute-proxy"
    _tr_login = "/TR_LOGIN.XML"
    _tr_logout = "/TR_LOGOUT.XML"

    def __init__(self, request: Request, origin: str = ORIGIN) -> None:
        self._request = request
        self._origin = URL(origin).origin()
        self._fetch_no = 0
        logs = [(DEBUG, "Created")]
        # Create HTTP client with headers
        client_headers = request.headers.copy()
        for header_key in ("Content-Length", "Cookie"):
            client_headers.pop(header_key, None)
        client_headers["User-Agent"] = HTTP_NAME
        self._client = ClientSession(headers=client_headers)
        # Process request session
        # The value of `request_cookie` is a cookie from the user's browser that stores
        # the session between the user and the proxy server.
        # The value of `self._session['cookies']` are cookies that store the session
        # between the proxy server and TecoRoute.
        # The value of `client_cookies` are cookies passed to TecoRoute.
        self._session = {}
        try:
            request_cookie = self._request.cookies[self._cookie_name]
            session = loads(decompress(b64decode(request_cookie)))
            # NOTE: validation via `assert` is stripped under `python -O`;
            # the AssertionError branch below then never triggers.
            assert isinstance(session, dict)
            assert "id" in session
            self._session.update(session)
            try:
                client_cookies: SimpleCookie[Any] = SimpleCookie()
                cookies = self._session["cookies"]
                for cookie in cookies:
                    client_cookies.load(cookie)
                self._client.cookie_jar.update_cookies(client_cookies)
            except KeyError:
                logs.append((WARNING, "Missing session cookies"))
            except TypeError:
                logs.append((WARNING, f"Invalid session cookies ({cookies})"))
            else:
                logs.append((DEBUG, "Session loaded"))
        except KeyError:
            logs.append((DEBUG, "Empty cookie"))
        except (BinasciiError, zlib_error, JSONDecodeError):
            logs.append((WARNING, f"Invalid cookie ({request_cookie})"))
        except AssertionError:
            logs.append((WARNING, f"Invalid session ({self._session})"))
        alphabet = ascii_lowercase + digits
        if not self._session:
            session_id = "".join(choices(alphabet, k=8))
            self._session["id"] = session_id
            logs.append((DEBUG, "New session initialized"))
        else:
            session_id = self._session["id"]
        request_id = "".join(choices(alphabet, k=8))
        # Log ID "<session>-<request>" groups log lines per user session.
        self._id = f"{session_id}-{request_id}"
        self._logger = logger.getChild(f"{self._id}")
        for log in logs:
            self._logger.log(*log)

    async def __aenter__(self) -> "ProxyRequest":
        return self

    async def __aexit__(self, _: type, __: BaseException, ___: TracebackType) -> None:
        await self.close()

    @property
    def _route(self) -> Optional[str]:
        """Return the current RoutePLC cookie value, or '' when absent."""
        morsel: Morsel[str] = next(
            (morsel for morsel in self._client.cookie_jar if morsel.key == "RoutePLC"),
            Morsel(),
        )
        return morsel.value

    async def _fetch(
        self,
        method: str = "GET",
        path: str = "/",
        data: Optional[Union[dict[str, str], bytes]] = None,
    ) -> ClientResponse:
        """Fetch a response from TecoRoute.

        Retries on connection errors and on redirects back to the login
        page (when credentials are stored); raises RecursionError after
        the sixth attempt instead of retrying forever.
        """
        # Progressive back-off: attempts 0-2 run immediately, later ones
        # sleep 1s, 2s, 3s.
        await sleep(max(self._fetch_no - 2, 0))
        last = self._fetch_no == 5
        self._logger.debug(
            f"TecoRoute request ({method} {path}) fetch {self._fetch_no}"
        )
        self._fetch_no += 1
        try:
            client_response = await self._client.request(
                method, self._origin.join(URL(path)), data=data, allow_redirects=False
            )
            self._logger.debug(f"TecoRoute response ({client_response.status})")
            if (
                client_response.status == 302
                and client_response.headers.get("Location") == self._tr_login
                and self._session.get("credentials") is not None
            ):
                # Upstream session expired: transparently re-login with the
                # stored credentials and retry the original request.
                self._logger.debug("TecoRoute response redirected to login")
                if last:
                    raise RecursionError("Can't create a RoutePLC session")
                return await self._login()
            else:
                return client_response
        except (ClientConnectionError, ClientPayloadError, ClientResponseError) as e:
            self._logger.warning(f"TecoRoute request failed ({e})")
            if last:
                raise RecursionError("TecoRoute can't be reached") from None
            return await self._fetch(method, path, data)

    async def _login(self) -> ClientResponse:
        """Log in to TecoRoute with the credentials stored in the session.

        :raises RecursionError: when TecoRoute reports a login failure.
        """
        self._logger.debug(
            "RoutePLC " + (f"({self._route})" if self._route else "empty")
        )
        credentials = self._session["credentials"]
        # TecoRoute challenge: SHA-1 of the RoutePLC cookie value
        # concatenated with the plain-text password.
        secret = sha1(
            ((self._route or "") + credentials["password"]).encode()
        ).hexdigest()
        body = {
            "USER": credentials["username"],
            "PASS": secret,
            "PLC": credentials["plc"],
        }
        client_response = await self._fetch("POST", self._tr_login, body)
        if (
            client_response.status == 200
            and client_response.method == "POST"
            and client_response.url == self._origin.join(URL(self._tr_login))
        ):
            # A direct 200 from TR_LOGIN.XML means the login failed; try to
            # pull the error number out of the XML body.
            try:
                et = ElementTree.fromstring(await client_response.text())
                acer = et.find("ACER")
                if acer is None:
                    raise KeyError
                errno = acer.attrib["VALUE"]
            except (ElementTree.ParseError, KeyError):
                self._logger.warning("Unknown TecoRoute login response")
            else:
                raise RecursionError(f"TecoRoute login failed [{errno}]")
        return client_response

    def _sync_cookies(self, response: Response) -> None:
        """Mirror the client cookie jar into both the session and *response*."""
        self._session["cookies"] = []
        for morsel in self._client.cookie_jar:
            self._session["cookies"].append(morsel.output(header="").lstrip())
            # Convert the morsel object to the set_cookie arguments
            attrs = {
                k.replace("-", "_"): v or None
                for k, v in morsel.items()
                if k != "comment"
            }
            # Drop the upstream domain so the cookie sticks to the proxy host.
            if attrs["domain"] == self._origin.host:
                attrs["domain"] = ""
            response.set_cookie(morsel.key, morsel.value, **attrs)  # type: ignore
        cookie = b64encode(compress(dumps(self._session).encode())).decode()
        response.set_cookie(self._cookie_name, cookie)

    async def _prepare_response(self, client_response: ClientResponse) -> Response:
        """Convert a TecoRoute client response into a server response."""
        headers = client_response.headers.copy()
        # These are recomputed by aiohttp / handled by _sync_cookies.
        for header_key in ("Content-Encoding", "Content-Length", "Set-Cookie"):
            headers.pop(header_key, None)
        response = Response(
            body=await client_response.read(),
            status=client_response.status,
            headers=headers,
        )
        self._sync_cookies(response)
        return response

    def _response_unavailable(self, message: str) -> Response:
        """Build a 503 response carrying *message* and the log ID."""
        response = HTTPServiceUnavailable(
            text=(
                "503: Sorry, service temporary unavailable, please try again later "
                f"({message}).\n\nLog ID: {self._logger.name.split('.')[-1]}"
            )
        )
        self._sync_cookies(response)
        return response

    async def login(self, username: str, password: str, plc: str) -> Response:
        """Clear client cookies, save credentials and login.

        :param username: TecoRoute username.
        :param password: TecoRoute password.
        :param plc: PLC to connect.
        """
        self._client.cookie_jar.clear()
        credentials = {k: v for k, v in locals().items() if k != "self"}
        self._logger.info(f"Login from {self._request.remote} with {credentials}")
        self._session["credentials"] = credentials
        try:
            return await self._prepare_response(await self._login())
        except RecursionError as e:
            return self._response_unavailable(str(e))

    async def logout(self) -> Response:
        """Delete credentials and logout."""
        self._logger.info(f"Logout from {self._request.remote}")
        self._session.pop("credentials", None)
        try:
            return await self._prepare_response(await self._fetch(path=self._tr_logout))
        except RecursionError as e:
            return self._response_unavailable(str(e))

    async def response(self) -> Response:
        """Make a proxy request and get a response."""
        self._logger.info(
            (
                f"Request from {self._request.remote} {self._request.method} "
                f'"{self._request.rel_url}"'
            )
        )
        try:
            client_response = await self._fetch(
                self._request.method,
                str(self._request.rel_url),
                await self._request.read(),
            )
            return await self._prepare_response(client_response)
        except RecursionError as e:
            return self._response_unavailable(str(e))

    async def close(self) -> None:
        """Close request."""
        await self._client.close()
|
czetech/tecoroute-proxy | src/tecoroute_proxy/_server.py | <reponame>czetech/tecoroute-proxy
from typing import Optional
from aiohttp.typedefs import Handler
from aiohttp.web import (
Application,
AppRunner,
HTTPBadRequest,
HTTPMethodNotAllowed,
Request,
Response,
StreamResponse,
TCPSite,
middleware,
post,
route,
)
from yarl import URL
from ._misc import CONTROL, HOST, HTTP_NAME, ORIGIN, PORT, logger
from ._request import ProxyRequest
class ProxyServer:
    """Proxy server.

    Serves three kinds of routes: a POST control endpoint (login/logout),
    a method-not-allowed guard on the control path, and a catch-all route
    that proxies everything else through :class:`ProxyRequest`.

    :param host: The host to listen on, all interfaces if None.
    :param port: The port to listen on.
    :param control: The control path.
    :param origin: TecoRoute service URL.
    """
    def __init__(
        self,
        host: Optional[str] = HOST,
        port: int = PORT,
        control: str = CONTROL,
        origin: str = ORIGIN,
    ) -> None:
        self._host = host
        self._port = port
        self._origin = origin
        server = Application(middlewares=[self._middleware])
        # Normalize the control path so "TR_PROXY" and "/TR_PROXY" behave alike.
        control_path = URL("/").join(URL(control)).path
        # Route order matters: the control-path routes must be registered
        # before the catch-all proxy route.
        server.add_routes(
            [
                post(control_path, self._handler_control_post),
                route("*", control_path, self._handler_control),
                route("*", "/{url:.*}", self._handler_all),
            ]
        )
        self._runner = AppRunner(server, access_log=None)
    @middleware  # type: ignore
    async def _middleware(self, request: Request, handler: Handler) -> StreamResponse:
        # Stamp every response with the proxy's own Server header.
        response = await handler(request)
        response.headers["Server"] = HTTP_NAME
        return response
    async def _handler_control_post(self, request: Request) -> Response:
        # Dispatch on the form field "action": either "login" or "logout".
        post = await request.post()
        action = post.get("action")
        if action == "login":
            try:
                login = {key: post[key] for key in ("username", "password", "plc")}
            except KeyError as e:
                return HTTPBadRequest(reason=f"Missing {e}.")
            async with ProxyRequest(request, self._origin) as proxy_request:
                return await proxy_request.login(**login)  # type: ignore
        if action == "logout":
            async with ProxyRequest(request, self._origin) as proxy_request:
                return await proxy_request.logout()
        else:
            return HTTPBadRequest(reason="Invalid action.")
    async def _handler_control(self, request: Request) -> Response:
        # Any non-POST method on the control path is rejected.
        return HTTPMethodNotAllowed(method=request.method, allowed_methods=("POST",))
    async def _handler_all(self, request: Request) -> Response:
        # Everything else is transparently proxied to TecoRoute.
        async with ProxyRequest(request, self._origin) as proxy_request:
            return await proxy_request.response()
    async def start(self) -> None:
        """Start the server asynchronously in the event loop."""
        await self._runner.setup()
        await TCPSite(self._runner, self._host, self._port).start()
        logger.info(f"The server is running on {self._host or ''}:{self._port}")
    async def stop(self) -> None:
        """Stop the server."""
        await self._runner.cleanup()
        logger.info("The server stopped")
|
czetech/tecoroute-proxy | src/tecoroute_proxy/_misc.py | <reponame>czetech/tecoroute-proxy
from importlib.metadata import distribution
from logging import getLogger
# Root package name; used both for the logger and to look up the installed
# distribution's metadata.
_module = __name__.split(".")[0]
logger = getLogger(_module)
dist = distribution(_module)
# User-Agent / Server header value, e.g. "tecoroute-proxy/1.0 (+https://...)".
HTTP_NAME = f"{dist.metadata['Name']}/{dist.version} (+{dist.metadata['Home-page']})"
# Default values
CONTROL = "/TR_PROXY"  # control path for login/logout requests
HOST = None  # None = listen on all interfaces
ORIGIN = "http://route.tecomat.com:61682"  # upstream TecoRoute service
PORT = 80
|
czetech/tecoroute-proxy | docs/conf.py | <reponame>czetech/tecoroute-proxy
"""Sphinx configuration."""
from importlib.metadata import distribution
import tecoroute_proxy
# Pull author/version from the installed package metadata so the docs never
# drift out of sync with the distribution.
_dist = distribution(tecoroute_proxy.__name__)
_author = _dist.metadata["Author"]
# Project information
project = "TecoRoute Proxy"
author = _author
copyright = f"2022, {_author}"
version = _dist.version
# General configuration
extensions = ["m2r2", "sphinx.ext.autodoc", "sphinx.ext.viewcode"]
# Options for HTML output
html_theme = "sphinx_rtd_theme"
# Options for extension sphinx.ext.autodoc
autodoc_member_order = "bysource"
|
czetech/tecoroute-proxy | src/tecoroute_proxy/__main__.py | """Invoke the command-line interface when the package is run as a script."""
from ._cli import cli
# Standard script guard: delegate straight to the package CLI.
if __name__ == "__main__":
    cli()
|
9dev/django-flags | flags/admin.py | from django.contrib import admin
from .models import Approve, Flag
class FlagAdmin(admin.ModelAdmin):
    """Admin for Flag objects: bulk-approve targets or delete them outright."""

    list_display = ('content_object', 'creator', 'creation_date')
    actions = ['delete_selected_flagged_objects', 'approve']

    def approve(self, request, queryset):
        """Create an Approve record for each selected flag's target object."""
        for selected_flag in queryset:
            Approve.objects.create(
                content_object=selected_flag.content_object,
                creator=request.user,
            )
        self.message_user(request, "Successfully approved selected objects.")

    def delete_selected_flagged_objects(self, request, queryset):
        """Delete each flagged target object along with its flag."""
        for selected_flag in queryset:
            selected_flag.content_object.delete()
            selected_flag.delete()
        self.message_user(request, "Successfully deleted selected flagged objects.")
class ApproveAdmin(admin.ModelAdmin):
    """Read-mostly admin listing for Approve records."""
    list_display = ('content_object', 'creator', 'creation_date')
# Expose both models in the Django admin with their custom ModelAdmins.
admin.site.register(Flag, FlagAdmin)
admin.site.register(Approve, ApproveAdmin)
|
9dev/django-flags | demo/main/urls.py | from django.conf.urls import url
from . import views
urlpatterns = [
    # Article index at the site root.
    url(
        r'^$',
        views.ArticleListView.as_view(),
        name='article_list'
    ),
    # Article detail, keyed by numeric primary key.
    url(
        r'^article/(?P<pk>[0-9]+)$',
        views.ArticleDetailView.as_view(),
        name='article_detail'
    ),
]
|
9dev/django-flags | flags/urls.py | <reponame>9dev/django-flags
from django.conf.urls import patterns, url
from flags import views
# Plain list of url() instances: the patterns() helper was deprecated in
# Django 1.8 and removed in 1.10; this also matches the list style used by
# the demo app's urls.py.
urlpatterns = [
    url(
        r'^create/(?P<app_label>[a-z0-9_]+)/(?P<model_name>[a-z0-9_]+)/(?P<pk>[0-9]+)$',
        views.FlagCreateView.as_view(),
        name='flag_create',
    ),
]
|
9dev/django-flags | flags/forms.py | <reponame>9dev/django-flags
from django import forms
class FlagCreateForm(forms.Form):
    """Empty confirmation form: flagging needs no user input, only a POST."""
    pass
|
9dev/django-flags | fts/Flags.py | from selenium.webdriver.support.ui import Select
from django.contrib.auth.models import User
from django.test import override_settings
from flags.models import Approve, Flag
from main.models import Article
from ._base import BaseTestCase
class TestFlags(BaseTestCase):
    """Browser-driven functional tests for the flagging workflow.

    Relies on fixture data loaded by BaseTestCase: Article pk=1 and User
    pks 1-3 — TODO confirm against the fixtures. Assertions match on exact
    page text (e.g. Article.__str__ and the admin changelist counters).
    """
    def test_registered_user_can_flag_an_object(self):
        # Florence logs in as an admin.
        self.login_as_regular_user()
        # She hits a detail page for an Article object.
        self.get('/article/1')
        # She clicks on a link to flag the object.
        self.browser.find_element_by_partial_link_text('Report abuse').click()
        # She confirms she want to flag this article.
        self.submit()
        # She logs out and logs in to an admin account.
        self.reopen_browser()
        self.login_as_admin()
        # She hits flags admin panel.
        self.get('/admin/flags/flag')
        # She sees a new flag object.
        self.assertIn('<Article id=1>', self.get_text())
    def test_unregistered_user_cannot_flag_an_object(self):
        # Florence hits a detail page for an Article object.
        self.get('/article/1')
        # She clicks on a link to flag the object.
        self.browser.find_element_by_partial_link_text('Report abuse').click()
        # She is asked to log in first.
        self.assertEqual(
            self.browser.current_url,
            '{}/admin/login/?next=/flags/create/main/article/1'.format(self.live_server_url)
        )
    def test_can_approve_an_object_in_admin(self):
        # There is a flagged Article object.
        Flag.objects.create(content_object=Article.objects.get(pk=1), creator=User.objects.get(pk=1))
        # Florence logs in as an admin.
        self.login_as_admin()
        # She hits flags admin panel.
        self.get('/admin/flags/flag')
        # She approves an Article object.
        self.get_by_id('action-toggle').click()
        select = Select(self.browser.find_element_by_css_selector('select[name="action"]'))
        select.select_by_visible_text('Approve')
        self.browser.find_element_by_css_selector('button[name="index"]').click()
        # She confirms that existing flag for an object disappeared.
        self.assertIn('0 flags', self.get_text())
    def test_can_delete_an_object_in_admin(self):
        # There is a flagged Article object.
        Flag.objects.create(content_object=Article.objects.get(pk=1), creator=User.objects.get(pk=1))
        # Florence logs in as an admin.
        self.login_as_admin()
        # She hits flags admin panel.
        self.get('/admin/flags/flag')
        # She deletes an Article object.
        self.get_by_id('action-toggle').click()
        select = Select(self.browser.find_element_by_css_selector('select[name="action"]'))
        select.select_by_visible_text('Delete selected flagged objects')
        self.browser.find_element_by_css_selector('button[name="index"]').click()
        # She confirms that existing flag for an object disappeared.
        self.assertIn('0 flags', self.get_text())
        # She hits articles admin panel.
        self.get('/admin/main/article')
        # She confirms that an Article was deleted.
        self.assertIn('0 articles', self.get_text())
    def test_cannot_flag_object_if_approved(self):
        # There is an approved object.
        Approve.objects.create(content_object=Article.objects.get(pk=1), creator=User.objects.get(pk=1))
        # Florence logs in as an admin.
        self.login_as_regular_user()
        # She hits a detail page for an Article object.
        self.get('/article/1')
        # She clicks on a link to flag the object.
        self.browser.find_element_by_partial_link_text('Report abuse').click()
        # She confirms she want to flag this article.
        self.submit()
        # She logs out and logs in again as admin.
        self.reopen_browser()
        self.login_as_admin()
        # She hits flags admin panel.
        self.get('/admin/flags/flag')
        # She doesn't see any new flag object.
        self.assertIn('0 flags', self.get_text())
    @override_settings(FLAGS_THRESHOLD=3)
    def test_object_is_removed_after_hitting_flag_threshold_if_threshold_set(self):
        # There is an object with two flags.
        obj = Article.objects.get(pk=1)
        Flag.objects.create(content_object=obj, creator=User.objects.get(pk=2))
        Flag.objects.create(content_object=obj, creator=User.objects.get(pk=3))
        # Florence logs in as an admin.
        self.login_as_admin()
        # She hits a detail page for an Article object.
        self.get('/article/1')
        # She clicks on a link to flag the object.
        self.browser.find_element_by_partial_link_text('Report abuse').click()
        # She confirms she want to flag this article.
        self.submit()
        # She hits articles admin panel.
        self.get('/admin/main/article')
        # She confirms that an Article object disappeared.
        self.assertIn('0 articles', self.get_text())
        # She hits flags admin panel.
        self.get('/admin/flags/flag')
        # She confirms that all flags for this Article disappeared.
        self.assertIn('0 flags', self.get_text())
    def test_can_flag_object_only_once(self):
        # Florence logs in as an admin.
        self.login_as_admin()
        # She hits a detail page for an Article object.
        self.get('/article/1')
        # She clicks on a link to flag the object.
        self.browser.find_element_by_partial_link_text('Report abuse').click()
        # She confirms she want to flag this article.
        self.submit()
        # She repeats that procedure.
        self.get('/article/1')
        self.browser.find_element_by_partial_link_text('Report abuse').click()
        self.submit()
        # She hits flags admin panel.
        self.get('/admin/flags/flag')
        # She sees only one flag object.
        self.assertIn('1 flag', self.get_text())
|
9dev/django-flags | flags/templatetags/flags.py | <gh_stars>0
from django import template
from django.core.urlresolvers import reverse
# Module-level Library instance that Django's template loader looks up.
register = template.Library()
@register.simple_tag
def flag_create_url(obj):
    """Return the flag-creation URL for *obj* via the 'flags:flag_create' route."""
    meta = obj._meta
    url_kwargs = {
        'app_label': meta.app_label,
        'model_name': meta.object_name.lower(),
        'pk': obj.pk,
    }
    return reverse('flags:flag_create', kwargs=url_kwargs)
|
9dev/django-flags | flags/models.py | <filename>flags/models.py
from django.conf import settings
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
class Flag(models.Model):
    """A user's abuse report against an arbitrary model instance."""
    # Generic foreign key to the flagged object (any model).
    object_id = models.PositiveIntegerField()
    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    content_object = GenericForeignKey('content_type', 'object_id')
    # The reporting user.
    creator = models.ForeignKey('auth.User')
    creation_date = models.DateTimeField(auto_now_add=True)
class Approve(models.Model):
    """A moderator's approval of an object; shields it from further flags."""
    # Generic foreign key to the approved object (any model).
    object_id = models.PositiveIntegerField()
    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    content_object = GenericForeignKey('content_type', 'object_id')
    # The approving user.
    creator = models.ForeignKey('auth.User')
    creation_date = models.DateTimeField(auto_now_add=True)
    class Meta:
        # At most one approval per target object.
        unique_together = (('object_id', 'content_type'),)
@receiver(post_save, sender=Approve)
def on_save_approve(sender, instance, **kwargs):
    # Approving an object dismisses all of its outstanding flags.
    Flag.objects.filter(object_id=instance.object_id, content_type=instance.content_type).delete()
@receiver(post_save, sender=Flag)
def on_save_flag(sender, instance, **kwargs):
    # Enforce the flagging rules after each new Flag is saved.
    try:
        # Approved objects cannot be flagged: discard the new flag.
        Approve.objects.get(object_id=instance.object_id, content_type=instance.content_type)
        instance.delete()
    except Approve.DoesNotExist:
        flags = Flag.objects.filter(object_id=instance.object_id, content_type=instance.content_type)
        # The instance itself is already saved, so a count above 1 for the
        # same creator means this is a duplicate flag — drop it.
        exists = flags.filter(creator=instance.creator).count() > 1
        if exists:
            instance.delete()
        else:
            # Optional auto-moderation: once FLAGS_THRESHOLD flags accumulate,
            # delete the flagged object together with all of its flags.
            threshold = getattr(settings, 'FLAGS_THRESHOLD', None)
            if threshold:
                count = flags.count()
                if count >= threshold:
                    instance.content_object.delete()
                    flags.delete()
|
9dev/django-flags | flags/views.py | from django.apps import apps
from django.contrib.auth.decorators import login_required
from django.core.exceptions import ObjectDoesNotExist
from django.shortcuts import Http404
from django.utils.decorators import method_decorator
from django.views.generic import FormView
from .forms import FlagCreateForm
from .models import Flag
class FlagCreateView(FormView):
    """Confirmation page that creates a Flag for an arbitrary model instance.

    URL kwargs identify the target: app_label, model_name and pk.
    Login is required; an unknown model or missing object yields a 404.
    """
    form_class = FlagCreateForm
    template_name = 'flags/flag_form.html'
    success_url = '/'
    @method_decorator(login_required)
    def dispatch(self, request, *args, **kwargs):
        return super(FlagCreateView, self).dispatch(request, *args, **kwargs)
    def get_form_kwargs(self):
        # NOTE(review): pops the URL kwargs out of self.kwargs as a side
        # effect and stashes the looked-up target on self.obj for form_valid.
        try:
            app_label = self.kwargs.pop('app_label')
            model_name = self.kwargs.pop('model_name')
            pk = self.kwargs.pop('pk')
            cls = apps.get_model(app_label=app_label, model_name=model_name)
            self.obj = cls.objects.get(pk=pk)
        except (KeyError, LookupError, ObjectDoesNotExist):
            raise Http404
        return super(FlagCreateView, self).get_form_kwargs()
    def form_valid(self, form):
        # Duplicate-flag and already-approved handling happens in the Flag
        # post_save signal, not here.
        Flag.objects.create(content_object=self.obj, creator=self.request.user)
        return super(FlagCreateView, self).form_valid(form)
|
9dev/django-flags | demo/main/models.py | <reponame>9dev/django-flags
from django.db import models
from django.core.urlresolvers import reverse
class Article(models.Model):
    """Demo article used to exercise the flagging app."""
    title = models.CharField(max_length=100)
    content = models.CharField(max_length=1000)
    def get_absolute_url(self):
        """Return the canonical detail URL for this article."""
        return reverse('article_detail', kwargs={'pk': self.pk})
    def __str__(self):
        # The functional tests assert on this exact representation.
        return '<Article id={}>'.format(self.pk)
|
9dev/django-flags | demo/main/views.py | <filename>demo/main/views.py
from django.views.generic import DetailView, ListView
from .models import Article
class ArticleListView(ListView):
    """Generic list view over Article."""
    model = Article
class ArticleDetailView(DetailView):
    """Generic detail view for a single Article, keyed by pk."""
    model = Article
|
gecrooks/disorder | disorder/about.py | <filename>disorder/about.py
# Copyright 2021, <NAME> and contributors
#
# This source code is licensed under the Apache License 2.0
# found in the LICENSE file in the root directory of this source tree.
# Command line interface for the about() function
# > python -m disorder.about
#
# NB: This module should not be imported by any other code in the package
# (else we will get multiple import warnings)
if __name__ == "__main__":
    # Imported locally, per the note above, to avoid duplicate package
    # import warnings when this module is run with `python -m`.
    import disorder
    disorder.about()
|
gecrooks/disorder | examples/fenergy_errors.py | #
# Exploration of different error estimates for free energies
#
import numpy as np
from disorder import fenergy_bar, fenergy_bayesian
# NOTE(review): these module-level values are shadowed by the local `fe` and
# the parameters of errors() below; they appear to be unused leftovers.
fe = 0
diss = 100
count = 1000
def errors(diss, count):
    """Print several free-energy error estimates for synthetic Gaussian work.

    Draws *count* forward and reverse work samples from Gaussians with mean
    dissipation *diss* (in kT) and variance 2*diss, then prints the BAR,
    MBAR, Bayesian and logistic error estimates.
    """
    fe = 0
    spread = np.sqrt(2 * diss)
    work_f = np.random.normal(loc=diss + fe, scale=spread, size=(count,))
    work_r = np.random.normal(loc=diss - fe, scale=spread, size=(count,))
    print()
    print(f"Gaussian work, count: {count}, mean dissipation: {diss} kT")
    print("BAR error:", fenergy_bar(work_f, work_r, uncertainty_method="BAR")[1])
    print("MBAR error:", fenergy_bar(work_f, work_r, uncertainty_method="MBAR")[1])
    print("Bayesian error:", fenergy_bayesian(work_f, work_r)[1])
    print(
        "Logistic approx:",
        fenergy_bar(work_f, work_r, uncertainty_method="Logistic")[1],
    )
# Compare the estimators at moderate and at high dissipation.
errors(10, 1000)
errors(100, 1000)
|
gecrooks/disorder | disorder/__init__.py | <filename>disorder/__init__.py
# Copyright 2021, <NAME> and contributors
#
# This source code is licensed under the Apache License 2.0
# found in the LICENSE file in the root directory of this source tree.
from .config import __version__ # noqa: F401
from .config import about # noqa: F401
from .fenergy import * # noqa: F401 F403
|
gecrooks/disorder | disorder/fenergy_test.py | # Copyright 2021, <NAME> and contributors
#
# This source code is licensed under the Apache License 2.0 found in
# the LICENSE.txt file in the root directory of this source tree.
import numpy as np
import pytest
from numpy.random import normal
from scipy.special import expit
from disorder import (
fenergy_bar,
fenergy_bayesian,
fenergy_logmeanexp,
fenergy_logmeanexp_gaussian,
fenergy_symmetric_bar,
fenergy_symmetric_bidirectional,
fenergy_symmetric_nnznm,
)
from disorder.fenergy import _logexpit
def test_fenergy_logmeanexp() -> None:
    """Regression test against frozen values on the fixed work samples."""
    assert np.isclose(fenergy_logmeanexp(work_f), 9.462820605969167)
    assert np.isclose(fenergy_logmeanexp(work_r), -8.992876786373467)
def test_fenergy_logmeanexp_gaussian() -> None:
    """Regression test for the Gaussian-approximation estimator."""
    assert np.isclose(fenergy_logmeanexp_gaussian(work_f), 8.687926143699123)
def test_fenergy_bar() -> None:
    """Regression tests for BAR estimates and its uncertainty methods.

    Checks antisymmetry under swapping forward/reverse work, each named
    uncertainty method, and that unit weights reproduce the unweighted
    result.
    """
    assert np.allclose(
        fenergy_bar(work_f, work_r), (10.126372790165997, 0.39715725693786963)
    )
    assert np.allclose(
        fenergy_bar(work_r, work_f), (-10.126372790165997, 0.39715725693786963)
    )
    assert np.allclose(
        fenergy_bar(work_f, work_r, uncertainty_method="BAR"),
        (10.126372790165997, 0.39715725693786963),
    )
    assert np.allclose(
        fenergy_bar(work_f, work_r, uncertainty_method="MBAR"),
        (10.126372790165997, 0.40080301959064535),
    )
    assert np.allclose(
        fenergy_bar(work_f, work_r, uncertainty_method="Logistic"),
        (10.126372790165997, 0.40080301959064535),
    )
    # Note: for high dissipation the error estimate too low with BAR, too high with MBAR
    assert np.allclose(
        fenergy_bar(work_f_diss, work_r_diss, uncertainty_method="BAR"),
        (9.341808032205543, 1.1165063184118718),
    )
    assert np.allclose(
        fenergy_bar(work_f_diss, work_r_diss, uncertainty_method="MBAR"),
        (9.341808032205543, 78.83605670383308),
    )
    assert np.allclose(
        fenergy_bar(work_f_diss, work_r_diss, uncertainty_method="Logistic"),
        (9.341808032205543, 5.928521477224136),
    )
    assert np.allclose(
        fenergy_bar(
            work_f_diss,
            work_r_diss,
            weights_f=np.ones_like(work_f_diss),
            weights_r=np.ones_like(work_r_diss),
            uncertainty_method="Logistic",
        ),
        (9.341808032205543, 5.928521477224136),
    )
def test_fenergy_bar_error() -> None:
    """An unrecognized uncertainty_method must raise ValueError."""
    with pytest.raises(ValueError):
        fenergy_bar(work_f, work_r, uncertainty_method="NOT_A_METHOD")
def test_fenergy_bayesian() -> None:
    """Regression test for the Bayesian estimator at low and high dissipation."""
    assert np.allclose(
        fenergy_bayesian(work_f, work_r),
        (10.129329739999651, 0.5091539010758899),
    )
    assert np.allclose(
        fenergy_bayesian(work_f_diss, work_r_diss),
        (9.491682713739426, 5.564798805609418),
    )
def test_logexpit() -> None:
    """_logexpit matches log(expit(x)) near 0 and is stable at extremes."""
    # For values near 0, no tricks
    for n in range(10):  # loop index intentionally unused: 10 random draws
        x = normal()
        assert np.isclose(_logexpit(x), np.log(expit(x)))
    # Extreme arguments: log(expit(x)) would under/overflow naively.
    assert np.isclose(_logexpit(-1000.0), -1000.0)  # type: ignore
    assert np.isclose(_logexpit(1000.0), 0.0)  # type: ignore
def test_fenergy_symmetric_bar() -> None:
    """Regression test on the symmetric-protocol work samples."""
    assert np.allclose(
        fenergy_symmetric_bar(work_sym_f, work_sym_r),
        (1.1867614815656458, 0.21931716664445491),
    )
def test_fenergy_symmetric_nnznm() -> None:
    """Smoke test only."""
    # Note only tests that code runs. No regression as of yet.
    fenergy_symmetric_nnznm(work_sym_f, work_sym_r)
def test_fenergy_symmetric_bidirectional() -> None:
    """Smoke test only: checks the bidirectional estimator runs (no regression value yet)."""
    # Note only tests that code runs. No regression as of yet.
    fenergy_symmetric_bidirectional(work_sym_f, work_sym_r)
# -- data --
# Fixed random work samples for testing.
# fe = 10
# diss = 4
# work_f = random.normal(loc=diss + fe, scale=np.sqrt(2 * diss), size=(20,))
# work_r = random.normal(loc=diss - fe, scale=np.sqrt(2 * diss), size=(20,))
work_f = np.array(
[
12.26186058,
12.02296197,
15.81950764,
10.89312792,
10.25310224,
6.57237884,
20.70918778,
12.52717333,
10.98228327,
15.11407533,
10.1928446,
11.86228712,
13.85018359,
17.51308605,
13.71277499,
14.78423781,
10.79028309,
15.73678937,
13.34587623,
14.83121349,
]
)
work_r = np.array(
[
-7.03024785,
-5.97147992,
-9.54545887,
-4.15253714,
-5.13042167,
-10.94999928,
-8.83169733,
-3.70606829,
-8.25225619,
-4.41218262,
-8.61761745,
-4.86517229,
-4.23155588,
-9.87869866,
-8.15082176,
-9.83398181,
-7.43388798,
-8.42498504,
-8.58419969,
-9.82309618,
]
)
# Fixed random work samples for testing, high dissipation
# fe = 10
# diss = 20
work_f_diss = np.array(
[
31.93691244,
37.52776423,
27.84175637,
44.88480481,
42.96347851,
28.25698468,
23.82297813,
23.34256961,
18.7881143,
25.31003161,
]
)
work_r_diss = np.array(
[
8.99668577,
0.83206027,
4.19698101,
9.21375163,
7.14493204,
7.36804987,
0.76412305,
13.23269751,
17.78017694,
16.22043168,
]
)
# Generated from a simple simulation of
# harmonic oscillator moving to and fro.
work_sym_f = np.array(
[
1.82819018,
1.36855722,
3.15124547,
3.70311204,
7.97671385,
2.65068191,
8.19611493,
0.24970324,
3.98034835,
5.1818346,
1.57846703,
1.19969679,
-0.40316984,
3.66036448,
8.0229261,
4.25364127,
3.94486632,
2.96836278,
0.36038236,
1.76974979,
0.8004306,
4.67280096,
1.49874792,
4.81476805,
4.96769674,
1.90638043,
-0.32600167,
3.38392651,
3.79314834,
0.94598643,
1.95623881,
6.11134062,
1.87657695,
5.82658531,
5.87858761,
4.54366715,
1.96390498,
3.63142078,
4.4287167,
3.3585574,
5.1397567,
4.79327128,
3.15155961,
0.30086432,
2.04971419,
1.34595448,
6.27214952,
5.25437326,
8.03978488,
2.72734344,
0.88520126,
5.75472987,
4.97882958,
0.31924861,
7.07856703,
7.95909254,
2.3827212,
6.32013187,
6.40346971,
6.75324138,
8.28685036,
1.20987223,
5.30231901,
2.64366919,
4.66766505,
4.92502304,
-3.55804016,
8.83309716,
-0.15187293,
6.99749128,
2.13440322,
7.14182222,
7.47293331,
5.5724692,
5.10984664,
5.72953769,
4.5543944,
2.03872872,
5.85949342,
7.90117102,
3.08241451,
7.00464683,
5.18575533,
5.67510774,
5.70554863,
3.31311725,
2.00246591,
9.08164636,
3.63798151,
3.65981263,
3.30865548,
1.22848849,
5.15646295,
5.58547037,
11.92743536,
0.14573392,
4.44491747,
4.68708094,
5.80561741,
4.29157184,
]
)
work_sym_r = np.array(
[
1.32803648,
-1.46565987,
-3.91438172,
-0.18983798,
-4.88579712,
-1.26741734,
-5.27859627,
0.55945393,
-2.11904812,
-1.02599031,
0.38483052,
0.70294448,
5.04710393,
-2.83653239,
-4.57776588,
-3.05089428,
-4.42805097,
-1.63812375,
2.07513872,
3.0545816,
2.97141303,
-2.83771512,
0.62048284,
-4.16506427,
-1.80694098,
-1.00990265,
2.36238464,
-2.49992193,
-4.76327979,
2.46269644,
2.31389956,
-6.14219227,
-0.74538983,
-7.77629193,
-2.64513459,
-4.53824845,
0.72225589,
1.09491531,
-1.33724845,
-3.13851951,
-1.98835653,
-6.11549039,
-3.92871721,
-3.64929078,
-0.92120638,
0.30030679,
-6.64325116,
-1.59348937,
-5.08270715,
-1.07814159,
3.03018205,
-4.29619546,
-0.84256382,
-0.43260979,
-7.26314053,
-5.56629806,
-3.60012248,
-6.22214006,
-5.7474615,
-3.2881238,
-7.07396391,
-0.85047779,
-4.68073175,
0.08697061,
-1.2678808,
-3.86896004,
1.0097779,
-2.35230744,
4.13973734,
-8.47694328,
3.85231732,
-5.34715043,
-6.48893033,
-3.52519835,
-2.20753183,
0.48452572,
-3.16530233,
-0.29440516,
-0.9906192,
-3.58376776,
-2.49564153,
-6.31664668,
1.31543616,
-2.63037867,
-3.11807526,
-2.59228726,
-2.09934505,
-3.36107132,
-1.67505966,
-0.59628801,
2.64434669,
-0.8510169,
-0.46134474,
-4.36162684,
-12.99690175,
-1.31253928,
0.72939001,
-1.35800046,
-4.76475975,
-1.66313632,
]
)
|
gecrooks/disorder | disorder/fenergy.py | <reponame>gecrooks/disorder
# Copyright 2021, <NAME> and contributors
#
# This source code is licensed under the Apache License 2.0 found in
# the LICENSE.txt file in the root directory of this source tree.
from typing import Optional, Tuple

import numpy as np
from numpy.typing import ArrayLike  # numpy v1.20
from scipy import optimize
from scipy.special import expit  # Logistic sigmoid function: expit(x) = 1/(1+exp(-x))
from scipy.special import logsumexp
__all__ = (
"fenergy_bar",
"fenergy_bayesian",
"fenergy_logmeanexp",
"fenergy_logmeanexp_gaussian",
"fenergy_symmetric_bar",
"fenergy_symmetric_bidirectional",
"fenergy_symmetric_nnznm",
)
def fenergy_logmeanexp(work_f: ArrayLike) -> float:
    """Exponential-average (Jarzynski) free energy estimate.

    Evaluates -log( mean( exp(-W) ) ) over the forward work samples, using
    logsumexp so that large negative works do not overflow.

    Args:
        work_f: Measurements of work from the forward protocol.

    Returns:
        Estimated free energy difference.
    """
    samples = np.asarray(work_f, dtype=np.float64)
    # -log mean exp(-W)  ==  log(N) - logsumexp(-W)
    return np.log(samples.size) - logsumexp(-samples)
def fenergy_logmeanexp_gaussian(work_f: ArrayLike) -> float:
    """Gaussian (second-cumulant) free energy estimate.

    Assumes the work distribution is Gaussian, in which case
    Delta F = <W> - var(W) / 2.

    Args:
        work_f: Measurements of work from the forward protocol.

    Returns:
        Estimated free energy difference.
    """
    samples = np.asarray(work_f, dtype=np.float64)
    mean_work = np.mean(samples)
    half_variance = 0.5 * np.var(samples)
    return mean_work - half_variance
def fenergy_bar(
    work_f: ArrayLike,
    work_r: ArrayLike,
    weights_f: Optional[ArrayLike] = None,
    weights_r: Optional[ArrayLike] = None,
    uncertainty_method: str = "BAR",
) -> Tuple[float, float]:
    """Bennett acceptance ratio (BAR) free energy estimate.

    Locates the maximum-likelihood free energy difference given forward and
    reverse work measurements, then estimates the statistical error with the
    requested method.

    Args:
        work_f: Measurements of work from forward protocol.
        work_r: Measurements of work from reverse protocol.
        weights_f: Optional weights for forward works
        weights_r: Optional weights for reverse works
        uncertainty_method: Method to calculate errors ("BAR", "MBAR", or "Logistic")

    Returns:
        Estimated free energy difference, and the estimated error

    Raises:
        ValueError: If ``uncertainty_method`` is not one of the known methods.
    """
    W_f = np.asarray(work_f, dtype=np.float64)
    W_r = np.asarray(work_r, dtype=np.float64)

    if weights_f is None:
        weights_f = np.ones_like(W_f)
    weights_f = np.asarray(weights_f, dtype=np.float64)

    if weights_r is None:
        weights_r = np.ones_like(W_r)
    weights_r = np.asarray(weights_r, dtype=np.float64)

    N_f = np.sum(weights_f)
    N_r = np.sum(weights_r)
    M = np.log(N_f / N_r)

    # The maximum-likelihood solution is bracketed by the extreme work values.
    lower = min(np.amin(W_f), np.amin(-W_r))
    upper = max(np.amax(W_f), np.amax(-W_r))

    def _bar(delta_fenergy: float) -> float:
        # Zero exactly at the maximum-likelihood BAR free energy.
        diss_f = W_f - delta_fenergy + M
        diss_r = W_r + delta_fenergy - M
        f = np.log(np.sum(weights_f * expit(-diss_f)))
        r = np.log(np.sum(weights_r * expit(-diss_r)))
        return f - r

    # Maximum likelihood free energy
    delta_fenergy = optimize.brentq(_bar, lower, upper)  # Find root

    # Error estimation.
    # Bug fix: use the converted arrays W_f/W_r here (the original referenced
    # the raw ArrayLike arguments, which crashes for plain list inputs).
    diss_f = W_f - delta_fenergy + M
    diss_r = W_r + delta_fenergy - M
    slogF = np.sum(weights_f * expit(-diss_f))
    slogR = np.sum(weights_r * expit(-diss_r))
    slogF2 = np.sum(weights_f * expit(-diss_f) ** 2)
    slogR2 = np.sum(weights_r * expit(-diss_r) ** 2)
    nratio = (N_f + N_r) / (N_f * N_r)

    if uncertainty_method == "BAR":
        # BAR error estimate
        # (Underestimates error if posterior not Gaussian)
        err = np.sqrt((slogF2 / slogF ** 2) + (slogR2 / slogR ** 2) - nratio)
    elif uncertainty_method == "MBAR":
        # MBAR error estimate
        # (Massively overestimates error if posterior not Gaussian)
        err = np.sqrt(1.0 / (slogF - slogF2 + slogR - slogR2) - nratio)
    elif uncertainty_method == "Logistic":
        # MBAR error with a correction for non-overlapping work distributions
        mbar_err = np.sqrt(1.0 / (slogF - slogF2 + slogR - slogR2) - nratio)
        min_hysteresis = np.min(W_f) + np.min(W_r)
        logistic_err = np.sqrt((min_hysteresis ** 2 + 4 * np.pi ** 2) / 12)
        err = min(logistic_err, mbar_err)
    else:
        raise ValueError("Unknown uncertainty estimation method")

    return delta_fenergy, err
def fenergy_bayesian(work_f: ArrayLike, work_r: ArrayLike) -> Tuple[float, float]:
    """Bayesian free energy estimate.

    Forms the posterior distribution over the free energy difference and
    reports its first two moments.

    Args:
        work_f: Measurements of work from forward protocol.
        work_r: Measurements of work from reverse protocol.

    Returns:
        Posterior mean estimate of the free energy difference, and the estimated error
    """
    values, posterior = fenergy_posterior(work_f, work_r)
    mean = np.sum(values * posterior)
    second_moment = np.sum(values * values * posterior)
    std = np.sqrt(second_moment - mean ** 2)
    return mean, std
def fenergy_posterior(
    work_f: ArrayLike, work_r: ArrayLike
) -> Tuple[np.ndarray, np.ndarray]:
    """Posterior distribution of the free energy difference.

    Evaluates the Bayesian log-posterior of the free energy on a 100-point
    grid spanning four error bars either side of the BAR point estimate,
    then normalizes to a discrete probability distribution.

    Args:
        work_f: Measurements of work from forward protocol.
        work_r: Measurements of work from reverse protocol.

    Returns:
        A pair of arrays: the grid of free energy values, and the
        corresponding posterior probabilities (summing to one).
    """
    w_f = np.asarray(work_f, dtype=np.float64)
    w_r = np.asarray(work_r, dtype=np.float64)

    # Center and scale the grid using the (logistic-corrected) BAR estimate.
    center, spread = fenergy_bar(work_f, work_r, uncertainty_method="Logistic")
    grid = np.linspace(center - 4 * spread, center + 4 * spread, 100, dtype=np.float64)

    M = np.log(w_f.size / w_r.size)

    log_prob = np.zeros_like(grid)
    for idx, fe in enumerate(grid):
        log_prob[idx] = np.sum(_logexpit(w_f - fe + M)) + np.sum(
            _logexpit(w_r + fe - M)
        )

    # Normalize in log space first so the exponentials cannot overflow.
    log_prob -= np.amax(log_prob)
    prob = np.exp(log_prob)
    prob /= np.sum(prob)
    return grid, prob
def fenergy_symmetric_bar(
    work_ab: ArrayLike,
    work_bc: ArrayLike,
    uncertainty_method: str = "BAR",
) -> Tuple[float, float]:
    """BAR for symmetric periodic protocols.

    Args:
        work_ab: Measurements of work from first half of protocol.
        work_bc: Measurements of work from mirror image second half of protocol.
        uncertainty_method: Method to calculate errors (BAR, MBAR, or Logistic)

    Returns:
        Estimated free energy difference to the middle point of the protocol, and
        an estimated error
    """
    w_ab = np.asarray(work_ab, dtype=np.float64)
    w_bc = np.asarray(work_bc, dtype=np.float64)

    # Reweight the reverse samples by the normalized Boltzmann factor of the
    # first-half work before running standard BAR.
    reverse_weights = np.exp(-w_ab - fenergy_logmeanexp(w_ab))
    return fenergy_bar(w_ab, w_bc, None, reverse_weights, uncertainty_method)
def fenergy_symmetric_nnznm(work_ab: ArrayLike, work_bc: ArrayLike) -> float:
    """Free energy estimate for cyclic protocol.

    "Non equilibrium path-ensemble averages for symmetric protocols"
    Nguyen, Ngo, Zerba, Noskov, & Minh (2009), Eq 2

    Args:
        work_ab: Measurements of work from first half of protocol.
        work_bc: Measurements of work from mirror image second half of protocol.

    Returns:
        Estimate of the free energy
    """
    w_ab = np.asarray(work_ab, dtype=np.float64)
    w_bc = np.asarray(work_bc, dtype=np.float64)

    half_term = fenergy_logmeanexp(-w_ab)
    cycle_term = np.log(1 + np.exp(-fenergy_logmeanexp(-w_ab - w_bc)))
    return -np.log(2) + half_term + cycle_term
def fenergy_symmetric_bidirectional(work_ab: ArrayLike, work_bc: ArrayLike) -> float:
    """
    The bidirectional Minh-Chodera free energy estimate specialized to a symmetric
    protocol.

        Delta F = (2/N) sum (e^W_ab + e^-W_bc)^-1)

    Args:
        work_ab: Measurements of work from first half of protocol.
        work_bc: Measurements of work from mirror image second half of protocol.

    Returns:
        Estimate of the free energy
    """
    w_ab = np.asarray(work_ab, dtype=np.float64)
    w_bc = np.asarray(work_bc, dtype=np.float64)

    # Log of each summand: -W_ab + log expit(-(W_ab + W_bc)).
    log_terms = -w_ab + _logexpit(-w_ab - w_bc)
    return np.log(w_ab.size / 2) - logsumexp(log_terms)
def _logexpit(a: np.ndarray) -> np.ndarray:
"""
log(expit(+x)) = log(1/(1+exp(-x)))
= x + log(1/(1+exp(+x)))
= x + log(expit(-x))
"""
return np.piecewise(
a,
[a < 0, a >= 0],
[lambda x: x + np.log(expit(-x)), lambda x: np.log(expit(x))],
)
|
alLe746/CTI_besoin | interface/pythonui/interface/DocumentMeta.py | from django import forms
from django.db import models
import os
class DocumentMeta(forms.Form):
    """Form capturing a request's metadata: a short description and its date."""

    Description = forms.CharField(label="Description", max_length=64)
    Date_demande = forms.DateField(label="Date de la Demande")
class FileUpload(forms.Form):
    """Upload form for a Word (.docx) document."""

    file = forms.FileField(label="document Word ( .docx)")
class FileUploadjson(forms.Form):
    """Upload form for a JSON (or other text) file."""

    file = forms.FileField(label="Fichier Json ( .json, .txt,...)")
class Modeldocx(models.Model):
    """Model storing an uploaded .docx file."""

    docx=models.FileField()
class Jsonchoice(forms.Form):
    """Form selecting an integer index I bounded by a per-instance maximum.

    The upper bound is only known per request, so the ``choice`` field is
    rebuilt in ``__init__`` from the required ``max_value`` keyword argument.
    (The original declared ``max_value=self.max_value`` at class-body level,
    which raises NameError on import: ``self`` does not exist there.)
    """

    # Hidden fields carrying the JSON payload and the result between requests.
    json = forms.CharField(widget=forms.HiddenInput())
    res = forms.CharField(widget=forms.HiddenInput())
    # Placeholder; the bounded field is installed per instance in __init__.
    choice = forms.IntegerField(label="valeur de I", min_value=0)

    def __init__(self, *arguments, **kwargs):
        """Build the form; ``max_value`` (required keyword) bounds ``choice``."""
        self.max_value = kwargs.pop('max_value')
        super().__init__(*arguments, **kwargs)
        # Rebuild the bound field now that the instance's limit is known.
        self.fields['choice'] = forms.IntegerField(
            label="valeur de I", max_value=self.max_value, min_value=0
        )
alLe746/CTI_besoin | interface/pythonui/interface/views.py | from datetime import datetime
import docx
import json
import pandas as pd
from django.shortcuts import render, redirect
from django.http import HttpResponse, JsonResponse
# Create your views here.
from .DocumentMeta import DocumentMeta,FileUpload,Modeldocx,Jsonchoice,FileUploadjson
from docx import Document
import os
def index(request):
    """Render the landing page."""
    return render(request, "interface/index.html")
def wordtojson(request):
    """Convert an uploaded request-form .docx into a downloadable JSON file.

    On GET, renders the upload page. On POST, saves the uploaded document to
    a temporary file, parses its tables with python-docx, and maps the
    label/value cell pairs into a flat JSON object (dates are converted from
    DD/MM/YYYY to ISO YYYY-MM-DD). Invalid documents now yield HTTP 400.
    """
    form = FileUpload(request.POST)
    if request.method == 'POST':
        document = request.FILES['file']
        # Persist the upload so python-docx can open it from disk.
        with open('tempword.docx', 'wb+') as destination:
            for chunk in document.chunks():
                destination.write(chunk)
        try:
            document = Document('tempword.docx')
        except docx.opc.exceptions.PackageNotFoundError as e:
            # Bug fix: the original only set an (unused) docu_valid flag here
            # and then crashed below on document.tables; reject the request.
            print("document introuvable, vérifier l'emplacement")
            print("message d'erreur : " + str(e))
            return HttpResponse("Document Word invalide", status=400)

        # Extract every table into a DataFrame of cell texts.
        tables = []
        for table in document.tables:
            df = [['' for i in range(len(table.columns))] for j in range(len(table.rows))]
            for i, row in enumerate(table.rows):
                for j, cell in enumerate(row.cells):
                    if cell.text:
                        df[i][j] = cell.text
            tables.append(pd.DataFrame(df))

        # NOTE(review): the table/row indices below assume the fixed layout of
        # 'Template demande de besoin.docx' — confirm if the template changes.
        res = {}
        # Partie demandeur
        for i in range(1, 5):
            if i == 3:
                # Row 3 holds the request date; normalize to ISO format.
                res[tables[1][0][i]] = datetime.strptime(tables[1][1][i], "%d/%m/%Y").strftime("%Y-%m-%d")
                res['@timestamp'] = res[tables[1][0][i]]
            else:
                res[tables[1][0][i]] = tables[1][1][i]
        for i in range(1, 7):
            if i == 5:
                # Optional date field: only convert when present.
                if tables[2][1][i] != "":
                    res[tables[2][0][i]] = datetime.strptime(tables[2][1][i], "%d/%m/%Y").strftime("%Y-%m-%d")
            else:
                res[tables[2][0][i]] = tables[2][1][i]
        for i in range(1, 3):
            res[tables[3][0][i]] = tables[3][1][i]
        # Partie Reponse
        for i in range(1, 6):
            if i == 3 or i == 4:
                if tables[4][1][i] != "":
                    res[tables[4][0][i]] = datetime.strptime(tables[4][1][i], "%d/%m/%Y").strftime("%Y-%m-%d")
            else:
                res[tables[4][0][i]] = tables[4][1][i]

        response = HttpResponse(json.dumps(res),
                                content_type='application/json')
        response['Content-Disposition'] = 'attachment; filename=download.json'
        return response
    else:
        return render(request, "interface/converttojson.html", {'form': form})
def jsontoword(request):
    """Fill the Word request template from an uploaded JSON file.

    On GET, renders the upload page. On POST, loads the uploaded JSON and
    walks the template's tables, writing each value into the empty cell that
    follows its label cell (ISO dates are converted back to DD/MM/YYYY).
    Returns the filled document as a .docx download.
    """
    form = FileUploadjson(request.POST)
    if request.method == 'POST':
        document = request.FILES['file']
        # Persist the upload, then parse it as JSON (context managers replace
        # the original unclosed open()/close() pair).
        with open('tempjson.json', 'wb+') as destination:
            for chunk in document.chunks():
                destination.write(chunk)
        with open("tempjson.json") as file:
            res = json.load(file)

        num_table = 0
        document = Document(os.path.join(os.path.dirname(__file__), 'Template demande de besoin.docx'))
        # Skip the first table (title block). In the others, a value cell is
        # the first empty cell following a non-empty label cell.
        for table in document.tables:
            if num_table != 0:
                num_row = 0
                for i, row in enumerate(table.rows):
                    if num_row != 0:
                        temp = ""
                        for j, cell in enumerate(row.cells):
                            if (cell.text == "") and (temp != ""):
                                # Date fields are stored in ISO form in the JSON.
                                if temp == "Date de la demande" or temp == "Date de livraison" or temp == "Date de réception de la demande" or temp == "Date de traitement":
                                    cell.text = datetime.strptime((res[temp]), "%Y-%m-%d").strftime("%d/%m/%Y")
                                else:
                                    cell.text = res[temp]
                            if cell.text:
                                temp = cell.text
                    num_row = num_row + 1
            num_table = num_table + 1

        response = HttpResponse(
            content_type='application/vnd.openxmlformats-officedocument.wordprocessingml.document')
        response['Content-Disposition'] = 'attachment; filename=download.docx'
        document.save(response)
        return response
    else:
        return render(request, "interface/converttoword.html", {'form': form})
def ressources(request):
    """Serve the blank Word request template as a download (POST), or render
    the resources page (GET)."""
    if request.method=='POST':
        document = Document(os.path.join(os.path.dirname(__file__), 'Template demande de besoin.docx'))
        response = HttpResponse(
            content_type='application/vnd.openxmlformats-officedocument.wordprocessingml.document')
        response['Content-Disposition'] = 'attachment; filename=download.docx'
        document.save(response)
        return response
    else:
        return render(request, "interface/ressources.html")
framr/playground_deepbayes | s4_gp/bayes_opt.py | import numpy as np
from scipy.stats import norm
from scipy.optimize import minimize
from matplotlib import pyplot
import GPy
def lower_confidence_bound(mean_values, std_values, coefficient=2):
    """Lower confidence bound acquisition: mean minus `coefficient` std devs.

    Both inputs are flattened, so any array shape is accepted; the result is
    a 1-D array.
    """
    means = mean_values.ravel()
    deviations = std_values.ravel()
    return means - coefficient * deviations
def log_expected_improvement(mean_values, variance_values, opt_value):
    """Log of the expected-improvement acquisition function.

    Computed in log space for numerical stability:
        log EI = log(sigma) + log pdf(Z) + log(1 + Z * cdf(Z) / pdf(Z))
    with Z = (opt_value - mu - eps) / sigma, where eps = 0.05 / n nudges the
    improvement threshold slightly below the incumbent. Points whose
    predicted standard deviation is tiny (<= 1e-6) get Z = 0.
    """
    mu = mean_values.ravel()
    eps = 0.05/len(mu)
    delta = (opt_value - mu - eps).ravel()
    sigma = (variance_values ** 0.5).ravel()

    # Only divide where the predicted error is safely non-zero.
    stable = np.where(sigma > 1e-6)[0]
    Z = np.zeros(len(delta))
    Z[stable] = delta[stable]/sigma[stable]

    cdf_over_pdf = np.exp(norm.logcdf(Z) - norm.logpdf(Z))
    return np.log(sigma) + norm.logpdf(Z) + np.log(1 + Z * cdf_over_pdf)
def expected_improvement(mean_values, std_values, opt_values):
    """Expected improvement over the incumbent optimum.

    EI = d * cdf(d / sigma) + sigma * pdf(d / sigma), where d is the
    predicted improvement relative to the first entry of `opt_values`.
    """
    incumbent = opt_values.ravel()[0]
    improvement = (incumbent - mean_values).ravel()
    sigma = std_values.ravel()
    z = improvement / sigma
    return improvement * norm.cdf(z) + sigma * norm.pdf(z)
def get_new_point(model, lb, ub, data=None, multistart=10, criterion='ei', k=1, random_state=None):
    """Pick the next evaluation point by optimizing an acquisition function.

    Runs a local L-BFGS-B search from `multistart` random points inside the
    box [lb, ub] and returns the best optimum found.

    Args:
        model: Fitted GP model exposing ``predict(x) -> (mean, variance)``.
        lb, ub: Lower/upper bounds of the search box.
        data: (x_train, y_train) pair; y_train.min() is the incumbent for EI.
        multistart: Number of random restarts.
        criterion: 'ei' (log expected improvement) or 'lcb'.
        k: Exploration coefficient for the LCB criterion.
        random_state: Optional numpy RandomState for reproducible restarts.

    Returns:
        Tuple (best point, acquisition value at that point).

    Raises:
        NotImplementedError: If `criterion` is not recognized.
    """
    if random_state is None:
        random_state = np.random.RandomState()
    lb = np.array(lb).reshape(1, -1)
    ub = np.array(ub).reshape(1, -1)
    # Random restart points, uniform in the box.
    x_random = random_state.uniform(size=(multistart, np.array(lb).ravel().shape[0]))
    x_random *= ub - lb
    x_random += lb

    def objective(x):
        if x.ndim == 1:
            x = x.reshape(1, -1)
        mean_values, variance = model.predict(x)
        if criterion == 'ei':
            # Bug fix: log_expected_improvement expects the *variance* (it
            # takes the square root internally); the original passed the
            # standard deviation, effectively applying sqrt twice.
            return -log_expected_improvement(mean_values, variance, data[1].min())
        elif criterion == 'lcb':
            return lower_confidence_bound(mean_values, np.sqrt(variance), k)
        else:
            raise NotImplementedError('Criterion is not implemented!')

    best_result = None
    best_value = np.inf
    for x_init in x_random:
        optimization_result = minimize(objective, x_init, method='L-BFGS-B', bounds=np.vstack((lb, ub)).T)
        if optimization_result.fun < best_value:
            best_result = optimization_result
            best_value = best_result.fun[0]
    return best_result.x, best_result.fun
def optimization_step(x_train, y_train, kernel, objective, lb=None, ub=None, criterion='ei', k=1, plot=False):
    """One iteration of Bayesian optimization.

    Fits a GP to the current data, optimizes the acquisition function to pick
    the next point, evaluates `objective` there, and returns the augmented
    data set together with the fitted model.
    """
    model = GPy.models.GPRegression(x_train, y_train, kernel)
    model.optimize_restarts(num_restarts=10, verbose=False)
    x_new, criterion_value = get_new_point(model, data=(x_train, y_train), lb=lb, ub=ub, criterion=criterion, k=k)
    if plot:
        plot1d(x_train, y_train, model, objective, x_new, criterion_value)
        pyplot.show()
    # Append the new observation to the training set.
    x_new = x_new.reshape(1, -1)
    x_train = np.vstack([x_train, x_new])
    y_train = np.vstack([y_train, np.asarray(objective(x_new)).reshape(1, -1)])
    return x_train, y_train, model
def plot1d(x_train, y_train, model, objective, x_new, criterion_value):
    """Plot a 1-D optimization state: data, true function, GP fit, new point.

    Draws the training set, the true objective on [0, 1], the GP mean with a
    +/- 2 std band, and the newly selected point.
    # NOTE(review): criterion_value is accepted but never used — confirm
    # whether it was meant to be annotated on the plot.
    """
    x_grid = np.linspace(0, 1, 100).reshape(-1, 1)
    y_grid = objective(x_grid)
    prediction, variance = model.predict(x_grid)
    std = np.sqrt(variance)
    prediction = prediction.ravel()
    std = std.ravel()
    pyplot.figure(figsize=(8, 6))
    pyplot.plot(x_train, y_train, 'or', markersize=8, label='Training set')
    pyplot.plot(x_grid, y_grid, '--b', linewidth=2, label='True function')
    pyplot.plot(x_grid, prediction, '-k', linewidth=2, label='Approximation')
    # Shaded band: two predicted standard deviations around the GP mean.
    pyplot.fill_between(x_grid.ravel(), prediction - 2 * std, prediction + 2 * std, alpha=0.3)
    pyplot.plot(x_new, objective(x_new), 'og', markersize=10, label='New point')
    pyplot.ylim([-15, 20])
    pyplot.legend(loc='best')
def plot2d(objective, x_train, y_train, model):
    """Plot a 2-D optimization state as four heatmaps.

    Shows GP prediction, exact objective values, predicted std, and EI on a
    50x50 grid over [-1, 1]^2, with training points and the latest point
    overlaid on each panel.
    """
    grid_size = 50
    x = np.meshgrid(np.linspace(-1, 1, grid_size), np.linspace(-1, 1, grid_size))
    x = np.hstack((x[0].reshape(-1, 1), x[1].reshape(-1, 1)))
    y = objective(x)
    prediction, variance = model.predict(x)
    std = np.sqrt(variance).ravel()
    # Map training coordinates from [-1, 1] into heatmap pixel coordinates.
    x_train = (x_train + 1) * grid_size / 2
    # NOTE(review): log_expected_improvement's second argument is a variance,
    # but std is passed here (sqrt applied twice) — confirm intent.
    log_EI = np.exp(log_expected_improvement(prediction, std, y_train.min()))
    values = [prediction, y, std, log_EI]
    names = ['Predicted values', 'Exact values', 'Predicted std', 'log EI']
    figure, axes = pyplot.subplots(nrows=2, ncols=2, figsize=(6, 6))
    for i, ax in enumerate(axes.ravel()):
        if i < 3:
            ax.imshow(values[i].reshape(grid_size, grid_size), vmin=0, vmax=1, alpha=0.8)
        else:
            ax.imshow(values[i].reshape(grid_size, grid_size), alpha=0.8)
        # Past points in red; the most recent point as a green diamond.
        ax.scatter(x_train[:-1, 0], x_train[:-1, 1], c='r', s=20)
        ax.scatter(x_train[-1, 0], x_train[-1, 1], marker='d', edgecolor='k', c='g', s=180)
        ax.set_xlim([-0.5, grid_size + 0.5])
        ax.set_ylim([-0.5, grid_size + 0.5])
        ax.axis('off')
        ax.set_title(names[i])
    figure.tight_layout()
def demo_2d(n_init, budget, kernel, save_path='./library/2d_demo.mp4'):
    """Render a Bayesian-optimization animation on a fixed 2-D test function.

    Starts from `n_init` random points, runs `budget` optimization steps, and
    saves an mp4 (via ffmpeg) showing the GP prediction, the exact function,
    the EI surface, and the convergence curve.

    # NOTE(review): state is shared through module globals (x_train, y_train,
    # model) because matplotlib's animation callback mutates them — this
    # makes the function non-reentrant.
    """
    global x_train, y_train, model

    def f2d(x):
        # Fixed 2-D test objective used throughout the demo.
        t = np.sum((x + 0.6)**2, axis=1) - 0.3
        y = np.sin(t)**2 / np.tanh(t**2 + 0.4)
        return y.reshape(-1, 1)

    lb = [-1, -1]
    ub = [1, 1]
    np.random.seed(42)
    x_train = np.random.rand(n_init, 2) * 2 - 1
    y_train = f2d(x_train)
    model = GPy.models.GPRegression(x_train, y_train, kernel)
    model.optimize()
    # Set up formatting for the movie files
    import matplotlib.animation as animation
    from mpl_toolkits.axes_grid1 import make_axes_locatable
    Writer = animation.writers['ffmpeg_file']
    writer = Writer(fps=1, metadata=dict(artist='<NAME>'))
    grid_size = 50
    x = np.meshgrid(np.linspace(-1, 1, grid_size), np.linspace(-1, 1, grid_size))
    x = np.hstack((x[0].reshape(-1, 1), x[1].reshape(-1, 1)))
    y = f2d(x)

    def get_model_values(model, x, x_train):
        # Panels to draw: GP mean, exact values, and the EI surface.
        # NOTE(review): log_expected_improvement's second argument is a
        # variance, but std is passed here (sqrt applied twice) — confirm.
        # The x_train parameter is unused; y is read from the enclosing scope.
        prediction, variance = model.predict(x)
        std = np.sqrt(variance).ravel()
        log_EI = np.exp(log_expected_improvement(prediction, std, y_train.min()))
        values = [prediction, y, log_EI]
        return values

    values = get_model_values(model, x, x_train)
    history = [y_train.min()]
    names = ['Predicted values', 'Exact values', 'log EI']
    # Set up initial canvas
    figure, axes = pyplot.subplots(nrows=2, ncols=2, figsize=(6, 6))
    heatmaps = []
    scatters = []
    new_point_scatters = []
    for i, ax in enumerate(axes.ravel()[:-1]):
        heatmaps.append(ax.matshow(values[i].reshape(grid_size, grid_size), alpha=0.8))
        # Map training coordinates from [-1, 1] into heatmap pixel coordinates.
        x_scatter = (x_train + 1) * grid_size / 2
        scatters.append(ax.scatter(x_scatter[:-1, 0], x_scatter[:-1, 1], c='r', s=20))
        new_point_scatters.append(ax.scatter(x_scatter[-1, 0], x_scatter[-1, 1], marker='d', edgecolor='k',
                                             c='g', s=180))
        divider = make_axes_locatable(ax)
        cax = divider.append_axes("right", size="5%", pad=0.05)
        figure.colorbar(heatmaps[-1], cax=cax)
        ax.set_xlim([-0.5, grid_size + 0.5])
        ax.set_ylim([-0.5, grid_size + 0.5])
        ax.axis('off')
        ax.set_title(names[i])
    # Fourth panel: best-so-far objective value versus iteration.
    convergence_plot = axes.ravel()[-1].plot([y_train.shape[0]], [y_train.min()], '-')
    axes.ravel()[-1].set_xlabel('iteration')
    axes.ravel()[-1].set_ylabel(r'$y_{min}$')
    axes.ravel()[-1].set_xlim([n_init - 1, n_init + budget])
    axes.ravel()[-1].set_ylim([0, 0.0073])
    figure.tight_layout()

    # Define function that updates figure
    def update_fig(iteration):
        # Animation callback: refit the GP, pick and evaluate the next point,
        # and refresh every artist. Frame 0 just shows the initial state.
        global x_train, y_train, model
        # global y_train
        # global model
        if iteration == 0:
            return heatmaps + scatters + new_point_scatters + convergence_plot
        model = GPy.models.GPRegression(x_train, y_train, model.kern)
        model.optimize()
        x_new, criterion = get_new_point(model, lb, ub, data=(x_train, y_train), multistart=10, random_state=None)
        x_new = x_new.reshape(1, -1)
        x_train = np.vstack([x_train, x_new])
        y_train = np.vstack([y_train, f2d(x_new)])
        history.append(y_train.min())
        values = get_model_values(model, x, x_train)
        for i, val in enumerate(values):
            heatmaps[i].set_array(val.reshape(grid_size, -1))
            x_scatter = (x_train + 1) * grid_size / 2
            scatters[i].set_offsets(x_scatter[:-1])
            new_point_scatters[i].set_offsets(x_scatter[-1:])
            # adjust colorbar for std and log EI plot
            vmin = val.min()
            vmax = val.max()
            heatmaps[i].set_clim(vmin, vmax)
        convergence_plot[0].set_data(range(n_init, y_train.shape[0] + 1), history)
        return heatmaps + scatters + new_point_scatters + convergence_plot

    anim = animation.FuncAnimation(figure, update_fig,
                                   blit=False,
                                   repeat=False,
                                   frames=budget)
    anim.save(save_path, writer=writer)
|
iamneha/python | kubernetes/client/models/v1_job_status.py | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.13.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1JobStatus(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'active': 'int',
'completion_time': 'datetime',
'conditions': 'list[V1JobCondition]',
'failed': 'int',
'start_time': 'datetime',
'succeeded': 'int'
}
attribute_map = {
'active': 'active',
'completion_time': 'completionTime',
'conditions': 'conditions',
'failed': 'failed',
'start_time': 'startTime',
'succeeded': 'succeeded'
}
def __init__(self, active=None, completion_time=None, conditions=None, failed=None, start_time=None, succeeded=None):
"""
V1JobStatus - a model defined in Swagger
"""
self._active = None
self._completion_time = None
self._conditions = None
self._failed = None
self._start_time = None
self._succeeded = None
self.discriminator = None
if active is not None:
self.active = active
if completion_time is not None:
self.completion_time = completion_time
if conditions is not None:
self.conditions = conditions
if failed is not None:
self.failed = failed
if start_time is not None:
self.start_time = start_time
if succeeded is not None:
self.succeeded = succeeded
@property
def active(self):
"""
Gets the active of this V1JobStatus.
The number of actively running pods.
:return: The active of this V1JobStatus.
:rtype: int
"""
return self._active
@active.setter
def active(self, active):
"""
Sets the active of this V1JobStatus.
The number of actively running pods.
:param active: The active of this V1JobStatus.
:type: int
"""
self._active = active
@property
def completion_time(self):
"""
Gets the completion_time of this V1JobStatus.
Represents time when the job was completed. It is not guaranteed to be set in happens-before order across separate operations. It is represented in RFC3339 form and is in UTC.
:return: The completion_time of this V1JobStatus.
:rtype: datetime
"""
return self._completion_time
@completion_time.setter
def completion_time(self, completion_time):
"""
Sets the completion_time of this V1JobStatus.
Represents time when the job was completed. It is not guaranteed to be set in happens-before order across separate operations. It is represented in RFC3339 form and is in UTC.
:param completion_time: The completion_time of this V1JobStatus.
:type: datetime
"""
self._completion_time = completion_time
@property
def conditions(self):
"""
Gets the conditions of this V1JobStatus.
The latest available observations of an object's current state. More info: https://kubernetes.io/docs/concepts/workloads/controllers/jobs-run-to-completion/
:return: The conditions of this V1JobStatus.
:rtype: list[V1JobCondition]
"""
return self._conditions
@conditions.setter
def conditions(self, conditions):
"""
Sets the conditions of this V1JobStatus.
The latest available observations of an object's current state. More info: https://kubernetes.io/docs/concepts/workloads/controllers/jobs-run-to-completion/
:param conditions: The conditions of this V1JobStatus.
:type: list[V1JobCondition]
"""
self._conditions = conditions
@property
def failed(self):
"""
Gets the failed of this V1JobStatus.
The number of pods which reached phase Failed.
:return: The failed of this V1JobStatus.
:rtype: int
"""
return self._failed
@failed.setter
def failed(self, failed):
"""
Sets the failed of this V1JobStatus.
The number of pods which reached phase Failed.
:param failed: The failed of this V1JobStatus.
:type: int
"""
self._failed = failed
@property
def start_time(self):
"""
Gets the start_time of this V1JobStatus.
Represents time when the job was acknowledged by the job controller. It is not guaranteed to be set in happens-before order across separate operations. It is represented in RFC3339 form and is in UTC.
:return: The start_time of this V1JobStatus.
:rtype: datetime
"""
return self._start_time
@start_time.setter
def start_time(self, start_time):
"""
Sets the start_time of this V1JobStatus.
Represents time when the job was acknowledged by the job controller. It is not guaranteed to be set in happens-before order across separate operations. It is represented in RFC3339 form and is in UTC.
:param start_time: The start_time of this V1JobStatus.
:type: datetime
"""
self._start_time = start_time
@property
def succeeded(self):
"""
Gets the succeeded of this V1JobStatus.
The number of pods which reached phase Succeeded.
:return: The succeeded of this V1JobStatus.
:rtype: int
"""
return self._succeeded
@succeeded.setter
def succeeded(self, succeeded):
"""
Sets the succeeded of this V1JobStatus.
The number of pods which reached phase Succeeded.
:param succeeded: The succeeded of this V1JobStatus.
:type: int
"""
self._succeeded = succeeded
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, V1JobStatus):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
|
iamneha/python | kubernetes/client/apis/custom_objects_api.py | <gh_stars>1-10
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.13.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..api_client import ApiClient
class CustomObjectsApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_cluster_custom_object(self, group, version, plural, body, **kwargs):
"""
Creates a cluster scoped Custom object
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_cluster_custom_object(group, version, plural, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str group: The custom resource's group name (required)
:param str version: The custom resource's version (required)
:param str plural: The custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
:param object body: The JSON schema of the Resource to create. (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_cluster_custom_object_with_http_info(group, version, plural, body, **kwargs)
else:
(data) = self.create_cluster_custom_object_with_http_info(group, version, plural, body, **kwargs)
return data
def create_cluster_custom_object_with_http_info(self, group, version, plural, body, **kwargs):
    """
    Creates a cluster scoped Custom object.

    Raw variant of create_cluster_custom_object: issues the same POST but
    also honours the low-level transport flags (_return_http_data_only,
    _preload_content, _request_timeout).

    :param async_req bool
    :param str group: The custom resource's group name (required)
    :param str version: The custom resource's version (required)
    :param str plural: The custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
    :param object body: The JSON schema of the Resource to create. (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: object
        If the method is called asynchronously, returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: when a required parameter is None.
    """
    # Accepted keyword arguments: API parameters plus transport flags.
    # (Replaces the generated six.iteritems/locals() bookkeeping with
    # plain Python 3 — behavior and messages are unchanged.)
    all_params = {
        'group', 'version', 'plural', 'body', 'pretty',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    }
    for key in kwargs:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_cluster_custom_object" % key
            )
    params = dict(kwargs, group=group, version=version, plural=plural,
                  body=body)
    # All positional parameters are required and may not be None.
    for required in ('group', 'version', 'plural', 'body'):
        if params[required] is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling "
                "`create_cluster_custom_object`" % required
            )
    path_params = {'group': group, 'version': version, 'plural': plural}
    # `pretty` is the only query parameter; forward it verbatim if supplied.
    query_params = []
    if 'pretty' in params:
        query_params.append(('pretty', params['pretty']))
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
    }
    # Authentication setting
    auth_settings = ['BearerToken']
    return self.api_client.call_api(
        '/apis/{group}/{version}/{plural}', 'POST',
        path_params,
        query_params,
        header_params,
        body=body,
        post_params=[],
        files={},
        response_type='object',
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def create_namespaced_custom_object(self, group, version, namespace, plural, body, **kwargs):
    """
    Creates a namespace scoped Custom object.

    Synchronous by default; pass ``async_req=True`` to receive the request
    thread instead of the response data.

    >>> thread = api.create_namespaced_custom_object(group, version, namespace, plural, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str group: The custom resource's group name (required)
    :param str version: The custom resource's version (required)
    :param str namespace: The custom resource's namespace (required)
    :param str plural: The custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
    :param object body: The JSON schema of the Resource to create. (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: object, or the request thread when async_req is set.
    """
    # Convenience wrapper: callers always get just the response payload.
    kwargs['_return_http_data_only'] = True
    return self.create_namespaced_custom_object_with_http_info(
        group, version, namespace, plural, body, **kwargs)
def create_namespaced_custom_object_with_http_info(self, group, version, namespace, plural, body, **kwargs):
    """
    Creates a namespace scoped Custom object.

    Raw variant of create_namespaced_custom_object: issues the same POST
    but also honours the low-level transport flags
    (_return_http_data_only, _preload_content, _request_timeout).

    :param async_req bool
    :param str group: The custom resource's group name (required)
    :param str version: The custom resource's version (required)
    :param str namespace: The custom resource's namespace (required)
    :param str plural: The custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
    :param object body: The JSON schema of the Resource to create. (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: object
        If the method is called asynchronously, returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: when a required parameter is None.
    """
    # Accepted keyword arguments: API parameters plus transport flags.
    # (Replaces the generated six.iteritems/locals() bookkeeping with
    # plain Python 3 — behavior and messages are unchanged.)
    all_params = {
        'group', 'version', 'namespace', 'plural', 'body', 'pretty',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    }
    for key in kwargs:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_namespaced_custom_object" % key
            )
    params = dict(kwargs, group=group, version=version, namespace=namespace,
                  plural=plural, body=body)
    # All positional parameters are required and may not be None.
    for required in ('group', 'version', 'namespace', 'plural', 'body'):
        if params[required] is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling "
                "`create_namespaced_custom_object`" % required
            )
    path_params = {'group': group, 'version': version,
                   'namespace': namespace, 'plural': plural}
    # `pretty` is the only query parameter; forward it verbatim if supplied.
    query_params = []
    if 'pretty' in params:
        query_params.append(('pretty', params['pretty']))
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
    }
    # Authentication setting
    auth_settings = ['BearerToken']
    return self.api_client.call_api(
        '/apis/{group}/{version}/namespaces/{namespace}/{plural}', 'POST',
        path_params,
        query_params,
        header_params,
        body=body,
        post_params=[],
        files={},
        response_type='object',
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def delete_cluster_custom_object(self, group, version, plural, name, body, **kwargs):
    """
    Deletes the specified cluster scoped custom object.

    Synchronous by default; pass ``async_req=True`` to receive the request
    thread instead of the response data.

    >>> thread = api.delete_cluster_custom_object(group, version, plural, name, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str group: the custom resource's group (required)
    :param str version: the custom resource's version (required)
    :param str plural: the custom object's plural name. For TPRs this would be lowercase plural kind. (required)
    :param str name: the custom object's name (required)
    :param V1DeleteOptions body: (required)
    :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately.
    :param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
    :param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy.
    :return: object, or the request thread when async_req is set.
    """
    # Convenience wrapper: callers always get just the response payload.
    kwargs['_return_http_data_only'] = True
    return self.delete_cluster_custom_object_with_http_info(
        group, version, plural, name, body, **kwargs)
def delete_cluster_custom_object_with_http_info(self, group, version, plural, name, body, **kwargs):
    """
    Deletes the specified cluster scoped custom object.

    Raw variant of delete_cluster_custom_object: issues the same DELETE
    but also honours the low-level transport flags
    (_return_http_data_only, _preload_content, _request_timeout).

    :param async_req bool
    :param str group: the custom resource's group (required)
    :param str version: the custom resource's version (required)
    :param str plural: the custom object's plural name. For TPRs this would be lowercase plural kind. (required)
    :param str name: the custom object's name (required)
    :param V1DeleteOptions body: (required)
    :param int grace_period_seconds: seconds before deletion; zero means delete immediately.
    :param bool orphan_dependents: deprecated in favour of propagation_policy.
    :param str propagation_policy: whether and how garbage collection is performed.
    :return: object
        If the method is called asynchronously, returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: when a required parameter is None.
    """
    # Accepted keyword arguments: API parameters plus transport flags.
    # (Replaces the generated six.iteritems/locals() bookkeeping with
    # plain Python 3 — behavior and messages are unchanged.)
    all_params = {
        'group', 'version', 'plural', 'name', 'body',
        'grace_period_seconds', 'orphan_dependents', 'propagation_policy',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    }
    for key in kwargs:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_cluster_custom_object" % key
            )
    params = dict(kwargs, group=group, version=version, plural=plural,
                  name=name, body=body)
    # All positional parameters are required and may not be None.
    for required in ('group', 'version', 'plural', 'name', 'body'):
        if params[required] is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling "
                "`delete_cluster_custom_object`" % required
            )
    path_params = {'group': group, 'version': version, 'plural': plural,
                   'name': name}
    # Optional deletion knobs map to their camelCase query-string names.
    query_params = []
    for arg, wire in (('grace_period_seconds', 'gracePeriodSeconds'),
                      ('orphan_dependents', 'orphanDependents'),
                      ('propagation_policy', 'propagationPolicy')):
        if arg in params:
            query_params.append((wire, params[arg]))
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }
    # Authentication setting
    auth_settings = ['BearerToken']
    return self.api_client.call_api(
        '/apis/{group}/{version}/{plural}/{name}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body,
        post_params=[],
        files={},
        response_type='object',
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def delete_namespaced_custom_object(self, group, version, namespace, plural, name, body, **kwargs):
    """
    Deletes the specified namespace scoped custom object.

    Synchronous by default; pass ``async_req=True`` to receive the request
    thread instead of the response data.

    >>> thread = api.delete_namespaced_custom_object(group, version, namespace, plural, name, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str group: the custom resource's group (required)
    :param str version: the custom resource's version (required)
    :param str namespace: The custom resource's namespace (required)
    :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
    :param str name: the custom object's name (required)
    :param V1DeleteOptions body: (required)
    :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately.
    :param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
    :param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy.
    :return: object, or the request thread when async_req is set.
    """
    # Convenience wrapper: callers always get just the response payload.
    kwargs['_return_http_data_only'] = True
    return self.delete_namespaced_custom_object_with_http_info(
        group, version, namespace, plural, name, body, **kwargs)
def delete_namespaced_custom_object_with_http_info(self, group, version, namespace, plural, name, body, **kwargs):
    """
    Deletes the specified namespace scoped custom object.

    Raw variant of delete_namespaced_custom_object: issues the same DELETE
    but also honours the low-level transport flags
    (_return_http_data_only, _preload_content, _request_timeout).

    :param async_req bool
    :param str group: the custom resource's group (required)
    :param str version: the custom resource's version (required)
    :param str namespace: The custom resource's namespace (required)
    :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
    :param str name: the custom object's name (required)
    :param V1DeleteOptions body: (required)
    :param int grace_period_seconds: seconds before deletion; zero means delete immediately.
    :param bool orphan_dependents: deprecated in favour of propagation_policy.
    :param str propagation_policy: whether and how garbage collection is performed.
    :return: object
        If the method is called asynchronously, returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: when a required parameter is None.
    """
    # Accepted keyword arguments: API parameters plus transport flags.
    # (Replaces the generated six.iteritems/locals() bookkeeping with
    # plain Python 3 — behavior and messages are unchanged.)
    all_params = {
        'group', 'version', 'namespace', 'plural', 'name', 'body',
        'grace_period_seconds', 'orphan_dependents', 'propagation_policy',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    }
    for key in kwargs:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_namespaced_custom_object" % key
            )
    params = dict(kwargs, group=group, version=version, namespace=namespace,
                  plural=plural, name=name, body=body)
    # All positional parameters are required and may not be None.
    for required in ('group', 'version', 'namespace', 'plural', 'name',
                     'body'):
        if params[required] is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling "
                "`delete_namespaced_custom_object`" % required
            )
    path_params = {'group': group, 'version': version,
                   'namespace': namespace, 'plural': plural, 'name': name}
    # Optional deletion knobs map to their camelCase query-string names.
    query_params = []
    for arg, wire in (('grace_period_seconds', 'gracePeriodSeconds'),
                      ('orphan_dependents', 'orphanDependents'),
                      ('propagation_policy', 'propagationPolicy')):
        if arg in params:
            query_params.append((wire, params[arg]))
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }
    # Authentication setting
    auth_settings = ['BearerToken']
    return self.api_client.call_api(
        '/apis/{group}/{version}/namespaces/{namespace}/{plural}/{name}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body,
        post_params=[],
        files={},
        response_type='object',
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_cluster_custom_object(self, group, version, plural, name, **kwargs):
    """
    Returns a cluster scoped custom object.

    Synchronous by default; pass ``async_req=True`` to receive the request
    thread instead of the response data.

    >>> thread = api.get_cluster_custom_object(group, version, plural, name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str group: the custom resource's group (required)
    :param str version: the custom resource's version (required)
    :param str plural: the custom object's plural name. For TPRs this would be lowercase plural kind. (required)
    :param str name: the custom object's name (required)
    :return: object, or the request thread when async_req is set.
    """
    # Convenience wrapper: callers always get just the response payload.
    kwargs['_return_http_data_only'] = True
    return self.get_cluster_custom_object_with_http_info(
        group, version, plural, name, **kwargs)
def get_cluster_custom_object_with_http_info(self, group, version, plural, name, **kwargs):
    """
    Returns a cluster scoped custom object.

    Raw variant of get_cluster_custom_object: issues the same GET but also
    honours the low-level transport flags (_return_http_data_only,
    _preload_content, _request_timeout).

    :param async_req bool
    :param str group: the custom resource's group (required)
    :param str version: the custom resource's version (required)
    :param str plural: the custom object's plural name. For TPRs this would be lowercase plural kind. (required)
    :param str name: the custom object's name (required)
    :return: object
        If the method is called asynchronously, returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: when a required parameter is None.
    """
    # Accepted keyword arguments: API parameters plus transport flags.
    # (Replaces the generated six.iteritems/locals() bookkeeping with
    # plain Python 3 — behavior and messages are unchanged.)
    all_params = {
        'group', 'version', 'plural', 'name',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    }
    for key in kwargs:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_cluster_custom_object" % key
            )
    params = dict(kwargs, group=group, version=version, plural=plural,
                  name=name)
    # All positional parameters are required and may not be None.
    for required in ('group', 'version', 'plural', 'name'):
        if params[required] is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling "
                "`get_cluster_custom_object`" % required
            )
    path_params = {'group': group, 'version': version, 'plural': plural,
                   'name': name}
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }
    # Authentication setting
    auth_settings = ['BearerToken']
    # A GET carries no request body and no query parameters.
    return self.api_client.call_api(
        '/apis/{group}/{version}/{plural}/{name}', 'GET',
        path_params,
        [],
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='object',
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_cluster_custom_object_scale(self, group, version, plural, name, **kwargs):
    """
    read scale of the specified custom object

    Synchronous by default; pass ``async_req=True`` to receive the request
    thread instead of the response data.

    >>> thread = api.get_cluster_custom_object_scale(group, version, plural, name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str group: the custom resource's group (required)
    :param str version: the custom resource's version (required)
    :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
    :param str name: the custom object's name (required)
    :return: object, or the request thread when async_req is set.
    """
    # Convenience wrapper: callers always get just the response payload.
    kwargs['_return_http_data_only'] = True
    return self.get_cluster_custom_object_scale_with_http_info(
        group, version, plural, name, **kwargs)
def get_cluster_custom_object_scale_with_http_info(self, group, version, plural, name, **kwargs):
    """
    read scale of the specified custom object

    Raw variant of get_cluster_custom_object_scale: issues the same GET on
    the object's /scale subresource but also honours the low-level
    transport flags (_return_http_data_only, _preload_content,
    _request_timeout).

    :param async_req bool
    :param str group: the custom resource's group (required)
    :param str version: the custom resource's version (required)
    :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
    :param str name: the custom object's name (required)
    :return: object
        If the method is called asynchronously, returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: when a required parameter is None.
    """
    # Accepted keyword arguments: API parameters plus transport flags.
    # (Replaces the generated six.iteritems/locals() bookkeeping with
    # plain Python 3 — behavior and messages are unchanged.)
    all_params = {
        'group', 'version', 'plural', 'name',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    }
    for key in kwargs:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_cluster_custom_object_scale" % key
            )
    params = dict(kwargs, group=group, version=version, plural=plural,
                  name=name)
    # All positional parameters are required and may not be None.
    for required in ('group', 'version', 'plural', 'name'):
        if params[required] is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling "
                "`get_cluster_custom_object_scale`" % required
            )
    path_params = {'group': group, 'version': version, 'plural': plural,
                   'name': name}
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml',
             'application/vnd.kubernetes.protobuf']),
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }
    # Authentication setting
    auth_settings = ['BearerToken']
    # A GET carries no request body and no query parameters.
    return self.api_client.call_api(
        '/apis/{group}/{version}/{plural}/{name}/scale', 'GET',
        path_params,
        [],
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='object',
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_cluster_custom_object_status(self, group, version, plural, name, **kwargs):
    """
    read status of the specified cluster scoped custom object

    Synchronous by default; pass ``async_req=True`` to receive the request
    thread instead of the response data.

    >>> thread = api.get_cluster_custom_object_status(group, version, plural, name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str group: the custom resource's group (required)
    :param str version: the custom resource's version (required)
    :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
    :param str name: the custom object's name (required)
    :return: object, or the request thread when async_req is set.
    """
    # Convenience wrapper: callers always get just the response payload.
    kwargs['_return_http_data_only'] = True
    return self.get_cluster_custom_object_status_with_http_info(
        group, version, plural, name, **kwargs)
def get_cluster_custom_object_status_with_http_info(self, group, version, plural, name, **kwargs):
"""
read status of the specified cluster scoped custom object
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_cluster_custom_object_status_with_http_info(group, version, plural, name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str group: the custom resource's group (required)
:param str version: the custom resource's version (required)
:param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
:param str name: the custom object's name (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['group', 'version', 'plural', 'name']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_cluster_custom_object_status" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'group' is set
if ('group' not in params) or (params['group'] is None):
raise ValueError("Missing the required parameter `group` when calling `get_cluster_custom_object_status`")
# verify the required parameter 'version' is set
if ('version' not in params) or (params['version'] is None):
raise ValueError("Missing the required parameter `version` when calling `get_cluster_custom_object_status`")
# verify the required parameter 'plural' is set
if ('plural' not in params) or (params['plural'] is None):
raise ValueError("Missing the required parameter `plural` when calling `get_cluster_custom_object_status`")
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `get_cluster_custom_object_status`")
collection_formats = {}
path_params = {}
if 'group' in params:
path_params['group'] = params['group']
if 'version' in params:
path_params['version'] = params['version']
if 'plural' in params:
path_params['plural'] = params['plural']
if 'name' in params:
path_params['name'] = params['name']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api('/apis/{group}/{version}/{plural}/{name}/status', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_namespaced_custom_object(self, group, version, namespace, plural, name, **kwargs):
"""
Returns a namespace scoped custom object
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_namespaced_custom_object(group, version, namespace, plural, name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str group: the custom resource's group (required)
:param str version: the custom resource's version (required)
:param str namespace: The custom resource's namespace (required)
:param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
:param str name: the custom object's name (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_namespaced_custom_object_with_http_info(group, version, namespace, plural, name, **kwargs)
else:
(data) = self.get_namespaced_custom_object_with_http_info(group, version, namespace, plural, name, **kwargs)
return data
def get_namespaced_custom_object_with_http_info(self, group, version, namespace, plural, name, **kwargs):
"""
Returns a namespace scoped custom object
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_namespaced_custom_object_with_http_info(group, version, namespace, plural, name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str group: the custom resource's group (required)
:param str version: the custom resource's version (required)
:param str namespace: The custom resource's namespace (required)
:param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
:param str name: the custom object's name (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['group', 'version', 'namespace', 'plural', 'name']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_namespaced_custom_object" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'group' is set
if ('group' not in params) or (params['group'] is None):
raise ValueError("Missing the required parameter `group` when calling `get_namespaced_custom_object`")
# verify the required parameter 'version' is set
if ('version' not in params) or (params['version'] is None):
raise ValueError("Missing the required parameter `version` when calling `get_namespaced_custom_object`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `get_namespaced_custom_object`")
# verify the required parameter 'plural' is set
if ('plural' not in params) or (params['plural'] is None):
raise ValueError("Missing the required parameter `plural` when calling `get_namespaced_custom_object`")
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `get_namespaced_custom_object`")
collection_formats = {}
path_params = {}
if 'group' in params:
path_params['group'] = params['group']
if 'version' in params:
path_params['version'] = params['version']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
if 'plural' in params:
path_params['plural'] = params['plural']
if 'name' in params:
path_params['name'] = params['name']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api('/apis/{group}/{version}/namespaces/{namespace}/{plural}/{name}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_namespaced_custom_object_scale(self, group, version, namespace, plural, name, **kwargs):
"""
read scale of the specified namespace scoped custom object
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_namespaced_custom_object_scale(group, version, namespace, plural, name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str group: the custom resource's group (required)
:param str version: the custom resource's version (required)
:param str namespace: The custom resource's namespace (required)
:param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
:param str name: the custom object's name (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_namespaced_custom_object_scale_with_http_info(group, version, namespace, plural, name, **kwargs)
else:
(data) = self.get_namespaced_custom_object_scale_with_http_info(group, version, namespace, plural, name, **kwargs)
return data
def get_namespaced_custom_object_scale_with_http_info(self, group, version, namespace, plural, name, **kwargs):
"""
read scale of the specified namespace scoped custom object
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_namespaced_custom_object_scale_with_http_info(group, version, namespace, plural, name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str group: the custom resource's group (required)
:param str version: the custom resource's version (required)
:param str namespace: The custom resource's namespace (required)
:param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
:param str name: the custom object's name (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['group', 'version', 'namespace', 'plural', 'name']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_namespaced_custom_object_scale" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'group' is set
if ('group' not in params) or (params['group'] is None):
raise ValueError("Missing the required parameter `group` when calling `get_namespaced_custom_object_scale`")
# verify the required parameter 'version' is set
if ('version' not in params) or (params['version'] is None):
raise ValueError("Missing the required parameter `version` when calling `get_namespaced_custom_object_scale`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `get_namespaced_custom_object_scale`")
# verify the required parameter 'plural' is set
if ('plural' not in params) or (params['plural'] is None):
raise ValueError("Missing the required parameter `plural` when calling `get_namespaced_custom_object_scale`")
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `get_namespaced_custom_object_scale`")
collection_formats = {}
path_params = {}
if 'group' in params:
path_params['group'] = params['group']
if 'version' in params:
path_params['version'] = params['version']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
if 'plural' in params:
path_params['plural'] = params['plural']
if 'name' in params:
path_params['name'] = params['name']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api('/apis/{group}/{version}/namespaces/{namespace}/{plural}/{name}/scale', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_namespaced_custom_object_status(self, group, version, namespace, plural, name, **kwargs):
"""
read status of the specified namespace scoped custom object
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_namespaced_custom_object_status(group, version, namespace, plural, name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str group: the custom resource's group (required)
:param str version: the custom resource's version (required)
:param str namespace: The custom resource's namespace (required)
:param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
:param str name: the custom object's name (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_namespaced_custom_object_status_with_http_info(group, version, namespace, plural, name, **kwargs)
else:
(data) = self.get_namespaced_custom_object_status_with_http_info(group, version, namespace, plural, name, **kwargs)
return data
def get_namespaced_custom_object_status_with_http_info(self, group, version, namespace, plural, name, **kwargs):
"""
read status of the specified namespace scoped custom object
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_namespaced_custom_object_status_with_http_info(group, version, namespace, plural, name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str group: the custom resource's group (required)
:param str version: the custom resource's version (required)
:param str namespace: The custom resource's namespace (required)
:param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
:param str name: the custom object's name (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['group', 'version', 'namespace', 'plural', 'name']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_namespaced_custom_object_status" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'group' is set
if ('group' not in params) or (params['group'] is None):
raise ValueError("Missing the required parameter `group` when calling `get_namespaced_custom_object_status`")
# verify the required parameter 'version' is set
if ('version' not in params) or (params['version'] is None):
raise ValueError("Missing the required parameter `version` when calling `get_namespaced_custom_object_status`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `get_namespaced_custom_object_status`")
# verify the required parameter 'plural' is set
if ('plural' not in params) or (params['plural'] is None):
raise ValueError("Missing the required parameter `plural` when calling `get_namespaced_custom_object_status`")
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `get_namespaced_custom_object_status`")
collection_formats = {}
path_params = {}
if 'group' in params:
path_params['group'] = params['group']
if 'version' in params:
path_params['version'] = params['version']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
if 'plural' in params:
path_params['plural'] = params['plural']
if 'name' in params:
path_params['name'] = params['name']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api('/apis/{group}/{version}/namespaces/{namespace}/{plural}/{name}/status', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_cluster_custom_object(self, group, version, plural, **kwargs):
"""
list or watch cluster scoped custom objects
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_cluster_custom_object(group, version, plural, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str group: The custom resource's group name (required)
:param str version: The custom resource's version (required)
:param str plural: The custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications.
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_cluster_custom_object_with_http_info(group, version, plural, **kwargs)
else:
(data) = self.list_cluster_custom_object_with_http_info(group, version, plural, **kwargs)
return data
def list_cluster_custom_object_with_http_info(self, group, version, plural, **kwargs):
"""
list or watch cluster scoped custom objects
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_cluster_custom_object_with_http_info(group, version, plural, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str group: The custom resource's group name (required)
:param str version: The custom resource's version (required)
:param str plural: The custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications.
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['group', 'version', 'plural', 'pretty', 'label_selector', 'resource_version', 'timeout_seconds', 'watch']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_cluster_custom_object" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'group' is set
if ('group' not in params) or (params['group'] is None):
raise ValueError("Missing the required parameter `group` when calling `list_cluster_custom_object`")
# verify the required parameter 'version' is set
if ('version' not in params) or (params['version'] is None):
raise ValueError("Missing the required parameter `version` when calling `list_cluster_custom_object`")
# verify the required parameter 'plural' is set
if ('plural' not in params) or (params['plural'] is None):
raise ValueError("Missing the required parameter `plural` when calling `list_cluster_custom_object`")
collection_formats = {}
path_params = {}
if 'group' in params:
path_params['group'] = params['group']
if 'version' in params:
path_params['version'] = params['version']
if 'plural' in params:
path_params['plural'] = params['plural']
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
if 'label_selector' in params:
query_params.append(('labelSelector', params['label_selector']))
if 'resource_version' in params:
query_params.append(('resourceVersion', params['resource_version']))
if 'timeout_seconds' in params:
query_params.append(('timeoutSeconds', params['timeout_seconds']))
if 'watch' in params:
query_params.append(('watch', params['watch']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/json;stream=watch'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api('/apis/{group}/{version}/{plural}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_namespaced_custom_object(self, group, version, namespace, plural, **kwargs):
"""
list or watch namespace scoped custom objects
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_namespaced_custom_object(group, version, namespace, plural, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str group: The custom resource's group name (required)
:param str version: The custom resource's version (required)
:param str namespace: The custom resource's namespace (required)
:param str plural: The custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications.
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_namespaced_custom_object_with_http_info(group, version, namespace, plural, **kwargs)
else:
(data) = self.list_namespaced_custom_object_with_http_info(group, version, namespace, plural, **kwargs)
return data
def list_namespaced_custom_object_with_http_info(self, group, version, namespace, plural, **kwargs):
"""
list or watch namespace scoped custom objects
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_namespaced_custom_object_with_http_info(group, version, namespace, plural, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str group: The custom resource's group name (required)
:param str version: The custom resource's version (required)
:param str namespace: The custom resource's namespace (required)
:param str plural: The custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications.
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['group', 'version', 'namespace', 'plural', 'pretty', 'label_selector', 'resource_version', 'timeout_seconds', 'watch']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_namespaced_custom_object" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'group' is set
if ('group' not in params) or (params['group'] is None):
raise ValueError("Missing the required parameter `group` when calling `list_namespaced_custom_object`")
# verify the required parameter 'version' is set
if ('version' not in params) or (params['version'] is None):
raise ValueError("Missing the required parameter `version` when calling `list_namespaced_custom_object`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `list_namespaced_custom_object`")
# verify the required parameter 'plural' is set
if ('plural' not in params) or (params['plural'] is None):
raise ValueError("Missing the required parameter `plural` when calling `list_namespaced_custom_object`")
collection_formats = {}
path_params = {}
if 'group' in params:
path_params['group'] = params['group']
if 'version' in params:
path_params['version'] = params['version']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
if 'plural' in params:
path_params['plural'] = params['plural']
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
if 'label_selector' in params:
query_params.append(('labelSelector', params['label_selector']))
if 'resource_version' in params:
query_params.append(('resourceVersion', params['resource_version']))
if 'timeout_seconds' in params:
query_params.append(('timeoutSeconds', params['timeout_seconds']))
if 'watch' in params:
query_params.append(('watch', params['watch']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/json;stream=watch'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api('/apis/{group}/{version}/namespaces/{namespace}/{plural}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_cluster_custom_object(self, group, version, plural, name, body, **kwargs):
"""
patch the specified cluster scoped custom object
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_cluster_custom_object(group, version, plural, name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str group: the custom resource's group (required)
:param str version: the custom resource's version (required)
:param str plural: the custom object's plural name. For TPRs this would be lowercase plural kind. (required)
:param str name: the custom object's name (required)
:param object body: The JSON schema of the Resource to patch. (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_cluster_custom_object_with_http_info(group, version, plural, name, body, **kwargs)
else:
(data) = self.patch_cluster_custom_object_with_http_info(group, version, plural, name, body, **kwargs)
return data
def patch_cluster_custom_object_with_http_info(self, group, version, plural, name, body, **kwargs):
"""
patch the specified cluster scoped custom object
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_cluster_custom_object_with_http_info(group, version, plural, name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str group: the custom resource's group (required)
:param str version: the custom resource's version (required)
:param str plural: the custom object's plural name. For TPRs this would be lowercase plural kind. (required)
:param str name: the custom object's name (required)
:param object body: The JSON schema of the Resource to patch. (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['group', 'version', 'plural', 'name', 'body']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_cluster_custom_object" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'group' is set
if ('group' not in params) or (params['group'] is None):
raise ValueError("Missing the required parameter `group` when calling `patch_cluster_custom_object`")
# verify the required parameter 'version' is set
if ('version' not in params) or (params['version'] is None):
raise ValueError("Missing the required parameter `version` when calling `patch_cluster_custom_object`")
# verify the required parameter 'plural' is set
if ('plural' not in params) or (params['plural'] is None):
raise ValueError("Missing the required parameter `plural` when calling `patch_cluster_custom_object`")
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `patch_cluster_custom_object`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_cluster_custom_object`")
collection_formats = {}
path_params = {}
if 'group' in params:
path_params['group'] = params['group']
if 'version' in params:
path_params['version'] = params['version']
if 'plural' in params:
path_params['plural'] = params['plural']
if 'name' in params:
path_params['name'] = params['name']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/merge-patch+json'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api('/apis/{group}/{version}/{plural}/{name}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_cluster_custom_object_scale(self, group, version, plural, name, body, **kwargs):
"""
partially update scale of the specified cluster scoped custom object
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_cluster_custom_object_scale(group, version, plural, name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str group: the custom resource's group (required)
:param str version: the custom resource's version (required)
:param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
:param str name: the custom object's name (required)
:param object body: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_cluster_custom_object_scale_with_http_info(group, version, plural, name, body, **kwargs)
else:
(data) = self.patch_cluster_custom_object_scale_with_http_info(group, version, plural, name, body, **kwargs)
return data
def patch_cluster_custom_object_scale_with_http_info(self, group, version, plural, name, body, **kwargs):
"""
partially update scale of the specified cluster scoped custom object
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_cluster_custom_object_scale_with_http_info(group, version, plural, name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str group: the custom resource's group (required)
:param str version: the custom resource's version (required)
:param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
:param str name: the custom object's name (required)
:param object body: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['group', 'version', 'plural', 'name', 'body']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_cluster_custom_object_scale" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'group' is set
if ('group' not in params) or (params['group'] is None):
raise ValueError("Missing the required parameter `group` when calling `patch_cluster_custom_object_scale`")
# verify the required parameter 'version' is set
if ('version' not in params) or (params['version'] is None):
raise ValueError("Missing the required parameter `version` when calling `patch_cluster_custom_object_scale`")
# verify the required parameter 'plural' is set
if ('plural' not in params) or (params['plural'] is None):
raise ValueError("Missing the required parameter `plural` when calling `patch_cluster_custom_object_scale`")
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `patch_cluster_custom_object_scale`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_cluster_custom_object_scale`")
collection_formats = {}
path_params = {}
if 'group' in params:
path_params['group'] = params['group']
if 'version' in params:
path_params['version'] = params['version']
if 'plural' in params:
path_params['plural'] = params['plural']
if 'name' in params:
path_params['name'] = params['name']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/merge-patch+json'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api('/apis/{group}/{version}/{plural}/{name}/scale', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_cluster_custom_object_status(self, group, version, plural, name, body, **kwargs):
"""
partially update status of the specified cluster scoped custom object
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_cluster_custom_object_status(group, version, plural, name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str group: the custom resource's group (required)
:param str version: the custom resource's version (required)
:param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
:param str name: the custom object's name (required)
:param object body: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_cluster_custom_object_status_with_http_info(group, version, plural, name, body, **kwargs)
else:
(data) = self.patch_cluster_custom_object_status_with_http_info(group, version, plural, name, body, **kwargs)
return data
def patch_cluster_custom_object_status_with_http_info(self, group, version, plural, name, body, **kwargs):
"""
partially update status of the specified cluster scoped custom object
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_cluster_custom_object_status_with_http_info(group, version, plural, name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str group: the custom resource's group (required)
:param str version: the custom resource's version (required)
:param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
:param str name: the custom object's name (required)
:param object body: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['group', 'version', 'plural', 'name', 'body']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_cluster_custom_object_status" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'group' is set
if ('group' not in params) or (params['group'] is None):
raise ValueError("Missing the required parameter `group` when calling `patch_cluster_custom_object_status`")
# verify the required parameter 'version' is set
if ('version' not in params) or (params['version'] is None):
raise ValueError("Missing the required parameter `version` when calling `patch_cluster_custom_object_status`")
# verify the required parameter 'plural' is set
if ('plural' not in params) or (params['plural'] is None):
raise ValueError("Missing the required parameter `plural` when calling `patch_cluster_custom_object_status`")
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `patch_cluster_custom_object_status`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_cluster_custom_object_status`")
collection_formats = {}
path_params = {}
if 'group' in params:
path_params['group'] = params['group']
if 'version' in params:
path_params['version'] = params['version']
if 'plural' in params:
path_params['plural'] = params['plural']
if 'name' in params:
path_params['name'] = params['name']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/merge-patch+json'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api('/apis/{group}/{version}/{plural}/{name}/status', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_namespaced_custom_object(self, group, version, namespace, plural, name, body, **kwargs):
"""
patch the specified namespace scoped custom object
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_namespaced_custom_object(group, version, namespace, plural, name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str group: the custom resource's group (required)
:param str version: the custom resource's version (required)
:param str namespace: The custom resource's namespace (required)
:param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
:param str name: the custom object's name (required)
:param object body: The JSON schema of the Resource to patch. (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_namespaced_custom_object_with_http_info(group, version, namespace, plural, name, body, **kwargs)
else:
(data) = self.patch_namespaced_custom_object_with_http_info(group, version, namespace, plural, name, body, **kwargs)
return data
def patch_namespaced_custom_object_with_http_info(self, group, version, namespace, plural, name, body, **kwargs):
"""
patch the specified namespace scoped custom object
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_namespaced_custom_object_with_http_info(group, version, namespace, plural, name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str group: the custom resource's group (required)
:param str version: the custom resource's version (required)
:param str namespace: The custom resource's namespace (required)
:param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
:param str name: the custom object's name (required)
:param object body: The JSON schema of the Resource to patch. (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['group', 'version', 'namespace', 'plural', 'name', 'body']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_namespaced_custom_object" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'group' is set
if ('group' not in params) or (params['group'] is None):
raise ValueError("Missing the required parameter `group` when calling `patch_namespaced_custom_object`")
# verify the required parameter 'version' is set
if ('version' not in params) or (params['version'] is None):
raise ValueError("Missing the required parameter `version` when calling `patch_namespaced_custom_object`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `patch_namespaced_custom_object`")
# verify the required parameter 'plural' is set
if ('plural' not in params) or (params['plural'] is None):
raise ValueError("Missing the required parameter `plural` when calling `patch_namespaced_custom_object`")
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `patch_namespaced_custom_object`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_namespaced_custom_object`")
collection_formats = {}
path_params = {}
if 'group' in params:
path_params['group'] = params['group']
if 'version' in params:
path_params['version'] = params['version']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
if 'plural' in params:
path_params['plural'] = params['plural']
if 'name' in params:
path_params['name'] = params['name']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/merge-patch+json'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api('/apis/{group}/{version}/namespaces/{namespace}/{plural}/{name}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_namespaced_custom_object_scale(self, group, version, namespace, plural, name, body, **kwargs):
"""
partially update scale of the specified namespace scoped custom object
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_namespaced_custom_object_scale(group, version, namespace, plural, name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str group: the custom resource's group (required)
:param str version: the custom resource's version (required)
:param str namespace: The custom resource's namespace (required)
:param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
:param str name: the custom object's name (required)
:param object body: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_namespaced_custom_object_scale_with_http_info(group, version, namespace, plural, name, body, **kwargs)
else:
(data) = self.patch_namespaced_custom_object_scale_with_http_info(group, version, namespace, plural, name, body, **kwargs)
return data
def patch_namespaced_custom_object_scale_with_http_info(self, group, version, namespace, plural, name, body, **kwargs):
"""
partially update scale of the specified namespace scoped custom object
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_namespaced_custom_object_scale_with_http_info(group, version, namespace, plural, name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str group: the custom resource's group (required)
:param str version: the custom resource's version (required)
:param str namespace: The custom resource's namespace (required)
:param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
:param str name: the custom object's name (required)
:param object body: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['group', 'version', 'namespace', 'plural', 'name', 'body']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_namespaced_custom_object_scale" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'group' is set
if ('group' not in params) or (params['group'] is None):
raise ValueError("Missing the required parameter `group` when calling `patch_namespaced_custom_object_scale`")
# verify the required parameter 'version' is set
if ('version' not in params) or (params['version'] is None):
raise ValueError("Missing the required parameter `version` when calling `patch_namespaced_custom_object_scale`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `patch_namespaced_custom_object_scale`")
# verify the required parameter 'plural' is set
if ('plural' not in params) or (params['plural'] is None):
raise ValueError("Missing the required parameter `plural` when calling `patch_namespaced_custom_object_scale`")
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `patch_namespaced_custom_object_scale`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_namespaced_custom_object_scale`")
collection_formats = {}
path_params = {}
if 'group' in params:
path_params['group'] = params['group']
if 'version' in params:
path_params['version'] = params['version']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
if 'plural' in params:
path_params['plural'] = params['plural']
if 'name' in params:
path_params['name'] = params['name']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/merge-patch+json'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api('/apis/{group}/{version}/namespaces/{namespace}/{plural}/{name}/scale', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_namespaced_custom_object_status(self, group, version, namespace, plural, name, body, **kwargs):
"""
partially update status of the specified namespace scoped custom object
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_namespaced_custom_object_status(group, version, namespace, plural, name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str group: the custom resource's group (required)
:param str version: the custom resource's version (required)
:param str namespace: The custom resource's namespace (required)
:param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
:param str name: the custom object's name (required)
:param object body: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_namespaced_custom_object_status_with_http_info(group, version, namespace, plural, name, body, **kwargs)
else:
(data) = self.patch_namespaced_custom_object_status_with_http_info(group, version, namespace, plural, name, body, **kwargs)
return data
def patch_namespaced_custom_object_status_with_http_info(self, group, version, namespace, plural, name, body, **kwargs):
"""
partially update status of the specified namespace scoped custom object
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_namespaced_custom_object_status_with_http_info(group, version, namespace, plural, name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str group: the custom resource's group (required)
:param str version: the custom resource's version (required)
:param str namespace: The custom resource's namespace (required)
:param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
:param str name: the custom object's name (required)
:param object body: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['group', 'version', 'namespace', 'plural', 'name', 'body']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_namespaced_custom_object_status" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'group' is set
if ('group' not in params) or (params['group'] is None):
raise ValueError("Missing the required parameter `group` when calling `patch_namespaced_custom_object_status`")
# verify the required parameter 'version' is set
if ('version' not in params) or (params['version'] is None):
raise ValueError("Missing the required parameter `version` when calling `patch_namespaced_custom_object_status`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `patch_namespaced_custom_object_status`")
# verify the required parameter 'plural' is set
if ('plural' not in params) or (params['plural'] is None):
raise ValueError("Missing the required parameter `plural` when calling `patch_namespaced_custom_object_status`")
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `patch_namespaced_custom_object_status`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_namespaced_custom_object_status`")
collection_formats = {}
path_params = {}
if 'group' in params:
path_params['group'] = params['group']
if 'version' in params:
path_params['version'] = params['version']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
if 'plural' in params:
path_params['plural'] = params['plural']
if 'name' in params:
path_params['name'] = params['name']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/merge-patch+json'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api('/apis/{group}/{version}/namespaces/{namespace}/{plural}/{name}/status', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def replace_cluster_custom_object(self, group, version, plural, name, body, **kwargs):
"""
replace the specified cluster scoped custom object
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.replace_cluster_custom_object(group, version, plural, name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str group: the custom resource's group (required)
:param str version: the custom resource's version (required)
:param str plural: the custom object's plural name. For TPRs this would be lowercase plural kind. (required)
:param str name: the custom object's name (required)
:param object body: The JSON schema of the Resource to replace. (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.replace_cluster_custom_object_with_http_info(group, version, plural, name, body, **kwargs)
else:
(data) = self.replace_cluster_custom_object_with_http_info(group, version, plural, name, body, **kwargs)
return data
def replace_cluster_custom_object_with_http_info(self, group, version, plural, name, body, **kwargs):
    """
    replace the specified cluster scoped custom object
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.replace_cluster_custom_object_with_http_info(group, version, plural, name, body, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str group: the custom resource's group (required)
    :param str version: the custom resource's version (required)
    :param str plural: the custom object's plural name. For TPRs this would be lowercase plural kind. (required)
    :param str name: the custom object's name (required)
    :param object body: The JSON schema of the Resource to replace. (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Generated OpenAPI client code: validate the call, build the request
    # pieces, then issue PUT /apis/{group}/{version}/{plural}/{name}.
    all_params = ['group', 'version', 'plural', 'name', 'body']
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() is captured before any other local variable is defined,
    # so `params` holds exactly self + the declared arguments + kwargs.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        # reject keyword arguments this endpoint does not accept
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method replace_cluster_custom_object" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'group' is set
    if ('group' not in params) or (params['group'] is None):
        raise ValueError("Missing the required parameter `group` when calling `replace_cluster_custom_object`")
    # verify the required parameter 'version' is set
    if ('version' not in params) or (params['version'] is None):
        raise ValueError("Missing the required parameter `version` when calling `replace_cluster_custom_object`")
    # verify the required parameter 'plural' is set
    if ('plural' not in params) or (params['plural'] is None):
        raise ValueError("Missing the required parameter `plural` when calling `replace_cluster_custom_object`")
    # verify the required parameter 'name' is set
    if ('name' not in params) or (params['name'] is None):
        raise ValueError("Missing the required parameter `name` when calling `replace_cluster_custom_object`")
    # verify the required parameter 'body' is set
    if ('body' not in params) or (params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `replace_cluster_custom_object`")
    collection_formats = {}
    # substitutions for the URL path template
    path_params = {}
    if 'group' in params:
        path_params['group'] = params['group']
    if 'version' in params:
        path_params['version'] = params['version']
    if 'plural' in params:
        path_params['plural'] = params['plural']
    if 'name' in params:
        path_params['name'] = params['name']
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['*/*'])
    # Authentication setting
    auth_settings = ['BearerToken']
    return self.api_client.call_api('/apis/{group}/{version}/{plural}/{name}', 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='object',
                                    auth_settings=auth_settings,
                                    async_req=params.get('async_req'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def replace_cluster_custom_object_scale(self, group, version, plural, name, body, **kwargs):
    """
    replace scale of the specified cluster scoped custom object
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.replace_cluster_custom_object_scale(group, version, plural, name, body, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str group: the custom resource's group (required)
    :param str version: the custom resource's version (required)
    :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
    :param str name: the custom object's name (required)
    :param object body: (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Request the bare payload from the delegate; both sync (data) and async
    # (request thread) results are returned to the caller unchanged.
    kwargs['_return_http_data_only'] = True
    return self.replace_cluster_custom_object_scale_with_http_info(
        group, version, plural, name, body, **kwargs)
def replace_cluster_custom_object_scale_with_http_info(self, group, version, plural, name, body, **kwargs):
    """
    replace scale of the specified cluster scoped custom object
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.replace_cluster_custom_object_scale_with_http_info(group, version, plural, name, body, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str group: the custom resource's group (required)
    :param str version: the custom resource's version (required)
    :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
    :param str name: the custom object's name (required)
    :param object body: (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Generated OpenAPI client code: validate the call, build the request
    # pieces, then issue PUT /apis/{group}/{version}/{plural}/{name}/scale.
    all_params = ['group', 'version', 'plural', 'name', 'body']
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() is captured before any other local variable is defined,
    # so `params` holds exactly self + the declared arguments + kwargs.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        # reject keyword arguments this endpoint does not accept
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method replace_cluster_custom_object_scale" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'group' is set
    if ('group' not in params) or (params['group'] is None):
        raise ValueError("Missing the required parameter `group` when calling `replace_cluster_custom_object_scale`")
    # verify the required parameter 'version' is set
    if ('version' not in params) or (params['version'] is None):
        raise ValueError("Missing the required parameter `version` when calling `replace_cluster_custom_object_scale`")
    # verify the required parameter 'plural' is set
    if ('plural' not in params) or (params['plural'] is None):
        raise ValueError("Missing the required parameter `plural` when calling `replace_cluster_custom_object_scale`")
    # verify the required parameter 'name' is set
    if ('name' not in params) or (params['name'] is None):
        raise ValueError("Missing the required parameter `name` when calling `replace_cluster_custom_object_scale`")
    # verify the required parameter 'body' is set
    if ('body' not in params) or (params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `replace_cluster_custom_object_scale`")
    collection_formats = {}
    # substitutions for the URL path template
    path_params = {}
    if 'group' in params:
        path_params['group'] = params['group']
    if 'version' in params:
        path_params['version'] = params['version']
    if 'plural' in params:
        path_params['plural'] = params['plural']
    if 'name' in params:
        path_params['name'] = params['name']
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['*/*'])
    # Authentication setting
    auth_settings = ['BearerToken']
    return self.api_client.call_api('/apis/{group}/{version}/{plural}/{name}/scale', 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='object',
                                    auth_settings=auth_settings,
                                    async_req=params.get('async_req'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def replace_cluster_custom_object_status(self, group, version, plural, name, body, **kwargs):
    """
    replace status of the specified cluster scoped custom object
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.replace_cluster_custom_object_status(group, version, plural, name, body, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str group: the custom resource's group (required)
    :param str version: the custom resource's version (required)
    :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
    :param str name: the custom object's name (required)
    :param object body: (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Request the bare payload from the delegate; both sync (data) and async
    # (request thread) results are returned to the caller unchanged.
    kwargs['_return_http_data_only'] = True
    return self.replace_cluster_custom_object_status_with_http_info(
        group, version, plural, name, body, **kwargs)
def replace_cluster_custom_object_status_with_http_info(self, group, version, plural, name, body, **kwargs):
    """
    replace status of the specified cluster scoped custom object
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.replace_cluster_custom_object_status_with_http_info(group, version, plural, name, body, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str group: the custom resource's group (required)
    :param str version: the custom resource's version (required)
    :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
    :param str name: the custom object's name (required)
    :param object body: (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Generated OpenAPI client code: validate the call, build the request
    # pieces, then issue PUT /apis/{group}/{version}/{plural}/{name}/status.
    all_params = ['group', 'version', 'plural', 'name', 'body']
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() is captured before any other local variable is defined,
    # so `params` holds exactly self + the declared arguments + kwargs.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        # reject keyword arguments this endpoint does not accept
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method replace_cluster_custom_object_status" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'group' is set
    if ('group' not in params) or (params['group'] is None):
        raise ValueError("Missing the required parameter `group` when calling `replace_cluster_custom_object_status`")
    # verify the required parameter 'version' is set
    if ('version' not in params) or (params['version'] is None):
        raise ValueError("Missing the required parameter `version` when calling `replace_cluster_custom_object_status`")
    # verify the required parameter 'plural' is set
    if ('plural' not in params) or (params['plural'] is None):
        raise ValueError("Missing the required parameter `plural` when calling `replace_cluster_custom_object_status`")
    # verify the required parameter 'name' is set
    if ('name' not in params) or (params['name'] is None):
        raise ValueError("Missing the required parameter `name` when calling `replace_cluster_custom_object_status`")
    # verify the required parameter 'body' is set
    if ('body' not in params) or (params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `replace_cluster_custom_object_status`")
    collection_formats = {}
    # substitutions for the URL path template
    path_params = {}
    if 'group' in params:
        path_params['group'] = params['group']
    if 'version' in params:
        path_params['version'] = params['version']
    if 'plural' in params:
        path_params['plural'] = params['plural']
    if 'name' in params:
        path_params['name'] = params['name']
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['*/*'])
    # Authentication setting
    auth_settings = ['BearerToken']
    return self.api_client.call_api('/apis/{group}/{version}/{plural}/{name}/status', 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='object',
                                    auth_settings=auth_settings,
                                    async_req=params.get('async_req'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def replace_namespaced_custom_object(self, group, version, namespace, plural, name, body, **kwargs):
    """
    replace the specified namespace scoped custom object
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.replace_namespaced_custom_object(group, version, namespace, plural, name, body, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str group: the custom resource's group (required)
    :param str version: the custom resource's version (required)
    :param str namespace: The custom resource's namespace (required)
    :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
    :param str name: the custom object's name (required)
    :param object body: The JSON schema of the Resource to replace. (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the (data, status, headers) tuple down to the data alone;
    # in async mode the delegate returns the request thread instead, and we
    # hand that back unchanged.
    kwargs['_return_http_data_only'] = True
    return self.replace_namespaced_custom_object_with_http_info(
        group, version, namespace, plural, name, body, **kwargs)
def replace_namespaced_custom_object_with_http_info(self, group, version, namespace, plural, name, body, **kwargs):
    """
    replace the specified namespace scoped custom object
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.replace_namespaced_custom_object_with_http_info(group, version, namespace, plural, name, body, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str group: the custom resource's group (required)
    :param str version: the custom resource's version (required)
    :param str namespace: The custom resource's namespace (required)
    :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
    :param str name: the custom object's name (required)
    :param object body: The JSON schema of the Resource to replace. (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Generated OpenAPI client code: validate the call, build the request
    # pieces, then issue
    # PUT /apis/{group}/{version}/namespaces/{namespace}/{plural}/{name}.
    all_params = ['group', 'version', 'namespace', 'plural', 'name', 'body']
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() is captured before any other local variable is defined,
    # so `params` holds exactly self + the declared arguments + kwargs.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        # reject keyword arguments this endpoint does not accept
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method replace_namespaced_custom_object" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'group' is set
    if ('group' not in params) or (params['group'] is None):
        raise ValueError("Missing the required parameter `group` when calling `replace_namespaced_custom_object`")
    # verify the required parameter 'version' is set
    if ('version' not in params) or (params['version'] is None):
        raise ValueError("Missing the required parameter `version` when calling `replace_namespaced_custom_object`")
    # verify the required parameter 'namespace' is set
    if ('namespace' not in params) or (params['namespace'] is None):
        raise ValueError("Missing the required parameter `namespace` when calling `replace_namespaced_custom_object`")
    # verify the required parameter 'plural' is set
    if ('plural' not in params) or (params['plural'] is None):
        raise ValueError("Missing the required parameter `plural` when calling `replace_namespaced_custom_object`")
    # verify the required parameter 'name' is set
    if ('name' not in params) or (params['name'] is None):
        raise ValueError("Missing the required parameter `name` when calling `replace_namespaced_custom_object`")
    # verify the required parameter 'body' is set
    if ('body' not in params) or (params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `replace_namespaced_custom_object`")
    collection_formats = {}
    # substitutions for the URL path template
    path_params = {}
    if 'group' in params:
        path_params['group'] = params['group']
    if 'version' in params:
        path_params['version'] = params['version']
    if 'namespace' in params:
        path_params['namespace'] = params['namespace']
    if 'plural' in params:
        path_params['plural'] = params['plural']
    if 'name' in params:
        path_params['name'] = params['name']
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['*/*'])
    # Authentication setting
    auth_settings = ['BearerToken']
    return self.api_client.call_api('/apis/{group}/{version}/namespaces/{namespace}/{plural}/{name}', 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='object',
                                    auth_settings=auth_settings,
                                    async_req=params.get('async_req'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def replace_namespaced_custom_object_scale(self, group, version, namespace, plural, name, body, **kwargs):
    """
    replace scale of the specified namespace scoped custom object
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.replace_namespaced_custom_object_scale(group, version, namespace, plural, name, body, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str group: the custom resource's group (required)
    :param str version: the custom resource's version (required)
    :param str namespace: The custom resource's namespace (required)
    :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
    :param str name: the custom object's name (required)
    :param object body: (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Request the bare payload from the delegate; both sync (data) and async
    # (request thread) results are returned to the caller unchanged.
    kwargs['_return_http_data_only'] = True
    return self.replace_namespaced_custom_object_scale_with_http_info(
        group, version, namespace, plural, name, body, **kwargs)
def replace_namespaced_custom_object_scale_with_http_info(self, group, version, namespace, plural, name, body, **kwargs):
    """
    replace scale of the specified namespace scoped custom object
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.replace_namespaced_custom_object_scale_with_http_info(group, version, namespace, plural, name, body, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str group: the custom resource's group (required)
    :param str version: the custom resource's version (required)
    :param str namespace: The custom resource's namespace (required)
    :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
    :param str name: the custom object's name (required)
    :param object body: (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Generated OpenAPI client code: validate the call, build the request
    # pieces, then issue
    # PUT /apis/{group}/{version}/namespaces/{namespace}/{plural}/{name}/scale.
    all_params = ['group', 'version', 'namespace', 'plural', 'name', 'body']
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() is captured before any other local variable is defined,
    # so `params` holds exactly self + the declared arguments + kwargs.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        # reject keyword arguments this endpoint does not accept
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method replace_namespaced_custom_object_scale" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'group' is set
    if ('group' not in params) or (params['group'] is None):
        raise ValueError("Missing the required parameter `group` when calling `replace_namespaced_custom_object_scale`")
    # verify the required parameter 'version' is set
    if ('version' not in params) or (params['version'] is None):
        raise ValueError("Missing the required parameter `version` when calling `replace_namespaced_custom_object_scale`")
    # verify the required parameter 'namespace' is set
    if ('namespace' not in params) or (params['namespace'] is None):
        raise ValueError("Missing the required parameter `namespace` when calling `replace_namespaced_custom_object_scale`")
    # verify the required parameter 'plural' is set
    if ('plural' not in params) or (params['plural'] is None):
        raise ValueError("Missing the required parameter `plural` when calling `replace_namespaced_custom_object_scale`")
    # verify the required parameter 'name' is set
    if ('name' not in params) or (params['name'] is None):
        raise ValueError("Missing the required parameter `name` when calling `replace_namespaced_custom_object_scale`")
    # verify the required parameter 'body' is set
    if ('body' not in params) or (params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `replace_namespaced_custom_object_scale`")
    collection_formats = {}
    # substitutions for the URL path template
    path_params = {}
    if 'group' in params:
        path_params['group'] = params['group']
    if 'version' in params:
        path_params['version'] = params['version']
    if 'namespace' in params:
        path_params['namespace'] = params['namespace']
    if 'plural' in params:
        path_params['plural'] = params['plural']
    if 'name' in params:
        path_params['name'] = params['name']
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['*/*'])
    # Authentication setting
    auth_settings = ['BearerToken']
    return self.api_client.call_api('/apis/{group}/{version}/namespaces/{namespace}/{plural}/{name}/scale', 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='object',
                                    auth_settings=auth_settings,
                                    async_req=params.get('async_req'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def replace_namespaced_custom_object_status(self, group, version, namespace, plural, name, body, **kwargs):
    """
    replace status of the specified namespace scoped custom object
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.replace_namespaced_custom_object_status(group, version, namespace, plural, name, body, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str group: the custom resource's group (required)
    :param str version: the custom resource's version (required)
    :param str namespace: The custom resource's namespace (required)
    :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
    :param str name: the custom object's name (required)
    :param object body: (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Request the bare payload from the delegate; both sync (data) and async
    # (request thread) results are returned to the caller unchanged.
    kwargs['_return_http_data_only'] = True
    return self.replace_namespaced_custom_object_status_with_http_info(
        group, version, namespace, plural, name, body, **kwargs)
def replace_namespaced_custom_object_status_with_http_info(self, group, version, namespace, plural, name, body, **kwargs):
    """
    replace status of the specified namespace scoped custom object
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.replace_namespaced_custom_object_status_with_http_info(group, version, namespace, plural, name, body, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str group: the custom resource's group (required)
    :param str version: the custom resource's version (required)
    :param str namespace: The custom resource's namespace (required)
    :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
    :param str name: the custom object's name (required)
    :param object body: (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Generated OpenAPI client code: validate the call, build the request
    # pieces, then issue
    # PUT /apis/{group}/{version}/namespaces/{namespace}/{plural}/{name}/status.
    all_params = ['group', 'version', 'namespace', 'plural', 'name', 'body']
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() is captured before any other local variable is defined,
    # so `params` holds exactly self + the declared arguments + kwargs.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        # reject keyword arguments this endpoint does not accept
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method replace_namespaced_custom_object_status" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'group' is set
    if ('group' not in params) or (params['group'] is None):
        raise ValueError("Missing the required parameter `group` when calling `replace_namespaced_custom_object_status`")
    # verify the required parameter 'version' is set
    if ('version' not in params) or (params['version'] is None):
        raise ValueError("Missing the required parameter `version` when calling `replace_namespaced_custom_object_status`")
    # verify the required parameter 'namespace' is set
    if ('namespace' not in params) or (params['namespace'] is None):
        raise ValueError("Missing the required parameter `namespace` when calling `replace_namespaced_custom_object_status`")
    # verify the required parameter 'plural' is set
    if ('plural' not in params) or (params['plural'] is None):
        raise ValueError("Missing the required parameter `plural` when calling `replace_namespaced_custom_object_status`")
    # verify the required parameter 'name' is set
    if ('name' not in params) or (params['name'] is None):
        raise ValueError("Missing the required parameter `name` when calling `replace_namespaced_custom_object_status`")
    # verify the required parameter 'body' is set
    if ('body' not in params) or (params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `replace_namespaced_custom_object_status`")
    collection_formats = {}
    # substitutions for the URL path template
    path_params = {}
    if 'group' in params:
        path_params['group'] = params['group']
    if 'version' in params:
        path_params['version'] = params['version']
    if 'namespace' in params:
        path_params['namespace'] = params['namespace']
    if 'plural' in params:
        path_params['plural'] = params['plural']
    if 'name' in params:
        path_params['name'] = params['name']
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['*/*'])
    # Authentication setting
    auth_settings = ['BearerToken']
    return self.api_client.call_api('/apis/{group}/{version}/namespaces/{namespace}/{plural}/{name}/status', 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='object',
                                    auth_settings=auth_settings,
                                    async_req=params.get('async_req'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
|
viktordremio/vds_refresh_by_path | src/vds_reflection_refresh_by_path.py | import requests
import re

# INPUT:
dataset_path = 'test."concat test limit.20"'  # copy vds path from UI, click Copy Path
dremio_url = 'http://localhost:9047'  # Dremio server URL
user = "user"  # user name in dremio
pwd = "<PASSWORD>"  # password in dremio (placeholder -- fill in before running)
verify_ssl = False  # SSL enabled/disabled for REST calls
#######################
# Authorization-header prefix; login() appends the session token to it
# (its comment says "_dremio is prepended to the token").
# NOTE(review): the original line was corrupted ('token = <PASSWORD>"', a
# syntax error); "_dremio" is reconstructed from that comment -- confirm.
token = "_dremio"
content_type = "application/json"
def recursive(datasets):
    """Walk each dataset's dependency graph and refresh upstream PDS reflections.

    For every entry in *datasets* (dicts with an "id" key), fetch its catalog
    dependency graph from Dremio and hand the parent list to graph(), which
    refreshes physical datasets and recurses into virtual ones.
    """
    headers = {'Content-Type': 'application/json', 'Authorization': token}
    for dataset in datasets:
        response = requests.get(
            dremio_url + '/api/v3/catalog/' + dataset['id'] + '/graph',
            headers=headers,
            # was a hard-coded verify=False; use the module-wide SSL setting
            # for consistency with every other REST call in this script
            verify=verify_ssl)
        graph(response.json()['parents'])
def graph(parents):
    # Partition a dataset's parents: virtual datasets (VDS) are collected for
    # further recursion, while a physical dataset (PDS) triggers a refresh.
    datasets=[]
    for parent in parents:
        if (parent['datasetType']=='VIRTUAL'):
            datasets.append({"id": parent['id']})
        else:
            refresh_pds(parent["id"])
            # NOTE(review): this break stops the scan at the FIRST physical
            # parent, so any parents listed after it (virtual or physical)
            # are skipped entirely -- confirm this is intentional.
            break
    recursive(datasets)
def refresh_pds(id):
    """Trigger a metadata/reflection refresh for the physical dataset *id*.

    Posts to Dremio's catalog refresh endpoint. Prints "done" on completion;
    SSL failures are reported on stdout instead of being raised.
    """
    request_headers = {'Content-Type': 'application/json', 'Authorization': token}
    refresh_url = dremio_url + '/api/v3/catalog/' + id + '/refresh'
    try:
        requests.post(refresh_url, headers=request_headers, data="", verify=verify_ssl)
        print("done")
    except requests.exceptions.SSLError:
        print("SSLError")
def get_vds_id_by_path(path):
    """Look up a dataset in the Dremio catalog by path.

    :param path: slash-separated, URL-escaped path as produced by
        parse_dataset_path(), e.g. "/space/folder/name".
    :return: dict with keys "res" (bool success flag), "id" and "type"
        (both None on failure).
    """
    headers = {'Content-Type': 'application/json', 'Authorization': token}
    try:
        # was "response = response = requests.get(...)": the redundant double
        # assignment is removed, and .json() is parsed once instead of twice
        response = requests.get(dremio_url + '/api/v3/catalog/by-path' + path,
                                headers=headers, verify=verify_ssl)
        payload = response.json()
        return {"res": True, "id": payload["id"], "type": payload["type"]}
    except requests.exceptions.SSLError:
        print("SSLError")
        return {"res": False, "id": None, "type": None}
def parse_dataset_path(dataset_path):
    """Convert a dotted Dremio dataset path into a URL path-segment string.

    Double-quoted components may contain dots; the quotes are stripped and
    spaces are replaced with %20. Example: 'a."b c"' -> '/a/b%20c'.
    """
    components = re.findall('"[^"]*"|[^.]+', dataset_path)
    cleaned = [part.replace('"', '').replace(' ', '%20') for part in components]
    return ''.join('/' + part for part in cleaned)
def login():
    """Authenticate against Dremio.

    :return: dict {"res": bool, "token": str or None}; "token" is the raw
        session token (the caller prepends the "_dremio" prefix).
    """
    import json  # local import: this module does not import json at top level
    headers = {'Content-Type': content_type}
    # was built by string concatenation, which produces invalid JSON when the
    # credentials contain quotes or backslashes; json.dumps escapes correctly
    data = json.dumps({"userName": user, "password": pwd})
    try:
        response = requests.post(dremio_url + '/apiv2/login', headers=headers,
                                 data=data, verify=verify_ssl)
        # _dremio is prepended to the token by the caller
        return {"res": True, "token": response.json()['token']}
    except requests.exceptions.SSLError:
        print("SSLError")
        return {"res": False, "token": None}
if __name__ == "__main__":
    # Log in, resolve the configured dataset path to a catalog entry, then
    # refresh every physical dataset reachable through its dependency graph.
    token_obj = login()
    datasets = []
    if (token_obj["res"]):
        token = token + token_obj["token"]
        path = parse_dataset_path(dataset_path)
        # was calling get_vds_id_by_path(path) twice and storing the *id*
        # under the "type" key; resolve once and use the real "type" field
        dataset_obj = get_vds_id_by_path(path)
        datasets.append({"id": dataset_obj["id"], "type": dataset_obj["type"]})
        recursive(datasets)
|
viktordremio/vds_refresh_by_path | src/refresh_only_one_vds.py | <gh_stars>0
import requests
import re
import json
# INPUT:
# copy vds path from UI, click Copy Path
dataset_path = 'test.delays'  # dotted dataset path as copied from the Dremio UI
dremio_url = 'http://localhost:9047'  # url dremio
user = "viktor"  # user name in dremio
pwd = "<PASSWORD>"  # password in dremio
verify_ssl = False  # SSL enabled/disabled for REST calls
#######################
# Dremio expects the Authorization header as "_dremio<token>"; the login
# token is appended to this prefix in the __main__ block below.
token = "_dremio"  # NOTE(review): original line was mangled by redaction — confirm prefix
content_type = "application/json"
def reenable_reflection(refl_json):
    """Force a reflection rebuild by disabling it and enabling it again.

    refl_json: a reflection dict as returned by GET /api/v3/reflection.
    Side effects: three REST calls (PUT disable, GET refetch, PUT enable).
    """
    headers = {'Content-Type': 'application/json', 'Authorization': token}
    # Fixed: work on a copy so the caller's reflection dict is not mutated
    # by the pop() calls below.
    data = dict(refl_json)
    # Read-only bookkeeping fields must not be sent back on PUT.
    key_to_remove = ['updatedAt', 'createdAt',
                     'currentSizeBytes', 'totalSizeBytes']
    for key in key_to_remove:
        data.pop(key)
    refl_id = data['id']
    data['enabled'] = False
    # disable reflection
    requests.put(dremio_url + '/api/v3/reflection/' + refl_id,
                 headers=headers, data=json.dumps(data), verify=verify_ssl)
    # get new tag version for update (Dremio rejects a PUT with a stale tag)
    res = requests.get(
        dremio_url + '/api/v3/reflection/' + refl_id, headers=headers, verify=verify_ssl)
    data = res.json()
    for key in key_to_remove:
        data.pop(key)
    data['enabled'] = True
    # enable reflection, will trigger an update
    requests.put(dremio_url + '/api/v3/reflection/' + refl_id,
                 headers=headers, data=json.dumps(data), verify=verify_ssl)
def get_vds_id_by_path(path):
    """Resolve a catalog entry by its URL path.

    Returns {"res": bool, "id": ..., "type": ...}.  On SSL failure "res"
    is False, "id" is None and "type" carries the exception object.
    """
    headers = {'Content-Type': 'application/json', 'Authorization': token}
    try:
        body = requests.get(dremio_url + '/api/v3/catalog/by-path' + path,
                            headers=headers, verify=verify_ssl).json()
        return {"res": True, "id": body["id"], "type": body["type"]}
    except requests.exceptions.SSLError as e:
        print("SSLError")
        return {"res": False, "id": None, "type": e}
def get_all_reflections():
    """Return the raw response of GET /api/v3/reflection (all reflections)."""
    headers = {'Content-Type': 'application/json', 'Authorization': token}
    url = dremio_url + '/api/v3/reflection'
    return requests.get(url, headers=headers, verify=verify_ssl)
def parse_dataset_path(dataset_path):
    """Turn a dotted dataset path into a slash-separated URL path.

    Double-quoted components keep their embedded dots; quotes are removed
    and spaces become '%20' so the result is URL-safe.
    """
    segments = re.findall('"[^"]*"|[^.]+', dataset_path)
    url_path = ""
    for segment in segments:
        url_path += "/" + segment.replace('"', '').replace(' ', '%20')
    return url_path
def login():
    """Authenticate against Dremio.

    Returns {"res": bool, "token": ...}; on SSL failure "res" is False and
    "token" carries the exception object.
    """
    headers = {'Content-Type': content_type}
    # Fixed: the original line was syntactically broken (mangled password
    # placeholder).  Build the payload with json.dumps so special characters
    # in the credentials are escaped correctly.
    data = json.dumps({"userName": user, "password": pwd})
    token_obj = {}
    try:
        response = requests.post(
            dremio_url + '/apiv2/login', headers=headers, data=data, verify=verify_ssl)
        token = response.json()['token']  # _dremio is prepended to the token
        token_obj = {"res": True, "token": token}
        return token_obj
    except requests.exceptions.SSLError as e:
        print("SSLError")
        token_obj = {"res": False, "token": e}
        return token_obj
if __name__ == "__main__":
    token_obj = login()
    if token_obj["res"]:
        # Dremio expects the "_dremio<token>" form in the Authorization header.
        token = token + token_obj["token"]
        dataset_id = get_vds_id_by_path(parse_dataset_path(dataset_path))['id']
        reflections = get_all_reflections().json()
        # Refresh every enabled reflection that belongs to the target dataset.
        for reflection in reflections['data']:
            if dataset_id == reflection['datasetId'] and reflection['enabled']:
                reenable_reflection(reflection)
|
cyang-kth/xyconvert | xyconvert/xyconvert.py | """
Created on Nov 16, 2020
@author: <NAME>
Convert/project xy coordinates in numpy between WGS-84, GCJ-02 and BD-09 system
"""
import numpy as np
import pyproj
a = 6378245.0  # semi-major axis of the reference ellipsoid used by GCJ-02 (metres)
ee = 0.006693421883570923  # first eccentricity squared, ee = (a*a - b*b) / (a*a)
# f = 0.00335233
# b = a * (1 - f)
# ee = (a*a - b*b)/(a*a)
def assert_xy(xy):
    """Validate that *xy* is a 2-D numpy array with exactly two columns (lng, lat)."""
    # The dimension check must run first: a 1-D input has no shape[1].
    np.testing.assert_equal(xy.ndim, 2, "xy should be two dimension")
    np.testing.assert_equal(xy.shape[1], 2, "xy should contain lng,lat only")
def proj(xy, from_srid, to_srid):
    """Project xy coordinates
    Args:
        xy: a 2D numpy array storing coordinates (lng,lat) in shape of (N,2)
        from_srid: an integer representing the input srid
        to_srid: an integer representing the output srid
    Returns:
        a 2D numpy array with projected coordinates (lng,lat) in shape of (N,2)
    """
    assert_xy(xy)
    # Fixed: pyproj.Proj(init=...) and pyproj.transform() are deprecated and
    # removed in pyproj 3.  Transformer.from_crs with always_xy=True keeps the
    # traditional (lng, lat) axis order the old init= style used.
    transformer = pyproj.Transformer.from_crs(
        "epsg:{}".format(from_srid), "epsg:{}".format(to_srid), always_xy=True)
    fx, fy = transformer.transform(xy[:, 0], xy[:, 1])
    return np.dstack([fx, fy])[0]
def __transformlat(lng, lat):
    """Latitude offset polynomial of the WGS-84 -> GCJ-02 distortion.

    lng, lat: numpy arrays already shifted by (-105.0, -35.0), the reference
    point of the obfuscation formula (see the call sites in wgs2gcj/gcj2wgs).
    Returns the raw latitude offset before ellipsoidal scaling.
    """
    # Base polynomial plus layered sine harmonics; the constants are the
    # published GCJ-02 obfuscation coefficients.
    ret = -100.0 + 2.0 * lng + 3.0 * lat + 0.2 * lat * lat + 0.1 * lng * lat + 0.2 * np.sqrt(np.abs(lng))
    ret = ret + (20.0 * np.sin(6.0 * lng * np.pi) + 20.0 * np.sin(2.0 * lng * np.pi)) * 2.0 / 3.0
    ret = ret + (20.0 * np.sin(lat * np.pi) + 40.0 * np.sin(lat / 3.0 * np.pi)) * 2.0 / 3.0
    ret = ret + (160.0 * np.sin(lat / 12.0 * np.pi) + 320.0 * np.sin(lat * np.pi / 30.0)) * 2.0 / 3.0
    return ret
def __transformlng(lng, lat):
    """Longitude offset polynomial of the WGS-84 -> GCJ-02 distortion.

    lng, lat: numpy arrays already shifted by (-105.0, -35.0), the reference
    point of the obfuscation formula (see the call sites in wgs2gcj/gcj2wgs).
    Returns the raw longitude offset before ellipsoidal scaling.
    """
    # Consistency fix: use np.abs like __transformlat (builtin abs dispatches
    # to the same ufunc on arrays, so behavior is identical).
    ret = 300.0 + lng + 2.0 * lat + 0.1 * lng * lng + 0.1 * lng * lat + 0.1 * np.sqrt(np.abs(lng))
    ret = ret + (20.0 * np.sin(6.0 * lng * np.pi) + 20.0 * np.sin(2.0 * lng * np.pi)) * 2.0 / 3.0
    ret = ret + (20.0 * np.sin(lng * np.pi) + 40.0 * np.sin(lng / 3.0 * np.pi)) * 2.0 / 3.0
    ret = ret + (150.0 * np.sin(lng / 12.0 * np.pi) + 300.0 * np.sin(lng * np.pi / 30.0)) * 2.0 / 3.0
    return ret
def wgs2gcj(xy):
    """Convert xy coordinates in wgs84 to GCJ02

    Args:
        xy: a 2D numpy array storing wgs84 coordinates (lng,lat) in shape of (N,2).
    Returns:
        a 2D numpy array storing GCJ02 coordinates (lng,lat) in shape of (N,2).

    NOTE(review): no "outside China" bypass is applied — every point is
    distorted, even ones the official algorithm would leave unchanged.
    """
    assert_xy(xy)
    wgslng = xy[:,0]
    wgslat = xy[:,1]
    # Raw offsets from the obfuscation polynomials, evaluated relative to
    # the (105, 35) reference point.
    dlat = __transformlat(wgslng - 105.0, wgslat - 35.0)
    dlng = __transformlng(wgslng - 105.0, wgslat - 35.0)
    radlat = wgslat/180.0 * np.pi
    # Scale the offsets by the local meridian/parallel radii of the ellipsoid.
    magic = np.sin(radlat)
    magic = 1 - ee*magic*magic
    sqrtMagic = np.sqrt(magic)
    dlat = (dlat * 180.0)/((a * (1-ee)) / (magic*sqrtMagic) * np.pi)
    dlng = (dlng * 180.0)/(a/sqrtMagic * np.cos(radlat) * np.pi)
    gcjlat = wgslat + dlat
    gcjlng = wgslng + dlng
    return np.vstack([gcjlng, gcjlat]).T
def gcj2wgs(xy):
    """Convert xy coordinates in GCJ02 to WGS84

    Args:
        xy: a 2D numpy array storing GCJ02 coordinates (lng,lat) in shape of (N,2).
    Returns:
        a 2D numpy array storing WGS84 coordinates (lng,lat) in shape of (N,2)

    Approximate inverse: the forward distortion is evaluated at the GCJ
    point and subtracted (lng*2 - mglng), i.e. a single correction pass
    rather than an iterative solve.
    """
    assert_xy(xy)
    lng = xy[:,0]
    lat = xy[:,1]
    # Same offset computation as wgs2gcj, but applied at the GCJ point.
    dlat = __transformlat(lng - 105.0, lat - 35.0)
    dlng = __transformlng(lng - 105.0, lat - 35.0)
    radlat = lat / 180.0 * np.pi
    magic = np.sin(radlat)
    magic = 1 - ee * magic * magic
    sqrtmagic = np.sqrt(magic)
    dlat = (dlat * 180.0) / ((a * (1 - ee)) / (magic * sqrtmagic) * np.pi)
    dlng = (dlng * 180.0) / (a / sqrtmagic * np.cos(radlat) * np.pi)
    # mg* is the point re-distorted forward; subtracting it twice from the
    # input approximates the original WGS-84 coordinates.
    mglat = lat + dlat
    mglng = lng + dlng
    return np.vstack([lng * 2 - mglng, lat * 2 - mglat]).T
def gcj2bd(xy):
    """Convert xy coordinates in GCJ02 to BD09

    Args:
        xy: a 2D numpy array storing GCJ02 coordinates (lng,lat) in shape of (N,2).
    Returns:
        a 2D numpy array storing BD09 coordinates (lng,lat) in shape of (N,2)
    """
    assert_xy(xy)
    lng = xy[:, 0]
    lat = xy[:, 1]
    # BD-09 applies a small polar perturbation on top of GCJ-02.
    x_pi = np.pi * 3000.0 / 180.0
    z = np.sqrt(lng * lng + lat * lat) + 0.00002 * np.sin(lat * x_pi)
    theta = np.arctan2(lat, lng) + 0.000003 * np.cos(lng * x_pi)
    bd_lng = z * np.cos(theta) + 0.0065
    bd_lat = z * np.sin(theta) + 0.006
    return np.vstack([bd_lng, bd_lat]).T
def bd2gcj(xy):
    """Convert xy coordinates in BD09 to GCJ02

    Args:
        xy: a 2D numpy array storing BD09 coordinates (lng,lat) in shape of (N,2).
    Returns:
        a 2D numpy array storing GCJ02 coordinates (lng,lat) in shape of (N,2)
    """
    assert_xy(xy)
    # Undo the BD-09 constant shift, then invert the polar perturbation.
    x_pi = np.pi * 3000.0 / 180.0
    x = xy[:, 0] - 0.0065
    y = xy[:, 1] - 0.006
    z = np.sqrt(x * x + y * y) - 0.00002 * np.sin(y * x_pi)
    theta = np.arctan2(y, x) - 0.000003 * np.cos(x * x_pi)
    return np.vstack([z * np.cos(theta), z * np.sin(theta)]).T
def wgs2bd(xy):
    """Convert xy coordinates in WGS84 to BD09

    Args:
        xy: a 2D numpy array storing WGS84 coordinates (lng,lat) in shape of (N,2).
    Returns:
        a 2D numpy array storing BD09 coordinates (lng,lat) in shape of (N,2)
    """
    assert_xy(xy)
    # Composition: WGS-84 -> GCJ-02 -> BD-09.
    gcj = wgs2gcj(xy)
    return gcj2bd(gcj)
def bd2wgs(xy):
    """Convert xy coordinates in BD09 to WGS84

    Args:
        xy: a 2D numpy array storing BD09 coordinates (lng,lat) in shape of (N,2).
    Returns:
        a 2D numpy array storing WGS84 coordinates (lng,lat) in shape of (N,2)
    """
    assert_xy(xy)
    # Composition: BD-09 -> GCJ-02 -> WGS-84.
    gcj = bd2gcj(xy)
    return gcj2wgs(gcj)
|
cyang-kth/xyconvert | setup.py | <reponame>cyang-kth/xyconvert
from setuptools import setup

# Read the long description with an explicit encoding, closing the file
# promptly instead of leaking the handle from an inline open().read().
with open('README.md', encoding='utf-8') as readme_file:
    long_description = readme_file.read()

setup(
    name='xyconvert',
    version='0.1.2',
    package_dir={'': 'xyconvert'},  # Optional
    py_modules=["xyconvert"],
    license='MIT',
    description='Convert xy coordinates',
    long_description=long_description,
    long_description_content_type="text/markdown",
    install_requires=['numpy', 'pyproj'],
    url='https://github.com/cyang-kth/xyconvert',
    author='<NAME>',
    author_email='<EMAIL>'
)
|
giltwizy/qrcode-generator | qrcode-generator.py | ###############################################
# program: qrcode-generator.py
# author: <NAME>
# version: 1.1
# date: 10 July 2020
###############################################
# importing qrcode module
import qrcode

# URL to encode and the name of the PNG file to write.
site = "https://github.com/giltwizy"
filename = "myqr.png"

# Build the QR image and save it to disk.
image = qrcode.make(site)
image.save(filename)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.