max_stars_repo_path
stringlengths 3
269
| max_stars_repo_name
stringlengths 4
119
| max_stars_count
int64 0
191k
| id
stringlengths 1
7
| content
stringlengths 6
1.05M
| score
float64 0.23
5.13
| int_score
int64 0
5
|
|---|---|---|---|---|---|---|
networks/isonetwork.py
|
andrewcpotter/holopy
| 1
|
12774451
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Oct 11 14:03:30 2020
@author: acpotter
"""
#%% -- IMPORTS --
import sys
sys.path.append("..") # import one subdirectory up in files
# external packages
import numpy as np
import qiskit as qk
import networkx as nx
#import tenpy
# custom things
#import mps
#%%
class ParamCircuit(object):
    """
    Parameterized circuit: a circuit object together with the names of
    its free parameters.

    Base class; framework-specific subclasses (e.g. QKParamCircuit)
    specialize the behavior.
    """

    def __init__(self, circ, param_names):
        # circ: underlying circuit object (framework-specific)
        # param_names: list of parameter names/objects appearing in circ
        self.circ = circ
        self.param_names = param_names

    def bind_parameters(self, params):
        """
        Resolve the circuit's free parameters to numerical values.

        inputs:
            params: dict mapping parameter (name/object) -> value
        outputs:
            circuit with parameters bound

        Fix: qiskit's QuantumCircuit.bind_parameters returns a NEW bound
        circuit rather than mutating in place; the original discarded
        that return value and handed back the still-unbound self.circ.
        Returning the bound copy matches the QKParamCircuit subclass.
        """
        return self.circ.bind_parameters(params)
class QKParamCircuit(ParamCircuit):
    """
    ParamCircuit implemented with qiskit.

    Wraps a qiskit QuantumCircuit together with the qiskit Parameter
    objects that appear in it.
    """
    def __init__(self,circ,param_names):
        # circ: qiskit QuantumCircuit containing free Parameters
        # param_names: list of the qiskit Parameter objects used in circ
        self.circ=circ
        self.param_names = param_names
        # tag consumed by IsoTensor/IsoNetwork to dispatch per-framework code
        self.circuit_format='qiskit'
    def bind_parameters(self,params):
        """Return a copy of the circuit with parameters bound to values.

        params: dict {qiskit Parameter: numerical value}
        """
        cres = self.circ.bind_parameters(params)
        return cres
    def unitary(self,params):
        """
        input: params = dictionary of qiskit circuit parameters
        output: returns unitary for circuit, as returned by qiskit's
            'unitary_simulator' backend (numpy array)
        """
        bound_circ = self.bind_parameters(params)
        simulator = qk.Aer.get_backend('unitary_simulator')
        result = qk.execute(bound_circ,simulator).result()
        u = result.get_unitary(bound_circ)
        return u
#    def bind_from_array(self,params):
#        """
#        sets named parameters to particular values
#        input:
#            params: dictionary {parameter name: numerical value}
#        output:
#            circuit with parameters resolved
#        """
#        return self.circ.bind_parameters(params)
#%% -- ISOTENSOR CLASS --
class IsoTensor(object):
    """
    Node of an isometric tensor-network, generated by a parameterized
    circuit unitary.

    Works equally for a tensor network state (TNS) or operator (TNO);
    for a TNS the physical register is implicitly assumed to start from
    the reference state |00..0>.  Intended to be easily adaptable to
    cirq, qiskit, etc.
    """

    def __init__(self,
                 name,                  # label for the tensor
                 qregs,                 # list of quantum registers
                 pcirc,                 # parameterized circuit object (ParamCircuit)
                 meas_list=None,        # list of tuples: (qreg, creg, meas circuit, cbits)
                 circuit_format: str = 'qiskit',  # string specifying circuit type
                 thermal=False,         # if True, stochastically pre-flip physical sites
                 thermal_prob=0         # chance of flipping a physical site
                 ):
        # Fix: the original used a mutable default (meas_list=[]), which
        # is shared across every instance; use a None sentinel instead.
        self.name = name
        self.qregs = qregs
        # Hilbert-space dimension of each register: 2^(number of qubits).
        self.regdims = [2 ** len(reg) for reg in qregs]
        self.circ = pcirc.circ
        self.param_names = pcirc.param_names
        self.circuit_format = circuit_format
        self.meas_list = [] if meas_list is None else meas_list
        # NOTE(review): resolve_circuit indexes this as self.p[i], so a
        # per-site sequence of probabilities appears to be expected when
        # thermal=True, despite the scalar default — confirm with callers.
        self.p = thermal_prob
        self.thermal = thermal

    def __str__(self):
        return self.name

    def __repr__(self):
        # Fix: the original defined __rep__ (a typo) which Python never
        # calls; __repr__ is the actual hook.
        return self.name

    ## Resolve Circuit Parameters ##
    def resolve_circuit(self, params, include_measurements=True):
        """
        Resolve parameters in the circuit.

        inputs:
            params: dictionary of parameter names and values
            include_measurements: bool, whether to include measurement
                and reset operations
        outputs:
            resolved qiskit circuit
        """
        if self.circuit_format == 'qiskit':
            cres = self.circ.bind_parameters(params)
            if include_measurements:
                for qreg, creg, mcirc, cbits in self.meas_list:
                    cres = cres.combine(mcirc)
                    cres.add_register(creg)
                    # add the measurement circuit
                    cres.measure(qreg, cbits)
                    cres.reset(qreg)
            if self.thermal:
                # Pre-measurement circuit: flip physical site i to |1>
                # with probability self.p[i].
                pre_cir = qk.QuantumCircuit()
                for reg in self.qregs:
                    pre_cir.add_register(reg)
                if include_measurements:
                    for qreg, creg, mcirc, cbits in self.meas_list:
                        pre_cir.add_register(creg)
                cdict = {}
                for i in range(len(self.qregs[0])):  # need to match registers to combine
                    cdict['c_pre' + str(i)] = qk.ClassicalRegister(1, 'c_pre' + str(i))
                    cres.add_register(cdict['c_pre' + str(i)])
                    pre_cir.add_register(cdict['c_pre' + str(i)])
                    # Rotate so |1> has amplitude sqrt(p[i]); measure,
                    # reset, then classically re-apply the flip.
                    pre_cir.rx(2 * np.arcsin(np.sqrt(abs(self.p[i]))), self.qregs[0][i])
                    pre_cir.measure(self.qregs[0][i], cdict['c_pre' + str(i)])
                    pre_cir.reset(self.qregs[0][i])
                    pre_cir.x(self.qregs[0][i]).c_if(cdict['c_pre' + str(i)], 1)
                cres = pre_cir.combine(cres)
            return cres
        else:
            raise NotImplementedError()

    def bind_params(self, params):
        """
        inputs:
            params: dictionary {'name': value} for parameters in circuit
        outputs:
            circuit with symbolic parameters set to numerical values
        """
        if self.circuit_format == 'qiskit':
            return self.circ.bind_parameters(params)
        else:
            raise NotImplementedError()

    ## Compute unitaries ##
    def unitary(self, params):
        """
        inputs:
            params: dictionary {'name': value} for parameters in circuit
        outputs:
            unitary for circuit, as a numpy array with shape
            regdims (output legs) + regdims (input legs)
        """
        if self.circuit_format == 'qiskit':
            return self.unitary_qiskit(params)
        elif self.circuit_format == 'cirq':
            return self.unitary_cirq(params)
        else:
            raise NotImplementedError('only qiskit implemented')

    def unitary_qiskit(self, params):
        """
        inputs:
            params: dictionary {parameter: value}; for qiskit the keys
            are qiskit circuit parameter objects
        """
        # setup unitary simulator and compute unitary
        bound_circ = self.circ.bind_parameters(params)
        simulator = qk.Aer.get_backend('unitary_simulator')
        result = qk.execute(bound_circ, simulator).result()
        u = result.get_unitary(bound_circ)
        # Re-size and re-order to be compatible with expected indexing.
        # Note: qiskit writes bases in the opposite order of the usual
        # convention, e.g. for a 3-qubit register [q0,q1,q2] the state
        # 011 refers to q0=1, q1=1, q2=0.
        u = u.reshape(self.regdims[::-1] + self.regdims[::-1])  # reshape as tensor
        nreg = len(self.qregs)
        old_order = list(range(2 * nreg))
        new_order = old_order.copy()
        # Reverse register order separately for output and input legs.
        new_order[0:nreg] = old_order[0:nreg][::-1]
        new_order[nreg::] = old_order[nreg::][::-1]
        u = np.moveaxis(u, old_order, new_order)
        return u

    def unitary_cirq(self, params):
        """Unitary constructor for cirq-based circuits.

        NOTE(review): cirq is not imported anywhere in this file, so this
        path raises NameError if reached — confirm intended support.
        """
        qubit_order = [q for qreg in self.qregs for q in qreg]  # order to return the qubit unitary
        # resolve the symbolic circuit parameters to numerical values
        resolver = cirq.ParamResolver(params)
        # Fix: the original referenced self.circuit, which is never set
        # (the constructor stores the circuit as self.circ).
        resolved_circuit = cirq.resolve_parameters(self.circ, resolver)
        u = resolved_circuit.unitary(qubit_order=qubit_order)
        return u.reshape(self.regdims)  # reshape as a multi-leg tensor
#%%
class IsoNetwork(object):
    """
    NetworkX directed graph with:
        nodes = IsoTensors
        edges carry the list of qubits passed between nodes

    To Do:
        - add global measurement register names list
        - create a to_qasm function that traverses the graph and
          assembles the qasm for each node, adding the appropriate
          header and defining qubits and measurement registers once
          at the beginning
    """

    def __init__(self, nodes=None,
                 edges=None,
                 qregs=None,
                 circuit_format='qiskit'
                 ):
        """
        nodes: list of IsoTensors
        edges: list of tuples (output node, input node, qubits passed along edge)
        qregs: list of qubit registers
            (for cirq: each register is a list of qubits,
             for qiskit: each is a QuantumRegister object)
        circuit_format: string selecting the circuit framework

        Parameter assignments are derived from each node's param_names
        (dict: node -> list of parameter objects; qiskit circuit
        Parameters or cirq sympy symbols).
        """
        # Fix: the original used mutable default arguments ([]), which
        # are shared across calls; use None sentinels instead.
        nodes = [] if nodes is None else nodes
        edges = [] if edges is None else edges
        qregs = [] if qregs is None else qregs

        self.circuit_format = circuit_format
        # construct graph and check that it is a DAG;
        # also check for repeated node names below
        self.graph = nx.DiGraph()
        self.graph.add_nodes_from(nodes)
        self.graph.add_edges_from(edges)
        if not nx.algorithms.dag.is_directed_acyclic_graph(self.graph):
            raise RuntimeError('Graph must be directed and acyclic')

        # store node information
        self.nodes = nodes
        self.qregs = qregs
        self.node_names = [node.name for node in nodes]
        if len(self.node_names) != len(set(self.node_names)):
            raise ValueError('Tensor nodes must have unique names')

        # store variational parameter info: node -> its parameter list
        self.param_assignments = {node: node.param_names for node in nodes}

        # topologically sort nodes in order of execution
        self.sorted_nodes = list(nx.topological_sort(self.graph))

    ## Circuit Construction Methods ##
    def construct_circuit(self, param_dict, include_measurements=True):
        """
        input:
            param_dict: dict of {parameter: value}
            include_measurements: whether nodes append measurement/reset
        output:
            the assembled circuit for the whole network
        """
        if self.circuit_format == 'qiskit':
            return self.construct_cirquit_qiskit(param_dict, include_measurements)
        else:
            raise NotImplementedError

    def construct_cirquit_qiskit(self, param_dict, include_measurements=True):
        """
        Construct the circuit for the network using qiskit.
        (Method name keeps the original 'cirquit' spelling so existing
        callers keep working.)
        """
        self.circ = qk.QuantumCircuit()
        # add quantum registers once, up front
        for reg in self.qregs:
            self.circ.add_register(reg)
        for node in self.sorted_nodes:
            # restrict the global parameter dict to this node's parameters
            node_dict = {k: param_dict[k] for k in self.param_assignments[node]}
            node_circ = node.resolve_circuit(node_dict, include_measurements)
            self.circ = self.circ.combine(node_circ)
        return self.circ

    def to_qasm(self, param_dict):
        """Return the OpenQASM string for the resolved network circuit."""
        if self.circuit_format == 'qiskit':
            return self.construct_circuit(param_dict).qasm()
        else:
            raise NotImplementedError()
#%%
| 2.171875
| 2
|
audream_first/test.py
|
Lavabar/audream
| 0
|
12774452
|
from tkinter import Tk, Entry, Button
import threading

# Global run flag polled by the background counter thread.
flag = True

# Root window with a single text entry that takes initial focus.
master = Tk()
e = Entry(master)
e.pack()
e.focus_set()


def enterName():
    # Print the current entry contents.
    # NOTE(review): not wired to any of the buttons below — confirm
    # whether it is meant to be attached to a command.
    print(e.get())


def stop():
    # Signal the counter thread to exit its loop.
    global flag
    flag = False


def exitApp():
    # Destroy the window; mainloop() returns afterwards.
    master.destroy()


def cycle():
    # Start a background thread that counts upward by 2 and prints each
    # value until stop() clears the flag.
    def callback():
        global flag
        a = 0
        flag = True
        while flag:
            a = a + 2
            print(a)
            # NOTE(review): busy loop with no sleep — prints as fast as
            # possible and pegs a core until "stop" is pressed.
    t1 = threading.Thread(target=callback)
    t1.start()


# Control buttons: start the counter, stop it, and quit the app.
b1 = Button(master, text="start", width=10, command=cycle)
b1.pack()
b2 = Button(master, text="stop", width=10, command=stop)
b2.pack()
b3 = Button(master, text="exit", width=10, command=exitApp)
b3.pack()

master.mainloop()
| 3.65625
| 4
|
app/core/helpers.py
|
jcPOLO/polonet
| 0
|
12774453
|
import ipaddress
import os
import errno
import logging
import sys
from typing import List, Union
import csv, io, json
dir_path = os.path.dirname(os.path.realpath(__file__))
def is_ip(string: str) -> bool:
    """Return True if *string* parses as a valid IPv4 or IPv6 address."""
    try:
        ipaddress.ip_address(string)
    except ValueError:
        return False
    return True
# Create dir if not exists
def check_directory(path: str):
    """Ensure the parent directory of *path* exists, creating it if needed.

    path: a file path whose parent directory should exist.
    """
    dirname = os.path.dirname(path)
    # exist_ok=True covers the race where another process creates the
    # directory between the check and makedirs — the original caught
    # EEXIST manually for the same reason.  Guarding on a non-empty
    # dirname also makes bare filenames (no parent) a safe no-op.
    if dirname and not os.path.exists(dirname):
        os.makedirs(dirname, exist_ok=True)
def is_int(v: any) -> bool:
    """Return True if *v* looks like an integer-valued number string.

    Accepts "0", plain integers ("3", "-2", "+10"), and decimals whose
    fractional part is all zeros ("2.0", "10.00"); rejects "2.5" and
    anything containing "..".
    NOTE(review): the annotation is the builtin ``any`` function, not
    typing.Any — confirm intent.
    """
    v = str(v).strip()
    return (
        v == "0"
        # Strings containing ".." are passed through unmodified so that
        # isdigit() rejects them; otherwise strip sign, trailing zeros,
        # and a trailing decimal point before the digit test.
        or (
            v if v.find("..") > -1 else v.lstrip("-+").rstrip("0").rstrip(".")
        ).isdigit()
    )
def get_platforms(path="templates") -> list:
    """Return the directory entries of *path*, or [] if it doesn't exist.

    Fix: the original guarded on ``not os.path.exists(os.path.dirname(path))``,
    i.e. it listed the directory only when its PARENT was missing, and it
    only worked for the bare default because dirname("templates") == "".
    It also implicitly returned None when the guard failed; [] keeps the
    declared ``list`` return type (and the same truthiness).
    """
    if os.path.isdir(path):
        return os.listdir(path)
    return []
def configure_logging(logger, debug=""):
    """Attach stdout and file handlers to *logger* and set its level.

    debug: any truthy value enables DEBUG level, otherwise INFO.
    Returns the same logger object.
    NOTE(review): handlers are appended on every call, so invoking this
    twice on the same logger duplicates output — confirm callers only
    call it once.
    """
    if debug:
        logger.setLevel(logging.DEBUG)
    else:
        logger.setLevel(logging.INFO)
    ch = logging.StreamHandler(sys.stdout)
    # Log file lives next to this module (dir_path is module-level).
    fh = logging.FileHandler(f"{dir_path}/auto-nornir.log")
    formatter = logging.Formatter(
        "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
    )
    # Only the file handler gets the verbose format; stdout stays bare.
    # ch.setFormatter(formatter)
    fh.setFormatter(formatter)
    logger.addHandler(ch)
    logger.addHandler(fh)
    return logger
class HumanBytes:
    """Render byte counts as human-readable strings.

    USAGE
        print(HumanBytes.format(2251799813685247))         # 2 pebibytes
        print(HumanBytes.format(2000000000000000, True))   # 2 petabytes
        print(HumanBytes.format(1099511627776))            # 1 tebibyte
        print(HumanBytes.format(1000000000000, True))      # 1 terabyte
        print(HumanBytes.format(1000000000, True))         # 1 gigabyte
        print(HumanBytes.format(4318498233, precision=3))  # 4.022 gibibytes
        print(HumanBytes.format(4318498233, True, 3))      # 4.318 gigabytes
        print(HumanBytes.format(-4318498233, precision=2)) # -4.02 gibibytes
    """

    METRIC_LABELS: List[str] = ["B", "kB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"]
    BINARY_LABELS: List[str] = [
        "B",
        "KiB",
        "MiB",
        "GiB",
        "TiB",
        "PiB",
        "EiB",
        "ZiB",
        "YiB",
    ]
    # Rounding offsets and format strings per precision, precomputed for speed.
    PRECISION_OFFSETS: List[float] = [0.5, 0.05, 0.005, 0.0005]
    PRECISION_FORMATS: List[str] = [
        "{}{:.0f} {}",
        "{}{:.1f} {}",
        "{}{:.2f} {}",
        "{}{:.3f} {}",
    ]

    @staticmethod
    def format(num: Union[int, float], metric: bool = False, precision: int = 1) -> str:
        """
        Human-readable formatting of bytes, using binary (powers of 1024)
        or metric (powers of 1000) representation.
        """
        assert isinstance(num, (int, float)), "num must be an int or float"
        assert isinstance(metric, bool), "metric must be a bool"
        assert (
            isinstance(precision, int) and 0 <= precision <= 3
        ), "precision must be an int (range 0-3)"

        labels = HumanBytes.METRIC_LABELS if metric else HumanBytes.BINARY_LABELS
        step = 1000 if metric else 1024
        # Stop one rounding-offset below the step so a value that would
        # round up to e.g. "1024.0 KiB" is promoted to "1.0 MiB" instead.
        threshold = step - HumanBytes.PRECISION_OFFSETS[precision]

        sign = ""
        if num < 0:
            sign, num = "-", -num

        unit = labels[-1]
        for label in labels:
            # Accept the current unit when we are below the rounding
            # threshold, or when no larger unit exists.
            if num < threshold or label == labels[-1]:
                unit = label
                break
            # The looped divisions accumulate float error, but it is pushed
            # ever further down the decimals, so it never becomes visible.
            num /= step

        return HumanBytes.PRECISION_FORMATS[precision].format(sign, num, unit)
# TODO: This is not good at all. Only works on non-nested jsons
def json_to_csv(js):
    """Convert a list of flat dicts (parsed JSON) into CSV text.

    js: non-empty list of dicts sharing the first element's keys.
    The "groups" key is skipped.
    Fixes: the original emitted "groups" in the header row while
    omitting it from the data rows (misaligned columns), and shadowed
    the module-level ``csv`` import with a local variable.
    """
    keys = [k for k in js[0].keys() if k != "groups"]
    lines = [",".join(keys)]
    for host in js:
        lines.append(",".join(str(host[k]) for k in keys))
    # Trailing newline matches the original output shape.
    return "\n".join(lines) + "\n"
def csv_to_json(csv_text):
    """Parse CSV text (first row = header) into a JSON array string."""
    rows = list(csv.DictReader(io.StringIO(csv_text)))
    return json.dumps(rows)
| 2.484375
| 2
|
Code/transformer.py
|
JohnlNguyen/Comment2Code
| 0
|
12774454
|
<reponame>JohnlNguyen/Comment2Code
import numpy as np
import tensorflow as tf
import util
from pdb import set_trace
class AttentionLayer(tf.keras.layers.Layer):
    """Multi-headed scaled dot-product attention (self- or cross-attention)."""

    def __init__(self, attention_dim, num_heads=None, hidden_dim=None):
        super(AttentionLayer, self).__init__()
        # Typically, attention_dim == hidden_dim, so unless a different
        # value is passed, we assume as much.
        if hidden_dim == None:
            hidden_dim = attention_dim
        self.attention_dim = attention_dim
        # Default to single-headed attention.
        self.num_heads = 1 if num_heads is None else num_heads
        # Bias-free projections for queries, keys and values.
        self.attn_query = tf.keras.layers.Dense(self.attention_dim, use_bias=False)
        self.attn_keys = tf.keras.layers.Dense(self.attention_dim, use_bias=False)
        self.attn_values = tf.keras.layers.Dense(self.attention_dim, use_bias=False)
        # Output projection from concatenated heads back to hidden_dim.
        self.weight_out = tf.keras.layers.Dense(
            self.attention_dim if hidden_dim is None else hidden_dim, use_bias=False)
        # NOTE(review): these LayerNormalization instances are never used
        # inside this layer (Transformer keeps its own) — confirm dead code.
        self.ln = [LayerNormalization(hidden_dim) for _ in range(2)]

    """Applies multi-headed attention to the provided input(s).
    Supports providing just one set of states (self-attention) or separate input states to compute the keys and values over.
    Supports masked attention, in which we explicitly mask out "future" values, e.g. for generative language modeling.
    Note: masks are ignored if key_states are set (encoded states should be fully visible), but the reverse may not apply (e.g. in sequence tagging we self-attend without masks).
    Args:
        states: states to compute queries over. If key_states is None, also used to compute keys and values.
        mask: if not None, used to "mask out" values that must not be seen, e.g. padding tokens in the input or "future" tokens in a decoder/generator
        key_states: optional input states to attend over, e.g. the encoded input in neural machine translation
    """
    def call(self, states, masks=None, key_states=None):
        # Compute key, query and value vectors, reshaped to [Batch, Heads, Time, Dim]
        # where Dim is attention_dim // num_heads.
        query, keys, values = self.compute_qkv(states, key_states)
        # Attention weights, then the weighted sum of values (context).
        alpha = self.get_attention_weights(query, keys, masks)
        context = tf.matmul(alpha, values)
        # Concatenate heads and transform output to hidden_dim.
        context = self.concatenate_heads(context)
        context = self.weight_out(context)
        return context

    # Compute key, query and value vectors. If separate key_states are
    # provided, attend over those instead (cross-attention) and thus
    # assume attention is not masked.
    def compute_qkv(self, states, key_states=None):
        query = self.attn_query(states)  # queries are always computed on states
        keys = self.attn_keys(states if key_states is None else key_states)
        values = self.attn_values(states if key_states is None else key_states)
        return self.reshape_for_heads(query), self.reshape_for_heads(keys), self.reshape_for_heads(values)

    # Split projections by heads and swap the sequence and head axes to
    # allow simpler batched multiplication: [B, T, D] -> [B, H, T, D/H].
    def reshape_for_heads(self, value):
        value = tf.reshape(value, [value.shape[0], value.shape[1],
                                   self.num_heads, self.attention_dim // self.num_heads])
        value = tf.transpose(value, [0, 2, 1, 3])
        return value

    # Compute attention weights from the cross-product between keys and
    # queries (scaled, masked, softmaxed).
    def get_attention_weights(self, query, keys, masks=None):
        alpha = tf.matmul(query, keys, transpose_b=True)
        # Scale by 1/sqrt(per-head dim) to keep logits in a stable range.
        alpha *= tf.math.rsqrt(tf.cast(self.attention_dim // self.num_heads, "float32"))
        if masks is not None:
            # Push masked-out positions toward -inf so softmax zeroes them.
            alpha += (1.0 - masks) * tf.float32.min
        alpha = tf.nn.softmax(alpha)
        return alpha

    # Concatenate attention context for each head: [B, H, T, D/H] -> [B, T, D].
    def concatenate_heads(self, context):
        context = tf.transpose(context, [0, 2, 1, 3])
        context = tf.reshape(context, [context.shape[0], context.shape[1], self.attention_dim])
        return context
class LayerNormalization(tf.keras.layers.Layer):
    """Layer normalization over the last axis with learned scale and bias."""

    def __init__(self, hidden_dim):
        super(LayerNormalization, self).__init__()
        self.hidden_dim = hidden_dim

    def build(self, _):
        # Learned per-feature affine parameters.
        self.scale = tf.Variable(tf.ones(self.hidden_dim))
        self.bias = tf.Variable(tf.zeros(self.hidden_dim))
        # Fix: the original did ``self.build = True``, clobbering this
        # method with a bool; the Keras "already built" flag is ``built``.
        self.built = True

    def call(self, x, epsilon=1e-3):
        """Normalize x to zero mean / unit variance along the last axis,
        then apply the learned scale and bias."""
        mean, variance = tf.nn.moments(x, -1, keepdims=True)
        norm_x = (x - mean) * tf.math.rsqrt(variance + epsilon)
        return norm_x * self.scale + self.bias
class Transformer(tf.keras.layers.Layer):
    """Pre-norm Transformer stack of self-attention (plus optional
    cross-attention) layers built from AttentionLayer blocks."""

    def __init__(self, embed_dim, hidden_dim, vocab_dim, attention_dim, num_layers, ff_dim, num_heads=8,
                 dropout_rate=0.1):
        super(Transformer, self).__init__()
        # Scaled init keeps embedding magnitudes ~O(1) after the
        # sqrt(dim) rescaling applied in call().
        random_init = tf.random_normal_initializer(mean=0, stddev=embed_dim ** -0.5)
        # Set up embedding and multi-headed attention layers; the embedding
        # table is reused (transposed) as the output projection in predict().
        self.embed = tf.Variable(random_init([vocab_dim, embed_dim]), dtype=tf.float32)
        self.self_attention = [AttentionLayer(
            attention_dim=attention_dim, num_heads=num_heads, hidden_dim=hidden_dim) for _ in range(num_layers)]
        # Cross-attention over encoder states; only used when key_states is given.
        self.key_attention = [AttentionLayer(
            attention_dim=attention_dim, num_heads=num_heads, hidden_dim=hidden_dim)
            for _ in range(num_layers)]
        # Layer normalization for every residual sub-layer, plus a final norm.
        self.ln = [[LayerNormalization(hidden_dim) for _ in range(3)] for _ in range(num_layers)]
        self.ln_out = LayerNormalization(hidden_dim)
        # Two-layer feed-forward with a wide ReLU layer in the middle.
        self.ff_1 = [tf.keras.layers.Dense(ff_dim, activation="relu") for _ in range(num_layers)]
        self.ff_2 = [tf.keras.layers.Dense(hidden_dim) for _ in range(num_layers)]
        self.dropout_rate = dropout_rate

    """Transformer language model: converts indices into hidden states through 6 layers of multi-headed attention
    To generate language from the resulting states, pass the states to "predict". Note that predict assumes input vocabulary is output vocabulary.
    Args:
        mask: if not None, used to mask tokens e.g. "future" tokens. See "get_sequence_mask" to get a mask specifically for this purpose
        enc_states: If not None, applies both self-attention and input attention. In that case, we never mask attention -- encoded states are assumed to be fully known
    """
    def call(self, indices, masks=None, key_states=None, key_masks=None, training=True):
        # Embed tokens, rescale, and add positional encodings.
        states = tf.nn.embedding_lookup(self.embed, indices)
        states *= tf.math.sqrt(tf.cast(states.shape[-1], "float32"))
        # NOTE(review): util.positional_encoding is called with
        # (last-dim, second-to-last-dim) — confirm the argument order
        # matches util's signature.
        states += util.positional_encoding(states.shape[-1], states.shape[-2])
        if training:
            states = tf.nn.dropout(states, rate=self.dropout_rate)
        for ix, att in enumerate(self.self_attention):
            # 1) self-attention sub-layer (pre-norm + residual + dropout).
            new_states = att(self.ln[ix][0](states), masks=masks)
            if training:
                new_states = tf.nn.dropout(new_states, rate=self.dropout_rate)
            states = states + new_states
            # 2) optional cross-attention over encoder states.
            if key_states is not None:
                new_states = self.key_attention[ix](self.ln[ix][1](states), key_states=key_states, masks=key_masks)
                if training:
                    new_states = tf.nn.dropout(new_states, rate=self.dropout_rate)
                states = states + new_states
            # 3) position-wise feed-forward sub-layer.
            new_states = self.ff_1[ix](self.ln[ix][2](states))
            if training:
                new_states = tf.nn.dropout(new_states, rate=self.dropout_rate)
            new_states = self.ff_2[ix](new_states)
            states = states + new_states
        states = self.ln_out(states)
        return states

    """Returns a sequence mask in which each token can only see states up to its own position. Useful for generative language modeling (e.g. decoding)."""
    def get_sequence_mask(self, seq_len):
        return tf.sequence_mask(lengths=list(range(1, seq_len + 1)), maxlen=seq_len, dtype=tf.float32)

    """Generates tokens from transformer states using the transposed embedding layer"""
    def predict(self, states):
        return util.tensor_matrix_mul(states, tf.transpose(self.embed))
| 2.8125
| 3
|
tests/pools/config.py
|
bolshoytoster/chia-blockchain
| 6
|
12774455
|
<gh_stars>1-10
job_timeout = 45
| 0.980469
| 1
|
schedule_lib/schedule.py
|
allankellynet/mimas
| 0
|
12774456
|
#-----------------------------------------------------
# Mimas: conference submission and review system
# (c) <NAME> 2016-2020 http://www.allankelly.net
# Licensed under MIT License, see LICENSE file
# -----------------------------------------------------
# schedule.py
#
# System imports
import datetime
# Google imports
import logging
from google.appengine.ext import ndb
# Local imports
class Slot():
    """A single schedule slot: a start/end time span plus a type label."""

    def __init__(self, start, end, type):
        # ``type`` keeps the original API name (it shadows the builtin).
        self.slot_type = type  # Tracks or Plenary
        self.end_time = end
        self.start_time = start
class ScheduleDay():
    """One day of the schedule: its tracks plus its slots keyed by start time."""

    def __init__(self):
        self.day_slots = {}   # start_time -> Slot
        self.day_tracks = []  # ordered list of track names
class Schedule(ndb.Model):
    """Conference schedule datastore entity.

    setup_days_db: map day name -> ScheduleDay (tracks + slots)
    assignment_db: map Dayname -> Track -> Slot -> SubKey

    Fix: replaced the Python-2-only ``dict.has_key()`` calls with the
    ``in`` operator and the list-only ``keys.sort()`` with ``sorted()``
    — identical behavior, but compatible with Python 3 as well.
    """
    setup_days_db = ndb.PickleProperty()
    assignment_db = ndb.PickleProperty()  # map: Dayname -> Track -> Slot -> SubKey

    def __init__(self, *args, **kwargs):
        super(Schedule, self).__init__(*args, **kwargs)
        self.setup_days_db = {}
        self.assignment_db = {}

    def day_names(self):
        return self.setup_days_db.keys()

    def add_day(self, day_name):
        self.setup_days_db[day_name] = ScheduleDay()
        self.put()  # persist to the datastore

    def get_day(self, day_name):
        return self.setup_days_db[day_name]

    def delete_day(self, day_name):
        if day_name in self.setup_days_db:
            del self.setup_days_db[day_name]
            self.put()

    def tracks(self, day_name):
        # Returns [] for unknown days rather than raising.
        if day_name in self.setup_days_db:
            return self.setup_days_db[day_name].day_tracks
        return []

    def add_track(self, day_name, track):
        self.setup_days_db[day_name].day_tracks.append(track)
        self.put()

    def del_track(self, day_name, track):
        self.setup_days_db[day_name].day_tracks.remove(track)
        self.put()

    def slots(self, day_name):
        # Returns the slot dict, or [] for unknown days (original behavior).
        if day_name in self.setup_days_db:
            return self.setup_days_db[day_name].day_slots
        return []

    def orderd_slot_keys(self, day_name):
        # (Name keeps the original spelling for caller compatibility.)
        if day_name in self.setup_days_db:
            return sorted(self.setup_days_db[day_name].day_slots.keys())
        return []

    def add_slot(self, day_name, slot):
        # Slots are keyed by their start time.
        self.setup_days_db[day_name].day_slots[slot.start_time] = slot
        self.put()

    def delete_slot_by_start_time(self, day_name, start_time):
        self.setup_days_db[day_name].day_slots.pop(start_time, None)
        self.put()

    def get_assignment(self, day, track, slot):
        """Return the submission key assigned to (day, track, slot), or
        the sentinel string "Empty" when nothing is assigned."""
        if day in self.assignment_db:
            if track in self.assignment_db[day]:
                if slot in self.assignment_db[day][track]:
                    return self.assignment_db[day][track][slot]
        return "Empty"

    def assign_talk(self, sub_key, day, track, slot):
        # Create intermediate day/track dicts on demand.
        if day not in self.assignment_db:
            self.assignment_db[day] = {}
        if track not in self.assignment_db[day]:
            self.assignment_db[day][track] = {}
        self.assignment_db[day][track][slot] = sub_key
        self.put()

    def clear_talk(self, day, track, slot):
        if day not in self.assignment_db:
            return
        if track not in self.assignment_db[day]:
            return
        del self.assignment_db[day][track][slot]
        self.put()

    def get_assigned_submissions(self):
        """Flatten assignment_db into a list of all assigned submission keys."""
        submissions = []
        for day in self.assignment_db:
            for track in self.assignment_db[day]:
                for slot in self.assignment_db[day][track]:
                    submissions.append(self.assignment_db[day][track][slot])
        return submissions
def make_schedule(conf_key):
    """Create and persist a new Schedule under *conf_key*; return [its key].

    Returned as a one-element list to match the fetch() result shape
    used by get_conference_schedule.
    """
    sched = Schedule(parent=conf_key)
    sched.put()
    return [sched.key]
def get_conference_schedule(conf_key):
    """Return the key of the conference's Schedule, creating one if absent."""
    sched_keys = Schedule.query(ancestor=conf_key).fetch(keys_only=True)
    if len(sched_keys) == 0:
        # No schedule yet for this conference: create one lazily.
        sched_keys = make_schedule(conf_key)
    return sched_keys[0]
def talkTitle(safeKey):
    """Resolve a urlsafe submission key to that submission's title.

    "Empty" is the sentinel used by Schedule.get_assignment for
    unassigned slots and is passed straight through.
    """
    if safeKey=="Empty":
        return "Empty"
    sub = ndb.Key(urlsafe=safeKey).get()
    return sub.title()
| 2.375
| 2
|
scripts/soccer.py
|
jkurdys/ThinkBayes2
| 1,337
|
12774457
|
<gh_stars>1000+
"""This file contains code for use with "Think Bayes",
by <NAME>, available from greenteapress.com
Copyright 2014 <NAME>
License: GNU GPLv3 http://www.gnu.org/licenses/gpl.html
"""
from __future__ import print_function, division
import numpy
import thinkbayes2
import thinkplot
class Soccer(thinkbayes2.Suite):
    """Represents hypotheses about."""

    def Likelihood(self, data, hypo):
        """Computes the likelihood of the data under the hypothesis.

        hypo:
        data:

        NOTE(review): placeholder — returns 1 for every hypothesis, so
        Update() leaves the prior unchanged; this reads as an exercise
        stub to be filled in.
        """
        like = 1
        return like

    def PredRemaining(self, rem_time, score):
        """Plots the predictive distribution for final number of goals.

        rem_time: remaining time in the game in minutes
        score: number of goals already scored
        """
        # TODO: fill this in
def main():
    # Prior over 201 hypotheses spanning 0..12 (goal-scoring rates).
    hypos = numpy.linspace(0, 12, 201)
    suite = Soccer(hypos)
    thinkplot.Pdf(suite, label='prior')
    print('prior mean', suite.Mean())
    # NOTE(review): with the stub Likelihood (always 1) this update
    # leaves the distribution unchanged.
    suite.Update(11)
    thinkplot.Pdf(suite, label='posterior 1')
    print('after one goal', suite.Mean())
    thinkplot.Show()


if __name__ == '__main__':
    main()
| 3.140625
| 3
|
inverted.py
|
rafaelscnunes/COS738-IMIR-VSM
| 0
|
12774458
|
<gh_stars>0
#!/Library/Frameworks/Python.framework/Versions/3.6/bin/Python3.6
# -*- coding: utf-8 -*-
"""
Created: 2017-07-05
@title: IMIR-VSM (In Memory Information Retrieval - Vector Space Model)
@module: inverted.py
@author: <NAME> - <EMAIL>
"""
# Gerador de Lista Invertida - A função desse módulo é criar as listas
# invertidas simples.
# DONE: 1) O Gerador Lista Invertida deverá ler um arquivo de configuração
# a. O nome do arquivo é GLI.CFG
# b. Ele contém dois tipos de instruções
# i. LEIA=<nome de arquivo>
# ii. ESCREVA=<nome de arquivo>
# iii. Podem ser uma ou mais instruções LEIA
# iv. Deve haver uma e apenas uma instrução ESCREVA
# v. A instrução ESCREVA aparece depois de todas as instruções LEIA
# DONE: 2) O Gerador Lista Invertida deverá ler um conjunto de arquivos em
# formato XML
# a. Os arquivos a serem lidos serão indicados pela instrução LEIA no arquivo
# de configuração
# b. O formato é descrito pelo arquivo cfc2.dtd.
# c. O conjunto de arquivos será definido por um arquivo de configuração
# d. Os arquivos a serem lidos são os fornecidos na coleção
# DONE: 3) Só serão usados os campos RECORDNUM, que contém identificador do
# texto e ABSTRACT, que contém o texto a ser classificado
# a. Atenção: Se o registro não contiver o campo ABSTRACT deverá ser usado o
# campo EXTRACT
# DONE: 4) O Gerador Lista Invertida deverá gerar um arquivo
# a. O arquivo a ser gerado será indicado na instrução ESCREVA do arquivo de
# configuração
# b. O arquivo deverá ser no formato cvs
# i. O caractere de separação será o “;”, ponto e vírgula
# c. Cada linha representará uma palavra
# d. O primeiro campo de cada linha conterá a palavra em letras maiúsculas,
# sem acento
# e. O segundo campo de cada linha apresentará uma lista (Python) de
# identificadores de documentos onde a palavra aparece
# f. Se uma palavra aparece mais de uma vez em um documento, o número do
# documento aparecerá o mesmo número de vezes na lista
# g. Exemplo de uma linha:
# i. FIBROSIS ; [1,2,2,3,4,5,10,15,21,21,21]
# DONE: 5) Todos os módulos devem possuir um LOG que permita pelo menos a um
# programador posterior, usando o módulo logging de Python:
# 1. Identificar quando iniciaram suas operações
# 2. Identificar quando iniciam cada parte de seu processamento
# a. Ler arquivo de configuração
# b. Ler arquivo de dados
# 3. Identificar quantos dados foram lidos
# 4. Identificar quando terminaram os processamentos
# 5. Calcular os tempos médios de processamento de consultas, documento e palavras, de acordocom o programa sendo usado
# 6. Identificar erros no processamento, caso aconteçam.
import os
import re
import operator
import logging as log
import xml.etree.cElementTree as ET
from nltk.corpus import stopwords
if not stopwords: nltk.download('stopwords')
import vsm
# os.chdir('/Users/rafaenune/Documents/PESC-EDC/COS738 - Busca e Recuperação '
# 'da Informação/GitHub/')
# log.basicConfig(level=log.DEBUG,
# format='%(asctime)s|%(levelname)s|%(name)s|%(funcName)s'
# '|%(message)s',
# filename=__file__.split('.')[0]+'.log',
# filemode='w')
logger = log.getLogger(__file__.split('/')[-1])
CORPORA_FILE = 'corpora.csv'
CONFIG_FILE = 'GLI.CFG'
SEP = ';'
MIN_WORD_LENGHT = 2
STOPWORDS = 0
# 1 - homemade stop_words list;
# 2 - nltk stop_words;
# any other value - no use of stop_words.
class paperRecords:
    """ Classe para armazenar os registros lidos do .xml
    (Holds one RECORD parsed from the CFC .xml collection.) """

    def __init__(self):
        # Identifiers.
        self.PaperNum = ''
        self.RecordNum = 0
        self.MedlineNum = 0
        # Bibliographic fields.
        self.Citations = []
        self.Authors = []
        self.Title = ''
        self.Source = ''
        self.MajorSubJ_Topics = []
        self.MinorSubJ_Topics = []
        self.Abstract = ''
        self.References = []

    def __repr__(self):
        fields = (self.PaperNum, self.Citations, self.RecordNum,
                  self.MedlineNum, self.Authors, self.Title, self.Source,
                  self.MajorSubJ_Topics, self.MinorSubJ_Topics,
                  self.Abstract, self.References)
        return '{}: {}'.format(self.__class__.__name__,
                               ' '.join(str(f) for f in fields))
logger.info('Started %s' % __file__)
files = []      # .xml files to parse (LEIA= lines)
papers = []     # parsed paperRecords
count = 0
# fix: file_out was previously unbound when the config had no ESCREVA line,
# making the `files and file_out` check below raise NameError
file_out = None
if os.path.isfile(CONFIG_FILE):
    logger.info('Reading configuration from ' + CONFIG_FILE + '...')
    for line in open(CONFIG_FILE, 'r'):
        if line.rstrip('\n').split('=')[0] == 'LEIA':
            files.append(line.rstrip('\n').split('=')[1])
            count += 1
        elif line.rstrip('\n').split('=')[0] == 'ESCREVA':
            file_out = line.rstrip('\n').split('=')[1]
            logger.info('Gracefully stopped reading configuration file ' +
                        CONFIG_FILE + ', ESCREVA parameter found.')
            break
        else:
            logger.error('Invalid parameter found reading configuration. ')
    if count > 0: logger.info('Found %d .xml files to parse' % count)
    if files and file_out:
        logger.info('All set! Configuration successfully read!')
    else:
        logger.error('Error reading configuration files!')
logger.info('Parsing .xmls...')
total_count = 0
total_fails = 0
for file in files:
count = 0
fails = 0
logger.info('Parsing file %s' % file)
tree = ET.parse(file)
root = tree.getroot()
if root:
for RECORD in root.findall('RECORD'):
paper = paperRecords()
count += 1
try:
paper.PaperNum = RECORD.find('PAPERNUM').text
except TypeError:
logger.warning('Missing PAPERNUM attribute')
pass
try:
for cite in RECORD.find('CITATIONS'):
paper.Citations.append(cite.attrib)
except TypeError:
# logger.warning('Record: ' + paper.PaperNum +
# ' at file: ' + file + ' has none'
# ' citations.')
pass
try:
paper.RecordNum = int(RECORD.find('RECORDNUM').text)
except TypeError:
logger.error('Missing RECORDNUM attribute')
fails += 1
continue
try:
paper.MedlineNum = int(RECORD.find('MEDLINENUM').text)
except TypeError:
# logger.warning('Missing MEDLINENUM attribute')
pass
try:
for author in RECORD.find('AUTHORS'):
paper.Authors.append(author.text)
except TypeError:
# logger.warning('No authors found')
pass
try:
paper.Title = RECORD.find('TITLE').text
words = re.sub('[^a-zA-Z]', ' ', paper.Title)
words = words.split()
paper.Title = ' '.join(words).lower()
except TypeError:
# logger.warning('Record has no title')
pass
try:
paper.Source = RECORD.find('SOURCE').text
except TypeError:
# logger.warning('Missing SOURCE attribute')
pass
try:
for topic in RECORD.find('MAJORSUBJ'):
paper.MajorSubJ_Topics.append(topic.text)
except TypeError:
# logger.warning('MIssing MAJORSUBJ attribute')
pass
try:
for topic in RECORD.find('MINORSUBJ'):
paper.MinorSubJ_Topics.append((topic.text))
except TypeError:
# logger.warning('Missing MINORSUBJ attribute')
pass
try:
paper.Abstract = RECORD.find('ABSTRACT').text
except AttributeError:
# logger.warning('Record: ' + paper.PaperNum +
# ' at file: ' + file + ' has no'
# ' ABSTRACT. Searching for EXTRACT...')
try:
paper.Abstract = RECORD.find('EXTRACT').text
except:
logger.error('There is no ABSTRACT nor EXTRACT at '
'record %s of file %s, ignoring record'
% (paper.PaperNum, file))
fails += 1
continue
finally:
words = re.sub('[^a-zA-Z]', ' ', paper.Abstract)
words = words.split()
paper.Abstract = ' '.join(words).lower()
try:
for cite in RECORD.find('REFERENCES'):
paper.References.append(cite.attrib)
except:
# logger.warning('Record: ' + paper.PaperNum +
# ' at file: ' + file + ' has none'
# ' references.')
pass
papers.append(paper)
logger.info('%s - %d records successfully imported, '
'%d records ignored => total parsed: %d'
% (file, count-fails, fails, count))
else:
logger.error('Failed parsing file ' + file)
total_count += count
total_fails += fails
logger.info('Parsed all .xmls - Successfully imported %d records out of'
' %d parsed.' % (len(papers), total_count))
    logger.info('Sorting papers array...')
    papers = sorted(papers, key = operator.attrgetter('RecordNum'))
    logger.info('Papers array sorted by RecordNum.')
    logger.info('Generating inverted index and saving to %s...' % file_out)
    # inverted index: UPPER-CASE word -> list of RecordNums (repeated once
    # per occurrence in a document, as required by the spec above)
    index = dict()
    if STOPWORDS == 1:
        stop_words = ['this','not','from','how','what','why','when','where',
                      'which', 'who', 'with']
        logger.info('Using homemade stop_words list.')
    elif STOPWORDS == 2:
        stop_words = set(stopwords.words('english'))
        logger.info('Using nltk standard stop_words.')
    else:
        stop_words = []
        logger.info('Not using stop_words.')
    for i in range(0, len(papers)):
        # letters only, split into words, drop stopwords and too-short words
        words = re.sub('[^a-zA-Z]', ' ', papers[i].Abstract)
        words = words.split()
        words = [word.upper() for word in words if not word in stop_words
                 and len(word) >= MIN_WORD_LENGHT]
        for word in words:
            if word in index:
                index[word].append(papers[i].RecordNum)
            else:
                index[word] = [papers[i].RecordNum]
    logger.info('Inverted index generated in memory'
                ' with %d words.' % len(index))
    # write the inverted index, alphabetically sorted, as "Word;Documents"
    f_out = open(file_out, 'w', encoding = 'utf-8')
    f_out.write('Word' + SEP + 'Documents\n')
    for word, docs in sorted(index.items()):
        f_out.write(str(word) + SEP + str(docs) + '\n')
    f_out.close()
    logger.info('Inverted index saved as %s' %file_out)
    logger.info('Finished %s' % __file__)
    logger.info('Exporting corpora {\'RecordNum\' : \'Abstract\'} to %s'
                % CORPORA_FILE)
    # second output: the raw corpus, one "RecordNum;Abstract" row per paper
    f_out = open(CORPORA_FILE, 'w', encoding = 'utf-8')
    f_out.write('corpus' + SEP + 'text\n')
    for i in range(0, len(papers)):
        f_out.write(str(papers[i].RecordNum) + SEP + papers[i].Abstract + '\n')
    f_out.close()
    logger.info('%s created with %d corpus.' % (CORPORA_FILE,
                                                len(papers)))
else:
    logger.error(CONFIG_FILE + ' not found!')
    print(CONFIG_FILE + ' not found! Execution aborted.')
    logger.error('Execution aborted.')
| 2.0625
| 2
|
stat_key_browser/tagger.py
|
Isilon/isilon_stat_browser
| 10
|
12774459
|
"""
Provide access to the tag definitions and utilities.
Reads and parses into a dict the json tag def file. Provides access to this dict.
Takes a key_dict and applies tags per the tag definations.
"""
import logging
import json
import os
import re
import sys
import stat_key_browser
KEY_TAG_DEFS_FILENAME = 'key_tags.json'
EXTRA_ATTRS = 'xtra_attrs'
def dedupe_list(l):
    """Return the elements of *l* with duplicates removed (order not guaranteed)."""
    return list(set(l))
class Tagger(object):
    """Applies tag definitions to a key dict.

    Definitions are loaded from the bundled JSON file unless supplied
    directly via *defs* (a list of definition dicts with optional
    'keys', 're-keys' and mandatory 'tags' entries).
    """

    def __init__(self, defs=None):
        if defs is None:
            def_path = self.get_defs_path()
            try:
                with open(def_path, 'r') as def_file:
                    defs = json.load(def_file)
            except IOError as err:
                logging.error('Unable to open {0}: {1}'.format(def_path, err))
                logging.error("Try running 'make tags' to create the tag file")
                sys.exit(1)
        self.tag_defs = defs

    def _add_tags(self, key, tags):
        # duplicates are tolerated here; _dedupe_tag_lists cleans up later
        key.setdefault('tags', [])
        key['tags'] += tags

    def _dedupe_tag_lists(self, key_dict):
        """Remove duplicate tags from every key's tag list."""
        for data in key_dict.values():
            if 'tags' in data:
                data['tags'] = dedupe_list(data['tags'])
        return key_dict

    def _pop_keys(self, dictionary, *args):
        """Return a copy of *dictionary* with the named keys removed."""
        di = dictionary.copy()
        for key in args:
            try:
                di.pop(key)
            except KeyError:
                pass
        return di

    def _get_extra_attrs(self, defin):
        """Return the arbitrary (non keys/re-keys/tags) attrs of a definition.

        Raises ValueError if an extra attribute has more than one value.
        """
        arb_attrs = self._pop_keys(defin.copy(), 'keys', 're-keys', 'tags')
        for (extra_attr, val) in arb_attrs.items():
            if len(val) != 1:
                msg = 'Extra attibute must have a single value. {0} has value {1}'
                raise ValueError(msg.format(extra_attr, val))
        return arb_attrs

    def _add_extra_attrs(self, key, extra_attrs):
        """Add extra attrs to a key."""
        for (attr_name, val) in extra_attrs.items():
            key.setdefault(EXTRA_ATTRS, {})
            key[EXTRA_ATTRS][attr_name] = '\n'.join(val)

    def get_defs_path(self):
        """Return path to tag definitions file."""
        basedir = stat_key_browser.__path__[0]
        defs_path = os.path.join(basedir, 'data', KEY_TAG_DEFS_FILENAME)
        # fix: logging.debug() does not accept arbitrary keyword args like
        # `path=` — the original call raised TypeError. Use lazy %s args.
        logging.debug('Expect key tag definitions at %s', defs_path)
        return defs_path

    def tag_list(self):
        """Return a sorted, de-duplicated list of all tags in the definitions."""
        tags = []
        for defin in self.tag_defs:
            tags += defin['tags']
        tags = dedupe_list(tags)
        tags.sort()
        return tags

    def tag_keys(self, key_dict):
        """Apply tags to keys in key_dict (by exact key and by 're-keys' regex)."""
        for defin in self.tag_defs:
            extra_attrs = self._get_extra_attrs(defin)
            for key in defin.get('keys', []):
                self._add_tags(key_dict[key], defin['tags'])
                self._add_extra_attrs(key_dict[key], extra_attrs)
            if 're-keys' in defin:
                for (key, data) in key_dict.items():
                    for re_key in defin['re-keys']:
                        if re.search(re_key, key):
                            self._add_tags(data, defin['tags'])
                            self._add_extra_attrs(key_dict[key], extra_attrs)
        # Fix multiply matching keys that have duplicated tags.
        key_dict = self._dedupe_tag_lists(key_dict)
        return key_dict
| 3.234375
| 3
|
10_movie_search/program.py
|
CarlosJimeno/Python-JumpStart-by-Building-10-apps
| 0
|
12774460
|
import movie_service
import requests.exceptions
def print_header():
    """Print the application banner."""
    divider = "------------------------------------------------"
    print(divider)
    print("           MOVIE SEARCH APP")
    print(divider)
def run_search_loop():
    """Prompt for search terms until the user types an exit command."""
    exit_cmds = ['x', 'exit', 'quit', 'q']
    search_term = "Search term"
    while search_term.lower() not in exit_cmds:
        search_term = input("\nWhat movie do you want to search for? ")
        # NOTE(review): this membership test is case-sensitive while the loop
        # condition is not, so e.g. "Q" triggers one last search before the
        # loop exits — confirm whether that asymmetry is intended.
        if search_term not in exit_cmds:
            try:
                movies = movie_service.search_movie(search_term)
                movie_service.print_movies(movies)
            except requests.exceptions.ConnectionError:
                print('Error: your connection is down.')
            except ValueError:
                print('ValueError: Inappropriate argument value.')
            except Exception as e:
                # catch-all: report the error instead of crashing the loop
                print(type(e))
                print(e.__cause__)
    print('exiting...')
def main():
    """Entry point: show the banner, then run the interactive search loop."""
    print_header()
    run_search_loop()
if __name__ == '__main__':
    main()
| 3.3125
| 3
|
PyPoll/main-PyPoll.py
|
designergal3002/Python-Challenge
| 0
|
12774461
|
<gh_stars>0
import os
import csv

candidates = []     # distinct candidate names, in order of first appearance
num_votes = 0       # total ballots counted
vote_counts = []    # vote tally, parallel to `candidates`

# Determine path for the CSV file to access
poll_path = os.path.join('..', '..', 'Resources', 'election_data.csv')

# Read the CSV file
with open(poll_path, newline="") as csvfile:
    csvreader = csv.reader(csvfile)
    # Skip the header of the CSV file
    line = next(csvreader, None)
    # Tally the votes (candidate name is in the third column)
    for line in csvreader:
        num_votes = num_votes + 1
        candidate = line[2]
        if candidate in candidates:
            candidate_index = candidates.index(candidate)
            vote_counts[candidate_index] = vote_counts[candidate_index] + 1
        else:
            candidates.append(candidate)
            vote_counts.append(1)

percentages = []
max_votes = vote_counts[0]
max_index = 0
# Calculate the winner (fix: removed a stray debug print of max_votes that
# polluted the report output)
for count in range(len(candidates)):
    vote_percentage = vote_counts[count]/num_votes*100
    percentages.append(vote_percentage)
    if vote_counts[count] > max_votes:
        max_votes = vote_counts[count]
        max_index = count
winner = candidates[max_index]

# Print out Election Results
print("Election Results")
print("--------------------------")
print(f"Total Votes: {num_votes}")
for count in range(len(candidates)):
    print(f"{candidates[count]}: {percentages[count]}% ({vote_counts[count]})")
print("---------------------------")
print(f"Winner: {winner}")
print("---------------------------")

# Write the same summary to an external file
# (fix: the handle was previously never closed — use a context manager)
write_file = "pypoll_results_summary.txt"
with open(write_file, mode='w') as filewriter:
    filewriter.write("Election Results\n")
    filewriter.write("--------------------------\n")
    filewriter.write(f"Total Votes: {num_votes}\n")
    for count in range(len(candidates)):
        filewriter.write(f"{candidates[count]}: {percentages[count]}% ({vote_counts[count]})\n")
    filewriter.write("---------------------------\n")
    filewriter.write(f"Winner: {winner}\n")
    filewriter.write("---------------------------\n")
_________________________________________________________________________________________________________________________________________________________________________
Election Results
-------------------------
Total Votes: 3521001
-------------------------
Khan: 63.000% (2218231)
Correy: 20.000% (704200)
Li: 14.000% (492940)
O'Tooley: 3.000% (105630)
-------------------------
Winner: Khan
-------------------------
| 3.578125
| 4
|
alembic/versions/2019102221_add_shared_file_system_column__75d4288ae265.py
|
kl-chou/codalab-worksheets
| 236
|
12774462
|
<filename>alembic/versions/2019102221_add_shared_file_system_column__75d4288ae265.py
"""Add shared-file-system column to workers
Revision ID: 75d4288ae265
Revises: <PASSWORD>
Create Date: 2019-10-22 21:05:26.580918
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '<KEY>'
down_revision = 'd0dd45f443b6'
def upgrade():
    """Add worker.shared_file_system: non-null boolean defaulting to false."""
    op.add_column('worker', sa.Column('shared_file_system', sa.Boolean(), nullable=False, server_default='0'))
def downgrade():
    """Drop the column added by upgrade()."""
    op.drop_column('worker', 'shared_file_system')
| 1.296875
| 1
|
buildwatch/main.py
|
InvictrixRom/updater_server
| 0
|
12774463
|
<reponame>InvictrixRom/updater_server
import inotify.adapters
import time
import hashlib
import json
import re
import os
import zipfile
import datetime
def load_json():
    """Load the build catalogue from disk into the module-global `builds`."""
    global builds
    with open('/root/builds.json') as json_data:
        builds = json.load(json_data)
def save_json():
    """Persist the in-memory catalogue back to disk, pretty-printed and key-sorted."""
    with open('/root/builds.json', 'w') as json_data:
        json.dump(builds, json_data, sort_keys=True, indent=4)
def add_build(path, name):
    """Register a freshly uploaded build zip in the global catalogue.

    Reads system/build.prop inside the zip for timestamp/version/type/device;
    falls back to placeholder values when the zip is unreadable, then appends
    the metadata to `builds[device]` and saves the catalogue.
    """
    try:
        with zipfile.ZipFile(path, 'r') as update_zip:
            build_prop = update_zip.read('system/build.prop').decode('utf-8')
        timestamp = int(re.findall('ro.build.date.utc=([0-9]+)', build_prop)[0])
        version = re.findall('ro.invictrix.build.version=([a-zA-Z0-9\-\_\.]+)', build_prop)[0]
        buildtype = re.findall('ro.invictrix.buildtype=([a-zA-Z0-9\-\_\.]+)', build_prop)[0]
        device = re.findall('ro.invictrix.device=([a-zA-Z0-9\-\_\.]+)', build_prop)[0]
    except Exception as e:
        # unreadable / malformed zip: keep going with placeholder metadata
        print(e)
        timestamp = 1
        version = None
        buildtype = None
        device = "??"
    buildinfo = {}
    buildinfo['sha256'] = sha256_checksum(path)
    buildinfo['sha1'] = sha1_checksum(path)
    buildinfo['size'] = os.path.getsize(path)
    buildinfo['date'] = datetime.datetime.fromtimestamp(timestamp).strftime('%Y-%m-%d')
    buildinfo['datetime'] = timestamp
    buildinfo['filename'] = name
    buildinfo['filepath'] = 'builds/full/{}/{}'.format(device, name)
    buildinfo['version'] = version
    buildinfo['type'] = buildtype
    buildinfo['incremental'] = False
    # fix: builds[device] raised KeyError for any device not already present
    # in the catalogue (including the "??" fallback)
    builds.setdefault(device, []).append(buildinfo)
    save_json()
def sha256_checksum(filename, block_size=257152):
    """Return the hex SHA-256 digest of *filename*, read in chunks."""
    digest = hashlib.sha256()
    with open(filename, 'rb') as stream:
        while True:
            chunk = stream.read(block_size)
            if not chunk:
                break
            digest.update(chunk)
    return digest.hexdigest()
def sha1_checksum(filename, block_size=257152):
    """Return the hex SHA-1 digest of *filename*, read in chunks."""
    digest = hashlib.sha1()
    with open(filename, 'rb') as stream:
        while True:
            chunk = stream.read(block_size)
            if not chunk:
                break
            digest.update(chunk)
    return digest.hexdigest()
def _main():
    """Consume inotify events and register any newly written build zip."""
    try:
        for event in i.event_gen():
            (_, type_names, path, filename) = event
            # IN_CLOSE_WRITE fires once the uploader has finished writing
            if type_names[0] == "IN_CLOSE_WRITE":
                filePath = "{}/{}".format(path, filename)
                add_build(filePath, filename)
    except Exception as e:
        # best-effort watcher: log and let the outer loop restart us
        print(e)
if __name__ == '__main__':
    # NOTE(review): `global` at module level is a no-op — `i` is simply a
    # module-level name used by _main(); confirm before refactoring.
    global i
    i = inotify.adapters.InotifyTree('/var/www/html/builds')
    load_json()
    # watch the drop directory forever; Ctrl-C exits cleanly
    try:
        while True:
            _main()
            time.sleep(5)
    except KeyboardInterrupt:
        print("Exiting")
| 2.1875
| 2
|
odk2stata/gui/worker.py
|
PMA-2020/odk2stata
| 2
|
12774464
|
import os.path
import threading
import wx
from ..dofile.do_file_collection import DoFileCollection
EVT_COMPLETE_ID = wx.NewId()
def evt_complete(win, func):
    """Bind *func* as the handler for CompleteEvent on window *win*."""
    win.Connect(-1, -1, EVT_COMPLETE_ID, func)
class CompleteEvent(wx.PyEvent):
    """wx event posted by Worker when do-file generation finishes."""
    def __init__(self, message, success):
        super().__init__()
        self.SetEventType(EVT_COMPLETE_ID)
        # message: text shown to the user; success: True on normal completion
        self.message = message
        self.success = success
class Worker(threading.Thread):
    """Background thread that converts an XLSForm into a Stata do file."""
    def __init__(self, panel, xlsform_path, settings_path, output_dir):
        super().__init__()
        self.panel = panel  # wx panel that receives the CompleteEvent
        self.xlsform_path = xlsform_path
        self.settings_path = settings_path
        # output file: <xlsform basename>.do inside output_dir
        self.filename = os.path.splitext(os.path.basename(xlsform_path))[0] + '.do'
        self.output_path = os.path.join(output_dir, self.filename)
    def run(self):
        """Generate the do file(s) and notify the panel on completion.

        NOTE(review): exceptions raised here are not caught, so the GUI is
        never notified on failure — confirm whether that is intended.
        """
        do_files = DoFileCollection.from_file(self.xlsform_path,
                                              settings_path=self.settings_path)
        do_files.write_out(self.output_path)
        message = f'Do file saved to "{self.output_path}"\n'
        wx.PostEvent(self.panel, CompleteEvent(message, True))
| 2.390625
| 2
|
pubgate/utils/user.py
|
UndeadBeast/pubgate
| 0
|
12774465
|
import asyncio
from pubgate.crypto.key import get_key
from pubgate.utils.networking import deliver
class UserUtils:
    """Mixin providing ActivityPub endpoint URLs and delivery helpers.

    Assumes the host class provides `self.uri` and an async
    `followers_get()` method — TODO confirm against the User model.
    """
    @property
    def key(self):
        # signing key derived from the actor URI
        return get_key(self.uri)
    @property
    def following(self): return f"{self.uri}/following"
    @property
    def followers(self): return f"{self.uri}/followers"
    @property
    def inbox(self): return f"{self.uri}/inbox"
    @property
    def outbox(self): return f"{self.uri}/outbox"
    async def forward_to_followers(self, activity):
        """Fan an activity out to all followers except its own actor.

        Delivery is fired-and-forgotten via asyncio.ensure_future.
        """
        recipients = await self.followers_get()
        try:
            # don't echo the activity back to the actor who produced it
            recipients.remove(activity["actor"])
        except ValueError:
            pass
        asyncio.ensure_future(deliver(self.key, activity, recipients))
| 2.15625
| 2
|
tests/img_metadata_lib/test_image.py
|
Austin-Schmidli/Image-Metadata-API
| 0
|
12774466
|
import pytest
from tests.tools.tools import load_test_images
from img_metadata_lib.image import fetch_image
from img_metadata_lib.image import extract_metadata
@pytest.fixture(params=load_test_images())
def image(request):
    """Parametrized fixture: yields each bundled test image in turn."""
    return request.param
def test_extract_metadata_returns_dict(image):
    # extract_metadata must always return a dict, whatever image it is given
    assert isinstance(extract_metadata(image), dict)
| 2.0625
| 2
|
v2x_solution/road/models.py
|
Michaelwwgo/V2X_Project
| 1
|
12774467
|
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from v2x_solution.users import models as user_models
class TimeStampedModel(models.Model):
    """Abstract base adding created/updated timestamps to models.

    NOTE(fix): the @python_2_unicode_compatible decorator was removed here —
    it raises ValueError when applied to a class that does not define
    __str__(), which this abstract base does not.
    """
    created_at = models.DateTimeField(auto_now_add=True)  # set once on insert
    updated_at = models.DateTimeField(auto_now=True)      # refreshed on save
    class Meta:
        abstract = True
@python_2_unicode_compatible
class Road(TimeStampedModel):
    """ Road Model """
    name = models.CharField(max_length=140)
    location = models.CharField(max_length=140)
    # speed value; nullable for legacy rows
    # NOTE(review): the unit (km/h vs mph) is not established here — confirm
    speed = models.SmallIntegerField(null=True)
    def __str__(self):
        return '{} - {}'.format(self.name, self.location)
@python_2_unicode_compatible
class Situation(TimeStampedModel):
    """ Situation Model """
    # the road this situation applies to; deleted together with the road
    road = models.ForeignKey(Road, null=True, on_delete=models.CASCADE)
    isimpassable = models.BooleanField()
    message = models.CharField(max_length=140)
    # validity window of the situation
    startTime = models.DateTimeField()
    endTime = models.DateTimeField()
    creator = models.ForeignKey(user_models.User, null=True, on_delete=models.CASCADE)
    def __str__(self):
        return '{} - {}'.format(self.isimpassable, self.message)
| 2.453125
| 2
|
tests/linalg_symmetrize.py
|
aroig/nnutil2
| 0
|
12774468
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# nnutil2 - Tensorflow utilities for training neural networks
# Copyright (c) 2019, <NAME> <<EMAIL>>
#
# This file is part of 'nnutil2'.
#
# This file may be modified and distributed under the terms of the 3-clause BSD
# license. See the LICENSE file for details.
import unittest
import tensorflow as tf
import nnutil2 as nnu
class LinalgSymmetrize(tf.test.TestCase):
    """Tests for nnu.linalg.symmetrize / antisymmetrize on batched matrices."""
    def setUp(self):
        pass
    def test_linalg_symmetrize_1(self):
        # symmetrize(A) must equal its own transpose and be idempotent
        N = 32
        batch_size = 4
        shape = (batch_size, N, N)
        A = tf.random.normal(shape=shape)
        A_sym = nnu.linalg.symmetrize(A, axis=[-1, -2])
        self.assertAllClose(A_sym, tf.linalg.matrix_transpose(A_sym))
        A_sym2 = nnu.linalg.symmetrize(A_sym, axis=[-1, -2])
        self.assertAllClose(A_sym, A_sym2)
    def test_linalg_antisymmetrize_1(self):
        # antisymmetrize(A) must equal minus its transpose and be idempotent
        N = 32
        batch_size = 4
        shape = (batch_size, N, N)
        A = tf.random.normal(shape=shape)
        A_ant = nnu.linalg.antisymmetrize(A, axis=[-1, -2])
        self.assertAllClose(A_ant, -tf.linalg.matrix_transpose(A_ant))
        A_ant2 = nnu.linalg.antisymmetrize(A_ant, axis=[-1, -2])
        self.assertAllClose(A_ant, A_ant2)
if __name__ == '__main__':
    tf.test.main()
| 2.65625
| 3
|
diversos/dicionario.py
|
lcarlin/guppe
| 1
|
12774469
|
<reponame>lcarlin/guppe<gh_stars>1-10
# Demo of the different ways to iterate over a dict.
dicionario_sites = {"Diego": "diegomariano.com"}
print(dicionario_sites['Diego'])
dicionario_sites = {"Diego": "diegomariano.com", "Google": "google.com", "Udemy": "udemy.com", "<NAME>" : "luizcarlin.com.br"}
separador = "-=+=-=+=-=+=-=+=-=+=-=+=-=+=-=+=-=+=-=+="
print (separador)
# keys + values, via .items() unpacking
for chave, valor in dicionario_sites.items():
    print (chave + " -:- " + valor)
    print(valor)
print (separador)
# (key, value) tuples
for par in dicionario_sites.items():
    print(par)
print (separador)
# values only
for valor in dicionario_sites.values():
    print(valor)
print (separador)
# keys only
for chave in dicionario_sites.keys():
    print(chave)
| 3.671875
| 4
|
confme/core/env_overwrite.py
|
iwanbolzern/ConfMe
| 21
|
12774470
|
import os
from pydantic.main import ModelMetaclass
from confme.utils.dict_util import flatten, InfiniteDict
from confme.utils.typing import get_schema
def env_overwrite(config_cls: ModelMetaclass):
    """Collect config values overridden through environment variables.

    Environment variable names are matched case-insensitively against the
    flattened parameter names of *config_cls*; matches are expanded into a
    nested InfiniteDict.
    """
    # extract possible parameters
    config_dict = get_schema(config_cls)
    parameters, _ = flatten(config_dict)
    # case-insensitive view of the environment.
    # fix: the old zip(*os.environ.items()) raised ValueError when the
    # environment was empty; a dict also avoids the O(n) list.index lookup.
    # Keep the FIRST occurrence so behaviour matches the old index() lookup.
    env = {}
    for key, value in os.environ.items():
        folded = key.casefold()
        if folded not in env:
            env[folded] = value
    # find passed arguments and fill them into the dict structure
    infinite_dict = InfiniteDict()
    for p in parameters:
        folded = p.casefold()
        if folded in env:
            infinite_dict.expand(p.split('.'), env[folded])
    return infinite_dict
| 2.5
| 2
|
pRestore/worker.py
|
snaiperskaya96/pRestore
| 0
|
12774471
|
import subprocess
from threading import Thread
import file_handler
class Worker(Thread):
    """Daemon thread that lists *directory* via `ls -lah`, records its files,
    and queues non-empty subdirectories back onto the parent crawler."""
    def __init__(self, directory, parent):
        Thread.__init__(self)
        self.daemon = True
        self.directory = directory
        self.parent = parent    # crawler owning the work queue / output file
        self.done = False       # set True once this directory is processed
    def run(self):
        # NOTE(review): on Python 3 communicate() returns bytes, so
        # .split('\n') would raise TypeError — this code appears to target
        # Python 2; confirm before porting.
        process = subprocess.Popen(['ls', '-lah', self.directory], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        output = process.communicate()[0].split('\n')
        if process.returncode != 0:
            self.done = True
        else:
            self.send_to_file(output)
            # re-queue non-empty subdirectories for further traversal
            for line in output[3:]:
                f = file_handler.File(line)
                if f.is_directory and not f.is_empty:
                    self.parent.add_to_queue(self.directory + f.name)
            self.done = True
    def send_to_file(self, output):
        """Write the listed entries to the parent; if all share the same
        permissions/owner/group, collapse them to one `*` wildcard entry."""
        # output[0:3] is the `ls -lah` preamble (total line, '.', '..')
        if len(output) < 3:
            return
        first_file = file_handler.File(output[3])
        first_file.path = self.directory + first_file.name
        files = [first_file]
        should_use_wildcard = True
        permissions = first_file.permissions + first_file.owner + first_file.group
        for line in output[3:]:
            f = file_handler.File(line)
            f.path = self.directory + f.name
            if f.name == '':
                continue
            files.append(f)
            if permissions != f.permissions + f.owner + f.group:
                should_use_wildcard = False
        if should_use_wildcard:
            first_file.path = self.directory + '*'
            self.parent.write(first_file)
        else:
            for f in files:
                self.parent.write(f)
| 2.8125
| 3
|
code_files/Diabetes_app.py
|
zuz201/Diabetes
| 0
|
12774472
|
<reponame>zuz201/Diabetes
# -*- coding: utf-8 -*-
import pandas as pd
import streamlit as st
from PIL import Image
import streamlit.components.v1 as components
import pickle
from joblib import load
import codecs
import matplotlib.pyplot as plt
import seaborn as sns
# Load the best trained model.
# fix: dropped the redundant file.close() — the `with` block already closes
# the handle, and `close()` after the block only worked by accident.
# NOTE(review): pickle.load is only safe because this file is produced
# locally by the project notebooks — never load untrusted pickles.
with open("C:/Users/zuzan/Documents/Diabetes/Diabetes/notebooks/model.pkl", 'rb') as file:
    model = pickle.load(file)
#Standard Scaler model loaded
sc = load("C:/Users/zuzan/Documents/Diabetes/Diabetes/notebooks/scaler.joblib")
# Function to display a pre-generated Sweetviz HTML report inside Streamlit.
def display_sweetviz(report_html, width = 1000, height=1000):
    """Embed the Sweetviz HTML report at *report_html* in the page."""
    # fix: use a context manager — the original codecs.open handle leaked
    with codecs.open(report_html, 'r') as report_file:
        page = report_file.read()
    components.html(page, width = width, height = height, scrolling = True)
# Function to display a histogram of one column and highlight the bin that
# contains the user's value. Returns the created figure.
def hist_display(data,user_data, i):
    """Plot column *i* of *data* as a vertical distribution and paint the
    histogram bar containing the user's value crimson. Returns the figure."""
    X = data.iloc[:,i]
    fig= plt.figure(figsize=(9,9))
    sns.set_style('darkgrid')
    # NOTE(review): sns.distplot is deprecated in newer seaborn versions —
    # confirm the pinned seaborn version before upgrading.
    ax = sns.distplot(X, vertical = True)
    # walk the bars bottom-up; highlight the one whose [low, low+height)
    # interval contains the user's value
    low_border = ax.containers[0][0].get_xy()[1]
    for bar in ax.containers[0]:
        if user_data.iloc[:,i].values[0] <= (low_border + bar.get_height()) and user_data.iloc[:,i].values[0] >=low_border:
            bar.set_color('crimson')
        low_border = low_border + bar.get_height()
    return fig
# Collect the patient parameters entered in the sidebar.
def get_imput():
    """Return the user's inputs as a single-row DataFrame whose column
    order matches the training data fed to the scaler/model."""
    sb = st.sidebar
    age = sb.slider('Age', 21, 81, 21)
    weight = sb.slider('Weight (kg)', 40, 120, 55)
    height = sb.slider('Height (m)', 1.4, 2.2, 1.75)
    pregnancies = sb.slider('Number of Pregnancies', 0, 17, 1)
    glucose = sb.slider('Plasma glucose concentration', 5, 200, 120)
    blood = sb.slider('Blood Pressure (mm Hg)', 5, 130, 70)
    skin = sb.slider('SkinThickness (mm)', 1, 60, 20)
    insulin = sb.slider('2-Hour serum insulin (mu U/ml)', 1, 900, 80)
    pedigree = sb.slider('Diabetes Pedigree Function', 0.0, 3.0, 0.5)
    return pd.DataFrame({
        'Pregnancies': pregnancies,
        'Glucose': glucose,
        'BloodPressure': blood,
        'SkinThickness': skin,
        'Insulin': insulin,
        'BMI': weight / height ** 2,  # BMI derived from weight and height
        'DiabetesPedigreeFunction': pedigree,
        'Age': age,
    }, index=[0])
# Build the application window: a sidebar menu plus one view per entry.
def main():
    """Render the Streamlit page corresponding to the sidebar menu choice."""
    # Create the menu
    menu = ["Home", "What is Diabetes?","Test", "Statistics"]
    choice = st.sidebar.selectbox("Menu", menu)
    # Home view (default view)
    if choice == "Home":
        html_temp = """
        <div style = "background-color: rgb(153, 204, 255);padding:10px;border-radius:10px">
        <h1 style = "color:white; text-align:center;">DIABETES DETECTION APPLICATION
        </div>
        """
        components.html(html_temp)
        col1, col2 = st.beta_columns(2)
        with col1:
            image = Image.open('C:/Users/zuzan/Documents/Diabetes/Diabetes/code_files/images/diabetes.png')
            s = 600,600
            image.thumbnail(s)
            st.image(image)
        with col2:
            image = Image.open('C:/Users/zuzan/Documents/Diabetes/Diabetes/code_files/images/heart.png')
            s = 600,600
            image.thumbnail(s)
            st.image(image)
    # "What is Diabetes?" - video with a short introduction
    elif choice == "What is Diabetes?":
        html_temp4 = """
        <div style = "background-color: rgb(153, 204, 255);padding:10px;border-radius:10px">
        <h1 style = "color:white; text-align:center;">What is Diabetes?
        </div>
        """
        components.html(html_temp4)
        st.video("https://youtu.be/ObpeolfZMPs")
    # Test view. The user inputs values and checks if she might be suffering from Diabetes.
    elif choice == "Test":
        user_param = get_imput()
        X = sc.transform(user_param)
        # 9 is a sentinel meaning "no prediction requested yet"
        prediction = 9
        if st.sidebar.button("Test your result"):
            prediction = model.predict(X)
        else:
            prediction = 9
        html_temp2 = """
        <div style = "background-color: rgb(153, 204, 255);padding:10px;border-radius:10px">
        <h1 style = "color:white; text-align:center;">Parameters
        </div>
        """
        html_temp3 = """
        <div style = "background-color: #ccccff;padding:10px;border-radius:10px">
        <h1 style = "color:white; text-align:center;">Negative Result
        </div>
        """
        html_temp5 = """
        <div style = "background-color: rgb(255, 102, 102);padding:10px;border-radius:10px">
        <h1 style = "color:white; text-align:center;">Positive Result
        </div>
        """
        # negative prediction: show prevention advice
        if prediction == 0.0:
            components.html(html_temp3)
            st.subheader("You are not supposed to suffer from Diabetes.")
            st.write("")
            col1, col2 = st.beta_columns([3,1])
            with col1:
                with st.beta_expander("Prevention"):
                    with st.beta_container():
                        st.write("Remember about prevention. At present, type 1 diabetes cannot be prevented, but there are a number of factors that influence the development of type 2 diabetes")
                        st.write("Studies from different parts of the world have established that lifestyle modification with physical activity and/or healthy diet can delay or prevent the onset of type 2 diabetes.")
                        st.write("Taking a life course perspective is essential for preventing type 2 diabetes and its complications.")
                with st.beta_expander("Recommendations"):
                    with st.beta_container():
                        st.write("IDF recommendations for a healthy diet for the general population")
                        st.write("1. Choosing water, coffee or tea instead of fruit juice, soda, or other sugar sweetened beverages.")
                        st.write("2. Eating at least three servings of vegetable every day, including green leafy vegetables.")
                        st.write("3. Eating up to three servings of fresh fruit every day.")
                        st.write("4. Limiting alcohol intake to a maximum of two standard drinks per day.")
                        st.write("5. Choosing lean cuts of white meat, poultry or seafood instead of red or processed meat.")
                        st.write("6. Choosing peanut butter instead of chocolate spread or jam.")
                        st.write("7. Choosing whole-grain bread, rice, or pasta instead of white bread, rice, or pasta.")
                        st.write("8. Choosing unsaturated fats (olive oil, canola oil, corn oil, or sunflower oil) instead of saturated fats (butter, ghee, animal fat, coconut oil or palm oil.")
            with col2:
                image3 = Image.open('C:/Users/zuzan/Documents/Diabetes/Diabetes/code_files/images/right.png')
                s = 200,200
                image3.thumbnail(s)
                st.image(image3)
        # positive prediction: show information about the disease
        elif prediction == 1.0:
            components.html(html_temp5)
            st.subheader("You might be suffering from Diabetes")
            st.write("")
            col1, col2 = st.beta_columns([3,1])
            with col1:
                with st.beta_expander("Types of diabetes"):
                    with st.beta_container():
                        st.write("There are three main types of diabetes – type 1, type 2 and gestational.")
                        st.write("Type 1 diabetes can develop at any age, but occurs most frequently in children and adolescents.")
                        st.write("Type 2 diabetes is more common in adults and accounts for around 90% of all diabetes cases")
                        st.write("Gestational diabetes (GDM) is a type of diabetes that consists of high blood glucose during pregnancy and is associated with complications to both mother and child.")
                with st.beta_expander("Complications"):
                    with st.beta_container():
                        st.write("People with diabetes have an increased risk of developing a number of serious health problems.")
                        st.write("High blood glucose levels can lead to serious diseases affecting the heart and blood vessels, eyes, kidneys, nerves and teeth.")
                        st.write("People with diabetes also have a higher risk of developing infections. ")
                        st.write("diabetes is a leading cause of cardiovascular disease, blindness, kidney failure, and lower limb amputation.")
                        st.write("Maintaining blood glucose levels, blood pressure, and cholesterol at or close to normal can help delay or prevent diabetes complications. ")
                        st.write("People with diabetes need regular monitoring")
            with col2:
                image3 = Image.open('C:/Users/zuzan/Documents/Diabetes/Diabetes/code_files/images/wrong.png')
                s = 200,200
                image3.thumbnail(s)
                st.image(image3)
        # no prediction yet: explain the parameters
        else:
            components.html(html_temp2)
            col1, col2 = st.beta_columns(2)
            with col1:
                with st.beta_container():
                    st.write("")
                    st.write("")
                    st.write("")
                    st.write("")
                    st.write("Pregnancies - number of times pregnant")
                    st.write("Glucose - plasma glucose concentration a 2 hours in an oral glucose tolerance test")
                    st.write("Blood Pressure - diastolic blood pressure (mm Hg)")
                    st.write("SkinThickness - triceps skin fold thickness (mm)")
                    st.write("Insulin - 2-Hour serum insulin (mu U/ml)")
                    st.write("BMI - body mass index (weight in kg/(height in m)^2)")
                    st.write("DiabetesPedigreeFunction - diabetes pedigree function")
                    st.write("Age - age (years)")
            with col2:
                image3 = Image.open('C:/Users/zuzan/Documents/Diabetes/Diabetes/code_files/images/the-world-day-of.png')
                s = 600,600
                image3.thumbnail(s)
                st.image(image3)
    # Statistics view: per-variable distributions compared against the user's values.
    else:
        html_temp6 = """
        <div style = "background-color: rgb(153, 204, 255);padding:10px;border-radius:10px">
        <h1 style = "color:white; text-align:center;">Statistics
        </div>
        """
        components.html(html_temp6)
        if st.sidebar.button("Show Report") and not st.sidebar.button("Show charts"):
            temp = 1
        else:
            temp = 0
        if temp == 1:
            display_sweetviz('C:/Users/zuzan/Documents/Diabetes/Diabetes/code_files/SWEETVIZ_REPORT.html')
        else:
            data_file = pd.read_csv('C:/Users/zuzan/Documents/Diabetes/Diabetes/code_files/data/export_dataframe.csv')
            user_param2 = get_imput()
            charts_number = len(data_file.columns) - 1
            cols = st.beta_columns(4)
            # lay the histograms out in two rows of four columns
            for i in range(charts_number):
                if i < 4:
                    with cols[i]:
                        st.write(data_file.columns[i])
                        st.pyplot(hist_display(data_file, user_param2, i))
                else:
                    with cols[i - 4]:
                        st.write(data_file.columns[i])
                        st.pyplot(hist_display(data_file, user_param2, i))
if __name__ =='__main__':
    main()
| 2.96875
| 3
|
SentiWordNetforsentiment.py
|
dienhuynhphong/RAM-W
| 1
|
12774473
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from bs4 import BeautifulSoup
from nltk.corpus import stopwords
import codecs
def ReadFileSentiWordNet(filename):
    """Parse a SentiWordNet dump into parallel word/positive/negative lists.

    Each line of the file is tab-separated: column 2 holds the positive
    score, column 3 the negative score, and column 4 a space-separated list
    of "word#N" sense entries.  Only the first occurrence of each word is
    kept, together with the scores of the line it first appeared on.

    Returns:
        (senti_word, senti_pos, senti_neg): three parallel lists.
    """
    # Local import: the module-level `import codes` is a typo (no such
    # stdlib module) — `codecs` is what provides UTF-8 file reading here.
    import codecs

    senti_word = []
    senti_pos = []
    senti_neg = []
    seen = set()  # O(1) membership test instead of re-scanning senti_word

    # `with` guarantees the file handle is closed (the original leaked it).
    with codecs.open(filename, 'r', 'utf-8') as handle:
        for line in handle.read().splitlines():
            columns = line.split('\t')
            for entry in columns[4].split(' '):
                # Drop the trailing "#N" sense marker.  NOTE(review): this
                # assumes a single-digit sense number, matching the original
                # behaviour — multi-digit senses would be truncated wrongly.
                word = entry[:-2]
                if word not in seen:
                    seen.add(word)
                    senti_word.append(word)
                    senti_pos.append(float(columns[2]))
                    senti_neg.append(float(columns[3]))
    return senti_word, senti_pos, senti_neg
if __name__ == "__main__":
    # Load the SentiWordNet lexicon and dump the unique word list to disk.
    # `print(...)` (function-call form) replaces the Python-2-only print
    # statement; it behaves identically on both interpreter lines.
    print("Loading SentiWordNet")
    senti_words, senti_pos, senti_neg = ReadFileSentiWordNet('SentiWordNet_3.0.0.txt')
    # Write one word per line, UTF-8 encoded.  codecs.open handles the
    # encoding on both Python 2 and 3; the original
    # `'%s\n' % w.encode('utf-8')` would emit b'...' reprs under Python 3.
    import codecs
    with codecs.open('sentiwordnet_en.txt', 'w', 'utf-8') as f:
        for w in senti_words:
            f.write(w + '\n')
| 2.84375
| 3
|
scripts/extract_reads_from_fastq.py
|
rlorigro/overlap_analysis
| 0
|
12774474
|
from modules.Fastx import *
from subprocess import run
import argparse
import struct
import mmap
import sys
import os
def load_query_ids(query_ids_path):
    """Read one query id per line from *query_ids_path*.

    Lines consisting of a lone newline are skipped; every other line is
    stripped of surrounding whitespace and collected in file order.
    """
    with open(query_ids_path, 'r') as handle:
        return [line.strip() for line in handle if line != '\n']
def main(fastq_path, query_ids_path):
    """Extract the named reads from *fastq_path* into a new FASTQ file.

    Builds/loads the faidx index for the input, looks up each id from
    *query_ids_path*, and copies the corresponding sequence/quality bytes
    out of a memory-mapped input file into
    "<fastq_stem>_<ids_stem>.fastq".
    """
    faidx_path = build_index(fastq_path)
    name_to_offset, index_elements = load_fastq_index(faidx_path=faidx_path)
    queries = load_query_ids(query_ids_path=query_ids_path)

    output_path = os.path.splitext(fastq_path)[0] + "_" + os.path.splitext(os.path.basename(query_ids_path))[0] + ".fastq"
    sys.stderr.write("Writing to: " + output_path + '\n')

    with open(fastq_path, 'rb') as input_file, open(output_path, 'wb') as output_file:
        mm = mmap.mmap(input_file.fileno(), 0, prot=mmap.PROT_READ)
        try:
            for name in queries:
                print("fetching %s" % name)

                if name in name_to_offset:
                    offset_index = name_to_offset[name]
                else:
                    exit("ERROR: read name not found in fastq index")

                if offset_index < len(index_elements):
                    index_element = index_elements[offset_index]
                else:
                    # BUG FIX: offset_index is an int; the original concatenated
                    # it into the message directly, which raised TypeError
                    # instead of printing the intended error and exiting.
                    exit("ERROR: attempted to access fastq index element " + str(offset_index) + " which is greater than the "
                         "size of the list of indexes")

                s = extract_bytes_from_file(mmap_file_object=mm,
                                            offset=index_element.sequence_offset,
                                            n_bytes=index_element.length)

                q = extract_bytes_from_file(mmap_file_object=mm,
                                            offset=index_element.quality_offset,
                                            n_bytes=index_element.length)

                # Re-emit a standard 4-line FASTQ record: @name / seq / + / qual.
                output_file.write(b'@')
                output_file.write(name.encode('utf-8'))
                output_file.write(b'\n')
                output_file.write(s)
                output_file.write(b'\n')
                output_file.write(b'+')
                output_file.write(b'\n')
                output_file.write(q)
                output_file.write(b'\n')
        finally:
            # Release the mapping deterministically instead of waiting for GC.
            mm.close()

    return
if __name__ == "__main__":
    # Command-line entry point: wire the --fastq/--ids arguments into main().
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--fastq",
        type=str,
        required=True,
        help="path of file containing FASTA/FASTQ sequence"
    )
    parser.add_argument(
        "--ids",
        type=str,
        required=True,
        help="path of file containing 1 id per line to be queried"
    )
    args = parser.parse_args()

    main(
        fastq_path=args.fastq,
        query_ids_path=args.ids,
    )
| 2.640625
| 3
|
src/test/sharestore_lib/admin/ttypes.py
|
daimashusheng/SHAREdis
| 0
|
12774475
|
<reponame>daimashusheng/SHAREdis
#
# Autogenerated by Thrift Compiler (0.11.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec
import sys
from thrift.transport import TTransport
all_structs = []
class AdminErrorCode(object):
    """Thrift-generated enum of admin-service error codes."""
    DB_NOT_FOUND = 1
    DB_EXIST = 2
    INVALID_DB_ROLE = 3
    INVALID_UPSTREAM = 4
    DB_ADMIN_ERROR = 5
    DB_ERROR = 6

    # Lookup tables converting between wire values and symbolic names.
    _VALUES_TO_NAMES = {
        1: "DB_NOT_FOUND",
        2: "DB_EXIST",
        3: "INVALID_DB_ROLE",
        4: "INVALID_UPSTREAM",
        5: "DB_ADMIN_ERROR",
        6: "DB_ERROR",
    }

    _NAMES_TO_VALUES = {
        "DB_NOT_FOUND": 1,
        "DB_EXIST": 2,
        "INVALID_DB_ROLE": 3,
        "INVALID_UPSTREAM": 4,
        "DB_ADMIN_ERROR": 5,
        "DB_ERROR": 6,
    }
class DBMetaData(object):
    """
    Thrift-generated struct describing where a database lives.

    Attributes:
     - db_name: database name (required per validate())
     - s3_bucket: optional S3 bucket
     - s3_path: optional path within the bucket
    """

    def __init__(self, db_name=None, s3_bucket=None, s3_path=None,):
        self.db_name = db_name
        self.s3_bucket = s3_bucket
        self.s3_path = s3_path

    def read(self, iprot):
        """Deserialize this struct from *iprot*; unknown fields are skipped."""
        # Fast path: delegate to the accelerated C decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.db_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.s3_bucket = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.s3_path = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; None-valued fields are omitted."""
        # Fast path: delegate to the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('DBMetaData')
        if self.db_name is not None:
            oprot.writeFieldBegin('db_name', TType.STRING, 1)
            oprot.writeString(self.db_name.encode('utf-8') if sys.version_info[0] == 2 else self.db_name)
            oprot.writeFieldEnd()
        if self.s3_bucket is not None:
            oprot.writeFieldBegin('s3_bucket', TType.STRING, 2)
            oprot.writeString(self.s3_bucket.encode('utf-8') if sys.version_info[0] == 2 else self.s3_bucket)
            oprot.writeFieldEnd()
        if self.s3_path is not None:
            oprot.writeFieldBegin('s3_path', TType.STRING, 3)
            oprot.writeString(self.s3_path.encode('utf-8') if sys.version_info[0] == 2 else self.s3_path)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if a required field is unset."""
        if self.db_name is None:
            raise TProtocolException(message='Required field db_name is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        # Value semantics: equal iff same class and identical attribute dicts.
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class AdminException(TException):
    """
    Thrift-generated exception raised by the admin service.

    Attributes:
     - message: human-readable error description (required)
     - errorCode: numeric code (see AdminErrorCode; required)
    """

    def __init__(self, message=None, errorCode=None,):
        self.message = message
        self.errorCode = errorCode

    def read(self, iprot):
        """Deserialize this exception from *iprot*; unknown fields are skipped."""
        # Fast path: delegate to the accelerated C decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.message = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I32:
                    self.errorCode = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this exception to *oprot*; None-valued fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('AdminException')
        if self.message is not None:
            oprot.writeFieldBegin('message', TType.STRING, 1)
            oprot.writeString(self.message.encode('utf-8') if sys.version_info[0] == 2 else self.message)
            oprot.writeFieldEnd()
        if self.errorCode is not None:
            oprot.writeFieldBegin('errorCode', TType.I32, 2)
            oprot.writeI32(self.errorCode)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if a required field is unset."""
        if self.message is None:
            raise TProtocolException(message='Required field message is unset!')
        if self.errorCode is None:
            raise TProtocolException(message='Required field errorCode is unset!')
        return

    def __str__(self):
        # Exceptions print their full repr for easier debugging.
        return repr(self)

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        # Value semantics: equal iff same class and identical attribute dicts.
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class AddDBRequest(object):
    """
    Thrift-generated request struct for adding a database.

    Attributes:
     - db_name: database name (required)
     - upstream_ip: upstream host address (required)
     - overwrite: whether to overwrite an existing DB (defaults to False)
    """

    def __init__(self, db_name=None, upstream_ip=None, overwrite=False,):
        self.db_name = db_name
        self.upstream_ip = upstream_ip
        self.overwrite = overwrite

    def read(self, iprot):
        """Deserialize this struct from *iprot*; unknown fields are skipped."""
        # Fast path: delegate to the accelerated C decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.db_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.upstream_ip = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.BOOL:
                    self.overwrite = iprot.readBool()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; None-valued fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('AddDBRequest')
        if self.db_name is not None:
            oprot.writeFieldBegin('db_name', TType.STRING, 1)
            oprot.writeString(self.db_name.encode('utf-8') if sys.version_info[0] == 2 else self.db_name)
            oprot.writeFieldEnd()
        if self.upstream_ip is not None:
            oprot.writeFieldBegin('upstream_ip', TType.STRING, 2)
            oprot.writeString(self.upstream_ip.encode('utf-8') if sys.version_info[0] == 2 else self.upstream_ip)
            oprot.writeFieldEnd()
        if self.overwrite is not None:
            oprot.writeFieldBegin('overwrite', TType.BOOL, 3)
            oprot.writeBool(self.overwrite)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if a required field is unset."""
        if self.db_name is None:
            raise TProtocolException(message='Required field db_name is unset!')
        if self.upstream_ip is None:
            raise TProtocolException(message='Required field upstream_ip is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        # Value semantics: equal iff same class and identical attribute dicts.
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class AddDBResponse(object):
    """Thrift-generated empty response struct (no fields)."""

    def read(self, iprot):
        """Deserialize from *iprot*; all fields are skipped (struct has none)."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize to *oprot* (writes only the struct envelope)."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('AddDBResponse')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields to check."""
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        # Value semantics: equal iff same class and identical attribute dicts.
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class BackupDBRequest(object):
    """
    Thrift-generated request struct for backing a database up to HDFS.

    Attributes:
     - db_name: database name (required)
     - hdfs_backup_dir: HDFS destination directory (required)
     - limit_mbs: throughput limit (defaults to 0)
    """

    def __init__(self, db_name=None, hdfs_backup_dir=None, limit_mbs=0,):
        self.db_name = db_name
        self.hdfs_backup_dir = hdfs_backup_dir
        self.limit_mbs = limit_mbs

    def read(self, iprot):
        """Deserialize this struct from *iprot*; unknown fields are skipped."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.db_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.hdfs_backup_dir = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.I32:
                    self.limit_mbs = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; None-valued fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('BackupDBRequest')
        if self.db_name is not None:
            oprot.writeFieldBegin('db_name', TType.STRING, 1)
            oprot.writeString(self.db_name.encode('utf-8') if sys.version_info[0] == 2 else self.db_name)
            oprot.writeFieldEnd()
        if self.hdfs_backup_dir is not None:
            oprot.writeFieldBegin('hdfs_backup_dir', TType.STRING, 2)
            oprot.writeString(self.hdfs_backup_dir.encode('utf-8') if sys.version_info[0] == 2 else self.hdfs_backup_dir)
            oprot.writeFieldEnd()
        if self.limit_mbs is not None:
            oprot.writeFieldBegin('limit_mbs', TType.I32, 3)
            oprot.writeI32(self.limit_mbs)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if a required field is unset."""
        if self.db_name is None:
            raise TProtocolException(message='Required field db_name is unset!')
        if self.hdfs_backup_dir is None:
            raise TProtocolException(message='Required field hdfs_backup_dir is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        # Value semantics: equal iff same class and identical attribute dicts.
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class BackupDBResponse(object):
    """Thrift-generated empty response struct (no fields)."""

    def read(self, iprot):
        """Deserialize from *iprot*; all fields are skipped (struct has none)."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize to *oprot* (writes only the struct envelope)."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('BackupDBResponse')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields to check."""
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        # Value semantics: equal iff same class and identical attribute dicts.
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class RestoreDBRequest(object):
    """
    Thrift-generated request struct for restoring a database from HDFS.

    Attributes:
     - db_name: database name (required)
     - hdfs_backup_dir: HDFS source directory (required)
     - upstream_ip: upstream host address (required)
     - upstream_port: upstream port (required)
     - limit_mbs: throughput limit (defaults to 0)
     - db_role: optional role for the restored DB
    """

    def __init__(self, db_name=None, hdfs_backup_dir=None, upstream_ip=None, upstream_port=None, limit_mbs=0, db_role=None,):
        self.db_name = db_name
        self.hdfs_backup_dir = hdfs_backup_dir
        self.upstream_ip = upstream_ip
        self.upstream_port = upstream_port
        self.limit_mbs = limit_mbs
        self.db_role = db_role

    def read(self, iprot):
        """Deserialize this struct from *iprot*; unknown fields are skipped."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.db_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.hdfs_backup_dir = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.upstream_ip = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.I16:
                    self.upstream_port = iprot.readI16()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.I32:
                    self.limit_mbs = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 6:
                if ftype == TType.STRING:
                    self.db_role = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; None-valued fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('RestoreDBRequest')
        if self.db_name is not None:
            oprot.writeFieldBegin('db_name', TType.STRING, 1)
            oprot.writeString(self.db_name.encode('utf-8') if sys.version_info[0] == 2 else self.db_name)
            oprot.writeFieldEnd()
        if self.hdfs_backup_dir is not None:
            oprot.writeFieldBegin('hdfs_backup_dir', TType.STRING, 2)
            oprot.writeString(self.hdfs_backup_dir.encode('utf-8') if sys.version_info[0] == 2 else self.hdfs_backup_dir)
            oprot.writeFieldEnd()
        if self.upstream_ip is not None:
            oprot.writeFieldBegin('upstream_ip', TType.STRING, 3)
            oprot.writeString(self.upstream_ip.encode('utf-8') if sys.version_info[0] == 2 else self.upstream_ip)
            oprot.writeFieldEnd()
        if self.upstream_port is not None:
            oprot.writeFieldBegin('upstream_port', TType.I16, 4)
            oprot.writeI16(self.upstream_port)
            oprot.writeFieldEnd()
        if self.limit_mbs is not None:
            oprot.writeFieldBegin('limit_mbs', TType.I32, 5)
            oprot.writeI32(self.limit_mbs)
            oprot.writeFieldEnd()
        if self.db_role is not None:
            oprot.writeFieldBegin('db_role', TType.STRING, 6)
            oprot.writeString(self.db_role.encode('utf-8') if sys.version_info[0] == 2 else self.db_role)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if a required field is unset."""
        if self.db_name is None:
            raise TProtocolException(message='Required field db_name is unset!')
        if self.hdfs_backup_dir is None:
            raise TProtocolException(message='Required field hdfs_backup_dir is unset!')
        if self.upstream_ip is None:
            raise TProtocolException(message='Required field upstream_ip is unset!')
        if self.upstream_port is None:
            raise TProtocolException(message='Required field upstream_port is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        # Value semantics: equal iff same class and identical attribute dicts.
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class RestoreDBResponse(object):
    """Thrift-generated empty response struct (no fields)."""

    def read(self, iprot):
        """Deserialize from *iprot*; all fields are skipped (struct has none)."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize to *oprot* (writes only the struct envelope)."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('RestoreDBResponse')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields to check."""
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        # Value semantics: equal iff same class and identical attribute dicts.
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class BackupDBToObjectRequest(object):
    """
    Thrift-generated request struct for backing a database up to object storage.

    Attributes:
     - db_name: database name (required)
     - bucket: destination bucket (required)
     - backup_dir: destination directory/prefix (required)
     - limit_mbs: throughput limit (defaults to 0)
     - platform: optional object-storage platform identifier
     - region: optional storage region
    """

    def __init__(self, db_name=None, bucket=None, backup_dir=None, limit_mbs=0, platform=None, region=None,):
        self.db_name = db_name
        self.bucket = bucket
        self.backup_dir = backup_dir
        self.limit_mbs = limit_mbs
        self.platform = platform
        self.region = region

    def read(self, iprot):
        """Deserialize this struct from *iprot*; unknown fields are skipped."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.db_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.bucket = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.backup_dir = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.I32:
                    self.limit_mbs = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.STRING:
                    self.platform = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 6:
                if ftype == TType.STRING:
                    self.region = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; None-valued fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('BackupDBToObjectRequest')
        if self.db_name is not None:
            oprot.writeFieldBegin('db_name', TType.STRING, 1)
            oprot.writeString(self.db_name.encode('utf-8') if sys.version_info[0] == 2 else self.db_name)
            oprot.writeFieldEnd()
        if self.bucket is not None:
            oprot.writeFieldBegin('bucket', TType.STRING, 2)
            oprot.writeString(self.bucket.encode('utf-8') if sys.version_info[0] == 2 else self.bucket)
            oprot.writeFieldEnd()
        if self.backup_dir is not None:
            oprot.writeFieldBegin('backup_dir', TType.STRING, 3)
            oprot.writeString(self.backup_dir.encode('utf-8') if sys.version_info[0] == 2 else self.backup_dir)
            oprot.writeFieldEnd()
        if self.limit_mbs is not None:
            oprot.writeFieldBegin('limit_mbs', TType.I32, 4)
            oprot.writeI32(self.limit_mbs)
            oprot.writeFieldEnd()
        if self.platform is not None:
            oprot.writeFieldBegin('platform', TType.STRING, 5)
            oprot.writeString(self.platform.encode('utf-8') if sys.version_info[0] == 2 else self.platform)
            oprot.writeFieldEnd()
        if self.region is not None:
            oprot.writeFieldBegin('region', TType.STRING, 6)
            oprot.writeString(self.region.encode('utf-8') if sys.version_info[0] == 2 else self.region)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if a required field is unset."""
        if self.db_name is None:
            raise TProtocolException(message='Required field db_name is unset!')
        if self.bucket is None:
            raise TProtocolException(message='Required field bucket is unset!')
        if self.backup_dir is None:
            raise TProtocolException(message='Required field backup_dir is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        # Value semantics: equal iff same class and identical attribute dicts.
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class BackupDBToObjectResponse(object):
    """Thrift-generated empty response struct (no fields)."""

    def read(self, iprot):
        """Deserialize from *iprot*; all fields are skipped (struct has none)."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize to *oprot* (writes only the struct envelope)."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('BackupDBToObjectResponse')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields to check."""
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        # Value semantics: equal iff same class and identical attribute dicts.
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class RestoreDBFromObjectRequest(object):
    """
    Thrift-generated request struct for restoring a database from object storage.

    Attributes:
     - db_name: database name (required)
     - bucket: source bucket (required)
     - backup_dir: source directory/prefix (required)
     - upstream_ip: upstream host address (required)
     - upstream_port: upstream port (required)
     - limit_mbs: throughput limit (defaults to 0)
     - db_role: optional role for the restored DB
     - platform: optional object-storage platform identifier
     - region: optional storage region
    """

    def __init__(self, db_name=None, bucket=None, backup_dir=None, upstream_ip=None, upstream_port=None, limit_mbs=0, db_role=None, platform=None, region=None,):
        self.db_name = db_name
        self.bucket = bucket
        self.backup_dir = backup_dir
        self.upstream_ip = upstream_ip
        self.upstream_port = upstream_port
        self.limit_mbs = limit_mbs
        self.db_role = db_role
        self.platform = platform
        self.region = region

    def read(self, iprot):
        """Deserialize this struct from *iprot*; unknown fields are skipped."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.db_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.bucket = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.backup_dir = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRING:
                    self.upstream_ip = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.I16:
                    self.upstream_port = iprot.readI16()
                else:
                    iprot.skip(ftype)
            elif fid == 6:
                if ftype == TType.I32:
                    self.limit_mbs = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 7:
                if ftype == TType.STRING:
                    self.db_role = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 8:
                if ftype == TType.STRING:
                    self.platform = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 9:
                if ftype == TType.STRING:
                    self.region = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; None-valued fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('RestoreDBFromObjectRequest')
        if self.db_name is not None:
            oprot.writeFieldBegin('db_name', TType.STRING, 1)
            oprot.writeString(self.db_name.encode('utf-8') if sys.version_info[0] == 2 else self.db_name)
            oprot.writeFieldEnd()
        if self.bucket is not None:
            oprot.writeFieldBegin('bucket', TType.STRING, 2)
            oprot.writeString(self.bucket.encode('utf-8') if sys.version_info[0] == 2 else self.bucket)
            oprot.writeFieldEnd()
        if self.backup_dir is not None:
            oprot.writeFieldBegin('backup_dir', TType.STRING, 3)
            oprot.writeString(self.backup_dir.encode('utf-8') if sys.version_info[0] == 2 else self.backup_dir)
            oprot.writeFieldEnd()
        if self.upstream_ip is not None:
            oprot.writeFieldBegin('upstream_ip', TType.STRING, 4)
            oprot.writeString(self.upstream_ip.encode('utf-8') if sys.version_info[0] == 2 else self.upstream_ip)
            oprot.writeFieldEnd()
        if self.upstream_port is not None:
            oprot.writeFieldBegin('upstream_port', TType.I16, 5)
            oprot.writeI16(self.upstream_port)
            oprot.writeFieldEnd()
        if self.limit_mbs is not None:
            oprot.writeFieldBegin('limit_mbs', TType.I32, 6)
            oprot.writeI32(self.limit_mbs)
            oprot.writeFieldEnd()
        if self.db_role is not None:
            oprot.writeFieldBegin('db_role', TType.STRING, 7)
            oprot.writeString(self.db_role.encode('utf-8') if sys.version_info[0] == 2 else self.db_role)
            oprot.writeFieldEnd()
        if self.platform is not None:
            oprot.writeFieldBegin('platform', TType.STRING, 8)
            oprot.writeString(self.platform.encode('utf-8') if sys.version_info[0] == 2 else self.platform)
            oprot.writeFieldEnd()
        if self.region is not None:
            oprot.writeFieldBegin('region', TType.STRING, 9)
            oprot.writeString(self.region.encode('utf-8') if sys.version_info[0] == 2 else self.region)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if a required field is unset."""
        if self.db_name is None:
            raise TProtocolException(message='Required field db_name is unset!')
        if self.bucket is None:
            raise TProtocolException(message='Required field bucket is unset!')
        if self.backup_dir is None:
            raise TProtocolException(message='Required field backup_dir is unset!')
        if self.upstream_ip is None:
            raise TProtocolException(message='Required field upstream_ip is unset!')
        if self.upstream_port is None:
            raise TProtocolException(message='Required field upstream_port is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        # Value semantics: equal iff same class and identical attribute dicts.
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class RestoreDBFromObjectResponse(object):
    """Thrift-generated empty response struct (no fields)."""

    def read(self, iprot):
        """Deserialize from *iprot*; all fields are skipped (struct has none)."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize to *oprot* (writes only the struct envelope)."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('RestoreDBFromObjectResponse')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields to check."""
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        # Value semantics: equal iff same class and identical attribute dicts.
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class CloseDBRequest(object):
    """
    Thrift-generated request struct for closing a database.

    Attributes:
     - db_name: database name (required)
    """

    def __init__(self, db_name=None,):
        self.db_name = db_name

    def read(self, iprot):
        """Deserialize this struct from *iprot*; unknown fields are skipped."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.db_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; None-valued fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('CloseDBRequest')
        if self.db_name is not None:
            oprot.writeFieldBegin('db_name', TType.STRING, 1)
            oprot.writeString(self.db_name.encode('utf-8') if sys.version_info[0] == 2 else self.db_name)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if a required field is unset."""
        if self.db_name is None:
            raise TProtocolException(message='Required field db_name is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        # Value semantics: equal iff same class and identical attribute dicts.
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class CloseDBResponse(object):
    """Empty acknowledgement struct for a CloseDB call."""

    def read(self, iprot):
        """Deserialize (and discard all unknown fields) from *iprot*."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (_, field_type, _fid) = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this (field-less) struct onto *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('CloseDBResponse')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields.
        return

    def __repr__(self):
        pairs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, pairs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class CheckDBRequest(object):
    """Request payload naming the DB instance to health-check.

    Attributes:
     - db_name: name of the DB to check (required, thrift field 1)
    """

    def __init__(self, db_name=None,):
        self.db_name = db_name

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (_, field_type, field_id) = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.STRING:
                raw = iprot.readString()
                self.db_name = raw.decode('utf-8') if sys.version_info[0] == 2 else raw
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct onto *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('CheckDBRequest')
        if self.db_name is not None:
            oprot.writeFieldBegin('db_name', TType.STRING, 1)
            value = self.db_name.encode('utf-8') if sys.version_info[0] == 2 else self.db_name
            oprot.writeString(value)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if the required field is unset."""
        if self.db_name is None:
            raise TProtocolException(message='Required field db_name is unset!')
        return

    def __repr__(self):
        pairs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, pairs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class CheckDBResponse(object):
    """Health-check reply carrying replication progress for one DB.

    Attributes:
     - seq_num: last applied sequence number (field 1, default 0)
     - wal_ttl_seconds: WAL time-to-live in seconds (field 2, default 0)
     - last_update_timestamp_ms: last-write timestamp in ms (field 3, default 0)
     - is_master: whether this replica is the master (field 4, default False)
    """

    def __init__(self, seq_num=0, wal_ttl_seconds=0, last_update_timestamp_ms=0, is_master=False,):
        self.seq_num = seq_num
        self.wal_ttl_seconds = wal_ttl_seconds
        self.last_update_timestamp_ms = last_update_timestamp_ms
        self.is_master = is_master

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (_, field_type, field_id) = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.I64:
                self.seq_num = iprot.readI64()
            elif field_id == 2 and field_type == TType.I64:
                self.wal_ttl_seconds = iprot.readI64()
            elif field_id == 3 and field_type == TType.I64:
                self.last_update_timestamp_ms = iprot.readI64()
            elif field_id == 4 and field_type == TType.BOOL:
                self.is_master = iprot.readBool()
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct onto *oprot* in ascending field-id order."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('CheckDBResponse')
        if self.seq_num is not None:
            oprot.writeFieldBegin('seq_num', TType.I64, 1)
            oprot.writeI64(self.seq_num)
            oprot.writeFieldEnd()
        if self.wal_ttl_seconds is not None:
            oprot.writeFieldBegin('wal_ttl_seconds', TType.I64, 2)
            oprot.writeI64(self.wal_ttl_seconds)
            oprot.writeFieldEnd()
        if self.last_update_timestamp_ms is not None:
            oprot.writeFieldBegin('last_update_timestamp_ms', TType.I64, 3)
            oprot.writeI64(self.last_update_timestamp_ms)
            oprot.writeFieldEnd()
        if self.is_master is not None:
            oprot.writeFieldBegin('is_master', TType.BOOL, 4)
            oprot.writeBool(self.is_master)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # All fields are optional with defaults.
        return

    def __repr__(self):
        pairs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, pairs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class ChangeDBRoleAndUpstreamRequest(object):
    """Request to change a DB's replication role and (optionally) its upstream.

    Attributes:
     - db_name: target DB name (required, field 1)
     - new_role: new replication role string (required, field 2)
     - upstream_ip: optional upstream host IP (field 3)
     - upstream_port: optional upstream port, 16-bit int (field 4)
    """

    def __init__(self, db_name=None, new_role=None, upstream_ip=None, upstream_port=None,):
        self.db_name = db_name
        self.new_role = new_role
        self.upstream_ip = upstream_ip
        self.upstream_port = upstream_port

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (_, field_type, field_id) = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.STRING:
                raw = iprot.readString()
                self.db_name = raw.decode('utf-8') if sys.version_info[0] == 2 else raw
            elif field_id == 2 and field_type == TType.STRING:
                raw = iprot.readString()
                self.new_role = raw.decode('utf-8') if sys.version_info[0] == 2 else raw
            elif field_id == 3 and field_type == TType.STRING:
                raw = iprot.readString()
                self.upstream_ip = raw.decode('utf-8') if sys.version_info[0] == 2 else raw
            elif field_id == 4 and field_type == TType.I16:
                self.upstream_port = iprot.readI16()
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct onto *oprot* in ascending field-id order."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('ChangeDBRoleAndUpstreamRequest')
        if self.db_name is not None:
            oprot.writeFieldBegin('db_name', TType.STRING, 1)
            value = self.db_name.encode('utf-8') if sys.version_info[0] == 2 else self.db_name
            oprot.writeString(value)
            oprot.writeFieldEnd()
        if self.new_role is not None:
            oprot.writeFieldBegin('new_role', TType.STRING, 2)
            value = self.new_role.encode('utf-8') if sys.version_info[0] == 2 else self.new_role
            oprot.writeString(value)
            oprot.writeFieldEnd()
        if self.upstream_ip is not None:
            oprot.writeFieldBegin('upstream_ip', TType.STRING, 3)
            value = self.upstream_ip.encode('utf-8') if sys.version_info[0] == 2 else self.upstream_ip
            oprot.writeString(value)
            oprot.writeFieldEnd()
        if self.upstream_port is not None:
            oprot.writeFieldBegin('upstream_port', TType.I16, 4)
            oprot.writeI16(self.upstream_port)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if a required field is unset."""
        if self.db_name is None:
            raise TProtocolException(message='Required field db_name is unset!')
        if self.new_role is None:
            raise TProtocolException(message='Required field new_role is unset!')
        return

    def __repr__(self):
        pairs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, pairs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class ChangeDBRoleAndUpstreamResponse(object):
    """Empty acknowledgement struct for ChangeDBRoleAndUpstream."""

    def read(self, iprot):
        """Deserialize (and discard all unknown fields) from *iprot*."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (_, field_type, _fid) = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this (field-less) struct onto *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('ChangeDBRoleAndUpstreamResponse')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields.
        return

    def __repr__(self):
        pairs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, pairs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class GetSequenceNumberRequest(object):
    """Request payload naming the DB whose sequence number is wanted.

    Attributes:
     - db_name: target DB name (required, thrift field 1)
    """

    def __init__(self, db_name=None,):
        self.db_name = db_name

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (_, field_type, field_id) = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.STRING:
                raw = iprot.readString()
                self.db_name = raw.decode('utf-8') if sys.version_info[0] == 2 else raw
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct onto *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('GetSequenceNumberRequest')
        if self.db_name is not None:
            oprot.writeFieldBegin('db_name', TType.STRING, 1)
            value = self.db_name.encode('utf-8') if sys.version_info[0] == 2 else self.db_name
            oprot.writeString(value)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if the required field is unset."""
        if self.db_name is None:
            raise TProtocolException(message='Required field db_name is unset!')
        return

    def __repr__(self):
        pairs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, pairs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class GetSequenceNumberResponse(object):
    """Reply carrying a DB's current sequence number.

    Attributes:
     - seq_num: the sequence number (required, thrift field 1, I64)
    """

    def __init__(self, seq_num=None,):
        self.seq_num = seq_num

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (_, field_type, field_id) = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.I64:
                self.seq_num = iprot.readI64()
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct onto *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('GetSequenceNumberResponse')
        if self.seq_num is not None:
            oprot.writeFieldBegin('seq_num', TType.I64, 1)
            oprot.writeI64(self.seq_num)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if the required field is unset."""
        if self.seq_num is None:
            raise TProtocolException(message='Required field seq_num is unset!')
        return

    def __repr__(self):
        pairs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, pairs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class ClearDBRequest(object):
    """Request to wipe a DB's contents.

    Attributes:
     - db_name: target DB name (required, field 1)
     - reopen_db: whether to reopen after clearing (field 2, default True)
    """

    def __init__(self, db_name=None, reopen_db=True,):
        self.db_name = db_name
        self.reopen_db = reopen_db

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (_, field_type, field_id) = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.STRING:
                raw = iprot.readString()
                self.db_name = raw.decode('utf-8') if sys.version_info[0] == 2 else raw
            elif field_id == 2 and field_type == TType.BOOL:
                self.reopen_db = iprot.readBool()
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct onto *oprot* in ascending field-id order."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('ClearDBRequest')
        if self.db_name is not None:
            oprot.writeFieldBegin('db_name', TType.STRING, 1)
            value = self.db_name.encode('utf-8') if sys.version_info[0] == 2 else self.db_name
            oprot.writeString(value)
            oprot.writeFieldEnd()
        if self.reopen_db is not None:
            oprot.writeFieldBegin('reopen_db', TType.BOOL, 2)
            oprot.writeBool(self.reopen_db)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if the required field is unset."""
        if self.db_name is None:
            raise TProtocolException(message='Required field db_name is unset!')
        return

    def __repr__(self):
        pairs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, pairs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class ClearDBResponse(object):
    """Empty acknowledgement struct for ClearDB."""

    def read(self, iprot):
        """Deserialize (and discard all unknown fields) from *iprot*."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (_, field_type, _fid) = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this (field-less) struct onto *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('ClearDBResponse')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields.
        return

    def __repr__(self):
        pairs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, pairs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class AddS3SstFilesToDBRequest(object):
    """Request to ingest SST files from S3 into a DB.

    Attributes:
     - db_name: target DB name (required, field 1)
     - s3_bucket: source S3 bucket (required, field 2)
     - s3_path: source S3 path/prefix (required, field 3)
     - s3_download_limit_mb: download rate limit in MB (field 4, default 64)
     - region: optional S3 region (field 5)
     - overlapping: whether key ranges may overlap (field 6, default True)
     - should_compact: compact after ingesting (field 7, default True)
     - ingest_behind: ingest behind existing data (field 8, default False)
    """

    def __init__(self, db_name=None, s3_bucket=None, s3_path=None, s3_download_limit_mb=64, region=None, overlapping=True, should_compact=True, ingest_behind=False,):
        self.db_name = db_name
        self.s3_bucket = s3_bucket
        self.s3_path = s3_path
        self.s3_download_limit_mb = s3_download_limit_mb
        self.region = region
        self.overlapping = overlapping
        self.should_compact = should_compact
        self.ingest_behind = ingest_behind

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (_, field_type, field_id) = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.STRING:
                raw = iprot.readString()
                self.db_name = raw.decode('utf-8') if sys.version_info[0] == 2 else raw
            elif field_id == 2 and field_type == TType.STRING:
                raw = iprot.readString()
                self.s3_bucket = raw.decode('utf-8') if sys.version_info[0] == 2 else raw
            elif field_id == 3 and field_type == TType.STRING:
                raw = iprot.readString()
                self.s3_path = raw.decode('utf-8') if sys.version_info[0] == 2 else raw
            elif field_id == 4 and field_type == TType.I32:
                self.s3_download_limit_mb = iprot.readI32()
            elif field_id == 5 and field_type == TType.STRING:
                raw = iprot.readString()
                self.region = raw.decode('utf-8') if sys.version_info[0] == 2 else raw
            elif field_id == 6 and field_type == TType.BOOL:
                self.overlapping = iprot.readBool()
            elif field_id == 7 and field_type == TType.BOOL:
                self.should_compact = iprot.readBool()
            elif field_id == 8 and field_type == TType.BOOL:
                self.ingest_behind = iprot.readBool()
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct onto *oprot* in ascending field-id order."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('AddS3SstFilesToDBRequest')
        if self.db_name is not None:
            oprot.writeFieldBegin('db_name', TType.STRING, 1)
            value = self.db_name.encode('utf-8') if sys.version_info[0] == 2 else self.db_name
            oprot.writeString(value)
            oprot.writeFieldEnd()
        if self.s3_bucket is not None:
            oprot.writeFieldBegin('s3_bucket', TType.STRING, 2)
            value = self.s3_bucket.encode('utf-8') if sys.version_info[0] == 2 else self.s3_bucket
            oprot.writeString(value)
            oprot.writeFieldEnd()
        if self.s3_path is not None:
            oprot.writeFieldBegin('s3_path', TType.STRING, 3)
            value = self.s3_path.encode('utf-8') if sys.version_info[0] == 2 else self.s3_path
            oprot.writeString(value)
            oprot.writeFieldEnd()
        if self.s3_download_limit_mb is not None:
            oprot.writeFieldBegin('s3_download_limit_mb', TType.I32, 4)
            oprot.writeI32(self.s3_download_limit_mb)
            oprot.writeFieldEnd()
        if self.region is not None:
            oprot.writeFieldBegin('region', TType.STRING, 5)
            value = self.region.encode('utf-8') if sys.version_info[0] == 2 else self.region
            oprot.writeString(value)
            oprot.writeFieldEnd()
        if self.overlapping is not None:
            oprot.writeFieldBegin('overlapping', TType.BOOL, 6)
            oprot.writeBool(self.overlapping)
            oprot.writeFieldEnd()
        if self.should_compact is not None:
            oprot.writeFieldBegin('should_compact', TType.BOOL, 7)
            oprot.writeBool(self.should_compact)
            oprot.writeFieldEnd()
        if self.ingest_behind is not None:
            oprot.writeFieldBegin('ingest_behind', TType.BOOL, 8)
            oprot.writeBool(self.ingest_behind)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if any required field is unset."""
        if self.db_name is None:
            raise TProtocolException(message='Required field db_name is unset!')
        if self.s3_bucket is None:
            raise TProtocolException(message='Required field s3_bucket is unset!')
        if self.s3_path is None:
            raise TProtocolException(message='Required field s3_path is unset!')
        return

    def __repr__(self):
        pairs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, pairs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class AddS3SstFilesToDBResponse(object):
    """Empty acknowledgement struct for AddS3SstFilesToDB."""

    def read(self, iprot):
        """Deserialize (and discard all unknown fields) from *iprot*."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (_, field_type, _fid) = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this (field-less) struct onto *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('AddS3SstFilesToDBResponse')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields.
        return

    def __repr__(self):
        pairs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, pairs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class DownloadS3SstFilesRequest(object):
    """Request to download SST files from S3 for a DB (without ingesting).

    Attributes:
     - db_name: target DB name (required, field 1)
     - s3_bucket: source S3 bucket (required, field 2)
     - s3_path: source S3 path/prefix (required, field 3)
     - s3_download_limit_mb: download rate limit in MB (field 4, default 64)
     - region: optional S3 region (field 5)
    """

    def __init__(self, db_name=None, s3_bucket=None, s3_path=None, s3_download_limit_mb=64, region=None,):
        self.db_name = db_name
        self.s3_bucket = s3_bucket
        self.s3_path = s3_path
        self.s3_download_limit_mb = s3_download_limit_mb
        self.region = region

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (_, field_type, field_id) = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.STRING:
                raw = iprot.readString()
                self.db_name = raw.decode('utf-8') if sys.version_info[0] == 2 else raw
            elif field_id == 2 and field_type == TType.STRING:
                raw = iprot.readString()
                self.s3_bucket = raw.decode('utf-8') if sys.version_info[0] == 2 else raw
            elif field_id == 3 and field_type == TType.STRING:
                raw = iprot.readString()
                self.s3_path = raw.decode('utf-8') if sys.version_info[0] == 2 else raw
            elif field_id == 4 and field_type == TType.I32:
                self.s3_download_limit_mb = iprot.readI32()
            elif field_id == 5 and field_type == TType.STRING:
                raw = iprot.readString()
                self.region = raw.decode('utf-8') if sys.version_info[0] == 2 else raw
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct onto *oprot* in ascending field-id order."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('DownloadS3SstFilesRequest')
        if self.db_name is not None:
            oprot.writeFieldBegin('db_name', TType.STRING, 1)
            value = self.db_name.encode('utf-8') if sys.version_info[0] == 2 else self.db_name
            oprot.writeString(value)
            oprot.writeFieldEnd()
        if self.s3_bucket is not None:
            oprot.writeFieldBegin('s3_bucket', TType.STRING, 2)
            value = self.s3_bucket.encode('utf-8') if sys.version_info[0] == 2 else self.s3_bucket
            oprot.writeString(value)
            oprot.writeFieldEnd()
        if self.s3_path is not None:
            oprot.writeFieldBegin('s3_path', TType.STRING, 3)
            value = self.s3_path.encode('utf-8') if sys.version_info[0] == 2 else self.s3_path
            oprot.writeString(value)
            oprot.writeFieldEnd()
        if self.s3_download_limit_mb is not None:
            oprot.writeFieldBegin('s3_download_limit_mb', TType.I32, 4)
            oprot.writeI32(self.s3_download_limit_mb)
            oprot.writeFieldEnd()
        if self.region is not None:
            oprot.writeFieldBegin('region', TType.STRING, 5)
            value = self.region.encode('utf-8') if sys.version_info[0] == 2 else self.region
            oprot.writeString(value)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if any required field is unset."""
        if self.db_name is None:
            raise TProtocolException(message='Required field db_name is unset!')
        if self.s3_bucket is None:
            raise TProtocolException(message='Required field s3_bucket is unset!')
        if self.s3_path is None:
            raise TProtocolException(message='Required field s3_path is unset!')
        return

    def __repr__(self):
        pairs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, pairs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class DownloadS3SstFilesResponse(object):
    """Empty acknowledgement struct for DownloadS3SstFiles."""

    def read(self, iprot):
        """Deserialize (and discard all unknown fields) from *iprot*."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (_, field_type, _fid) = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this (field-less) struct onto *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('DownloadS3SstFilesResponse')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields.
        return

    def __repr__(self):
        pairs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, pairs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class IngestSstsToDBRequest(object):
    """Request to ingest already-downloaded SSTs into a DB.

    Attributes:
     - db_name: target DB name (required, field 1)
     - s3_bucket: optional S3 bucket (field 2)
     - s3_path: optional S3 path/prefix (field 3)
    """

    def __init__(self, db_name=None, s3_bucket=None, s3_path=None,):
        self.db_name = db_name
        self.s3_bucket = s3_bucket
        self.s3_path = s3_path

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (_, field_type, field_id) = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.STRING:
                raw = iprot.readString()
                self.db_name = raw.decode('utf-8') if sys.version_info[0] == 2 else raw
            elif field_id == 2 and field_type == TType.STRING:
                raw = iprot.readString()
                self.s3_bucket = raw.decode('utf-8') if sys.version_info[0] == 2 else raw
            elif field_id == 3 and field_type == TType.STRING:
                raw = iprot.readString()
                self.s3_path = raw.decode('utf-8') if sys.version_info[0] == 2 else raw
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct onto *oprot* in ascending field-id order."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('IngestSstsToDBRequest')
        if self.db_name is not None:
            oprot.writeFieldBegin('db_name', TType.STRING, 1)
            value = self.db_name.encode('utf-8') if sys.version_info[0] == 2 else self.db_name
            oprot.writeString(value)
            oprot.writeFieldEnd()
        if self.s3_bucket is not None:
            oprot.writeFieldBegin('s3_bucket', TType.STRING, 2)
            value = self.s3_bucket.encode('utf-8') if sys.version_info[0] == 2 else self.s3_bucket
            oprot.writeString(value)
            oprot.writeFieldEnd()
        if self.s3_path is not None:
            oprot.writeFieldBegin('s3_path', TType.STRING, 3)
            value = self.s3_path.encode('utf-8') if sys.version_info[0] == 2 else self.s3_path
            oprot.writeString(value)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if the required field is unset."""
        if self.db_name is None:
            raise TProtocolException(message='Required field db_name is unset!')
        return

    def __repr__(self):
        pairs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, pairs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class IngestSstsToDBResponse(object):
    """Empty acknowledgement struct for IngestSstsToDB."""

    def read(self, iprot):
        """Deserialize (and discard all unknown fields) from *iprot*."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (_, field_type, _fid) = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this (field-less) struct onto *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('IngestSstsToDBResponse')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields.
        return

    def __repr__(self):
        pairs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, pairs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class StartMessageIngestionRequest(object):
    """Request to start consuming a Kafka topic into a DB.

    Attributes:
     - db_name: target DB name (required, field 1)
     - topic_name: Kafka topic to consume (required, field 2)
     - kafka_broker_serverset_path: serverset path of the brokers (required, field 3)
     - replay_timestamp_ms: replay start timestamp in ms (required, field 4)
    """

    def __init__(self, db_name=None, topic_name=None, kafka_broker_serverset_path=None, replay_timestamp_ms=None,):
        self.db_name = db_name
        self.topic_name = topic_name
        self.kafka_broker_serverset_path = kafka_broker_serverset_path
        self.replay_timestamp_ms = replay_timestamp_ms

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (_, field_type, field_id) = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.STRING:
                raw = iprot.readString()
                self.db_name = raw.decode('utf-8') if sys.version_info[0] == 2 else raw
            elif field_id == 2 and field_type == TType.STRING:
                raw = iprot.readString()
                self.topic_name = raw.decode('utf-8') if sys.version_info[0] == 2 else raw
            elif field_id == 3 and field_type == TType.STRING:
                raw = iprot.readString()
                self.kafka_broker_serverset_path = raw.decode('utf-8') if sys.version_info[0] == 2 else raw
            elif field_id == 4 and field_type == TType.I64:
                self.replay_timestamp_ms = iprot.readI64()
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct onto *oprot* in ascending field-id order."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('StartMessageIngestionRequest')
        if self.db_name is not None:
            oprot.writeFieldBegin('db_name', TType.STRING, 1)
            value = self.db_name.encode('utf-8') if sys.version_info[0] == 2 else self.db_name
            oprot.writeString(value)
            oprot.writeFieldEnd()
        if self.topic_name is not None:
            oprot.writeFieldBegin('topic_name', TType.STRING, 2)
            value = self.topic_name.encode('utf-8') if sys.version_info[0] == 2 else self.topic_name
            oprot.writeString(value)
            oprot.writeFieldEnd()
        if self.kafka_broker_serverset_path is not None:
            oprot.writeFieldBegin('kafka_broker_serverset_path', TType.STRING, 3)
            value = self.kafka_broker_serverset_path.encode('utf-8') if sys.version_info[0] == 2 else self.kafka_broker_serverset_path
            oprot.writeString(value)
            oprot.writeFieldEnd()
        if self.replay_timestamp_ms is not None:
            oprot.writeFieldBegin('replay_timestamp_ms', TType.I64, 4)
            oprot.writeI64(self.replay_timestamp_ms)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if any required field is unset."""
        if self.db_name is None:
            raise TProtocolException(message='Required field db_name is unset!')
        if self.topic_name is None:
            raise TProtocolException(message='Required field topic_name is unset!')
        if self.kafka_broker_serverset_path is None:
            raise TProtocolException(message='Required field kafka_broker_serverset_path is unset!')
        if self.replay_timestamp_ms is None:
            raise TProtocolException(message='Required field replay_timestamp_ms is unset!')
        return

    def __repr__(self):
        pairs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, pairs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class StartMessageIngestionResponse(object):
    """Empty acknowledgement struct for StartMessageIngestion."""

    def read(self, iprot):
        """Deserialize (and discard all unknown fields) from *iprot*."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (_, field_type, _fid) = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this (field-less) struct onto *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('StartMessageIngestionResponse')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields.
        return

    def __repr__(self):
        pairs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, pairs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class StopMessageIngestionRequest(object):
    """
    Attributes:
     - db_name
    """

    def __init__(self, db_name=None,):
        self.db_name = db_name

    def read(self, iprot):
        # Deserialize from iprot; use the C fast-path decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.db_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; use the C fast-path encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('StopMessageIngestionRequest')
        if self.db_name is not None:
            oprot.writeFieldBegin('db_name', TType.STRING, 1)
            oprot.writeString(self.db_name.encode('utf-8') if sys.version_info[0] == 2 else self.db_name)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # db_name is declared required in the IDL.
        if self.db_name is None:
            raise TProtocolException(message='Required field db_name is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class StopMessageIngestionResponse(object):
    """Thrift result struct for StopMessageIngestion; declares no fields."""

    def read(self, iprot):
        """Deserialize from *iprot*, skipping any fields on the wire."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            # No declared fields: everything is skipped.
            iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this field-less struct to *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('StopMessageIngestionResponse')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields: always valid."""
        return

    def __repr__(self):
        pairs = ('%s=%r' % item for item in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
class SetDBOptionsRequest(object):
    """
    Attributes:
     - options
     - db_name
    """

    def __init__(self, options=None, db_name=None,):
        # options: map<string,string> of option name -> value; db_name: target DB.
        self.options = options
        self.db_name = db_name

    def read(self, iprot):
        # Deserialize from iprot; use the C fast-path decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.MAP:
                    # Read the string->string options map element by element.
                    self.options = {}
                    (_ktype1, _vtype2, _size0) = iprot.readMapBegin()
                    for _i4 in range(_size0):
                        _key5 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val6 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        self.options[_key5] = _val6
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.db_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; use the C fast-path encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('SetDBOptionsRequest')
        if self.options is not None:
            oprot.writeFieldBegin('options', TType.MAP, 1)
            oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.options))
            for kiter7, viter8 in self.options.items():
                oprot.writeString(kiter7.encode('utf-8') if sys.version_info[0] == 2 else kiter7)
                oprot.writeString(viter8.encode('utf-8') if sys.version_info[0] == 2 else viter8)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.db_name is not None:
            oprot.writeFieldBegin('db_name', TType.STRING, 2)
            oprot.writeString(self.db_name.encode('utf-8') if sys.version_info[0] == 2 else self.db_name)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Both fields are declared required in the IDL.
        if self.options is None:
            raise TProtocolException(message='Required field options is unset!')
        if self.db_name is None:
            raise TProtocolException(message='Required field db_name is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class SetDBOptionsResponse(object):
    """Thrift result struct for SetDBOptions; declares no fields."""

    def read(self, iprot):
        """Deserialize from *iprot*, skipping any fields on the wire."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            # No declared fields: everything is skipped.
            iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this field-less struct to *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('SetDBOptionsResponse')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields: always valid."""
        return

    def __repr__(self):
        pairs = ('%s=%r' % item for item in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
class CompactDBRequest(object):
    """
    Attributes:
     - db_name
    """

    def __init__(self, db_name=None,):
        self.db_name = db_name

    def read(self, iprot):
        # Deserialize from iprot; use the C fast-path decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.db_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; use the C fast-path encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('CompactDBRequest')
        if self.db_name is not None:
            oprot.writeFieldBegin('db_name', TType.STRING, 1)
            oprot.writeString(self.db_name.encode('utf-8') if sys.version_info[0] == 2 else self.db_name)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # db_name is declared required in the IDL.
        if self.db_name is None:
            raise TProtocolException(message='Required field db_name is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class CompactDBResponse(object):
    """Thrift result struct for CompactDB; declares no fields."""

    def read(self, iprot):
        """Deserialize from *iprot*, skipping any fields on the wire."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            # No declared fields: everything is skipped.
            iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this field-less struct to *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('CompactDBResponse')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields: always valid."""
        return

    def __repr__(self):
        pairs = ('%s=%r' % item for item in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
class SetDbMetaDataRequest(object):
    """
    Attributes:
     - db_name
     - s3_bucket
     - s3_path
    """

    def __init__(self, db_name=None, s3_bucket=None, s3_path=None,):
        self.db_name = db_name
        self.s3_bucket = s3_bucket
        self.s3_path = s3_path

    def read(self, iprot):
        # Deserialize from iprot; use the C fast-path decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.db_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.s3_bucket = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.s3_path = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; use the C fast-path encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('SetDbMetaDataRequest')
        if self.db_name is not None:
            oprot.writeFieldBegin('db_name', TType.STRING, 1)
            oprot.writeString(self.db_name.encode('utf-8') if sys.version_info[0] == 2 else self.db_name)
            oprot.writeFieldEnd()
        if self.s3_bucket is not None:
            oprot.writeFieldBegin('s3_bucket', TType.STRING, 2)
            oprot.writeString(self.s3_bucket.encode('utf-8') if sys.version_info[0] == 2 else self.s3_bucket)
            oprot.writeFieldEnd()
        if self.s3_path is not None:
            oprot.writeFieldBegin('s3_path', TType.STRING, 3)
            oprot.writeString(self.s3_path.encode('utf-8') if sys.version_info[0] == 2 else self.s3_path)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Only db_name is declared required; s3_bucket/s3_path are optional.
        if self.db_name is None:
            raise TProtocolException(message='Required field db_name is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class SetDbMetaDataResponse(object):
    """Thrift result struct for SetDbMetaData; declares no fields."""

    def read(self, iprot):
        """Deserialize from *iprot*, skipping any fields on the wire."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            # No declared fields: everything is skipped.
            iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this field-less struct to *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('SetDbMetaDataResponse')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields: always valid."""
        return

    def __repr__(self):
        pairs = ('%s=%r' % item for item in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
class GetDbMetaDataRequest(object):
    """
    Attributes:
     - db_name
    """

    def __init__(self, db_name=None,):
        self.db_name = db_name

    def read(self, iprot):
        # Deserialize from iprot; use the C fast-path decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.db_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; use the C fast-path encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('GetDbMetaDataRequest')
        if self.db_name is not None:
            oprot.writeFieldBegin('db_name', TType.STRING, 1)
            oprot.writeString(self.db_name.encode('utf-8') if sys.version_info[0] == 2 else self.db_name)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # db_name is declared required in the IDL.
        if self.db_name is None:
            raise TProtocolException(message='Required field db_name is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class GetDbMetaDataResponse(object):
    """
    Attributes:
     - db_name
     - s3_bucket
     - s3_path
    """

    def __init__(self, db_name=None, s3_bucket=None, s3_path=None,):
        self.db_name = db_name
        self.s3_bucket = s3_bucket
        self.s3_path = s3_path

    def read(self, iprot):
        # Deserialize from iprot; use the C fast-path decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.db_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.s3_bucket = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.s3_path = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; use the C fast-path encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('GetDbMetaDataResponse')
        if self.db_name is not None:
            oprot.writeFieldBegin('db_name', TType.STRING, 1)
            oprot.writeString(self.db_name.encode('utf-8') if sys.version_info[0] == 2 else self.db_name)
            oprot.writeFieldEnd()
        if self.s3_bucket is not None:
            oprot.writeFieldBegin('s3_bucket', TType.STRING, 2)
            oprot.writeString(self.s3_bucket.encode('utf-8') if sys.version_info[0] == 2 else self.s3_bucket)
            oprot.writeFieldEnd()
        if self.s3_path is not None:
            oprot.writeFieldBegin('s3_path', TType.STRING, 3)
            oprot.writeString(self.s3_path.encode('utf-8') if sys.version_info[0] == 2 else self.s3_path)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Only db_name is declared required; s3_bucket/s3_path are optional.
        if self.db_name is None:
            raise TProtocolException(message='Required field db_name is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class CheckLoadsstRouterRequest(object):
    """
    Attributes:
     - segment
     - version
     - partitions_count
     - replicas_count
    """

    def __init__(self, segment=None, version=None, partitions_count=None, replicas_count=None,):
        self.segment = segment
        self.version = version
        self.partitions_count = partitions_count
        self.replicas_count = replicas_count

    def read(self, iprot):
        # Deserialize from iprot; use the C fast-path decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.segment = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I32:
                    self.version = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.I32:
                    self.partitions_count = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.I32:
                    self.replicas_count = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; use the C fast-path encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('CheckLoadsstRouterRequest')
        if self.segment is not None:
            oprot.writeFieldBegin('segment', TType.STRING, 1)
            oprot.writeString(self.segment.encode('utf-8') if sys.version_info[0] == 2 else self.segment)
            oprot.writeFieldEnd()
        if self.version is not None:
            oprot.writeFieldBegin('version', TType.I32, 2)
            oprot.writeI32(self.version)
            oprot.writeFieldEnd()
        if self.partitions_count is not None:
            oprot.writeFieldBegin('partitions_count', TType.I32, 3)
            oprot.writeI32(self.partitions_count)
            oprot.writeFieldEnd()
        if self.replicas_count is not None:
            oprot.writeFieldBegin('replicas_count', TType.I32, 4)
            oprot.writeI32(self.replicas_count)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # All four fields are declared required in the IDL.
        if self.segment is None:
            raise TProtocolException(message='Required field segment is unset!')
        if self.version is None:
            raise TProtocolException(message='Required field version is unset!')
        if self.partitions_count is None:
            raise TProtocolException(message='Required field partitions_count is unset!')
        if self.replicas_count is None:
            raise TProtocolException(message='Required field replicas_count is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class CheckLoadsstRouterResponse(object):
    """
    Attributes:
     - status
    """

    def __init__(self, status=None,):
        # status: boolean outcome of the CheckLoadsstRouter call.
        self.status = status

    def read(self, iprot):
        # Deserialize from iprot; use the C fast-path decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.BOOL:
                    self.status = iprot.readBool()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; use the C fast-path encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('CheckLoadsstRouterResponse')
        if self.status is not None:
            oprot.writeFieldBegin('status', TType.BOOL, 1)
            oprot.writeBool(self.status)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # status is declared required in the IDL.
        if self.status is None:
            raise TProtocolException(message='Required field status is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class MigrateSSTToObjectRequest(object):
    """
    Attributes:
     - db_name
     - bucket
     - backup_dir
     - limit_mbs
     - platform
     - region
    """

    def __init__(self, db_name=None, bucket=None, backup_dir=None, limit_mbs=0, platform=None, region=None,):
        # limit_mbs defaults to 0 (see thrift_spec); other fields default to unset.
        self.db_name = db_name
        self.bucket = bucket
        self.backup_dir = backup_dir
        self.limit_mbs = limit_mbs
        self.platform = platform
        self.region = region

    def read(self, iprot):
        # Deserialize from iprot; use the C fast-path decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.db_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.bucket = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.backup_dir = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.I32:
                    self.limit_mbs = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.STRING:
                    self.platform = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 6:
                if ftype == TType.STRING:
                    self.region = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; use the C fast-path encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('MigrateSSTToObjectRequest')
        if self.db_name is not None:
            oprot.writeFieldBegin('db_name', TType.STRING, 1)
            oprot.writeString(self.db_name.encode('utf-8') if sys.version_info[0] == 2 else self.db_name)
            oprot.writeFieldEnd()
        if self.bucket is not None:
            oprot.writeFieldBegin('bucket', TType.STRING, 2)
            oprot.writeString(self.bucket.encode('utf-8') if sys.version_info[0] == 2 else self.bucket)
            oprot.writeFieldEnd()
        if self.backup_dir is not None:
            oprot.writeFieldBegin('backup_dir', TType.STRING, 3)
            oprot.writeString(self.backup_dir.encode('utf-8') if sys.version_info[0] == 2 else self.backup_dir)
            oprot.writeFieldEnd()
        if self.limit_mbs is not None:
            oprot.writeFieldBegin('limit_mbs', TType.I32, 4)
            oprot.writeI32(self.limit_mbs)
            oprot.writeFieldEnd()
        if self.platform is not None:
            oprot.writeFieldBegin('platform', TType.STRING, 5)
            oprot.writeString(self.platform.encode('utf-8') if sys.version_info[0] == 2 else self.platform)
            oprot.writeFieldEnd()
        if self.region is not None:
            oprot.writeFieldBegin('region', TType.STRING, 6)
            oprot.writeString(self.region.encode('utf-8') if sys.version_info[0] == 2 else self.region)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # db_name, bucket and backup_dir are declared required in the IDL.
        if self.db_name is None:
            raise TProtocolException(message='Required field db_name is unset!')
        if self.bucket is None:
            raise TProtocolException(message='Required field bucket is unset!')
        if self.backup_dir is None:
            raise TProtocolException(message='Required field backup_dir is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class MigrateSSTToObjectResponse(object):
    """Thrift result struct for MigrateSSTToObject; declares no fields."""

    def read(self, iprot):
        """Deserialize from *iprot*, skipping any fields on the wire."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            # No declared fields: everything is skipped.
            iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this field-less struct to *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('MigrateSSTToObjectResponse')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields: always valid."""
        return

    def __repr__(self):
        pairs = ('%s=%r' % item for item in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
# ---------------------------------------------------------------------------
# thrift_spec tables, one per struct: a tuple indexed by field id (slot 0 is
# always None) where each entry is (id, wire type, field name, extra type
# info, default value). These tables drive the C fast-path encoder/decoder;
# fix_spec() below resolves forward references in place.
# ---------------------------------------------------------------------------
all_structs.append(DBMetaData)
DBMetaData.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'db_name', 'UTF8', None, ),  # 1
    (2, TType.STRING, 's3_bucket', 'UTF8', None, ),  # 2
    (3, TType.STRING, 's3_path', 'UTF8', None, ),  # 3
)
all_structs.append(AdminException)
AdminException.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'message', 'UTF8', None, ),  # 1
    (2, TType.I32, 'errorCode', None, None, ),  # 2
)
all_structs.append(AddDBRequest)
AddDBRequest.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'db_name', 'UTF8', None, ),  # 1
    (2, TType.STRING, 'upstream_ip', 'UTF8', None, ),  # 2
    (3, TType.BOOL, 'overwrite', None, False, ),  # 3
)
all_structs.append(AddDBResponse)
AddDBResponse.thrift_spec = (
)
all_structs.append(BackupDBRequest)
BackupDBRequest.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'db_name', 'UTF8', None, ),  # 1
    (2, TType.STRING, 'hdfs_backup_dir', 'UTF8', None, ),  # 2
    (3, TType.I32, 'limit_mbs', None, 0, ),  # 3
)
all_structs.append(BackupDBResponse)
BackupDBResponse.thrift_spec = (
)
all_structs.append(RestoreDBRequest)
RestoreDBRequest.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'db_name', 'UTF8', None, ),  # 1
    (2, TType.STRING, 'hdfs_backup_dir', 'UTF8', None, ),  # 2
    (3, TType.STRING, 'upstream_ip', 'UTF8', None, ),  # 3
    (4, TType.I16, 'upstream_port', None, None, ),  # 4
    (5, TType.I32, 'limit_mbs', None, 0, ),  # 5
    (6, TType.STRING, 'db_role', 'UTF8', None, ),  # 6
)
all_structs.append(RestoreDBResponse)
RestoreDBResponse.thrift_spec = (
)
all_structs.append(BackupDBToObjectRequest)
BackupDBToObjectRequest.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'db_name', 'UTF8', None, ),  # 1
    (2, TType.STRING, 'bucket', 'UTF8', None, ),  # 2
    (3, TType.STRING, 'backup_dir', 'UTF8', None, ),  # 3
    (4, TType.I32, 'limit_mbs', None, 0, ),  # 4
    (5, TType.STRING, 'platform', 'UTF8', None, ),  # 5
    (6, TType.STRING, 'region', 'UTF8', None, ),  # 6
)
all_structs.append(BackupDBToObjectResponse)
BackupDBToObjectResponse.thrift_spec = (
)
all_structs.append(RestoreDBFromObjectRequest)
RestoreDBFromObjectRequest.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'db_name', 'UTF8', None, ),  # 1
    (2, TType.STRING, 'bucket', 'UTF8', None, ),  # 2
    (3, TType.STRING, 'backup_dir', 'UTF8', None, ),  # 3
    (4, TType.STRING, 'upstream_ip', 'UTF8', None, ),  # 4
    (5, TType.I16, 'upstream_port', None, None, ),  # 5
    (6, TType.I32, 'limit_mbs', None, 0, ),  # 6
    (7, TType.STRING, 'db_role', 'UTF8', None, ),  # 7
    (8, TType.STRING, 'platform', 'UTF8', None, ),  # 8
    (9, TType.STRING, 'region', 'UTF8', None, ),  # 9
)
all_structs.append(RestoreDBFromObjectResponse)
RestoreDBFromObjectResponse.thrift_spec = (
)
all_structs.append(CloseDBRequest)
CloseDBRequest.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'db_name', 'UTF8', None, ),  # 1
)
all_structs.append(CloseDBResponse)
CloseDBResponse.thrift_spec = (
)
all_structs.append(CheckDBRequest)
CheckDBRequest.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'db_name', 'UTF8', None, ),  # 1
)
all_structs.append(CheckDBResponse)
CheckDBResponse.thrift_spec = (
    None,  # 0
    (1, TType.I64, 'seq_num', None, 0, ),  # 1
    (2, TType.I64, 'wal_ttl_seconds', None, 0, ),  # 2
    (3, TType.I64, 'last_update_timestamp_ms', None, 0, ),  # 3
    (4, TType.BOOL, 'is_master', None, False, ),  # 4
)
all_structs.append(ChangeDBRoleAndUpstreamRequest)
ChangeDBRoleAndUpstreamRequest.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'db_name', 'UTF8', None, ),  # 1
    (2, TType.STRING, 'new_role', 'UTF8', None, ),  # 2
    (3, TType.STRING, 'upstream_ip', 'UTF8', None, ),  # 3
    (4, TType.I16, 'upstream_port', None, None, ),  # 4
)
all_structs.append(ChangeDBRoleAndUpstreamResponse)
ChangeDBRoleAndUpstreamResponse.thrift_spec = (
)
all_structs.append(GetSequenceNumberRequest)
GetSequenceNumberRequest.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'db_name', 'UTF8', None, ),  # 1
)
all_structs.append(GetSequenceNumberResponse)
GetSequenceNumberResponse.thrift_spec = (
    None,  # 0
    (1, TType.I64, 'seq_num', None, None, ),  # 1
)
all_structs.append(ClearDBRequest)
ClearDBRequest.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'db_name', 'UTF8', None, ),  # 1
    (2, TType.BOOL, 'reopen_db', None, True, ),  # 2
)
all_structs.append(ClearDBResponse)
ClearDBResponse.thrift_spec = (
)
all_structs.append(AddS3SstFilesToDBRequest)
AddS3SstFilesToDBRequest.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'db_name', 'UTF8', None, ),  # 1
    (2, TType.STRING, 's3_bucket', 'UTF8', None, ),  # 2
    (3, TType.STRING, 's3_path', 'UTF8', None, ),  # 3
    (4, TType.I32, 's3_download_limit_mb', None, 64, ),  # 4
    (5, TType.STRING, 'region', 'UTF8', None, ),  # 5
    (6, TType.BOOL, 'overlapping', None, True, ),  # 6
    (7, TType.BOOL, 'should_compact', None, True, ),  # 7
    (8, TType.BOOL, 'ingest_behind', None, False, ),  # 8
)
all_structs.append(AddS3SstFilesToDBResponse)
AddS3SstFilesToDBResponse.thrift_spec = (
)
all_structs.append(DownloadS3SstFilesRequest)
DownloadS3SstFilesRequest.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'db_name', 'UTF8', None, ),  # 1
    (2, TType.STRING, 's3_bucket', 'UTF8', None, ),  # 2
    (3, TType.STRING, 's3_path', 'UTF8', None, ),  # 3
    (4, TType.I32, 's3_download_limit_mb', None, 64, ),  # 4
    (5, TType.STRING, 'region', 'UTF8', None, ),  # 5
)
all_structs.append(DownloadS3SstFilesResponse)
DownloadS3SstFilesResponse.thrift_spec = (
)
all_structs.append(IngestSstsToDBRequest)
IngestSstsToDBRequest.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'db_name', 'UTF8', None, ),  # 1
    (2, TType.STRING, 's3_bucket', 'UTF8', None, ),  # 2
    (3, TType.STRING, 's3_path', 'UTF8', None, ),  # 3
)
all_structs.append(IngestSstsToDBResponse)
IngestSstsToDBResponse.thrift_spec = (
)
all_structs.append(StartMessageIngestionRequest)
StartMessageIngestionRequest.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'db_name', 'UTF8', None, ),  # 1
    (2, TType.STRING, 'topic_name', 'UTF8', None, ),  # 2
    (3, TType.STRING, 'kafka_broker_serverset_path', 'UTF8', None, ),  # 3
    (4, TType.I64, 'replay_timestamp_ms', None, None, ),  # 4
)
all_structs.append(StartMessageIngestionResponse)
StartMessageIngestionResponse.thrift_spec = (
)
all_structs.append(StopMessageIngestionRequest)
StopMessageIngestionRequest.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'db_name', 'UTF8', None, ),  # 1
)
all_structs.append(StopMessageIngestionResponse)
StopMessageIngestionResponse.thrift_spec = (
)
all_structs.append(SetDBOptionsRequest)
SetDBOptionsRequest.thrift_spec = (
    None,  # 0
    (1, TType.MAP, 'options', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ),  # 1
    (2, TType.STRING, 'db_name', 'UTF8', None, ),  # 2
)
all_structs.append(SetDBOptionsResponse)
SetDBOptionsResponse.thrift_spec = (
)
all_structs.append(CompactDBRequest)
CompactDBRequest.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'db_name', 'UTF8', None, ),  # 1
)
all_structs.append(CompactDBResponse)
CompactDBResponse.thrift_spec = (
)
all_structs.append(SetDbMetaDataRequest)
SetDbMetaDataRequest.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'db_name', 'UTF8', None, ),  # 1
    (2, TType.STRING, 's3_bucket', 'UTF8', None, ),  # 2
    (3, TType.STRING, 's3_path', 'UTF8', None, ),  # 3
)
all_structs.append(SetDbMetaDataResponse)
SetDbMetaDataResponse.thrift_spec = (
)
all_structs.append(GetDbMetaDataRequest)
GetDbMetaDataRequest.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'db_name', 'UTF8', None, ),  # 1
)
all_structs.append(GetDbMetaDataResponse)
GetDbMetaDataResponse.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'db_name', 'UTF8', None, ),  # 1
    (2, TType.STRING, 's3_bucket', 'UTF8', None, ),  # 2
    (3, TType.STRING, 's3_path', 'UTF8', None, ),  # 3
)
all_structs.append(CheckLoadsstRouterRequest)
CheckLoadsstRouterRequest.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'segment', 'UTF8', None, ),  # 1
    (2, TType.I32, 'version', None, None, ),  # 2
    (3, TType.I32, 'partitions_count', None, None, ),  # 3
    (4, TType.I32, 'replicas_count', None, None, ),  # 4
)
all_structs.append(CheckLoadsstRouterResponse)
CheckLoadsstRouterResponse.thrift_spec = (
    None,  # 0
    (1, TType.BOOL, 'status', None, None, ),  # 1
)
all_structs.append(MigrateSSTToObjectRequest)
MigrateSSTToObjectRequest.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'db_name', 'UTF8', None, ),  # 1
    (2, TType.STRING, 'bucket', 'UTF8', None, ),  # 2
    (3, TType.STRING, 'backup_dir', 'UTF8', None, ),  # 3
    (4, TType.I32, 'limit_mbs', None, 0, ),  # 4
    (5, TType.STRING, 'platform', 'UTF8', None, ),  # 5
    (6, TType.STRING, 'region', 'UTF8', None, ),  # 6
)
all_structs.append(MigrateSSTToObjectResponse)
MigrateSSTToObjectResponse.thrift_spec = (
)
# Resolve forward references in the spec tables, then drop the helper list.
fix_spec(all_structs)
del all_structs
| 1.84375
| 2
|
main.py
|
SwapiTeamsStudios/AutomodBot
| 0
|
12774476
|
import discord
from discord.ext import commands
import json
import datetime
import re
# Load bot configuration (token, version, automod flag, banned words) from a
# JSON settings file.
# NOTE(review): the path below is a placeholder ("specified path" in Italian)
# and must be replaced with a real settings.json path before running.
with open('Percorso specificato (usate il \)', 'r') as settings:
    options = json.load(settings)
client = commands.Bot(command_prefix="!")
@client.event
async def on_ready():
    """Startup hook: log login info, support link, version and automod state."""
    print(f"SwapiTeams Automoderation Bot, activated! Loggato come: {client.user}. Ore: {datetime.datetime.utcnow()}. Contact SwapiTeams for support: https://discord.gg/CJ8t5sgBaA. Versione: {options['version']}")
    # 'automod' is expected to be a JSON boolean; anything else is treated as
    # a failed settings load.
    if options['automod'] == True:
        print(f"Caricato il settings.json | Automod: Attivato")
    elif options['automod'] == False:
        print(f"Caricato il settings.json | Automod: Disattivato")
    else:
        print("Non sono riuscito a caricare il settings.json")
def msg_cont(message, word):
    """Return True when *word* occurs in *message* as a whole word.

    re.escape makes banned words containing regex metacharacters (e.g.
    "a.b", "c++") match literally; previously they were interpolated raw
    into the pattern, so "a.b" also matched "axb".
    """
    return re.search(r'\b({})\b'.format(re.escape(word)), message) is not None
@client.event
async def on_message(message):
    """Automod hook: delete guild messages containing banned words, then let
    normal command processing continue."""
    bannedwords = options['bannedwords']
    if options['automod'] == True:
        # Only moderate when a banned-word list is configured, and never in DMs.
        if bannedwords != None and (isinstance(message.channel, discord.channel.DMChannel) == False):
            for bannedword in bannedwords:
                if msg_cont(message.content.lower(), bannedword):
                    await message.delete()
                    embed = discord.Embed(
                        title="Automod",
                        description=f"{message.author} non puoi scrivere questa parola",
                        color=0x1d1d1d
                    )
                    await message.channel.send(embed=embed)
    # Audit log of every message, moderated or not.
    print(f"{message.author} ha scritto {message.content} in {message.channel}")
    # Required so that bot commands still run despite overriding on_message.
    await client.process_commands(message)
client.run(f"{options['token']}")
| 2.34375
| 2
|
balena/logs.py
|
amirfuhrmann/balena-sdk-python
| 0
|
12774477
|
<gh_stars>0
from functools import wraps
import json
try: # Python 3 imports
from urllib.parse import urljoin
except ImportError: # Python 2 imports
from urlparse import urljoin
from collections import defaultdict
from threading import Thread
from twisted.internet import reactor, ssl
from twisted.internet.defer import Deferred
from twisted.internet.protocol import Protocol
from twisted.web.client import Agent
from twisted.web.http_headers import Headers
from twisted.internet.ssl import ClientContextFactory
from .base_request import BaseRequest
from . import exceptions
from .models.config import Config
from .models.device import Device
from .settings import Settings
class WebClientContextFactory(ClientContextFactory):
    """
    This is low level class and is not meant to be used by end users directly.
    """

    def getContext(self, hostname, port):
        # Ignore hostname/port: Agent passes them, but the base factory's
        # context is host-agnostic (no per-host certificate pinning).
        return ClientContextFactory.getContext(self)
class StreamingParser(Protocol):
    """Twisted protocol that splits the response body into newline-delimited
    JSON objects and forwards each parsed object to *callback*.

    This is low level class and is not meant to be used by end users directly.
    """

    def __init__(self, callback, error):
        self.callback = callback  # invoked once per successfully parsed object
        self.error = error        # optional error handler, invoked on bad JSON
        self.pending = b''        # trailing partial line, kept until more data

    def dataReceived(self, data):
        self.pending += data
        lines = self.pending.split(b'\n')
        # The last element is either b'' (stream ended on a newline) or an
        # incomplete line; buffer it for the next dataReceived call.
        self.pending = lines.pop()
        for line in lines:
            if not line:
                # Skip blank lines. The previous version invoked the callback
                # here with an empty or stale object.
                continue
            try:
                obj = json.loads(line)
            except Exception as e:
                self.transport.stopProducing()
                self.transport.loseConnection()
                if self.error:
                    self.error(e)
                break
            self.callback(obj)

    def connectionLost(self, reason):
        pass
def cbRequest(response, callback, error):
    """Attach a StreamingParser to *response*'s body and return the parser."""
    parser = StreamingParser(callback, error)
    response.deliverBody(parser)
    return parser
def cbDrop(protocol):
    """Tear down *protocol*'s transport, ending the streaming request."""
    transport = protocol.transport
    transport.stopProducing()
    transport.loseConnection()
class Subscription(object):
    """
    This is low level class and is not meant to be used by end users directly.

    Manages one streaming HTTP request per subscribed device, driven by a
    Twisted reactor running on a background thread.
    """

    def __init__(self):
        self.context_factory = WebClientContextFactory()
        self.settings = Settings()

    def add(self, uuid, callback, error=None, count=None):
        """Start a streaming log request for *uuid*; returns the Deferred.

        count: optional number of historical log entries to include.
        """
        query = 'stream=1'
        if count:
            query = 'stream=1&count={}'.format(count)
        url = urljoin(
            self.settings.get('api_endpoint'),
            '/device/v2/{uuid}/logs?{query}'.format(uuid=uuid, query=query)
        )
        headers = {}
        # Twisted Headers expect bytes keys and a list of bytes values.
        headers[b'Authorization'] = ['Bearer {:s}'.format(self.settings.get('token')).encode()]
        agent = Agent(reactor, self.context_factory)
        d = agent.request(b'GET', url.encode(), Headers(headers), None)
        d.addCallback(cbRequest, callback, error)
        # Ensure the reactor is running before the request can progress.
        self.run()
        return d

    def run(self):
        """Start the reactor on a daemon-like background thread (idempotent)."""
        if not reactor.running:
            # installSignalHandlers=False: we are not on the main thread.
            Thread(target=reactor.run, args=(False,)).start()

    def stop(self, d):
        """Drop the streaming connection behind Deferred *d*, thread-safely."""
        reactor.callFromThread(d.addCallback, cbDrop)
class Logs(object):
    """
    This class implements functions that allow processing logs from device.
    """

    # NOTE: previously this was only a class attribute, so every Logs instance
    # (and subclass) shared one subscription table. Kept for backward
    # compatibility, but each instance now gets its own table in __init__.
    subscriptions = defaultdict(list)

    def __init__(self):
        self.base_request = BaseRequest()
        self.config = Config()
        self.device = Device()
        self.settings = Settings()
        self.subscription_handler = Subscription()
        # Per-instance registry: uuid -> list of streaming-request Deferreds.
        self.subscriptions = defaultdict(list)

    def __exit__(self, exc_type, exc_value, traceback):
        reactor.stop()

    def subscribe(self, uuid, callback, error=None, count=None):
        """
        Subscribe to device logs.

        Args:
            uuid (str): device uuid.
            callback (function): this callback is called on receiving a message.
            error (Optional[function]): this callback is called on an error event.
            count (Optional[int]): number of historical messages to include.

        Returns:
            dict: a log entry will contain the following keys: `isStdErr, timestamp, message, isSystem, createdAt`.
        """
        # Raises if the device does not exist, before we open a stream.
        self.device.get(uuid)
        self.subscriptions[uuid].append(self.subscription_handler.add(uuid, callback, error, count))

    def history(self, uuid, count=None):
        """
        Get device logs history.

        Args:
            uuid (str): device uuid.
            count (Optional[int]): number of historical messages to return.
        """
        raw_query = ''
        if count:
            raw_query = 'count={}'.format(count)
        return self.base_request.request(
            '/device/v2/{uuid}/logs'.format(uuid=uuid), 'GET', raw_query=raw_query,
            endpoint=self.settings.get('api_endpoint')
        )

    def unsubscribe(self, uuid):
        """
        Unsubscribe from device logs for a specific device.

        Args:
            uuid (str): device uuid.
        """
        if uuid in self.subscriptions:
            for d in self.subscriptions[uuid]:
                self.subscription_handler.stop(d)
            del self.subscriptions[uuid]

    def unsubscribe_all(self):
        """
        Unsubscribe all subscribed devices.
        """
        for device in self.subscriptions:
            for d in self.subscriptions[device]:
                self.subscription_handler.stop(d)
        # Reset to a defaultdict (the old code assigned a plain {} here, which
        # made any later subscribe() raise KeyError on the list append).
        self.subscriptions = defaultdict(list)
| 2.25
| 2
|
bin/ek.py
|
alfa-bravo/ekstrakto
| 0
|
12774478
|
<reponame>alfa-bravo/ekstrakto
#!/usr/bin/env python3
# Minimal launcher: put the sibling ekstrakto package on sys.path (relative to
# the current working directory) and hand control to the package CLI.
import sys
sys.path.append('../ekstrakto')
from ekstrakto.cli import entrypoint
entrypoint()
| 1.09375
| 1
|
dict/util.py
|
meyersbs/phonetta-cli
| 0
|
12774479
|
#################################################################################
# @PROJECT: PhonTA - Phonetic Transcription Assistant #
# @VERSION: #
# @AUTHOR: <NAME> #
# @EMAIL: <EMAIL> #
# @LICENSE: MIT #
#################################################################################
##### IMPORTS ###################################################################
import os, subprocess, re
##### VARIABLES #################################################################
# Paths: the bundled English dictionary lives next to this module.
curr_dir = os.path.dirname(__file__)
default_dictionary = curr_dir + "/english"
# ARPABET -> IPA mapping. Stress-marked vowel variants (suffixes 0/1/2) map to
# the same IPA symbol as the unstressed form; stress is not represented.
arpa_to_ipa = { "AA":u'\u0251', "AA0":u'\u0251', "AA1":u'\u0251', "AA2":u'\u0251',
    "AE":u'\u00E6', "AE0":u'\u00E6', "AE1":u'\u00E6', "AE2":u'\u00E6',
    "AH":u'\u028C', "AH0":u'\u028C', "AH1":u'\u028C', "AH2":u'\u028C',
    "AO":u'\u0254', "AO0":u'\u0254', "AO1":u'\u0254', "AO2":u'\u0254',
    "AW":u'\u0061\u028A', "AW0":u'\u0061\u028A', "AW1":u'\u0061\u028A', "AW2":u'\u0061\u028A',
    "AY":u'\u0061\u026A', "AY0":u'\u0061\u026A', "AY1":u'\u0061\u026A', "AY2":u'\u0061\u026A',
    "B":u'\u0062',
    "CH":u'\u0074\u0283',
    "D":u'\u0064', "DH":u'\u00F0',
    "EH":u'\u025B', "EH0":u'\u025B', "EH1":u'\u025B', "EH2":u'\u025B',
    "ER":u'\u0279', "ER0":u'\u0279', "ER1":u'\u0279', "ER2":u'\u0279',
    "EY":u'\u0065\u026A', "EY0":u'\u0065\u026A', "EY1":u'\u0065\u026A', "EY2":u'\u0065\u026A',
    "F":u'\u0066',
    "G":u'\u0067',
    "HH":u'\u0068',
    "IH":u'\u026A', "IH0":u'\u026A', "IH1":u'\u026A', "IH2":u'\u026A',
    "IY":u'\u0069', "IY0":u'\u0069', "IY1":u'\u0069', "IY2":u'\u0069',
    "JH":u'\u0064\u0292',
    "K":u'\u006B',
    "L":u'\u006C',
    "M":u'\u006D',
    "N":u'\u006E', "NG":u'\u014B',
    "OW":u'\u006F\u028A', "OW0":u'\u006F\u028A', "OW1":u'\u006F\u028A', "OW2":u'\u006F\u028A',
    "OY":u'\u0254\u026A', "OY0":u'\u0254\u026A', "OY1":u'\u0254\u026A', "OY2":u'\u0254\u026A',
    "P":u'\u0070',
    "R":u'\u0072',
    "S":u'\u0073', "SH":u'\u0283',
    "T":u'\u0074', "TH":u'\u03B8',
    "UH":u'\u028A', "UH0":u'\u028A', "UH1":u'\u028A', "UH2":u'\u028A',
    "UW":u'\u0075', "UW0":u'\u0075', "UW1":u'\u0075', "UW2":u'\u0075',
    "V":u'\u0076',
    "W":u'\u0077',
    "Y":u'\u0079',
    "Z":u'\u007A', "ZH":u'\u0292'}
def download_cmudict():
    """Fetch cmudict-0.7b via wget and install it as the default dictionary.

    Requires the external `wget`, `cp` and `rm` commands to be available.
    """
    print("Downloading cmudict...")
    subprocess.call(['wget', 'http://svn.code.sf.net/p/cmusphinx/code/trunk/cmudict/cmudict-0.7b', '-P', curr_dir], stdout=None)
    temp_dict = curr_dir + '/cmudict-0.7b'
    # Replace the bundled dictionary with the freshly downloaded one.
    subprocess.call(['cp', temp_dict, default_dictionary], stdout=None)
    subprocess.call(['rm', temp_dict], stdout=None)
def format_cmudict():
    """Reformat the raw cmudict file into WORD|ARPABET lines.

    Returns True on success, False when the default dictionary is missing.
    """
    print("Formatting cmudict...")
    if not os.path.exists(default_dictionary):
        # Previously the temp output file was opened (and leaked) before this
        # check, creating an empty "dictionary.psv" even on failure.
        return False
    with open("dictionary.psv", "a") as new_dict:
        with open(default_dictionary, "r") as f:
            for line in f:
                # Skip the ";;;" comment header; keep only real entries
                # (lines starting with a letter or apostrophe).
                if ";;;" not in line and re.match(r'[A-Za-z\']', line[0]):
                    temp_line = re.split(r'\s\s', line)
                    new_dict.write(temp_line[0] + "|" + temp_line[1].strip("\n") + "\n")
        # Add missing terms.
        new_dict.write("&|AE2 N D\n")
        new_dict.write("@|AE2 T\n")
    # Replace the dictionary with the formatted version and clean up.
    subprocess.call(['cp', "dictionary.psv", default_dictionary], stdout=None)
    subprocess.call(['rm', "dictionary.psv"], stdout=None)
    return True
def sort_cmudict():
    """ Takes the given dictionary and sorts it alphabetically. """
    print("Sorting cmudict...")
    if not os.path.exists(default_dictionary):
        return False
    # Version-sort the file in place via the external `sort` command.
    subprocess.call(['sort', '-V', default_dictionary, '-o', default_dictionary], stdout=None)
    return True
def update_cmudict():
    """Append an IPA transcription field to every dictionary entry.

    Rewrites each "WORD|ARPABET" line as "WORD|ARPABET|IPA". Returns True on
    success, False when the default dictionary is missing.
    """
    print("Updating cmudict...")
    if not os.path.exists(default_dictionary):
        return False
    # Text mode: the old code opened the temp file in "wb" while concatenating
    # str with bytes, which raises TypeError on Python 3.
    with open("dictionary.psv", "w") as new_dict:
        with open(default_dictionary, "r") as f:
            for line in f:
                elems = line.split("|")
                ipa_line = ""
                for item in elems[1].split():
                    if item not in arpa_to_ipa:
                        # Unknown phone: report it and leave it out of the IPA.
                        print("UNKNOWN: " + item)
                    else:
                        ipa_line += arpa_to_ipa[item] + " "
                new_dict.write(line.strip("\n") + "|" + ipa_line.rstrip() + "\n")
    subprocess.call(['cp', "dictionary.psv", default_dictionary], stdout=None)
    subprocess.call(['rm', "dictionary.psv"], stdout=None)
    return True
def upgrade_cmudict():
    """Run the full pipeline: download, reformat, sort and IPA-annotate."""
    for step in (download_cmudict, format_cmudict, sort_cmudict, update_cmudict):
        step()
    return "Successfully upgraded to the newest cmudict!"
def search_cmudict(term, encoding="IPA"):
    """Look up *term*'s transcription in the dictionary.

    encoding: "IPA" (default) returns the third field, anything else returns
    the ARPABET (second) field. Returns "ERROR" when the term is not found.
    """
    # Strip any mix of trailing/leading sentence punctuation. The old chained
    # .strip(".").strip("!").strip("?") missed interleaved runs like "a.!.".
    key = term.upper().strip(".!?")
    with open(default_dictionary, "r") as f:
        for line in f:
            fields = line.split("|")
            if fields[0] == key:
                if encoding == "IPA":
                    return fields[2].rstrip().strip("\n")
                return fields[1].rstrip().strip("\n")
    return "ERROR"
def add_to_cmudict(term, arpabet, ipa="NONE"):
    """Append a new "term|arpabet|ipa" entry and re-sort the dictionary.

    When *ipa* is not supplied it is derived from *arpabet*. Returns a status
    message string.
    """
    if search_cmudict(term) != "ERROR":
        return "Already in cmudict."
    if ipa == "NONE":
        # Derive the IPA from the supplied ARPABET. The old code re-queried
        # the dictionary for a term it just proved absent, so it always
        # stored the literal string "ERROR" as the IPA.
        ipa = " ".join(arpa_to_ipa[p] for p in arpabet.split() if p in arpa_to_ipa)
    # Text append mode: the old "ab" binary mode raised TypeError on Python 3
    # because the written value is str, not bytes.
    with open(default_dictionary, "a") as f:
        f.write(term + "|" + arpabet + "|" + ipa + "\n")
    sort_cmudict()
    return "Successfully added '" + term + "' to the cmudict."
def translate(text):
    """Print the ARPABET and IPA transcriptions of *text*, word by word."""
    words = text.split()
    # Each transcription keeps a trailing space per word, matching the
    # accumulator style used elsewhere in this module.
    arpa = "".join(search_cmudict(w, "ARPABET") + " " for w in words)
    ipa = "".join(search_cmudict(w, "IPA") + " " for w in words)
    print("ARPABET:\t" + arpa)
    print("IPA:\t\t" + ipa)
    return ""
| 2.15625
| 2
|
sotaai/rl/rllib_wrapper.py
|
stateoftheartai/sotaai
| 23
|
12774480
|
# -*- coding: utf-8 -*-
# Author: <NAME> <<EMAIL>>
# Copyright: Stateoftheart AI PBC 2021.
'''RLlib's library wrapper.'''
# Static metadata describing the wrapped library.
SOURCE_METADATA = {
    'name': 'rllib',
    'original_name': 'RLlib',
    'url': 'https://docs.ray.io/en/master/rllib.html'
}
# Algorithm names grouped by the kind of action space / setting they appear
# under in the RLlib docs. Entries commented out below were presumably
# disabled deliberately -- confirm before re-enabling.
MODELS = {
    'discrete': [
        'A2C', 'A3C', 'ARS', 'BC', 'ES', 'DQN', 'Rainbow', 'APEX-DQN', 'IMPALA',
        'MARWIL', 'PG', 'PPO', 'APPO', 'R2D2', 'SAC', 'SlateQ', 'LinUCB',
        'LinTS', 'AlphaZero', 'QMIX', 'MADDPG', 'Curiosity'
    ],
    'continuous': [
        'A2C',
        'A3C',
        'ARS',
        'BC',
        'CQL',
        'ES',
        # 'DDPG',
        'TD3',
        'APEX-DDPG',
        'Dreamer',
        'IMPALA',
        'MAML',
        'MARWIL',
        'MBMPO',
        'PG',
        'PPO',
        'APPO',
        'SAC',
        'MADDPG'
    ],
    'multi-agent': [
        'A2C',
        'A3C',
        'BC',
        # 'DDPG',
        'TD3',
        'APEX-DDPG',
        'DQN',
        'Rainbow',
        'APEX-DQN',
        'IMPALA',
        'MARWIL',
        'PG',
        'PPO',
        'APPO',
        'R2D2',
        'SAC',
        'LinUCB',
        'LinTS',
        'QMIX',
        'MADDPG',
        'Curiosity'
    ],
    'unknown': [
        'ParameterSharing', 'FullyIndependentLearning', 'SharedCriticMethods'
    ],
}
def load_model(name: str) -> dict:
    """Return a minimal descriptor dict for the RLlib model *name*."""
    return dict(name=name, source='rllib')
| 1.78125
| 2
|
unit_tests/tests_viewer_wrappers.py
|
inpho/vsm
| 31
|
12774481
|
<reponame>inpho/vsm<filename>unit_tests/tests_viewer_wrappers.py
import unittest2 as unittest
import numpy as np
from vsm.viewer.wrappers import *
from vsm.viewer.labeleddata import *
class TestViewerWrappers(unittest.TestCase):
    """Placeholder test case for vsm.viewer.wrappers.

    The original assertions (kept below, commented out) depended on an LDA
    model and were disabled pending a model-independent rewrite.
    """
    # TODO: Rewrite these to be independent of LDA
    pass
    # def setUp(self):
    #     from vsm.corpus.util.corpusbuilders import random_corpus
    #     from vsm.model.ldacgsseq import LdaCgsSeq
    #     self.c = random_corpus(1000, 50, 0, 20, context_type='sentence',
    #                             metadata=True)
    #     self.m = LDAGibbs(self.c, 'sentence', K=40)
    #     self.m.train(n_iterations=50)
    # def test_dist_(self):
    #     li = [0,1]
    #     swt = dist_word_top(self.c, self.m.word_top.T, '0')
    #     swtl = dist_word_top(self.c, self.m.word_top.T, ['0','1'], order='i')
    #     sww = dist_word_word(self.c, self.m.word_top, '0')
    #     swwl = dist_word_word(self.c, self.m.word_top, ['0','1'], order='i')
    #     std = dist_top_doc(self.c, self.m.top_doc.T, 0, 'sentence', order='i')
    #     stdl = dist_top_doc(self.c, self.m.top_doc.T, li, 'sentence')
    #     sdd = dist_doc_doc(self.c, self.m.top_doc, self.m.context_type, 0)
    #     sddl = dist_doc_doc(self.c, self.m.top_doc, self.m.context_type, li)
    #     stt = dist_top_top(self.m.word_top.T, 1)
    #     sttl = dist_top_top(self.m.word_top.T, li)
    #     self.assertEqual(type(swt), LabeledColumn)
    #     self.assertEqual(type(swtl), LabeledColumn)
    #     self.assertEqual(type(sww), LabeledColumn)
    #     self.assertEqual(type(swwl), LabeledColumn)
    #     self.assertEqual(type(std), LabeledColumn)
    #     self.assertEqual(type(stdl), LabeledColumn)
    #     self.assertEqual(type(sdd), LabeledColumn)
    #     self.assertEqual(type(sddl), LabeledColumn)
    #     self.assertEqual(type(stt), LabeledColumn)
    #     self.assertEqual(type(sttl), LabeledColumn)
    # def test_dismat_(self):
    #     dismatw = dismat_word(['0','2','5'], self.c, self.m.word_top)
    #     dismatd = dismat_doc([0,1,2], self.c, self.m.context_type,
    #                           self.m.top_doc)
    #     dismatt = dismat_top([0,1,2], self.m.word_top)
    #     self.assertEqual(type(dismatw), IndexedSymmArray)
    #     self.assertEqual(type(dismatd), IndexedSymmArray)
    #     self.assertEqual(type(dismatt), IndexedSymmArray)
#Define and run test suite
suite = unittest.TestLoader().loadTestsFromTestCase(TestViewerWrappers)
unittest.TextTestRunner(verbosity=2).run(suite)
| 2.203125
| 2
|
generators/_init-python/templates/__init__.tmpl.py
|
phovea/generator-phovea
| 1
|
12774482
|
###############################################################################
# Caleydo - Visualization for Molecular Biology - http://caleydo.org
# Copyright (c) The Caleydo Team. All rights reserved.
# Licensed under the new BSD license, available at http://caleydo.org/license
###############################################################################
def phovea(registry):
    """
    register extension points
    :param registry:
    """
    # NOTE: this is a Yeoman generator template, not runnable Python -- the
    # <% ... %> placeholder below is expanded at scaffolding time into one
    # registry.append(...) call per configured extension.
    # generator-phovea:begin
    <% - sextensions.map((d) => ` registry.append('${d.type}', '${d.id}', '${name.toLowerCase()}.${d.module}', ${stringifyPython(d.extras, ' ')})`).join('\n\n')%>
    # generator-phovea:end
    pass
def phovea_config():
    """
    :return: file pointer to config file
    """
    from os import path
    candidate = path.join(path.abspath(path.dirname(__file__)), 'config.json')
    # Only hand back a path that actually exists on disk.
    if path.exists(candidate):
        return candidate
    return None
| 1.921875
| 2
|
users/permissions.py
|
pantaLuc/Attendance-check-app-backend
| 0
|
12774483
|
<reponame>pantaLuc/Attendance-check-app-backend<gh_stars>0
from rest_framework import permissions
from .serializers import UsersSerializer
class ViewPermission(permissions.BasePermission):
    """DRF permission that checks the requester's role permissions.

    GET is allowed with either "view_<object>" or "edit_<object>";
    any other method requires "edit_<object>", where <object> comes from
    the view's `permission_object` attribute.
    """

    def has_permission(self, request, view):
        # NOTE(review): this serializes request.data (the request body), not
        # request.user -- confirm the body really carries the role payload.
        data = UsersSerializer(request.data).data
        # grant the right to view
        view_acces = any(
            p['name'] == 'view_'+view.permission_object for p in data['role']['permissions'])
        # grant the right to edit
        edit_acces = any(
            p['name'] == 'edit_'+view.permission_object for p in data['role']['permissions'])
        if request.method == "GET":
            return view_acces or edit_acces
        return edit_acces
| 2.421875
| 2
|
wordweaver/tests/test_foma_access_shell.py
|
roedoejet/wordweaver-legacy
| 4
|
12774484
|
# -*- coding: utf-8 -*-
""" Test Access to Fomabin
"""
from unittest import TestCase
import os
from wordweaver.data import data_dir
from wordweaver.fst.utils.foma_access import foma_access
from wordweaver.log import logger
class TestFoma_access_shell(TestCase):
    """Round-trip tests for foma_access against a toy Kanyen'kéha fomabin."""

    path_to_foma = None
    foma_shell = None
    fomabin_name = 'toy-kawe-stressed-markup.fomabin'

    def setUp(self):
        self.path_to_foma = os.path.join(data_dir, 'fomabins')
        self.foma_shell = foma_access(os.path.join(self.path_to_foma, self.fomabin_name))

    def test_up(self):
        # Analysis direction: surface form -> tags; expects exactly one parse.
        verb = "^PP-^ke^R-^'níkhons^H^"
        res = self.foma_shell.up(verb)
        if len(res) != 1:
            self.fail("Expected one answer for '^PP-^ke^R-^'níkhons^H^'")
        else:
            for r in res:
                logger.debug(r)

    def test_down(self):
        # Generation direction: tags -> surface form; expects one exact form.
        tags = 'Verb+Active+AgentSg1+PatSg3Neuter+7nikhon-r+Habitual'
        res = self.foma_shell.down(tags)
        if len(res) != 1:
            self.fail("Excpected a single answer for Verb+Active+AgentSg1+PatSg3Neuter+7nikhon-r+Habitual")
        elif res[0] != "^PP-^ke^R-^'níkhons^H^":
            self.fail('Expected "^PP-^ke^R-^\'níkhons^H^", got ' + res[0])
        else:
            logger.debug(res[0])

    def test_execute_foma_command(self):
        # Smoke test: an arbitrary foma shell command should run and yield output.
        res = self.foma_shell.execute_foma_command('random_upper')
        for r in res:
            logger.debug(r)
| 2.59375
| 3
|
settings.py
|
vchong/ibart
| 3
|
12774485
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import logging as log
import yaml
import os
def get_settings_yml_file():
    """Load and return the parsed configs/settings.yaml.

    Returns None when the file is empty; exits the process when the file
    cannot be read at all.
    """
    yml_file = None
    config_file = "configs/settings.yaml"
    try:
        with open(config_file, 'r') as yml:
            yml_file = yaml.load(yml, Loader=yaml.SafeLoader)
    except OSError:
        # open() raises OSError (FileNotFoundError), not KeyError as the old
        # code assumed -- that handler could never fire. Also use lazy
        # %-style args: logging does not interpret "{}" placeholders.
        log.error("Couldn't find %s", config_file)
        exit()
    return yml_file
def _setting(section, key, missing_msg):
    """Return settings[section][key]; on a missing key, log *missing_msg*
    and return the sentinel string "Missing key!" (legacy behavior)."""
    yml_file = get_settings_yml_file()
    try:
        return yml_file[section][key]
    except KeyError:
        log.error(missing_msg)
        return "Missing key!"


def _env_override(var_name):
    """Return os.environ[var_name] when set and non-empty, else None."""
    value = os.environ.get(var_name)
    return value if value else None


def config_path():
    """Path to the config directory from the settings file."""
    return _setting('config', 'path', "No config path in settings file")


def repo_bin():
    """Path to the `repo` binary."""
    return _setting('repo', 'bin', "No repo bin in settings file")


def repo_reference():
    """`repo` reference mirror path."""
    return _setting('repo', 'reference', "No repo reference in settings file")


def aarch32_toolchain_path():
    """Install path of the 32-bit ARM toolchain."""
    return _setting('toolchain', 'aarch32_path', "No aarch32 toolchain in settings file")


def aarch64_toolchain_path():
    """Install path of the 64-bit ARM toolchain."""
    return _setting('toolchain', 'aarch64_path', "No aarch64 toolchain in settings file")


def aarch32_prefix():
    """Cross-compiler prefix for 32-bit ARM builds."""
    return _setting('toolchain', 'aarch32_prefix', "No aarch32 prefix in settings file")


def aarch64_prefix():
    """Cross-compiler prefix for 64-bit ARM builds."""
    return _setting('toolchain', 'aarch64_prefix', "No aarch64 prefix in settings file")


def workspace_path():
    """Build workspace directory."""
    return _setting('workspace', 'path', "No workspace path in settings file")


def log_dir():
    """Log directory; the IBART_LOG_DIR env var overrides the settings file."""
    return _env_override('IBART_LOG_DIR') or \
        _setting('log', 'dir', "No log dir in settings file")


def log_file():
    """Core log file; the IBART_CORE_LOG env var overrides the settings file."""
    return _env_override('IBART_CORE_LOG') or \
        _setting('log', 'file', "No log file specified in settings file or env")


def db_file():
    """Database file; the IBART_DB_FILE env var overrides the settings file."""
    return _env_override('IBART_DB_FILE') or \
        _setting('db', 'file', "No db file specified in settings file or env")


def jobdefs_path():
    """Job-definition folder; the IBART_JOBDEFS env var overrides settings."""
    return _env_override('IBART_JOBDEFS') or \
        _setting('jobs', 'path', "No jobdefs folder specified in settings file or env")


def remote_jobs():
    """List of remote job-definition entries (as strings) from settings.

    Returns the sentinel "Missing key!" when the key is absent, matching the
    other accessors.
    """
    yml_file = get_settings_yml_file()
    my_jobs = []
    try:
        for entry in yml_file['jobs']['remotedefs']:
            my_jobs.append("{}".format(entry))
    except KeyError:
        log.error("No remote jobdefs in settings file")
        return "Missing key!"
    return my_jobs
###############################################################################
# Everything below this line is just for debugging this
###############################################################################
def foo():
    """Debug helper: probe a settings key that is not expected to exist."""
    settings = get_settings_yml_file()
    try:
        return settings['foo']['aarch64_path']
    except KeyError:
        return "Missing key!"
def initialize():
    """Debug helper: dump every configured setting at DEBUG level.

    Note each accessor re-reads and re-parses settings.yaml, so this performs
    one file read per line below.
    """
    log.info("Configure settings")
    log.debug("config: {}".format(config_path()))
    log.debug("repo binary: {}".format(repo_bin()))
    log.debug("repo reference: {}".format(repo_reference()))
    log.debug("aarch32_toolchain_path: {}".format(aarch32_toolchain_path()))
    log.debug("aarch64_toolchain_path: {}".format(aarch64_toolchain_path()))
    log.debug("aarch32_prefix: {}".format(aarch32_prefix()))
    log.debug("aarch64_prefix: {}".format(aarch64_prefix()))
    log.debug("workspace_path: {}".format(workspace_path()))
    log.debug("log_dir: {}".format(log_dir()))
    log.debug("log_file: {}".format(log_file()))
    log.debug("db_file: {}".format(db_file()))
    log.debug("config_path: {}".format(config_path()))
    log.debug("remote_jobs: {}".format(remote_jobs()))
def initialize_logger():
    """Configure root logging for debugging runs of this module."""
    fmt = "[%(levelname)s] %(funcName)s():%(lineno)d %(message)s"
    log.basicConfig(
        level=log.DEBUG,
        format=fmt,
        filemode='w')
# Manual debug entry point: configure logging, dump all settings, then probe
# a deliberately-missing key.
if __name__ == "__main__":
    initialize_logger()
    initialize()
    foo()
| 2.453125
| 2
|
src/sagemaker/jumpstart/notebook_utils.py
|
guoqiaoli1992/sagemaker-python-sdk
| 0
|
12774486
|
<filename>src/sagemaker/jumpstart/notebook_utils.py
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""This module stores notebook utils related to SageMaker JumpStart."""
from __future__ import absolute_import
from sagemaker.jumpstart import accessors
from sagemaker.jumpstart.constants import JUMPSTART_DEFAULT_REGION_NAME
def get_model_url(
    model_id: str, model_version: str, region: str = JUMPSTART_DEFAULT_REGION_NAME
) -> str:
    """Retrieve web url describing pretrained model.

    Args:
        model_id (str): The model ID for which to retrieve the url.
        model_version (str): The model version for which to retrieve the url.
        region (str): Optional. The region from which to retrieve metadata.
            (Default: JUMPSTART_DEFAULT_REGION_NAME)

    Returns:
        str: the ``url`` field of the JumpStart model specs for the given
        model id / version / region.
    """
    model_specs = accessors.JumpStartModelsAccessor.get_model_specs(
        region=region, model_id=model_id, version=model_version
    )
    return model_specs.url
| 1.828125
| 2
|
startapp.py
|
eboyce452/django_conf
| 0
|
12774487
|
from importlib import import_module
import os
import re
from django.core.management.base import CommandError
from django.core.management.templates import TemplateCommand
class Command(TemplateCommand):
    """Custom `startapp` that also scaffolds templates, static assets, forms,
    urls, template tags and a data-population script, and patches the project
    settings/urls to register the new app."""
    help = (
        "Creates a Django app directory structure for the given app name in "
        "the current directory or optionally in the given directory."
    )
    missing_args_message = "You must provide an application name."

    def handle(self, **options):
        app_name, target = options.pop('name'), options.pop('directory')
        self.validate_name(app_name, "app")
        # Check that the app_name cannot be imported.
        try:
            import_module(app_name)
        except ImportError:
            pass
        else:
            raise CommandError(
                "%r conflicts with the name of an existing Python module and "
                "cannot be used as an app name. Please try another name." % app_name
            )
        # Let Django create the standard app skeleton first.
        super(Command, self).handle('app', app_name, target, **options)
        # NOTE(review): assumes the project package directory is named after
        # the current working directory -- confirm for renamed projects.
        filepath_proj = os.path.join(os.getcwd(),os.getcwd().split(os.sep)[-1])
        filepath_app = os.path.join(os.getcwd(), app_name)
        filepath_base = os.getcwd()
        # Per-app asset directories under the project-level templates/static.
        template_path = os.path.join(filepath_base, 'templates', app_name)
        css_path = os.path.join(filepath_base, 'static', 'css', app_name)
        js_path = os.path.join(filepath_base, 'static', 'js', app_name)
        try:
            os.mkdir(template_path)
            os.mkdir(css_path)
            os.mkdir(js_path)
        except:
            raise
        # Seed empty per-app CSS/JS files. (The f.close() calls inside the
        # `with` blocks throughout are redundant but harmless.)
        css_template = '/* Blank CSS Sheet - Reset CSS with CTRL + F5 to Clear Browser Cache*/'
        with open(os.path.join(css_path, "{}.css".format(app_name)), 'w') as f:
            f.write(css_template)
            f.close()
        js_template = '// Blank Javascript File - Reset Javascript with CTRL + F5 to Clear Browser Cache'
        with open(os.path.join(js_path, '{}.js'.format(app_name)), 'w') as f:
            f.write(js_template)
            f.close()
        # Base template: doubled {{% %}} render to literal {% %} after .format.
        base_template = '''<!DOCTYPE html>
{{% load static %}}
<html>
<head>
<title></title>
<link rel='stylesheet' href='{{% static "sitepackages/bootstrap.min.css" %}}'>
<link rel='stylesheet' href='{{% static "{}" %}}'>
<meta name='viewport' content='width=device-width, initial-scale=1, shrink-to-fit=no'>
</head>
<body>
<!-- Insert repeated body code here -->
<div>
{{% block body_block %}}
<!-- Anything outside of this will be inherited if you extend -->
{{% endblock %}}
</div>
<script src='{{% static "sitepackages/jquery.min.js" %}}'></script>
<script src='{{% static "sitepackages/popper.min.js" %}}'></script>
<script src='{{% static "sitepackages/bootstrap.min.js" %}}'></script>
<script src='{{% static "{}" %}}'></script>
</body>
</html>'''.format(os.path.join('css',app_name,'{}.css'.format(app_name)), os.path.join('js',app_name,'{}.js'.format(app_name)))
        # NOTE(review): the two format args below look swapped -- extends gets
        # the bare app name while load gets the base-template path; verify.
        extension_template = '''{{% extends "{}" %}}
{{% load {}_custom_tags %}}
{{% block body_block %}}
<!-- Add in your body html for this page here -->
{{% endblock %}}'''.format(app_name, os.path.join(app_name,'{}_base.html'.format(app_name)))
        with open(os.path.join(template_path, '{}_base.html'.format(app_name)), 'w') as f:
            f.write(base_template)
            f.close()
        with open(os.path.join(template_path, '{}_extension.html'.format(app_name)), 'w') as f:
            f.write(extension_template)
            f.close()
        # Register the new app in INSTALLED_APPS by splicing it in right after
        # the opening bracket.
        with open(os.path.join(filepath_proj,'settings.py'), 'r') as f:
            file_string = f.read()
            f.close()
        pattern = re.compile(r"INSTALLED_APPS\s=\s\[\n*\s*")
        matches = pattern.finditer(file_string)
        for match in matches:
            stop = match.span()[-1]
        new_file_string = file_string[:stop] + "'" + app_name + "'" + ',' + '\n\t' + file_string[stop:]
        with open(os.path.join(filepath_proj,'settings.py'), 'w') as f:
            f.write(new_file_string)
            f.close()
        # Scaffold forms.py with commented-out starter examples.
        with open(os.path.join(filepath_app, 'forms.py'), 'w') as f:
            form_text = '''from django import forms
from django.core import validators
from django.contrib.auth.models import User
# from {}.models import model_name(s)
# In the HTML don't forget to add csrf_token!!
# class Form_Name(forms.Form):
# name = forms.CharField()
# email = forms.EmailField()
# text = forms.CharField(widget = forms.Textarea)
# botcatcher = forms.CharField(required = False, widget = forms.HiddenInput, validators=[validators.MaxLengthValidator(0)])
# class Form_From_Model(forms.ModelForm):
# class Meta:
# model = model_name
# ##Several options for how to specify fields:
# #Option 1
# fields = '__all__'
# #Option 2
# exclude = ['field_one', 'field_two'] ##include but all specified
# #Option 3
# fields = ('field_one', 'field_two') ##include only specified'''.format(app_name)
            f.write(form_text)
            f.close()
        # Scaffold the app-level urls.py with a namespaced, empty urlpatterns.
        with open(os.path.join(filepath_app, 'urls.py'), 'w') as f:
            urls_text = '''from django.conf.urls import url
from ''' + app_name + ' import views' + '\n\n' + '''app_name = '{}'
urlpatterns = [
]'''.format(app_name)
            f.write(urls_text)
            f.close()
        # Add "from <app> import views" to the project urls.py, right after
        # the admin import line.
        with open(os.path.join(filepath_proj, 'urls.py'), 'r') as f:
            main_urls_text = f.read()
            f.close()
        pattern = re.compile(r'from\s(.*|\s*)admin')
        matches = pattern.finditer(main_urls_text)
        for match in matches:
            stop = match.span()[-1]
        newstring = main_urls_text[:stop] + '\n' + 'from ' + app_name + ' import views' + main_urls_text[stop:]
        with open(os.path.join(filepath_proj, 'urls.py'), 'w') as f:
            f.write(newstring)
            f.close()
        # Create the templatetags package with a starter custom-tags module.
        try:
            os.mkdir(os.path.join(filepath_app, 'templatetags'))
        except:
            raise
        if os.path.exists(os.path.join(filepath_app, 'templatetags')) == True:
            with open(os.path.join(filepath_app, 'templatetags', '__init__.py'), 'w') as f:
                f.write('')
                f.close()
            with open(os.path.join(filepath_app, 'templatetags', '{}_custom_tags.py'.format(app_name)), 'w') as f:
                tag_template = '''from django import template
register = template.Library()
#<EMAIL>(name = 'filtername')
#def example(value,arg):
#Do something to value based on arg here#
##Ex. return value.replace(arg,'')##
#return value'''
                f.write(tag_template)
                f.close()
        # Extend views.py: extra imports after the render import, plus
        # commented-out example views appended at the end.
        with open(os.path.join(filepath_app, 'views.py'), 'r') as f:
            app_views_text = f.read()
            f.close()
        pattern = re.compile(r'from django.shortcuts import render')
        matches = pattern.finditer(app_views_text)
        for match in matches:
            stop = match.span()[-1]
        newstring = app_views_text[:stop] + '''\nfrom django.core.urlresolvers import reverse
from django.contrib.auth.decorators import login_required
from django.http import HttpResponseRedirect, HttpResponse
from django.contrib.auth import authenticate, login, logout''' + '\n' + '#from {}.forms import Form_Name(s)\n'.format(app_name) + '#from {}.models import Model_Name(s)\n\n'.format(app_name) + '#Remember to add LOGIN_URL = "/app_name/user_login" to settings.py if you are adding login' + app_views_text[stop:] + '''\n# def index(request):
# return render(request, '{}')
# def formview(request):
# form = Form_Name()
# if request.method == 'POST':
# form = Form_Name(request.POST)
# if form.is_valid():
# form.save(commit = True)
# return index(request)
# else:
# print('Error')
# return render(request, '{}', {{'form':form}})
# def modelview(request):
# example_data = Model_Name.objects.all()
# context_dict = {{'data_list':example_data}}
# return render(request, '{}', context = context_dict)'''.format(os.path.join(app_name,'index.html'), os.path.join(app_name,'form_page.html'), os.path.join(app_name,'model_page.html'))
        with open(os.path.join(filepath_app, 'views.py'), 'w') as f:
            f.write(newstring)
            f.close()
        # Append commented-out example model definitions to models.py.
        with open(os.path.join(filepath_app, 'models.py'), 'r') as f:
            app_models_text = f.read()
            f.close()
        newstring = app_models_text + '''\n# class Model_Name(models.Model):
# f_name = models.CharField(max_length = 256)
# l_name = models.CharField(max_length = 256)
# def __str__(self):
# return str(self.f_name + ' ' + self.l_name)'''
        with open(os.path.join(filepath_app, 'models.py'), 'w') as f:
            f.write(newstring)
            f.close()
        # Scaffold a faker-based auto_populate.py script at the project root.
        with open(os.path.join(filepath_base, 'auto_populate.py'), 'w') as f:
            generator_template = '''import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', '{}.settings')
import django
django.setup()
import faker
import random
from {}.models import #Model_Name(s)
f = faker.Faker()
def populate(N):
for entry in range(N):
# fake_fname = f.first_name()
# fake_lname = f.last_name()
# fake_email = f.free_email()
# added_object = Model_Name.objects.get_or_create(f_name = fake_fname, l_name = fake_lname, email = fake_email)[0]
if __name__ == '__main__':
print('populating data')
populate(N = )
print('data population complete')'''.format(os.getcwd().split(os.sep)[-1], app_name)
            f.write(generator_template)
            f.close()
| 2.21875
| 2
|
139. Word Break.py
|
SreenathMopuri/LeetcodeProblemSolutionsInPython
| 0
|
12774488
|
#139. Word Break
"""
Leetcode link: https://leetcode.com/problems/word-break/
Solution:
for each prefix, if prefix is in dict and wordbreak(remaining str)=True, then
return True, cache result of wordbreak;
"""
Python
------
class Solution:
def wordBreak(self, s: str, wordDict: List[str]) -> bool:
dp = [False] * (len(s)+1)
dp[len(s)] = True
for i in range(len(s) -1, -1, -1):
for w in wordDict:
if (i + len(w) <= len(s) and s[i: i+len(w)] == w):
dp[i] = dp[i + len(w)]
if dp[i]:
break
return dp[0]
C++
---
class Solution {
public:
int memo[301];//array as max limit length of string
bool find(string s, int start, vector<string>& wordDict){
if(start == s.length()) return true;
if(memo[start] != -1) return memo[start];//checking if we have found result before
bool res = false;
for(int i = 0;i<wordDict.size();i++){
if(s[start] != wordDict[i][0]) continue;//before checking whole word, just checking first char is macthing or not
if(s.substr(start,wordDict[i].length()) == wordDict[i]){
res = find(s,start+wordDict[i].length(),wordDict);
}
if(res) return memo[start] = true;//storing result in array
}
return memo[start] = false;//storing result in array
}
bool wordBreak(string s, vector<string>& wordDict) {
memset(memo,-1,sizeof(memo));
return find(s,0,wordDict);
}
};
C++
----
class Solution {
public:
bool wordBreak(string s, vector<string>& wordDict) {
// whether substring s[0:i) can be represented by dictionary
vector<bool> dp(s.size() + 1, false); // size from 0 to size
dp[0] = true;
for (int i = 1; i <= s.size(); i++)
{
for (auto word: wordDict)
{
if (i >= word.size() && dp[i-word.size()])
{
if (s.substr(i-word.size(), word.size()).compare(word) == 0)
{
dp[i] = true;
}
}
}
}
return dp[s.size()];
}
};
| 3.46875
| 3
|
dora/services/serializers.py
|
francoisromain/dora-back
| 1
|
12774489
|
import logging
from django.core.exceptions import ValidationError
from django.core.files.storage import default_storage
from rest_framework import serializers
from rest_framework.relations import PrimaryKeyRelatedField
from dora.structures.models import Structure, StructureMember
from .models import (
AccessCondition,
BeneficiaryAccessMode,
CoachOrientationMode,
ConcernedPublic,
Credential,
LocationKind,
Requirement,
Service,
ServiceCategories,
ServiceKind,
ServiceSubCategories,
)
logger = logging.getLogger(__name__)
class CreatablePrimaryKeyRelatedField(PrimaryKeyRelatedField):
def __init__(self, **kwargs):
self.max_length = kwargs.pop("max_length", None)
super().__init__(**kwargs)
def use_pk_only_optimization(self):
return True
def to_internal_value(self, data):
if isinstance(data, int):
return super().to_internal_value(data)
# If we receive a string instead of a primary key, search
# by value, and create a new object if not found
name = data.strip()
if name == "":
raise ValidationError("Cette valeur est vide")
if self.max_length is not None and len(name) > self.max_length:
raise ValidationError(
f"Cette valeur doit avoir moins de {self.max_length} caractères"
)
if self.root.instance:
structure = self.root.instance.structure
else:
structure_slug = self.root.initial_data["structure"]
structure = Structure.objects.get(slug=structure_slug)
if not structure:
raise ValidationError("La structure ne peut pas être vide")
queryset = self.queryset
# find if it already exists in the same structure
obj = queryset.filter(name=name, structure=structure).first()
if not obj:
# then in the global repository
obj = queryset.filter(name=name, structure=None).first()
if not obj:
# otherwise create it
obj = queryset.create(name=name, structure=structure)
return obj
class StructureSerializer(serializers.ModelSerializer):
has_admin = serializers.SerializerMethodField()
class Meta:
model = Structure
fields = [
"slug",
"name",
"short_desc",
"address1",
"address2",
"postal_code",
"city",
"url",
"siret",
"has_admin",
]
def get_has_admin(self, structure):
return structure.membership.filter(is_admin=True, user__is_staff=False).exists()
class ServiceSerializer(serializers.ModelSerializer):
is_available = serializers.SerializerMethodField()
forms_info = serializers.SerializerMethodField()
structure = serializers.SlugRelatedField(
queryset=Structure.objects.all(), slug_field="slug"
)
structure_info = StructureSerializer(source="structure", read_only=True)
kinds_display = serializers.SerializerMethodField()
category_display = serializers.SerializerMethodField()
subcategories_display = serializers.SerializerMethodField()
access_conditions = CreatablePrimaryKeyRelatedField(
many=True,
queryset=AccessCondition.objects.all(),
max_length=140,
required=False,
)
access_conditions_display = serializers.SerializerMethodField()
concerned_public = CreatablePrimaryKeyRelatedField(
many=True,
queryset=ConcernedPublic.objects.all(),
max_length=140,
required=False,
)
concerned_public_display = serializers.SerializerMethodField()
requirements = CreatablePrimaryKeyRelatedField(
many=True,
queryset=Requirement.objects.all(),
max_length=140,
required=False,
)
requirements_display = serializers.SerializerMethodField()
credentials = CreatablePrimaryKeyRelatedField(
many=True,
queryset=Credential.objects.all(),
max_length=140,
required=False,
)
credentials_display = serializers.SerializerMethodField()
location_kinds_display = serializers.SerializerMethodField()
beneficiaries_access_modes_display = serializers.SerializerMethodField()
coach_orientation_modes_display = serializers.SerializerMethodField()
department = serializers.SerializerMethodField()
can_write = serializers.SerializerMethodField()
class Meta:
model = Service
fields = [
"slug",
"name",
"short_desc",
"full_desc",
"kinds",
"category",
"subcategories",
"access_conditions",
"concerned_public",
"is_cumulative",
"has_fee",
"fee_details",
"beneficiaries_access_modes",
"beneficiaries_access_modes_other",
"coach_orientation_modes",
"coach_orientation_modes_other",
"requirements",
"credentials",
"forms",
"online_form",
"contact_name",
"contact_phone",
"contact_email",
"is_contact_info_public",
"location_kinds",
"remote_url",
"address1",
"address2",
"postal_code",
"city_code",
"city",
"geom",
"recurrence",
"suspension_date",
"structure",
"creation_date",
"modification_date",
"is_draft",
"is_available",
"forms_info",
"structure",
"structure_info",
"kinds_display",
"category_display",
"subcategories_display",
"access_conditions_display",
"concerned_public_display",
"requirements_display",
"credentials_display",
"location_kinds_display",
"beneficiaries_access_modes_display",
"coach_orientation_modes_display",
"department",
"can_write",
]
lookup_field = "slug"
def get_is_available(self, obj):
return True
def get_forms_info(self, obj):
forms = [{"name": form, "url": default_storage.url(form)} for form in obj.forms]
return forms
def get_kinds_display(self, obj):
return [ServiceKind(kind).label for kind in obj.kinds]
def get_location_kinds_display(self, obj):
return [LocationKind(kind).label for kind in obj.location_kinds]
def get_category_display(self, obj):
return ServiceCategories(obj.category).label if obj.category else ""
def get_subcategories_display(self, obj):
try:
return [ServiceSubCategories(cat).label for cat in obj.subcategories]
except ValueError:
logger.exception(
"Incorrect Service sub-category", extra={"values": obj.subcategories}
)
return []
def get_beneficiaries_access_modes_display(self, obj):
return [
BeneficiaryAccessMode(mode).label for mode in obj.beneficiaries_access_modes
]
def get_coach_orientation_modes_display(self, obj):
return [
CoachOrientationMode(mode).label for mode in obj.coach_orientation_modes
]
def get_access_conditions_display(self, obj):
return [item.name for item in obj.access_conditions.all()]
def get_concerned_public_display(self, obj):
return [item.name for item in obj.concerned_public.all()]
def get_requirements_display(self, obj):
return [item.name for item in obj.requirements.all()]
def get_credentials_display(self, obj):
return [item.name for item in obj.credentials.all()]
def get_department(self, obj):
code = obj.postal_code
return code[:3] if code.startswith("97") else code[:2]
def get_can_write(self, obj):
user = self.context.get("request").user
return obj.can_write(user)
# def validate_structure(self, value):
# user = self.context.get("request").user
# if (
# not user.is_staff
# and not StructureMember.objects.filter(
# structure_id=value.id, user_id=user.id
# ).exists()
# ):
# raise serializers.ValidationError(
# "Vous n’appartenez pas à cette structure", "not_member_of_struct"
# )
# return value
def validate(self, data):
user = self.context.get("request").user
structure = data.get("structure") or self.instance.structure
user_structures = StructureMember.objects.filter(user_id=user.id).values_list(
"structure_id", flat=True
)
if "structure" in data:
if not user.is_staff and data["structure"].id not in user_structures:
raise serializers.ValidationError(
{"structure": "Vous n’appartenez pas à cette structure"},
"not_member_of_struct",
)
assert structure.id is None or structure.id in user_structures or user.is_staff
if "access_conditions" in data:
self._validate_custom_choice(
"access_conditions", data, user, user_structures, structure
)
if "concerned_public" in data:
self._validate_custom_choice(
"concerned_public", data, user, user_structures, structure
)
if "requirements" in data:
self._validate_custom_choice(
"requirements", data, user, user_structures, structure
)
if "credentials" in data:
self._validate_custom_choice(
"credentials", data, user, user_structures, structure
)
return data
def _validate_custom_choice(self, field, data, user, user_structures, structure):
values = data[field]
for val in values:
if val.structure_id is not None and val.structure_id != structure.id:
raise serializers.ValidationError(
{field: "Ce choix n'est pas disponible dans cette structure"},
"unallowed_custom_choices_bad_struc",
)
return values
class AnonymousServiceSerializer(ServiceSerializer):
contact_name = serializers.SerializerMethodField()
contact_phone = serializers.SerializerMethodField()
contact_email = serializers.SerializerMethodField()
is_contact_info_public = serializers.SerializerMethodField()
def get_contact_name(self, obj):
return obj.contact_name if obj.is_contact_info_public else ""
def get_contact_phone(self, obj):
return obj.contact_phone if obj.is_contact_info_public else ""
def get_contact_email(self, obj):
return obj.contact_email if obj.is_contact_info_public else ""
def get_is_contact_info_public(self, obj):
return True if obj.is_contact_info_public else None
class ServiceListSerializer(ServiceSerializer):
class Meta:
model = Service
fields = [
"slug",
"name",
"structure",
"structure_info",
"postal_code",
"city",
"department",
"is_draft",
"modification_date",
"category_display",
"short_desc",
]
lookup_field = "slug"
class FeedbackSerializer(serializers.Serializer):
full_name = serializers.CharField()
email = serializers.EmailField()
message = serializers.CharField()
| 2.140625
| 2
|
raspirobot.py
|
AndrewNatoli/AN-PonderBot
| 0
|
12774490
|
"""
Contains logic for bump switches, sonar and motor control
"""
__author__ = 'andrew'
import os
import threading
import config
from rrb2 import *
from time import sleep
from enum import Enum
from random import randint
bacon = True
class RaspiRobot(threading.Thread):
class Incidents(Enum):
nothing = -1
wentForward = 0
wentReverse = 1
turnedLeft = 2
turnedRight = 3
crashedForward = 4
crashedLeft = 5
crashedRight = 6
class Directions(Enum):
stopped = -1
forward = 0
reverse = 1
left = 2
right = 3
okToRun = False # Keeps the thread alive
timeSinceDistanceScan = 0 # The amount of ticks since we last checked the sonar reading
direction = Directions.stopped
lastIncident = Incidents.nothing
moveTime = 0 # How many iterations have we been moving for?
stopTime = 0 # Stop moving after this many iterations of the run loop
leftCollision = False # Status of left lever switch used as a collision detector
rightCollision = False # Status of right lever switch used as a collision detector
distance = 9001 # Distance reading from the sonar
active = False # Whether or not we SHOULD move.
def __init__(self):
super(RaspiRobot, self).__init__()
print "Created Raspi Robot. MWAH HAWH HAWH!"
try:
self.rr = RRB2()
self.led1(True)
sleep(0.1)
self.led1(False)
sleep(0.1)
self.led1(False)
self.led2(False)
self.okToRun = True
print "Raspi Robot Initialized."
print "Start with \"start\" command!"
except Exception:
self.okToRun = False
print "Failed to initialize RaspiRobot Board"
# Run the robot!
def run(self):
print "Running Raspi Robot"
while self.okToRun:
if self.active:
self.doMotion()
else:
print "Robot stopped. Sleep for three seconds."
sleep(3)
# Use CLI command "start" to start the robot
def startMoving(self):
print "RasPi Robot: startMoving"
self.active = True
self.okToRun = True
# Make the robot stop moving
def stopMoving(self):
self.active = False
def doMotion(self):
# Check the sonar reading every fifty iterations
if self.timeSinceDistanceScan >= 20:
self.distance = self.rr.get_distance()
self.timeSinceDistanceScan = 0
# Increment the timeSinceDistanceScan
self.timeSinceDistanceScan += 1
# Check for side collisions first
self.leftCollision = self.rr.sw1_closed()
self.rightCollision = self.rr.sw2_closed()
if self.leftCollision or self.rightCollision:
# Did we move forward into something?
if self.direction == self.Directions.forward:
print "Crashed into something while moving forward."
self.lastIncident = self.Incidents.crashedForward
self.reverse(200)
""" Going in reverse is handled by the timer """
# Stopped?
if self.direction == self.Directions.stopped:
# Which switch did it?
if self.leftCollision and not self.rightCollision:
# Try turning right...
self.right(200)
elif self.rightCollision and not self.leftCollision:
# Try turning left...
self.left(200)
else:
# Keep backing up....
self.reverse(200)
# Are we moving? Should we stop?
if self.direction != self.Directions.stopped:
self.moveTime += 1
# If we've been going in a certain direction for too long, stop ourselves.
if self.moveTime >= self.stopTime:
# Reset our move counter... JUST IN CASE. 2014-10-15 Attempt to fix infinite reverse bug.
self.moveTime = 0;
# First, we'll stop...
howMoved = self.direction
# We were going backwards...
if howMoved == self.Directions.reverse:
# Why were we going in reverse?
if self.lastIncident == self.Incidents.crashedForward:
# We crashed while moving forward...
if self.leftCollision or self.rightCollision:
# We're STILL crashed? Ugh! Get free somehow!
if self.leftCollision and not self.rightCollision:
self.rr.right(0.5, config.__MAX_SPEED__)
elif self.rightCollision and not self.leftCollision:
self.rr.left(0.5, config.__MAX_SPEED__)
else: # Both are pressed... try to wiggle free.
self.rr.right(0.5, config.__MAX_SPEED__)
self.rr.left(0.5, config.__MAX_SPEED__)
self.reverse(250)
else:
# We're free!
self.lastIncident = self.Incidents.nothing
# Turn randomly...
choose = randint(1,2)
if choose == 1:
self.left(200)
else:
self.right(200)
# Did we crash while turning?
elif self.lastIncident == self.Incidents.crashedLeft or self.lastIncident == self.Incidents.crashedRight:
# We had crashed while turning left before backing up!
if self.lastIncident == self.Incidents.crashedLeft:
self.rr.right(0.5, config.__MAX_SPEED__) # Pauses the thread while this happens...
self.lastIncident = self.Incidents.nothing # Reset the incident
# Crashed right while turning right
if self.rr.sw2_closed():
self.lastIncident = self.Incidents.crashedRight
self.reverse(randint(200,450))
# No we didn't!
else:
# There's an obstacle closer than 10cm
if self.rr.get_distance() < 10:
while self.rr.get_distance() < 10:
self.rr.right(0.5)
if self.rr.sw2_closed():
self.lastIncident = self.Incidents.crashedRight
self.reverse(randint(200,500))
break # Quit this little distance check loop
# If we didn't crash right while in that loop... move forward!
if self.lastIncident is not self.Incidents.crashedRight:
self.forward(randint(200, 400))
# Obstacle further than 10cm
else:
self.lastIncident = self.Incidents.nothing
self.forward(randint(200, 400))
# We crashed while turning to the right somehow!
elif self.lastIncident == self.Incidents.crashedRight:
self.rr.left(0.5, config.__MAX_SPEED__) # Pauses the thread while this happens...
self.lastIncident = self.Incidents.nothing # Reset the incident
# Crashed right while turning right
if self.rr.sw1_closed():
self.lastIncident = self.Incidents.crashedLeft
self.reverse(randint(200,450))
# No we didn't!
else:
# There's an obstacle closer than 10cm
if self.rr.get_distance() < 10:
while self.rr.get_distance() < 10:
self.rr.left(0.5)
if self.rr.sw1_closed():
self.lastIncident = self.Incidents.crashedLeft
self.reverse(randint(200, 500))
break # Quit this little distance check loop
# If we didn't crash right while in that loop... move forward!
if self.lastIncident is not self.Incidents.crashedLeft:
self.forward(randint(200, 400))
# Obstacle further than 10cm
else:
self.lastIncident = self.Incidents.nothing
self.forward(randint(200, 400))
else:
print "Not sure how we ended up here.... (side crash --> reverse --> motion expiry)"
# We weren't backing up because of an incident. It was for the funsies! (or sonar or something)
else:
# If NOW we have a collision...
if self.leftCollision or self.rightCollision:
# Left Collision
if self.leftCollision and not self.rightCollision:
# Try moving right
self.rr.right(0.5,config.__MAX_SPEED__)
# If there's still a collision
if self.leftCollision or self.rightCollision:
# Back up and report the incident
self.reverse(randint(200,400))
self.lastIncident = self.Incidents.crashedLeft
# No collision.
else:
# Reverse if close to something
if self.rr.get_distance() < 10:
self.reverse(randint(200,400))
# Forward if not
else:
self.forward(randint(200,400))
# If we had a collision from the right... handle it just about the same way
elif self.rightCollision and not self.leftCollision:
self.rr.left(0.5,config.__MAX_SPEED__)
if self.leftCollision or self.rightCollision:
self.reverse(randint(200,400))
self.lastIncident = self.Incidents.crashedRight
else:
if self.rr.get_distance() < 10:
self.reverse(randint(200,400))
else:
self.forward(randint(200,400))
# No collision! Check distance and do things that way.
else:
if self.distance > 10:
choose = randint(1,4)
if choose == 1 or choose == 2:
self.forward(randint(200,400))
elif choose == 3:
self.left(randint(100,400))
else:
self.right(randint(100,400))
else:
choose = randint(1,3)
if choose == 1:
self.reverse(randint(100,400))
elif choose == 2:
self.left(randint(100,300))
else:
self.right(randint(100,300))
# We were moving forward...
if howMoved == self.Directions.forward:
# Something further than 10cm...
if self.distance > 10:
if not self.leftCollision and not self.rightCollision:
choose = randint(1, 9)
if choose == 7:
self.left(randint(40, 150))
elif choose == 8:
self.left(randint(40, 150))
elif choose == 9:
self.reverse(randint(40,150))
else:
self.forward(randint(200, 400))
else:
self.lastIncident = self.Incidents.crashedForward
if self.leftCollision and not self.rightCollision:
self.right(250)
elif self.rightCollision and not self.leftCollision:
self.left(250)
else:
self.reverse(randint(250, 450))
# Closer than 10cm...
else:
# Bumpers are clear
if not self.leftCollision and not self.rightCollision:
choose = randint(1,3)
if choose == 1:
self.left(250)
elif choose == 2:
self.right(250)
else:
self.reverse()
# Closer than 10cm and side collision
else:
self.lastIncident = self.Incidents.crashedForward
# Crashed left
if self.leftCollision and not self.rightCollision:
self.right(randint(200, 300))
# Crashed Right
elif self.rightCollision and not self.leftCollision:
self.left(randint(200, 300))
# Hit both sensors...
else:
self.reverse(randint(200, 400))
# We were turning left
if howMoved == self.Directions.left:
# Something's further than 10cm away
if self.distance > 10:
# Bumpers are clear...
if not self.leftCollision and not self.rightCollision:
self.forward(200)
# Side collision! D:
else:
self.lastIncident = self.Incidents.crashedLeft
self.reverse(randint(200,400))
# Something's closer than 10cm away
else:
# Keep turning left
if not self.leftCollision and not self.rightCollision:
self.left(250)
# Side collision!
else:
self.lastIncident = self.Incidents.crashedLeft
self.reverse(randint(200,400))
# We were turning right
if howMoved == self.Directions.right:
# Something is further than 10cm away
if self.distance > 10:
# Bumpers are clear...
if not self.leftCollision and not self.rightCollision:
self.forward(200)
# Side collision! D:
else:
self.lastIncident = self.Incidents.crashedRight
self.reverse(randint(200, 400))
# Something is closer than 10cm away
else:
# Keep turning right
if not self.leftCollision and not self.rightCollision:
self.right(250)
# Side collision!
else:
self.lastIncident = self.Incidents.crashedRight
self.reverse(randint(200, 400))
# These checks will happen WHILE we're moving
else:
# Check the distance
if self.distance <= 20:
# Are we moving forward?
if self.direction == self.Directions.forward:
# This will force the robot to stop and carry out post-motion behavior.
print "Something is in the way. Stop moving forward."
self.moveTime = self.stopTime
elif self.distance > 20:
# If we're moving in reverse and there's a clear path ahead
if self.direction == self.Directions.reverse:
if not self.leftCollision and not self.rightCollision:
self.moveTime = self.stopTime
elif self.leftCollision or self.rightCollision:
if self.leftCollision and not self.rightCollision:
self.right(randint(100,300))
self.lastIncident = self.Incidents.crashedLeft
elif self.rightCollision and not self.leftCollision:
self.left(randint(100,300))
self.lastIncident = self.Incidents.crashedRight
elif self.rightCollision and self.leftCollision:
self.reverse(randint(100,300))
self.lastIncident = self.Incidents.crashedForward
else:
print "Not sure how we ended up here (gibberish code sdf2hewdfsu9)"
# Start moving forward
def forward(self,time=200):
self.stop()
try:
print "Revving up..."
self.rr.set_motors(config.__MAX_SPEED__,0,config.__MAX_SPEED__,0)
self.direction = self.Directions.forward
self.stopTime = time
print "Moving forward for " + str(time) + " ticks."
except Exception, e:
print "Can't move forward!"
print e
self.stop()
# Start moving in reverse
def reverse(self,time=200):
self.stop()
try:
self.rr.set_motors(config.__MAX_SPEED__,1,config.__MAX_SPEED__,1)
self.direction = self.Directions.reverse
self.stopTime = time
print "Backing up for " + str(time) + " ticks."
except Exception, e:
print "Can't back up!"
print e
self.stop()
# Turn left
def left(self,time=200):
self.stop()
try:
self.rr.set_motors(config.__MAX_SPEED__,1,config.__MAX_SPEED__,0)
self.direction = self.Directions.left
self.stopTime = time
print "Turning left for " + str(time) + " ticks."
except Exception, e:
print "Can't turn left!"
print e
self.stop()
# Turn right
def right(self,time=200):
self.stop()
try:
self.rr.set_motors(config.__MAX_SPEED__,0,config.__MAX_SPEED__,1)
self.direction = self.Directions.right
self.stopTime = time
print "Turning right for " + str(time) + " ticks."
except Exception, e:
print "Can't turn right!"
print e
self.stop()
# Stop the robot. If we can't, shut it down.
def stop(self):
self.moveTime = 0
self.stopTime = 0
try:
self.rr.stop()
print "Stopped."
except Exception:
print "WARNING: Couldn't stop robot!"
for i in range(0,100):
try:
self.rr.stop()
print "OK: Regained control."
return # We're clear.
except Exception:
pass
# If we've lost control entirely then shut down the system.
os.system("sudo shutdown -h 0 PonderBot lost control of RaspiRobot board.")
# First LED on the board
def led1(self,boolean):
self.rr.set_led1(boolean)
# Second LED on the board
def led2(self,boolean):
self.rr.set_led2(boolean)
# The extra LED sitting in the first open collector output
def led3(self,boolean):
self.rr.set_oc1(boolean)
def kill(self):
print "Killed Raspi Robot Thread."
self.okToRun = False
| 3.1875
| 3
|
dynamic_programming/longest_common_subsequence/test.py
|
Shawn-Ng/algorithms-test
| 0
|
12774491
|
# Recursive, O(2^n)
def LCS(X, Y, m, n):
if m == 0 or n == 0:
return 0
elif X[m - 1] == Y[n - 1]:
return 1 + LCS(X, Y, m - 1, n - 1)
else:
return max(LCS(X, Y, m - 1, n), LCS(X, Y, m, n - 1))
X = "AGGTAB"
Y = "GXTXAYB"
print("Length of LCS is ", LCS(X, Y, len(X), len(Y)))
# Overlapping Substructure, Tabulation, O(mn)
def LCS(X, Y):
m = len(X)
n = len(Y)
L = [[None] * (n + 1) for i in range(m + 1)]
# build L[m+1][n+1] bottom up
# L[i][j] contains length of LCS of X[0..i-1]
# and Y[0..j-1]
for i in range(m + 1):
for j in range(n + 1):
if i == 0 or j == 0:
L[i][j] = 0
elif X[i - 1] == Y[j - 1]:
L[i][j] = L[i - 1][j - 1] + 1
else:
L[i][j] = max(L[i - 1][j], L[i][j - 1])
# L[m][n] contains LCS of X[0..m-1] & Y[0..n-1]
return L[m][n]
X = "ABCDGH"
Y = "AEDFHR"
print("Length of LCS is ", LCS(X, Y))
X = "AGGTAB"
Y = "GXTXAYB"
print("Length of LCS is ", LCS(X, Y))
| 3.515625
| 4
|
day-6/main.py
|
a18antsv/Python-Two-Week-Challenge
| 0
|
12774492
|
<filename>day-6/main.py
import os
import requests
from bs4 import BeautifulSoup
from babel.numbers import format_currency
def get_countries():
countries = []
url = "https://www.iban.com/currency-codes"
request = requests.get(url)
soup = BeautifulSoup(request.text, "html.parser")
table = soup.find("table")
rows = table.find_all("tr")[1:]
for row in rows:
cells = row.find_all("td")
if cells[1].text == "No universal currency":
continue
countries.append({
"name": cells[0].text.capitalize(),
"code": cells[2].text
})
return countries
def print_countries(countries = []):
for index, country in enumerate(countries):
print(f"# {index} {country['name']}")
def ask_country(countries = []):
user_input = input("#: ")
try:
number = int(user_input.strip())
if number in range(0, len(countries)):
return countries[number]
print("Choose a number from the list.")
except ValueError:
print("That wasn't a number.")
return ask_country(countries)
def ask_amount(code_1 = "", code_2 = ""):
user_input = input(f"How many {code_1} do you want to convert to {code_2}?\n")
try:
amount = int(user_input.strip())
if(amount >= 0):
return amount
print("The amount cannot be negative.\n")
except ValueError:
print("That wasn't a number.\n")
return ask_amount(code_1, code_2)
def get_converted_amount(code_1 = "gbp", code_2 = "usd", amount = 50):
url = f"https://transferwise.com/gb/currency-converter/{code_1}-to-{code_2}-rate?amount={amount}"
request = requests.get(url)
soup = BeautifulSoup(request.text, "html.parser")
exchange_rate = float(soup.select_one("h3.cc__source-to-target span.text-success").text)
return amount * exchange_rate
def main():
os.system("clear")
print("Welcome to CurrencyConvert PRO 2000")
countries = get_countries()
print_countries(countries)
print("\nWhere are you from? Choose a country by number.")
country_1 = ask_country(countries)
print(country_1["name"] + "\n")
print("Now choose another country.\n")
country_2 = ask_country(countries)
print(country_2["name"] + "\n")
amount = ask_amount(country_1["code"], country_2["code"])
converted_amount = get_converted_amount(country_1["code"], country_2["code"], amount)
print(
format_currency(amount, country_1["code"], locale="ko_KR") +
" is " +
format_currency(converted_amount, country_2["code"], locale="ko_KR")
)
main()
| 3.71875
| 4
|
qtoggleserver/mppsolar/commands/qpigs.py
|
qtoggle/qtoggleserver-mppsolar
| 1
|
12774493
|
<reponame>qtoggle/qtoggleserver-mppsolar
from .base import Command
class QPIGS(Command):
REQUEST_FMT = 'QPIGS'
RESPONSE_FMT = (
'{grid_voltage:f} '
'{grid_frequency:f} '
'{ac_output_voltage:f} '
'{ac_output_frequency:f} '
'{ac_output_apparent_power:f} '
'{ac_output_active_power:f} '
'{ac_output_load:f} '
'{bus_voltage:f} '
'{battery_voltage:f} '
'{battery_charging_current:f} '
'{battery_state_of_charge:f} '
'{heat_sink_temperature:f} '
'{pv_current:f} '
'{pv_voltage:f} '
'{scc_voltage:f} '
'{battery_discharging_current:f} '
'{has_sbu_priority:b}'
'{is_configuration_status_changed:b}'
'{is_scc_firmware_updated:b}'
'{has_load:b}'
'{is_battery_voltage_too_steady_while_charging:b}'
'{is_battery_charging:b}'
'{is_battery_charging_from_scc:b}'
'{is_battery_charging_from_grid:b}'
)
UNITS = {
'ac_output_active_power': 'W',
'ac_output_apparent_power': 'VA',
'ac_output_frequency': 'Hz',
'ac_output_load': '%',
'ac_output_voltage': 'V',
'battery_charging_current': 'A',
'battery_discharging_current': 'A',
'battery_state_of_charge': '%',
'battery_voltage': 'V',
'battery_voltage_offset_fans': '10mV',
'bus_voltage': 'V',
'grid_frequency': 'Hz',
'grid_voltage': 'V',
'heat_sink_temperature': 'C',
'pv_charging_power': 'W',
'pv_current': 'A',
'pv_power': 'W',
'pv_voltage': 'V',
'scc_voltage': 'V',
}
DISPLAY_NAMES = {
'ac_output_active_power': 'AC Output Active Power',
'ac_output_apparent_power': 'AC Output Apparent Power',
'ac_output_frequency': 'AC Output Frequency',
'ac_output_load': 'AC Output Load',
'ac_output_voltage': 'AC Output Voltage',
'battery_charging_current': 'Battery Charging Current',
'battery_discharging_current': 'Battery Discharging Current',
'battery_state_of_charge': 'Battery State Of Charge',
'battery_voltage': 'Battery Voltage',
'battery_voltage_offset_fans': 'Battery Voltage Offset Fans',
'bus_voltage': 'Bus Voltage',
'eeprom_version': 'EEPROM Version',
'grid_frequency': 'Grid Frequency',
'grid_voltage': 'Grid Voltage',
'has_load': 'Has Load',
'has_sbu_priority': 'Has SBU Priority',
'heat_sink_temperature': 'Heat Sink Temperature',
'is_ac_output_from_grid_or_pv': 'AC Output From Grid/PV',
'is_battery_charging': 'Battery Charging',
'is_battery_charging_from_grid': 'Battery Charging From Grid',
'is_battery_charging_from_scc': 'Battery Charging From SCC',
'is_battery_float_charging': 'Battery Float-charging',
'is_battery_low': 'Battery Low',
'is_battery_present': 'Battery Present',
'is_battery_voltage_too_steady_while_charging': 'Battery Voltage Too Steady While Charging',
'is_configuration_status_changed': 'Configuration Status Changed',
'is_dustproof_installed': 'Dustproof Installed',
'is_grid_present': 'Grid Present',
'is_scc_active': 'SCC Active',
'is_scc_firmware_updated': 'SCC Firmware Updated',
'is_turned_on': 'Turned On',
'pv_charging_power': 'PV Charging Power',
'pv_current': 'PV Current',
'pv_power': 'PV Power',
'pv_voltage': 'PV Voltage',
'scc_voltage': 'SCC Voltage',
}
VIRTUAL_PROPERTIES = {
'pv_power': {
'value': lambda properties: properties.get('pv_current', 0) * properties.get('pv_voltage', 0),
'type': 'float'
},
'is_battery_charging': {
'value': lambda properties: (
properties.get('is_battery_charging_from_grid', False) or
properties.get('is_battery_charging_from_scc', False)
),
'type': 'bool'
},
}
class QPIGS_LV(QPIGS):
RESPONSE_FMT = (
'{grid_voltage:f} '
'{grid_frequency:f} '
'{ac_output_voltage:f} '
'{ac_output_frequency:f} '
'{ac_output_apparent_power:f} '
'{ac_output_active_power:f} '
'{ac_output_load:f} '
'{bus_voltage:f} '
'{battery_voltage:f} '
'{battery_charging_current:f} '
'{battery_state_of_charge:f} '
'{heat_sink_temperature:f} '
'{pv_current:f} '
'{pv_voltage:f} '
'{scc_voltage:f} '
'{battery_discharging_current:f} '
'{is_scc_active:b}'
'{is_battery_charging_from_grid:b}'
'{is_battery_charging_from_scc:b}'
'{is_battery_low:b}'
'{is_battery_present:b}'
'{is_grid_present:b}'
'{has_load:b} '
'{_reserved_1:s} '
'{_reserved_2:s} '
'{pv_power:f} '
'{is_battery_float_charging:b}'
'{is_turned_on:b}'
'{_reserved_3:b}'
)
class QPIGS_GKMK(QPIGS):
RESPONSE_FMT = (
'{grid_voltage:f} '
'{grid_frequency:f} '
'{ac_output_voltage:f} '
'{ac_output_frequency:f} '
'{ac_output_apparent_power:f} '
'{ac_output_active_power:f} '
'{ac_output_load:f} '
'{bus_voltage:f} '
'{battery_voltage:f} '
'{battery_charging_current:f} '
'{battery_state_of_charge:f} '
'{heat_sink_temperature:f} '
'{pv_current:f} '
'{pv_voltage:f} '
'{scc_voltage:f} '
'{battery_discharging_current:f} '
'{is_ac_output_from_grid_or_pv:b}'
'{is_configuration_status_changed:b}'
'{is_scc_firmware_updated:b}'
'{has_load:b}'
'{_reserved_1:b}'
'{is_battery_charging:b}'
'{is_battery_charging_from_scc:b}'
'{is_battery_charging_from_grid:b} '
'{battery_voltage_offset_fans:d} '
'{eeprom_version:d} '
'{pv_charging_power:d} '
'{is_battery_float_charging:b}'
'{is_turned_on:b}'
'{is_dustproof_installed:b}'
)
class QPIGS_MAX(QPIGS):
    # QPIGS response layout for "MAX" model inverters.  Identical to the
    # GK/MK layout except for the first two flag fields (SBU priority and
    # battery-voltage-steadiness instead of AC-output source / reserved).
    RESPONSE_FMT = (
        '{grid_voltage:f} '
        '{grid_frequency:f} '
        '{ac_output_voltage:f} '
        '{ac_output_frequency:f} '
        '{ac_output_apparent_power:f} '
        '{ac_output_active_power:f} '
        '{ac_output_load:f} '
        '{bus_voltage:f} '
        '{battery_voltage:f} '
        '{battery_charging_current:f} '
        '{battery_state_of_charge:f} '
        '{heat_sink_temperature:f} '
        '{pv_current:f} '
        '{pv_voltage:f} '
        '{scc_voltage:f} '
        '{battery_discharging_current:f} '
        '{has_sbu_priority:b}'
        '{is_configuration_status_changed:b}'
        '{is_scc_firmware_updated:b}'
        '{has_load:b}'
        '{is_battery_voltage_too_steady_while_charging:b}'
        '{is_battery_charging:b}'
        '{is_battery_charging_from_scc:b}'
        '{is_battery_charging_from_grid:b} '
        '{battery_voltage_offset_fans:d} '
        '{eeprom_version:d} '
        '{pv_charging_power:d} '
        '{is_battery_float_charging:b}'
        '{is_turned_on:b}'
        '{is_dustproof_installed:b}'
    )
| 2.25
| 2
|
stadistic_basic/calculoz.py
|
nathramk/stadistic_basic
| 0
|
12774494
|
class CalculoZ():
    """Helper for the two-sample Z statistic."""

    def calcular_z(self, n1, n2, x, y, ux, uy, ox, oy):
        """Return the Z statistic for the difference of two sample means.

        numerator:   (x - y) - (ux - uy), the observed minus the
                     hypothesized difference of means
        denominator: sqrt(ox^2/n1 + oy^2/n2), the standard error built from
                     the two population standard deviations and sample sizes
        """
        numerador = (x - y) - (ux - uy)
        error_estandar = (ox ** 2 / n1 + oy ** 2 / n2) ** 0.5
        return numerador / error_estandar
| 3.0625
| 3
|
setup.py
|
kororo/docker-template
| 2
|
12774495
|
<filename>setup.py
import os
from setuptools import setup

pkg = 'docker-template'


def get_requirements(r: str):
    """Parse a pip requirements file and return its requirement objects.

    parse_requirements moved into pip._internal in pip 10, so the new
    location is tried first with a fallback to the pre-10 public path.
    """
    try:  # for pip >= 10
        from pip._internal.req import parse_requirements
    except ImportError:  # for pip <= 9.0.3
        from pip.req import parse_requirements
    # parse_requirements() returns a generator of InstallRequirement objects
    return parse_requirements(r, session=pkg)


rf = os.path.join('requirements.txt')
# Older pip exposes the requirement string as .req; pip >= 20 renamed it
# to .requirement — support both.
rs = [str(getattr(ir, 'req', None) or ir.requirement) for ir in get_requirements(rf)]

setup(
    name=pkg,
    packages=['cli'],
    version='0.2',
    description='',
    author='<NAME>',
    author_email='<EMAIL>',
    url='https://github.com/kororo/docker-template',
    classifiers=[],
    # BUG FIX: rs is already a list of requirement strings; the original
    # passed [rs], a nested list, which setuptools cannot install from.
    install_requires=rs,
    scripts=['cli/docker-template']
)
| 1.859375
| 2
|
setup.py
|
chrisbrake/docser
| 0
|
12774496
|
import versioneer
from setuptools import setup

# Long description and pinned requirements come straight from the repo files.
with open('README.rst', 'r') as readme_file:
    long_description = readme_file.read()

with open('requirements.txt') as requirements_file:
    requirements = requirements_file.readlines()

# versioneer derives the version string from VCS tags and provides the
# matching setuptools command classes.
setup(
    name='docser',
    packages=['docser'],
    version=versioneer.get_version(),
    cmdclass=versioneer.get_cmdclass(),
    description='A simple server for hosting Sphinx documentation',
    long_description=long_description,
    long_description_content_type='text/x-rst',
    author='<NAME>',
    author_email='<EMAIL>',
    url='https://github.com/chrisbrake/docser',
    keywords=['docser', 'Sphinx', 'documentation'],
    classifiers=[
        'License :: OSI Approved :: BSD License',
        'Programming Language :: Python',
    ],
    install_requires=requirements
)
| 1.367188
| 1
|
skill/tests.py
|
ngr/sm_00
| 0
|
12774497
|
<reponame>ngr/sm_00
from django.test import TestCase
from django.utils import timezone
from django.db import connection
#from django.core.urlresolvers import reverse
import datetime
from random import random, randrange
from skill.models import Skill, SkillTrained
from slave.models import Slave, SlaveManager, RaceDefaults
from slave.settings import *
def push_base_skill():
    """Insert a baseline 'Learning1' skill row via raw SQL.

    Bypasses the ORM on purpose (the table name and database are
    hard-coded); used as a test fixture helper.
    """
    cursor = connection.cursor()
    cursor.execute("""INSERT INTO `dj_sm_00`.`skill_skill`
        ( `id` , `name` , `primary_attribute` , `difficulty` )
        VALUES ( NULL , 'Learning1', '0', '1' );""")
    print("Executed raw SQL to create learning skill")
    return
def get_current_db_skills():
    """Debug helper: print the first skill row from the raw skill table.

    NOTE(review): cursor.fetchone() returns a single row tuple, so this
    loop iterates the COLUMNS of the first row (labelled "Row:" each) and
    the final `return row` returns only the last column value — likely
    unintended.  All call sites in this module are commented out.
    """
    cursor = connection.cursor()
    cursor.execute("SELECT * FROM `dj_sm_00`.`skill_skill` WHERE 1;")
    for row in cursor.fetchone():
        print("Row:", row)
    return row
def create_skill(name='<NAME>', pr_attr=0, difficulty=1):
    """Create, persist and return a Skill with the given attributes."""
    skill = Skill(name=name, primary_attribute=pr_attr, difficulty=difficulty)
    skill.save()
    return skill
def create_slave(name='Slave', age=1):
    """Spawn and return a Slave born `age` game-years before now."""
    born = timezone.now() - datetime.timedelta(seconds=(GAME_YEAR * age))
    return Slave.objects.spawn(name=name, date_birth=born)
class SkillManagerTests(TestCase):
    """ Tests for SkillTrained models. It represents the level
    of skill trained for each Slave. """

    def test_set_st_available_skill(self):
        # set_st should persist the experience value for a slave/skill pair.
        sl = create_slave()
        print("Created test slave", sl)
        sk1 = create_skill()
        print("Created test skill", sk1)
        SkillTrained.objects.set_st(sl, sk1, 3)
        print("Created a connection")
        st = SkillTrained.objects.filter(slave=sl)
        self.assertEqual(SkillTrained.objects.filter(slave=sl, skill=sk1).get().exp, 3)

    def test_set_st_with_skill_101(self):
        """ MaxValue is 100. Should return None """
        sl = create_slave()
        sk1 = create_skill()
        self.assertEqual(SkillTrained.objects.set_st(sl, sk1, 101), None)

    #####################
    ### FIX
    def test_create_st_unavailable_skill(self):
        # NOTE(review): this test has no assertion — it only exercises
        # set_st with an unmet skill prerequisite.
        sl = create_slave()
        print("Created test slave", sl)
        sk1 = create_skill()
        print("Created test skill", sk1)
        sk2 = create_skill()
        sk2.required_skills.add(sk1)
        # (translated) Doesn't work at all: this should fail, but it passes.
        # (translated) These automated DB tests behave very poorly.
        SkillTrained.objects.set_st(sl, sk2, 3)
    ## END OF FIX
    ###########################

    def test_get_skill_level_with_skill_ok(self):
        """ Should return skill level as defined """
        # 6 exp is expected to map to level 3 — presumably level is derived
        # from exp inside get_skill_level; confirm against the manager.
        sl = create_slave()
        sk1 = create_skill()
        SkillTrained.objects.set_st(sl, sk1, 6)
        st = SkillTrained.objects.filter(slave=sl)
        self.assertEqual(SkillTrained.objects.get_skill_level(sl, sk1), 3)

    def test_get_skill_level_with_skill_zero(self):
        """ Should return zero level """
        sl = create_slave()
        sk1 = create_skill()
        SkillTrained.objects.set_st(sl, sk1, 0)
        st = SkillTrained.objects.filter(slave=sl)
        print("get_skill_level() for ZERO skill:", SkillTrained.objects.get_skill_level(sl, sk1))
        self.assertEqual(SkillTrained.objects.get_skill_level(sl, sk1), 0)

    def test_get_skill_level_with_skill_not_trained(self):
        """ Should return zero level """
        sl = create_slave()
        sk1 = create_skill()
        print("get_skill_level() for NOT trained skill:", SkillTrained.objects.get_skill_level(sl, sk1))
        self.assertEqual(SkillTrained.objects.get_skill_level(sl, sk1), 0)

    ############################
    # Skill usage
    def test_use_skill_return_type(self):
        # use_skill returns a boolean success flag.
        sl = create_slave()
        sk1 = create_skill()
        SkillTrained.objects.set_st(sl, sk1, 50)
        result = SkillTrained.objects.use_skill(sl, sk1)
        print("Result:", result)
        self.assertIn(result, [True, False])

    def test_use_skill_bonus_max(self):
        # With the maximum bonus (1) the roll is expected to always succeed.
        sl = create_slave()
        sk1 = create_skill()
        SkillTrained.objects.set_st(sl, sk1, 50)
        result = SkillTrained.objects.use_skill(sl, sk1, 1)
        print("Result:", result)
        self.assertTrue(result)

    def test_use_skill_bonus_min(self):
        # With the minimum bonus (-1) the roll is expected to always fail.
        sl = create_slave()
        sk1 = create_skill()
        SkillTrained.objects.set_st(sl, sk1, 50)
        result = SkillTrained.objects.use_skill(sl, sk1, -1)
        print("Result:", result)
        self.assertFalse(result)

    def test_use_skill_invalid_bonus_big(self):
        """ Catches invalid bonus """
        sl = create_slave()
        sk1 = create_skill()
        SkillTrained.objects.set_st(sl, sk1, 50)
        with self.assertRaises(Exception) as cm:
            SkillTrained.objects.use_skill(sl, sk1, 5)
        self.assertEqual(type(cm.exception), AttributeError)

    def test_use_skill_invalid_bonus_small(self):
        """ Catches invalid bonus """
        sl = create_slave()
        sk1 = create_skill()
        SkillTrained.objects.set_st(sl, sk1, 50)
        with self.assertRaises(Exception) as cm:
            SkillTrained.objects.use_skill(sl, sk1, -1.1)
        self.assertEqual(type(cm.exception), AttributeError)

#Create your tests here.
| 2.875
| 3
|
sundial2.py
|
WillBickerstaff/sundial
| 1
|
12774498
|
import sys, itertools, textwrap, os
ltrs, minlen, wd = (sys.argv[1].lower(), int(sys.argv[2]), set())
dictwords = set(l.lower().strip() for l in open('/usr/share/dict/words') if l.strip() >= minlen and ltrs[0] in l.lower() and "'" not in l)
for i in range(minlen, len(ltrs) + 1): wd = wd | set(dictwords & set(''.join(l) for l in itertools.permutations(ltrs, i)))
for l in textwrap.wrap(' '.join([x for x in sorted(wd, key=lambda x: len(x))]), int(os.popen('stty size', 'r').read().split()[1])): print l
| 2.9375
| 3
|
Integration/Deployer/deployer.py
|
gaurav-kc/IOT_Platform
| 3
|
12774499
|
import flask
import threading
import requests
import json
import sshclient
import deployer_helper
from flask import request
from pathlib import Path
def req_handler(app, port):
    """Register the /deployment/dodeploy endpoint on `app` and start serving.

    The endpoint deploys a user service to a remote machine over SSH and
    reports the outcome to the service lifecycle manager.
    """
    @app.route('/deployment/dodeploy', methods=['POST'])
    def dodeploy():
        # Pre-initialise every field used in the status report so the
        # except-branch cannot raise NameError when parsing fails early.
        serviceid = username = service_name = application_name = ip = ''
        containerid = ''
        try:
            req = request.get_json()
            print(req)
            ip = req["serverip"]
            sshport = req['sshPort']
            machine_username = req['machineusername']
            machine_password = req['password']
            serviceid = req['serviceid']
            username = req['username']
            application_name = req['applicationname']
            service_name = req['servicename']
            if username != 'admin':
                # User services: resolve the service file and sensor topic
                # from the per-application config, then build its Dockerfile.
                config_path = '/userservice/' + username + '/' + application_name + '/config.json'
                filename = deployer_helper.getFileName(config_path, service_name)
                smres = deployer_helper.getSensorTopic(username, application_name, service_name, serviceid, config_path)
                deployer_helper.notifyActionManager(username, application_name, service_name, serviceid, config_path, smres['sensor_host'])
                sensortopic = smres['temporary_topic']
                print("Returned Sensor topic by sensor manager is ", sensortopic)
                deployer_helper.generateDokerFile(config_path, service_name, sensortopic, serviceid)
                file_path = '/userservice/' + username + '/' + application_name + '/' + service_name + '/' + filename
            else:
                # Bootstrap (admin) services ship with the platform itself.
                filename = service_name + '.py'
                file_path = '/userservice/bootstrap/init/' + service_name + '/' + filename
                sensortopic = "None"
            print("file path : ", file_path)
            containerid = sshclient.deployService(username, machine_username, machine_password, ip, port, serviceid, service_name, file_path, filename, sensortopic)
            containerid = containerid[:-1]  # strip the trailing newline from the ssh output
            URL = "http://localhost:8080/servicelcm/service/deploymentStatus"
            req = {
                'serviceId': serviceid,
                'username': username,
                'serviceName': service_name,
                'status': 'success',
                'ip': ip,
                'port': 55555,
                'containerId': containerid,
                'applicationName': application_name
            }
            print(req)
            requests.post(url=URL, json=req)
        except Exception as error:
            print("Error ", error)
            URL = "http://localhost:8080/servicelcm/service/deploymentStatus"
            req = {
                'serviceId': serviceid,
                'username': username,
                'serviceName': service_name,
                # BUG FIX: the error path previously reported 'success' too,
                # so the lifecycle manager could never observe a failure.
                'status': 'failure',
                'ip': ip,
                'port': 55555,
                'containerId': containerid,
                'applicationName': application_name
            }
            requests.post(url=URL, json=req)
        res = {'status': 'ok'}
        return flask.jsonify(res)

    app.run(host='0.0.0.0', port=port)
def main():
    """Start the deployment manager HTTP server on a worker thread."""
    deployer_app = flask.Flask('Deoployment Manger')
    deployer_port = 8888  # deployer port
    server_thread = threading.Thread(target=req_handler, args=(deployer_app, deployer_port))
    server_thread.start()
    # Block the main thread for the lifetime of the server.
    server_thread.join()
    return


if __name__ == '__main__':
    main()
| 2.25
| 2
|
examples/unpack.py
|
ubirch/ubirch-protocol-python
| 3
|
12774500
|
"""Decode and pretty-print a ubirch protocol package (UPP).

The UPP may be given as a binary file name, a hex string or a base64
string; the script prints its hex/base64 encodings and the decoded
msgpack fields (version, UUID, optional previous signature, payload
type, payload and signature).
"""
import binascii
import sys
from uuid import UUID

import msgpack

# Protocol variant nibbles (low nibble of the version byte).
# NOTE: `signed` is currently unused; only `chained` is checked below.
signed = 0x02
chained = 0x03

usage = "  usage:\n" \
        "  python3 unpack.py [ <binary-file-name> | <UPP(hex)> | <UPP(base64)> ]"

if len(sys.argv) < 2:
    print(usage)
    sys.exit(1)

upp = b''
arg = sys.argv[1]

# try to get UPP from binary file
try:
    with open(arg, "rb") as f:
        upp = f.read()
except OSError:
    pass

if not upp:
    # try to parse argument as hex string representation of UPP
    try:
        upp = binascii.unhexlify(arg)
    except binascii.Error:
        pass

if not upp:
    # try to parse argument as base64 string representation of UPP
    try:
        upp = binascii.a2b_base64(arg)
    except Exception:
        print("unable to parse UPP from argument: \"{}\"".format(arg))
        print(usage)
        sys.exit(1)

# A UPP is a msgpack array of 5 (0x95, chained) or 6 (0x96) elements.
if not (upp[0] == 0x95 or upp[0] == 0x96):
    print("invalid UPP")
    print(usage)
    sys.exit(1)

# unpack msgpack formatted UPP; the high nibble of byte 1 is the version
if upp[1] >> 4 == 2:  # version 2
    unpacked = msgpack.unpackb(upp)
elif upp[1] >> 4 == 1:  # version 1 (legacy)
    unpacked = msgpack.unpackb(upp, raw=True)
else:
    print("unsupported UPP version")
    print(usage)
    sys.exit(1)

print("   hex: {}".format(binascii.hexlify(upp).decode()))
print("base64: {}".format(binascii.b2a_base64(upp).decode()))

version = unpacked[0]
print("- Version: 0x{:02x}".format(version))

uuid = UUID(binascii.hexlify(unpacked[1]).decode())
print("- UUID: {}".format(str(uuid)))

# Chained UPPs carry the signature of the previous package as element 2.
if version & 0x0F == chained:
    prev_sign = unpacked[2]
    print("- prev.Sign.: {}".format(binascii.b2a_base64(prev_sign, newline=False).decode()))
    print("       [hex]: {:s} ({:d} bytes)".format(binascii.hexlify(prev_sign).decode(), len(prev_sign)))

# Trailing elements are fixed: type, payload, signature (negative indexing
# works for both the 5- and 6-element layouts).
payload_type = unpacked[-3]
print("- Type: 0x{:02x}".format(payload_type))

payload = unpacked[-2]
if type(payload) is bytes:
    print("- Payload: {:s}".format(binascii.b2a_base64(payload, newline=False).decode()))
    print("     [hex]: {:s} ({:d} bytes)".format(binascii.hexlify(payload).decode(), len(payload)))
else:
    print("- Payload: {:s}".format(repr(payload)))

signature = unpacked[-1]
print("- Signature: {:s}".format(binascii.b2a_base64(signature, newline=False).decode()))
print("       [hex]: {:s} ({:d} bytes)".format(binascii.hexlify(signature).decode(), len(signature)))
| 2.953125
| 3
|
elram/repository/commands.py
|
Bgeninatti/elram
| 0
|
12774501
|
import logging
from elram.config import load_config
from elram.repository.models import User, database, Event, Attendance, Account, Transaction
CONFIG = load_config()
logger = logging.getLogger('main')
def populate_db(data):
    """Bulk-create model rows from a {section: [row_kwargs, ...]} mapping.

    Sections without a registered model class are logged and skipped.
    """
    models_mapping = {
        'users': User,
        'accounts': Account,
    }
    for section, rows in data.items():
        model_class = models_mapping.get(section)
        if model_class is None:
            logger.error('No model class found', extra={'model_key': section})
            continue
        instances = (model_class(**row) for row in rows)
        model_class.bulk_create(instances)
        logger.info(
            'Records created',
            extra={'model': model_class.__name__, 'records': len(rows)},
        )
def init_db(db_name, user, password, host, port):
    """Configure the shared database handle, connect, and ensure all tables exist.

    Returns the connected database so callers can manage its lifecycle.
    """
    database.init(database=db_name, user=user, password=password, host=host, port=port)
    database.connect()
    database.create_tables([User, Event, Attendance, Account, Transaction])
    return database
| 2.46875
| 2
|
IDSRunmodeVerify.py
|
PhilSchroeder/IDSDeathBlossom
| 19
|
12774502
|
# -*- coding: utf-8 -*-
#*************************************************************
# Copyright (c) 2003-2012, <NAME>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
# following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this list of conditions and the following
# disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other materials provided with the distribution.
# * Neither the name of the nor the names of its contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
#*************************************************************
from IDSUtils import *
from IDSMail import *
import time
from IDSRunmodeCompare import *
from IDSLogging import *
class RunmodeVerify:
    """Runmode that replays pcaps through the IDS and checks expected alerts.

    Python 2 code (print statements, dict.iteritems/has_key).
    NOTE(review): indentation below is reconstructed from a
    whitespace-mangled source — verify nesting against the original file.
    """
    #Given a list of pcaps and sids that we should see fire, validate.
    #TODO: auto-generate this list from the other modes
    #TODO: Support counts for events something like foo.pcap: [2999:10,433:2,1563:1]
    def verify(self):
        """Run each configured pcap and check that every expected sid fired.

        Writes a PASS/FAIL report to the global log directory.
        """
        import yaml
        failcnt = 0
        successcnt = 0
        resultsdict = {}
        #there is an inheritence problem here this is a nasty hack to set and unset self.fast.log
        #newfastlogorig = self.newfastlog
        #perflogorig = self.perflog
        f = open(self.Runmode.conf["verifyconf"])
        try:
            pcapMap = yaml.load(f)
        except:
            p_error("%s:Verify yaml conf mapping failed %s" % (str(whoami()), self.Runmode.conf["verifyconf"]))
            sys.exit(-10)
        f.close()
        for key in pcapMap['pcaps']:
            pcap = "%s/%s" % (pcapMap['pcapdir'], key)
            alertdict = self.deepDefaultDict()
            #for a perl programmer used to HoH's multi-dimensional dicts are very frustrating. Here we store a list because simply checking against a value using a
            #multi-dimensinonal dict creates a key? wtf? FIXME
            alertlist = []
            if os.path.isfile(pcap):
                resultsdict[key] = "PASS"
            else:
                p_warn("%s:Failed to find file %s\n" % (str(whoami()),pcap))
            if self.run_ids(pcap, "no") == 0:
                if os.path.exists(self.newfastlog):
                    #self.parse_fast(alertdict, self.newfastlog, self.mode)
                    # NOTE(review): verify2() calls self.parse_fast directly;
                    # this path goes through IDSRunmodeCompare instead.
                    self.IDSRunmodeCompare.parse_fast(alertdict, self.newfastlog, self.mode)
                    alertlist = alertdict.keys()
                    p_debug(self.newfastlog)
                    p_debug(str(alertlist))
                else:
                    p_error("%s: failed to find alert log file %s\n" % (str(whoami()), str(self.newfastlog)))
                    sys.exit(-1)
                for sid in pcapMap['pcaps'][key]:
                    if str(sid) not in alertlist and resultsdict[key] == "PASS":
                        #print "FAIL:sid %s not found in %s" % (sid, str(alertlist))
                        resultsdict[key] = "FAIL:sid %s not found in %s" % (sid, pcap)
                        failcnt += 1
                # NOTE(review): newfastlogorig/perflogorig are never assigned
                # (the assignments above are commented out), so these two
                # lines raise NameError when reached.
                self.newfastlog = newfastlogorig
                self.perflog = perflogorig
                if resultsdict[key] == "PASS":
                    #print "PASS:sid %s found in %s" % (sid, pcap)
                    successcnt += 1
            else:
                resultsdict[key] = "FAIL:IDS failure\n"
                failcnt += 1
        reportfile = "%s/verify-report-%s.txt" % (self.Runmode.conf["globallogdir"], str(self.currentts))
        report = open(reportfile, 'w')
        p_info("verify results\n")
        report.write("verify results\n")
        for key,value in resultsdict.iteritems():
            p_info("%s:%s" % (key,value))
            report.write("%s:%s\n" % (key,value))
        p_info("successcnt:%i\n" % successcnt)
        report.write("successcnt:%i\n" % successcnt)
        p_info("failcnt:%i\n" % failcnt)
        report.write("failcnt:%i\n" % failcnt)
        report.close()

    def verify2(self):
        """Extended verification: per-test rules files and alert-count operators.

        Each test declares a pcap, a rules file and expressions like
        ``<sid> <op> <count>`` (op in =, <, >, <=, >=, !=) that are matched
        against the parsed alert counts.
        NOTE(review): several latent defects are preserved here — bare
        ``next`` expressions where ``continue`` was intended (they are
        no-ops), the ``resulstsdict`` typo in the operator fall-through
        branch (NameError if reached), and the malformed ``%\\n`` format in
        the missing-pcap message (ValueError if reached).
        """
        import yaml
        failcnt = 0
        successcnt = 0
        resultsdict = {}
        #there is an inheritence problem here this is a nasty hack to set and unset self.fast.log
        #if self.newfastlog:
        #    newfastlogorig = self.newfastlog
        #perflogorig = self.perflog
        f = open(self.Runmode.conf["verifyconf"])
        try:
            testMap = yaml.load(f)
        except:
            p_error("%s:Verify yaml conf mapping failed %s" % (str(whoami()), self.Runmode.conf["verifyconf"]))
            sys.exit(-10)
        f.close()
        for testid in testMap:
            pcap = "%s/%s" % (testMap[testid]['pcapdir'],testMap[testid]['pcap'])
            rules = "%s/%s" % (testMap[testid]['ruledir'],testMap[testid]['rulefile'])
            alert_opt_regex = re.compile(r"\s*(?P<sid>\d+)\s*(?P<operator>(=|>=|<=|>|<|\!=))\s*(?P<count>\d+)\s*")
            #parse_fast will return [sid][mode]
            alertdict = recursivedefaultdict()
            resultsdict[testid] = "PASS"
            #for a perl programmer used to HoH's multi-dimensional dicts are very frustrating. Here we store a list because simply checking against a value using a
            #multi-dimensinonal dict creates a key? wtf? FIXME
            if not os.path.isfile(pcap) and not os.path.isfile(rules):
                resultsdict[testid] = "FAIL"
                p_info("FAIL:Failed to find pcap:%s or rules file:%\n" % (pcap,rules))
                failcnt += 1
                next
            self.Runmode.conf["usecustomrules"] = True
            self.conf["customrules"] = rules
            if self.conf.has_key("configtpl"):
                self.useTemplateConfig()
            else:
                p_error("Verification Runmode must have a config template supplied for the engine bailing")
                sys.exit(1)
            if self.run_ids(pcap, "no") == 0:
                if os.path.exists(self.newfastlog):
                    self.parse_fast(alertdict, self.newfastlog, self.mode)
                else:
                    resultsdict[testid] = "FAIL"
                    # NOTE(review): format string has two %s placeholders but
                    # the message text only mentions the log file.
                    p_info("FAIL:Failed to find alert log file %s\n" % (str(whoami()), str(self.newfastlog)))
                    failcnt += 1
                    next
                for match in alert_opt_regex.finditer(testMap[testid]['alerts']):
                    if alertdict.has_key(match.group('sid')):
                        #print alertdict[match.group('sid')]
                        #print alertdict[match.group('sid')][self.mode]
                        #print match.group("count")
                        if match.group('operator') == "=":
                            p_warn("equal operator found")
                            if int(match.group("count")) == int(alertdict[match.group('sid')][self.mode]):
                                resultsdict[testid] = "PASS"
                                p_info("PASS:sid %s found %s in %s" % (match.group('sid'), alertdict[match.group('sid')][self.mode] , pcap))
                                next
                            else:
                                resultsdict[testid] = "FAIL"
                                p_info("FAIL:sid %s found %s in %s" % (match.group('sid'), alertdict[match.group('sid')][self.mode] , pcap))
                                break
                        elif match.group('operator') == "<":
                            if int(match.group("count")) < int(alertdict[match.group('sid')][self.mode]):
                                resultsdict[testid] = "PASS"
                                p_info("PASS:sid %s found %s in %s" % (match.group('sid'), alertdict[match.group('sid')][self.mode] , pcap))
                                next
                            else:
                                resultsdict[testid] = "FAIL"
                                p_info("FAIL:sid %s found %s in %s" % (match.group('sid'), alertdict[match.group('sid')][self.mode] , pcap))
                                break
                        elif match.group('operator') == ">":
                            if int(match.group("count")) > int(alertdict[match.group('sid')][self.mode]):
                                resultsdict[testid] = "PASS"
                                p_info("PASS:sid %s found %s in %s" % (match.group('sid'), alertdict[match.group('sid')][self.mode] , pcap))
                                next
                            else:
                                resultsdict[testid] = "FAIL"
                                p_info("FAIL:sid %s found %s in %s" % (match.group('sid'), alertdict[match.group('sid')][self.mode] , pcap))
                                break
                        elif match.group('operator') == "<=":
                            if int(match.group("count")) <= int(alertdict[match.group('sid')][self.mode]):
                                resultsdict[testid] = "PASS"
                                p_info("PASS:sid %s found %s in %s" % (match.group('sid'), alertdict[match.group('sid')][self.mode] , pcap))
                                break
                            else:
                                resultsdict[testid] = "FAIL"
                                p_info("FAIL:sid %s found %s in %s" % (match.group('sid'), alertdict[match.group('sid')][self.mode] , pcap))
                                break
                        elif match.group('operator') == ">=":
                            if int(match.group("count")) >= int(alertdict[match.group('sid')][self.mode]):
                                resultsdict[testid] = "PASS"
                                p_info("PASS:sid %s found %s in %s" % (match.group('sid'), alertdict[match.group('sid')][self.mode] , pcap))
                                next
                            else:
                                resultsdict[testid] = "FAIL"
                                p_info("FAIL:sid %s found %s in %s" % (match.group('sid'), alertdict[match.group('sid')][self.mode] , pcap))
                                break
                        elif match.group('operator') == "!=":
                            if int(match.group("count")) != int(alertdict[match.group('sid')][self.mode]):
                                resultsdict[testid] = "PASS"
                                p_info("PASS:sid %s found %s in %s" % (match.group('sid'), alertdict[match.group('sid')][self.mode] , pcap))
                                next
                            else:
                                resultsdict[testid] = "FAIL"
                                p_info("FAIL:sid %s found %s in %s" % (match.group('sid'), alertdict[match.group('sid')][self.mode] , pcap))
                                break
                        else:
                            # NOTE(review): 'resulstsdict' is a typo (NameError
                            # if this branch is reached) and 'sid' is unbound
                            # in this scope.
                            resulstsdict[testid] = "FAIL"
                            p_info("FAIL:sid %s not found in %s" % (sid, pcap))
                            break
                    elif int(match.group("count")) == 0:
                        resultsdict[testid] = "PASS"
                        p_info("PASS:sid %s not found in pcap %s but expected" % (match.group('sid'), pcap))
                        next
                    else:
                        resultsdict[testid] = "FAIL"
                        p_info("FAIL:IDS Failure")
                        break;
            else:
                resultsdict[testid] = "FAIL"
                p_info("FAIL:IDS Failure")
                break
            if resultsdict[testid] == "PASS":
                successcnt += 1
            else:
                failcnt += 1
        reportfile = "%s/verify-report-%s.txt" % (self.Runmode.conf["globallogdir"], str(self.currentts))
        report = open(reportfile, 'w')
        p_info("verify results")
        report.write("verify results\n")
        for key,value in resultsdict.iteritems():
            p_info("%s:%s" % (key,value))
            report.write("%s:%s\n" % (key,value))
        p_info("successcnt:%i" % successcnt)
        report.write("successcnt:%i\n" % successcnt)
        p_info("failcnt:%i" % failcnt)
        report.write("failcnt:%i\n" % failcnt)
        for key,value in resultsdict.iteritems():
            print "+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
            print "%s:%s" % (key,value)
            print "description: %s" % (testMap[key]["description"])
            print "notes: %s" % (testMap[key]["notes"])
            print "behavior: %s" % (testMap[key]["behavior"])
            print "+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
        report.close()
| 1.070313
| 1
|
plate_alpr/plate_ocr.py
|
alisson-moura/plate_control
| 1
|
12774503
|
from openalpr import Alpr
import re
import os
class Plate:
    """Wraps OpenALPR to read a Mercosur-pattern licence plate from an image."""

    def __init__(self):
        self.alpr = Alpr("eu", "/etc/openalpr/conf", "/usr/share/openalpr/runtime_data")
        if not self.alpr.is_loaded():
            print("Erro ao carregar o ALPR..")
            # BUG FIX: the module never imported sys, so this exit raised
            # NameError instead of terminating cleanly.
            import sys
            sys.exit(1)
        self.alpr.set_top_n(10)
        self.alpr.set_default_region("")

    def plate_ocr(self, placa):
        """Return the first candidate matching LLLDLDD (e.g. ABC1D23), or ''.

        `placa` is a path to the image file.  Unloads the ALPR engine before
        returning, so the instance is single-use.
        """
        results = self.alpr.recognize_file(placa)
        plate = ""
        for result in results['results']:
            for candidate in result['candidates']:
                if candidate['matches_template']:
                    text = candidate['plate']
                    if re.search(r'^[A-Z]{3}[0-9]{1}[A-Z]{1}[0-9]{2}', text):
                        plate = text
                        break
            # BUG FIX: the original reused `plate` as the outer loop variable,
            # so a match found in any but the last result was overwritten by
            # the next result dict (and a dict could be returned instead of a
            # string).  Stop scanning once a plate has been found.
            if plate:
                break
        self.alpr.unload()
        if plate != "":
            print(plate)
        return plate
#placa = Plate()
#placa01 = placa.plate_ocr('/home/pi/Pictures/Mercosul/img01.jpeg')
#print(placa01)
| 2.671875
| 3
|
swarmlib/abc/bees/onlooker_bee.py
|
alxfmpl/swarmlib
| 221
|
12774504
|
<filename>swarmlib/abc/bees/onlooker_bee.py
# ------------------------------------------------------------------------------------------------------
# Copyright (c) <NAME>. All rights reserved.
# Licensed under the BSD 3-Clause License. See LICENSE.txt in the project root for license information.
# ------------------------------------------------------------------------------------------------------
from typing import Tuple
from .bee_base import BeeBase
class OnlookerBee(BeeBase):
    """Onlooker bee of the artificial-bee-colony algorithm.

    Thin wrapper that exposes BeeBase._explore as a public operation.
    """
    def explore(self, starting_position: Tuple[float, float], start_value: float) -> None:
        """
        Explore new food sources from the given one

        Args:
            starting_position (Tuple[float, float]): coordinates of the food source to start from
            start_value (float): value associated with the starting position
                (presumably its fitness — confirm in BeeBase._explore)
        """
        self._explore(starting_position, start_value)
| 2.703125
| 3
|
django_teams/models.py
|
SumedhWalujkar/django_teams
| 0
|
12774505
|
# This is where the models go!
from django.db import models
from django.urls import reverse
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes.fields import GenericForeignKey
# Using the user: user = models.ForeignKey(settings.AUTH_USER_MODEL)
CurrentUser = None
CurrentTeam = None
class Team(models.Model):
    """A named group of users; membership is tracked through TeamStatus."""
    users = models.ManyToManyField(settings.AUTH_USER_MODEL,
                                   blank=True,
                                   through='django_teams.TeamStatus',
                                   related_name='team_member')
    name = models.CharField(max_length=255)
    private = models.BooleanField(default=False)
    description = models.TextField(null=True, blank=True)

    def get_absolute_url(self):
        """Canonical detail URL for this team."""
        return reverse('team-detail', kwargs={'pk': self.pk})

    def __str__(self):
        return self.name

    def add_user(self, user, team_role=1):
        """Attach `user` to this team; role defaults to 1 ('Requesting Access')."""
        TeamStatus(user=user, team=self, role=team_role).save()

    def approve_user(self, user):
        """Promote `user` from 'Requesting Access' (1) to 'Team Member' (10)."""
        ts = TeamStatus.objects.get(user=user, team=self)
        if ts.role == 1:
            ts.role = 10
            ts.save()

    def approved_objects(self):
        """Ownership records of this team that have been approved."""
        return Ownership.objects.select_related('team').filter(team=self, approved=True)

    @staticmethod
    def get_current_team():
        # NOTE(review): CurrentTeam is a module-level constant that is never
        # reassigned anywhere in this file, so this always returns None.
        if CurrentTeam is not None:
            return CurrentTeam
        return None
class TeamStatus(models.Model):
    """Through-model linking a user to a team with a membership role."""
    user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
    team = models.ForeignKey('django_teams.Team', on_delete=models.DO_NOTHING)
    comment = models.CharField(max_length=255, default='', null=True, blank=True)
    TEAM_ROLES = (
        (1, 'Requesting Access'),
        (10, 'Team Member'),
        (20, 'Team Leader'),
    )
    role = models.IntegerField(choices=TEAM_ROLES)

    def approve(self):
        """Grant the pending request by setting the role to 'Team Member'."""
        self.role = 10
        self.save()

    def __str__(self):
        # BUG FIX: the original called self.user.__unicode__() /
        # self.team.__unicode__(), which no longer exist on Python-3 Django
        # models and raised AttributeError; %s applies str() directly.
        return "%s requesting to join %s" % (self.user, self.team)
class Ownership(models.Model):
    """Generic link granting a team ownership of any model instance."""
    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    object_id = models.PositiveIntegerField()
    content_object = GenericForeignKey('content_type', 'object_id')
    approved = models.BooleanField(default=False)
    team = models.ForeignKey('django_teams.Team', on_delete=models.DO_NOTHING)

    @staticmethod
    def check_permission(item):
        """Return True if the current team owns `item` (approved or not).

        NOTE(review): Team.get_current_team() always returns None in this
        file, so this filters on team=None — confirm the intended source of
        the current team.
        """
        content_type = ContentType.objects.get_for_model(item)
        res = Ownership.objects.filter(team=Team.get_current_team(), content_type=content_type, object_id=item.id)
        return len(res) > 0

    @staticmethod
    def grant_ownership(team, item):
        """Create the Ownership row for (team, item) if it does not exist yet."""
        content_type = ContentType.objects.get_for_model(item)
        res = Ownership.objects.get_or_create(team=team, content_type=content_type, object_id=item.id)
        # get_or_create returns (object, created); the extra save() on a
        # freshly created row is redundant but harmless.
        if res[1]:
            res[0].save()
| 2.109375
| 2
|
tests/test_09_0_openpyxl.py
|
simkimsia/ug-read-write-excel-using-python
| 1
|
12774506
|
from openpyxl.styles import colors, Font
from examples.c09_0_font_styles.openpyxl import index
from openpyxl import Workbook
from base_test_cases import ExcelTest
class TestOpenPyXLFontStyles(ExcelTest):
    """Exercises the font-styling examples against openpyxl defaults."""

    def test_font_color(self):
        # A fresh cell carries the theme color; setting an ARGB string and
        # the example helper should both be reflected in font.color.rgb.
        wb = Workbook()
        ws = wb.active
        a1 = ws['A1']
        a1_font = a1.font
        default_color = colors.Color(
            indexed=None, type='theme', rgb=None, tint=0.0, theme=1, auto=None)
        self.assertEqual(a1_font.color, default_color)
        a1.font = Font(color="FF000000")
        self.assertEqual(a1.font.color.rgb, "FF000000")
        a1 = index.set_font_color_red(wb)
        self.assertEqual(a1.font.color.rgb, colors.RED)

    def test_font_size(self):
        # Default font size is 11; the example helper should override it.
        wb = Workbook()
        ws = wb.active
        a1 = ws['A1']
        a1_font = a1.font
        default_size = 11
        self.assertEqual(a1_font.size, default_size)
        new_size = 20
        a1 = index.set_font_size(wb, new_size)
        self.assertEqual(a1.font.size, new_size)

    def test_font_style(self):
        # Defaults: Calibri, not bold/italic, no underline.  The helper is
        # expected to set the face plus bold, italic and a single underline.
        wb = Workbook()
        ws = wb.active
        a1 = ws['A1']
        a1_font = a1.font
        default_bold = False
        default_italic = False
        default_underline = None
        default_style = 'Calibri'
        self.assertEqual(a1_font.bold, default_bold)
        self.assertEqual(a1_font.italic, default_italic)
        self.assertEqual(a1_font.underline, default_underline)
        self.assertEqual(a1_font.name, default_style)
        new_style = "Helvetica"
        a1 = index.set_font_style(wb, new_style)
        self.assertEqual(a1.font.name, new_style)
        self.assertTrue(a1.font.bold)
        self.assertTrue(a1.font.italic)
        four_kinds_of_underlines = [
            'single', 'singleAccounting',
            'double', 'doubleAccounting']
        self.assertEqual(a1.font.underline, four_kinds_of_underlines[0])
| 2.796875
| 3
|
examples/qm7/qm7b_tf_model.py
|
ozgurozkan123/deepchem
| 14
|
12774507
|
"""
Script that trains Tensorflow singletask models on QM7 dataset.
"""
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import os
import deepchem as dc
import numpy as np
from qm7_datasets import load_qm7b_from_mat
np.random.seed(123)
qm7_tasks, datasets, transformers = load_qm7b_from_mat(split='stratified')
train_dataset, valid_dataset, test_dataset = datasets
fit_transformers = [dc.trans.CoulombFitTransformer(train_dataset)]
regression_metric = [dc.metrics.Metric(dc.metrics.mean_absolute_error, mode="regression"),
dc.metrics.Metric(dc.metrics.pearson_r2_score, mode="regression")]
model = dc.models.TensorflowMultiTaskFitTransformRegressor(
n_tasks=len(qm7_tasks), n_features=[23, 23], learning_rate=0.001 , momentum=.8, batch_size=25,
weight_init_stddevs=[1/np.sqrt(400),1/np.sqrt(100),1/np.sqrt(100)],
bias_init_consts=[0.,0.,0.], layer_sizes=[400,100,100],
dropouts=[0.01,0.01,0.01], fit_transformers=fit_transformers, n_evals=10, seed=123)
# Fit trained model
model.fit(train_dataset, nb_epoch=50)
model.save()
train_scores = model.evaluate(train_dataset, regression_metric, transformers)
print("Train scores [kcal/mol]")
print(train_scores)
valid_scores = model.evaluate(valid_dataset, regression_metric, transformers)
print("Valid scores [kcal/mol]")
print(valid_scores)
test_scores = model.evaluate(test_dataset, regression_metric, transformers)
print("Test scores [kcal/mol]")
print(test_scores)
| 2.375
| 2
|
main.py
|
dtmcdona/DMAutomate
| 0
|
12774508
|
<reponame>dtmcdona/DMAutomate<gh_stars>0
import macrofilecontroller
import macromenuview
class App:
    """Console front-end for creating, editing, saving and playing macros."""

    def __init__(self):
        self.action = "display"           # last menu action chosen by the user
        self.running = True               # main-loop flag
        self.currentfile = "dm_macro.py"  # macro file currently being edited

    def _offer_save(self, controller):
        """Ask whether to save the current macro before replacing it; save if yes."""
        print("Would you like to save your current macro before loading? (y/n)")
        user_input = input().lower()
        if user_input == 'y' or user_input == 'yes':
            print("What do you want to name it? (not including .py ext)")
            filename_input = str(input())
            # NOTE: the "create" branch previously called controller.save(...),
            # which does not match the save_macro(...) API used by the "open"
            # and "save" branches; unified on save_macro.
            controller.save_macro(filename_input)

    def run_app(self):
        """Run the interactive menu loop until the user quits."""
        view = macromenuview.MacroView()
        controller = macrofilecontroller.MacroController()
        self.action = view.display_menu()
        print("Action is: "+self.action)
        while self.running:
            if self.action == "create":
                self._offer_save(controller)
                controller.create_macro()
            elif self.action == "open":
                self._offer_save(controller)
                pyfiles = view.display_list()
                print('| Which index would you like to open? |')
                user_input = int(input())
                filename = pyfiles[user_input]
                controller.open_macro(filename)
            elif self.action == "edit":
                pyfiles = view.display_list()
                print('| Which index would you like to open? |')
                user_input = int(input())
                filename = str(pyfiles[user_input])
                view.display_file(filename)
                view.insert_input(filename)
            elif self.action == "save":
                print("What do you want to name it? (not including .py ext)")
                filename_input = str(input())
                controller.save_macro(filename_input)
            elif self.action == "delete":
                view.delete_input()
            elif self.action == "insert":
                view.display_file(self.currentfile)
                view.insert_input(self.currentfile)
            elif self.action == "record":
                print("Recording... (press 'Esc' to exit to main menu)")
                # Listeners are registered once; recording stops when the
                # controller clears its running flag (Esc pressed).
                if not controller.listeners:
                    controller.activate_listeners()
                controller.running = True
                while controller.running:
                    pass
            elif self.action == "play":
                controller.play()
            elif self.action == "view":
                pyfiles = view.display_list()
                print('| Which index would you like to view? |')
                user_input = int(input())
                filename = pyfiles[user_input]
                view.display_file(filename)
            elif self.action == "settings":
                print('| Would you like random time intervals added between actions? (y/n)|')
                user_input = input().lower()
                if user_input == 'y' or user_input == 'yes':
                    controller.randomEnabled = True
            # Redisplay the menu for the next iteration.
            self.action = view.display_menu()
            print("Action is: " + self.action)
# BUG FIX: previously a second, throwaway App() was constructed just to call
# run_app(); reuse the instance bound to `run`.
run = App()
run.run_app()
| 3.28125
| 3
|
Example/1-example-pi-pulse.py
|
smartmzl/Quanlse
| 0
|
12774509
|
<reponame>smartmzl/Quanlse
#!/usr/bin/python3
# -*- coding: utf8 -*-
# Copyright (c) 2021 Baidu, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Example: Pi pulse
Please visit https://quanlse.baidu.com/#/doc/tutorial-pi-pulse for more details about this example.
"""
from numpy import round
from math import pi, sqrt
from Quanlse.Utils import Hamiltonian as qham
from Quanlse.Utils import Operator
# Sampling period.
dt = 0.2
# Number of qubit(s).
qubits = 1
# System energy level.
level = 2
# Gate duration time.
tg = 20
# --------------------------------
# Construct the system Hamiltonian
# --------------------------------
# Create the Hamiltonian.
ham = qham.createHam(title="1q-2l", dt=dt, qubitNum=qubits, sysLevel=level)
# Add the control term(s).
qham.addControl(ham, name="q0-ctrlx", onQubits=0, matrices=Operator.driveX(level))
# Add Pi pulse wave(s).
# `amp` is calculated from the integral of Gaussian function.
# A Gaussian of amplitude a and width sigma integrates to a*sigma*sqrt(2*pi);
# with sigma = tg/8 this choice of `amp` makes the total pulse area equal pi.
amp = pi / (tg / 8) / sqrt(2 * pi)
qham.addWave(ham, "q0-ctrlx", f="gaussian", t0=0, t=tg, para={"a": amp, "tau": tg / 2, "sigma": tg / 8})
# Print the basic information of Hamiltonian.
qham.printHam(ham)
# Simulate the evolution and print the result.
unitary = qham.getUnitary(ham)
print("Evolution unitary:\n", round(unitary, 2))
# Print the waveform.
qham.plotWaves(ham)
| 2.625
| 3
|
girderformindlogger/models/aes_encrypt.py
|
jj105/mindlogger-app-backend
| 0
|
12774510
|
<gh_stars>0
# -*- coding: utf-8 -*-
import copy
import datetime
import json
import os
import six
import cherrypy
from bson.objectid import ObjectId
from girderformindlogger.constants import AccessType
from girderformindlogger.exceptions import ValidationException, GirderException
from girderformindlogger.models.model_base import AccessControlledModel, Model
from girderformindlogger.utility.model_importer import ModelImporter
from girderformindlogger.utility.progress import noProgress, setResponseTimeLimit
from girderformindlogger import events
from bson import json_util
from Cryptodome.Cipher import AES
import random
import string
class AESEncryption(AccessControlledModel):
    """
    Model mixin that transparently AES-encrypts selected document fields
    before they are stored and decrypts them when read back.

    Fields are configured via :meth:`initAES` as ``(dotted.path, maxLength)``
    pairs.  Plaintext shorter than ``maxLength`` is padded so the true length
    cannot be inferred from ciphertext size; the original length is appended
    as a fixed-width decimal suffix (``maxCount`` digits).
    """

    def __init__(self):
        # Configured by initAES(); empty list means "encrypt nothing".
        self.fields = []
        super(AESEncryption, self).__init__()

    def initAES(self, fields=None):
        """
        Configure the AES key and the fields to encrypt.

        :param fields: list of ``(dotted.path, maxLength)`` tuples, or None
            for no encrypted fields.
        """
        self.AES_KEY = cherrypy.config['aes_key'] if 'aes_key' in cherrypy.config else b'<KEY>'
        # BUG FIX: previous signature used a mutable default (fields=[]),
        # which is shared across all calls; use None as a sentinel instead.
        self.fields = fields if fields is not None else []
        # Number of decimal digits used to encode the plaintext length.
        self.maxCount = 4

    # basic function for aes-encryption
    def encrypt(self, data, maxLength):
        """
        Encrypt ``data`` with AES-EAX, padding it up to ``maxLength`` first.

        :returns: ``ciphertext + nonce(16 bytes) + tag(16 bytes)``.

        NOTE(review): when ``len(data) >= maxLength`` no length suffix is
        appended, yet decrypt() always strips one -- confirm callers never
        pass values at or beyond the configured maxLength.
        """
        length = len(data)
        if length < maxLength:
            # insert other characters at the end of text so that length of text won't be detected
            data = data + random.choice(string.ascii_letters+string.digits) * (maxLength - len(data))
            data = data + '%0{}d'.format(self.maxCount) % length
        cipher = AES.new(self.AES_KEY, AES.MODE_EAX)
        ciphertext, tag = cipher.encrypt_and_digest(data.encode("utf-8"))
        return ciphertext + cipher.nonce + tag

    # basic function for aes-decryption
    def decrypt(self, data):
        """
        Decrypt bytes produced by :meth:`encrypt`.

        :returns: ``('ok', plaintext)`` on success, ``('error', None)`` on any
            verification or decoding failure.
        """
        try:
            # Layout: [ciphertext][16-byte nonce][16-byte tag].
            cipher = AES.new(self.AES_KEY, AES.MODE_EAX, nonce=data[-32:-16])
            plaintext = cipher.decrypt(data[:-32])
            cipher.verify(data[-16:])
            txt = plaintext.decode('utf-8')
            length = int(txt[-self.maxCount: ])
            return ('ok', txt[:length])
        # BUG FIX: bare "except:" also swallowed SystemExit/KeyboardInterrupt.
        except Exception:
            return ('error', None)

    def navigate(self, document, path):
        """Follow a list of keys through nested dicts; None if the path breaks."""
        current = document
        for node in path:
            if node not in current or not isinstance(current[node], dict):
                return None
            current = current[node]
        return current

    # encrypt selected fields using AES
    def encryptFields(self, document, fields):
        """Encrypt in place every configured string field present in ``document``."""
        if not document:
            return document
        for field in fields:
            path = field[0].split('.')
            key = path.pop()
            data = self.navigate(document, path)
            # Only plain strings are encrypted; bytes are assumed encrypted already.
            if data and data.get(key, None) and isinstance(data[key], str):
                encrypted = self.encrypt(data[key], field[1])
                data[key] = encrypted
        return document

    # decrypt selected fields using AES
    def decryptFields(self, document, fields):
        """Decrypt in place every configured bytes field present in ``document``."""
        if not document:
            return document
        for field in fields:
            path = field[0].split('.')
            key = path.pop()
            data = self.navigate(document, path)
            if data and data.get(key, None) and isinstance(data[key], bytes):
                status, decrypted = self.decrypt(data[key])
                # Leave the raw bytes untouched if verification failed.
                if status == 'ok':
                    data[key] = decrypted
        return document

    # overwrite functions which save data in mongodb
    def save(self, document, validate=True, triggerEvents=True):
        """Validate, encrypt configured fields, persist, then return the
        document with those fields decrypted again for the caller."""
        if validate and triggerEvents:
            event = events.trigger('.'.join(('model', self.name, 'validate')), document)
            if event.defaultPrevented:
                validate = False

        if validate:
            document = self.validate(document)

        self.encryptFields(document, self.fields)
        return self.decryptFields(super().save(document, False, triggerEvents), self.fields)

    def find(self, *args, **kwargs):
        """Like the base find(), but returns a list with fields decrypted."""
        documents = list(super().find(*args, **kwargs))
        for document in documents:
            self.decryptFields(document, self.fields)
        return documents

    def findOne(self, *args, **kwargs):
        """Like the base findOne(), but with configured fields decrypted."""
        document = super().findOne(*args, **kwargs)
        self.decryptFields(document, self.fields)
        return document
| 2.359375
| 2
|
tests/test_dong_code.py
|
nhlsm/PyDataGoKr
| 2
|
12774511
|
import unittest
from data_go_kr.utils.dong_code import *
class Test0(unittest.TestCase):
    """
    Test that the result sum of all numbers
    """
    @classmethod
    def setUpClass(cls):
        """Configure logging once for the whole test class."""
        # debug
        LOG_FORMAT = '%(pathname)s:%(lineno)03d - %(message)s'
        # LOG_LEVEL = logging.DEBUG # DEBUG(10), INFO(20), (0~50)
        LOG_LEVEL = logging.INFO # DEBUG(10), INFO(20), (0~50)
        logging.basicConfig(format=LOG_FORMAT, level=LOG_LEVEL, stream=sys.stdout)

    def test_0(self):
        """Placeholder: exploratory snippets, currently disabled."""
        # logging.info('dtypes: %s', LAWD_CODE.dtypes)
        # logging.info('key: %s', dong_code.LAWD_CODE )
        pass

    def test_1(self):
        """Placeholder: exploratory LAWD_CODE queries, currently disabled."""
        # x = LAWD_CODE.query(' 법정동명.str.contains("서울") and 폐지여부=="존재" ')
        # logging.info('key: %s', x )
        # x = LAWD_CODE.query(' 법정동명.str.contains("인천광역시 미추홀구") and 폐지여부=="존재" ')
        # logging.info('key: %s', x )
        pass

    def test_2(self):
        """Placeholder: 5-digit code-prefix experiments, currently disabled."""
        # x = LAWD_CODE['법정동코드'].str[:5].unique()
        # logging.info('left5\n%s', x )
        # exists = LAWD_CODE.query( '폐지여부=="존재"' )
        # f5 = exists['법정동코드'].str[:5].unique()
        # logging.info('1:%s', len(f5) )
        #
        # x = LAWD_CODE.query( ' 법정동코드.str.slice(2, 5) != "000" and 법정동코드.str.endswith("00000") and 폐지여부=="존재"' )
        # logging.info('2:%s', len(x) )
        # logging.info('2:\n%s', x )
        # x = LAWD_CODE['법정동코드'].str.slice(2,5)
        # logging.info('left5\n%s', x )
        pass

    def test_lawd_01(self):
        """lawd_01: the 'o' and 'x' subsets must partition the 'a' (all) set.

        Presumably 'o' = existing and 'x' = abolished codes -- confirm
        against the dong_code module.
        """
        class1_o = lawd_01('o')
        # logging.info('[o] class1: %s', len(class1_o) )
        # logging.info('\n%s', class1_o )
        class1_x = lawd_01('x')
        # logging.info('[x] class1: %s', len(class1_x) )
        # logging.info('\n%s', class1_x )
        class1_a = lawd_01('a')
        # logging.info('[a] class1: %s', len(class1_a) )
        # logging.info('\n%s', class1_a )
        self.assertEqual( len(class1_o) + len(class1_x), len(class1_a) )

    def test_lawd_05(self):
        """lawd_05: the 'o' and 'x' subsets must partition the 'a' (all) set."""
        class2_o = lawd_05('o')
        # logging.info('[o] class2: %s', len(class2_o) )
        # logging.info('\n%s', class2_o )
        class2_x = lawd_05('x')
        # logging.info('[x] class2: %s', len(class2_x) )
        # logging.info('\n%s', class2_x )
        class2_a = lawd_05('a')
        # logging.info('[a] class2: %s', len(class2_a) )
        # logging.info('\n%s', class2_a )
        self.assertEqual( len(class2_o) + len(class2_x), len(class2_a) )
| 3.078125
| 3
|
generalexam/plotting/skeleton_line_plotting.py
|
thunderhoser/GeneralExam
| 4
|
12774512
|
<reponame>thunderhoser/GeneralExam<filename>generalexam/plotting/skeleton_line_plotting.py
"""Plotting methods for skeleton lines.
A "skeleton line" is a polyline description of a polygon. For more details, see
skeleton_lines.py.
"""
import numpy
import matplotlib
matplotlib.use('agg')
from generalexam.ge_utils import skeleton_lines
from gewittergefahr.gg_utils import error_checking
# Colours are RGB triples normalised from 0-255 into the [0, 1] range.
DEFAULT_POLYGON_COLOUR = numpy.array([0., 0., 0.]) / 255
DEFAULT_SKELETON_LINE_COLOUR = numpy.array([252., 141., 98.]) / 255
DEFAULT_END_NODE_COLOUR = numpy.array([252., 141., 98.]) / 255
DEFAULT_NEW_EDGE_COLOUR = numpy.array([102., 194., 165.]) / 255
DEFAULT_BRANCH_NODE_COLOUR = numpy.array([102., 194., 165.]) / 255
DEFAULT_JUMPER_NODE_COLOUR = numpy.array([141., 160., 203.]) / 255

# Default line/marker geometry shared by the plotting functions below.
DEFAULT_LINE_WIDTH = 2.
DEFAULT_MARKER_SIZE = 8
MARKER_TYPE = 'o'
FONT_SIZE = 16

# Text anchors: node labels are anchored left/bottom, polygon-vertex labels
# right/top -- presumably so the two kinds of label do not overlap.
HORIZONTAL_ALIGNMENT_FOR_NODES = 'left'
VERTICAL_ALIGNMENT_FOR_NODES = 'bottom'
HORIZONTAL_ALIGNMENT_FOR_POLYGON_VERTICES = 'right'
VERTICAL_ALIGNMENT_FOR_POLYGON_VERTICES = 'top'
def plot_polygon(
        polygon_object_xy, axes_object, line_colour=DEFAULT_POLYGON_COLOUR,
        line_width=DEFAULT_LINE_WIDTH):
    """Plots the original polygon (no skeleton line or triangulation).

    :param polygon_object_xy: Instance of `shapely.geometry.Polygon` with
        vertices in x-y (Cartesian) coordinates.
    :param axes_object: Instance of `matplotlib.axes._subplots.AxesSubplot`.
    :param line_colour: Colour of polygon edges (any format accepted by
        `matplotlib.colors`).
    :param line_width: Width of polygon edges (real positive number).
    """
    x_coords = numpy.array(polygon_object_xy.exterior.xy[0])
    y_coords = numpy.array(polygon_object_xy.exterior.xy[1])

    # Draw the polygon outline as one solid polyline.
    axes_object.plot(
        x_coords, y_coords, color=line_colour, linestyle='solid',
        linewidth=line_width)

    # Label every vertex except the repeated closing vertex.
    for vertex_index in range(len(x_coords) - 1):
        axes_object.text(
            x_coords[vertex_index], y_coords[vertex_index], str(vertex_index),
            fontsize=FONT_SIZE, color=line_colour,
            horizontalalignment=HORIZONTAL_ALIGNMENT_FOR_POLYGON_VERTICES,
            verticalalignment=VERTICAL_ALIGNMENT_FOR_POLYGON_VERTICES)
def plot_delaunay_triangulation(
        polygon_object_xy, node_table, new_edge_table, axes_object,
        new_edge_colour=DEFAULT_NEW_EDGE_COLOUR,
        new_edge_width=DEFAULT_LINE_WIDTH,
        end_node_colour=DEFAULT_END_NODE_COLOUR,
        end_node_marker_size=DEFAULT_MARKER_SIZE,
        branch_node_colour=DEFAULT_BRANCH_NODE_COLOUR,
        branch_node_marker_size=DEFAULT_MARKER_SIZE,
        jumper_node_colour=DEFAULT_JUMPER_NODE_COLOUR,
        jumper_node_marker_size=DEFAULT_MARKER_SIZE):
    """Plots Delaunay triangulation of polygon.

    :param polygon_object_xy: Instance of `shapely.geometry.Polygon` with
        vertices in x-y (Cartesian) coordinates.
    :param node_table: pandas DataFrame created by
        `skeleton_lines._find_and_classify_nodes` or
        `skeleton_lines._find_and_classify_node_children`.
    :param new_edge_table: pandas DataFrame created by
        `skeleton_lines._find_new_edges_from_triangulation`.
    :param axes_object: Instance of `matplotlib.axes._subplots.AxesSubplot`.
    :param new_edge_colour: Colour of new edges (those in triangulation and not
        in original polygon) (in any format accepted by `matplotlib.colors`).
    :param new_edge_width: Width of new edges.
    :param end_node_colour: Colour of end nodes.
    :param end_node_marker_size: Marker size for end nodes.
    :param branch_node_colour: Colour of branch nodes.
    :param branch_node_marker_size: Marker size for branch nodes.
    :param jumper_node_colour: Colour of jumper nodes.
    :param jumper_node_marker_size: Marker size for jumper nodes.
    """
    polygon_vertex_x_coords = numpy.array(polygon_object_xy.exterior.xy[0])
    polygon_vertex_y_coords = numpy.array(polygon_object_xy.exterior.xy[1])

    # Draw each triangulation edge, looked up by its pair of vertex indices.
    num_new_edges = len(new_edge_table.index)
    for i in range(num_new_edges):
        these_vertex_indices = new_edge_table[
            skeleton_lines.VERTEX_INDICES_KEY].values[i]
        axes_object.plot(
            polygon_vertex_x_coords[these_vertex_indices],
            polygon_vertex_y_coords[these_vertex_indices],
            color=new_edge_colour, linestyle='solid', linewidth=new_edge_width)

    # Draw and label each node, styled by its type.
    num_nodes = len(node_table.index)
    for i in range(num_nodes):
        this_node_type = node_table[skeleton_lines.NODE_TYPE_KEY].values[i]
        # NOTE(review): if a node type matches none of the three known kinds,
        # this_colour/this_marker_size are never assigned and the plot call
        # below raises NameError -- confirm node_table can only contain
        # end/branch/jumper types.
        if this_node_type == skeleton_lines.END_NODE_TYPE:
            this_colour = end_node_colour
            this_marker_size = end_node_marker_size
        elif this_node_type == skeleton_lines.BRANCH_NODE_TYPE:
            this_colour = branch_node_colour
            this_marker_size = branch_node_marker_size
        elif this_node_type == skeleton_lines.JUMPER_NODE_TYPE:
            this_colour = jumper_node_colour
            this_marker_size = jumper_node_marker_size

        axes_object.plot(
            node_table[skeleton_lines.NODE_X_COORDS_KEY].values[i],
            node_table[skeleton_lines.NODE_Y_COORDS_KEY].values[i],
            linestyle='None', marker=MARKER_TYPE, markerfacecolor=this_colour,
            markeredgecolor=this_colour, markersize=this_marker_size,
            markeredgewidth=1)

        axes_object.text(
            node_table[skeleton_lines.NODE_X_COORDS_KEY].values[i],
            node_table[skeleton_lines.NODE_Y_COORDS_KEY].values[i], str(i),
            fontsize=FONT_SIZE, color=this_colour,
            horizontalalignment=HORIZONTAL_ALIGNMENT_FOR_NODES,
            verticalalignment=VERTICAL_ALIGNMENT_FOR_NODES)
def plot_skeleton_line(
        skeleton_line_x_coords, skeleton_line_y_coords, axes_object,
        line_colour=DEFAULT_SKELETON_LINE_COLOUR,
        line_width=DEFAULT_LINE_WIDTH):
    """Plots skeleton line through polygon.

    P = number of points in skeleton line

    :param skeleton_line_x_coords: length-P numpy array with x-coordinates on
        skeleton line.
    :param skeleton_line_y_coords: length-P numpy array with y-coordinates on
        skeleton line.
    :param axes_object: Instance of `matplotlib.axes._subplots.AxesSubplot`.
    :param line_colour: Colour of skeleton line (any format accepted by
        `matplotlib.colors`).
    :param line_width: Width of skeleton line (real positive number).
    """
    # Validate x-coordinates: finite values, one dimension.
    error_checking.assert_is_numpy_array_without_nan(skeleton_line_x_coords)
    error_checking.assert_is_numpy_array(
        skeleton_line_x_coords, num_dimensions=1)

    # Validate y-coordinates: finite values, same length as x.
    expected_dimensions = numpy.array([len(skeleton_line_x_coords)])
    error_checking.assert_is_numpy_array_without_nan(skeleton_line_y_coords)
    error_checking.assert_is_numpy_array(
        skeleton_line_y_coords, exact_dimensions=expected_dimensions)

    axes_object.plot(
        skeleton_line_x_coords, skeleton_line_y_coords, color=line_colour,
        linestyle='solid', linewidth=line_width)
| 2.609375
| 3
|
tests/test_database.py
|
reline/nolanbot
| 1
|
12774513
|
<reponame>reline/nolanbot<filename>tests/test_database.py
import unittest
from database import Database
class TestDatabase(unittest.TestCase):
    """Smoke test for the Database wrapper: passing means "did not raise"."""

    def test_smoke(self):
        # Exercise the basic fetch/query paths; no return values asserted.
        database = Database()
        database.fetch_all_cars()
        database.query_cars("miata")


if __name__ == '__main__':
    unittest.main()
| 1.953125
| 2
|
datablox_framework/datablox_framework/fileserver_wsgi.py
|
mpi-sws-rse/datablox
| 0
|
12774514
|
<gh_stars>0
"""this is a version of the fileserver that works with wsgi (eg. gunicorn)"""
import os
import os.path
import urllib
import urlparse
import sys
import logging
from random import choice, randint
import string
import fcntl
logger = logging.getLogger("gunicorn.error")

DEBUG=True
# if we're debugging stuff, we log stack traces, otherwise we only log the error message
if DEBUG:
    log_exc=logger.exception
else:
    log_exc=logger.error

# The Engage adapter is optional: when installed, it knows where the shared
# file-server key lives; otherwise fall back to a key file under $HOME.
try:
    import datablox_engage_adapter.file_locator
    using_engage = True
except ImportError:
    using_engage = False

if using_engage:
    engage_file_locator = datablox_engage_adapter.file_locator.FileLocator()
    file_server_keypath = engage_file_locator.get_file_server_key_file()
else:
    file_server_keypath = os.path.expanduser('~/datablox_file_server_key')

from block import decrypt_path

FILESERVER_PORT=4990
# Chunk size (bytes) used when streaming file contents to the client.
BLOCK_SIZE = 128000
KEY_MESSAGE = "key="
KEY_MESSAGE_LEN = len(KEY_MESSAGE)
def gen_random(length, chars=string.letters+string.digits):
    """Return a random string of *length* characters drawn from *chars*."""
    picked = [choice(chars) for _ in range(length)]
    return ''.join(picked)
def get_key(path):
    """Get the key for the fileserver. Since there could be
    multiple slaves, we need to use file locking to serialize
    access to the key file. The first slave to try to get the key
    will generate the key and create the file.
    """
    path = os.path.abspath(os.path.expanduser(path))
    # Lock a sibling ".lock" file rather than the key file itself, so the
    # key file can be written once and then made read-only.
    lockfile = path + ".lock"
    with open(lockfile, "w") as lf:
        fcntl.lockf(lf, fcntl.LOCK_EX)
        try:
            if not os.path.exists(path):
                # First process here: generate an 8-character key and persist
                # it read-only (0400) for the other slaves.
                k = gen_random(8)
                with open(path, 'w') as f:
                    f.write(k)
                os.chmod(path, 0400)
                logger.info("Generated new keyfile at %s" % path)
                return k
            else:
                with open(path, "r") as f:
                    logger.info("Reading existing keyfile at %s" % path)
                    return f.read().rstrip()
        finally:
            # Always release the lock, even on error or early return.
            fcntl.lockf(lf, fcntl.LOCK_UN)
# Shared secret used by decrypt_path() to decrypt request paths.
deskey = get_key(file_server_keypath)
# Headers used for all plain-text error responses.
error_headers = [("content-type", "text/plain")]
def send_file(path, size):
    """Generator that yields the file at *path* in BLOCK_SIZE-byte chunks.

    The *size* argument is accepted for interface compatibility but is not
    used by the generator itself.
    """
    with open(path) as f:
        for chunk in iter(lambda: f.read(BLOCK_SIZE), ''):
            yield chunk
def app(environ, start_response):
    """WSGI application: decrypt the requested path and stream the file.

    Expects a query string of the form ``key=<encrypted path>``; the path is
    decrypted with the shared key and the file is streamed in BLOCK_SIZE
    chunks.  All failures are returned as plain-text error responses.
    """
    path = None
    try:
        qs = environ.get("QUERY_STRING")
        qdict = urlparse.parse_qs(qs)
        # "key" carries the encrypted file path.
        enc_path = qdict["key"][0]
        path = decrypt_path(enc_path, deskey)
        logger.debug("Decrypted path " + path)
        size = os.path.getsize(path)
    except KeyError, e:
        # Missing "key" parameter in the query string.
        log_exc("Invalid request(KeyError): %s, query string was '%s'" %
                (e, qs))
        start_response('404 Page Not Found', error_headers, sys.exc_info())
        return ["Invalid request"]
    except ValueError, e:
        # Malformed or undecryptable path.
        log_exc("Invalid request (ValueError): %s, query string was '%s'" %
                (e, qs))
        if path:
            logger.error("Path was %s" % path)
        start_response('404 Page Not Found', error_headers, sys.exc_info())
        return ["Invalid request"]
    except IOError:
        log_exc("Could not open file at %s" % path)
        start_response('404 Page Not Found', error_headers, sys.exc_info())
        return ["Could not open file at %s" % path]
    except Exception, e:
        # Catch-all boundary: report as 500 rather than crashing the worker.
        log_exc("Unexpected error %s, query string was '%s'" % (e, qs))
        if path:
            logger.error("Path was %s" % path)
        start_response('500 Internal Server Error', error_headers, sys.exc_info())
        return ["Unexpected error %s" % e]
    start_response("200 OK", [
        ("Content-Length", str(size))
        ])
    return send_file(path, size)
| 2.25
| 2
|
tests/test_tapioca_asana.py
|
henriquebastos/tapioca-asana
| 7
|
12774515
|
# coding: utf-8
import unittest
from tapioca_asana import Asana
class TestTapiocaAsana(unittest.TestCase):
    """Smoke test: the Asana tapioca wrapper can be instantiated."""

    def setUp(self):
        # No credentials are required just to construct the client object.
        self.wrapper = Asana()


if __name__ == '__main__':
    unittest.main()
| 1.8125
| 2
|
python/iviz/Widgets/HistogramWidget.py
|
eddy-ilg/iviz
| 0
|
12774516
|
#!/usr/bin/env python3
import sys
from PyQt5.QtWidgets import QVBoxLayout,QWidget
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar
import matplotlib.pyplot as plt
import random
import numpy as np
class HistogramWidget(QWidget):
    """Qt widget that renders one or more histograms on a matplotlib canvas."""

    def __init__(self, statNames=None, histograms=None, binWidth=None):
        super().__init__()
        self._statNames = statNames
        self._histograms = histograms
        self._binWidth = binWidth

        self.figure = plt.figure()
        self.canvas = FigureCanvas(self.figure)
        self.toolbar = NavigationToolbar(self.canvas, self)

        layout = QVBoxLayout()
        layout.addWidget(self.toolbar)
        layout.addWidget(self.canvas)
        self.setLayout(layout)

        # BUG FIX: calling self.plot() with no arguments overwrote the stored
        # constructor arguments with None; forward them explicitly instead.
        self.plot(self._statNames, self._histograms, self._binWidth)

    def plot(self, statNames=None, histograms=None, binWidth=None):
        """Replace the stored data and redraw the canvas.

        Passing None for statNames/histograms clears the figure without
        plotting anything.
        """
        self._statNames = statNames
        self._histograms = histograms
        self._binWidth = binWidth

        self.figure.clear()
        if self._statNames is None or self._histograms is None or len(self._histograms) == 0:
            return

        ax = self.figure.add_subplot(111)
        ax.set_title("Histograms")
        legend = []
        for i in range(len(self._statNames)):
            label = self._statNames[i]
            values = self._histograms[i]
            # Trim leading and trailing all-zero bins.
            first_nonzero_index = next((i for i, x in enumerate(values) if x!=0), None)
            last_nonzero_index = next((len(values) - idx for idx, item in enumerate(reversed(values), 1) if item), None)
            # NOTE(review): bin indices appear to carry a +10000 offset and a
            # x10 scale -- confirm against the producer of the histograms.
            bins = [j for j in range((first_nonzero_index-10000), last_nonzero_index-10000)]
            values = values[first_nonzero_index:last_nonzero_index]
            # this is for merging bins according to bin width
            mergedValues = [sum(values[i:i + self._binWidth]) for i in range(0, len(values), self._binWidth)]
            mergedBins = [bins[i]/10 for i in range(0, len(bins), self._binWidth)]
            # Build a step outline (interleaved left/right edges) to plot.
            left, right = mergedBins[:-1], mergedBins[1:]
            X = np.array([left, right]).T.flatten()
            X = np.append(X, mergedBins[len(mergedBins)-1])
            Y = np.array([mergedValues, mergedValues]).T.flatten()[:-1]
            legend.append(label)
            ax.plot(X, Y)
        ax.legend(legend)
        self.canvas.draw()
| 2.546875
| 3
|
src/hri/src/greetVisitors.py
|
APMMonteiro/european_robotic_league
| 0
|
12774517
|
#!/usr/bin/python
import spacy
import json
import numpy as np
import rospy
from std_msgs.msg import String
from spacy.matcher import PhraseMatcher
from spacy.matcher import Matcher
from spacy.tokens import Span
from spacy.lang.en import English
class Greet_Visitors:
    """ROS node that matches visitor/room phrases in incoming text and
    publishes the result as JSON on the HRI output topics."""

    def __init__(self):
        rospy.init_node('Greet_Visitors')
        self.tts_pub = rospy.Publisher('/hri/tts_input', String, queue_size=1,latch=True)

    def subscribe_greet(self):
        """Register subscribers for the greeting and location input topics."""
        #print("subscriber is called")
        greet_subscriber = rospy.Subscriber('/hri/greet_input', String,self.greet_callback)
        # BUG FIX: was self.loaction_callback (typo) -- that attribute does not
        # exist; the handler defined below is location_callback.
        location_subscriber = rospy.Subscriber('/hri/location_input', String,self.location_callback)

    def greet_callback(self, msg):
        """Handle greeting text: publish the recognised person (and, for the
        plumber, the requested room) as JSON."""
        #parse text and execute main code
        self.text = msg.data
        dictMsg={}
        # Run the (expensive) matcher once instead of twice.
        person = self.recognised_visitor()
        dictMsg["person"]=person
        if(person == "plumber"):
            dictMsg["room"]=self.ask_plumber()
        dictWrapper=dictMsg
        jsonStr = json.dumps(dictWrapper)
        print(jsonStr)
        output_pub = rospy.Publisher('/hri/greet_output', String, queue_size=1,latch=True)
        output_pub.publish(jsonStr) #Publish what the component sees for debugging (as there is a delay due to system performance)

    def location_callback(self, msg):
        """Handle location text: publish the recognised room as JSON."""
        #parse text and execute main code
        self.text = msg.data
        dictMsg={}
        dictMsg["person"]=self.recognised_room()
        dictWrapper=dictMsg
        jsonStr = json.dumps(dictWrapper)
        print(jsonStr)
        output_pub = rospy.Publisher('/hri/location_output', String, queue_size=1,latch=True)
        output_pub.publish(jsonStr) #Publish what the component sees for debugging (as there is a delay due to system performance)

    def string_to_tts(self, string):
        """Forward a string to the text-to-speech input topic."""
        self.tts_pub.publish(string) #Publish what the component sees for debugging (as there is a delay due to system performance)

    def string_to_obj(self, string):
        return json.dumps(string)

    def obj_to_string(self, obj):
        return json.loads(obj)

    def recognised_visitor(self):
        """Return the matched visitor text, or "unrecognised"."""
        span = self.return_people()
        return span.text if span is not None else "unrecognised"

    def recognised_room(self):
        """Return the matched room text, or "unrecognised"."""
        span = self.return_rooms()
        return span.text if span is not None else "unrecognised"

    def ask_plumber(self):
        """Return the matched room text for the plumber, or None if absent."""
        span = self.return_rooms()
        if span is not None:
            return span.text

    def return_rooms(self):
        """Match the first known room phrase in self.text; returns a Span or None.

        NOTE(review): the spaCy model is loaded on every call -- consider
        caching it if these callbacks become frequent.
        """
        nlp = spacy.load("en_core_web_sm")
        doc = nlp(self.text)
        rooms = ["kitchen", "bedroom", "bathroom", "hallway", "living room"]
        room_patterns = list(nlp.pipe(rooms))
        roomMatcher = PhraseMatcher(nlp.vocab)
        roomMatcher.add("ROOM", [*room_patterns])
        for match_id, start, end in roomMatcher(doc):
            # Create a Span with the label for "GPE"
            roomSpan = Span(doc, start, end, label="ROOM")
            return roomSpan

    def return_people(self):
        """Match the first known person phrase in self.text; returns a Span or None."""
        nlp = spacy.load("en_core_web_sm")
        doc = nlp(self.text)
        people = ["doctor", "<NAME>", "postman", "<NAME>", "plumber"]
        people_patterns = list(nlp.pipe(people))
        peopleMatcher = PhraseMatcher(nlp.vocab)
        peopleMatcher.add("PEOPLE", [*people_patterns])
        for match_id, start, end in peopleMatcher(doc):
            peopleSpan = Span(doc, start, end, label="PEOPLE")
            return peopleSpan
# Instantiate the node, register its subscribers, and hand control to ROS.
greet_visitors = Greet_Visitors()
greet_visitors.subscribe_greet()
rospy.spin()
| 2.578125
| 3
|
main.py
|
Deeryeen/unitypackage-exporter
| 0
|
12774518
|
# This script is to extract any files inside of a .unitypackage file.
# Please make sure you only use this on .unitypackage files you own.
# This will create a folder with the exact same name as the input file.
# Have fun!

# Used for creating the temp folder name.
from hashlib import md5
# Uncompressing .unityasset files.
import tarfile
# Gnarly file handling stuff.
from pathlib import Path
from shutil import copy2, rmtree
# Getting input from the user.
import argparse
from sys import exit

parser = argparse.ArgumentParser()
parser.add_argument('file', action='store', help='Path to the .unityasset file')
# parser.add_argument('-o', '--override', metavar='folder', action='store', default=None, help='Override default export location')
args = parser.parse_args()
unity_file = Path(args.file)
# There is probably a better way to just get the name of a file, but I'm lazy and want this to work.
# NOTE(review): this split drops everything after the LAST dot of the whole
# argument; a path containing no dot yields an empty name -- consider
# Path.with_suffix('') instead.
unity_file_name = args.file.split('.')[:-1]
unity_file_name = '.'.join(unity_file_name)

# Does file exist?
if not unity_file.exists():
    print(f'File "{unity_file}" does not exist!')
    exit(1)

# Is file Valid? (Doesn't actually check magic nor the contents of the tar archive. This is just a dumb check)
if not unity_file.name.split('.')[-1] == 'unitypackage':
    print(f'File "{unity_file.name}" is not valid.')
    exit(1)

# Make target directory.
# Check if the target directory exists.
new_directory = Path(unity_file_name)
if new_directory.exists():
    # We don't want to overwrite a previous export.
    print('The target directory exists... Could it be possible that you\'ve already exported this asset file?')
    exit(1)
new_directory.mkdir(0o755, parents=True, exist_ok=False)
print('Initialized working environment.') # We just created the folders and checked some stuff, it's not like anything fancy really happened.

# Generate the temp directory name and pathlib object. This is an MD5 hash of the input filename.
tmp_dir_name = md5(unity_file_name.encode()).hexdigest()
tmp_dir = Path(tmp_dir_name)

# Extract to the temp directory.
# NOTE(review): tarfile.extractall() trusts member paths; on Python >= 3.12
# pass filter="data" to guard against path traversal in untrusted archives.
unity_tar_file = tarfile.open(unity_file, mode='r:gz')
unity_tar_file.extractall(tmp_dir)
print(f'Read and extracted "{unity_file_name}"')

index=0
print('Processing extracted files...')
# Iterate through the directories.
for asset_directory in tmp_dir.iterdir():
    index=index+1
    # Generate links to asset elements known to be present.
    asset_file = Path(asset_directory, 'asset')
    # Check if it exists.
    if not asset_file.exists():
        # No need to make a fuss if it doesn't. Just continue.
        continue
    # Get original filename and directory structure.
    asset_path_name = Path(asset_directory, 'pathname')
    with open(asset_path_name, 'r') as f:
        pathname = f.read().split('/')
        new_asset_name = pathname[-1]
        new_dir_name = '/'.join(pathname[:-1])
    new_dir = Path(new_directory, new_dir_name)
    new_dir.mkdir(mode=0o755, parents=True, exist_ok=True)
    print(f'Found and copied {index} files...', end='\r')
    copy2(asset_file, Path(new_dir, new_asset_name))
print('\nDone processing the extracted files.')
rmtree(tmp_dir)
print('Cleaned up environment, enjoy!')
| 3.484375
| 3
|
donkeycar/parts/crossvalidator.py
|
paasovaara/donkey
| 5
|
12774519
|
#!/usr/bin/env python3
"""
Cross validator
Usage:
crossvalidator.py (--model=<model>) [--tub=<tub1,tub2,..tubn>] [--type=(linear|categorical)] [--output=<csv-filename>]
Options:
-h --help Show this screen.
--tub TUBPATHS List of paths to tubs. Comma separated. Use quotes to use wildcards. ie "~/tubs/*"
--type TYPE Either categorical or linear [default: 'linear']
--output CSVFILE Csv filename
"""
import os
from docopt import docopt
import math
import donkeycar as dk
import statistics
import numpy as np
import csv
from donkeycar.parts.keras import KerasCategorical, KerasLinear
from donkeycar.parts.datastore import TubGroup
def print_mean(l, message):
    """Print the mean and sample standard deviation of the sequence *l*."""
    avg = statistics.mean(l)
    spread = statistics.stdev(l)
    print(message, " mean: ", avg, " and standard dev: ", spread)
def print_lse(correct, estimates, message):
    """Print the mean squared error of *estimates* against *correct*."""
    error = lse(correct, estimates)
    print(message, " lse: ", error)
def lse(correct, estimates):
    """Return the mean squared error between two equal-length sequences.

    :param correct: ground-truth values.
    :param estimates: predicted values, same length as ``correct``.
    :return: mean of squared differences (raises ZeroDivisionError on empty
        input, matching the original contract).
    """
    # FIX: the previous implementation shadowed the builtin `sum` with a
    # local accumulator and iterated by index; use zip + a generator instead.
    total = sum((c - e) ** 2 for c, e in zip(correct, estimates))
    return total / len(correct)
def validate(model_path=None, tub_names=None, model_type='linear', output=None):
    """Replay recorded tubs through a trained pilot and report its errors.

    Writes one CSV row per record (true vs. predicted angle/throttle and
    their differences), then prints mean/stdev and LSE summaries per tub.

    :param model_path: path to saved Keras model weights.
    :param tub_names: comma-separated tub paths (wildcards allowed).
    :param model_type: 'linear' or 'categorical'.
    :param output: CSV output path; defaults to model_path + ".validator.csv".
    """
    print("Using a model of type: ", model_type)
    if model_type == "categorical":
        kl = KerasCategorical()
    elif model_type == "linear":
        kl = KerasLinear()
    if model_path:
        kl.load(model_path)

    print('tub_names', tub_names)
    tubgroup = TubGroup(tub_names)

    # See Also: ShowPredictionPlots
    if not output:
        output = model_path + ".validator.csv"
    print('saving to output file: ', output)
    with open(output, 'w') as csvfile:
        w = csv.writer(csvfile, delimiter=',',
                       quotechar='|', quoting=csv.QUOTE_MINIMAL)
        w.writerow(['Angle', 'Angle estimate', 'Angle error', 'Throttle', 'Throttle estimate', 'Throttle error'])
        for tub in tubgroup.tubs:
            num_records = tub.get_num_records()
            print('cross validation set size: %d' % num_records)
            correct_angles = []
            correct_throttles = []
            estimate_angles = []
            estimate_throttles = []
            error_angles = []
            error_throttles = []
            for iRec in tub.get_index(shuffled=False):
                record = tub.get_record(iRec)
                img = record["cam/image_array"]
                user_angle = float(record["user/angle"])
                user_throttle = float(record["user/throttle"])
                # Pilot prediction for this frame.
                pilot_angle, pilot_throttle = kl.run(img)

                correct_angles.append(user_angle)
                correct_throttles.append(user_throttle)
                estimate_angles.append(pilot_angle.item())
                estimate_throttles.append(pilot_throttle.item())
                error_angle = user_angle - pilot_angle.item()
                error_angles.append(error_angle)
                error_throttle = user_throttle - pilot_throttle.item()
                error_throttles.append(error_throttle)
                w.writerow([user_angle, pilot_angle, error_angle, user_throttle, pilot_throttle, error_throttle])

            # Per-tub summary statistics.
            print_mean(correct_angles, "Correct angle")
            print_mean(estimate_angles, "Estimate angle")
            print_mean(correct_throttles, "Correct throttle")
            print_mean(estimate_throttles, "Estimate throttle")
            print_mean(error_angles, "Error angle")
            print_mean(error_throttles, "Error throttle")
            print_lse(correct_angles, estimate_angles, "Angle LSE")
            print_lse(correct_throttles, estimate_throttles, "Throttle LSE")
# Command-line entry point: docopt parses the usage block in the module docstring.
if __name__ == '__main__':
    args = docopt(__doc__)
    validate(model_path=args['--model'], tub_names=args['--tub'], model_type=args['--type'], output=args['--output'])
| 3.078125
| 3
|
utils/__init__.py
|
utplanets/deepmars2
| 2
|
12774520
|
<gh_stars>1-10
from dotenv import find_dotenv, load_dotenv
import sys
import os
def getenv(name):
    """Return the value of environment variable *name* (None if unset)."""
    return os.getenv(name)
def load_env():
    """Locate the nearest .env file and load it into the process environment."""
    load_dotenv(find_dotenv())
| 1.851563
| 2
|
learning/modules/map_to_map/map_batch_select.py
|
esteng/guiding-multi-step
| 0
|
12774521
|
<filename>learning/modules/map_to_map/map_batch_select.py
import torch
import numpy as np
import torch.nn as nn
class MapBatchSelect(nn.Module):
    """
    Given a batch of B maps and poses, and a boolean mask of length B, return a batch of P maps and poses, where
    P is the number of True in the boolean mask.
    This is used to pick a subset of semantic maps for path-prediction, if we are not planning on every single timestep
    """
    def __init__(self):
        super(MapBatchSelect, self).__init__()

    def init_weights(self):
        # No learnable parameters - nothing to initialize.
        pass

    def one(self, tensor, plan_mask, device):
        """Select the rows of *tensor* where *plan_mask* is True."""
        # torch.tensor(..., dtype=torch.bool) replaces the original
        # float-tensor "== True" construction; same result, no float detour.
        mask_t = torch.tensor(plan_mask, dtype=torch.bool, device=device)
        return tensor[mask_t]

    def forward(self, maps, map_coverages, map_poses, cam_poses, noisy_poses, start_poses, sent_embeds, plan_mask=None, show=""):
        """Filter every batched input down to the timesteps flagged in plan_mask.

        Returns the inputs unchanged when plan_mask is None; inputs that are
        None pass through as None. `show` is accepted for interface
        compatibility and unused (as in the original).
        """
        if plan_mask is None:
            return maps, map_coverages, map_poses, cam_poses, noisy_poses, start_poses, sent_embeds

        mask_t = torch.tensor(plan_mask, dtype=torch.bool, device=maps.device)

        # Boolean indexing on the batch dimension replaces the original
        # newaxis / expand_as / view dance and yields identical tensors.
        select_maps = maps[mask_t]
        select_coverages = map_coverages[mask_t]

        # sent_embeds may already have been pre-selected by the caller; only
        # filter it when its batch dimension still matches the mask length.
        if sent_embeds.shape[0] == mask_t.shape[0]:
            select_sent_embeds = sent_embeds[mask_t]
        else:
            select_sent_embeds = sent_embeds

        select_poses = map_poses[mask_t] if map_poses is not None else None
        select_cam_poses = cam_poses[mask_t] if cam_poses is not None else None
        select_noisy_poses = noisy_poses[mask_t] if noisy_poses is not None else None
        select_start_poses = start_poses[mask_t] if start_poses is not None else None

        #print("Selected " + str(len(select_maps)) + " maps from " + str(len(maps)))
        return select_maps, select_coverages, select_poses, select_cam_poses, select_noisy_poses, select_start_poses, select_sent_embeds
| 2.84375
| 3
|
ooobuild/lo/bridge/bridge.py
|
Amourspirit/ooo_uno_tmpl
| 0
|
12774522
|
# coding: utf-8
#
# Copyright 2022 :Barry-Thomas-Paul: Moss
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http: // www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Service Class
# this is a auto generated file generated by Cheetah
# Libre Office Version: 7.3
# Namespace: com.sun.star.bridge
from .x_bridge import XBridge as XBridge_8e4e0a1a
from ..lang.x_component import XComponent as XComponent_98dc0ab5
from ..lang.x_initialization import XInitialization as XInitialization_d46c0cca
class Bridge(XBridge_8e4e0a1a, XComponent_98dc0ab5, XInitialization_d46c0cca):
    """
    Service Class

    This meta service allows the bridgefactory service to instantiate an interprocess bridge using a certain transfer protocol.

    Components, that support a certain protocol, must have at least two service names:

    The protocol name should be written as common servicenames, first letter is a capital letter, the rest in small letters postfixed by Bridge (e.g.: com.sun.star.bridge.UrpBridge would be correct servicename for the \"urp\" protocol). However, the protocol names are compared case insensitive. If there exist two services supporting the same protocol, it is arbitrary which one is chosen, so this should be omitted.

    See Also:
        `API Bridge <https://api.libreoffice.org/docs/idl/ref/servicecom_1_1sun_1_1star_1_1bridge_1_1Bridge.html>`_
    """
    # UNO type metadata - presumably consumed by the ooouno runtime for
    # service lookup (auto-generated file: do not edit these by hand).
    __ooo_ns__: str = 'com.sun.star.bridge'
    __ooo_full_ns__: str = 'com.sun.star.bridge.Bridge'
    __ooo_type_name__: str = 'service'
__all__ = ['Bridge']
| 1.65625
| 2
|
core/recc/algorithm/search.py
|
bogonets/answer
| 3
|
12774523
|
<filename>core/recc/algorithm/search.py
# -*- coding: utf-8 -*-
from typing import Any
def any_none(*args: Any) -> bool:
    """Return True if at least one argument is None."""
    return any(arg is None for arg in args)
def any_not_none(*args: Any) -> bool:
    """Return True if at least one argument is not None."""
    return any(arg is not None for arg in args)
def all_none(*args: Any) -> bool:
    """Return True if every argument is None (vacuously True for no args)."""
    return all(arg is None for arg in args)
def all_not_none(*args: Any) -> bool:
    """Return True if no argument is None (vacuously True for no args)."""
    return all(arg is not None for arg in args)
| 2.921875
| 3
|
scripts/P1_Files/start/TicTacToe-Flask.py
|
wesleybeckner/general_applications_of_neural_networks
| 0
|
12774524
|
<gh_stars>0
from flask import Flask, render_template_string, request, make_response
from TicTacToe import *
TEXT = """
<!doctype html>
<html>
<head><title>Tic Tac Toe</title></head>
<body>
<h1>Tic Tac Toe</h1>
<h2>{{msg}}</h2>
<form action="" method="POST">
<table>
{% for j in range(0, 3) %}
<tr>
{% for i in range(0, 3) %}
<td>
<button type="submit" name="choice" value="{{j*3+i+1}}"
{{"disabled" if board[j*3+i+1]!=" "}}>
{{board[j*3+i+1]}}
</button>
</td>
{% endfor %}
</tr>
{% endfor %}
</table>
<button type="submit" name="reset">Start Over</button>
</form>
</body>
</html>
"""
app = Flask(__name__)
# Module-level game engine shared across all requests; per-client board
# state is round-tripped through the "game_board" cookie in play_game().
game = GameEngine(setup='auto')
game.setup_game()
@app.route("/", methods=["GET", "POST"])
def play_game():
    """Single-page tic-tac-toe view.

    Board state round-trips through the ``game_board`` cookie; a POST
    either applies the clicked move or resets the game.
    """
    game_cookie = request.cookies.get("game_board")
    print(game_cookie)
    if game_cookie:
        # Rebuild the board dict {1..9: mark} from the comma-separated cookie.
        game.board = {i: x for i, x in zip(range(1, 10),
                      game_cookie.split(","))}
    if "choice" in request.form:
        move = int(request.form['choice'])
        winner, board = game.make_move(move)
        print(board)
    if "reset" in request.form:
        game.setup_game()
        game.winner = ""
        game.board = {i: " " for i in range(1, 10)}
    if game.winner != "":
        if game.winner == "Stalemate":
            msg = game.winner + "!"
        else:
            msg = game.winner + " Won!"
    else:
        msg = "play move"
    resp = make_response(render_template_string(TEXT,
                                                msg=msg,
                                                board=game.board))
    # Persist the board for the next request.
    c = ",".join(map(str, game.board.values()))
    resp.set_cookie("game_board", c)
    return resp
if __name__ == "__main__":
    # Pick a random port in 5000-5999 to avoid collisions across re-runs.
    # NOTE(review): `random` is only in scope via `from TicTacToe import *`
    # above - an explicit `import random` would be safer.
    port = 5000 + random.randint(0, 999)
    print(port)
    url = "http://1172.16.58.3:{0}".format(port)
    print(url)
    app.run(use_reloader=True, debug=True, port=port)
| 3.09375
| 3
|
anomaly_detection/c3d.py
|
Kim-Ha-Jeong/Capstone_flask
| 3
|
12774525
|
<reponame>Kim-Ha-Jeong/Capstone_flask
# -*- coding: utf-8 -*-
import h5py
import cv2
import keras.backend as K
import numpy as np
from keras.layers.convolutional import Conv3D, MaxPooling3D, ZeroPadding3D
from keras.layers.core import Dense, Dropout, Flatten
from keras.models import Model
from keras.models import Sequential
from keras.utils.data_utils import get_file
import anomaly_detection.configuration as cfg
C3D_MEAN_PATH = 'https://github.com/adamcasson/c3d/releases/download/v0.1/c3d_mean.npy'
def preprocess_input(video):
    """Turn a raw video array into a C3D-ready batch.

    Samples 16 evenly spaced frames, resizes them to 128x171, subtracts the
    Sports-1M channel mean, center-crops to 112x112 and prepends a batch axis.
    Assumes *video* is (frames, H, W, C) - TODO confirm against callers.
    """
    # 16 evenly spaced frame indices across the whole clip.
    idx = np.ceil(np.linspace(0, video.shape[0] - 1, 16)).astype(int)
    sampled = video[idx]

    # Reshape to 128x171
    resized = np.zeros((sampled.shape[0], 128, 171, sampled.shape[3]))
    for n, frame in enumerate(sampled):
        resized[n, :, :, :] = cv2.resize(frame, (171, 128))

    mean_path = get_file('c3d_mean.npy',
                         C3D_MEAN_PATH,
                         cache_subdir='models',
                         md5_hash='08a07d9761e76097985124d9e8b2fe34')
    resized -= np.load(mean_path)

    # Crop to 112x112, then add the leading batch dimension.
    cropped = resized[:, 8:120, 30:142, :]
    return np.expand_dims(cropped, axis=0)
def C3D(weights='sports1M'):
    """Build the C3D video classification network (Tran et al.).

    Args:
        weights: 'sports1M' to load the pretrained weights from
            cfg.c3d_model_weights, or None for random initialization.

    Returns:
        A keras Sequential model ending in a 487-way softmax.

    Raises:
        ValueError: for any other *weights* value.

    NOTE: layer names and ordering are the contract with the pretrained
    weight file - do not reorder or rename layers.
    """
    if weights not in {'sports1M', None}:
        raise ValueError('weights should be either be sports1M or None')

    # Input layout depends on the backend's channel convention.
    if K.image_data_format() == 'channels_last':
        shape = (16, 112, 112, 3)
    else:
        shape = (3, 16, 112, 112)

    model = Sequential()
    # Conv block 1: pool only spatially (keeps all 16 frames).
    model.add(Conv3D(64, 3, activation='relu', padding='same', name='conv1', input_shape=shape))
    model.add(MaxPooling3D(pool_size=(1, 2, 2), strides=(1, 2, 2), padding='same', name='pool1'))
    # Conv blocks 2-5: halve time and space at each pool.
    model.add(Conv3D(128, 3, activation='relu', padding='same', name='conv2'))
    model.add(MaxPooling3D(pool_size=(2, 2, 2), strides=(2, 2, 2), padding='valid', name='pool2'))
    model.add(Conv3D(256, 3, activation='relu', padding='same', name='conv3a'))
    model.add(Conv3D(256, 3, activation='relu', padding='same', name='conv3b'))
    model.add(MaxPooling3D(pool_size=(2, 2, 2), strides=(2, 2, 2), padding='valid', name='pool3'))
    model.add(Conv3D(512, 3, activation='relu', padding='same', name='conv4a'))
    model.add(Conv3D(512, 3, activation='relu', padding='same', name='conv4b'))
    model.add(MaxPooling3D(pool_size=(2, 2, 2), strides=(2, 2, 2), padding='valid', name='pool4'))
    model.add(Conv3D(512, 3, activation='relu', padding='same', name='conv5a'))
    model.add(Conv3D(512, 3, activation='relu', padding='same', name='conv5b'))
    # Pad so the final 2x2x2 pool divides evenly.
    model.add(ZeroPadding3D(padding=(0, 1, 1)))
    model.add(MaxPooling3D(pool_size=(2, 2, 2), strides=(2, 2, 2), padding='valid', name='pool5'))
    model.add(Flatten())
    # Fully connected head; fc6 is the usual feature layer.
    model.add(Dense(4096, activation='relu', name='fc6'))
    model.add(Dropout(0.5))
    model.add(Dense(4096, activation='relu', name='fc7'))
    model.add(Dropout(0.5))
    model.add(Dense(487, activation='softmax', name='fc8'))

    if weights == 'sports1M':
        model.load_weights(cfg.c3d_model_weights)

    return model
def c3d_feature_extractor():
    """Return a C3D model truncated at the fc6 layer (4096-d clip features)."""
    base = C3D()
    return Model(inputs=base.input, outputs=base.get_layer('fc6').output)
| 2.40625
| 2
|
models/InitialBlock.py
|
JJavierga/ENet-Real-Time-Semantic-Segmentation
| 268
|
12774526
|
###################################################
# Copyright (c) 2019 #
# Authors: @iArunava <<EMAIL>> #
# @AvivSham <<EMAIL>> #
# #
# License: BSD License 3.0 #
# #
# The Code in this file is distributed for free #
# usage and modification with proper linkage back #
# to this repository. #
###################################################
import torch
import torch.nn as nn
class InitialBlock(nn.Module):
    """ENet initial block.

    Runs a strided 3x3 convolution (main branch, batch-normalized) and a
    2x2 max-pool (side branch) in parallel, concatenates them along the
    channel axis and applies a channel-wise PReLU. Output has
    in_channels + out_channels channels at half the spatial resolution.
    """
    def __init__ (self,in_channels = 3,out_channels = 13):
        super().__init__()

        self.maxpool = nn.MaxPool2d(kernel_size=2,
                                      stride = 2,
                                      padding = 0)

        self.conv = nn.Conv2d(in_channels,
                                out_channels,
                                kernel_size = 3,
                                stride = 2,
                                padding = 1)

        # The PReLU acts on the concatenated tensor, which has
        # in_channels + out_channels channels. The original hard-coded 16,
        # which only matched the default 3 + 13 configuration and crashed
        # for any other channel counts.
        self.prelu = nn.PReLU(in_channels + out_channels)

        self.batchnorm = nn.BatchNorm2d(out_channels)

    def forward(self, x):
        main = self.conv(x)
        main = self.batchnorm(main)

        side = self.maxpool(x)

        # Concatenate conv branch and pooled input along the channel axis.
        x = torch.cat((main, side), dim=1)
        x = self.prelu(x)

        return x
| 2.90625
| 3
|
app/apps/product/migrations/0001_initial.py
|
tonyguesswho/jumga
| 0
|
12774527
|
# Generated by Django 3.1.4 on 2021-01-05 20:37
from django.db import migrations, models
import django.db.models.deletion
import uuid
class Migration(migrations.Migration):
    """Auto-generated initial migration: creates the Product table.

    Do not hand-edit applied migrations; create a new one instead.
    """

    initial = True

    dependencies = [
        ('seller', '0003_auto_20210105_2035'),
    ]

    operations = [
        migrations.CreateModel(
            name='Product',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('description', models.TextField(blank=True)),
                ('price', models.DecimalField(decimal_places=2, max_digits=50)),
                ('title', models.CharField(max_length=255)),
                # Public identifier alongside the auto-increment pk.
                ('uuid', models.UUIDField(default=uuid.uuid4, editable=False)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                # Deleting a seller cascades to their products.
                ('seller', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='seller.seller')),
            ],
        ),
    ]
| 1.78125
| 2
|
programmers-lecture/1.intro/2.get_sum_of_first_and_last_elements.py
|
khh180cm/algorithm
| 0
|
12774528
|
<filename>programmers-lecture/1.intro/2.get_sum_of_first_and_last_elements.py
"""
입력으로 주어지는 리스트의 첫 원소와 마지막 원소의 합을 리턴
"""
def solution(x):
    """Return the sum of the first and last elements of the int list *x*.

    Raises:
        ValueError: if *x* is not a non-empty list of ints.
    """
    # Validate with a real exception: `assert` is silently stripped when
    # Python runs with -O, so it must not guard input.
    if not (isinstance(x, list) and x and all(isinstance(i, int) for i in x)):
        raise ValueError('Value error!!!')
    first_element = x[0]
    last_element = x[-1]
    return first_element + last_element
# Demo: first (0) + last (10) of 0..10 -> 10.
res = solution([i for i in range(11)])
print(f'해는 {res}')
| 4.09375
| 4
|
source/hellogithub.py
|
yy7576/Hello-github
| 0
|
12774529
|
<reponame>yy7576/Hello-github<filename>source/hellogithub.py<gh_stars>0
#!/usr/bin/python
#Filename: hellogithub.py
print('Hello World')
| 1.21875
| 1
|
scripts/add_new_plugin.py
|
d066y/detectem
| 0
|
12774530
|
import os
import click
ROOT_DIRECTORY = os.path.abspath(
os.path.join(os.path.dirname(__file__), os.pardir)
)
PLUGIN_DIRECTORY = os.path.join(ROOT_DIRECTORY, 'detectem/plugins')
PLUGIN_DIRECTORIES = [
d for d in os.listdir(PLUGIN_DIRECTORY)
if os.path.isdir(os.path.join(PLUGIN_DIRECTORY, d)) and d != '__pycache__'
]
# CLI: scaffold a new detectem plugin plus its YAML test fixture.
# (No docstring on `main` on purpose: click would surface it as help text.)
@click.command()
@click.option(
    '--matcher',
    type=click.Choice(['url', 'body', 'header', 'xpath']),
    required=True,
    help='Set the matcher type.',
)
@click.option(
    '--category',
    type=click.Choice(PLUGIN_DIRECTORIES),
    required=True,
    help='Set plugin category.',
)
@click.argument('name')
def main(name, category, matcher):
    create_plugin_file(name, category, matcher)
    create_test_file(name, matcher)
def create_plugin_file(name, category, matcher):
    """Write a skeleton plugin module for *name* under the *category* dir.

    Raises FileExistsError instead of overwriting an existing plugin.
    """
    # Doubled braces ({{ }}) survive str.format() as literal braces; the
    # inner triple-quoted section is a commented-out js_matchers example.
    plugin_template = '''
from detectem.plugin import Plugin
class {title}Plugin(Plugin):
    name = '{name}'
    homepage = ''
    matchers = [
        {{'{matcher}': 'Plugin signature v(?P<version>[0-9\.]+)'}},
    ]
    """
    js_matchers = [
        {{'check': '', 'version': ''}},
    ]
    """
'''.format(name=name, title=name.title(), matcher=matcher).lstrip()
    plugin_filename = name + '.py'
    plugin_filepath = os.path.join(
        PLUGIN_DIRECTORY, category, plugin_filename
    )
    if os.path.exists(plugin_filepath):
        raise FileExistsError('Plugin file already exists.')
    with open(plugin_filepath, mode='w') as f:
        f.write(plugin_template)
    print('Created plugin file at {}'.format(plugin_filepath))
def create_test_file(name, matcher):
    """Write a YAML test-fixture stub for the new plugin.

    Raises FileExistsError instead of overwriting an existing fixture.
    """
    test_template = '''
- plugin: {name}
  matches:
    - {matcher}:
      version:
'''.format(name=name, matcher=matcher).lstrip()
    test_filename = name + '.yml'
    test_filepath = os.path.join(
        ROOT_DIRECTORY, 'tests', 'plugins', 'fixtures', test_filename
    )
    if os.path.exists(test_filepath):
        raise FileExistsError('Test file already exists.')
    with open(test_filepath, mode='w') as f:
        f.write(test_template)
    print('Created test file at {}'.format(test_filepath))
if __name__ == "__main__":
main()
| 2.46875
| 2
|
my_packages/DepthProjection/DepthProjectionModule.py
|
PlanNoa/video_super_resolution
| 6
|
12774531
|
import torch
import torch.nn as nn
from utils.tools import transpose1323
from my_packages.DepthProjection.models.HG_model import HGModel
class DepthProjectionModule(nn.Module):
    """Wraps a pretrained hourglass depth network and averages its
    predictions over the two input frames."""

    def __init__(self):
        super(DepthProjectionModule, self).__init__()
        self.model = HGModel("my_packages/DepthProjection/pretrained/best_generalization_net_G.pth")

    def forward(self, input):
        input = transpose1323(input)
        # Run the depth net on each of the two frames independently.
        first = self.model(input[0:1])
        second = self.model(input[1:2])
        # Average the two predictions and strip the batch dimension.
        averaged = torch.mean(torch.stack([first, second]), dim=0)
        return torch.squeeze(averaged[0])
| 2.421875
| 2
|
data/results/centralities/pagerank/networkX_pagerank_performance.py
|
cassinius/graphinius
| 17
|
12774532
|
<reponame>cassinius/graphinius
import networkx as nx
from networkx import pagerank, pagerank_numpy, pagerank_scipy
import time
import json
output_folder = 'comparison_selected'


def run_and_dump(pr_fn, graph, label, out_name, **pr_kwargs):
    """Time one PageRank variant, print the duration and dump scores as JSON.

    BUGFIX: the original script ended every section with a bare ``file.close``
    (missing call parentheses), so none of the 18 output files was ever
    explicitly closed. The ``with`` block guarantees flush + close.
    """
    start = time.time()
    scores = pr_fn(graph, alpha=0.85, **pr_kwargs)
    duration = (time.time() - start) * 1000
    print(label + " took " + str(duration) + " ms.")
    with open(output_folder + '/' + out_name, 'w') as f:
        f.write(json.dumps(scores))


'''
Unweighted graphs
'''
print("========================================")
print("========== UNWEIGHTED GRAPHS ===========")
print("========================================")

G_social_300 = nx.read_edgelist('../../social_network_edges_300.csv', create_using=nx.DiGraph())
G_social_1K = nx.read_edgelist('../../social_network_edges_1K.csv', create_using=nx.DiGraph())
G_social_20K = nx.read_edgelist('../../social_network_edges_20K.csv', create_using=nx.DiGraph())

run_and_dump(pagerank, G_social_300, "PageRank on ~300 node social net",
             'pagerank_social_network_edges_300.csv_results.json')
run_and_dump(pagerank, G_social_1K, "PageRank on ~1K node social net",
             'pagerank_social_network_edges_1K.csv_results.json')
run_and_dump(pagerank, G_social_20K, "PageRank on ~20K node social net",
             'pagerank_social_network_edges_20K.csv_results.json')

'''
NUMPY - Unweighted
'''
print("========================================")
print("========= NUMPY - UNWEIGHTED ===========")
print("========================================")

run_and_dump(pagerank_numpy, G_social_300, "PageRank NUMPY on ~300 node social net",
             'pagerank_numpy_social_network_edges_300.csv_results.json')
run_and_dump(pagerank_numpy, G_social_1K, "PageRank NUMPY on ~1K node social net",
             'pagerank_numpy_social_network_edges_1K.csv_results.json')
run_and_dump(pagerank_numpy, G_social_20K, "PageRank NUMPY on ~20K node social net",
             'pagerank_numpy_social_network_edges_20K.csv_results.json')

'''
SCIPY - Unweighted
'''
print("========================================")
print("========= SCIPY - UNWEIGHTED ===========")
print("========================================")

run_and_dump(pagerank_scipy, G_social_300, "PageRank SCIPY on ~300 node social net",
             'pagerank_scipy_social_network_edges_300.csv_results.json')
run_and_dump(pagerank_scipy, G_social_1K, "PageRank SCIPY on ~1K node social net",
             'pagerank_scipy_social_network_edges_1K.csv_results.json')
run_and_dump(pagerank_scipy, G_social_20K, "PageRank SCIPY on ~20K node social net",
             'pagerank_scipy_social_network_edges_20K.csv_results.json')

'''
Weighted graphs
'''
print("========================================")
print("=========== WEIGHTED GRAPHS ============")
print("========================================")

G_social_300_weighted = nx.read_weighted_edgelist('../../social_network_edges_300_weighted.csv', create_using=nx.DiGraph())
G_social_1K_weighted = nx.read_weighted_edgelist('../../social_network_edges_1K_weighted.csv', create_using=nx.DiGraph())
G_social_20K_weighted = nx.read_weighted_edgelist('../../social_network_edges_20K_weighted.csv', create_using=nx.DiGraph())

run_and_dump(pagerank, G_social_300_weighted, "PageRank on ~300 node weighted social net",
             'pagerank_social_network_edges_300.csv_weighted_results.json', weight="weight")
run_and_dump(pagerank, G_social_1K_weighted, "PageRank on ~1K node weighted social net",
             'pagerank_social_network_edges_1K.csv_weighted_results.json', weight="weight")
run_and_dump(pagerank, G_social_20K_weighted, "PageRank on ~20K node weighted social net",
             'pagerank_social_network_edges_20K.csv_weighted_results.json', weight="weight")

'''
NUMPY - Weighted
'''
print("========================================")
print("=========== NUMPY - WEIGHTED ===========")
print("========================================")

# NOTE: as in the original, the numpy/scipy runs below do NOT pass
# weight="weight", so edge weights are ignored despite the section title.
run_and_dump(pagerank_numpy, G_social_300_weighted, "PageRank NUMPY on ~300 node social net",
             'pagerank_numpy_social_network_edges_300_weighted.csv_results.json')
run_and_dump(pagerank_numpy, G_social_1K_weighted, "PageRank NUMPY on ~1K node social net",
             'pagerank_numpy_social_network_edges_1K_weighted.csv_results.json')
# The ~20K weighted NUMPY run was disabled in the original (presumably too
# slow / memory-hungry) and stays disabled here:
# run_and_dump(pagerank_numpy, G_social_20K_weighted, "PageRank NUMPY on ~20K node social net",
#              'pagerank_numpy_social_network_edges_20K_weighted.csv_results.json')

'''
SCIPY - Weighted
'''
print("========================================")
print("=========== SCIPY - WEIGHTED ===========")
print("========================================")

run_and_dump(pagerank_scipy, G_social_300_weighted, "PageRank SCIPY on ~300 node social net",
             'pagerank_scipy_social_network_edges_300_weighted.csv_results.json')
run_and_dump(pagerank_scipy, G_social_1K_weighted, "PageRank SCIPY on ~1K node social net",
             'pagerank_scipy_social_network_edges_1K_weighted.csv_results.json')
run_and_dump(pagerank_scipy, G_social_20K_weighted, "PageRank SCIPY on ~20K node social net",
             'pagerank_scipy_social_network_edges_20K_weighted.csv_results.json')
| 2.609375
| 3
|
tests/test_utilities.py
|
ryankanno/nyc-restaurant-inspections-api
| 1
|
12774533
|
<filename>tests/test_utilities.py<gh_stars>1-10
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from nose.tools import ok_
from nyc_inspections.utilities import empty_dict
import unittest
class TestUtilities(unittest.TestCase):
    """Unit tests for nyc_inspections.utilities.empty_dict."""

    def test_empty_dict(self):
        # Without a key list, the whole dict is cleared in place.
        whole = {"foo": 1}
        ok_(len(whole) == 1)
        empty_dict(whole)
        ok_(len(whole) == 0)

        # With explicit keys, only those entries are removed.
        partial = {"foo": 1, "bar": 2}
        ok_(len(partial) == 2)
        ok_(partial["foo"] == 1)
        empty_dict(partial, ["foo"])
        ok_("foo" not in partial)
        ok_("bar" in partial)
# vim: filetype=python
| 2.671875
| 3
|
code/extracting_from_payslip.py
|
BastinFlorian/BoondManager-Auto-Holidays-Validation
| 0
|
12774534
|
<reponame>BastinFlorian/BoondManager-Auto-Holidays-Validation
'''Functions extracting the number of CP and RTT per employee
Extracting from pdf -- pdf2xt(path)
Selecting needed values and dealing with specific cases -- extraction_rtt_conges(test)
Creating a df with the selected informations per employee -- out(data)
'''
from pdfminer.pdfinterp import PDFResourceManager, PDFPageInterpreter
from pdfminer.converter import TextConverter
from pdfminer.pdfpage import PDFPage
from io import BytesIO
import pandas as pd
# Extract text from pdf using pdf miner###
def pdf2xt(path):
    """Extract the full text of the PDF at *path* via pdfminer.

    Returns the extracted text as utf-8 encoded bytes.
    """
    manager = PDFResourceManager()
    buf = BytesIO()
    codec = 'utf-8'
    converter = TextConverter(manager, buf, codec=codec)
    with open(path, "rb") as fp:  # open in 'rb' mode to read PDF bytes
        interpreter = PDFPageInterpreter(manager, converter)
        for page in PDFPage.get_pages(fp, check_extractable=True):
            interpreter.process_page(page)
    converter.close()
    extracted = buf.getvalue()
    buf.close()
    return extracted
# Regex on extracted pdf to get cp N, cp N-1 & RTT
# and the name of each BeNexter
def extraction_rtt_conges(test):
    """Parse the glued number string from a pay slip into leave counters.

    *test* is the dot-split pieces of a run like "2.0810.58.4.22"; pairs of
    2-digit decimals and integer parts are re-assembled into floats, then
    assigned positionally to RTT / CP year-N / CP year-N-1 (balance, taken,
    accrued) with tolerance checks of the form balance + taken == accrued.

    Returns a 9-element list:
    [RTT balance, taken, accrued, CP-N balance, taken, accrued,
     CP-N-1 balance, taken, accrued].
    """
    # NOTE(review): this isinstance() result is discarded - it was probably
    # meant to be an assert or an explicit raise.
    isinstance(test, list)
    RTT_solde = 0
    RTT_pris = 0
    RTT_acquis = 0
    Congé_N_solde = 0
    Congé_N_pris = 0
    Congé_N_acquis = 0
    Congé_N_1_solde = 0
    Congé_N_1_pris = 0
    Congé_N_1_acquis = 0
    list_of_nb = []
    decimale = []
    entier = []
    # Solve pdf extraction problem with the following
    # techniques
    # Extracted value in the form "2.0810.58.4.22" to transform into 2.08, 10.58 and 4.22
    for nb in test[::-1]:
        decimale.append(nb[:2])
        entier.append(nb[2:])
    entier = entier[1:]
    for i in range(len(entier)):
        try:
            list_of_nb.append(float(entier[i] + '.' + decimale[i]))
        except ValueError:
            print("ERROR FUNCTION EXTRACTION_RTT_CONGES")
    # Some pay slips do not contains taken CP and CP N-1
    # Deal with all the cases
    # To solve precision problems : 2.006 + 2.00 = 4.00600001 != 4.006
    # We add epsilon noise
    cmpt = 0
    # if RTT taken holidays for this month
    if abs(list_of_nb[0] + list_of_nb[1] - list_of_nb[2]) < 0.001:
        RTT_solde, RTT_pris, RTT_acquis = list_of_nb[:3]
        cmpt = 3
    else:
        RTT_solde, RTT_acquis = list_of_nb[:2]
        cmpt = 2
    # if CP_N taken this month)
    try:
        if (abs(list_of_nb[cmpt]
                + list_of_nb[cmpt + 1]
                - list_of_nb[cmpt + 2]) < 0.001):
            Congé_N_solde, Congé_N_pris, Congé_N_acquis = list_of_nb[cmpt: cmpt + 3]
            cmpt += 3
        else:
            Congé_N_solde, Congé_N_acquis = list_of_nb[cmpt: cmpt + 2]
            cmpt += 2
    except IndexError:
        print(list_of_nb)
    # if CP N-1 taken this month
    print(cmpt)
    try:
        if ((abs(list_of_nb[cmpt] + list_of_nb[cmpt + 1] - list_of_nb[cmpt + 2]) < 0.001)):
            Congé_N_1_solde, Congé_N_1_pris, Congé_N_1_acquis = list_of_nb[cmpt: cmpt + 3]
        else:
            if ((abs(list_of_nb[cmpt] - list_of_nb[cmpt + 1]) < 0.001)):
                Congé_N_1_solde, Congé_N_1_acquis = list_of_nb[cmpt: cmpt + 2]
    # NOTE(review): broad except used as fallback when fewer fields exist on
    # the slip; the bound name `e` is unused.
    except Exception as e:
        try:
            if ((abs(list_of_nb[cmpt] - list_of_nb[cmpt + 1]) < 0.001)):
                Congé_N_1_solde, Congé_N_1_acquis = list_of_nb[cmpt: cmpt + 2]
        except Exception as e:
            Congé_N_1_solde, Congé_N_1_acquis = 0, 0
    # If CP-N were taken this month, the N-1 "balance" slot actually holds
    # the N-1 days taken; shift it over.
    if (Congé_N_pris > 0.001):
        Congé_N_1_pris = Congé_N_1_solde
        Congé_N_1_solde = 0
    print([RTT_solde, RTT_pris, RTT_acquis, Congé_N_solde, Congé_N_pris, Congé_N_acquis, \
           Congé_N_1_solde, Congé_N_1_pris, Congé_N_1_acquis])
    return [RTT_solde, RTT_pris, RTT_acquis, Congé_N_solde, Congé_N_pris, Congé_N_acquis, \
            Congé_N_1_solde, Congé_N_1_pris, Congé_N_1_acquis]
### Create a df with all the informations (cp, rtt, name) of the beNexteurs
### Create a df with all the informations (cp, rtt, name) of the beNexteurs
def out(data):
    """Build a DataFrame of per-employee leave counters from pay-slip texts.

    *data* is an iterable of scraped pay-slip strings. Returns a DataFrame
    with first/last name plus the 9 counters from extraction_rtt_conges().

    NOTE(review): the local dict `out` shadows this function's own name, and
    `rtt_pris` / `conge_n_pris` are set but never used.
    """
    out = {}
    var = ["prenom", "nom", "RTT_solde", "RTT_pris", "RTT_acquis", "Congé_N_solde", "Congé_N_pris", \
           "Congé_N_acquis", "Congé_N_1_solde", "Congé_N_1_pris", "Congé_N_1_acquis"]
    for name in var:
        out[name] = []
    # iterate on all BeNexteur
    tmp = []
    for bulletin in data:
        u = 0
        # Split on whitespace to reach the line of numbers we care about
        # (that line contains the word "Net" because of the scraping).
        bulletin_splitted = bulletin.split()
        # Get name and surname
        nom_prenom = bulletin_splitted[0].split("##")
        nom_compose = (len(nom_prenom) <= 3)
        if nom_compose:
            nom = nom_prenom[2]
            i = 0
            # All-uppercase tokens still belong to the (compound) last name.
            while (bulletin_splitted[1].split("##")[i].upper() ==
                   bulletin_splitted[1].split("##")[i]):
                nom += ' ' + bulletin_splitted[1].split("##")[i]
                i += 1
            prenom = bulletin_splitted[1].split("##")[i]
            i += 1
            if (bulletin_splitted[2].split("##")[0] != "Eléments"):
                prenom += ' ' + bulletin_splitted[2].split("##")[0]
        else:
            nom = nom_prenom[2]
            prenom = nom_prenom[3]
            if (bulletin_splitted[1].split("##")[0] != "Eléments"):
                prenom += ' ' + bulletin_splitted[1].split("##")[0]
        rtt_pris = True
        conge_n_pris = True
        for idx in range(len(bulletin_splitted)):
            sent = bulletin_splitted[idx]
            if (idx != len(bulletin_splitted) - 1):
                sent_nxt = bulletin_splitted[idx + 1]
            else:
                sent_nxt = ""
            # The counters live in the long token right before "payé".
            if ("Net" in sent and len(sent) > 22 and sent_nxt == "payé"):
                u += 1
                # ("payé" == sent_nxt.strip()[:4])):
                # extract the various useful fields from the pdf
                res = extraction_rtt_conges(sent[:-3].split("."))
                for i in range(len(res)):
                    # Check the value does not exceed the plausible maximum
                    # (works around a scraping glitch).
                    ## TO CHANGE IF IT EVER BECOMES POSSIBLE TO HAVE MORE
                    ## THAN 35 DAYS OF CP IN ONE YEAR
                    cond = (res[i] < 35)
                    if (cond):
                        out[var[i + 2]].append(res[i])
                    else:
                        out[var[i + 2]].append(0)
        if ((nom + prenom not in tmp) and u > 0):
            tmp.append(nom + prenom)
            out["nom"].append(nom.replace("-", " "))
            out["prenom"].append(prenom.replace("-", " "))
    out = pd.DataFrame(out)
    return (out)
| 2.9375
| 3
|
apps/landfill/tests/test_ratings.py
|
muffinresearch/addons-server
| 1
|
12774535
|
<filename>apps/landfill/tests/test_ratings.py<gh_stars>1-10
# -*- coding: utf-8 -*-
from nose.tools import eq_
import amo
import amo.tests
from addons.models import Addon, Review
from landfill.ratings import generate_ratings
class RatingsTests(amo.tests.TestCase):
    """Checks that landfill.ratings.generate_ratings seeds Review rows.

    (Python 2 codebase - note the `unicode` builtin below.)
    """

    def setUp(self):
        super(RatingsTests, self).setUp()
        self.addon = Addon.objects.create(type=amo.ADDON_EXTENSION)

    def test_ratings_generation(self):
        # Generating one rating yields exactly one Review tied to the add-on.
        generate_ratings(self.addon, 1)
        eq_(Review.objects.all().count(), 1)
        eq_(Review.objects.last().addon, self.addon)
        eq_(unicode(Review.objects.last().title), u'Test Review 1')
        eq_(Review.objects.last().user.email, u'<EMAIL>')
| 1.9375
| 2
|
plot_rts.py
|
terraregina/BalancingControl
| 0
|
12774536
|
<gh_stars>0
# %%
from misc import calc_dkl, extract_params_from_ttl
import pickle as pickle
import numpy as np
import matplotlib.pyplot as plt
from numpy.linalg import multi_dot
import itertools as itertools
from misc import run_action_selection, test_vals, params_dict
from misc import load_data, load_data_from_ttl, simulate, make_title
import os
import pandas as pd
from scipy import stats
''' PARAMETERS'''
# Global experiment configuration shared by every routine below.
cols = plt.rcParams['axes.prop_cycle'].by_key()['color']  # default matplotlib color cycle
methods = ['ardm','rdm']                     # action-selection mechanisms to compare
modes = ['conflict', 'agreement', 'goal', 'habit']
nmodes = len(modes)
trials = 1000                                # simulated trials per condition
polss = np.array([3,8,81,2])                 # policy-space sizes indexing into test_vals
# Each regime entry: [sample_post, sample_other, prior_as_start, regime-name]
params_list = [[False, False, True, 'standard'],
               [True, False, True, 'post_prior1'],
               [True, False, False, 'post_prior0'],
               [False, True, True, 'like_prior1'],
               [False, True, False, 'like_prior0']]
# NOTE(review): Windows-style separators — assumes this script runs on Windows.
path = os.getcwd() + '\\parameter_data\\'
parameter_names = ['npi', 'methods', 'b', 'wd', 's', 'params_list']
'''
BRUTE FORCE PARAMETER SEARCH
'''
pols = np.array([3]) #,8,81]  -- only npi=3 is searched here
par_list = []
# Grid of decision thresholds (b), drift variances (s) and drift/variance ratios.
# (Duplicated re-assignments of bs/nmodes/trials/parameter_names removed; values
# are unchanged.)
bs = np.arange(1,3,0.3).round(4)
ss = np.arange(0.005, 0.011, 0.001).round(5)
wds = np.arange(200, 10, -10)
size = wds.size+1
# Turn the ratio grid into absolute drifts, one row per variance, offset by s.
wds = wds[np.newaxis,:]*ss[:,np.newaxis]
wds = wds + ss[:,np.newaxis]
drift_var = np.column_stack((wds.ravel(), ss.repeat(size-1))).round(6)
# Full cartesian product: [npi, method, b, (wd, s), regime-params]
for p in itertools.product(pols, methods, bs, drift_var, params_list):
    par_list.append([p[0]]+[p[1]] + [p[2]]+ [p[3]]+ [p[4]])
# def simulate(i, selector, b, s, wd, sample_post, sample_other, prior_as_start, plot=False, calc_fit=False):
# npi=2
# empirical = np.zeros([nmodes, npi])
# RT = np.zeros([nmodes, trials])
# if plot:
# x_positions = []
# for i in range(4):
# x_positions.append([x for x in range(i*npi + i, i*npi + i + npi)])
# fig, ax = plt.subplots(2,1)
# for m, mode in enumerate(modes):
# i = np.where(polss == npi)[0][0]
# prior = test_vals[i][m][1]
# like = test_vals[i][m][2]
# post = test_vals[i][m][0]
# # print('variance:', s)
# actions, ac_sel = run_action_selection(selector, prior, like, post, trials,\
# prior_as_start=prior_as_start, sample_other=sample_other, sample_post=sample_post,\
# var=s, wd=wd, b=b)
# actions = np.asarray(actions)
# actions = actions[actions != -1]
# actions = actions.tolist()
# empirical[m,:] = (np.bincount(actions + [x for x in range(npi)]) - 1) / len(actions)
# RT[m,:] = ac_sel.RT.squeeze()
# if plot:
# x_pos = x_positions[m]
# lab =' '.join([mode, 'mode', str(stats.mode(ac_sel.RT)[0][0][0]), 'median', str(np.median(ac_sel.RT)), 'mean', str(ac_sel.RT.mean())])
# ax[0].hist(ac_sel.RT, bins=100, alpha=0.5, label=lab)
# if m == 0:
# ax[1].bar(x_pos, post, alpha=0.5, color='k', label = "post" )
# else:
# ax[1].bar(x_pos, post, alpha=0.5, color='k')
# ax[1].bar(x_pos, empirical[m,:], label=mode + ' empir', alpha=0.5, color=cols[m])
# if plot:
# ax[0].legend()
# ax[1].legend()
# plt.show()
# if calc_fit:
# posts = np.zeros([len(modes), 3]) # translate the posteriors
# post = np.asarray(test_vals)[i,:,0] # into a numpy array
# for indx, p in enumerate(post):
# posts[indx,:] = np.asarray(p)
# fit = np.abs((posts - empirical)/posts).mean(axis=1).mean()
# if calc_fit:
# return actions, empirical, RT, fit
# else:
# return actions, empirical, RT
def generate_data():
    """Run one simulation per entry of the global par_list grid and pickle the
    resulting RT distributions and empirical action frequencies to ``path``.

    Each output file name encodes npi, selector, regime, b, wd and s, so it can
    later be parsed back by extract_params_from_ttl().
    """
    # not currently iterating over policy sizes
    for ind, p in enumerate(par_list):
        print(ind)
        # Unpack one grid point: [npi, selector, b, (wd, s), regime-params].
        npi = p[0]
        selector = p[1]
        b = p[2]
        wd = p[3][0]
        s = p[3][1]
        sample_post, sample_other, prior_as_start, reg = p[4]
        ttl = '_'.join(['npi', str(npi), selector, reg, 'b' ,str(b), 'wd' ,str(wd), 's', str(s), '.txt'])
        print('\n' + ttl)
        # Index of this policy size inside the global test_vals tables.
        i = np.where(polss == npi)[0][0]
        actions, empirical, RT = simulate(selector, b,s,wd, sample_post, sample_other, prior_as_start, npi=npi)
        ttl = '_'.join(['npi', str(npi), selector, reg, 'b' ,str(b), 'wd' ,str(wd), 's', str(s), '.txt'])
        with open(path + ttl, 'wb') as fp:
            # NOTE(review): 'dict' shadows the builtin; harmless here but worth renaming.
            dict = {
                'RT': RT,
                'empirical': empirical,
                'parameters': parameter_names,
                'parameter_values':p
            }
            pickle.dump(dict, fp)
def calc_penalty(fit, agr_median, desired_median = 300, m=100, k=0.165, alpha=0, beta=1):
    """Score a candidate parameter set.

    Combines a quadratic penalty on the deviation of the agreement-mode median
    RT from ``desired_median`` (scaled by ``m``) with an exponential penalty on
    the posterior-fit error ``fit`` (scaled by ``k``); ``alpha`` and ``beta``
    weight the two terms.

    Returns [weighted total, median penalty, posterior-fit penalty].
    """
    med_term = 1 + ((agr_median - desired_median) / m) ** 2
    fit_term = np.exp(fit / k)
    total = alpha * med_term + beta * fit_term
    return [total, med_term, fit_term]
def find_best_params(initial_params, sd_b= 0.01, sd_wd = 10, no = 2, iters=100,tol= 1e-3):
    """(1+no) evolution-strategy search over the drift wd for a single
    selector/regime configuration, minimizing calc_penalty().

    initial_params: [npi, selector, b, [wd, s], regime-params-list].
    Returns a dict of per-iteration fit/penalty traces and the parameters used.

    NOTE(review): sd_b and sd_wd are accepted but never used — mutation widths
    are hard-coded below (b and s mutate with sigma 0).
    """
    npi = initial_params[0]
    selector = initial_params[1]
    b = initial_params[2]
    wd = initial_params[3][0]
    s = initial_params[3][1]
    sample_post, sample_other, prior_as_start, reg = initial_params[4]
    ind = np.where(polss == npi)[0][0]
    posts = np.zeros([len(modes), npi]) # translate the posteriors
    post = np.asarray(test_vals)[ind,:,0] # into a numpy array
    for indx, p in enumerate(post):
        posts[indx,:] = np.asarray(p)
    best_fit = np.infty
    i = 0
    # Per-iteration traces; column 0 is the current parent, columns 1..no are
    # offspring.  NOTE(review): sized by the global `trials`, not `iters` —
    # assumes trials >= iters; confirm.
    post_fits = np.zeros([trials, no+1])
    fit_penalties = np.zeros([trials, no+1])
    agr_meds = np.zeros([trials, no+1])
    med_penalties = np.zeros([trials, no+1])
    penalties = np.zeros([trials, no+1])
    best_wds = np.zeros(trials)
    # calc fitness of initial guess
    np.random.seed(0)  # fixed seed so all candidates face identical noise
    action, empirical, RT = simulate(selector, b, s, wd, sample_post, sample_other, prior_as_start, npi=npi)
    post_fits[i,0] = np.abs((posts - empirical)/posts).mean(axis=1).mean()
    agr_meds[i,0] = np.median(RT[1,:])  # row 1 = agreement mode
    penalties[i,0], med_penalties[i,0], fit_penalties[i,0] = calc_penalty(post_fits[i,0], agr_meds[i,0])
    best_fit_counter = 0   # consecutive iterations without improvement
    total = 0              # accumulated stagnation; terminates the search at 30
    last_best_fit = 1042342  # sentinel larger than any plausible fit
    factor = 0.15          # relative mutation width for the wd/s ratio
    while(best_fit > tol and i < iters and total < 30):
        np.random.seed()  # re-randomize mutation draws each generation
        # generate offspring: b and s are copied (sigma 0), only the drift
        # ratio wd/s is perturbed by `factor`.
        bs = np.append(np.array([b]), np.random.normal(b,0,no))
        wds = (wd/s+ np.append(np.array([0]), np.random.normal(0,wd/s*factor, no)))*s
        print("wds", wds)
        ss = np.append(np.array([s]), np.random.normal(s, 0, no))
        # calc offspring fitness
        for o in range(no):
            np.random.seed(0)  # same noise realization as the parent
            action, empirical, RT = simulate(selector, bs[o+1], ss[o+1], wds[o+1], sample_post, sample_other, prior_as_start, npi=npi)
            post_fits[i,o+1] = np.abs((posts - empirical)/posts).mean(axis=1).mean()
            agr_meds[i,o+1] = np.median(RT[1,:])
            penalties[i,o+1], med_penalties[i,o+1], fit_penalties[i,o+1] = calc_penalty(post_fits[i,o+1], agr_meds[i,o+1])
        # select best candidate
        print("\npost_fits", post_fits[i,:])
        best = np.argmin(penalties[i,:])
        b, bs[best] = [bs[best]]*2
        wd, wds[best] = [wds[best]]*2
        best_wds[i] = wd
        s, ss[best] = [ss[best]]*2
        # Promote the winner to parent slot (column 0) of the next iteration.
        post_fits[i+1,0] = post_fits[i,best]
        fit_penalties[i+1,0] = fit_penalties[i,best]
        agr_meds[i+1,0] = agr_meds[i,best]
        med_penalties[i+1,0] = med_penalties[i,best]
        penalties[i+1,0] = penalties[i,best]
        best_fit = post_fits[i,best]
        print(i, best, best_fit)
        print(b,wd)
        # Track stagnation; after 10 flat iterations shrink the mutation width.
        if last_best_fit == best_fit:
            best_fit_counter += 1
        else:
            best_fit_counter = 0
        last_best_fit = best_fit
        print(best_fit_counter)
        if(best_fit_counter >= 10):
            total = total + best_fit_counter
            factor = factor/5
            best_fit_counter = 0
        i += 1
    results = {
        'fits': post_fits,
        'penalties': fit_penalties,
        'agr_meds': agr_meds,
        'best_wds': best_wds,
        'params': initial_params,
    }
    return results
def sim_plot_rt_post(initial_params, trials = 2000, save_fig = False):
    """Simulate one configuration across all four modes, then plot the RT
    histograms (top panel) and the empirical vs. true posterior action
    frequencies (bottom panel); prints the mean relative posterior error.

    initial_params: [npi, selector, b, [wd, s], regime-params-list].
    If save_fig, the figure is written via make_title() instead of shown.
    """
    cols = plt.rcParams['axes.prop_cycle'].by_key()['color']
    npi = initial_params[0]
    selector = initial_params[1]
    b = initial_params[2]
    wd = initial_params[3][0]
    s = initial_params[3][1]
    sample_post, sample_other, prior_as_start, reg = initial_params[4]
    empirical = np.zeros([nmodes, npi])
    RT = np.zeros([nmodes, trials])
    ind = np.where(polss == npi)[0][0]
    # Non-overlapping x offsets so each mode's bars sit side by side.
    x_positions = []
    for i in range(4):
        x_positions.append([x for x in range(i*npi + i, i*npi + i + npi)])
    fig, ax = plt.subplots(2,1)
    for m, mode in enumerate(modes):
        prior = test_vals[ind][m][1]
        like = test_vals[ind][m][2]
        post = test_vals[ind][m][0]
        actions, ac_sel = run_action_selection(selector, prior, like, post, trials,\
            prior_as_start=prior_as_start, sample_other=sample_other, sample_post=sample_post,\
            var=s, wd=wd, b=b)
        # Append one of each action so bincount covers all npi bins, then undo.
        height = (np.bincount(actions + [x for x in range(npi)]) - 1) / len(actions)
        empirical[m,:] =height
        x_pos = x_positions[m]
        lab =' '.join([mode, 'mode', str(stats.mode(ac_sel.RT)[0][0][0]), 'median', str(np.median(ac_sel.RT)), 'mean', str(ac_sel.RT.mean())])
        ax[0].hist(ac_sel.RT, bins=100, alpha=0.5, label=lab)
        if m == 0:
            ax[1].bar(x_pos, post, alpha=0.5, color='k', label = "post" )
        else:
            ax[1].bar(x_pos, post, alpha=0.5, color='k')
        ax[1].bar(x_pos, height, label=mode + ' empir', alpha=0.5, color=cols[m])
    ax[0].legend()
    ax[1].legend()
    posts = np.zeros([len(modes),npi]) # translate the posteriors
    post = np.asarray(test_vals)[ind,:,0] # into a numpy array
    for indx, p in enumerate(post):
        posts[indx,:] = np.asarray(p)
    # Mean relative deviation of empirical frequencies from the true posterior.
    print( np.abs((posts - empirical)/posts).mean(axis=1).mean())
    if not save_fig:
        plt.show()
    else:
        plt.savefig(make_title(initial_params, add='figure'), dpi=300)
def plot_from_file(ttl):
    """Load a pickled simulation result named ``ttl`` from ``path`` and plot
    the stored empirical frequencies against the true posterior, plus RT
    histograms; prints pandas summary stats and the mean relative fit error.
    """
    cols = plt.rcParams['axes.prop_cycle'].by_key()['color']
    ax = plt.subplot(211)
    ax2 = plt.subplot(212)
    # The file name encodes all simulation parameters.
    npi, selector, pars, regime, s, wd, b = extract_params_from_ttl(ttl)
    with open(path + ttl, 'rb') as fp:
        data = pickle.load(fp)
    x_positions = []
    for i in range(4):
        x_positions.append([x for x in range(i*npi + i, i*npi + i + npi)])
    empirical = np.asarray(data['empirical'])
    RT = np.asarray(data['RT'])
    df = pd.DataFrame(RT.T, index = np.arange(RT.shape[1]), columns=modes)
    print(df.describe())
    print(df.mode())
    print(df.median())
    for i in range(4):
        x_pos = x_positions[i]
        # NOTE(review): test_vals[0] hard-codes the npi=3 table regardless of
        # the npi parsed from the file name — confirm this is intentional.
        post = test_vals[0][i][0]
        ax.bar(x_pos, post, label='post', alpha=0.5, color="k")
        ax.bar(x_pos, empirical[i,:], label='empirical', alpha=0.5, color=cols[i])
        ax2.hist(RT[i,:], bins=100, alpha=0.5)
    plt.show()
    ind = np.where(polss == npi)[0][0]
    # NOTE(review): width 3 is hard-coded here as well; breaks for npi != 3.
    posts = np.zeros([len(modes), 3]) # translate the posteriors
    post = np.asarray(test_vals)[ind,:,0] # into a numpy array
    for indx, p in enumerate(post):
        posts[indx,:] = np.asarray(p)
    print( np.abs((posts - empirical)/posts).mean(axis=1).mean())
####################################################
####################################################
####################################################
'''
OPTIMIZE
'''
# Hand-picked starting points for the parameter search.
# NOTE(review): several of these assignments are dead — each initial_params is
# overwritten before use; only the last one before each call takes effect.
regime = 'like_prior0'
initial_params = [3, 'ardm', 1.0061389130038418, [1.3957648731779595, 0.00885223], params_dict[regime] + [regime]]
regime = 'like_prior0'
s = 0.0009
initial_params = [3, 'ardm', 5, [0.4926206128104312, 0.009], params_dict['like_prior0'] + ['like_prior0']]
initial_params = [3, 'rdm', 3, [330*s, s], params_dict[regime] + [regime]]
s = 0.0009
# Sweep threshold b, then variance s, then drift wd around a reference DDM
# setting, saving one RT/posterior figure per configuration.
for b in [1.01]:
    initial_params = [2, 'ddm', b, [0.09983708592967723, s], params_dict[regime] + [regime]]
    sim_plot_rt_post(initial_params, trials=10000, save_fig=True)
    var = 0.0009
    for s in(var + var*np.arange(0.005, 0.015, 0.002)):
        initial_params = [2, 'ddm', b, [0.09983708592967723, s], params_dict[regime] + [regime]]
        sim_plot_rt_post(initial_params, trials=10000, save_fig=True)
    ratio = 0.09983708592967723 / 0.0009
    for wd in(s*(ratio + ratio*np.arange(0.005, 0.015, 0.002))):
        # NOTE(review): the loop variable wd is never used inside the body —
        # the fixed drift 0.09983708592967723 is passed instead; confirm intent.
        initial_params = [2, 'ddm', b, [0.09983708592967723, s], params_dict[regime] + [regime]]
        sim_plot_rt_post(initial_params, trials=10000, save_fig=True)
'''serialise find best parameters'''
# Run the evolutionary fit for every method/regime combination and pickle the
# collected result dicts.
# NOTE(review): `method` is iterated but never written into initial_params —
# the selector stays whatever it was above; confirm this is intended.
results = []
for method in methods:
    print(method)
    for key in params_dict:
        print(key)
        initial_params[4] = params_dict[key] + [key]
        results.append(find_best_params(initial_params))
with open('fit_results.txt', 'wb') as fp:
    pickle.dump(results,fp)
'''
SIMULATE AND PLOT RT DIST WITH POST APPROX
'''
'''
EXPLORE AROUND TOP CANDIDATES
'''
# %%
# Load the grid-search results and, per (npi, selector, regime) group, mark the
# row with the smallest posterior-fit error as optimal.
df = load_data()
# print the fit values for all the best fits
df['opt_fit_group'] = df.groupby(['npi', 'selector','regime'])['fit'].transform('min')
df['optimal'] =(df['fit'] == df['opt_fit_group'])*1
best_fits = df[df['optimal'] == 1]
print(best_fits[['selector','regime','b', 's', 'w', 'fit','agr_mode','hab_mode','goal_mode', 'conf_mode']].sort_values(['selector','fit']))
trials = 1000
# Re-simulate each best candidate with manually tweaked b/wd to inspect the
# neighborhood of the optimum.
for ind, row in best_fits.iterrows():
    if row.selector == '0.0':
        pass
    else:
        npi, selector, par, reg, so, wdo, bo = extract_params_from_ttl(row.title)
        # NOTE(review): the fitted wdo/bo from the file name are discarded and
        # replaced by hand-picked values below.
        wd = 1.07811
        b = 5 #1.1
        i = np.where(polss == npi)[0][0]
        ratio = wd/so - 10
        so = so  # no-op; kept for parity with the original
        wd = so*ratio
        np.random.seed(0)
        sim_plot_rt_post([npi, selector, b, [wd,so], par + [reg]], trials=1000)
'''
PLOT APPROXIMATION FROM DATA FILE AND STATS
'''
ttl = 'npi_3_rdm_standard_b_1_wd_0.025_s_2.5e-05_.txt'
| 1.78125
| 2
|
tests/unit/injection/inject_unit_test.py
|
mt3o/injectable
| 71
|
12774537
|
<reponame>mt3o/injectable
from unittest.mock import MagicMock
import pytest
from pytest import fixture
from pytest_mock import MockFixture
from injectable import inject, Injectable, inject_multiple
from injectable.errors import InjectionError
from injectable.constants import DEFAULT_NAMESPACE
from injectable.injection.injection_utils import RegistryType
@fixture
def get_dependency_name_mock(mocker: MockFixture):
    """Patch get_dependency_name as referenced inside injectable.injection.inject."""
    patched = mocker.patch("injectable.injection.inject.get_dependency_name")
    return patched
@fixture
def get_dependency_registry_type_mock(mocker: MockFixture):
    """Patch get_dependency_registry_type as referenced inside injectable.injection.inject."""
    patched = mocker.patch("injectable.injection.inject.get_dependency_registry_type")
    return patched
@fixture
def get_namespace_injectables_mock(mocker: MockFixture):
    """Patch get_namespace_injectables as referenced inside injectable.injection.inject."""
    patched = mocker.patch("injectable.injection.inject.get_namespace_injectables")
    return patched
@fixture
def filter_by_group_mock(mocker: MockFixture):
    """Patch filter_by_group as referenced inside injectable.injection.inject."""
    patched = mocker.patch("injectable.injection.inject.filter_by_group")
    return patched
@fixture
def resolve_single_injectable_mock(mocker: MockFixture):
    """Patch resolve_single_injectable as referenced inside injectable.injection.inject."""
    patched = mocker.patch("injectable.injection.inject.resolve_single_injectable")
    return patched
class TestInject:
    """Unit tests for inject(): name resolution, group filtering, optionality
    and lazy instantiation, with all collaborators mocked out."""

    def test__inject__with_default_values(
        self,
        get_dependency_name_mock,
        get_dependency_registry_type_mock,
        get_namespace_injectables_mock,
        filter_by_group_mock,
        resolve_single_injectable_mock,
    ):
        """inject(dep) with defaults resolves in DEFAULT_NAMESPACE, skips group
        filtering and instantiates eagerly (lazy=False)."""
        # given
        # The MagicMock class object itself is used as a sentinel instance.
        expected_instance = MagicMock
        injectable = MagicMock(spec=Injectable)
        injectable.get_instance.return_value = expected_instance
        matches = {injectable}
        dependency_name = "TEST"
        get_dependency_name_mock.return_value = dependency_name
        registry_type = RegistryType.CLASS
        get_dependency_registry_type_mock.return_value = registry_type
        get_namespace_injectables_mock.return_value = matches
        resolve_single_injectable_mock.return_value = injectable
        dependency = "TEST"
        # when
        instance = inject(dependency)
        # then
        assert get_namespace_injectables_mock.called is True
        (
            dependency_name_arg,
            registry_type_arg,
            namespace_arg,
        ) = get_namespace_injectables_mock.call_args[0]
        assert dependency_name_arg is dependency_name
        assert registry_type_arg is registry_type
        assert namespace_arg is DEFAULT_NAMESPACE
        assert filter_by_group_mock.called is False
        assert resolve_single_injectable_mock.called is True
        (
            dependency_name_arg,
            registry_type_arg,
            matches_arg,
        ) = resolve_single_injectable_mock.call_args[0]
        assert dependency_name_arg == dependency_name
        assert registry_type_arg == registry_type
        assert matches_arg == matches
        assert injectable.get_instance.called is True
        assert injectable.get_instance.call_args[1]["lazy"] is False
        assert instance == expected_instance

    def test__inject__with_no_matches_for_dependency_when_non_optional(
        self,
        get_dependency_name_mock,
        get_dependency_registry_type_mock,
        get_namespace_injectables_mock,
        filter_by_group_mock,
        resolve_single_injectable_mock,
    ):
        """With no namespace matches and optional=False, inject raises
        InjectionError before any filtering/resolution happens."""
        # given
        matches = {}
        dependency_name = "TEST"
        get_dependency_name_mock.return_value = dependency_name
        registry_type = RegistryType.CLASS
        get_dependency_registry_type_mock.return_value = registry_type
        get_namespace_injectables_mock.return_value = matches
        dependency = "TEST"
        # when
        with pytest.raises(InjectionError):
            inject(dependency)
        # then
        assert get_namespace_injectables_mock.called is True
        assert filter_by_group_mock.called is False
        assert resolve_single_injectable_mock.called is False

    def test__inject__with_no_matches_for_dependency_when_optional(
        self,
        get_dependency_name_mock,
        get_dependency_registry_type_mock,
        get_namespace_injectables_mock,
        filter_by_group_mock,
        resolve_single_injectable_mock,
    ):
        """With no namespace matches and optional=True, inject returns None
        instead of raising."""
        # given
        matches = {}
        dependency_name = "TEST"
        get_dependency_name_mock.return_value = dependency_name
        registry_type = RegistryType.CLASS
        get_dependency_registry_type_mock.return_value = registry_type
        get_namespace_injectables_mock.return_value = matches
        dependency = "TEST"
        # when
        instance = inject(dependency, optional=True)
        # then
        assert get_namespace_injectables_mock.called is True
        assert filter_by_group_mock.called is False
        assert resolve_single_injectable_mock.called is False
        assert instance is None

    def test__inject__with_no_matches_for_group_when_non_optional(
        self,
        get_namespace_injectables_mock,
        filter_by_group_mock,
        resolve_single_injectable_mock,
    ):
        """When group filtering empties the matches and optional=False,
        inject raises InjectionError."""
        # given
        matches = {MagicMock(spec=Injectable)}
        lookup_key = "TEST"
        lookup_type = "class"
        # NOTE(review): unlike the other tests, the mock here returns a
        # 3-element list — presumably mirroring an older
        # get_namespace_injectables signature; confirm against current API.
        get_namespace_injectables_mock.return_value = [matches, lookup_key, lookup_type]
        filter_by_group_mock.return_value = {}
        dependency = "TEST"
        # when
        with pytest.raises(InjectionError):
            inject(dependency, group="TEST_GROUP")
        # then
        assert get_namespace_injectables_mock.called is True
        assert filter_by_group_mock.called is True
        assert resolve_single_injectable_mock.called is False

    def test__inject__with_no_matches_for_group_when_optional(
        self,
        get_namespace_injectables_mock,
        filter_by_group_mock,
        resolve_single_injectable_mock,
    ):
        """When group filtering empties the matches and optional=True,
        inject returns None instead of raising."""
        # given
        matches = {MagicMock(spec=Injectable)}
        lookup_key = "TEST"
        lookup_type = "class"
        # NOTE(review): see the matching non-optional test — list-valued
        # return is inconsistent with the other tests in this class.
        get_namespace_injectables_mock.return_value = [matches, lookup_key, lookup_type]
        filter_by_group_mock.return_value = {}
        dependency = "TEST"
        # when
        instance = inject(dependency, group="TEST_GROUP", optional=True)
        # then
        assert get_namespace_injectables_mock.called is True
        assert filter_by_group_mock.called is True
        assert resolve_single_injectable_mock.called is False
        assert instance is None

    def test__inject__with_explicit_values(
        self,
        get_dependency_name_mock,
        get_dependency_registry_type_mock,
        get_namespace_injectables_mock,
        filter_by_group_mock,
        resolve_single_injectable_mock,
    ):
        """All keyword arguments (namespace, group, exclude_groups, lazy) are
        forwarded to the right collaborator; only the resolved primary
        injectable is instantiated, lazily."""
        # given
        expected_instance = MagicMock
        primary_injectable = MagicMock(spec=Injectable)
        primary_injectable.get_instance.return_value = expected_instance
        non_primary_injectable = MagicMock(spec=Injectable)
        matches = {
            primary_injectable,
            non_primary_injectable,
            MagicMock(spec=Injectable),
        }
        dependency_name = "TEST"
        get_dependency_name_mock.return_value = dependency_name
        registry_type = RegistryType.CLASS
        get_dependency_registry_type_mock.return_value = registry_type
        get_namespace_injectables_mock.return_value = matches
        filtered_matches = {primary_injectable, non_primary_injectable}
        filter_by_group_mock.return_value = filtered_matches
        resolve_single_injectable_mock.return_value = primary_injectable
        dependency = "TEST"
        namespace = "TEST_NAMESPACE"
        group = "TEST_GROUP"
        exclude_groups = ["A", "B"]
        # when
        instance = inject(
            dependency,
            namespace=namespace,
            group=group,
            exclude_groups=exclude_groups,
            lazy=True,
            optional=False,
        )
        # then
        assert get_namespace_injectables_mock.called is True
        (
            dependency_name_arg,
            registry_type_arg,
            namespace_arg,
        ) = get_namespace_injectables_mock.call_args[0]
        assert dependency_name_arg is dependency_name
        assert registry_type_arg is registry_type
        assert namespace_arg is namespace
        assert filter_by_group_mock.called is True
        matches_arg, group_arg, exclude_groups_arg = filter_by_group_mock.call_args[0]
        assert matches_arg == matches
        assert group_arg == group
        assert exclude_groups_arg == exclude_groups
        assert resolve_single_injectable_mock.called is True
        (
            dependency_name_arg,
            registry_type_arg,
            matches_arg,
        ) = resolve_single_injectable_mock.call_args[0]
        assert dependency_name_arg == dependency_name
        assert registry_type_arg == registry_type
        assert matches_arg == filtered_matches
        assert primary_injectable.get_instance.called is True
        assert non_primary_injectable.get_instance.called is False
        assert primary_injectable.get_instance.call_args[1]["lazy"] is True
        assert instance == expected_instance
class TestInjectMultiple:
    """Unit tests for inject_multiple(): multi-match resolution, group
    filtering, optionality and lazy instantiation, with mocked collaborators."""

    def test__inject_multiple__with_default_values(
        self,
        get_dependency_name_mock,
        get_dependency_registry_type_mock,
        get_namespace_injectables_mock,
        filter_by_group_mock,
    ):
        """inject_multiple(dep) with defaults returns one eager instance per
        namespace match and skips group filtering."""
        # given
        expected_instances = [MagicMock(), MagicMock()]
        injectables = [MagicMock(spec=Injectable), MagicMock(spec=Injectable)]
        for i in range(len(expected_instances)):
            injectables[i].get_instance.return_value = expected_instances[i]
        matches = {*injectables}
        dependency_name = "TEST"
        get_dependency_name_mock.return_value = dependency_name
        registry_type = RegistryType.CLASS
        get_dependency_registry_type_mock.return_value = registry_type
        get_namespace_injectables_mock.return_value = matches
        dependency = "TEST"
        # when
        instances = inject_multiple(dependency)
        # then
        assert get_namespace_injectables_mock.called is True
        (
            dependency_name_arg,
            registry_type_arg,
            namespace_arg,
        ) = get_namespace_injectables_mock.call_args[0]
        # NOTE(review): this compares against `dependency` (not
        # `dependency_name` like the sibling tests) — both are the interned
        # string "TEST", so `is` passes either way; confirm which is intended.
        assert dependency_name_arg is dependency
        assert registry_type_arg is registry_type
        assert namespace_arg is DEFAULT_NAMESPACE
        assert filter_by_group_mock.called is False
        assert all(injectable.get_instance.called is True for injectable in injectables)
        assert all(
            injectable.get_instance.call_args[1]["lazy"] is False
            for injectable in injectables
        )
        assert len(instances) == len(expected_instances)
        assert all(instance in expected_instances for instance in instances)

    def test__inject_multiple__with_no_matches_for_dependency_when_non_optional(
        self,
        get_dependency_name_mock,
        get_dependency_registry_type_mock,
        get_namespace_injectables_mock,
        filter_by_group_mock,
    ):
        """With no namespace matches and optional=False, inject_multiple raises
        InjectionError before filtering."""
        # given
        matches = {}
        dependency_name = "TEST"
        get_dependency_name_mock.return_value = dependency_name
        registry_type = RegistryType.CLASS
        get_dependency_registry_type_mock.return_value = registry_type
        get_namespace_injectables_mock.return_value = matches
        dependency = "TEST"
        # when
        with pytest.raises(InjectionError):
            inject_multiple(dependency)
        # then
        assert get_namespace_injectables_mock.called is True
        assert filter_by_group_mock.called is False

    def test__inject_multiple__with_no_matches_for_dependency_when_optional(
        self,
        get_dependency_name_mock,
        get_dependency_registry_type_mock,
        get_namespace_injectables_mock,
        filter_by_group_mock,
    ):
        """With no namespace matches and optional=True, inject_multiple returns
        an empty list instead of raising."""
        # given
        matches = {}
        dependency_name = "TEST"
        get_dependency_name_mock.return_value = dependency_name
        registry_type = RegistryType.CLASS
        get_dependency_registry_type_mock.return_value = registry_type
        get_namespace_injectables_mock.return_value = matches
        dependency = "TEST"
        # when
        instances = inject_multiple(dependency, optional=True)
        # then
        assert get_namespace_injectables_mock.called is True
        assert filter_by_group_mock.called is False
        assert instances == []

    def test__inject_multiple__with_no_matches_for_group_when_non_optional(
        self,
        get_namespace_injectables_mock,
        filter_by_group_mock,
    ):
        """When group filtering empties the matches and optional=False,
        inject_multiple raises InjectionError."""
        # given
        matches = {MagicMock(spec=Injectable), MagicMock(spec=Injectable)}
        lookup_key = "TEST"
        lookup_type = "class"
        # NOTE(review): list-valued mock return, inconsistent with the other
        # tests in this class — see the matching note in TestInject.
        get_namespace_injectables_mock.return_value = [matches, lookup_key, lookup_type]
        filter_by_group_mock.return_value = {}
        dependency = "TEST"
        # when
        with pytest.raises(InjectionError):
            inject_multiple(dependency, group="TEST_GROUP")
        # then
        assert get_namespace_injectables_mock.called is True
        assert filter_by_group_mock.called is True

    def test__inject_multiple__with_no_matches_for_group_when_optional(
        self,
        get_namespace_injectables_mock,
        filter_by_group_mock,
    ):
        """When group filtering empties the matches and optional=True,
        inject_multiple returns an empty list instead of raising."""
        # given
        matches = {MagicMock(spec=Injectable), MagicMock(spec=Injectable)}
        lookup_key = "TEST"
        lookup_type = "class"
        get_namespace_injectables_mock.return_value = [matches, lookup_key, lookup_type]
        filter_by_group_mock.return_value = {}
        dependency = "TEST"
        # when
        instances = inject_multiple(dependency, group="TEST_GROUP", optional=True)
        # then
        assert get_namespace_injectables_mock.called is True
        assert filter_by_group_mock.called is True
        assert instances == []

    def test__inject_multiple__with_explicit_values(
        self,
        get_dependency_name_mock,
        get_dependency_registry_type_mock,
        get_namespace_injectables_mock,
        filter_by_group_mock,
    ):
        """All keyword arguments are forwarded; only the group-filtered subset
        of injectables is instantiated, lazily."""
        # given
        expected_instances = [MagicMock(), MagicMock()]
        injectables = [
            MagicMock(spec=Injectable),
            MagicMock(spec=Injectable),
            MagicMock(spec=Injectable),
        ]
        for i in range(len(expected_instances)):
            injectables[i].get_instance.return_value = expected_instances[i]
        matches = {*injectables}
        dependency_name = "TEST"
        get_dependency_name_mock.return_value = dependency_name
        registry_type = RegistryType.CLASS
        get_dependency_registry_type_mock.return_value = registry_type
        get_namespace_injectables_mock.return_value = matches
        filtered_matches = {*injectables[:2]}
        filter_by_group_mock.return_value = filtered_matches
        dependency = "TEST"
        namespace = "TEST_NAMESPACE"
        group = "TEST_GROUP"
        exclude_groups = ["A", "B"]
        # when
        instances = inject_multiple(
            dependency,
            namespace=namespace,
            group=group,
            exclude_groups=exclude_groups,
            lazy=True,
            optional=False,
        )
        # then
        assert get_namespace_injectables_mock.called is True
        (
            dependency_name_arg,
            registry_type_arg,
            namespace_arg,
        ) = get_namespace_injectables_mock.call_args[0]
        assert dependency_name_arg is dependency_name
        assert registry_type_arg is registry_type
        assert namespace_arg is namespace
        assert filter_by_group_mock.called is True
        matches_arg, group_arg, exclude_groups_arg = filter_by_group_mock.call_args[0]
        assert matches_arg == matches
        assert group_arg == group
        assert exclude_groups_arg == exclude_groups
        assert injectables[0].get_instance.called is True
        assert injectables[1].get_instance.called is True
        assert injectables[2].get_instance.called is False
        assert all(
            injectable.get_instance.call_args[1]["lazy"] is True
            for injectable in injectables[:2]
        )
        assert len(instances) == len(expected_instances)
        assert all(instance in expected_instances for instance in instances)
| 2.34375
| 2
|
setup.py
|
michi7x7/pm-mos-model
| 1
|
12774538
|
<reponame>michi7x7/pm-mos-model
from setuptools import setup
import distutils.cmd
from distutils.command.build_py import build_py
# don't import CryMOS!
build_cpp = {'__file__': 'CryMOS/cpp/build.py'}
with open('CryMOS/cpp/build.py') as f:
exec(f.read(), build_cpp)
ver_file = {'__file__': 'CryMOS/version.py'}
with open('CryMOS/version.py') as f:
exec(f.read(), ver_file)
import os
if 'PM_MOS_VERSION' in os.environ:
version = os.environ['PM_MOS_VERSION']
else:
version = ver_file['__version__']
class DwnlBoostCommand(distutils.cmd.Command):
    """Custom distutils command that downloads the boost zip archive and
    extracts only its header tree into ./build.

    NOTE(review): dl.bintray.com was shut down in 2021 — the base URL likely
    needs to move to boost's current download host.
    """

    description = 'Download boost to build directory'
    user_options = [
        # The format is (long option, short option, description).
        ('version=', None, 'Boost version (def. 1.72.0)'),
        ('hash=', None, 'zip-file hash (8c20440aaba21dd963c0f7149517445f50c62ce4eb689df2b5544cc89e6e621e)')
    ]

    def initialize_options(self):
        """Set default values for options."""
        # Each user option must be listed here with their default value.
        self.version = '1.72.0'
        self.hash = '8c20440aaba21dd963c0f7149517445f50c62ce4eb689df2b5544cc89e6e621e'

    def finalize_options(self):
        """Post-process options: derive the archive base name from the version."""
        self.fname = 'boost_' + self.version.replace('.', '_')

    def run(self):
        """Download the archive and extract the boost/ header tree to build/."""
        from tqdm import tqdm
        from zipfile import ZipFile
        import os.path
        print("Downloading boost")
        contents = self.download_boost_archive()
        print("Preparing to extract")
        zipfile = ZipFile(contents)
        print("Extracting")
        if not os.path.isdir("build"):
            os.mkdir("build")
        filelist = zipfile.namelist()
        # Only the header tree is needed for the build, not the full sources.
        filelist = list(f for f in filelist if f.startswith(self.fname + '/boost/'))
        for file in tqdm(iterable=filelist, total=len(filelist), unit='files'):
            zipfile.extract(member=file, path="build")

    def download_boost_archive(self):
        """Stream-download the boost zip with a progress bar, verify its
        SHA-256 against self.hash (if set), and return it as an in-memory
        BytesIO ready for ZipFile."""
        from tqdm import tqdm
        import requests
        from io import BytesIO
        from hashlib import sha256
        # BUG FIX: the version path segment was hard-coded to "1.72.0", so a
        # user-supplied --version produced a URL inconsistent with self.fname.
        url = f"https://dl.bintray.com/boostorg/release/{self.version}/source/{self.fname}.zip"
        with requests.get(url, stream=True, timeout=3) as r:
            r.raise_for_status()
            contents = BytesIO()
            total_len = int(r.headers['Content-Length'])
            pbar = tqdm(total=total_len, unit='B', unit_scale=True)
            for chunk in r.iter_content(chunk_size=8192):
                contents.write(chunk)
                pbar.update(len(chunk))
            pbar.close()
            buf = contents.getbuffer()
            # Fail loudly on truncated or tampered downloads.
            assert len(buf) == total_len
            if self.hash is not None:
                sh = sha256()
                sh.update(buf)
                assert self.hash == sh.hexdigest()
        return contents
class BuildPyFixVersion(build_py):
    """Variant of ``build_py`` that stamps the resolved version into the built package."""
    # Extra option so the version can be overridden on the command line.
    user_options = build_py.user_options + [
        ('version=', None, 'Version Name')
    ]

    def initialize_options(self):
        super().initialize_options()
        # Defaults to the module-level `version` resolved at import time
        # (env var PM_MOS_VERSION or the package's version file).
        self.version = version

    def run(self):
        super().run()
        self.fix_version()

    def fix_version(self):
        """Overwrite CryMOS/version.py in the build tree with the resolved version."""
        print("version is", self.version)
        file = self.build_lib + "/CryMOS/version.py"
        with open(file, 'w') as f:
            f.write(f"__version__ = '{self.version}'\n")
class BuildDocCmd(distutils.cmd.Command):
    """Custom command that converts the example notebooks to HTML documentation."""

    description = 'Build Documentation'
    user_options = []

    def initialize_options(self):
        """Set default values for options (none)."""
        pass

    def finalize_options(self):
        """Post-process options (none)."""
        pass

    def run(self):
        """Run command."""
        self.convert_ipynb()

    def convert_ipynb(self):
        """Convert every notebook in examples/ to HTML under docs/examples/.

        Tries the ``html_with_lenvs`` exporter (from
        jupyter_contrib_nbextensions) first and falls back to the stock
        HTML exporter if that fails.

        Raises:
            RuntimeError: if jupyter is missing or a conversion fails.
        """
        from subprocess import run, CalledProcessError
        from os import path, makedirs
        from glob import glob
        try:
            import jupyter
            import jupyter_contrib_nbextensions
        except ImportError as e:
            # FIX: error message misspelled the package name ("controb").
            raise RuntimeError("jupyter and jupyter_contrib_nbextensions must be installed!") from e
        # FIX: renamed the local `dir`, which shadowed the builtin.
        base_dir = path.dirname(__file__)
        src_dir = path.join(base_dir, 'examples')
        dst_dir = path.join(base_dir, 'docs', 'examples')
        makedirs(dst_dir, exist_ok=True)
        files = glob(src_dir + "/*.ipynb")
        for f in files:
            try:
                print(f"converting {f}")
                r1 = run(rf"""jupyter nbconvert --output-dir="{dst_dir}" --to html_with_lenvs "{f}" """,
                         shell=True, text=True, capture_output=True)
                # if latex_envs fails, try conversion without lenvs
                if r1.returncode != 0:
                    r1 = run(rf"""jupyter nbconvert --output-dir="{dst_dir}" --to html "{f}" """,
                             shell=True, text=True, capture_output=True)
                r1.check_returncode()
            except CalledProcessError as e:
                raise RuntimeError(f"{e.stderr}\n\nconverting {f} failed") from e
# Package metadata; `version`, the custom commands and `build_cpp` are
# defined earlier in this file.
setup(
    name='pm-mos-model',
    version=version,
    packages=['CryMOS', 'CryMOS.cpp'],
    url='https://github.com/michi7x7/pm-mos-model',
    license='Apache License 2.0',
    classifiers=['License :: OSI Approved :: Apache Software License'],
    author='<NAME>',
    author_email='<EMAIL>',
    description='A cryogenic model for the MOS transistor',
    install_requires=['numpy>=1.16', 'scipy>=1.2', 'si_prefix>=1.2', 'fdint>=2.0'],
    setup_requires=['pybind11>=2.4'],
    ext_modules=[build_cpp['ext']],
    cmdclass={
        'download_boost': DwnlBoostCommand,   # fetch Boost headers into build/
        'build_doc': BuildDocCmd,             # convert example notebooks to HTML
        'build_ext': build_cpp['BuildExt'],   # compile the pybind11 extension
        'build_py': BuildPyFixVersion,        # stamp __version__ into the build
    }
)
| 1.867188
| 2
|
app/main/views.py
|
Abzed/post-blog
| 0
|
12774539
|
<gh_stars>0
from flask import render_template,request,redirect,url_for,abort,flash
from . import main
# from ..request import get_quotes
from .forms import BlogForm,BioForm, CommentForm
from ..models import Blog,User, Comment
from flask_login import login_required,current_user
from .. import db,photos
from ..request import get_quote
from werkzeug.contrib.atom import AtomFeed
from urllib.parse import urljoin
def get_abs_url(url):
    """Resolve *url* against the root URL of the current request."""
    base = request.url_root
    return urljoin(base, url)
@main.route('/')
@login_required
def index():
    """Landing page: all blog posts plus quotes from the quote API."""
    inspiration = get_quote()
    all_posts = Blog.query.all()
    return render_template('index.html', blogs=all_posts, quotes=inspiration)
@main.route('/user/<uname>')
@login_required
def profile(uname):
    """Show *uname*'s profile page together with their blog posts.

    404s when no such user exists.
    """
    user = User.query.filter_by(username=uname).first()
    # Check existence before issuing further queries.
    if user is None:
        abort(404)
    # BUG FIX: posts were previously filtered by the *viewer's* id
    # (current_user.id), so every profile showed the visitor's own posts.
    blog = Blog.query.filter_by(user_id=user.id).all()
    return render_template("profile/profile.html", user=user, blog=blog)
@main.route('/new_blog', methods=['GET', 'POST'])
@login_required
def new_blog():
    """Create a new blog post authored by the logged-in user."""
    form = BlogForm()
    if form.validate_on_submit():
        post = Blog(blog=form.blog.data,
                    title=form.title.data,
                    user_id=current_user.id)
        post.save_blog()
        return redirect(url_for('main.index'))
    return render_template('blogs.html', form=form, legend='New Post')
@main.route('/comments/<int:blog_id>', methods=['GET', 'POST'])
@login_required
def new_comment(blog_id):
    """Show a blog post with its comments and handle new-comment submission."""
    blogs = Blog.query.get(blog_id)
    comment = Comment.query.filter_by(blog_id=blog_id).all()
    # BUG FIX: the form was first bound to the CommentForm *class* (missing
    # parentheses) and immediately rebound — the dead assignment is removed,
    # as is the no-op `blog_id = blog_id`.
    form = CommentForm()
    if form.validate_on_submit():
        comments = form.comment.data
        title = form.title.data
        user_id = current_user._get_current_object().id
        new_comment = Comment(comments=comments, title=title, blog_id=blog_id, user_id=user_id)
        new_comment.save_comment()
        # POST/redirect/GET so a browser refresh doesn't re-submit.
        return redirect(url_for('main.new_comment', blog_id=blog_id))
    return render_template('comments.html', form=form, comment=comment, blog_id=blog_id, blogs=blogs)
@main.route('/user/<uname>/bio',methods = ['GET','POST'])
@login_required
def update_bio(uname):
    """Edit the bio shown on *uname*'s profile; 404 if the user doesn't exist."""
    # NOTE(review): no ownership check — any logged-in user can edit any
    # user's bio; confirm that is intended.
    user = User.query.filter_by(username = uname).first()
    if user is None:
        abort(404)
    bioform = BioForm()
    if bioform.validate_on_submit():
        user.bio = bioform.bio.data
        db.session.add(user)
        db.session.commit()
        return redirect(url_for('.profile',uname=user.username))
    return render_template('profile/bio.html',bioform=bioform)
@main.route('/user/<uname>/update/pic', methods=['POST'])
@login_required
def update_pic(uname):
    """Upload a new profile picture for *uname* and store its relative path."""
    user = User.query.filter_by(username=uname).first()
    if 'photo' in request.files:
        filename = photos.save(request.files['photo'])
        # BUG FIX: the f-string contained no placeholder (a literal string),
        # so every user got the same bogus path; interpolate the saved name.
        path = f'photos/{filename}'
        user.profile_pic_path = path
        db.session.commit()
    return redirect(url_for('main.profile', uname=uname))
@main.route('/blogs/<int:blog_id>/delete', methods=['POST'])
@login_required
def delete(blog_id):
    """Delete a blog post; only its author may do so (403 otherwise)."""
    blog = Blog.query.get(blog_id)
    # BUG FIX: a missing post previously crashed with AttributeError on
    # `blog.blogger`; return 404 instead.
    if blog is None:
        abort(404)
    if blog.blogger != current_user:
        abort(403)
    Blog.delete(blog)
    # The index view fetches its own data; passing query results as url_for
    # kwargs only produced meaningless query-string noise.
    return redirect(url_for('.index'))
@main.route('/blog/<blog_id>/update', methods = ['GET','POST'])
@login_required
def update_blog(blog_id):
    """Let a post's author edit its title and body; 403 for anyone else."""
    blog = Blog.query.get(blog_id)
    # Only the author may edit.
    if blog.blogger != current_user:
        abort(403)
    form = BlogForm()
    if form.validate_on_submit():
        # Persist the edited fields.
        blog.title = form.title.data
        blog.blog = form.blog.data
        db.session.commit()
        flash("You have updated your Blog!")
        return redirect(url_for('main.index',id = blog.id))
    if request.method == 'GET':
        # Pre-fill the form with the current content.
        form.title.data = blog.title
        form.blog.data = blog.blog
    return render_template('blogs.html', form = form, legend='Update Post')
@main.route('/comments/<int:comment_id>/delete', methods=['POST'])
@login_required
def delete_comment(comment_id):
    """Delete a comment; only its author (``feedback``) may do so."""
    # BUG FIX: guard against a missing comment instead of crashing on
    # attribute access; also fixed the `coment` typo.
    comment = Comment.query.get(comment_id)
    if comment is None:
        abort(404)
    if comment.feedback != current_user:
        abort(403)
    Comment.delete_comment(comment)
    # Dropped the unused query results that were leaking into the redirect
    # URL as query-string parameters.
    return redirect(url_for('.index'))
@main.route('/subscribe',methods = ['POST','GET'])
def subscribe():
    """Render the subscription page (no server-side form handling here)."""
    # NOTE(review): intentionally public (no @login_required)? confirm.
    return render_template('subscribe.html')
@main.route('/feeds')
def feeds():
    """Serve an Atom feed containing every blog post.

    NOTE(review): ``werkzeug.contrib`` was removed in Werkzeug 1.0 — confirm
    the pinned werkzeug version still ships AtomFeed.
    """
    feed = AtomFeed(title='Latest Posts from My Blog',
                    feed_url=request.url, url=request.url_root)
    # NOTE(review): comment said "sort by created date", but no ordering is
    # applied — posts appear in query order.
    blogs = Blog.query.all()
    for post in blogs:
        feed.add(post.title, post.posted,
                 content_type='html',
                 id = post.id,
                 author= post.blogger.username,
                 published=post.posted,
                 updated=post.posted)
    return feed.get_response()
| 2.421875
| 2
|
SDKs/Aspose.Imaging-Cloud-SDK-for-Python/tests/test_ImagingApi.py
|
naeem244/Aspose.Imaging-for-Cloud
| 0
|
12774540
|
<reponame>naeem244/Aspose.Imaging-for-Cloud
import unittest
import os.path
import json
import inspect
import requests
import asposeimagingcloud
from asposeimagingcloud.ImagingApi import ImagingApi
from asposeimagingcloud.ImagingApi import ApiException
from asposeimagingcloud.models import ImagingResponse
from asposeimagingcloud.models import SaaSposeResponse
import asposestoragecloud
from asposestoragecloud.StorageApi import StorageApi
class TestAsposeImagingCloud(unittest.TestCase):
    """Integration tests for the Aspose Imaging Cloud SDK.

    Requires credentials/endpoints in ``setup.json`` and test assets under
    ``./data``.  FIX: the module used Python-2 ``print`` statements (syntax
    errors under Python 3); the identical error-reporting ``except`` block
    that was duplicated in every test now lives in ``_report``, and the
    storage upload boilerplate in ``_upload``.
    """

    def setUp(self):
        """Build storage and imaging API clients from setup.json."""
        with open('setup.json') as json_file:
            data = json.load(json_file)
        self.storageApiClient = asposestoragecloud.ApiClient.ApiClient(apiKey=str(data['app_key']), appSid=str(data['app_sid']), debug=True, apiServer=str(data['product_uri']))
        self.storageApi = StorageApi(self.storageApiClient)
        self.apiClient = asposeimagingcloud.ApiClient.ApiClient(apiKey=str(data['app_key']), appSid=str(data['app_sid']), debug=True, apiServer=str(data['product_uri']))
        self.imagingApi = ImagingApi(self.apiClient)
        self.output_path = str(data['output_location'])

    def _report(self, ex):
        """Log an ApiException (code + message) and re-raise it."""
        print("Exception")
        print("Code: " + str(ex.code))
        print("Message: " + ex.message)  # FIX: was misspelled "Mesage"
        raise ex

    def _upload(self, name):
        """Upload the local test asset ./data/<name> to cloud storage."""
        return self.storageApi.PutCreate(name, './data/' + name)

    def testGetImageBmp(self):
        """GET bmp conversion with explicit bpp/resolution."""
        try:
            name = "sample.bmp"
            self._upload(name)
            response = self.imagingApi.GetImageBmp(name, 24, 300, 300)
            self.assertEqual(response.Status, 'OK')
        except ApiException as ex:
            self._report(ex)

    def testPostImageBmp(self):
        """POST bmp conversion of a local file."""
        try:
            response = self.imagingApi.PostImageBmp(24, 300, 300, file='./data/sample.bmp')
            self.assertEqual(response.Status, 'OK')
        except ApiException as ex:
            self._report(ex)

    def testGetCropImage(self):
        """GET crop of a stored jpg, returned as png."""
        try:
            name = "aspose.jpg"
            self._upload(name)
            response = self.imagingApi.GetCropImage(name, "png", 30, 40, 100, 100)
            self.assertEqual(response.Status, 'OK')
        except ApiException as ex:
            self._report(ex)

    def testPostCropImage(self):
        """POST crop of a local jpg, returned as png."""
        try:
            response = self.imagingApi.PostCropImage("png", 30, 40, 100, 100, file='./data/aspose.jpg')
            self.assertEqual(response.Status, 'OK')
        except ApiException as ex:
            self._report(ex)

    def testGetImageFrame(self):
        """GET a single frame from a multi-frame TIFF."""
        try:
            name = "sample-multi.tif"
            self._upload(name)
            response = self.imagingApi.GetImageFrame(name, 1)
            self.assertEqual(response.Status, 'OK')
        except ApiException as ex:
            self._report(ex)

    def testGetImageFrameProperties(self):
        """GET frame properties; response must be an ImagingResponse."""
        try:
            name = "TestDemo.tif"
            self._upload(name)
            response = self.imagingApi.GetImageFrameProperties(name, 0)
            self.assertIsInstance(response, ImagingResponse.ImagingResponse)
            self.assertEqual(response.Status, 'OK')
        except ApiException as ex:
            self._report(ex)

    def testGetImageGif(self):
        """GET gif passthrough/conversion."""
        try:
            name = "sample.gif"
            self._upload(name)
            response = self.imagingApi.GetImageGif(name)
            self.assertEqual(response.Status, 'OK')
        except ApiException as ex:
            self._report(ex)

    def testPostImageGif(self):
        """POST gif conversion with default parameters."""
        try:
            # The original also defined backgroundColorIndex/colorResolution
            # but never passed them — dropped as dead code.
            response = self.imagingApi.PostImageGif(file='./data/sample.gif')
            self.assertEqual(response.Status, 'OK')
        except ApiException as ex:
            self._report(ex)

    def testGetImageJpg(self):
        """GET jpg re-encode with quality/compression options."""
        try:
            name = "aspose.jpg"
            self._upload(name)
            response = self.imagingApi.GetImageJpg(name, quality=100, compressionType="progressive")
            self.assertEqual(response.Status, 'OK')
        except ApiException as ex:
            self._report(ex)

    def testPostImageJpg(self):
        """POST jpg re-encode with quality/compression options."""
        try:
            response = self.imagingApi.PostImageJpg(file='./data/aspose.jpg', quality=100, compressionType="progressive")
            self.assertEqual(response.Status, 'OK')
        except ApiException as ex:
            self._report(ex)

    def testGetImagePng(self):
        """GET png processing with fromScratch=True."""
        try:
            name = "aspose_imaging_for_cloud.png"
            self._upload(name)
            response = self.imagingApi.GetImagePng(name, fromScratch=True)
            self.assertEqual(response.Status, 'OK')
        except ApiException as ex:
            self._report(ex)

    def testPostImagePng(self):
        """POST png processing with fromScratch=True."""
        try:
            response = self.imagingApi.PostImagePng(file='./data/aspose_imaging_for_cloud.png', fromScratch=True)
            self.assertEqual(response.Status, 'OK')
        except ApiException as ex:
            self._report(ex)

    def testGetImageProperties(self):
        """GET image properties; response must be an ImagingResponse."""
        try:
            name = "demo.tif"
            self._upload(name)
            response = self.imagingApi.GetImageProperties(name)
            self.assertIsInstance(response, ImagingResponse.ImagingResponse)
            self.assertEqual(response.Status, 'OK')
        except ApiException as ex:
            self._report(ex)

    def testGetImagePsd(self):
        """GET psd conversion with channel/compression options."""
        try:
            name = "sample.psd"
            self._upload(name)
            response = self.imagingApi.GetImagePsd(name, channelsCount=3, compressionMethod="rle")
            self.assertEqual(response.Status, 'OK')
        except ApiException as ex:
            self._report(ex)

    def testPostImagePsd(self):
        """POST psd conversion with channel/compression options."""
        try:
            response = self.imagingApi.PostImagePsd(file='./data/sample.psd', channelsCount=3, compressionMethod="rle")
            self.assertEqual(response.Status, 'OK')
        except ApiException as ex:
            self._report(ex)

    def testGetChangeImageScale(self):
        """GET rescale of a stored png to 200x200 jpg."""
        try:
            name = "aspose_imaging_for_cloud.png"
            self._upload(name)
            response = self.imagingApi.GetChangeImageScale(name, "jpg", 200, 200)
            self.assertEqual(response.Status, 'OK')
        except ApiException as ex:
            self._report(ex)

    def testPostChangeImageScale(self):
        """POST rescale of a local png to 200x200 jpg."""
        try:
            response = self.imagingApi.PostChangeImageScale("jpg", 200, 200, file='./data/aspose_imaging_for_cloud.png')
            self.assertEqual(response.Status, 'OK')
        except ApiException as ex:
            self._report(ex)

    def testGetImageRotateFlip(self):
        """GET rotate/flip of a stored jpg."""
        try:
            name = "aspose.jpg"
            self._upload(name)
            response = self.imagingApi.GetImageRotateFlip(name, "png", "Rotate180FlipX")
            self.assertEqual(response.Status, 'OK')
        except ApiException as ex:
            self._report(ex)

    def testPostImageRotateFlip(self):
        """POST rotate/flip of a local jpg."""
        try:
            response = self.imagingApi.PostImageRotateFlip("png", "Rotate180FlipX", file='./data/aspose.jpg')
            self.assertEqual(response.Status, 'OK')
        except ApiException as ex:
            self._report(ex)

    def testGetImageSaveAs(self):
        """GET format conversion (jpg -> png) of a stored image."""
        try:
            name = "aspose.jpg"
            self._upload(name)
            response = self.imagingApi.GetImageSaveAs(name, "png")
            self.assertEqual(response.Status, 'OK')
        except ApiException as ex:
            self._report(ex)

    def testPostImageSaveAs(self):
        """POST format conversion (jpg -> png) of a local image."""
        try:
            response = self.imagingApi.PostImageSaveAs("png", file='./data/aspose.jpg')
            self.assertEqual(response.Status, 'OK')
        except ApiException as ex:
            self._report(ex)

    def testGetTiffToFax(self):
        """GET TIFF-to-fax conversion."""
        try:
            name = "TestDemo.tif"
            self._upload(name)
            response = self.imagingApi.GetTiffToFax(name)
            self.assertEqual(response.Status, 'OK')
        except ApiException as ex:
            self._report(ex)

    def testPostProcessTiff(self):
        """POST TIFF re-compression (ccittfax3, 1 bit/inch units)."""
        try:
            name = "demo.tif"
            self._upload(name)
            response = self.imagingApi.PostProcessTiff(file='./data/' + name, compression="ccittfax3", resolutionUnit="inch", bitDepth=1)
            self.assertEqual(response.Status, 'OK')
        except ApiException as ex:
            self._report(ex)

    def testPostTiffAppend(self):
        """POST append of one stored TIFF onto another."""
        try:
            name = "sample.tif"
            appendFile = "TestDemo.tif"
            self._upload(name)
            self._upload(appendFile)
            response = self.imagingApi.PostTiffAppend(name, appendFile=appendFile)
            self.assertIsInstance(response, SaaSposeResponse.SaaSposeResponse)
            self.assertEqual(response.Status, 'OK')
        except ApiException as ex:
            self._report(ex)

    def testGetUpdatedImage(self):
        """GET combined resize/crop/rotate-flip update of a stored TIFF."""
        try:
            name = "TestDemo.tif"
            self._upload(name)
            # (name, format, newWidth, newHeight, x, y, rectWidth, rectHeight, rotateFlipMethod)
            response = self.imagingApi.GetUpdatedImage(name, "png", 300, 300, 96, 96, 200, 200, "")
            self.assertEqual(response.Status, 'OK')
        except ApiException as ex:
            self._report(ex)

    def testPostImageSaveAs_ImagingApi_0(self):
        """POST combined resize/crop/rotate-flip update of a local TIFF."""
        try:
            # (format, newWidth, newHeight, x, y, rectWidth, rectHeight, rotateFlipMethod)
            response = self.imagingApi.PostImageSaveAs_ImagingApi_0("png", 300, 300, 96, 96, 200, 200, "", file='./data/TestDemo.tif')
            self.assertEqual(response.Status, 'OK')
        except ApiException as ex:
            self._report(ex)
# Allow running the suite directly: ``python test_ImagingApi.py``.
if __name__ == '__main__':
    unittest.main()
| 2.421875
| 2
|
EquationModels/TaylorVortex.py
|
mroberto166/PinnsSub
| 12
|
12774541
|
from ImportFile import *
pi = math.pi
# NOTE(review): T = 10 appears unused here and differs from the t-range
# [0, 1] in `extrema_values` — confirm it is only kept for reference.
T = 10
# Constant background advection velocity (a_x, a_y) of the vortex.
a = [4, 0]
# Space-time bounding box: rows are (t, x, y) min/max for collocation sampling.
extrema_values = torch.tensor([[0, 1],
                               [-8, 8],
                               [-8, 8]])
def compute_res(network, x_f_train, space_dimensions, solid, computing_error=False):
    """PINN residual of the 2-D incompressible (inviscid) flow equations.

    ``x_f_train`` rows are (t, x, y); the network outputs (u, v, p).
    Returns the concatenation [mean pressure, x-momentum, y-momentum,
    divergence] residuals.  ``space_dimensions`` and ``solid`` are unused
    here but kept for interface compatibility with other equation models.
    """
    x_f_train.requires_grad = True
    # Three separate forward passes — one per output channel.
    u = (network(x_f_train))[:, 0].reshape(-1, )
    v = (network(x_f_train))[:, 1].reshape(-1, )
    p = (network(x_f_train))[:, 2].reshape(-1, )
    # Seed for the vector-Jacobian products: one per collocation point.
    inputs = torch.ones(x_f_train.shape[0], )
    if not computing_error and torch.cuda.is_available():
        inputs = inputs.cuda()
    # First derivatives w.r.t. (t, x, y); create_graph keeps them differentiable
    # so the residual itself can be back-propagated during training.
    grad_u = torch.autograd.grad(u, x_f_train, grad_outputs=inputs, create_graph=True)[0]
    grad_u_t = grad_u[:, 0].reshape(-1, )
    grad_u_x = grad_u[:, 1].reshape(-1, )
    grad_u_y = grad_u[:, 2].reshape(-1, )
    grad_v = torch.autograd.grad(v, x_f_train, grad_outputs=inputs, create_graph=True)[0]
    grad_v_t = grad_v[:, 0].reshape(-1, )
    grad_v_x = grad_v[:, 1].reshape(-1, )
    grad_v_y = grad_v[:, 2].reshape(-1, )
    grad_p = torch.autograd.grad(p, x_f_train, grad_outputs=inputs, create_graph=True)[0]
    grad_p_x = grad_p[:, 1].reshape(-1, )
    grad_p_y = grad_p[:, 2].reshape(-1, )
    # Momentum residuals (no viscous term) and the divergence-free constraint.
    res_u = grad_u_t + u * grad_u_x + v * grad_u_y + grad_p_x
    res_v = grad_v_t + u * grad_v_x + v * grad_v_y + grad_p_y
    res_d = grad_u_x + grad_v_y
    # Mean pressure pins the pressure gauge (p is defined up to a constant).
    mean_P = torch.mean(p).reshape(-1, )
    res = torch.cat([mean_P, res_u, res_v, res_d], 0)
    if torch.cuda.is_available():
        del inputs
        torch.cuda.empty_cache()
    return res
def exact(inputs):
    """Analytic Taylor-vortex velocity, advected with background velocity ``a``.

    ``inputs`` rows are (t, x, y); returns an (n, 2) tensor of (u, v).
    """
    t = inputs[:, 0]
    # Shift into the co-moving frame of the translating vortex.
    xs = inputs[:, 1] - a[0] * t
    ys = inputs[:, 2] - a[1] * t
    envelope = torch.exp(0.5 * (1 - xs ** 2 - ys ** 2))
    u = (-ys * envelope + a[0]).reshape(-1, 1)
    v = (xs * envelope + a[1]).reshape(-1, 1)
    return torch.cat([u, v], 1)
def convert(vector, extrema_values):
    """Affinely map unit-hypercube samples onto the ranges in *extrema_values*.

    Each column i of *vector* (values in [0, 1]) is rescaled to
    [min(extrema_values[i]), max(extrema_values[i])].  Returns a float32 tensor.
    """
    samples = np.array(vector)
    extrema = np.array(extrema_values)
    lo = extrema.min(axis=1)
    hi = extrema.max(axis=1)
    rescaled = samples * (hi - lo) + lo
    return torch.from_numpy(rescaled).type(torch.FloatTensor)
def compute_generalization_error(model, extrema, images_path=None):
    """Monte-Carlo L2 error of the model's (u, v) against the exact solution.

    Samples 100k random space-time points in *extrema*, prints and returns
    (absolute L2 error, relative L2 error).  If *images_path* is given,
    saves predicted-vs-exact scatter plots for u and v.
    """
    model.eval()
    test_inp = convert(torch.rand([100000, extrema.shape[0]]), extrema)
    Exact = exact(test_inp).detach().numpy()
    test_out = model(test_inp).detach().numpy()
    u_exact = (Exact[:, 0].reshape(-1, 1))
    u = test_out[:, 0].reshape(-1, 1)
    v_exact = (Exact[:, 1].reshape(-1, 1))
    v = test_out[:, 1].reshape(-1, 1)
    assert (v_exact.shape[1] == v.shape[1])
    assert (u_exact.shape[1] == u.shape[1])
    # Joint L2 norm over both velocity components.
    L2_test = np.sqrt(np.mean((u_exact - u) ** 2 + (v_exact - v) ** 2))
    print("Error Test:", L2_test)
    rel_L2_test = L2_test / np.sqrt(np.mean(u_exact ** 2 + v_exact ** 2))
    print("Relative Error Test:", rel_L2_test)
    if images_path is not None:
        plt.figure()
        plt.grid(True, which="both", ls=":")
        plt.scatter(u_exact, u)
        plt.xlabel(r'Exact Values')
        plt.ylabel(r'Predicted Values')
        plt.savefig(images_path + "/TV_Score_u.png", dpi=400)
    if images_path is not None:
        plt.figure()
        plt.grid(True, which="both", ls=":")
        plt.scatter(v_exact, v)
        plt.xlabel(r'Exact Values')
        plt.ylabel(r'Predicted Values')
        plt.savefig(images_path + "/TV_Score_v.png", dpi=400)
    return L2_test, rel_L2_test
def _zero_bc(n_points):
    """Zero (u, v, p) values for *n_points* boundary samples, all-periodic labels."""
    return torch.zeros(n_points, 3), ["periodic", "periodic", "periodic"]


def ub0(y):
    """Boundary values for one domain face (unused for periodic BCs)."""
    return _zero_bc(y.shape[0])


def ub1(y):
    """Boundary values for the opposite face of the first spatial coordinate."""
    return _zero_bc(y.shape[0])


def ub0y(x):
    """Boundary values for one face of the second spatial coordinate."""
    return _zero_bc(x.shape[0])


def ub1y(x):
    """Boundary values for the opposite face of the second spatial coordinate."""
    return _zero_bc(x.shape[0])


# [[faces of dim 1], [faces of dim 2]] in the order the trainer expects.
list_of_BC = [[ub0, ub1], [ub0y, ub1y]]
def u0(input):
    """Initial condition: Taylor-vortex velocity plus a zero pressure guess.

    *input* rows are (x, y); returns an (n, 3) tensor of (u, v, p) at t = 0.
    """
    x = input[:, 0]
    y = input[:, 1]
    envelope = torch.exp(0.5 * (1 - x ** 2 - y ** 2))
    u_init = (-y * envelope + a[0]).reshape(-1, 1)
    v_init = (x * envelope + a[1]).reshape(-1, 1)
    p_init = torch.zeros(x.shape[0], 1)
    return torch.cat([u_init, v_init, p_init], 1)
def plotting(model, images_path, extrema, solid):
    """Save contour plots of predicted and exact u, v and vorticity at t = 0 and t = 1.

    ``solid`` is unused here; kept for interface compatibility with other
    equation models.
    """
    # 400x400 evaluation grid over the spatial extrema (rows 1 and 2 are x, y).
    x = torch.linspace(extrema[1, 0], extrema[1, 1], 400).reshape(-1, 1)
    y = torch.linspace(extrema[2, 0], extrema[2, 1], 400).reshape(-1, 1)
    xy = torch.from_numpy(np.array([[x_i, y_i] for x_i in x for y_i in y]).reshape(x.shape[0] * y.shape[0], 2)).type(torch.FloatTensor)
    for val in [0, 1]:
        # Prepend the time column and enable grads for the vorticity computation.
        t = torch.tensor(()).new_full(size=(xy.shape[0], 1), fill_value=val)
        input_vals = torch.cat([t, xy], 1)
        input_vals.requires_grad = True
        output = model(input_vals)
        exact_solution = exact(input_vals)
        u = output[:, 0]
        v = output[:, 1]
        # Vorticity of the prediction: w = dv/dx - du/dy.
        grad_u = torch.autograd.grad(u, input_vals, grad_outputs=torch.ones(input_vals.shape[0], ), create_graph=True)[0]
        grad_u_y = grad_u[:, 2].reshape(-1, )
        grad_v = torch.autograd.grad(v, input_vals, grad_outputs=torch.ones(input_vals.shape[0], ), create_graph=True)[0]
        grad_v_x = grad_v[:, 1].reshape(-1, )
        w = -grad_u_y + grad_v_x
        w = w.reshape(x.shape[0], y.shape[0])
        w = w.detach().numpy()
        # Vorticity of the exact solution, via autograd on the analytic field.
        u_ex = exact_solution[:, 0]
        v_ex = exact_solution[:, 1]
        grad_u_ex = torch.autograd.grad(u_ex, input_vals, grad_outputs=torch.ones(input_vals.shape[0], ), create_graph=True)[0]
        grad_u_ex_y = grad_u_ex[:, 2].reshape(-1, )
        grad_v_ex = torch.autograd.grad(v_ex, input_vals, grad_outputs=torch.ones(input_vals.shape[0], ), create_graph=True)[0]
        grad_v_ex_x = grad_v_ex[:, 1].reshape(-1, )
        w_ex = -grad_u_ex_y + grad_v_ex_x
        w_ex = w_ex.reshape(x.shape[0], y.shape[0])
        w_ex = w_ex.detach().numpy()
        # Reshape the velocity fields onto the grid for contour plotting.
        u = u.reshape(x.shape[0], y.shape[0])
        u = u.detach().numpy()
        v = v.reshape(x.shape[0], y.shape[0])
        v = v.detach().numpy()
        u_ex = u_ex.reshape(x.shape[0], y.shape[0])
        u_ex = u_ex.detach().numpy()
        v_ex = v_ex.reshape(x.shape[0], y.shape[0])
        v_ex = v_ex.detach().numpy()
        # Predicted vorticity.
        plt.figure()
        plt.contourf(x.reshape(-1, ), y.reshape(-1, ), w.T, 40, cmap='Spectral')
        plt.colorbar()
        plt.xlabel(r'$x$')
        plt.ylabel(r'$y$')
        plt.title(r'$\omega(x,y)$,\quad t=' + str(val))
        plt.savefig(images_path + "/TV_Samples_w_" + str(val) + ".png", dpi=400)
        plt.close()
        # Exact vorticity.
        plt.figure()
        plt.contourf(x.reshape(-1, ), y.reshape(-1, ), w_ex.T, 40, cmap='Spectral')
        plt.colorbar()
        plt.xlabel(r'$x$')
        plt.ylabel(r'$y$')
        plt.title(r'$\omega(x,y)$,\quad t=' + str(val))
        plt.savefig(images_path + "/TV_Samples_w_ex_" + str(val) + ".png", dpi=400)
        plt.close()
        # Predicted and exact velocity components.
        plt.figure()
        plt.contourf(x.reshape(-1, ), y.reshape(-1, ), u.T, 40, cmap='Spectral')
        plt.colorbar()
        plt.savefig(images_path + "/TV_Samples_u_" + str(val) + ".png", dpi=400)
        plt.close()
        plt.figure()
        plt.contourf(x.reshape(-1, ), y.reshape(-1, ), v.T, 40, cmap='Spectral')
        plt.colorbar()
        plt.savefig(images_path + "/TV_Samples_v_" + str(val) + ".png", dpi=400)
        plt.close()
        plt.figure()
        plt.contourf(x.reshape(-1, ), y.reshape(-1, ), u_ex.T, 40, cmap='Spectral')
        plt.colorbar()
        plt.savefig(images_path + "/TV_Samples_u_ex_" + str(val) + ".png", dpi=400)
        plt.close()
        plt.figure()
        plt.contourf(x.reshape(-1, ), y.reshape(-1, ), v_ex.T, 40, cmap='Spectral')
        plt.colorbar()
        plt.savefig(images_path + "/TV_Samples_v_ex_" + str(val) + ".png", dpi=400)
        plt.close()
| 2.359375
| 2
|
tests/test_event_listener.py
|
ppd0705/supervisor-gateway
| 1
|
12774542
|
<reponame>ppd0705/supervisor-gateway<gh_stars>1-10
import asyncio
import os
import pytest
from pytest_mock import MockerFixture
from supervisor_gateway.event_listener import listener
from supervisor_gateway.event_listener import open_connection
@pytest.mark.asyncio
async def test_listener(mocker: MockerFixture):
    """End-to-end check of the supervisor event listener.

    Feeds two PROCESS_STATE event messages through pipes that stand in for
    supervisor's stdin/stdout and asserts the registered handler receives
    both parsed events.
    """
    rets = []

    def handler(event: dict):
        # Collect every event the listener dispatches.
        rets.append(event)

    listener.set_handler(handler)
    loop = asyncio.get_event_loop()
    # Header + payload lines of supervisor's event protocol; the two %s
    # slots are (state suffix, process name).
    stdin_msg_template = (
        "ver:3.0 server:lid001 serial:52611 "
        "pool:supervisor_gateway poolserial:13190 eventname:PROCESS_STATE_%s len:69\n"
        "processname:%s groupname:Counter from_state:STARTING pid:31168"
    )
    args = [
        ("AAA", "aaaaaaaaa"),
        ("BB", "bbbbbbbbb"),
    ]
    # Pipe pair emulating the listener's stdin (we write, it reads).
    read_fd, write_fd = os.pipe()
    tmp_stdin_r = open(read_fd, "r")
    tmp_stdin_w = open(write_fd, "w")
    # Second pipe: stands in for the listener's stdout — only the write end
    # is handed over; the read end is never consumed here.
    read_fd, write_fd = os.pipe()
    tmp_stdout_w = open(write_fd, "w")
    reader, writer = await open_connection(tmp_stdin_r, tmp_stdout_w)
    # Patch open_connection so listener.start() uses our pipes.
    mock = mocker.patch("supervisor_gateway.event_listener.open_connection")
    mock.return_value = (reader, writer)
    loop.create_task(listener.start())
    for process_state, process_name in args:
        msg = stdin_msg_template % (process_state, process_name)
        tmp_stdin_w.write(msg)
        tmp_stdin_w.flush()
    # Give the listener task time to parse both messages.
    await asyncio.sleep(0.5)
    listener.stop()
    assert len(rets) == 2
    for i, event in enumerate(rets):
        # Event name suffix and payload processname must round-trip.
        assert event["eventname"].rsplit("_", 1)[1] == args[i][0]
        assert event["payload"]["processname"] == args[i][1]
| 2.0625
| 2
|
commands/nitrotype/verify.py
|
adl212/Lacan-NTSport-Source-Code
| 1
|
12774543
|
<gh_stars>1-10
'''Verify your account ownership after registering!'''
from discord.ext import commands
from packages.utils import Embed, ImproperType
from packages.nitrotype import Racer, cars
import requests
import os
import json
import random, copy
from mongoclient import DBClient
from nitrotype import verify, verify_race
import aiohttp
class Command(commands.Cog):
    """Cog implementing ``n.verify``: proves a Discord user owns a Nitro Type account."""

    def __init__(self, client):
        # The bot instance, kept for cog lifetime.
        self.client = client

    async def fetch(self, session, url, method='POST', data=None):
        """aiohttp helper: POST *data* (default) or GET *url*; return the body text."""
        if method == 'POST':
            async with session.post(url, data=data) as response:
                return await response.text()
        if method == 'GET':
            async with session.get(url) as response:
                return await response.text()

    @commands.command()
    async def verify(self, ctx, type="friend"):
        """Verify account ownership via 'car', 'race' or (default) 'friend'.

        Friend flow: the registered user sends a friend request to a known
        verification account on Nitro Type; we log into that account and
        look for the pending request.
        """
        #return await ctx.send('This command is currently under maintenance. The developers will try to get it up again as soon as possible. In the meantime feel free to use `n.help` to get the other commands. Thank you for your understanding!')
        if type == 'car':
            return await verify(ctx)
        if type == 'race':
            return await verify_race(ctx)
        if type == 'friend':
            dbclient = DBClient()
            collection = dbclient.db.NT_to_discord
            dbdata = await dbclient.get_array(collection, {})
            async for elem in dbdata:
                # Snapshot the record so update_array can match the old document.
                old = copy.deepcopy(elem)
                if elem['userID'] == str(ctx.author.id):
                    if elem['verified'] == 'false':
                        username = elem['NTuser']
                        embed = Embed(':clipboard: Verify your Identity!', f'In order to verify, your ownership of **{elem["NTuser"]}**, friend me on nitrotype [here](https://www.nitrotype.com/racer/lacanverification)! \nAfter that run `n.verify` again.')
                        elem['verifyCar'] = None
                        elem['verified'] = 'in progress'
                        dbclient = DBClient()
                        collection = dbclient.db.NT_to_discord
                        await dbclient.update_array(collection, old, elem)
                        return await embed.send(ctx)
                    if elem['verified'] == 'in progress':
                        async with aiohttp.ClientSession() as session:
                            # Log into the verification account, then check its
                            # pending friend requests for the claimed NT user.
                            await self.fetch(session, 'https://www.nitrotype.com/api/login', data={'username': os.getenv('verification_username'), 'password': os.getenv('verification_password')})
                            friends = await self.fetch(session, 'https://www.nitrotype.com/api/friend-requests', method='GET')
                            friends = json.loads(friends)
                            for friend in friends['data']['requests']:
                                if friend['username'] == elem['NTuser']:
                                    break
                            else:
                                embed = Embed(':warning: Nearly there!', f'Nitro Type user **{elem["NTuser"]}** did not friend request me yet. In order to verify your ownership for **{elem["NTuser"]}**, click [here](https://www.nitrotype.com/racer/lacanverification) and friend request me. \nAfter that make sure to run `n.verify` again.')
                                return await embed.send(ctx)
                            elem['verified'] = 'true'
                            dbclient = DBClient()
                            await dbclient.update_array(collection, old, elem)
                            embed = Embed('<a:Check:797009550003666955> Success', 'You\'ve been verified! In case this is a premium 💠 server do `n.update` to update your roles.')
                            return await embed.send(ctx)
                    if elem['verified'] == 'true':
                        embed = Embed('Error!', 'You are already verified :rofl:', 'joy')
                        return await embed.send(ctx)
            else:
                # NOTE(review): reconstructed as a for/else — runs only when
                # no record matched the author (all matching branches return).
                # Confirm against the original indentation.
                embed = Embed('Error!', 'You have not registered yet. Make sure to run `n.register <username>`', 'warning')
                return await embed.send(ctx)
def setup(client):
    """discord.py extension hook: register this cog on the bot."""
    client.add_cog(Command(client))
| 2.65625
| 3
|
src/view/pair_plots.py
|
sand-ci/ps-dash
| 0
|
12774544
|
import urllib.parse as urlparse
from urllib.parse import parse_qs
import utils.helpers as hp
import pandas as pd
import model.queries as qrs
import view.templates as tmpl
import numpy as np
import plotly.graph_objects as go
import plotly as py
import plotly.express as px
from plotly.offline import download_plotlyjs, init_notebook_mode, plot, iplot
import dash_table
import dash
import dash_core_components as dcc
import dash_bootstrap_components as dbc
import dash_html_components as html
from view.problematic_pairs import ProblematicPairsPage
from model.DataLoader import GeneralDataLoader
class PairPlotsPage():
    """Dash page that plots measurement time series for one src/dest host pair.

    Renders a three-axis scatter (raw values, sqrt, log) for both directions
    of the pair, plus header "cards" summarising the issues already detected
    for it by ProblematicPairsPage / GeneralDataLoader.
    """

    # Maps Elasticsearch index names to human-readable measure names used in titles.
    indx_dict = {'ps_packetloss': 'Packet loss', 'ps_owd': 'One-way delay',
                 'ps_retransmits': 'Retransmits', 'ps_throughput': 'Throughput'}

    def __init__(self):
        # parent supplies the per-pair problem table; root_parent the date range
        # and the host/IP/site metadata table (all_df)
        self.parent = ProblematicPairsPage()
        self.root_parent = GeneralDataLoader()

    def getData(self, src, dest):
        """Query all values of the current index (self._idx) for src -> dest.

        Returns a DataFrame with a 'value' column; when rows exist, derived
        'log_value' and 'sqrt' columns are added for the secondary plot axes.
        NOTE(review): assumes self._idx was set by specificPairLayout first.
        """
        time_list = hp.GetTimeRanges(self.root_parent.dateFrom, self.root_parent.dateTo)
        df = pd.DataFrame(qrs.queryAllValues(self._idx, src, dest, time_list))
        df.rename(columns={hp.getValueField(self._idx): 'value'}, inplace=True)
        if len(df) > 0:
            # map 0 to NaN first so np.log never produces -inf
            df['log_value'] = np.log(df['value'].replace(0, np.nan))
            df['sqrt'] = df['value']**(1/2)
        return df

    def buildGraph(self, df, host_src, host_dest):
        """Build the scatter figure for one direction of the pair.

        Three traces share the x axis: raw 'value' on the left axis, 'sqrt'
        and 'log' on right-hand overlay axes (hidden until toggled via the
        legend). When df is empty an annotated "No data found" figure is
        returned instead.
        """
        fig = go.Figure()
        title = f'{self.indx_dict[self._idx]}: {host_src} ⇒ {host_dest}'
        # break long titles onto two lines so they fit the plot width
        title = title if len(title)<80 else "<br>".join([f'{self.indx_dict[self._idx]}: ', f'{host_src} ⇒ {host_dest}'])
        if len(df) > 0:
            df = df.sort_values('timestamp', ascending=False)
            df['dt'] = pd.to_datetime(df['timestamp'], unit='ms')

            # raw measurements on the primary (left) axis
            fig.add_trace(go.Scatter(x=df['dt'], y=df['value'],
                                mode='markers',
                                marker=dict(
                                    color='navy'),
                                name='measures',
                                yaxis="y1"),
                          )

            # sqrt-transformed values, legend-toggled, right-hand axis
            fig.add_trace(go.Scatter(x=df['dt'], y=df['sqrt'],
                                mode='markers',
                                marker=dict(
                                    color='#F03A47'),
                                name='sqrt',
                                yaxis="y2",
                                visible='legendonly'),
                          )

            # log-transformed values, legend-toggled, second right-hand axis
            fig.add_trace(go.Scatter(x=df['dt'], y=df['log_value'],
                                mode='markers',
                                marker=dict(
                                    color='#00BCD4'),
                                name='log',
                                yaxis="y3",
                                visible='legendonly'),
                          )

            # three y-axes: y1 anchored left, y2/y3 overlay on the right;
            # axis colors match their trace colors
            fig.update_layout(
                xaxis=dict(
                    domain=[0.05, 0.9]
                ),
                yaxis1=dict(
                    title="measures",
                    anchor="free",
                    side="left",
                    position=0.05,
                    titlefont=dict(
                        color="navy"
                    ),
                    tickfont=dict(
                        color="navy"
                    )
                ),
                yaxis2=dict(
                    title="sqrt",
                    anchor="x",
                    overlaying="y",
                    side="right",
                    titlefont=dict(
                        color="#F03A47"
                    ),
                    tickfont=dict(
                        color="#F03A47"
                    ),
                ),
                yaxis3=dict(
                    title="log",
                    anchor="free",
                    overlaying="y",
                    side="right",
                    position=0.98,
                    titlefont=dict(
                        color="#00BCD4"
                    ),
                    tickfont=dict(
                        color="#00BCD4"
                    ),
                )
            )

            fig.update_layout(title=title,
                              template = 'plotly_white')
        else:
            # empty result set: plain figure with a centered "No data found" note
            fig.update_layout(title=title,
                              template = 'plotly_white',
                              annotations = [
                                    {
                                        "text": "No data found",
                                        "xref": "paper",
                                        "yref": "paper",
                                        "showarrow": False,
                                        "font": {
                                            "size": 18
                                        }
                                    }
                                ])

        return fig

    def defaultLayout(self):
        """Build the selector layout: index/source/destination dropdowns,
        a Plot button, and counters for pairs/sources/destinations."""
        return html.Div([
                dbc.Row([
                    dbc.Col([
                        dbc.Row([
                            dbc.Col(html.Label('Index:'), width=3, className='dd-fields'),
                            dbc.Col(html.Label('Source:'), width=3, className='dd-fields'),
                            dbc.Col(html.Label('Destination:'), width=3, className='dd-fields'),
                            dbc.Col(width=1),
                        ], no_gutters=True, justify="center", className='dd-field-names'),
                        dbc.Row([
                            dbc.Col(dcc.Dropdown(
                                id='idx-dropdown',
                                options=[{'label':idx, 'value':idx} for idx in hp.INDECES],
                            ), width=3, className='dd-fields'),
                            # src/dest dropdowns are populated by callbacks once an index is chosen
                            dbc.Col(dcc.Dropdown(
                                id='src-dropdown',
                                placeholder="First select an index",
                            ), width=3, className='dd-fields'),
                            dbc.Col(dcc.Dropdown(
                                id='dest-dropdown',
                                placeholder="First select an index",
                            ), width=3, className='dd-fields'),
                            dbc.Col(dbc.Button('Plot'.upper(),
                                    id={
                                        'type': 'plot',
                                        'index': 'default'
                                    }, className='plot-input-button', n_clicks=0), width=1),
                        ], align="center", no_gutters=True, justify="center"),
                        dbc.Row([
                            dbc.Col(html.P(id='total-pairs', className='dd-count'), width=3, className='dd-fields'),
                            dbc.Col(html.P(id='total-srcs', className='dd-count'), width=3, className='dd-fields'),
                            dbc.Col(html.P(id='total-dests', className='dd-count'), width=4, className='dd-fields'),
                        ], no_gutters=True, justify="center"),
                    ], width=12, className='fields-wrapper'),
                ], justify="center", className='dd-container boxwithshadow'),
            ])

    def phraseProblem(self, ptype, idx):
        """Turn a (problem type, index) pair into a human-readable sentence.

        NOTE(review): if ptype is none of the three known types, `phrase` is
        never assigned and the method raises NameError — confirm callers only
        pass known types.
        """
        if ptype == 'high_sigma' or ptype == 'all_packets_lost':
            phrase = 'overall'
        elif ptype == 'has_bursts':
            phrase = 'periods of'

        # NOTE(review): "throughout" below looks like a typo for "throughput"
        # in the user-facing message text — confirm before changing.
        if idx == 'ps_throughput':
            return (f'The pair shows {phrase} low throughout')
        if idx == 'ps_retransmits':
            return (f'The pair shows {phrase} high number of retransmitted packages')
        if idx == 'ps_owd':
            return (f'The pair shows {phrase} high latency')
        if idx == 'ps_packetloss':
            return (f'The pair shows {phrase} high packet loss')

    def createCards(self):
        """Build the three header cards for the pair page.

        Returns a tuple of three dbc.Col elements: (1) the issue text for the
        currently selected index, (2) SOURCE/DESTINATION host info, and
        (3) issues found for the same pair under the other indices.
        """
        other_issues_div = html.Div('None', className="card-text")
        # only build issue text when this exact src/dest pair appears in the problem table
        if self.parent.problems[['src', 'dest']].isin({'src': [self._src], 'dest': [self._dest]}).any().all():
            data = self.parent.problems[(self.parent.problems['src']==self._src) &
                                (self.parent.problems['dest']==self._dest)].set_index('idx').to_dict('index')
            # problem-flag columns that indicate a real issue when set to 1
            watch4 = ['high_sigma', 'all_packets_lost', 'has_bursts']
            '''Store the sentences in a dictionary'''
            ddict = {}
            no_issues = []
            for idx in data:
                for k, v in data[idx].items():
                    if k in watch4 and v == 1:
                        ddict[idx] = {'text':self.phraseProblem(k, idx), 'avg':data[idx]['value']}
                # fall back to a "None found" sentence for indices with no flagged problem
                if idx not in ddict:
                    ddict[idx] = {'text':f'{self.parent.indx_dict[idx]}: None found', 'avg':data[idx]['value']}

            '''Search for other problems for the same pair and show them. Otherwise return None'''
            other_indeces = [item for item in ddict.keys() if item != self._idx]
            if len(other_indeces) > 0:
                other_issues_div = html.Div([
                        html.Div([
                            html.Div(ddict[item]['text'], className="card-text"),
                            html.H3(f"{int(round(ddict[item]['avg'], 0))} {hp.getValueUnit(item)}", className="card-text")
                        ]) for item in other_indeces
                    ])

            itext = html.Div(ddict[self._idx]['text'], className="card-text")
            ival = html.H2(f"{int(round(ddict[self._idx]['avg'], 0))} {hp.getValueUnit(self._idx)}", className="card-text")
        else:
            itext = html.Div('None found', className="card-text")
            ival = html.Div(className="card-text")

        # TOFIX: the case when dest -> src exists in problems is not covered
        # look up host/site metadata for both endpoints by IP
        src = self.root_parent.all_df[(self.root_parent.all_df['ip']==self._src)]
        dest = self.root_parent.all_df[(self.root_parent.all_df['ip']==self._dest)]

        return dbc.Col(
                    html.Div([
                        html.H2('Issue for this type of measure', className="card-title"),
                        itext,
                        ival
                    ], className='issue ppage-header'), width=3), dbc.Col(
                    dbc.Row([
                        dbc.Col([
                            html.Div([
                                html.H2('SOURCE', className="card-title"),
                                html.Div(src['host'].values, className="card-text"),
                                html.Div(src['ip'].values, className="card-text"),
                                html.Div(src['site'].values, className="card-text")
                            ], className='src-issue ppage-header'),
                        ], width=6, className='issue-wrapper src'),dbc.Col(
                            html.Div([
                                html.H2('DESTINATION', className="card-title"),
                                html.Div(dest['host'].values, className="card-text"),
                                html.Div(dest['ip'].values, className="card-text"),
                                html.Div(dest['site'].values, className="card-text")
                            ], className='dest-issue ppage-header'), width=6, className='issue-wrapper dest')
                    ], justify="center", align="center", className='issue-wrapper')
                , width=6), dbc.Col(
                    html.Div([
                        html.H2('Other issues for the same pair', className="card-title"),
                        other_issues_div
                    ], className='other-issue ppage-header')
                , width=3)

    def specificPairLayout(self, url):
        """Parse src/dest/idx from the page URL query string and build the layout.

        Plots both directions (src -> dest and dest -> src) below the issue cards.
        """
        data = parse_qs(urlparse.urlparse(url).query)
        # self._src and self._dest are the original values
        self._src = data['src'][0]
        self._dest = data['dest'][0]
        self._idx = data['idx'][0]
        host_src = data['src_host'][0]
        host_dest = data['dest_host'][0]
        pair = self.getData(self._src, self._dest)
        reversed_pair = self.getData(self._dest, self._src)

        return html.Div([
                dbc.Row(
                    self.createCards(), className='issue-header boxwithshadow', no_gutters=True, justify='center'
                ),
                dbc.Row([
                    dbc.Col(
                        html.Div([
                            dcc.Graph(figure=self.buildGraph(pair, host_src, host_dest))
                        ], className='pair-plot boxwithshadow')
                    ),
                    dbc.Col(
                        html.Div([
                            dcc.Graph(figure=self.buildGraph(reversed_pair, host_dest, host_src))
                        ], className='pair-plot boxwithshadow')
                    )
                ], className='page-cont')
            ])
| 2.203125
| 2
|
Solutions/0012.intToRoman.py
|
lyhshang/LeetCode-Solutions
| 0
|
12774545
|
class Solution:
    def intToRoman(self, num: int) -> str:
        """Convert an integer in [0, 3999] to its Roman numeral string.

        Greedy approach: walk the value/symbol table from largest to
        smallest, emitting each symbol as many times as it divides into
        the remainder. Subtractive forms (CM, XC, IV, ...) are entries in
        the table, so no special-casing of 4s and 9s is needed.
        """
        values = (1000, 900, 500, 400, 100, 90, 50, 40, 10, 9, 5, 4, 1)
        symbols = ('M', 'CM', 'D', 'CD', 'C', 'XC', 'L', 'XL', 'X', 'IX', 'V', 'IV', 'I')
        parts = []
        for value, symbol in zip(values, symbols):
            count, num = divmod(num, value)
            if count:
                parts.append(symbol * count)
        return ''.join(parts)
if __name__ == '__main__':
    # Quick manual check: each result is printed next to its expected numeral.
    sol = Solution()
    print(
        sol.intToRoman(3), "III",
        sol.intToRoman(4), "IV",
        sol.intToRoman(9), "IX",
        sol.intToRoman(58), "LVIII",
        sol.intToRoman(1994), "MCMXCIV",
    )
| 3.203125
| 3
|
md5_util.py
|
weiwei11/wind
| 0
|
12774546
|
<reponame>weiwei11/wind<filename>md5_util.py
# Author: weiwei
import os
import glob
from hashlib import md5
def generate_str_md5(s: str, encoding='utf-8'):
    """
    Generate md5 hex digest for a str object
    :param s: str
    :param encoding: encoding used to turn s into bytes, default is 'utf-8'
    :return: md5 of str
    >>> s = 'abcdefghijklmnopqrstuvwxyz'
    >>> generate_str_md5(s, 'utf-8')
    'c3fcd3d76192e4007dfb496cca67e13b'
    """
    # Bug fix: the original hard-coded 'utf-8' and silently ignored *encoding*.
    m = md5(s.encode(encoding))
    str_md5 = m.hexdigest()
    return str_md5
def generate_file_md5(filename):
    """
    Generate md5 hex digest for a file's contents
    :param filename: file path
    :return: md5 of file
    >>> f = './LICENSE'
    >>> generate_file_md5(f)
    '08c536e577c5736f6ca90dc4d5bd7a26'
    """
    # Fix: use a context manager so the file handle is always closed
    # (the original leaked the handle returned by open()).
    with open(filename, 'rb') as f:
        m = md5(f.read())
    file_md5 = m.hexdigest()
    return file_md5
def generate_file_status_md5(filename, mode='simple'):
    """
    Generate md5 for file status information (size/mtime or full os.stat).

    :param filename: file path
    :param mode: 'simple' (hash size + mtime only) or 'all' (hash full stat)
    :return: md5 of file status
    :raises FileNotFoundError: if *filename* does not exist
    :raises ValueError: if *mode* is neither 'simple' nor 'all'
    >>> generate_file_status_md5('./LICENSE')
    'a94f91fd96b9f8e666964dcc9f0f52e4'
    >>> generate_file_status_md5('./LICENSE', 'all')
    '9146c0b3761e180cf24de4e200972756'
    """
    if not os.path.exists(filename):
        # Bug fix: the original raised FileExistsError, which means the opposite
        # ("file already exists"); a missing file is FileNotFoundError.
        # Both subclass OSError, so callers catching OSError are unaffected.
        raise FileNotFoundError('{} not exist!'.format(filename))

    file_info = os.stat(filename)
    if mode == 'simple':
        # size + mtime is cheap and changes whenever the content changes
        status_md5 = generate_str_md5(f'{file_info.st_size}{file_info.st_mtime}')
    elif mode == 'all':
        status_md5 = generate_str_md5(str(file_info))
    else:
        raise ValueError('The mode must be \'simple\' or \'all\'')
    return status_md5
def generate_files_status_md5(filename_list, mode='simple'):
    """
    Generate a status md5 for each file in *filename_list*.

    :param filename_list: iterable of file paths
    :param mode: 'simple' or 'all', forwarded to generate_file_status_md5
    :return: list of md5 hex strings, one per file, in input order
    """
    return [generate_file_status_md5(name, mode) for name in filename_list]
def save_md5_sum_file(filename, md5_str_list, md5_name_list):
    """
    Write an md5 checksum file: one '<md5> <name>' entry per line.

    :param filename: path of the md5 sum file to write
    :param md5_str_list: list of md5 hex strings
    :param md5_name_list: list of names matching md5_str_list by position
    :return: None
    """
    lines = [f'{md5_str} {md5_name}\n'
             for md5_str, md5_name in zip(md5_str_list, md5_name_list)]
    with open(filename, 'w') as f:
        f.writelines(lines)
def read_md5_sum_file(filename):
    """
    Read an md5 checksum file written by save_md5_sum_file.

    :param filename: path of the md5 sum file
    :return: (md5_str_list, md5_name_list) — parallel lists in file order
    """
    md5_str_list = []
    md5_name_list = []
    with open(filename, 'r') as f:
        for line in f.readlines():
            fields = line.split()
            md5_str_list.append(fields[0])
            md5_name_list.append(fields[1])
    return md5_str_list, md5_name_list
if __name__ == '__main__':
    # Run the doctests embedded in the functions above.
    import doctest
    doctest.testmod()
| 3.015625
| 3
|
python/app.py
|
Software-Engineering-Group-4-Maamy/chat-bot
| 1
|
12774547
|
from tkinter import *
from chatbot import Botler
# UI theme constants shared by all widgets in the chat window.
BG_COLOR = "#272727"
TEXT_COLOR = "#FAFAFA"
FONT = "Helvetica 14"
FONT_BOLD = "Helvetica 13 bold"
class ChatApplication:
    """Tkinter chat window that relays user messages to the Botler chatbot."""
    def __init__(self):
        """Generate the window GUI and initialize the chatbot."""
        self._init_window()
        self.chat = Botler()

    def _init_window(self):
        """Initialize the window and all widgets (history, entry box, send button)."""
        self.window = Tk()
        # Add title to window
        self.window.title("Botler the Butler")
        self.window.resizable(width=False, height=False)
        self.window.configure(width=420, height=720, bg=BG_COLOR)

        # text widget: read-only conversation history (re-enabled only while inserting)
        self.text_widget = Text(self.window, width=20, height=2, bg=BG_COLOR, fg=TEXT_COLOR, font=FONT, padx=5, pady=5)
        self.text_widget.place(relheight=0.9, relwidth=1)
        self.text_widget.configure(cursor="arrow", state=DISABLED)

        # scroll bar attached to the history widget
        scrollbar = Scrollbar(self.text_widget)
        scrollbar.place(relheight=1, relx=0.974)
        scrollbar.configure(command=self.text_widget.yview)

        # bottom label: container for the entry box and send button
        label = Label(self.window, bg=BG_COLOR, height=80)
        label.place(relwidth=1, rely=0.9)

        # message entry box; pressing <Return> submits the message
        self.msg_entry = Entry(label, bg=BG_COLOR, fg=TEXT_COLOR, font=FONT)
        self.msg_entry.place(relwidth=0.74, relheight=0.04, rely=0.008, relx=0.011)
        self.msg_entry.focus()
        self.msg_entry.bind("<Return>", self._on_enter_pressed)

        # send button shares the <Return> handler; the event arg is unused, hence None
        send_button = Button(label, text="Send", font=FONT_BOLD, width=20, bg=BG_COLOR, command=lambda: self._on_enter_pressed(None))
        send_button.place(relx=0.77, rely=0.008, relheight=0.04, relwidth=0.22)

    def run(self):
        """Run the Tk main event loop (blocks until the window is closed)."""
        self.window.mainloop()

    def _on_enter_pressed(self, event):
        """Handle message submission: echo the user's text, then the bot's reply."""
        msg = self.msg_entry.get()
        if not msg:
            return
        self._insert_message(msg, "You")
        # the chatbot is queried with lower-cased input (case-insensitive matching)
        response = self.chat.generate_response(msg.lower())
        self._insert_message(response, self.chat.name)

    def _insert_message(self, msg, sender):
        """Append '<sender>: <msg>' to the history and clear the entry box."""
        if not msg:
            return
        self.msg_entry.delete(0, END)
        msg1 = f"{sender}: {msg}\n\n"
        # temporarily unlock the read-only history widget to insert, then lock again
        self.text_widget.configure(state=NORMAL)
        self.text_widget.insert(END, msg1)
        self.text_widget.configure(state=DISABLED)
        self.text_widget.see(END)
| 3.65625
| 4
|
derobertis_project_logo/project_logo.py
|
nickderobertis/derobertis-project-logo
| 0
|
12774548
|
<gh_stars>0
import os
from derobertis_project_logo.logo import Logo
class ProjectLogo:
    """Pairs a project name with its Logo and renders an RST section for it."""

    def __init__(self, name: str, logo: Logo):
        self.name = name
        self.logo = logo

    def rst(self, images_folder: str, width: float = 700):
        """Return an RST snippet titling the project and embedding its SVG logo.

        The image is expected at '<images_folder>/<name>.svg'.
        """
        image_path = os.path.join(images_folder, f'{self.name}.svg')
        template = f"""
{self.name} Logo
============================================================================
.. image:: {image_path}
    :width: {width}
    :alt: Logo for {self.name}
"""
        return template.strip()
| 2.5625
| 3
|
Coursera/Python_IT_Google/T01/testtest.py
|
brianshen1990/KeepLearning
| 4
|
12774549
|
<reponame>brianshen1990/KeepLearning
#!/usr/bin/env python3
import os
import sys
import subprocess
BASEPATH = "/home/student-04-59327def21d0"
# Rename every file listed in the input file (first CLI argument), replacing
# the username "jane" with "jdoe" in each path. Each line of the list file is
# a path relative to BASEPATH.
with open(sys.argv[1]) as f:
    # Idiom fix: iterate the file directly instead of readlines(); behavior is identical.
    for line in f:
        old_file = line.strip()
        new_file = old_file.replace("jane", "jdoe")
        # paths passed as separate argv entries — no shell string, no injection risk
        subprocess.run(["mv", BASEPATH + old_file, BASEPATH + new_file])
# Fix: removed the redundant f.close() — the with-statement already closes the file.
| 3
| 3
|
ui/pypesvds/controllers/index.py
|
onfire73/pypeskg
| 117
|
12774550
|
import logging
from pylons import request, response, session, tmpl_context as c
from pylons.controllers.util import abort
# added for auth
from authkit.authorize.pylons_adaptors import authorize
from authkit.permissions import RemoteUser, ValidAuthKitUser, UserIn
from pypesvds.lib.base import BaseController, render
log = logging.getLogger(__name__)
class IndexController(BaseController):
    """Serves the application's main page; the index view requires a valid AuthKit user."""

    @authorize(ValidAuthKitUser())
    def index(self):
        """Render the main application page (authenticated users only)."""
        # Return a rendered template
        #return render('/index.mako')
        # or, return a response
        return render('/pypesvds.mako')

    def signout(self):
        """Render the sign-in page after sign-out (no authorization required)."""
        return render('/signin.html')
| 2.125
| 2
|