text stringlengths 8 6.05M |
|---|
# pylint: disable = C0103, C0111, C0301, R0913, R0903
import tensorflow as tf
import numpy as np
# from tensorflow.python.framework import ops
from scipy.ndimage.interpolation import rotate
def py_func(func, inp, Tout, stateful=True, name=None, grad=None):
    """Wrap ``tf.py_func`` so a custom Python gradient can be attached.

    Registers ``grad`` under a fresh random name and rewrites the graph's
    gradient map so the resulting ``PyFunc`` op uses it.  (TF1-era API:
    ``tf.py_func``/``get_default_graph`` — confirm the runtime TF version.)

    :param func: Python callable executed by the op.
    :param inp: list of input tensors.
    :param Tout: list of output dtypes.
    :param grad: gradient function with signature ``(op, grad) -> grads``.
    """
    # Need to generate a unique name to avoid duplicates:
    rnd_name = 'PyFuncGrad' + str(np.random.randint(0, 1E+8))
    tf.RegisterGradient(rnd_name)(grad)  # see _MySquareGrad for grad example
    g = tf.get_default_graph()
    # Any PyFunc created inside this context resolves to our gradient.
    with g.gradient_override_map({"PyFunc": rnd_name}):
        return tf.py_func(func, inp, Tout, stateful=stateful, name=name)
def image_rotate(images, angles):
    """Rotate every image in a batch by its own angle.

    :param images: array of shape (batch, H, W[, C]).
    :param angles: per-image rotation angles in radians.
    :return: array of the same shape, nearest-neighbour rotated, with
        out-of-frame pixels filled with 0.
    """
    rotated = np.empty_like(images)
    to_degrees = 180.0 / np.pi
    for idx in range(images.shape[0]):
        # scipy works in degrees; keep nearest-neighbour (order=0) so no
        # new pixel values are invented.
        rotated[idx] = rotate(images[idx], angles[idx] * to_degrees,
                              axes=(1, 0), reshape=False, order=0,
                              mode='constant', cval=0.0, prefilter=False)
    return rotated
def image_rotate_grad(op, grad):
    """Gradient of ``image_rotate``: rotate incoming gradients back by -angle.

    :param op: the forward PyFunc op; inputs are (images, angles).
    :param grad: gradient flowing in from downstream.
    :return: (gradient w.r.t. images, None) — no gradient flows to angles.
    """
    images = op.inputs[0]  # the first argument (normally you need those to calculate the gradient, like the gradient of x^2 is 2x. )
    angles = op.inputs[1]  # the second argument
    # py_func loses static shape info; restore it from the forward input.
    grad_reshaped = tf.reshape(grad, images.get_shape())
    # tf.contrib is TF1-only — NOTE(review): confirm the installed TF version.
    return tf.contrib.image.rotate(grad_reshaped, -angles), None
def tf_image_rotate(images, angles, name=None):
    """Differentiable per-image rotation op built on the scipy forward pass.

    :param images: float32 image batch tensor.
    :param angles: per-image angles in radians.
    :return: the rotated image tensor (first and only output of the py_func).
    """
    with tf.name_scope(name, "image_rotate", [images, angles]) as name:
        z = py_func(image_rotate,
                    [images, angles],
                    [tf.float32],
                    name=name,
                    grad=image_rotate_grad)  # <-- here's the call to the gradient
    return z[0]
|
#!/usr/bin/env python
from PIL import Image
from pilkit.processors import ResizeToFit
from flask import Flask, request, send_from_directory, send_file
import os
from tempfile import TemporaryFile, NamedTemporaryFile
from zipfile import ZipFile
import random, string
from pprint import pprint
from werkzeug import secure_filename
import io
import shutil
# Flask application and the directory this module lives in; `dir` is used as
# the root for the per-request temp working directories below.
app = Flask(__name__)
dir = os.path.dirname(__file__)  # NOTE: shadows the builtin `dir`
def image_sizes(min, max, steps):
    """Return `steps` integer widths evenly spaced from min to max inclusive.

    Parameter names shadow the builtins ``min``/``max``; kept for backward
    compatibility with existing callers.

    :param min: smallest width (coerced to float).
    :param max: largest width (coerced to float).
    :param steps: number of sizes to produce (coerced to int).
    :return: list of ints, ascending, length ``steps``.
    """
    max = float(max)
    min = float(min)
    steps = int(steps)
    if steps <= 1:
        # Guard: the original divided by (steps - 1) and raised
        # ZeroDivisionError for a single step.
        return [int(min)]
    step_size = (max - min) / (steps - 1)
    return [int(step * step_size + min) for step in range(steps)]
def img_src_formatter(info):
    """
    Takes a tuple ('name', width) and generates
    the srcset list item for a responsive image tag.
    """
    url, width = info
    return "{} {}w".format(url, width)
def take_closest(num, collection):
    """Return the element of `collection` nearest to `num`.

    Thanks for the advice stack overflow:
    http://stackoverflow.com/questions/12141150/from-list-of-integers-get-number-closest-to-a-given-value
    """
    def distance(candidate):
        return abs(candidate - num)
    return min(collection, key=distance)
def zip_from_image(file, sizes, quality=75, default_size=1600, alt_text="YOU MUST ENTER ALT TEXT", image_tag_path="images/"):
    """Build a zip with the original upload, resized variants and an HTML
    ``<img srcset=...>`` snippet; return the zip's filesystem path.

    :param file: uploaded file object (werkzeug FileStorage-like, has
        ``.filename`` and ``.save()``).
    :param sizes: iterable of target widths (pixels).
    :param quality: JPEG quality for the resized variants.
    :param default_size: preferred width used for the plain ``src`` fallback.
    :return: path to the zip inside a fresh temp/<random>/ directory; the
        caller is responsible for deleting that directory.
    """
    # string.letters was removed in Python 3; ascii_letters is the portable name.
    rand = ''.join(random.sample(string.ascii_letters, 15))
    os.mkdir(os.path.join(dir, 'temp', rand))
    file_path, file_name = os.path.split(file.filename)
    base, ext = os.path.splitext(file_name)
    base = base.lower()
    ext = ext.lower()
    image_name = "{}_{}{}".format(base, 'orig', ext)
    image_path = os.path.join(dir, 'temp', rand, image_name)
    file.save(image_path)
    zip_name = "{}.zip".format(base).lower()
    zip_path = os.path.join(dir, 'temp', rand, zip_name)
    image = Image.open(image_path)
    img_srcset_tuples = []
    with ZipFile(zip_path, 'w') as zipfile:
        # Archive the untouched original, then delete the loose copy.
        zipfile.write(image_path, image_name)
        os.remove(image_path)
        for size in sizes:
            image_name = "{}_{}{}".format(base, size, ext)
            img_srcset_tuples.append(("{}{}".format(image_tag_path, image_name), size))
            image_path = os.path.join(dir, 'temp', rand, image_name)
            img = ResizeToFit(width=size, upscale=True).process(image)
            img.save(image_path, progressive=True, exif="", optimize=True,
                     quality=quality, icc_profile=img.info.get('icc_profile'))
            img.close()
            zipfile.write(image_path, image_name)
            os.remove(image_path)
        # Build the responsive <img> tag and ship it alongside the images.
        img_srcset_strings = map(img_src_formatter, img_srcset_tuples)
        best_size = take_closest(default_size, sizes)
        img_src_string = img_srcset_tuples[sizes.index(best_size)][0]
        img_tag_html = '<img srcset="{}" sizes="(min-width: 1px) 100vw, 100vw" src="{}" alt="{}">'.format(', '.join(img_srcset_strings), img_src_string, alt_text)
        img_tag_html_file_name = os.path.join(dir, 'temp', rand, 'html.txt')
        # `with` guarantees the file is flushed/closed before zipping it.
        with open(img_tag_html_file_name, 'w') as img_tag_html_file:
            img_tag_html_file.write(img_tag_html)
        zipfile.write(img_tag_html_file_name, 'html.txt')
    return zip_path
@app.route("/")
def index():
    """Serve the static upload form."""
    return send_file('static/index.html')
@app.route("/zip", methods = ['POST',])
def make_zip_file():
    """Handle the upload form: resize the photo to the requested size range,
    zip the results, stream the zip back and clean up the temp directory."""
    f = request.files['photo']
    min_size = float(request.form['minSize'])
    max_size = float(request.form['maxSize'])
    size_steps = int(request.form['sizeSteps'])
    quality = int(request.form['quality'])
    sizes = image_sizes(min_size, max_size, size_steps)
    sizes.sort(reverse=True)
    zip_path = zip_from_image(f, sizes, quality=quality)
    file_path, file_name = os.path.split(zip_path)
    # Zip archives are binary: text mode ('r') corrupts the bytes and fails
    # outright on Python 3, so read with 'rb'. Buffer the whole archive in
    # memory so the temp directory can be removed before responding.
    with open(zip_path, 'rb') as zipr:
        payload = io.BytesIO(zipr.read())
    shutil.rmtree(file_path)
    return send_file(payload, as_attachment=True, attachment_filename=file_name)
# Debug stays off when imported (e.g. under a WSGI server); it is enabled
# only when this module is run directly.
app.debug = False
if __name__ == "__main__":
    app.debug = True
    app.run()
|
# -*- coding: utf-8 -*-
#!/usr/bin/env python
import time
import json
import logging
import homie
from modules.homiedevice import HomieDevice
from modules.mysql import db
# Module-wide logging at DEBUG so zone decisions below are traceable.
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)
class SetMixin:
    """Shared MQTT "set" handling: parse an incoming payload, store it as
    ``self._<param>`` and echo the new value back on the Homie property."""

    def _set(self, param, type, msg, node=None):
        """Parse ``msg.payload`` as `type` ('float' or 'bool'), store and
        republish it.

        :param param: property name; the value is stored as ``_<param>``.
        :param type: 'float' or 'bool' (shadows the builtin; kept for
            backward compatibility).
        :param msg: MQTT message; payload is UTF-8 bytes.
        :param node: optional node to publish on instead of ``self``.
        :return: the parsed value, or None on unknown type / parse failure.
        """
        val = None
        payload = msg.payload.decode("UTF-8").lower()
        logger.info(
            "{id} {param}: {payload}".format(id=self._id, param=param, payload=payload)
        )
        try:
            if type == "float":
                val = float(payload)
            elif type == "bool":
                val = payload == "1"
            else:
                # The original fell through with `val` unbound and crashed on
                # the setattr below; bail out explicitly instead.
                logger.error("Unknown payload type {type}".format(type=type))
                return None
            setattr(self, "_" + param, val)
            # Booleans are echoed as 1/0; everything else echoes the raw text.
            if type == "bool":
                ret = 1 if val else 0
            else:
                ret = payload
            if node:
                node.setProperty(param).send(ret)
            else:
                self.setProperty(param).send(ret)
        except ValueError:
            # float() failed (val is still None) — log and fall through.
            logger.error(
                "{param} was not a valid {type}".format(param=param, type=type)
            )
        return val
class ZoneNode(homie.HomieNode, SetMixin):
    """One heating zone exposed as a Homie node.

    Holds the zone's setpoints and modes (scheduled / boost / away) and, via
    the ``active`` property, compares the latest temperature reading from the
    DB against the applicable setpoint to decide whether the zone's control
    switch should be on.
    """

    # Class-level defaults; SetMixin._set shadows them per instance.
    _temperatureset = 20   # normal setpoint (degrees — presumably Celsius, confirm)
    _enabled = True
    _heating = None        # last commanded switch state; None = unknown
    _scheduled = False
    _boost = False
    _boosttime = 20        # boost duration in minutes
    _boostset = 25         # setpoint while boosting
    _booststart = 0        # epoch seconds when the current boost started
    _away = False
    _awayset = 15          # setpoint while away

    def __init__(self, homie_instance, zid, read_property, control_property):
        # read_property / control_property are dicts with devicestring,
        # nodestring and propertystring keys addressing the zone's
        # temperature sensor and its heating switch respectively.
        self._read_property = read_property
        self._control_property = control_property
        self._id = zid
        super(ZoneNode, self).__init__(
            homie_instance, "zheatingzone" + str(zid), "heatingzone"
        )
        # Every user-tweakable property gets an MQTT "settable" handler.
        self.advertise("enabled").settable(self.enable_handler)
        self.advertise("scheduled").settable(self.scheduled_handler)
        self.advertise("temperatureset").settable(self.temperature_sp_handler)
        self.advertise("boost").settable(self.boost_handler)
        self.advertise("boosttime").settable(self.boost_time_handler)
        self.advertise("boostset").settable(self.boost_sp_handler)
        self.advertise("away").settable(self.away_handler)
        self.advertise("awayset").settable(self.away_sp_handler)

    @property
    def _db(self):
        # A fresh DB handle on every access — no connection reuse.
        return db()

    def enable_handler(self, mqttc, obj, msg):
        self._set("enabled", "bool", msg)

    def scheduled_handler(self, mqttc, obj, msg):
        self._set("scheduled", "bool", msg)

    def temperature_sp_handler(self, mqttc, obj, msg):
        self._set("temperatureset", "float", msg)

    def boost_handler(self, mqttc, obj, msg):
        # Starting a boost records its start time for the timeout check.
        boost = self._set("boost", "bool", msg)
        if boost:
            self._booststart = time.time()

    def boost_time_handler(self, mqttc, obj, msg):
        self._set("boosttime", "float", msg)

    def boost_sp_handler(self, mqttc, obj, msg):
        self._set("boostset", "float", msg)

    def away_handler(self, mqttc, obj, msg):
        self._set("away", "bool", msg)

    def away_sp_handler(self, mqttc, obj, msg):
        self._set("awayset", "float", msg)

    def set(self, property, payload, retain=True):
        """Publish `payload` to the /set topic of the addressed property.

        `property` (shadows the builtin) is a devicestring/nodestring/
        propertystring dict.
        """
        self.homie.mqtt.publish(
            self.homie.baseTopic
            + "/{device}/{node}/{property}/set".format(
                device=property["devicestring"],
                node=property["nodestring"],
                property=property["propertystring"],
            ),
            payload=str(payload),
            retain=retain,
        )

    def set_status(self, state):
        """Switch the zone's control property on/off and remember the state."""
        address = "{devicestring}/{nodestring}/{propertystring}".format(
            devicestring=self._control_property["devicestring"],
            nodestring=self._control_property["nodestring"],
            propertystring=self._control_property["propertystring"],
        )
        logger.info(
            "Setting status to {state} for {address}".format(
                state=state, address=address
            )
        )
        self.set(self._control_property, "1" if state else "0")
        self._heating = state

    @property
    def active(self):
        """Evaluate the zone and return its new heating demand.

        Side effects: may switch the control property off/on and clear an
        expired boost. Returns None when the zone is disabled, idle, or no
        temperature reading could be found.
        """
        if not self._enabled:
            return
        if not self._scheduled and not self.boost and not self.away:
            # Nothing calls for heat; make sure the switch is off.
            if self._heating:
                self.set_status(False)
            return
        read = self._db.pq(
            """SELECT p.value FROM property p
WHERE p.devicestring=%s AND p.nodestring=%s AND p.propertystring = %s""",
            [
                self._read_property["devicestring"],
                self._read_property["nodestring"],
                self._read_property["propertystring"],
            ],
        )
        if not read:
            logger.error("Couldnt find a heating reading device")
            return
        now = time.time()
        # Pick the setpoint for the current mode (boost > away > normal).
        if self._boost:
            if (now - self._booststart) > self._boosttime * 60:
                # Boost expired; the setter also republishes the property.
                # Note: the boost setpoint is still used for this one cycle.
                self.boost = False
            set_point = self._boostset
        elif self.away:
            set_point = self._awayset
        else:
            set_point = self._temperatureset
        # NOTE(review): read[0]["value"] comes straight from the DB — this
        # assumes it compares numerically against the setpoint; confirm type.
        newstate = read[0]["value"] < set_point
        if newstate != self._heating:
            logger.debug(
                "{zid} set {set} read {read}".format(
                    zid=self._id, set=set_point, read=read[0]["value"]
                )
            )
            logger.debug(
                "{zid} new {new} old {old}".format(
                    zid=self._id, new=newstate, old=self._heating
                )
            )
            self.set_status(newstate)
        return newstate

    @property
    def boost(self):
        return self._boost

    @boost.setter
    def boost(self, state):
        # Setting boost republishes the property and (re)starts the timer.
        self._boost = state
        self.setProperty("boost").send(1 if state else 0)
        if state:
            self._booststart = time.time()

    @property
    def away(self):
        return self._away

    @away.setter
    def away(self, state):
        self._away = state
        self.setProperty("away").send(1 if state else 0)

    @property
    def scheduled(self):
        return self._scheduled

    @scheduled.setter
    def scheduled(self, state):
        self._scheduled = state
        self.setProperty("scheduled").send(1 if state else 0)
class Zonedheating(HomieDevice, SetMixin):
    """Root heating device: owns one ZoneNode per configured zone and drives
    the single heating control relay whenever any zone calls for heat."""

    _id = "root"
    _enabled = True
    _boost = None   # last published aggregate boost state
    _status = None  # last commanded relay state
    _zones = []     # class-level default only; rebound per instance in setup()

    def setup(self):
        """Advertise the root properties and build a ZoneNode per zone found
        in the 'zheating' page config."""
        # Rebind to a fresh list: appending to the class-level default list
        # would leak zones between Zonedheating instances (shared mutable
        # class attribute).
        self._zones = []
        self._node = self._homie.Node("zheating", "heating")
        self._node.advertise("enabled").settable(self.enable_handler)
        self._node.advertise("scheduled").settable(self.scheduled_handler)
        self._node.advertise("boost").settable(self.boost_handler)
        self._node.advertise("away").settable(self.away_handler)
        config = self._db.pq("""SELECT config FROM pages WHERE template='zheating'""")
        if not config:
            raise Exception("Could not find config")
        config = json.loads(config[0]["config"])
        for z in config["zones"]:
            properties = self._db.pq(
                """SELECT p.devicestring, p.nodestring, p.propertystring, pt.name as type
FROM property p
INNER JOIN propertytype pt ON p.propertytypeid = pt.propertytypeid
INNER JOIN propertysubgroupcomponent psgc ON psgc.propertyid = p.propertyid
WHERE psgc.propertysubgroupid = %s""",
                [z["propertysubgroupid"]],
            )
            # Split the subgroup's properties into the temperature sensor
            # (read) and the heating switch (control) for this zone.
            read = {}
            control = {}
            for p in properties:
                if p["type"] == "temperature":
                    read = p
                if p["type"] == "switch":
                    control = p
            node = ZoneNode(self._homie, z["id"], read, control)
            self._homie.nodes.append(node)
            self._zones.append(node)

    def enable_handler(self, mqttc, obj, msg):
        """Root enable toggle; disabling forces the relay off immediately."""
        enabled = self._set("enabled", "bool", msg, node=self._node)
        if not enabled:
            self.set_status(False)

    def scheduled_handler(self, mqttc, obj, msg):
        """Propagate the scheduled flag to every zone; off also kills the relay."""
        scheduled = self._set("scheduled", "bool", msg, node=self._node)
        if not scheduled:
            self.set_status(False)
        for z in self._zones:
            z.scheduled = scheduled

    def boost_handler(self, mqttc, obj, msg):
        """Propagate a boost on/off to every zone."""
        state = self._set("boost", "bool", msg, node=self._node)
        for z in self._zones:
            z.boost = state

    def away_handler(self, mqttc, obj, msg):
        """Propagate away mode to every zone."""
        state = self._set("away", "bool", msg, node=self._node)
        for z in self._zones:
            z.away = state

    def is_active(self):
        """True when any zone wants heat.  Deliberately evaluates EVERY zone
        (no short-circuit): ZoneNode.active has side effects."""
        active = False
        for z in self._zones:
            if z.active:
                active = True
        return active

    def is_boost(self):
        """True when any zone is currently boosting."""
        boost = False
        for z in self._zones:
            if z.boost:
                boost = True
        return boost

    def set_status(self, state):
        """Switch the configured heating control property on/off."""
        device = self._db.pq(
            """SELECT p.devicestring, p.nodestring, p.propertystring FROM property p
INNER JOIN options o ON o.name='heating_control_property' AND o.value = p.propertyid"""
        )
        if not device:
            logger.error("Couldnt find a heating control device")
            return
        address = "{devicestring}/{nodestring}/{propertystring}".format(
            devicestring=device[0]["devicestring"],
            nodestring=device[0]["nodestring"],
            propertystring=device[0]["propertystring"],
        )
        logger.info(
            "Root: Setting status to {state} for {address}".format(
                state=state, address=address
            )
        )
        # NOTE(review): `set` is not defined on this class or SetMixin —
        # presumably inherited from HomieDevice; confirm.
        self.set(device[0], "1" if state else "0")
        self._status = state

    def loopHandler(self):
        """Periodic tick: re-evaluate all zones and publish relay/boost
        changes only when they differ from the last known state."""
        if not self._enabled:
            return
        active = self.is_active()
        if active != self._status:
            self.set_status(active)
        boost = self.is_boost()
        if boost != self._boost:
            self._node.setProperty("boost").send("1" if boost else "0")
            self._boost = boost
def main():
    """Wire up the DB, the Homie MQTT client and the heating device, then
    poll the control loop forever."""
    d = db()
    config = homie.loadConfigFile("configs/zoned_heating.json")
    Homie = homie.Homie(config)
    heating = Zonedheating(d, Homie)
    Homie.setFirmware("zheating-controller", "1.0.0")
    Homie.setup()
    while True:
        heating.loopHandler()
        time.sleep(5)  # control-loop poll interval, seconds
if __name__ == "__main__":
    try:
        main()
    except (KeyboardInterrupt, SystemExit):
        # Clean shutdown on Ctrl-C / service stop.
        logger.info("Quitting.")
|
from wtforms import Form
from wtforms import StringField, TextAreaField
from wtforms.fields.html5 import EmailField
from wtforms import PasswordField
from wtforms import HiddenField
from wtforms import BooleanField
from wtforms import SelectField
from wtforms.ext.sqlalchemy.fields import QuerySelectField
from wtforms import DecimalField
from wtforms import IntegerField
from wtforms import DateField, DateTimeField
from flask_wtf.file import FileField, FileAllowed, FileRequired
from wtforms import validators
from models import User, tipoVehiculos, Resguardante, Vehiculo, Ticket, Ciudades, Compras, Model_Proveedor
from sqlalchemy.sql import distinct
from wtforms_components import TimeField, read_only
import flask
from models import db
#images = UploadSet('images', IMAGES)
def get_pk(obj):
    """Render any object as its string key so QuerySelectField can list many
    records (necesario para que el QuerySelectField pueda mostrar muchos
    registros)."""
    return "{}".format(obj)
def ciudad():
    """Query factory for QuerySelectField: all cities ordered by name."""
    return Ciudades.query.order_by('ciudad')
def length_honeypot(form, field):
    """Anti-spam validator: the hidden honeypot field must remain empty.

    :raises validators.ValidationError: when the field carries any data.
    """
    # field.data can be None when the field was never submitted; the original
    # len(None) raised TypeError — treat None/empty as valid instead.
    if field.data:
        raise validators.ValidationError('El Campo debe estar vacio.')
def ciudad():
    """Query factory: all cities ordered by name.

    NOTE(review): duplicate definition — an identical `ciudad` exists earlier
    in this module; this later one wins at import time. Remove one copy.
    """
    return Ciudades.query.order_by('ciudad')
def proveedor():
    """Query factory: suppliers filtered to the city stored in the session."""
    lugar = flask.session.get('ciudad')
    return Model_Proveedor.query.filter_by(idCiudad=lugar)
def Query_placas():
    """Query factory: vehicles of the session's city, ordered by plate."""
    lugar = flask.session.get('ciudad')
    x = Vehiculo.query.filter_by(idCiudad=lugar).order_by('placa').all()
    return x
def Query_placa_Ticket():
    """Query factory: vehicles of the session's city, ordered by plate.

    NOTE(review): functionally identical to `Query_placas` above.
    """
    lugar = flask.session.get('ciudad')
    return Vehiculo.query.filter_by(idCiudad=lugar).order_by('placa').all()
# def tipos():
# return tipoVehiculos.query.order_by('tipo')
def resguard():
    """Query factory: custodians (resguardantes) of the session's city,
    ordered by name."""
    lugar = flask.session.get('ciudad')
    return Resguardante.query.filter_by(idCiudad=lugar).order_by('nombre')
def QProv():
    """Query factory: suppliers of the session's city, ordered by razón social."""
    lugar = flask.session.get('ciudad')
    return Model_Proveedor.query.filter_by(idCiudad=lugar).order_by('razonSocial')
class Create_Form(Form):
    """User-creation form: credentials, city and permission flags, plus a
    honeypot field for spam filtering."""

    username = StringField('Usuario',
                           [validators.Required(message='El user es requerido!.'),
                            validators.length(min=8, max=20, message='ingrese un username valido!.')
                            ])
    password = PasswordField('Password', [validators.Required(message='El password es Indispensable!.'),
                                          validators.EqualTo('confirm', message='Las contraseñas deben ser iguales')])
    confirm = PasswordField('Repita la Contraseña')
    email = EmailField('Correo electronico',
                       [validators.Required(message='El Email es requerido!.'),
                        validators.Email(message='Ingrese un email valido!.'),
                        validators.length(min=4, max=40, message='Ingrese un email valido!.')
                        ])
    ciudad = QuerySelectField(label="Ciudad", query_factory=ciudad, get_pk=get_pk, allow_blank=True)
    # Hidden honeypot: must stay empty or length_honeypot rejects the form.
    honeypot = HiddenField('', [length_honeypot])
    # Permission flags (labels describe the granted module, not the name).
    vehiculos = BooleanField('Inventarios')
    proveedores = BooleanField('Combustibles')
    tipo_vehiculos = BooleanField('Mantenimientos')
    crear = BooleanField('Administrador - Control Total')
    organismo = BooleanField('Organismo')

    def validate_username(form, field):
        """WTForms inline validator: reject usernames already in the DB."""
        username = field.data
        user = User.query.filter_by(username=username).first()
        if user is not None:
            raise validators.ValidationError('El usuario ya existe en la base de datos.')
class FormVehiculos(Form):
    """Vehicle registration/edit form: inventory identifiers, physical
    description, insurance, photos and fuel-card details."""

    numInv = StringField('Núm. Inventario',
                         [validators.DataRequired(message='El Número de inventario es necesario'),
                          validators.length(min=8, max=18, message='ingrese un numero de inventario valido!.')
                          ])
    numSicopa = StringField('Núm. Sicopa',
                            [validators.DataRequired(message='El Número de inventario es necesario'),
                             validators.length(min=8, max=18, message='ingrese un numero de inventario valido!.')
                             ])
    numTarCir = StringField('Folio Tarjeta de circulacion',
                            [validators.DataRequired(message='El Número de Tarjeta de circulacion es necesario'),
                             ])
    marca = StringField('Marca',
                        [validators.DataRequired(message='La marca del vehiculo es necesario'),
                         validators.length(min=4, max=15, message='Ingrese una marca valida')
                         ])
    modelo = StringField('Modelo',
                         [validators.DataRequired(message='La marca es necesaria'),
                          validators.length(min=4, max=15, message='Ingrese una marca valida')
                          ])
    color = StringField('Color',
                        [validators.DataRequired(message='La marca del vehiculo es necesario'),
                         validators.length(min=4, max=15, message='Ingrese un color')
                         ])
    # Years generated programmatically: the hand-written list had mismatched
    # value/label pairs — ('2005','2006'), ('2008','2009'), ('2017','2018') —
    # which mislabelled some years and made 2006, 2009 and 2018 impossible to
    # submit. Values and labels now always agree, covering 1995-2024.
    anio = SelectField('Año',
                       choices=[('', '')] + [(str(y), str(y)) for y in range(1995, 2025)], )
    tipoVehiculo = SelectField('T. Vehiculo',
                               choices=[('', ''), ('camioneta', 'camioneta'), ('moto', 'Motocicleta'), ('bidon', 'Bidon'), ('Estaquitas', 'Estaquitas'), ("Automovil", 'Automovil'), ("Pipa", 'Pipa'), ("coche", 'coche')], )
    nSerie = StringField('Núm. Serie',
                         [validators.DataRequired(message='El Número de serie es Obligatorio'),
                          validators.length(min=17, max=20, message='El Numero de serie es un campo obligatorio')
                          ])
    nMotor = StringField('Núm. Motor',
                         [validators.DataRequired('El Número de motor es requerido')])
    costo = StringField('Costo $',
                        [validators.DataRequired('El Número de motor es requerido')])
    tCombus = SelectField('T. Combistible',
                          choices=[('', ''), ('Magna', 'Magna'), ('Premium', 'Premium'), ("Diesel", 'Diesel')], )
    odome = SelectField('Odometro', choices=[('', ''), ('Si', 'Si'), ('No', 'No')])
    kmInicio = StringField('Km Inicial')
    # Was a set literal `{...}`: validators must be an ordered sequence.
    nVehi = StringField('Nombre del Vehiculo',
                        [validators.DataRequired(
                            message='El nombre de vehiculo ayuda a identificar el vehiculo más fácil'),
                         ])
    resguardo = QuerySelectField(label='Resguardante', query_factory=resguard,
                                 get_pk=get_pk, allow_blank=True, get_label="nombreCompleto")
    resguardo2 = StringField('Resguardante')
    resguardoAnte = StringField('Resguardante Anterior')
    cSeguros = StringField('Compañía de seguros',
                           [validators.DataRequired(message='Debe de ingresar el nombre de la compañía de seguros'),
                            validators.length(min=4, max=25)])
    nPoliza = StringField('Número de Poliza',
                          [validators.DataRequired('El Número de poliza es un campo obligatorio'),
                           validators.length(min=4, max=20, message='Inserte un numero de poliza Valido')
                           ])
    placa = StringField('Placa del vehiculo',
                        [validators.DataRequired('La Placa es indispensable para el control vehicular')
                         ])
    # Photo uploads (labels describe which view of the vehicle).
    frontal = FileField('Imagen Vehiculo vertice Frontal')
    izq = FileField('Imagen Vehiculo lado vertice trasero')
    der = FileField('Imagen Vehiculo interior')
    tras = FileField('Imagen Vehiculo motor')
    inte = FileField('Imagen Vehiculo Serie')
    factura = FileField('imagen de la factura')
    tarjeta = FileField('Imagen tarjeta de circulacion')
    poliza = FileField('Imagen Poliza de seguro')
    tipoCarga = SelectField('Disp. de carga', choices=[('', ''), ('vales', 'Vales'), ('arillo', 'Arillo'), ('tarjeta', 'Tarjeta'), ('efectivo', 'Efectivo')])
    numDispositivo = StringField('Num. Dispositivo',
                                 [validators.DataRequired('El numero de Dispositivo es indispensable')])
class Form_resguardos(Form):
    """Custodian (resguardante) form: personal data, department and driving
    licence with optional expiry date."""

    nombre = StringField("Nombre",
                         [validators.DataRequired(message="El campo nombre es obligatorio"),
                          validators.length(min=4, max=20, message="Ingrese un nombre valido")])
    apellidoPat = StringField("Apellido Pat.",
                              [validators.DataRequired(message="El campo apellido Pat. es obligatorio"),
                               validators.length(min=3, max=15, message="Ingrese un apellido valido")])
    apellidoMat = StringField("Apellido Mat.",
                              [validators.DataRequired(message="El campo Apellido Mat. es obligatorio"),
                               validators.length(min=3, max=15, message="Ingrese un apellido valido")])
    area = StringField("Area",
                       [validators.DataRequired(message="El campo Area es obligatorio"),
                        validators.length(min=4, max=20, message="Ingrese un Area valido")])
    departamento = StringField("Departamento",
                               [validators.DataRequired(message="El campo Departamento es obligatorio"),
                                validators.length(min=4, max=35, message="Ingrese un Departamento valido")])
    licencia = StringField("Licencia",
                           [validators.DataRequired(message="El campo Licencia es obligatorio"),
                            validators.length(min=4, max=15, message="Ingrese un licencia valido")])
    # Licence expiry; optional so legacy records without a date still validate.
    lVigencia = DateField('Vigencia (dd/mm/aaaa)', format='%d/%m/%Y', validators=(validators.Optional(),))
class ResSearchForm(Form):
    """Search form for custodians: pick a criterion and enter the term."""

    choices = [('', ''),
               ('td', 'Todos'),
               ('Nombre', 'Nombre'),
               ('Area', 'Area'),
               ('Departamento', 'Departamento')]
    select1 = SelectField('Buscar por', choices=choices)
    search = StringField('-')
class TelephoneForm(Form):
    """Sub-form for a phone number split into country/area code and number."""

    # NOTE(review): validators.required is the deprecated alias of
    # DataRequired in modern WTForms.
    country_code = IntegerField('Codigo de l Pais', [validators.required()])
    area_code = IntegerField('Codigo de area', [validators.required()])
    number = StringField('Numero')
class Form_Proveedor(Form):
    """Supplier registration form: fiscal identity, address and contact data."""

    razonSocial = StringField('Razón social',
                              [validators.DataRequired(message='El campo es obligatorio'),
                               validators.length(min=2, max=100, message='El campo tiene un maximo de 100 caracteres')])
    propietario = StringField('Propietario',
                              [validators.DataRequired(message='Campo es obligatorio'),
                               validators.length(min=4, max=50, message='el campo solo soporta 50 caracteres')])
    direccion = StringField('Direccion',
                            [validators.DataRequired(message='La direccion debe capturarse'),
                             validators.length(min=4, max=120, message='Maximo 120 caracteres')])
    # NOTE(review): min=12 here but the error message says minimum 14 —
    # confirm which bound is intended (capturaFactura uses min=14).
    rfc = StringField('R. F. C. (XXXx-aammdd-XXX)',
                      [validators.DataRequired(message='El RFC es un campo obligatorio'),
                       validators.length(min=12, max=15,
                                         message='El RFC debe contar minimo con 14 y maximo 15 caracteres')])
    municipio = StringField('Municipio',
                            [validators.DataRequired(message='Campo es obligatorio'),
                             validators.length(min=4, max=35, message='el campo solo soporta 35 caracteres')])
    estado = StringField('Estado',
                         [validators.DataRequired(message='Campo es obligatorio'),
                          validators.length(min=4, max=20, message='el campo solo soporta 20 caracteres')])
    telefono = StringField('Telefono',
                           [validators.DataRequired(message='Campo es obligatorio'),
                            validators.length(min=10, max=15, message='el campo solo soporta 15 caracteres')])
    contacto = StringField('Contacto',
                           [validators.length(min=4, max=50, message='el campo solo soporta 50 caracteres')])
    email = EmailField('Correo electronico',
                       [validators.Required(message='El Email es requerido!.'),
                        validators.Email(message='Ingrese un email valido!.'),
                        validators.length(min=4, max=40, message='Ingrese un email valido!.')
                        ])
class ProvSearchForm(Form):
    """Search form for suppliers: criterion selector plus free-text term."""

    choices = [('', ''),
               ('td', 'Todos'),
               ('rs', 'Razon Social'),
               ('P', 'Propietario'),
               ('rfc', 'R. F. C.')]
    select1 = SelectField('Buscar por', choices=choices, )
    search = StringField('-')
class VehiSearchForm(Form):
    """Search form for vehicles: criterion selector plus free-text term."""

    choices = [('', ''),
               ('td', 'Todos'),
               ('ni', 'Núm. Inv.'),
               ('ns', 'Núm. Serie'),
               ('res', 'Resguardante')]
    select1 = SelectField('Buscar por', choices=choices, )
    search = StringField('-')
class Form_Ticket(Form):
    """Fuel ticket capture form: transaction data, litres, prices and the
    vehicle (by plate) the purchase belongs to."""

    plancha = BooleanField('Planchado?')
    adicion = BooleanField('Adicional?')
    transaccion = StringField('Número de Transaccion')
    fecha = DateTimeField('Fecha y hora de Carga', format='%d/%m/%Y %H:%M:%S')
    odometro = IntegerField('Odometro')
    # Money/volume fields use 4 decimal places to match fuel-pump precision.
    cantidad = DecimalField('Cantidad de liros', places=4, rounding=None)
    tipoComb = SelectField('T. Combistible',
                           choices=[('', ''), ('Magna', 'Magna'), ('Premium', 'Premium'), ("Diesel", 'Diesel')],)
    precio = DecimalField('Precio combustible', places=4, rounding=None)
    subtotal = DecimalField('Subtotal', places=4, rounding=None)
    iva = DecimalField('I. V. A.', places=4, rounding=None)
    total = DecimalField('Total', places=4, rounding=None)
    placa = QuerySelectField(label='Placas', allow_blank=True, query_factory=Query_placas, get_pk=get_pk)
    obser = TextAreaField('Observaciones')
    oficio = StringField("Núm. Oficio")
class FormConsultaTicket(Form):
    """Ticket query form: filter by plate and an optional date range."""

    placas = QuerySelectField('Selecciones una placa', allow_blank=True, query_factory=Query_placa_Ticket, get_pk=get_pk)
    fechaI = DateTimeField('Fecha inicial', format='%d/%m/%Y %H:%M:%S', validators=(validators.Optional(),))
    fechaF = DateTimeField('Fecha Final', format='%d/%m/%Y %H:%M:%S', validators=(validators.Optional(),))
class FormConsultaTicket2(Form):
    """Ticket query form variant: optional date range only (no plate filter)."""

    fechaI = DateTimeField('Fecha inicial', format='%d/%m/%Y %H:%M:%S', validators=(validators.Optional(),))
    fechaF = DateTimeField('Fecha Final', format='%d/%m/%Y %H:%M:%S', validators=(validators.Optional(),))
class Form_Grafica(Form):
    """Chart selection form: one plate plus the year to graph."""

    placa = QuerySelectField('Selecciones una placa', allow_blank=True, query_factory=Query_placa_Ticket, get_pk=get_pk)
    anio = SelectField('Año', choices=[('', ''), ('2018', '2018'), ('2019', '2019'), ('2020', '2020'), ('2021', '2021'), ('2022', '2022')], )
class Form_Solicitud(Form):
    """Maintenance service request form; the service number and date are
    displayed read-only (filled in by the application)."""

    nServicio = StringField("Solicitud de Servicio: ")
    fecha = StringField("Fecha: ")
    nOficio = StringField("Núm. Oficio", [
        validators.length(min=5, max=25, message="El campo está limitado a 25 caracteres")])
    placa = QuerySelectField(label='Placas', allow_blank=True, query_factory=Query_placas, get_pk=get_pk)
    odome = StringField("Odometro:", [
        validators.length(min=1, max=9, message="maximo de caracteres 9")])
    solicitante = StringField("Solicitante", [
        validators.length(min=5, max=35, message="El campo está limitado a 35 caracteres")])
    observaciones = TextAreaField("Observaciones",)

    def __init__(self, *args, **kwargs):
        super(Form_Solicitud, self).__init__(*args, **kwargs)
        # These two fields are informational only; lock them in the UI.
        read_only(self.nServicio)
        read_only(self.fecha)
class Form_CapSol(Form):
    """Quotation capture for a service request: up to three quotes, only the
    first one is mandatory."""

    numSol = IntegerField("Núm. de Solicitud", [validators.DataRequired(message="El número de solicitud es necesario")])
    cotizacion1 = BooleanField("Cotización 1")
    proveedor1 = StringField("Proveedor", [
        validators.DataRequired(message="Debe capturar minimo una cotización"),
        validators.length(min=5, max=50, message="El nombre del proveedor debe contener min 5 y max 50 caracteres")])
    costo1 = DecimalField("Costo", places=2, rounding=None)
    descripcion1 = TextAreaField("Descripcion del servicio", [validators.required()])
    # Quotes 2 and 3 are optional alternatives.
    cotizacion2 = BooleanField("Cotización 2")
    proveedor2 = StringField("Proveedor")
    costo2 = DecimalField("Costo", places=2, rounding=None)
    descripcion2 = TextAreaField("Descripcion del servicio")
    cotizacion3 = BooleanField("Cotización 3")
    proveedor3 = StringField("Proveedor")
    costo3 = DecimalField("Costo", places=2, rounding=None)
    descripcion3 = TextAreaField("Descripcion del servicio")
class Factura(Form):
    """Minimal invoice form: plates plus observations."""

    placas = StringField('Placas',
                         [validators.Required(message = 'El campo es Requerido!.'),
                          validators.length(max = 8, message='El campo debe contener 8 caracteres como Maximo')
                          ])
    observaciones = StringField('Observaciones',
                                [validators.Required('El campo es Requerido'),
                                 validators.length(min=5, max=150, message='Ingrese un comentarios valido')
                                 ])
class Factura(Form):
    """Minimal invoice form: plates plus observations.

    NOTE(review): exact duplicate of the `Factura` class defined immediately
    above; this redefinition is the one that wins at import time. Remove one.
    """

    placas = StringField('Placas',
                         [validators.Required(message = 'El campo es Requerido!.'),
                          validators.length(max = 8, message='El campo debe contener 8 caracteres como Maximo')
                          ])
    observaciones = StringField('Observaciones',
                                [validators.Required('El campo es Requerido'),
                                 validators.length(min=5, max=150, message='Ingrese un comentarios valido')
                                 ])
class capturaFactura(Form):
    """Full invoice capture: fiscal data (RFC, UUID), totals, the supplier
    and one unlabeled line-item (quantity/description/unit price/amount)."""

    fecha = DateField('Fecha y Hora', format='%d/%m/%Y')
    total = DecimalField('Total', places=4, rounding=None)
    subtotal = DecimalField('SubTotal', places=4, rounding=None)
    iva = DecimalField('I. V. A.', places=4, rounding=None)
    rfc = StringField('R. F. C.',
                      [validators.DataRequired(message='El RFC es un campo obligatorio'),
                       validators.length(min=14, max=15,
                                         message='El RFC debe contar minimo con 14 y maximo 15 caracteres')])
    nombre = QuerySelectField(label="Proveedor", query_factory=proveedor, allow_blank=True, get_pk=get_pk)
    # CFDI fiscal folio (36-char UUID).
    uuid = StringField("UUiD",
                       [validators.DataRequired(message="El campo nombre es obligatorio"),
                        validators.length(min=4, max=36, message="Ingrese un UUId valido")])
    placas = StringField('Placas',
                         [validators.DataRequired(message="Las placas son necesarias para identificar la unidad"),
                          validators.length(min=6, max=9, message="La longitud no debe se menor a 6 caracteres ni mayot a 9")])
    obser = TextAreaField('Observaciones', [validators.Required(message='Text is required')])
    # Line-item columns rendered without labels in the template.
    cantidad = DecimalField('', places=4, rounding=None)
    descripcion = StringField('', [
        validators.DataRequired(message='Tiene que especificar la descripcion del Serviocio o articulo'),
        validators.length(min=5, max=35)])
    pUnit = DecimalField('', places=4, rounding=None)
    importe = DecimalField('', places=4, rounding=None)
class filtroServ(Form):
    """Service report filter: each criterion (supplier, date range, plate)
    has an enabling checkbox next to its input."""

    bProv = BooleanField(label=None)
    sProv = QuerySelectField(label="Proveedor", query_factory=proveedor, allow_blank=True, get_pk=get_pk)
    bFecha = BooleanField(label=None)
    sFechaI = DateField("Fecha Ini", format='%d/%m/%Y', validators=(validators.Optional(),))
    sFechaF = DateField("Fecha Fin", format='%d/%m/%Y', validators=(validators.Optional(),))
    bPlaca = BooleanField(label=None)
    qPlaca = QuerySelectField(label='Placas', allow_blank=True, query_factory=Query_placas, get_pk=get_pk)
class formCotizacion(Form):
    """Quotation lookup: request number plus the supplier to quote."""

    solicitud = StringField("Núm. Solicitud", [
        validators.DataRequired(message="Tiene que capturar el numero de Solicitud")])
    Cotizacion = QuerySelectField(label='Proveedores', query_factory=QProv, allow_blank=True, get_pk=get_pk)
class formBitacora(Form):
    """Logbook filter: criterion selector, a dependent options dropdown
    (populated client/server side) and a custodian picker."""

    choices = [('na', ''), ('ni', 'Núm. Inv.'), ('placa', 'Placa'), ('res', 'Resguardante')]
    select1 = SelectField('Buscar por', choices=choices, )
    # Second dropdown starts empty; its choices depend on select1.
    select2 = SelectField('Opciones', choices=[('', ''),])
    resguardo = QuerySelectField(label='Resguardante', query_factory=resguard,
                                 get_pk=get_pk, allow_blank=True, get_label="nombreCompleto")
class formBitacora2(Form):
    """Logbook filter variant including a 'Todos' (all) criterion."""

    choices = [('na', ''), ('td', 'Todos'), ('ni', 'Núm. Inv.'), ('placa', 'Placa'), ('res', 'Resguardante')]
    select1 = SelectField('Buscar por', choices=choices, )
    # Second dropdown starts empty; its choices depend on select1.
    select2 = SelectField('Opciones', choices=[('', ''),])
class Oficialia(Form):
    """Oficialía form: mandatory mileage plus free-form observations."""

    kilometraje = StringField("Kilometraje", [
        validators.DataRequired(message="Tiene que capturar el Kilometraje")])
    observaciones = TextAreaField("Observaciones")
#!/usr/bin/env python
import os
from datetime import datetime
from flask import Blueprint, request, render_template, flash, redirect, url_for
from flask_login import login_user, logout_user, current_user, login_required
from app.helpers.getuser import get_user
from app.helpers.generaterandom import generate_random_str
from app.web.datasets.form import AddNewDatasets
from app import SqlDB
from app.models import Datasets
# All dataset CMS views hang off the /datasets URL prefix.
datasets = Blueprint('datasets', __name__, url_prefix='/datasets')
@datasets.route('/')
@login_required
def index():
    """List the current user's active datasets, newest first."""
    logged_in_user = get_user(current_user)
    data = {
        'logged_in_user': logged_in_user
    }
    # load active datasets: status == 1 and owned by the logged-in user.
    # (local `datasets` shadows the blueprint name within this function)
    datasets = Datasets.query.filter((Datasets.status == 1) & (Datasets.created_by == logged_in_user['id'])).order_by(Datasets.id.desc()).all()
    data['datasets'] = datasets
    return render_template('cms/datasets/index.html', data=data)
@datasets.route('/add', methods=['GET', 'POST'])
@login_required
def add():
    """Render the add-dataset form; on a valid POST create the record (and
    store the optional attachment under app/contentfiles/<date>/) then
    redirect back to the index."""
    logged_in_user = get_user(current_user)
    data = {
        'logged_in_user': logged_in_user
    }
    form = AddNewDatasets()
    if form.validate_on_submit():
        # Random hashcode doubles as the stored file's basename.
        hashcode = generate_random_str(16)
        new_datasets = Datasets()
        new_datasets.hashcode = hashcode
        new_datasets.name = form.data['name']
        new_datasets.description = form.data['description']
        new_datasets.status = 1  # 1 = active (soft delete sets it to 0)
        new_datasets.created_at = datetime.now()
        new_datasets.created_by = logged_in_user['id']
        # check attachment
        if form.attachment.data:
            name, ext = os.path.splitext(form.attachment.data.filename)
            upload_file_path = 'app/contentfiles/'
            hashed_filename = ''.join([hashcode, ext])
            # Files are grouped into one directory per day.
            local_dirs = ''.join([upload_file_path, datetime.now().strftime('%Y-%m-%d'), '/'])
            local_path = ''.join([local_dirs, hashed_filename])
            # create dirs if not exists
            if not os.path.exists(os.path.dirname(local_dirs)):
                os.makedirs(os.path.dirname(local_dirs))
            # move to local dirs
            form.attachment.data.save(local_path)
            # record the original and hashed file metadata on the row
            new_datasets.original_file_name = name
            new_datasets.original_file_ext = ext
            new_datasets.original_file_size = os.stat(local_path).st_size
            new_datasets.hashed_filename = hashed_filename
            new_datasets.hashed_filepath = local_dirs
        # insert
        SqlDB.session.add(new_datasets)
        SqlDB.session.commit()
        flash('Success add new dataset')
        return redirect(url_for('.index'))
    else:
        # Validation failed (or plain GET with no submission): surface every
        # field error, then fall through to re-render the form.
        for field, errors in form.errors.items():
            for error in errors:
                flash(u"Error in the %s field - %s" % (
                    getattr(form, field).label.text,
                    error
                ))
        return render_template('cms/datasets/add.html', data=data, form=form)
    # NOTE(review): unreachable — both branches above return; kept from the
    # original source (indentation was reconstructed).
    return render_template('cms/datasets/add.html', data=data, form=form)
@datasets.route('/delete/<int:dataset_id>')
@login_required
def delete(dataset_id):
    """Soft-delete a dataset owned by the current user.

    Only an active (status == 1) dataset created by the logged-in user may
    be deleted; anything else answers "Access Denied".  Deletion is soft:
    status is set to 0 and the audit columns are updated.
    """
    logged_in_user = get_user(current_user)
    dataset = Datasets.query.filter((Datasets.id==dataset_id) & (Datasets.status == 1) & (Datasets.created_by==logged_in_user['id'])).first()
    if dataset is None:  # fix: identity check against None, not ==
        return "Access Denied"
    dataset.status = 0
    dataset.updated_at = datetime.now()
    dataset.updated_by = logged_in_user['id']
    try:
        SqlDB.session.commit()
    except Exception:
        # Best-effort delete (as before): keep the session usable on a
        # failed commit and still redirect to the index.
        SqlDB.session.rollback()
    return redirect(url_for('.index'))
#!/usr/bin/env python
# coding: utf-8
# In[1]:
import pandas as pd
import numpy as np
# ## This is a test notebook.
# In[ ]:
|
from abc import abstractmethod, ABC
class Bank(ABC):
    """Abstract base class for banks.

    Subclasses must implement :meth:`getROI`; :meth:`getName` has a
    default implementation they may override.
    """

    @abstractmethod
    def getROI(self):
        """Return the bank's rate of interest (must be overridden)."""
        pass

    def getName(self):
        """Return a printable name for the bank."""
        return "Bank Name: PARENT BANK"
class SBI(Bank):
    """Concrete Bank with a fixed 8% rate of interest."""
    # Cleanup: removed the commented-out experimentation left over from
    # trying out @abstractmethod/pass variants.

    def getROI(self):
        """Return SBI's rate of interest."""
        return 8

    def getName(self):
        """Return SBI's printable name."""
        return "Bank Name: SBI BANK"
class ICICI(Bank):
    """Concrete Bank with a fixed 9% rate of interest."""
    # Cleanup: removed the commented-out experimentation left over from
    # trying out @abstractmethod/pass variants.

    def getROI(self):
        """Return ICICI's rate of interest."""
        return 9

    def getName(self):
        """Return ICICI's printable name."""
        return "Bank Name: ICICI BANK"
# Demo: Bank itself is abstract — instantiating it (b1 below) would raise
# TypeError, so only the concrete subclasses are exercised.
# b1 = Bank()
b2 = SBI()
b3 = ICICI()
# print("Base Rate of Interest:", b1.getROI())
print("SBI Rate of Interest:", b2.getROI())
print("Bank Name:", b2.getName())
print("ICICI Rate of Interest:", b3.getROI())
print("Bank Name:", b3.getName())
import json
from pycocotools.coco import COCO
from utils.data import save_coco_anns
from utils.data import get_category_based_anns
def filter_coco_by_cats(coco, max_anns=20):
    """Filter a COCO dataset, keeping only lightly-annotated images.

    Generalized: the previously hard-coded threshold of 20 is now the
    ``max_anns`` keyword (default unchanged, so callers are unaffected).

    Args:
        coco: loaded ``pycocotools.coco.COCO`` instance.
        max_anns: keep an image only when it has fewer than this many
            annotations.

    Returns:
        dict with 'images', 'annotations' and 'categories' lists in COCO
        JSON layout.
    """
    category_based_anns = get_category_based_anns(coco)
    ann_ids_to_keep = []
    img_ids_to_keep = set()
    cat_ids_to_keep = set()
    for sample in category_based_anns:
        if len(sample['anns']) < max_anns:
            img_ids_to_keep.add(sample['image_id'])
            for ann in sample['anns']:
                ann_ids_to_keep.append(ann['id'])
                cat_ids_to_keep.add(ann['category_id'])
    # Resolve the collected ids back to full records via the COCO index.
    cats_to_keep = [coco.cats[cat_id] for cat_id in cat_ids_to_keep]
    anns_to_keep = coco.loadAnns(ann_ids_to_keep)
    imgs_to_keep = coco.loadImgs(list(img_ids_to_keep))
    filtered_coco = {
        'images': imgs_to_keep,
        'annotations': anns_to_keep,
        'categories': cats_to_keep
    }
    return filtered_coco
if __name__ == '__main__':
    # Filter the FSOD train/test annotation files and write the reduced
    # COCO JSONs alongside the originals.
    path_to_anns_train = '/data/fsod/annotations/fsod_train.json'
    path_to_save_train = '/data/fsod/annotations/fsod_train_filtered.json'
    path_to_anns_test = '/data/fsod/annotations/fsod_test.json'
    path_to_save_test = '/data/fsod/annotations/fsod_test_filtered.json'
    coco_train = COCO(path_to_anns_train)
    coco_test = COCO(path_to_anns_test)
    coco_train_filtered_json = filter_coco_by_cats(coco_train)
    coco_test_filtered_json = filter_coco_by_cats(coco_test)
    save_coco_anns(coco_train_filtered_json, path_to_save_train)
    save_coco_anns(coco_test_filtered_json, path_to_save_test)
|
# Parameter estimation by optimization
# When doing statistical inference, we speak the language of probability. A probability distribution that describes your data has parameters. So, a major goal of statistical inference is to estimate the values of these parameters, which allows us to concisely and unambiguously describe our data and draw conclusions from it. In this chapter, you will learn how to find the optimal parameters, those that best describe your data.
# How often do we get no-hitters?
# The number of games played between each no-hitter in the modern era (1901-2015) of Major League Baseball is stored in the array nohitter_times.
# If you assume that no-hitters are described as a Poisson process, then the time between no-hitters is Exponentially distributed. As you have seen, the Exponential distribution has a single parameter, which we will call τ, the typical interval time. The value of the parameter τ that makes the exponential distribution best match the data is the mean interval time (where time is in units of number of games) between no-hitters.
# Compute the value of this parameter from the data. Then, use np.random.exponential() to "repeat" the history of Major League Baseball by drawing inter-no-hitter times from an exponential distribution with the τ you found and plot the histogram as an approximation to the PDF.
# NumPy, pandas, matplotlib.pyplot, and seaborn have been imported for you as np, pd, plt, and sns, respectively.
# Seed random number generator so the simulated history is reproducible
np.random.seed(42)
# Compute mean no-hitter time: tau (the MLE of the Exponential rate's inverse)
tau = np.mean(nohitter_times)
# Draw out of an exponential distribution with parameter tau: inter_nohitter_time
inter_nohitter_time = np.random.exponential(tau, 100000)
# Plot the PDF and label axes.
# Fix: the `normed` keyword was removed in matplotlib 3.x; `density=True`
# is the supported equivalent.
_ = plt.hist(inter_nohitter_time,
             bins=50, density=True, histtype='step')
_ = plt.xlabel('Games between no-hitters')
_ = plt.ylabel('PDF')
# Show the plot
plt.show()
# Do the data follow our story?
# You have modeled no-hitters using an Exponential distribution. Create an ECDF of the real data. Overlay the theoretical CDF with the ECDF from the data. This helps you to verify that the Exponential distribution describes the observed data.
# It may be helpful to remind yourself of the function you created in the previous course to compute the ECDF, as well as the code you wrote to plot it
# NOTE(review): ecdf(), nohitter_times and inter_nohitter_time are assumed
# to be defined earlier in the session (this is an exercise transcript).
# Create an ECDF from real data: x, y
x, y = ecdf(nohitter_times)
# Create a CDF from theoretical samples: x_theor, y_theor
x_theor, y_theor = ecdf(inter_nohitter_time)
# Overlay the plots
plt.plot(x_theor, y_theor)
plt.plot(x, y, marker='.', linestyle='none')
# Margins and axis labels
plt.margins(0.02)
plt.xlabel('Games between no-hitters')
plt.ylabel('CDF')
# Show the plot
plt.show()
# How is this parameter optimal?
# Now sample out of an exponential distribution with τ being twice as large as the optimal τ. Do it again for τ half as large. Make CDFs of these samples and overlay them with your data. You can see that they do not reproduce the data as well. Thus, the τ you computed from the mean inter-no-hitter times is optimal in that it best reproduces the data.
# Note: In this and all subsequent exercises, the random number generator is pre-seeded for you to save you some typing.
# Plot the theoretical CDFs (re-using x, y, x_theor, y_theor from above)
plt.plot(x_theor, y_theor)
plt.plot(x, y, marker='.', linestyle='none')
plt.margins(0.02)
plt.xlabel('Games between no-hitters')
plt.ylabel('CDF')
# Take samples with half tau: samples_half
samples_half = np.random.exponential(tau/2, size=10000)
# Take samples with double tau: samples_double
samples_double = np.random.exponential(2*tau, size=10000)
# Generate CDFs from these samples
x_half, y_half = ecdf(samples_half)
x_double, y_double = ecdf(samples_double)
# Plot these CDFs as lines on the same axes for visual comparison
_ = plt.plot(x_half, y_half)
_ = plt.plot(x_double, y_double)
# Show the plot
plt.show()
# EDA of literacy/fertility data
# In the next few exercises, we will look at the correlation between female literacy and fertility (defined as the average number of children born per woman) throughout the world. For ease of analysis and interpretation, we will work with the illiteracy rate.
# It is always a good idea to do some EDA ahead of our analysis. To this end, plot the fertility versus illiteracy and compute the Pearson correlation coefficient. The Numpy array illiteracy has the illiteracy rate among females for most of the world's nations. The array fertility has the corresponding fertility data.
# Here, it may be useful to refer back to the function you wrote in the previous course to compute the Pearson correlation coefficient.
# NOTE(review): illiteracy, fertility and pearson_r() come from the
# exercise environment and are not defined in this file.
# Plot the illiteracy rate versus fertility
_ = plt.plot(illiteracy, fertility, marker='.', linestyle='none')
# Set the margins and label axes
plt.margins(0.02)
_ = plt.xlabel('percent illiterate')
_ = plt.ylabel('fertility')
# Show the plot
plt.show()
# Show the Pearson correlation coefficient
print(pearson_r(illiteracy, fertility))
# Linear regression
# We will assume that fertility is a linear function of the female illiteracy rate. That is, f=ai+b, where a is the slope and b is the intercept. We can think of the intercept as the minimal fertility rate, probably somewhere between one and two. The slope tells us how the fertility rate varies with illiteracy. We can find the best fit line using np.polyfit().
# Plot the data and the best fit line. Print out the slope and intercept. (Think: what are their units?)
# Plot the illiteracy rate versus fertility
_ = plt.plot(illiteracy, fertility, marker='.', linestyle='none')
plt.margins(0.02)
_ = plt.xlabel('percent illiterate')
_ = plt.ylabel('fertility')
# Perform a linear regression using np.polyfit(): a, b
a, b = np.polyfit(illiteracy,fertility,1)
# Print the results to the screen
print('slope =', a, 'children per woman / percent illiterate')
print('intercept =', b, 'children per woman')
# Make theoretical line to plot
x = np.array([0,100])
y = a * x + b
# Add regression line to your plot
_ = plt.plot(x, y)
# NOTE(review): these labels overwrite the axis labels set above; the
# original exercise keeps 'percent illiterate'/'fertility' — confirm.
_ = plt.xlabel("Slope")
_ = plt.ylabel('Outcome')
# Draw the plot
plt.show()
# How is it optimal?
# The function np.polyfit() that you used to get your regression parameters finds the optimal slope and intercept. It is optimizing the sum of the squares of the residuals, also known as RSS (for residual sum of squares). In this exercise, you will plot the function that is being optimized, the RSS, versus the slope parameter a. To do this, fix the intercept to be what you found in the optimization. Then, plot the RSS vs. the slope. Where is it minimal?
# Specify slopes to consider: a_vals
a_vals = np.linspace(0,0.1,200)
# Initialize sum of square of residuals: rss
rss = np.empty_like(a_vals)
# Compute sum of square of residuals for each value of a_vals
# (intercept b is held fixed at the polyfit optimum from above)
for i, a in enumerate(a_vals):
    rss[i] = np.sum((fertility - a*illiteracy - b)**2)
# Plot the RSS as a function of candidate slope
plt.plot(a_vals,rss, '-')
plt.xlabel('slope (children per woman / percent illiterate)')
plt.ylabel('sum of square of residuals')
plt.show()
# Linear regression on appropriate Anscombe data
# For practice, perform a linear regression on the data set from Anscombe's quartet that is most reasonably interpreted with linear regression.
# NOTE(review): x and y here are the chosen Anscombe arrays supplied by
# the exercise environment.
# Perform linear regression: a, b
a, b = np.polyfit(x,y,1)
# Print the slope and intercept
print(a, b)
# Generate theoretical x and y data: x_theor, y_theor
x_theor = np.array([3, 15])
y_theor = x_theor * a + b
# Plot the Anscombe data and theoretical line
_ = plt.plot(x,y,marker ='.',linestyle ='none')
_ = plt.plot(x_theor,y_theor,marker ='.',linestyle ='none')
# Label the axes
plt.xlabel('x')
plt.ylabel('y')
# Show the plot
plt.show()
# Linear regression on all Anscombe data
# Now, to verify that all four of the Anscombe data sets have the same slope and intercept from a linear regression, you will compute the slope and intercept for each set. The data are stored in lists; anscombe_x = [x1, x2, x3, x4] and anscombe_y = [y1, y2, y3, y4], where, for example, x2 and y2 are the x and y values for the second Anscombe data set.
# Iterate through x,y pairs
for x, y in zip(anscombe_x, anscombe_y):
    # Compute the slope and intercept: a, b
    a, b = np.polyfit(x,y,1)
    # Print the result
    print('slope:', a, 'intercept:', b)
|
from adapters.adapter_with_battery import AdapterWithBattery
from devices.sensor.door_contact import DoorContactSensor
class SensorMagnet(AdapterWithBattery):
    """Adapter for a magnetic contact (door/window) sensor with battery state."""

    def __init__(self, devices):
        super().__init__(devices)
        # Register the door-contact entity under the 'sensor'/'contact' keys.
        self.devices.append(DoorContactSensor(devices, 'sensor', 'contact'))
|
#!/usr/bin/python
from __future__ import print_function
import logging
from fabric.api import task,run,local,put,get,execute,settings
from fabric.decorators import *
from fabric.context_managers import shell_env,quiet
from fabric.exceptions import *
from fabric.utils import puts,fastprint
from time import sleep
from contextlib import contextmanager
import traceback
import os,sys,datetime,re,ast
import itertools
import glob,shlex,subprocess
import pprint
sys.path.append('..')
from environment import *
from experiments import *
from experiments import configs
from helper import get_cfgs,get_outfile_name,get_execfile_name,get_args,CONFIG_PARAMS,FLAG
# (see https://github.com/fabric/fabric/issues/51#issuecomment-96341022)
# Module-level setup for the fab experiment driver.
logging.basicConfig()
# Silence paramiko's noisy transport logger
# (see https://github.com/fabric/fabric/issues/51#issuecomment-96341022).
paramiko_logger = logging.getLogger("paramiko.transport")
paramiko_logger.disabled = True
# ANSI color codes used by the color() context manager for puts() output.
COLORS = {
    "info" : 32, #green
    "warn" : 33, #yellow
    "error" : 31, #red
    "debug" : 36, #cyan
}
#OUT_FMT = "[{h}] {p}: {fn}:".format
PP = pprint.PrettyPrinter(indent=4)
# Timestamp for naming this batch's result files; refreshed per iteration.
NOW=datetime.datetime.now()
STRNOW=NOW.strftime("%Y%m%d-%H%M%S")
# Run from the repository root (two levels up from this script).
os.chdir('../..')
#MAX_TIME_PER_EXP = 60 * 2 # in seconds
MAX_TIME_PER_EXP = 60 * 10 # in seconds
# Globals toggled by run_exps()/network_test() task arguments.
EXECUTE_EXPS = True
SKIP = False
CC_ALG = ""
set_env()
@task
@hosts('localhost')
def using_vcloud():
    # Select the vcloud cluster environment for subsequent tasks.
    set_env_vcloud()
@task
@hosts('localhost')
def using_istc():
    # Select the istc cluster environment for subsequent tasks.
    set_env_istc()
@task
@hosts('localhost')
def using_ec2():
    # Select the EC2 cluster environment for subsequent tasks.
    set_env_ec2()
@task
@hosts('localhost')
def using_local():
    # Run everything on the local machine.
    set_env_local()
## Basic usage:
## fab using_vcloud run_exps:experiment_1
## fab using_local run_exps:experiment_1
## fab using_istc run_exps:experiment_1
@task
@hosts('localhost')
def run_exps(exps,skip_completed='False',exec_exps='True',dry_run='False',iterations='1',check='True',delay='',same_node='False',overlap='False',shmem='True',cram='False'):
    """Run the named experiment set `iterations` times.

    Note: fab passes all task arguments as strings, hence the
     'True'/'False' string comparisons below.
    """
    global SKIP, EXECUTE_EXPS,NOW,STRNOW
    ITERS = int(iterations)
    SKIP = skip_completed == 'True'
    EXECUTE_EXPS = exec_exps == 'True'
    CHECK = check == 'True'
    env.dry_run = dry_run == 'True'
    env.same_node = same_node == 'True'
    env.overlap = overlap == 'True'
    env.cram = cram == 'True'
    if env.cluster != "ec2":
        env.shmem = shmem == 'True'
    if env.dry_run:
        with color(level="warn"):
            puts("this will be a dry run!",show_prefix=True)
    with color():
        puts("running experiment set:{}".format(exps),show_prefix=True)
    # Make sure all experiment binaries exist
    if CHECK:
        execute(check_binaries,exps)
    # Run experiments; refresh the timestamp so each iteration's result
    # files get distinct names.
    for i in range(ITERS):
        NOW=datetime.datetime.now()
        STRNOW=NOW.strftime("%Y%m%d-%H%M%S")
        execute(run_exp_old,exps,delay=delay)
        # execute(run_exp,exps,delay=delay)
## Basic usage:
## fab using_vcloud network_test
## fab using_istc network_test:4
@task
@hosts(['localhost'])
def network_test(num_nodes=16,exps="network_experiment",skip_completed='False',exec_exps='True'):
    """Run the network experiment on every pair of the first num_nodes hosts."""
    env.batch_mode = False
    global SKIP, EXECUTE_EXPS, MAX_TIME_PER_EXP
    SKIP = skip_completed == 'True'
    EXECUTE_EXPS = exec_exps == 'True'
    # Network tests are short; cap each run at one minute.
    MAX_TIME_PER_EXP = 60
    num_nodes = int(num_nodes)
    execute(check_binaries,exps)
    if num_nodes < 2 or len(env.hosts) < num_nodes:
        with color(level="error"):
            puts("not enough hosts in ifconfig!",show_prefix=True)
        abort()
    exp_hosts=env.hosts[0:num_nodes]
    # Exercise every unordered host pair.
    pairs = list(itertools.combinations(exp_hosts,2))
    for pair in pairs:
        set_hosts(list(pair))
        execute(run_exp,exps,network_test=True)
@task
@parallel
def check_cpu():
    # Push a CPU micro-benchmark binary to each host and time it.
    put("test_cpu.out",env.rem_homedir)
    run("chmod a+x test_cpu.out; time ./test_cpu.out")
@task
@hosts('localhost')
def delete_local_results():
    """Remove every file from the local results/ directory."""
    local("rm -f results/*")
@task
#@hosts('localhost')
@parallel
def delete_remote_results():
    """Remove result files from each remote host (shmem or home dir)."""
    if env.cluster == "istc":
        if env.shmem:
            run("rm -f /dev/shm/results*.out")
        else:
            run("rm -f /home/%s/results*.out" % env.user)
    else:
        # Non-istc clusters use the ubuntu account's home directory.
        run("rm -f /home/ubuntu/results*.out")
@task
@parallel
def copy_schema():
    """Push the benchmark schema files to each host (no-op on dry runs)."""
    if env.dry_run:
        return
    schemas = ["benchmarks/TPCC_full_schema.txt","benchmarks/YCSB_schema.txt","benchmarks/PPS_schema.txt"]
    # Copying regular files should always succeed unless node is down
    for schema in schemas:
        if env.shmem:
            put(schema,"/dev/shm/")
        else:
            put(schema,env.rem_homedir)
@task
@parallel
def copy_binaries(exp_fname):
    """Push the per-experiment rundb/runcl binaries to each host.

    A put() can fail while a previous run's process still holds the
    executable; in that case all executables are killed and the copy is
    retried once before aborting.
    """
    if env.dry_run:
        return
    executable_files = ["rundb","runcl"]
    succeeded = True
    # Copying executable files may fail if a process is running the executable
    with settings(warn_only=True):
        for f in (executable_files):
            local_fpath = os.path.join("binaries","{}{}".format(exp_fname,f))
            if env.shmem:
                remote_fpath = os.path.join("/dev/shm/","{}{}".format(exp_fname,f))
            else:
                remote_fpath = os.path.join(env.rem_homedir,"{}{}".format(exp_fname,f))
            res = put(local_fpath,remote_fpath,mirror_local_mode=True)
            if not res.succeeded:
                with color("warn"):
                    puts("WARN: put: {} -> {} failed!".format(f,env.rem_homedir),show_prefix=True)
                succeeded = False
                break
        if not succeeded:
            with color("warn"):
                puts("WARN: killing all executables and retrying...",show_prefix=True)
            killall()
            # If this fails again then we abort
            for f in (executable_files):
                local_fpath = os.path.join("binaries","{}{}".format(exp_fname,f))
                # Fix: the retry previously dropped the exp_fname prefix from
                # the remote path, so it wrote a different filename than the
                # first attempt (and than deploy() executes).
                if env.shmem:
                    remote_fpath = os.path.join("/dev/shm","{}{}".format(exp_fname,f))
                else:
                    remote_fpath = os.path.join(env.rem_homedir,"{}{}".format(exp_fname,f))
                res = put(local_fpath,remote_fpath,mirror_local_mode=True)
                if not res.succeeded:
                    with color("error"):
                        puts("ERROR: put: {} -> {} failed! (2nd attempt)... Aborting".format(f,env.rem_homedir),show_prefix=True)
                    abort()
@task
@parallel
def copy_ifconfig():
    """Push the node-list file (ifconfig.txt) to each host."""
    files = ["ifconfig.txt"]
    # Copying regular files should always succeed unless node is down
    for f in files:
        if env.shmem:
            put(f,"/dev/shm/")
        else:
            put(f,env.rem_homedir)
@task
@parallel
def copy_files(schema,exp_fname):
    """Push ifconfig, the schema and the experiment binaries to each host.

    NOTE(review): unlike copy_binaries(), the remote executable names here
    drop the exp_fname prefix (local binaries/{exp}rundb -> remote rundb).
    deploy() runs {rfile}rundb, so this looks like a legacy path — confirm
    before relying on it.
    """
    if env.dry_run:
        return
    executable_files = ["rundb","runcl"]
    # if CC_ALG == "CALVIN":
    # executable_files.append("runsq")
    files = ["ifconfig.txt"]
    files.append(schema)
    succeeded = True
    # Copying regular files should always succeed unless node is down
    for f in files:
        if env.shmem:
            put(f,"/dev/shm/")
        else:
            put(f,env.rem_homedir)
    # Copying executable files may fail if a process is running the executable
    with settings(warn_only=True):
        for f in (executable_files):
            local_fpath = os.path.join("binaries","{}{}".format(exp_fname,f))
            if env.shmem:
                remote_fpath = os.path.join("/dev/shm/",f)
            else:
                remote_fpath = os.path.join(env.rem_homedir,f)
            #res = put(f,env.rem_homedir,mirror_local_mode=True)
            res = put(local_fpath,remote_fpath,mirror_local_mode=True)
            if not res.succeeded:
                with color("warn"):
                    puts("WARN: put: {} -> {} failed!".format(f,env.rem_homedir),show_prefix=True)
                succeeded = False
                break
        if not succeeded:
            with color("warn"):
                puts("WARN: killing all executables and retrying...",show_prefix=True)
            killall()
            # If this fails again then we abort
            for f in (executable_files):
                local_fpath = os.path.join("binaries","{}{}".format(exp_fname,f))
                if env.shmem:
                    remote_fpath = os.path.join("/dev/shm",f)
                else:
                    remote_fpath = os.path.join(env.rem_homedir,f)
                #res = put(f,env.rem_homedir,mirror_local_mode=True)
                res = put(local_fpath,remote_fpath,mirror_local_mode=True)
                if not res.succeeded:
                    with color("error"):
                        puts("ERROR: put: {} -> {} failed! (2nd attempt)... Aborting".format(f,env.rem_homedir),show_prefix=True)
                    abort()
#delay is in ms
@task
@parallel
def set_delay(delay='10'):
    # Add artificial network latency on eth0 via tc/netem.
    run("sudo tc qdisc add dev eth0 root netem delay {}ms".format(delay))
#delay is in ms
@task
@parallel
def reset_delay():
    # Remove the netem qdisc installed by set_delay().
    run("sudo tc qdisc del dev eth0 root")
@task
@parallel
def sync_clocks(max_offset=0.01,max_attempts=1,delay=15):
    """Step each host's clock with ntpdate until its offset is below
    max_offset seconds (up to max_attempts tries). Returns True on success."""
    if env.dry_run:
        return True
    offset = sys.float_info.max
    attempts = 0
    while attempts < max_attempts:
        # Query-only first to read the current offset.
        if env.cluster == "ec2":
            res = run("ntpdate -q 0.amazon.pool.ntp.org")
        else:
            res = run("ntpdate -q clock-2.cs.cmu.edu")
        # Parse the offset field from ntpdate's last output line.
        offset = float(res.stdout.split(",")[-2].split()[-1])
        #print "Host ",env.host,": offset = ",offset
        if abs(offset) < max_offset:
            break
        sleep(delay)
        # Step the clock (-b) and wait before re-checking.
        if env.cluster == "ec2":
            res = run("sudo ntpdate -b 0.amazon.pool.ntp.org")
        else:
            res = run("sudo ntpdate -b clock-2.cs.cmu.edu")
        sleep(delay)
        attempts += 1
    return attempts < max_attempts
@task
@hosts('localhost')
def compile():
    """Build the project locally (quietly first; on failure rebuild loudly
    so the compiler errors are shown). Shadows the builtin compile()."""
    compiled = False
    with quiet():
        compiled = local("make clean; make -j8",capture=True).succeeded
    if not compiled:
        with settings(warn_only=True):
            compiled = local("make -j8") # Print compilation errors
        if not compiled:
            with color("error"):
                puts("ERROR: cannot compile code!",show_prefix=True)
@task
@parallel
def killall():
    """Kill any leftover experiment processes on each host (best-effort)."""
    with settings(warn_only=True):
        if not env.dry_run:
            run("pkill -f rundb")
            run("pkill -f runcl")
            # run("pkill -f runsq")
@task
@parallel
def run_cmd(cmd):
    # Run an arbitrary shell command on every host.
    run(cmd)
@task
@parallel
def put_cmd(cmd):
    # Copy an arbitrary local file to every host's home directory.
    put(cmd,env.rem_homedir,mirror_local_mode=True)
@task
@parallel
def deploy(schema_path,nids,exps,runfiles,fmt):
    """Launch this host's assigned rundb/runcl processes in the background.

    nids/exps/runfiles map host -> per-process node ids, experiment tuples
    and executable-name prefixes; one backgrounded subshell is chained per
    role assignment and the whole chain is run under a command timeout.
    Always returns True (timeouts/network errors are swallowed, matching
    the original best-effort behaviour).
    """
    nid = iter(nids[env.host])
    exp = iter(exps[env.host])
    runfile = iter(runfiles[env.host])
    with shell_env(SCHEMA_PATH=schema_path):
        with settings(warn_only=True,command_timeout=MAX_TIME_PER_EXP):
            cmd = ''
            for r in env.roledefs["servers"]:
                if r == env.host:
                    # Fix: use the builtin next(); iterator.next() is
                    # Python 2 only and breaks under Python 3.
                    nn = next(nid)
                    rfile = next(runfile)
                    args = get_args(fmt,next(exp))
                    if env.shmem:
                        cmd += "(/dev/shm/{}rundb -nid{} {}>> /dev/shm/results{}.out 2>&1 &);".format(rfile,nn,args,nn)
                    else:
                        cmd += "(./{}rundb -nid{} {}>> results{}.out 2>&1 &);".format(rfile,nn,args,nn)
            for r in env.roledefs["clients"]:
                if r == env.host:
                    nn = next(nid)
                    rfile = next(runfile)
                    args = get_args(fmt,next(exp))
                    if env.shmem:
                        cmd += "(/dev/shm/{}runcl -nid{} {}>> /dev/shm/results{}.out 2>&1 &);".format(rfile,nn,args,nn)
                    else:
                        cmd += "(./{}runcl -nid{} {}>> results{}.out 2>&1 &);".format(rfile,nn,args,nn)
            # Trim the trailing "&);" of the last chunk and close it with
            # ")" — preserved exactly from the original command assembly.
            cmd = cmd[:-3]
            cmd += ")"
            try:
                res = run("echo $SCHEMA_PATH")
                if not env.dry_run:
                    run(cmd)
                else:
                    print(cmd)
            except CommandTimeout:
                pass
            except NetworkError:
                pass
    return True
@task
@parallel
def get_results(outfiles,nids):
    """Pull each node's results*.out from this host, then delete the
    remote copies. Returns True only if every transfer/cleanup succeeded."""
    succeeded = True
    # if env.same_node:
    for n in nids[env.host]:
        if env.shmem:
            # NOTE(review): os.path.join discards env.rem_homedir when the
            # second argument is absolute, so this resolves to
            # /dev/shm/results{n}.out — presumably intentional; confirm.
            rem_path=os.path.join(env.rem_homedir,"/dev/shm/results{}.out".format(n))
        else:
            rem_path=os.path.join(env.rem_homedir,"results{}.out".format(n))
        loc_path=os.path.join(env.result_dir, "{}_{}".format(n,outfiles[env.host]))
        with settings(warn_only=True):
            if not env.dry_run:
                res1 = get(remote_path=rem_path, local_path=loc_path)
                succeeded = succeeded and res1.succeeded
    with settings(warn_only=True):
        if not env.dry_run:
            if env.shmem:
                res2 = run("rm -f /dev/shm/results*.out")
            else:
                res2 = run("rm -f results*.out")
            succeeded = succeeded and res2.succeeded
    # else:
    # nid = env.hosts.index(env.host)
    # rem_path=os.path.join(env.rem_homedir,"results.out")
    # loc_path=os.path.join(env.result_dir, outfiles[env.host])
    # with settings(warn_only=True):
    # if not env.dry_run:
    # res1 = get(remote_path=rem_path, local_path=loc_path)
    # res2 = run("rm -f results.out")
    # succeeded = res1.succeeded and res2.succeeded
    return succeeded
@task
@hosts('localhost')
def write_config(cfgs):
    """Rewrite config.h, replacing the value of every #define named in cfgs.

    Lines whose macro is not in cfgs are copied through unchanged.
    """
    dbx_cfg = os.path.join(env.local_path,"config.h")
    # Fix: use context managers so the file handles are always closed
    # (the original left the read handle open).
    with open(dbx_cfg,'r') as f:
        lines = f.readlines()
    with open(dbx_cfg,'w') as f_cfg:
        for line in lines:
            found_cfg = False
            for c in cfgs:
                # Match "#define NAME" followed by a tab or a space, so
                # NAME does not match as a prefix of a longer macro.
                found_cfg = re.search("#define "+c + "\t",line) or re.search("#define "+c + " ",line)
                if found_cfg:
                    f_cfg.write("#define " + c + " " + str(cfgs[c]) + "\n")
                    break
            if not found_cfg:
                f_cfg.write(line)
@task
@hosts('localhost')
def write_ifconfig(roles,exp,rfile):
    """Write ifconfig.txt (servers first, then clients) and build the
    host -> node-id / experiment / runfile maps used by deploy()."""
    with color():
        puts("writing roles to the ifconfig file:",show_prefix=True)
        puts(pprint.pformat(roles,depth=3),show_prefix=False)
    nids = {}
    exps = {}
    rfiles = {}
    nid = 0
    print(roles)
    with open("ifconfig.txt",'w') as f:
        # Node ids are assigned in file order: all servers, then clients.
        for server in roles['servers']:
            f.write(server + "\n")
            if server not in nids:
                nids[server] = [nid]
                exps[server] = [exp]
                rfiles[server] = [rfile]
            else:
                # Same host hosts several nodes (same_node/cram modes).
                nids[server].append(nid)
                exps[server].append(exp)
                rfiles[server].append(rfile)
            nid += 1
        for client in roles['clients']:
            f.write(client + "\n")
            if client not in nids:
                nids[client] = [nid]
                exps[client] = [exp]
                rfiles[client] = [rfile]
            else:
                nids[client].append(nid)
                exps[client].append(exp)
                rfiles[client].append(rfile)
            nid += 1
        # if "sequencer" in roles:
        # assert CC_ALG == "CALVIN"
        # sequencer = roles['sequencer'][0]
        # f.write(sequencer + "\n")
        # nids[sequencer] = [nid]
        # exps[sequencer] = [exp]
        # nid += 1
    return nids,exps,rfiles
@task
@hosts('localhost')
def assign_roles(server_cnt,client_cnt,append=False):
    """Assign env.hosts to server/client roles.

    Honours env.same_node (everything on one host), env.cram (pack the
    processes onto roughly 1/8th of the nodes) and env.overlap (clients
    reuse the server hosts).  Updates env.roledefs (extending when
    append=True) and returns the newly-assigned roles dict.
    """
    if env.same_node:
        servers=[env.hosts[0]] * server_cnt
        clients=[env.hosts[0]] * client_cnt
    elif env.cram:
        # Fix: floor division keeps ncnt an int under Python 3 (true
        # division would hand range()/% a float); unchanged on Python 2.
        ncnt = max(max(server_cnt,client_cnt) // 8,1)
        servers = []
        clients = []
        for r in range(server_cnt):
            servers.append(env.hosts[r%ncnt])
        for r in range(client_cnt):
            clients.append(env.hosts[r%ncnt])
    else:
        servers=env.hosts[0:server_cnt]
        if env.overlap:
            clients=env.hosts[0:client_cnt]
        else:
            clients=env.hosts[server_cnt:server_cnt+client_cnt]
    new_roles = {}
    # Initialise roledefs on first use.
    if env.roledefs is None or len(env.roledefs) == 0:
        env.roledefs={}
        env.roledefs['clients']=[]
        env.roledefs['servers']=[]
        env.roledefs['sequencer']=[]
    if append:
        env.roledefs['clients'].extend(clients)
        env.roledefs['servers'].extend(servers)
    else:
        env.roledefs['clients']=clients
        env.roledefs['servers']=servers
    new_roles['clients']=clients
    new_roles['servers']=servers
    with color():
        puts("Assigned the following roles:",show_prefix=True)
        puts(pprint.pformat(new_roles,depth=3) + "\n",show_prefix=False)
        puts("Updated env roles:",show_prefix=True)
        puts(pprint.pformat(env.roledefs,depth=3) + "\n",show_prefix=False)
    return new_roles
def get_good_hosts():
    """Return env.hosts minus any host that fails the ping task."""
    # good_hosts = []
    set_hosts()
    good_hosts = env.hosts
    # Find and skip bad hosts (ping returns 0 on success).
    ping_results = execute(ping)
    for host in ping_results:
        if ping_results[host] == 0:
            # good_hosts.append(host)
            continue
        else:
            with color("warn"):
                puts("Skipping non-responsive host {}".format(host),show_prefix=True)
            good_hosts.remove(host)
    return good_hosts
@task
@hosts('localhost')
def compile_binary(fmt,e):
    """Write config.h for experiment `e`, compile, stash the binaries under
    binaries/ with an experiment-specific prefix, and push them to hosts."""
    ecfgs = get_cfgs(fmt,e)
    cfgs = dict(configs)
    # Drop experiment entries that are flags rather than config params.
    for c in dict(ecfgs):
        if c not in CONFIG_PARAMS and c in FLAG:
            del ecfgs[c]
    cfgs.update(ecfgs)
    # Cluster-specific overrides.
    # if env.remote and not env.same_node:
    if env.cluster == "ec2":
        cfgs["ENVIRONMENT_EC2"]="true"
    else:
        cfgs["ENVIRONMENT_EC2"]="false"
    if env.cluster == "istc":
        cfgs["CORE_CNT"]=64
    else:
        cfgs["CORE_CNT"]=8
    if env.remote:
        cfgs["TPORT_TYPE"]="TCP"
        if env.shmem:
            cfgs["SHMEM_ENV"]="true"
        else:
            cfgs["SHMEM_ENV"]="false"
    execute(write_config,cfgs)
    execute(compile)
    # output_f = get_outfile_name(cfgs,fmt,env.hosts)
    output_f = get_execfile_name(cfgs,fmt,env.hosts)
    local("cp rundb binaries/{}rundb".format(output_f))
    local("cp runcl binaries/{}runcl".format(output_f))
    # local("cp runsq binaries/{}runsq".format(output_f))
    local("cp config.h binaries/{}cfg".format(output_f))
    if EXECUTE_EXPS:
        cmd = "mkdir -p {}".format(env.result_dir)
        local(cmd)
        set_hosts() #????
        execute(copy_binaries,output_f)
        #cmd = "cp config.h {}.cfg".format(os.path.join(env.result_dir,output_f))
        #local(cmd)
@task
@hosts('localhost')
def compile_binaries(exps):
    """Reset the binaries/ directory for an experiment set.

    NOTE(review): the per-experiment compile loop is commented out, so this
    currently only clears binaries/ and resolves the experiment map.
    """
    local("mkdir -p binaries")
    local("rm -rf binaries/*")
    fmt,experiments = experiment_map[exps]()
    # for e in experiments:
    # execute(compile_binary,fmt,e)
@task
@hosts('localhost')
def check_binaries(exps):
    """Ensure a rundb/runcl/cfg trio exists in binaries/ for every
    experiment in the set, compiling any that are missing."""
    # if not os.path.isdir("binaries"):
    # execute(compile_binaries,exps)
    # return
    # if len(glob.glob("binaries/*")) == 0:
    # execute(compile_binaries,exps)
    # return
    if not os.path.isdir("binaries") or len(glob.glob("binaries/*")) == 0:
        local("mkdir -p binaries")
        local("rm -rf binaries/*")
    fmt,experiments = experiment_map[exps]()
    for e in experiments:
        cfgs = get_cfgs(fmt,e)
        # Mirror the overrides applied in compile_binary so the expected
        # output filename matches what compilation would produce.
        # if env.remote and not env.same_node:
        if env.cluster == "ec2":
            cfgs["ENVIRONMENT_EC2"]="true"
        else:
            cfgs["ENVIRONMENT_EC2"]="false"
        if env.cluster == "istc":
            cfgs["CORE_CNT"]=64
        else:
            cfgs["CORE_CNT"]=8
        if env.remote:
            cfgs["TPORT_TYPE"]="TCP"
            if env.shmem:
                cfgs["SHMEM_ENV"]="true"
            else:
                cfgs["SHMEM_ENV"]="false"
        # output_f = get_outfile_name(cfgs,fmt,env.hosts)
        output_f = get_execfile_name(cfgs,fmt,env.hosts)
        executables = glob.glob("{}*".format(os.path.join("binaries",output_f)))
        has_rundb,has_runcl,has_config=False,False,False
        # has_rundb,has_runcl,has_runsq,has_config=False,False,False,False
        for executable in executables:
            if executable.endswith("rundb"):
                has_rundb = True
            elif executable.endswith("runcl"):
                has_runcl = True
            # elif executable.endswith("runsq"):
            # has_runsq = True
            elif executable.endswith("cfg"):
                has_config = True
        # if not has_rundb or not has_runcl or not has_runsq or not has_config:
        if not has_rundb or not has_runcl or not has_config:
            execute(compile_binary,fmt,e)
@task
@hosts(['localhost'])
def run_exp_old(exps,network_test=False,delay=''):
    """Legacy experiment driver: batch experiments onto available hosts,
    deploy them, and collect results.

    exps         -- key into experiment_map selecting the experiment set.
    network_test -- when True, skips host health checks and schema copying.
    delay        -- optional network delay to inject around each deployment.
    """
    if env.shmem:
        schema_path = "/dev/shm/"
    else:
        schema_path = "{}/".format(env.rem_homedir)
    good_hosts = []
    if not network_test and EXECUTE_EXPS:
        good_hosts = get_good_hosts()
        with color():
            puts("good host list =\n{}".format(pprint.pformat(good_hosts,depth=3)),show_prefix=True)
        execute(copy_schema)
    fmt,experiments = experiment_map[exps]()
    batch_size = 0
    nids = {}
    outfiles = {}
    exps = {}
    runfiles = {}
    for e in experiments:
        print(e)
        cfgs = get_cfgs(fmt,e)
        output_fbase = get_outfile_name(cfgs,fmt,env.hosts)
        output_exec_fname = get_execfile_name(cfgs,fmt,env.hosts)
        output_f = output_fbase + STRNOW
        last_exp = experiments.index(e) == len(experiments) - 1
        skip_exp = False
        # Check whether experiment has been already been run in this batch
        if SKIP:
            if len(glob.glob('{}*{}*.out'.format(env.result_dir,output_fbase))) > 0:
                with color("warn"):
                    puts("experiment exists in results folder... skipping",show_prefix=True)
                # The last experiment still has to flush any pending batch below.
                if last_exp:
                    skip_exp = True
                else:
                    continue
        global CC_ALG
        CC_ALG = cfgs["CC_ALG"]
        if EXECUTE_EXPS:
            cfg_srcpath = "{}cfg".format(os.path.join("binaries",output_exec_fname))
            cfg_destpath = "{}.cfg".format(os.path.join(env.result_dir,output_exec_fname+STRNOW))
            local("cp {} {}".format(cfg_srcpath,cfg_destpath))
            nnodes = cfgs["NODE_CNT"]
            nclnodes = cfgs["CLIENT_NODE_CNT"]
            try:
                ntotal = nnodes + nclnodes
            except TypeError:
                # CLIENT_NODE_CNT may name another config key (indirection).
                nclnodes = cfgs[cfgs["CLIENT_NODE_CNT"]]
                ntotal = nnodes + nclnodes
#            if CC_ALG == 'CALVIN':
#                ntotal += 1
            if env.same_node:
                ntotal = 1
            if env.overlap:
                ntotal = max(nnodes,nclnodes)
            if env.cram:
                ntotal = max(max(nnodes,nclnodes)/8,1)
            if env.remote:
                if not network_test:
                    set_hosts(good_hosts)
#                if ntotal > len(env.hosts):
#                    msg = "Not enough nodes to run experiment!\n"
#                    msg += "\tRequired nodes: {}, ".format(ntotal)
#                    msg += "Actual nodes: {}".format(len(env.hosts))
#                    with color():
#                        puts(msg,show_prefix=True)
#                    cmd = "rm -f config.h {}".format(cfg_destpath)
#                    local(cmd)
#                    continue
                if not skip_exp:
                    if env.batch_mode:
                        # If full, execute all exps in batch and reset everything
                        full = (batch_size + ntotal) > len(env.hosts)
                        if full:
                            if env.cluster != 'istc' and not env.dry_run:
                                # Sync clocks before each experiment
                                execute(sync_clocks)
                            with color():
                                puts("Batch is full, deploying batch...{}/{}".format(batch_size,len(good_hosts)),show_prefix=True)
                            with color("debug"):
                                puts(pprint.pformat(outfiles,depth=3),show_prefix=False)
                            set_hosts(env.hosts[:batch_size])
                            with color():
                                puts("Starttime: {}".format(datetime.datetime.now().strftime("%H:%M:%S")),show_prefix=True)
                            execute(deploy,schema_path,nids,exps,runfiles,fmt)
                            with color():
                                puts("Endtime: {}".format(datetime.datetime.now().strftime("%H:%M:%S")),show_prefix=True)
                            execute(get_results,outfiles,nids)
                            if not env.dry_run:
                                good_hosts = get_good_hosts()
                            env.roledefs = None
                            batch_size = 0
                            nids = {}
                            exps = {}
                            runfiles = {}
                            outfiles = {}
                            set_hosts(good_hosts)
                        else:
                            with color():
                                puts("Adding experiment to current batch: {}".format(output_f), show_prefix=True)
                        machines = env.hosts[batch_size : batch_size + ntotal]
                        batch_size += ntotal
                    else:
                        machines = env.hosts[:ntotal]
                    set_hosts(machines)
                    new_roles=execute(assign_roles,nnodes,nclnodes,append=env.batch_mode)[env.host]
                    new_nids,new_exps,new_runfiles = execute(write_ifconfig,new_roles,e,output_exec_fname)[env.host]
                    nids.update(new_nids)
                    exps.update(new_exps)
                    runfiles.update(new_runfiles)
                    for host,nid in new_nids.iteritems():
                        outfiles[host] = "{}.out".format(output_f)
#                    if env.same_node:
#                        outfiles[host] = "{}.out".format(output_f)
#                    else:
#                        outfiles[host] = "{}_{}.out".format(nid[0],output_f)
                    print(nids)
            if cfgs["WORKLOAD"] == "TPCC":
                schema = "benchmarks/TPCC_full_schema.txt"
                #schema = "benchmarks/TPCC_short_schema.txt"
            elif cfgs["WORKLOAD"] == "YCSB":
                schema = "benchmarks/YCSB_schema.txt"
            elif cfgs["WORKLOAD"] == "PPS":
                schema = "benchmarks/PPS_schema.txt"
            # NOTE: copy_files will fail if any (possibly) stray processes
            # are still running one of the executables. Setting the 'kill'
            # flag in environment.py to true to kill these processes. This
            # is useful for running real experiments but dangerous when both
            # of us are debugging...
#            execute(copy_files,schema,output_exec_fname)
            execute(copy_ifconfig)
            # NOTE(review): 'and' binds tighter than 'or' here, so this is
            # (not batch_mode) or (last_exp and exps non-empty) - confirm intended.
            if not env.batch_mode or last_exp and len(exps) > 0:
                if env.batch_mode:
                    set_hosts(good_hosts[:batch_size])
                    puts("Deploying last batch...{}/{}".format(batch_size,len(good_hosts)),show_prefix=True)
                else:
                    print("Deploying: {}".format(output_f))
                if env.cluster != 'istc':
                    # Sync clocks before each experiment
                    print("Syncing Clocks...")
                    execute(sync_clocks)
                if delay != '':
                    execute(set_delay,delay=delay)
                with color():
                    puts("Starttime: {}".format(datetime.datetime.now().strftime("%H:%M:%S")),show_prefix=True)
                execute(deploy,schema_path,nids,exps,runfiles,fmt)
                with color():
                    puts("Endtime: {}".format(datetime.datetime.now().strftime("%H:%M:%S")),show_prefix=True)
                if delay != '':
                    execute(reset_delay)
                execute(get_results,outfiles,nids)
                if not env.dry_run:
                    good_hosts = get_good_hosts()
                    set_hosts(good_hosts)
                batch_size = 0
                nids = {}
                exps = {}
                outfiles = {}
                env.roledefs = None
        else:
            # Local (non-remote) run: launch every process directly.
            pids = []
            print("Deploying: {}".format(output_f))
            for n in range(ntotal):
                if n < nnodes:
                    cmd = "./rundb -nid{}".format(n)
                elif n < nnodes+nclnodes:
                    cmd = "./runcl -nid{}".format(n)
#                elif n == nnodes+nclnodes:
#                    assert(CC_ALG == 'CALVIN')
#                    cmd = "./runsq -nid{}".format(n)
                else:
                    # BUG FIX: was `assert(false)` - `false` is undefined in
                    # Python and would raise NameError instead of AssertionError.
                    assert False
                print(cmd)
                cmd = shlex.split(cmd)
                ofile_n = "{}{}_{}.out".format(env.result_dir,n,output_f)
                ofile = open(ofile_n,'w')
                p = subprocess.Popen(cmd,stdout=ofile,stderr=ofile)
                pids.insert(0,p)
            for n in range(ntotal):
                pids[n].wait()
def succeeded(outcomes):
    """Return True iff every host in the execute() outcome map reported success.

    An empty map counts as success, matching the original loop's behavior.
    """
    return all(outcomes.values())
@task
@parallel
def ping():
    """Ping the current host once (8s deadline) and return the shell return code."""
    with settings(warn_only=True):
        res=local("ping -w8 -c1 {}".format(env.host),capture=True)
        # warn_only means a failed ping still yields a result object,
        # never None (idiom fix: identity comparison with None).
        assert res is not None
    return res.return_code
@task
@hosts('localhost')
def ec2_run_instances(
        dry_run="False",
        image_id="ami-d05e75b8",
        count="12",
        security_group="dist-sg",
        instance_type="m4.2xlarge",
        key_name="devenv-key",
        ):
    """Launch on-demand EC2 instances through the aws CLI."""
    parts = ["aws ec2 run-instances "]
    if dry_run == "True":
        parts.append("--dry-run ")
    # Assemble each --option value pair in a fixed order.
    for key, value in (("image-id", image_id),
                       ("count", count),
                       ("security-groups", security_group),
                       ("instance-type", instance_type),
                       ("key-name", key_name)):
        parts.append("--{} {} ".format(key, value))
    local("".join(parts))
@task
@hosts('localhost')
def ec2_run_spot_instances(
        dry_run="False",
        image_id="ami-d05e75b8",
        price="0.10",
        count="12",
        security_group="dist-sg",
        instance_type="m4.2xlarge",
        key_name="devenv-key",
        ):
    """Request EC2 spot instances through the aws CLI.

    AMI, instance type, security group and key pair are taken from
    ec2_specification.json rather than the command line; the matching
    parameters are kept only for interface compatibility.
    """
    parts = ["aws ec2 request-spot-instances "]
    if dry_run == "True":
        parts.append("--dry-run ")
    for key, value in (("spot-price", price),
                       ("instance-count", count),
                       ("launch-specification", "file://ec2_specification.json")):
        parts.append("--{} {} ".format(key, value))
    local("".join(parts))
@task
@hosts('localhost')
def ec2_get_status():
    """Exit(1) unless every EC2 instance reports ok system and instance status."""
    cmd = "aws ec2 describe-instance-status --query 'InstanceStatuses[*].{InstanceId:InstanceId,SystemStatus:SystemStatus.Status,InstanceStatus:InstanceStatus.Status}'"
    res = local(cmd,capture=True)
    for status in ast.literal_eval(res):
        instance = status['InstanceId']
        # Each failure path prints a diagnostic and aborts immediately.
        if status['SystemStatus'] != "ok":
            print("{}: ERROR: bad system status {}".format(instance,status['SystemStatus']))
            sys.exit(1)
        if status['InstanceStatus'] == "initializing":
            print("{}: ERROR: still initializing...".format(instance))
            sys.exit(1)
        if status['InstanceStatus'] != "ok":
            print("{}: ERROR: bad instance status {}".format(instance,status['InstanceStatus']))
            sys.exit(1)
    print("READY!")
    return 0
@task
@hosts('localhost')
def ec2_write_ifconfig():
    """Write the public IP of every live EC2 instance to ec2_ifconfig.txt."""
    cmd = "aws ec2 describe-instances --query 'Reservations[*].Instances[*].{ID:InstanceId,IP:PublicIpAddress,TYPE:InstanceType}'"
    res = local(cmd,capture=True)
    # Terminated VMs linger for ~1 hour and report a null IP; turn null into
    # an empty string so the literal parses, then drop those entries below.
    ip_info = ast.literal_eval(res.replace("null","\"\""))
    with open("ec2_ifconfig.txt","w") as f:
        f.writelines(ip["IP"] + "\n"
                     for entry in ip_info
                     for ip in entry
                     if ip["IP"] != "")
@task
@hosts('localhost')
def ec2_terminate_instances():
    """Terminate every EC2 instance visible to the aws CLI and print the result."""
    cmd = "aws ec2 describe-instances --query 'Reservations[*].Instances[*].InstanceId'"
    res = local(cmd,capture=True)
    # Flatten the per-reservation instance-id lists into one list.
    instance_ids = [iid for reservation in ast.literal_eval(res) for iid in reservation]
    cmd = "aws ec2 terminate-instances --instance-ids {}".format(" ".join(instance_ids))
    print(local(cmd,capture=True))
@contextmanager
def color(level="info"):
    """Context manager that colors stdout with the ANSI code for `level`.

    Unknown levels fall back to "info". The color is reset on exit, now also
    when the managed block raises (previously the reset was skipped).
    """
    if level not in COLORS:
        level = "info"
    print("\033[%sm" % COLORS[level],end="")
    try:
        yield
    finally:
        print("\033[0m",end="")
@task
@hosts(['localhost'])
def run_exp(exps,network_test=False,delay=''):
    """Experiment driver: greedily pack experiments into host-sized rounds,
    deploy each round, and collect results.

    exps         -- key into experiment_map selecting the experiment set.
    network_test -- when True, skips host health checks.
    delay        -- kept for interface parity with run_exp_old (unused here).
    """
    if env.shmem:
        schema_path = "/dev/shm/"
    else:
        schema_path = "{}/".format(env.rem_homedir)
    good_hosts = []
    if not network_test and EXECUTE_EXPS:
        good_hosts = get_good_hosts()
        with color():
            puts("good host list =\n{}".format(pprint.pformat(good_hosts,depth=3)),show_prefix=True)
    fmt,experiments = experiment_map[exps]()
    batch_size = 0
    nids = {}
    outfiles = {}
    exps = {}
    # BUG FIX: runfiles was never initialized but is passed to deploy() below,
    # which raised a NameError on the first remote deployment.
    runfiles = {}
    if SKIP:
        for e in experiments[:]:
            cfgs = get_cfgs(fmt,e)
            output_fbase = get_outfile_name(cfgs,fmt,env.hosts)
            if len(glob.glob('{}*{}*.out'.format(env.result_dir,output_fbase))) > 0:
                with color("warn"):
                    puts("experiment exists in results folder... skipping",show_prefix=True)
                experiments.remove(e)
    # Largest experiments first so rounds pack tightly.
    experiments.sort(key=lambda x: x[fmt.index("NODE_CNT")] + x[fmt.index("CLIENT_NODE_CNT")],reverse=True)
    # Fill experiment pool
    while len(experiments) > 0 :
        round_exps = []
        batch_total = 0
        for e in experiments[:]:
            cfgs = get_cfgs(fmt,e)
            nnodes = cfgs["NODE_CNT"]
            nclnodes = cfgs["CLIENT_NODE_CNT"]
            ccalg = cfgs["CC_ALG"]
            ntotal = cfgs["NODE_CNT"] + cfgs["CLIENT_NODE_CNT"]
#            if ccalg == 'CALVIN':
#                ntotal += 1
            if env.same_node:
                ntotal = 1
            if env.overlap:
                ntotal = max(nnodes,nclnodes)
            if env.cram:
                ntotal = max(max(nnodes,nclnodes)/8,1)
            if ntotal > len(env.hosts):
                msg = "Not enough nodes to run experiment!\n"
                msg += "\tRequired nodes: {}, ".format(ntotal)
                msg += "Actual nodes: {}".format(len(env.hosts))
                with color():
                    puts(msg,show_prefix=True)
                experiments.remove(e)
                continue
            if (batch_total + ntotal) > len(env.hosts):
                continue
            batch_total += ntotal
            round_exps.append(e)
            experiments.remove(e)
        if not EXECUTE_EXPS: continue
        batch_size = 0
        for e in round_exps:
            set_hosts(good_hosts)
            cfgs = get_cfgs(fmt,e)
            global CC_ALG
            nnodes = cfgs["NODE_CNT"]
            nclnodes = cfgs["CLIENT_NODE_CNT"]
            CC_ALG = cfgs["CC_ALG"]
            ntotal = cfgs["NODE_CNT"] + cfgs["CLIENT_NODE_CNT"]
#            if ccalg == 'CALVIN':
#                ntotal += 1
            if env.same_node:
                ntotal = 1
            if env.overlap:
                ntotal = max(nnodes,nclnodes)
            if env.cram:
                ntotal = max(max(nnodes,nclnodes)/8,1)
            output_fbase = get_outfile_name(cfgs,fmt,env.hosts)
            output_exec_fname = get_execfile_name(cfgs,fmt,env.hosts)
            output_f = output_fbase + STRNOW
            cfg_srcpath = "{}cfg".format(os.path.join("binaries",output_exec_fname))
            cfg_destpath = "{}.cfg".format(os.path.join(env.result_dir,output_exec_fname+STRNOW))
            local("cp {} {}".format(cfg_srcpath,cfg_destpath))
            with color():
                puts("Adding experiment to current batch: {}".format(output_f), show_prefix=True)
            machines = env.hosts[batch_size : batch_size + ntotal]
            batch_size += ntotal
            set_hosts(machines)
            new_roles=execute(assign_roles,nnodes,nclnodes,append=env.batch_mode)[env.host]
            # NOTE(review): run_exp_old passes output_exec_fname and unpacks a
            # third runfiles value here - confirm write_ifconfig's signature.
            new_nids,new_exps = execute(write_ifconfig,new_roles,e)[env.host]
            nids.update(new_nids)
            exps.update(new_exps)
            for host,nid in new_nids.iteritems():
                outfiles[host] = "{}.out".format(output_f)
            if cfgs["WORKLOAD"] == "TPCC":
                schema = "benchmarks/TPCC_full_schema.txt"
                #schema = "benchmarks/TPCC_short_schema.txt"
            elif cfgs["WORKLOAD"] == "YCSB":
                schema = "benchmarks/YCSB_schema.txt"
            elif cfgs["WORKLOAD"] == "PPS":
                schema = "benchmarks/PPS_schema.txt"
            # NOTE: copy_files will fail if any (possibly) stray processes
            # are still running one of the executables. Setting the 'kill'
            # flag in environment.py to true to kill these processes. This
            # is useful for running real experiments but dangerous when both
            # of us are debugging...
#            execute(copy_files,schema,output_exec_fname)
            execute(copy_ifconfig)
        if env.remote:
            set_hosts(good_hosts[:batch_size])
            if env.cluster != 'istc' and not env.dry_run:
                # Sync clocks before each experiment
                execute(sync_clocks)
            with color():
                puts("Batch is full, deploying batch...{}/{}".format(batch_size,len(good_hosts)),show_prefix=True)
            with color("debug"):
                puts(pprint.pformat(outfiles,depth=3),show_prefix=False)
            with color():
                puts("Starttime: {}".format(datetime.datetime.now().strftime("%H:%M:%S")),show_prefix=True)
            execute(deploy,schema_path,nids,exps,runfiles,fmt)
            with color():
                puts("Endtime: {}".format(datetime.datetime.now().strftime("%H:%M:%S")),show_prefix=True)
            execute(get_results,outfiles,nids)
            good_hosts = get_good_hosts()
            # Reset all per-round bookkeeping before the next round.
            batch_size = 0
            nids = {}
            exps = {}
            runfiles = {}
            outfiles = {}
            set_hosts(good_hosts)
            env.roledefs = None
|
#!/usr/bin/python
def factorial(n):
    """Return n! computed iteratively.

    Fixes the original self-referencing lambda, which recursed forever for
    n == 0 (its base case was n == 1), and the Python 2-only print statements.

    Raises ValueError for negative n.
    """
    if n < 0:
        raise ValueError("factorial() not defined for negative values")
    result = 1
    for k in range(2, n + 1):
        result *= k
    return result


if __name__ == '__main__':
    # Same "N!= value" output as the original script, Python 3 compatible.
    print('3!=', factorial(3))
    print('9!=', factorial(9))
    print('15!=', factorial(15))
|
import heapq
def merge_boxs(prime_box_heap, version_map, regular_box):
    """Merge prime and regular boxes into one ordered list.

    Prime boxes are drained from the heap smallest version first; each
    version's ids are stored sorted descending, so popping the tail yields
    the smallest id first. Regular boxes are then drained from the tail.
    Note: all three arguments are emptied in place, as before.
    """
    merged = []
    while prime_box_heap:
        version = heapq.heappop(prime_box_heap)
        merged.append(version_map[version].pop() + ' ' + version)
    # Tail-popping the reversed regular list restores original order.
    merged.extend(regular_box[::-1])
    del regular_box[:]
    return merged
def orderedJunctionBoxes(numberOfBoxes, boxList):
    """Reorder junction boxes: prime boxes (non-numeric contents) first,
    sorted by version then id, followed by regular boxes in original order.

    numberOfBoxes is unused; it is kept for interface compatibility.
    """
    prime_box_heap = []  # min-heap of prime-box version strings
    version_map = {}     # version -> ids kept sorted descending for tail pops
    regular_box = []     # regular boxes, reversed so .pop() restores order
    for box in reversed(boxList):
        tokens = box.split()
        if tokens[1].isdigit():
            # Regular box: second token is numeric.
            regular_box.append(box)
        else:
            box_id, version = tokens[0], " ".join(tokens[1:])
            heapq.heappush(prime_box_heap, version)
            version_map[version] = sorted(version_map.get(version, []) + [box_id],
                                          reverse=True)
    return merge_boxs(prime_box_heap, version_map, regular_box)
|
# Demo of Python's four core collection types and their common operations.
# List: mutable, allows duplicates, preserves insertion order.
List = list(("a", "bcd", 1, 2, 20, 1.5, 2, 10.0)) # ["a","bcd",1,2,20,1.5,2,10.0]
# Set: mutable, no duplicates; iteration order is arbitrary (sets are unordered).
Set = set(("a", "bcd", 1, 2, 20, 1.5, 2, 10.0)) # {"a","bcd",1,2,20,1.5,2,10.0}
Set2 = set((1,2,"bcd"))
# Tuple: immutable, supports indexing.
Tuple = tuple(("a", "bcd", 1, 2, 20, 1.5, 2, 10.0)) # ("a","bcd",1,2,20,1.5,2,10.0)
# Dictionary: mutable, accessed by key.
dictionary = {"name": "rposam", "age": 33, "gender": "Male"} # {"name":"rposam","age":33,"gender":"Male"}
if __name__ == "__main__":
    print(List.index("bcd")) # Get index of a value that exists in the list
    List.append(564654) # Add an item to the end of the list
    print(List.count(2)) # Count occurrences of an item in the list
    List.insert(2,"ram posam") # Add item to the list at an index
    List.remove("bcd") # Remove item from the list by value
    List.pop(0) # Remove item from the list by index
    print(List)
    Set.pop() # Removes an arbitrary element (sets have no positions)
    Set.remove(1.5)
    Set.add("ram posam") # Add a single element
    union = Set.union(Set2)
    intersection=Set.intersection(Set2)
    print(union) # union of two sets
    print(intersection) # intersection of two sets
    print(Set)
    print(Tuple.index(20)) # index of a value in the tuple
    print(Tuple)
    print(dictionary.keys()) # Display all keys of the dictionary
    print(dictionary.values()) # Display all values of the dictionary
    print(dictionary.items()) # Display all items of the dictionary
    print(dictionary)
    r = [range(0,20,3)] # NOTE(review): one-element list holding a range object; list(range(0,20,3)) was probably intended - confirm
    print(r)
|
# TensorFlow 1.x demo: compare sigmoid vs ReLU one-unit models trained to
# output 0.75, tracking loss and mean activation per generation.
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
sess=tf.Session()
# seed
tf.set_random_seed(5)
np.random.seed(42)
batch_size = 50
# One weight/bias pair per model (1x1 each).
a1 = tf.Variable( tf.random_normal(shape=[1,1]))
b1 = tf.Variable( tf.random_normal(shape=[1,1]))
a2 = tf.Variable( tf.random_normal(shape=[1,1]))
b2 = tf.Variable( tf.random_normal(shape=[1,1]))
# 500 samples drawn from N(2, 0.1).
x = np.random.normal(2,0.1,500)
x_data=tf.placeholder(shape=[None,1],dtype=tf.float32)
# two models
sigmoid_activation = tf.sigmoid( tf.add( tf.matmul(x_data,a1), b1))
relu_activation = tf.nn.relu( tf.add( tf.matmul(x_data,a2), b2))
# loss functions: mean squared distance from the 0.75 target
loss1 = tf.reduce_mean(tf.square(tf.subtract(sigmoid_activation,0.75)))
loss2 = tf.reduce_mean(tf.square(tf.subtract(relu_activation,0.75)))
# declare optimization algoritm and initialize variables
my_opt = tf.train.GradientDescentOptimizer(0.01)
train_step_sigmoid = my_opt.minimize(loss1)
train_step_relu = my_opt.minimize(loss2)
init = tf.global_variables_initializer()
sess.run(init)
# loop: 750 generations of mini-batch SGD on both models
loss_vec_sigmoid = []
loss_vec_relu = []
activation_sigmoid = []
activation_relu = []
for i in range(750):
    rand_indices = np.random.choice( len(x), size = batch_size )
    # Column vector (batch_size, 1) to match the placeholder shape.
    x_vals = np.transpose( [ x[rand_indices] ] )
    fd = {x_data: x_vals}
    sess.run( train_step_sigmoid, fd)
    sess.run( train_step_relu, fd)
    # store losses
    temp_loss_sigmoid = sess.run( loss1, fd )
    temp_loss_relu = sess.run( loss2, fd )
    loss_vec_sigmoid.append(temp_loss_sigmoid)
    loss_vec_relu.append(temp_loss_relu)
    # store activation values
    temp_sigmoid_activation = np.mean( sess.run( sigmoid_activation, fd ) )
    temp_relu_activation = np.mean( sess.run( relu_activation, fd ) )
    activation_sigmoid.append(temp_sigmoid_activation)
    activation_relu.append(temp_relu_activation)
# plot mean activations, then losses, per generation
plt.plot(activation_sigmoid,'k-',label='Sigmoid Activation')
plt.plot(activation_relu,'r--',label='Relu Activation')
plt.ylim([0.0,1.0])
plt.title('Activation Outputs')
plt.xlabel('Generation')
plt.ylabel('Outputs')
plt.legend(loc='upper right')
plt.show()
plt.plot(loss_vec_sigmoid,'k-',label='Sigmoid Loss')
plt.plot(loss_vec_relu,'r--',label='Relu Loss')
plt.ylim([0.0,1.0])
plt.title('Loss per Generation')
plt.xlabel('Generation')
plt.ylabel('Loss')
plt.legend(loc='upper right')
plt.show()
|
import sys, os, re
# Collect the filenames announced by "Processing <name>" lines in ./log.
file_list = []
marker = 'Processing'
with open('./log') as f:
    for line in f:
        if line.startswith(marker):
            # Skip the marker plus the following separator char; drop the newline.
            filename = line[len(marker) + 1:-1]
            print(filename)
            file_list.append(filename)
print('Number of files: ',len(file_list))
|
# Creating your own object/datatype
# Class - definition of an abstract data type in a program
# Object - a particular instance of a class definition
# e.g. aName = Name('John', 'Doe')
# Here Name is the class and aName is the object
class Name:
    """A person's name stored as first/middle/last parts."""

    def __init__(self, first, middle, last):
        # Attribute names mirror the constructor parameters.
        self.first = first
        self.middle = middle
        self.last = last

    def __str__(self):
        # Render as "First Middle Last".
        return " ".join((self.first, self.middle, self.last))

    def initials(self):
        # One leading character from each part, e.g. "AKK".
        return "".join(part[0] for part in (self.first, self.middle, self.last))
# Here we are instantiating the class
# In the class above, self.first is assigned the value 'Ashwani'
# Build a Name and exercise its string form and initials.
var_name = Name('Ashwani', 'Kumar', 'Kammara')
print(var_name)
print(var_name.initials())
|
from django.apps import AppConfig
class AutoriConfig(AppConfig):
    """Django AppConfig for the 'autori' application."""
    name = 'autori'
|
# -*- coding: utf-8
"""
Created on 17:07 27/07/2018
Snakemake de novo transcriptomics and transcript abundance estimation
- Find the fastq files used by the DCC read-alignment workflow
- compute de novo tx with StringTie [doi:10.1038/nprot.2016.095]
- use de novo annotation to compute transcript abundance with salmon [https://doi.org/10.1038/nmeth.4197]
If you use this workflow, please cite
Patro, R., et al. "Salmon provides fast and bias-aware quantification of transcript expression. Nat Meth. 2017; 14 (4): 417–9."
Pertea, Mihaela, et al. "Transcript-level expression analysis of RNA-seq experiments with HISAT, StringTie and Ballgown." Nature protocols 11.9 (2016): 1650.
"""
__author__ = "Thiago Britto Borges"
__copyright__ = "Copyright 2019, Dieterichlab"
__email__ = "Thiago.BrittoBorges@uni-heidelberg.de"
__license__ = "MIT"
from itertools import groupby
import re
def extract_samples_replicates(samples, _pattern=re.compile("^(.+)_(.+)$")):
    """
    Extract pairs of condition and replicate name from sample files

    :param str _pattern: compiled pattern; the default splits on the last
        underscore, matching the {condition}_{replicate} template
    :param list samples: sample names to split
    :return: [tuple of conditions, tuple of replicates]
    :rtype: list
    """
    matched = (re.match(_pattern, sample).groups() for sample in samples)
    return list(zip(*matched))
# StringTie strand flag per library type.
# NOTE(review): the StringTie manual documents --rf for fr-firststrand and
# --fr for fr-secondstrand; this mapping looks inverted - confirm.
strand = {"fr-firststrand": "--fr", "fr-secondstrand": "--rf"}
workdir: config.get("path", ".")
container: "docker://tbrittoborges/stringtie:2.1.5"
# Split "condition_replicate" sample names into parallel tuples.
cond, rep = extract_samples_replicates(config["samples"].keys())
name = config["samples"].keys()
raw_name = config["samples"].values()
sample_path = config["sample_path"]
# condition -> list of (condition, replicate) pairs
d = {k: list(v) for k, v in groupby(sorted(zip(cond, rep)), key=lambda x: x[0])}
cond = set(cond)
include: "symlink.smk"
# Final targets: merged per-condition BAMs, de novo GTFs, and the combined annotation.
rule all:
    input:
        expand("mappings/{name}.bam", name=name),
        expand("stringtie/merged_bam/{group}.bam", group=cond),
        expand("stringtie/stringtie/{group}.gtf", group=cond),
        "stringtie/merged/merged.combined.gtf",
# Merge all replicate BAMs of a condition into one indexed BAM.
rule stringtie_merge_bam:
    input:
        lambda wc: ["mappings/{}_{}.bam".format(*x) for x in d[wc.group]],
    output:
        bam="stringtie/merged_bam/{group}.bam",
        bai="stringtie/merged_bam/{group}.bam.bai",
    threads: 10
    log:
        "logs/stringtie_merge_bam/{group}.log",
    wildcard_constraints:
        group="|".join(cond),
    shadow:
        "shallow"
    shell:
        "samtools merge {output.bam} {input} --threads {threads};samtools index {output.bam} {output.bai} 2> {log} "
# De novo transcript assembly per condition with StringTie.
# NOTE(review): no `threads:` directive here, so {threads} defaults to 1 - confirm intended.
rule stringtie_denovo_transcriptomics:
    input:
        "stringtie/merged_bam/{group}.bam",
    output:
        "stringtie/stringtie/{group}.gtf",
    params:
        strandness=strand.get(config.get("strandness", ""), ""),
        min_junct_coverage=config.get("min_junct_coverage", 3),
        min_isoform_proportion=config.get("min_isoform_proportion", 0.001),
        minimum_read_per_bp_coverage=config.get("minimum_read_per_bp_coverage", 3),
    wildcard_constraints:
        group="|".join(cond),
    log:
        "logs/stringtie_denovo_transcriptomics/{group}.log",
    shadow:
        "shallow"
    shell:
        "stringtie {input} -o {output} -p {threads} {params.strandness} -c {params.minimum_read_per_bp_coverage} -j {params.min_junct_coverage} -f {params.min_isoform_proportion} 2> {log} "
# Combine per-condition GTFs against the reference annotation.
rule gffcompare:
    input:
        expand("stringtie/stringtie/{cond}.gtf", cond=cond),
    output:
        "stringtie/merged/merged.combined.gtf",
    log:
        "logs/gffcompare.log",
    params:
        gtf=config["ref"],
        out="stringtie/merged/merged",
    shadow:
        "shallow"
    shell:
        "gffcompare {input} -r {params.gtf} -R -V -o {params.out} 2>{log}"
|
# Remove exhausted gem types from the dict entirely, so the dict's length
# tells us whether every gem type is currently covered.
# Using a dict for window bookkeeping keeps each step O(1).
def solution(gems):
    """Return the 1-indexed [start, end] of the shortest contiguous window of
    `gems` containing every distinct gem type (the first such window wins).
    """
    window = {gems[0]: 1}       # gem type -> count inside [lo, hi]
    total = len(gems)
    kinds = len(set(gems))      # number of distinct gem types
    best = [0, total - 1]
    lo = hi = 0                 # inclusive two-pointer window bounds
    while hi < total and lo < total:
        if len(window) == kinds:
            # Every type covered: keep a strictly shorter window, then shrink.
            if (best[1] - best[0]) > (hi - lo):
                best = [lo, hi]
            if window[gems[lo]] == 1:
                del window[gems[lo]]        # last of its type leaves the window
            else:
                window[gems[lo]] -= 1
            lo += 1
        else:
            # Missing a type: extend the window to the right.
            hi += 1
            if hi == total:
                break
            window[gems[hi]] = window.get(gems[hi], 0) + 1
    # Convert to the required 1-indexed answer.
    return [best[0] + 1, best[1] + 1]
# Demo runs (Kakao "gems" examples).
# BUG FIX: removed a stray trailing "|" after the last call, which was a
# syntax error (almost certainly an extraction artifact).
print(solution(["DIA", "RUBY", "RUBY", "DIA", "DIA", "EMERALD", "SAPPHIRE", "DIA"]))
print(solution(["AA", "AB", "AC", "AA", "AC"]))
print(solution(["XYZ", "XYZ", "XYZ"]))
print(solution(["ZZZ", "YYY", "NNNN", "YYY", "BBB"]))
import os
import psutil
# Restart Spotify: kill any running Spotify processes, then relaunch it.
for process in psutil.process_iter():
    if "spotify" in process.name().lower():
        try:
            os.system(f"taskkill /F /PID {process.pid}")
        # BUG FIX: narrowed the bare `except:` (which also swallowed
        # KeyboardInterrupt/SystemExit) to Exception.
        except Exception:
            print(f"couldnt kill Process {process.name()} with PID {process.pid}")
# NOTE(review): hard-coded per-user install path - confirm it matches the target machine.
os.system("start C:/Users/Dom/AppData/Roaming/Spotify/spotify.exe")
|
from __init__ import print_msg_box
import time
def maxSubArraySum(a,size):
    """Kadane's algorithm: maximum sum of any contiguous subarray of a[:size]."""
    best = ending_here = a[0]
    for value in a[1:size]:
        # Either extend the running subarray or restart at this element.
        ending_here = max(value, ending_here + value)
        best = max(best, ending_here)
    return best
def kadanes_algorithm(arr,hint=False):
    """Run Kadane's algorithm on arr, print the elapsed time, and return the
    maximum subarray sum. When hint is True, print the explanation first."""
    started = time.time()
    if hint:
        kadanes_algorithm_hint()
    result = maxSubArraySum(arr, len(arr))
    print("Time Taken: ", time.time() - started)
    return result
def kadanes_algorithm_hint():
    """Print an explanatory message box describing Kadane's algorithm."""
    # The message text below is displayed verbatim; keep its indentation intact.
    message="""
    Kadane’s Algorithm
    ------------------------------------------------------------------------------------------
    Purpose : In this problem, we are given an array. Our task is to find out the maximum subarray sum.
    Method : Dynamic Programming
    Time Complexity : O(n)
    Space Complexity : O(1)
    Hint :
    Apply Dynamic Programming on contiguous array to find max contiguous sum.
    Pseudocode:
    Initialize:
        max_so_far = a[0]
        curr_max = a[0]
    Loop(i=1,size)
        (a) curr_max = max(a[i],curr_max + a[i])
        (b) max_so_far = max(max_so_far,curr_max)
    return max_so_far
    Visualization:
        Input: [-2,1,2,-1]
        max_so_far = -2
        curr_max = -2
        for i=1, a[0] = -2
            curr_max = max(1,-2+1) = 1
            max_so_far = max(-2,1) = 1
        for i=2,
            curr_max = max(2,1+2) = 3
            max_so_far = max(1,3) = 3
        for i=3,
            curr_max = max(-1,3-1) = 2
            max_so_far = max(3,2) = 3
        FINAL RESULT = max_so_far = 3
    Learn More:
        - Maximum Subarray Problem - https://en.wikipedia.org/wiki/Maximum_subarray_problem
    """
    print_msg_box(message)
# print(kadanes_algorithm([-2,1,2,-1],True))
|
def powTwo38():
    """Return 2 raised to the 38th power."""
    return 2 ** 38
if __name__ == '__main__':
    print(powTwo38()); # print 2**38 when run as a script
|
import wrcX.core
import wrcX.core.data as d
import wrcX.charts.overall as c
from pandas import merge
from wrcX.core.enrichers import addGroupClassFromCarNo
from wrcX.core.filters import groupClassFilter
def table_stageResult(stagenum,groupClass='',lower=0,upper=10):
    """HTML table of a single stage's results, optionally filtered by group/class.

    Rows [lower:upper] are returned; upper=None means no row limit.
    """
    rows = groupClassFilter(d.df_stage[d.df_stage['stage'] == stagenum], groupClass)
    if upper is None:
        upper = len(rows)
    columns = ['pos', 'carNo', 'driverName', 'time', 'diffPrev', 'diffFirst']
    if groupClass == '':
        # No class filter: show each entry's group/class as an extra column.
        columns.append('groupClass')
        rows = addGroupClassFromCarNo(rows)
    rows = rows[columns][lower:upper]
    # Blank out missing positions instead of rendering 0/NaN.
    rows['pos'] = rows['pos'].fillna(0).astype(int).replace(0, '')
    return rows.set_index('pos').to_html()
def table_overallEndOfStage(stagenum,groupClass='',lower=0,upper=10):
    """HTML table of the overall standings at the end of a stage.

    Rows [lower:upper] are returned; upper=None means no row limit.
    """
    rows = groupClassFilter(d.df_overall[d.df_overall['stage'] == stagenum], groupClass)
    if upper is None:
        upper = len(rows)
    columns = ['pos', 'carNo', 'driverName', 'time', 'diffPrev', 'diffFirst']
    if groupClass == '':
        # No class filter: show each entry's group/class as an extra column.
        columns.append('groupClass')
        rows = addGroupClassFromCarNo(rows)
    rows = rows[columns][lower:upper]
    # Blank out missing positions instead of rendering 0/NaN.
    rows['pos'] = rows['pos'].fillna(0).astype(int).replace(0, '')
    return rows.set_index('pos').to_html()
def table_stageSectorDelta(stagenum,groupClass='',ddf_sectors=None):
    """HTML table of per-sector times for a stage, sorted by stage time.

    Non-starters/non-finishers (null 'start') are split into a separate table
    appended after the main one. ddf_sectors defaults to the module-level
    per-stage sector-time frames.
    """
    if ddf_sectors is None:
        ddf_sectors=d.df_splitSectorTimes_all[stagenum-int(wrcX.core.first_stage)]
    xcols=[c for c in ddf_sectors.columns if c.startswith('sector')]
    # A single sector adds no information beyond the stage time; drop it.
    if len(xcols)==1: xcols=[]
    tcols=['start','carNo','driverName']+xcols+['time_stageTime']
    pretxt=ddf_sectors.sort_values('time_stageTime')[tcols]
    pretxt=groupClassFilter(pretxt,groupClass)
    # Entries with no start value are reported separately below.
    unclassified=pretxt[pretxt['start'].isnull()]
    pretxt=pretxt[pretxt['start'].notnull()]
    pretxt['start']=pretxt['start'].astype(int)
    txt=pretxt.to_html()
    if len(unclassified): txt=txt+'\n#### Non-starters, non-finishers:'+unclassified.to_html(index=False)
    return txt
def table_stageSectorDeltaRebase(stagenum,rebase,groupClass=None,eligibility=None,df_splitTimes=None,ddf_sectors=None):
    """HTML table of per-sector deltas rebased to driver `rebase` for a stage.

    Falls back to the raw (unrebased) sector table when the stage is
    incomplete. Split/sector frames default to the module-level per-stage data.
    """
    if df_splitTimes is None:
        df_splitTimes=d.df_splitTimes_all[stagenum-int(wrcX.core.first_stage)]
    if ddf_sectors is None:
        ddf_sectors=d.df_splitSectorTimes_all[stagenum-int(wrcX.core.first_stage)]
    # Delegate the rebasing arithmetic to the charting helper.
    rebasedSectorTimes,y= c._chart_stage_sector_delta_base(wrcX.core.data.df_entry,df_splitTimes,
                  ddf_sectors,gc=groupClass,eligibility=eligibility,rebase=rebase)
    if rebasedSectorTimes.empty:
        return '\nINCOMPLETE STAGE - NO REBASING\n'+ groupClassFilter(ddf_sectors,groupClass).to_html(index=False)
    rebasedSectorTimes=groupClassFilter(rebasedSectorTimes,groupClass)
    # One column per control, one row per car.
    rebasedSectorTimes=rebasedSectorTimes.pivot_table(index=['carNo','driverName'],
                                  columns='control',values='sectordelta_s').reset_index()#.sort_values('time_stageTime')
    tcols=[c for c in rebasedSectorTimes.columns if c.startswith('d_sector_')]
    # Whole-stage delta = sum of the per-sector deltas.
    rebasedSectorTimes['stagediff']=rebasedSectorTimes[tcols].sum(axis=1).round(1)
    txt=rebasedSectorTimes.merge(df_splitTimes[['carNo','time_stageTime']],
                                 on='carNo').sort_values('time_stageTime').to_html(index=False)
    return txt
def table_stageSplitTimeDelta(stagenum,groupClass='',df_splitTimes=None, df_stage=None):
    """HTML table of split times for a stage with overall diff columns.

    Non-starters/non-finishers (null 'start') are appended as a separate
    table. Frames default to the module-level per-stage data.
    """
    if df_splitTimes is None:
        df_splitTimes=d.df_splitTimes_all[stagenum-int(wrcX.core.first_stage)]
    if df_stage is None:
        df_stage=d.df_stage[d.df_stage['stage']==stagenum]
    tcols=['start','carNo','driverName']+[c for c in df_splitTimes.columns if c.startswith('split_')]+['stageTime']
    pretxt=df_splitTimes.sort_values('time_stageTime')[tcols]
    pretxt=groupClassFilter(pretxt,groupClass)
    # NOTE(review): df_stage is filtered on stage again here - redundant for the
    # default, but required when a caller passes an unfiltered df_stage.
    pretxt=pretxt.merge( df_stage[df_stage['stage']==stagenum][['carNo','diffFirst','diffPrev']],on='carNo')
    # Entries with no start value are reported separately below.
    unclassified=pretxt[pretxt['start'].isnull()]
    pretxt=pretxt[pretxt['start'].notnull()]
    pretxt['start']=pretxt['start'].astype(int)
    cols=['carNo','driverName']+ [c for c in pretxt.columns if c.startswith('split_')]+['stageTime','diffFirst','diffPrev','groupClass']
    txt=pretxt[cols].to_html()
    if len(unclassified): txt=txt+'\n#### Non-starters, non-finishers:'+unclassified.to_html(index=False)
    return txt
def table_stageSplitTimeDeltaRebase(stagenum,rebase,groupClass='',df_splitTimes=None):
    """HTML table of split-time deltas rebased to driver `rebase` for a stage.

    Falls back to the raw split-time table when the stage is incomplete.
    df_splitTimes defaults to the module-level per-stage split-time frames.
    """
    if df_splitTimes is None:
        df_splitTimes=d.df_splitTimes_all[stagenum-int(wrcX.core.first_stage)]
    # Delegate the rebasing arithmetic to the charting helper.
    rebasedSplitTimes,y=c._chart_stage_delta_s_base(wrcX.core.data.df_entry, df_splitTimes, rebase=rebase)
    if rebasedSplitTimes.empty:
        return '\nINCOMPLETE STAGE - NO REBASING\n'+groupClassFilter(df_splitTimes,groupClass).to_html(index=False)
    rebasedSplitTimes=groupClassFilter(rebasedSplitTimes,groupClass)
    # One column per control, one row per car, ordered by stage time.
    txt = rebasedSplitTimes.pivot_table(index=['carNo','driverName'],
                                        columns='control',
                                        values='delta_s').sort_values('time_stageTime').reset_index().to_html(index=False)
    return txt
|
#coding:utf-8
import pymysql
from text9.dbcon import *
import sys
import pandas
# Module-level accumulators filled by getData():
bookzhekou=[]   # discount values (row[1] of booktest1)
bookcomment=[]  # comment counts (row[2] of booktest1)
def getData():
    """Load discount and comment-count columns from school_db.booktest1 into
    the module-level bookzhekou / bookcomment lists.

    Rows whose discount column is NULL are skipped entirely.
    """
    conn = pymysql.connect("localhost","root","root","school_db")
    try:
        cursor = conn.cursor()
        cursor.execute("select * from booktest1")
        for row in cursor.fetchall():
            # Idiom fix: identity comparison with None (was `v[1]==None`).
            if row[1] is None:
                continue
            bookzhekou.append(row[1])
            bookcomment.append(row[2])
        conn.commit()
    finally:
        # BUG FIX: the connection leaked if the query raised; always close it.
        conn.close()
if __name__ == '__main__':
    getData()
    print('最高折扣:',max(bookzhekou))
    print('最低折扣:',min(bookzhekou))
    print('平均折扣:',"%.2f"%float(sum(bookzhekou)/len(bookzhekou)))
    # Mode of the discounts: the value with the highest count wins.
    x,y = max((bookzhekou.count(x), x) for x in set(bookzhekou))
    print('折扣众数:',y)
    print('最高评论数:',max(bookcomment))
    print('最低评论数:',min(bookcomment))
    print('平均评论数:',"%.2f"%float(sum(bookcomment)/len(bookcomment)))
    # BUG FIX: the comment-count mode must count occurrences in bookcomment,
    # not bookzhekou (copy-paste error in the original).
    x, y = max((bookcomment.count(x), x) for x in set(bookcomment))
    print('评论数众数:', y)
|
# Generated by Django 2.2.3 on 2019-11-12 15:34
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: relabel/adjust two address fields on useraddition."""

    dependencies = [
        ('log_in', '0003_auto_20191112_0232'),
    ]
    operations = [
        migrations.AlterField(
            model_name='useraddition',
            name='customer_dist',
            field=models.CharField(blank=True, max_length=100, null=True, verbose_name='Улица'),
        ),
        migrations.AlterField(
            model_name='useraddition',
            name='customer_house',
            field=models.CharField(blank=True, max_length=30, verbose_name='Дом'),
        ),
    ]
|
'''
File defining base types
'''
from state import State
from ode import *
import pdb
import numpy as np
def matchParen(s,i):
    """Return the index of the bracket that closes s[i], or None if unbalanced.

    s[i] must be one of ( { [ < ; any other character raises Exception.
    """
    closers = {'(': ')', '{': '}', '[': ']', '<': '>'}
    opener = s[i]
    if opener not in closers:
        raise Exception()
    closer = closers[opener]
    depth = 0
    for j in range(i, len(s)):
        if s[j] == opener:
            depth += 1
        elif s[j] == closer:
            depth -= 1
            # Depth returns to zero exactly at the matching closer.
            if depth == 0:
                return j
    return None
def findend(st, w):
    """Index just past the first occurrence of w in st (len(w)-1 if absent)."""
    return st.find(w) + len(w)
def strideq(s, w, i):
    """Return True iff the substring of s starting at index i equals w."""
    end = i + len(w)
    if end > len(s):
        # w cannot fit in the remainder of s.
        return False
    return s[i:end] == w
class HP:
    # Abstract marker base class for all hybrid-program (HP) node types
    # (Choice, Loop, Assign, Compose, Test, ODE).
    pass
'''
expand(self, state) returns a list with (choices, e) elements,
where choices is a list of top to bottom choices to take to get from state to e,
where e is the end state.
eval(self, state, choices) returns a list of states, where states are appended
for ODE runs
'''
class Form:
    """Boolean formula over State variables.

    Stored as a Python-evaluable string in self.arg (e.g. "x<=0 or x<2").
    The constructor normalizes the surface syntax; eval() handles the
    logical connectives <->, -> and <- by splitting and recursing.
    """
    def __init__(self, arg):
        self.arg = arg.strip().replace("\n"," ")
        #strip out parens ((like this))
        while self.arg[0]=='(' and matchParen(self.arg,0)==len(self.arg)-1:
            self.arg=self.arg[1:-1].strip()
        #replace = with == (a single '=' means equality in the input syntax)
        # NOTE(review): a '!=' in the input would also be rewritten to
        # '!==' here, which breaks eval() — confirm the grammar never
        # produces '!='.
        i=1
        while i<len(self.arg)-1:
            if self.arg[i]=='=' and (self.arg[i-1] not in ['<',">","="]) and (self.arg[i+1] not in ['<',">","="]):
                #insert another =
                self.arg=self.arg[:i]+'='+self.arg[i:]
            i+=1
        self.arg=self.arg.replace("true","True").replace("false","False")
    def eval(self, state):
        """Evaluate the formula to a bool in state's variable environment."""
        i=0
        while(i<len(self.arg)):
            if self.arg[i]=='(':
                # Skip over parenthesized sub-formulas when searching for a
                # top-level connective.
                i=matchParen(self.arg,i)
            else:
                if strideq(self.arg,"<->",i):
                    #split the string at the <-> and recurse
                    lhs=Form(self.arg[:i])
                    rhs=Form(self.arg[i+3:])
                    lv=lhs.eval(state)
                    rv=rhs.eval(state)
                    return (lv and rv) or (not lv and not rv)
                elif strideq(self.arg,'->',i):
                    lhs=Form(self.arg[:i])
                    rhs=Form(self.arg[i+2:])
                    lv=lhs.eval(state)
                    rv=rhs.eval(state)
                    return (not lv) or rv
                elif strideq(self.arg,'<-',i):
                    lhs=Form(self.arg[:i])
                    rhs=Form(self.arg[i+2:])
                    lv=lhs.eval(state)
                    rv=rhs.eval(state)
                    return (not rv) or lv
            i+=1
        #if we didn't find any special chars just evaluate it
        val=eval(self.arg, state.vars())
        if val is None:
            print(state.vars())
            raise Exception("Called eval on formula with undefined variables")
        return val
    def get_heuristic(self,state,arg=None):
        #returns the "distance" from the truth boundary: how far the state
        #is from flipping the truth value of the formula's comparisons.
        if arg is None:
            arg=self.arg.replace("not ","").strip()
        if '->' in arg or '<->' in arg or '<-' in arg:
            raise Exception("Sorry, implies are not implemented in auto heuristic evaluation")
        #might be parens around whole thing
        while arg[0]=='(' and matchParen(arg,0)==len(arg)-1:
            arg=arg[1:-1].strip()
        if 'and' not in arg and 'or' not in arg:
            #basecase: no conjunctions
            #return a big number since we don't want
            #==/!= to factor into our heuristic
            if '==' in arg:return None
            #if <,>,<=,>= then return difference of the sides to each other
            separators = ['<',">","<=",">="]
            i=0
            sep=None
            run=True
            # Find the (single) comparison operator in this atomic formula.
            while i<len(arg) and run:
                i+=1
                for s in range(len(separators)):
                    if strideq(arg,separators[s],i):
                        # a '<'/'>' followed by '=' is really '<='/'>='
                        if(arg[i+1]=="=" and s<2):continue
                        sep=s
                        run=False
                        break
            lhs=arg[:i].strip()
            rhs=arg[i+len(separators[sep]):].strip()
            return abs(Term(lhs).eval(state)-Term(rhs).eval(state))
        else:
            #for or: return max of both sides
            #for and: return min
            #find the top level conjunction ('or' binds looser than 'and')
            op_rank=[' or ',' and ']
            bestid=0
            i=0
            bestop=len(op_rank)+1
            while i<len(arg) and bestop!=0:
                if arg[i]=='(':
                    i=matchParen(arg,i)
                    continue
                for r in range(len(op_rank)):
                    if strideq(arg,op_rank[r],i):
                        if r<bestop:
                            bestid=i
                            bestop=r
                        break
                i+=1
            lhs=arg[:bestid].strip()
            rhs=arg[bestid+len(op_rank[bestop]):].strip()
            if bestop==0:
                lval=self.get_heuristic(state,lhs)
                rval=self.get_heuristic(state,rhs)
                if lval is None and rval is None:return None
                elif lval is None: return rval
                elif rval is None: return lval
                return max(lval,rval)
            else:
                lval=self.get_heuristic(state,lhs)
                rval=self.get_heuristic(state,rhs)
                if lval is None and rval is None:return None
                elif lval is None: return rval
                elif rval is None: return lval
                return min(lval,rval)
    def print(self,level=0):
        print(" "*level+"Form:",self.arg,end='')
    def tostring(self, level=0):
        return self.arg
class Term:
    """Arithmetic expression over State variables, kept as a string."""
    def __init__(self, arg):
        # Reject symbols that a pure (side-effect free) term may not contain.
        banned = ["=","<",">","!","++","{","}",";"]
        if any(sym in arg for sym in banned):
            raise Exception("Tried to construct a term with invalid string: %s"%arg)
        self.arg = arg.strip().replace("\n"," ")
    def eval(self, state):
        """Evaluate the term in the variable environment of `state`."""
        result = eval(self.arg, state.vars())
        if result is None:
            print(state.vars())
            raise Exception("Called eval on term with undefined variables")
        return result
    def print(self, level=0):
        indent = " " * level
        print(indent + "Term:", self.arg, end='')
    def tostring(self, level=0):
        return self.arg
'''
Hybrid Programs
'''
class Choice(HP):
    """Nondeterministic choice between two sub-programs (lhs or rhs).

    The branch actually taken is recorded in the choices list as "L"/"R".
    """
    def __init__(self, lhs, rhs):
        self.lhs = lhs
        self.rhs = rhs
    def print(self,level=0):
        print(" "*level+"Choice:")
        self.lhs.print(level+1)
        self.rhs.print(level+1)
    def expand(self, start_state):
        """Expand both branches, tagging each trace with its branch."""
        traceR = self.rhs.expand(start_state)
        traceL = self.lhs.expand(start_state)
        res= [(["L"] + c,e) for c,e in traceL]
        res.extend([(["R"] + c,e) for c,e in traceR])
        return res
    def eval(self, state, choices):
        """Follow the branch named by choices[0] ('L' or 'R')."""
        choice = choices[0]
        if choice == "R":
            return self.rhs.eval(state, choices[1:])
        elif choice == "L":
            return self.lhs.eval(state, choices[1:])
        else:
            # BUG FIX: the message is now %-formatted; the original passed
            # `choice` as a second Exception argument, so the placeholder
            # was never interpolated.
            raise Exception("Invalid choice %s for eval Choice. Must be R or L" % choice)
    def toString(self, choices=None, level=0):
        choice = choices[0]
        if choice == "R":
            rhs, choices = self.rhs.toString(choices[1:], level)
            ret = choice + rhs + "\n"
            return ret, choices
        elif choice == "L":
            lhs, choices = self.lhs.toString(choices[1:], level)
            ret = choice + lhs + "\n"
            return ret, choices
        else:
            # BUG FIX: same formatting fix as in eval().
            raise Exception("Invalid choice %s for eval Choice. Must be R or L" % choice)
class Loop(HP):
    """HP node for repetition; wraps the sub-program to be iterated.

    NOTE(review): unlike the other HP subclasses, Loop defines no
    expand/eval/toString — confirm loops are unrolled elsewhere before
    this node is executed.
    """
    def __init__(self, arg):
        self.arg = arg
    def print(self, level=0):
        indent = ' ' * level
        print(indent + "Loop:")
        self.arg.print(level + 1)
class Assign(HP):
    # Assignment HP node: either deterministic `x := e`, or the
    # nondeterministic form `x := *(lo,hi,n)` which samples n values
    # evenly spaced over [lo, hi].
    def __init__(self, x, e):
        #x is passed in as a term because of recursion in parseHP so we convert
        # to the underlying string representation here
        self.x = x.arg#string
        #check whether this is a star assignment
        if e.arg[0]=="*":
            self.star=True
            # e.arg looks like "*(lo,hi,numsamples)"
            entries=e.arg[e.arg.find("(")+1:e.arg.find(")")].split(",")
            self.range=(Term(entries[0]),Term(entries[1]))
            self.numsamples=int(entries[2])
        else:
            self.star=False
            self.e = e
    def print(self,level=0):
        # NOTE(review): reads self.e, which is never set when star=True —
        # printing a star assignment would raise AttributeError.
        print(' '*level+"Assign:",self.x,self.e.arg)
    def expand(self, start_state):
        # Star assignments yield one trace per sampled value; the choice is
        # recorded as the string "*(<value>)" so eval() can replay it.
        if self.star:
            traces=[]
            for val in np.linspace(self.range[0].eval(start_state),
                    self.range[1].eval(start_state),self.numsamples):
                state=start_state.copy()
                choice=f"*({val})"
                state.updateVar(self.x,val)
                traces.append(([choice],state))
            return traces
        else:
            state = start_state.copy()
            state.updateVar(self.x, self.e.eval(state))
            return [([], state)]
    def eval(self, state, choices):
        if self.star:
            # choices[0] must be a "*(<value>)" string recorded by expand()
            assert(choices[0][0]=="*")
            state_c=state.copy()
            state_c.updateVar(self.x, float(choices[0][2:-1]))
            return [state_c],choices[1:]
        else:
            state_c=state.copy()
            state_c.updateVar(self.x, self.e.eval(state_c))
            return [state_c],choices
    def toString(self, choices=None, level=0):
        if self.star:
            return self.x+" := "+choices[0],choices[1:]
        else:
            return self.x+" := "+self.e.arg, choices
class Compose(HP):
    """Sequential composition of two sub-programs (lhs ; rhs)."""
    def __init__(self, lhs, rhs):
        self.lhs = lhs
        self.rhs = rhs
    def print(self, level=0):
        pad = ' ' * level
        print(pad + "Compose:")
        self.lhs.print(level + 1)
        self.rhs.print(level + 1)
    def expand(self, start_state):
        """Expand lhs, then expand rhs from each of lhs's end states,
        concatenating the choice lists."""
        combined = []
        for left_choices, mid_state in self.lhs.expand(start_state):
            for right_choices, final_state in self.rhs.expand(mid_state):
                combined.append((left_choices + right_choices, final_state))
        return combined
    def eval(self, state, choices):
        """Run lhs, hand its last state (or `state` if it produced none)
        to rhs, and return the concatenated state list."""
        states, remaining = self.lhs.eval(state, choices)
        handoff = states[-1] if states else state
        more_states, remaining = self.rhs.eval(handoff, remaining)
        states.extend(more_states)
        return states, remaining
    def toString(self, choices=None, level=0):
        left_text, remaining = self.lhs.toString(choices, level)
        right_text, remaining = self.rhs.toString(remaining, level)
        return left_text + "; " + right_text, remaining
class Test(HP):
    """Test HP node (?F): passes the state through iff formula F holds."""
    def __init__(self, arg):
        self.arg = arg
    def print(self,level=0):
        print(' '*level+"Test:",self.arg.arg)
    def expand(self, start_state):
        """One no-choice trace when the test holds, else no traces."""
        if self.arg.eval(start_state):
            return [([], start_state)]
        else:
            return []
    def eval(self, state, choices):
        if self.arg.eval(state):
            return [],choices
        else:
            # BUG FIX: removed the duplicated word in the error message
            # ("passed passed").
            raise Exception("Test failed in eval. All traces passed to eval should pass tests")
    def toString(self, choices=None, level=0):
        return "?"+self.arg.arg, choices
class ODE(HP):
    # Continuous-evolution HP node parsed from a string like
    # "x' = e1, y' = e2 and <constraint>".  The constraint defaults to
    # True when no "and" is present.
    def __init__(self, stringarg):
        self.stringarg = stringarg#type string
        a=stringarg.find("and")
        if a<0:
            self.constraint=Form("True")
            ode_string=stringarg
        else:
            cons=stringarg[a+3:].strip()
            self.constraint=Form(cons)
            ode_string=stringarg[:a].strip()
        #now parse out the primes: each "var' = term" (comma separated)
        self.derivs=dict()
        while True:
            #find a prime
            i=ode_string.find("'")
            if i<0:break
            varname=ode_string[:i].strip()
            e=ode_string.find(",")
            if e<0:
                term=Term(ode_string[ode_string.find("=")+1:])
                self.derivs[varname]=term
                break;
            else:
                term=Term(ode_string[ode_string.find("=")+1:e])
                self.derivs[varname]=term
                ode_string=ode_string[ode_string.find(",")+1:]
        assert(len(self.derivs)>0)
    def expand(self,start_state):
        #see if contraint holds now, if it doesnt return no traces
        # pick times to expand for (including 0 time) [0,max]
        # NOTE(review): integrate_ODE_runge_kutta and dt come from
        # `from ode import *` — their exact semantics are defined there.
        if(not self.constraint.eval(start_state)):
            return []
        time_candidates=[1,2,4]
        traces=[([0],start_state)]
        prev_end_t=0
        for t in time_candidates:
            end_time,end_state=integrate_ODE_runge_kutta(self,start_state,t)
            # Stop if the integrator made (almost) no progress.
            if end_time-prev_end_t<.01:
                break;
            prev_end_t=end_time
            traces.append(([end_time],end_state))
            if(abs(end_time-t)>dt):
                break#we reached a constraint so we shouldnt expand the next one
        return traces
    def eval(self, state, choices):
        # choices[0] is the integration duration picked during expand().
        choice = choices[0]
        # Strip off the first choice
        if not isinstance(choice, (int, float)):
            raise Exception("Invalid choice for ODE eval. Must be int or float")
        # Return list of states
        return integrate_ODE_runge_kutta(self, state, choice,withSteps=True),choices[1:]
    def print(self,level=0):
        print(" "*level+"ODE: Derivs: {", end='')
        for d in self.derivs:
            print(d,'=',self.derivs[d].arg,end=' ')
        print("} Constraint: ",self.constraint.arg)
    def toString(self, choices=None, level=0):
        string = '\n'
        string += '{'
        for d in self.derivs:
            string += d+'='+self.derivs[d].arg+','
        string += "& "+self.constraint.arg+"}"
        if choices is not None:
            choice = choices[0]
            if not isinstance(choice, (int, float)):
                raise Exception("Invalid choice for ODE eval. Must be int or float")
            choices = choices[1:]
            string += ' (Eval for ' + str(round(choice, 2)) + ') '
        return string, choices
if __name__=="__main__":
    # Smoke test: heuristic distance of a disjunctive formula at x=0.
    form=Form("(x<=0 or x<2 or x<0)")
    state=State()
    state.addVar('x',0)
    print(form.get_heuristic(state))
    # state=State()
    # state.addVar('x', 0)
    # ode = ODE('x\' = 1 and x < 0.5')
    # t, end_state = integrate_ODE(ode, state, 1)
    # print(end_state.vars)
|
import socket
def createSocket(port,host):
    """Open a TCP connection to (host, port).

    Returns the connected socket, or None when the connection fails.
    """
    try:
        s = socket.socket()
        s.connect((host,port))
        return s
    except OSError:
        # BUG FIX: the original printed 'Socket created.' on FAILURE,
        # masking connection errors; report the failure instead.  The bare
        # except is narrowed to OSError (socket errors).
        print('Socket connection failed.')
        return None
def receiveFile(s,filename):
    """Receive a file over socket s and write it to filename.

    Reads 1024-byte chunks until the peer closes the connection.  Errors
    are caught and reported rather than raised (best-effort, matching the
    original contract).
    """
    try:
        with open(filename, 'wb') as f:
            print ('file opened')
            while True:
                print('receiving data...')
                data = s.recv(1024)
                # BUG FIX: the original passed data as a second print()
                # argument, so '%s' was never interpolated.
                print('data=%s' % (data,))
                if not data:
                    break
                # write data to a file
                f.write(data)
        # The 'with' block closes the file; the explicit f.close() inside
        # it was redundant and has been removed.
        print('Successfully get the file')
    except OSError:
        # Narrowed from a bare except to socket/file errors.
        print("Error receiving file")
def sendData(s,data):
    # Send raw bytes over the connected socket (no framing, no retries;
    # send() may transmit fewer bytes than given).
    s.send(data)
def receiveData(s,bufferSize):
    """Receive one chunk of at most bufferSize bytes from socket s.

    recv() blocks until data (or EOF) arrives, so the loop runs at most
    once in practice; it is kept for interface compatibility.
    """
    data = None
    # BUG FIX (idiom): compare to None with 'is', not '=='.
    while data is None:
        data = s.recv(bufferSize)
    print (data)
    return data
def disconnect(s):
    # Close the socket and confirm on stdout.
    s.close()
    print('Connection Closed')
|
###############################################################################
#
# cspace.py
#
# test ideas with OpenCV color spaces
#
###############################################################################
import cv2
import opencvconst as cv
import numpy as np
# Last clicked pixel position and click flag, shared between main() and
# the mouse callback mclick().
x0,y0 = -1,-1
clicked = False
def main():
    # Capture webcam frames, convert to Lab, and display each channel;
    # a left-click prints the Lab values at the clicked pixel.  The
    # equivalent YCrCb and HSV paths are kept below, commented out, for
    # experimentation.  (Python 2 file: print statements.)
    global x0,y0,clicked
    pFlag = False
    cv2.namedWindow('Image', cv2.WINDOW_GUI_NORMAL+cv2.WINDOW_AUTOSIZE)
    cv2.setMouseCallback('Image',mclick)
    cap = cv2.VideoCapture(0)
    cap.set(cv.CV_CAP_PROP_FRAME_WIDTH, 320)
    cap.set(cv.CV_CAP_PROP_FRAME_HEIGHT, 240)
    ret, frame = cap.read()
    print 'frame', frame.shape
    while True:
        ret, frame = cap.read()
        cv2.imshow('Image', frame)
#    cvtYCrCb = cv2.cvtColor(frame, cv2.COLOR_BGR2YCrCb)
#    Y = cvtYCrCb[:,:,0]
#    Cr = cvtYCrCb[:,:,1]
#    Cb = cvtYCrCb[:,:,2]
#
#    if not pFlag:
#        print 'cvtYCrCb=', cvtYCrCb.shape, 'Y=', Y.shape, 'Cr=', Cr.shape, 'Cb=', Cb.shape
#        pFlag = True
#
#    if clicked:
#        print '(x={},y={}) Y={}, Cr={}, Cb={}'.format(x0,y0,Y[y0,x0],Cr[y0,x0],Cb[y0,x0])
#        clicked = False
#
#    cv2.imshow('Y', Y)
#    cv2.imshow('Cr', Cr)
#    cv2.imshow('Cb', Cb)
#    cvtHSV = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
#    H = cvtHSV[:,:,0]
#    S = cvtHSV[:,:,1]
#    V = cvtHSV[:,:,2]
#
#    if not pFlag:
#        print 'cvtHSV=', cvtHSV.shape, 'H=', H.shape, 'S=', S.shape, 'V=', V.shape
#        pFlag = True
#
#    if clicked:
#        print '(x={},y={}) H={}, S={}, V={}'.format(x0,y0,H[y0,x0],S[y0,x0],V[y0,x0])
#        clicked = False
#
#    cv2.imshow('H', H)
#    cv2.imshow('S', S)
#    cv2.imshow('V', V)
        cvtLab = cv2.cvtColor(frame, cv2.COLOR_BGR2LAB)
        L = cvtLab[:,:,0]
        a = cvtLab[:,:,1]
        b = cvtLab[:,:,2]
        # Print shapes once, on the first frame only.
        if not pFlag:
            print 'cvtLab=', cvtLab.shape, 'L=', L.shape, 'a=', a.shape, 'b=', b.shape
            pFlag = True
        if clicked:
            print '(x={},y={}) L={}, a={}, b={}'.format(x0,y0,L[y0,x0],a[y0,x0],b[y0,x0])
            clicked = False
        cv2.imshow('L', L)
        cv2.imshow('a', a)
        cv2.imshow('b', b)
        # ESC (27) exits the loop.
        key = cv2.waitKey(1) & 0xFF
        if key == 27:
            break
    #
    # when done, release the capture
    #
    cap.release()
    cv2.destroyAllWindows()
def mclick(event,x,y,flags,param):
    # Mouse callback: on left-button press, record the click position and
    # raise the flag consumed by main().
    global x0,y0,clicked
    if event == cv2.EVENT_LBUTTONDOWN:
        x0,y0 = x,y
        clicked = True
#
# script entry point
#
if __name__ == '__main__':
    main()
|
#!/usr/bin/python3
import sys
import heapq
def dijkstra(s, g):
    """Single-source shortest paths (Dijkstra) from s over graph g.

    g maps node -> list of (neighbor, weight) pairs.
    Returns a dict node -> distance from s (unreachable nodes keep the
    sentinel distance).
    """
    v = {k: 0 for k in g}
    # BUG FIX: sys.maxint does not exist in Python 3 (the file's shebang
    # is python3); sys.maxsize serves as the "infinity" sentinel.
    e = {k: sys.maxsize for k in g}
    e[s] = 0
    q = [(0, s)]
    while (q != []):
        _, a = heapq.heappop(q)
        # Skip stale queue entries for already-settled nodes.
        if v[a]: continue
        v[a] = 1
        for (n, w) in g[a]:
            if (e[a] + w < e[n]):
                e[n] = e[a] + w
                heapq.heappush(q, (e[n], n))
    return e
def testDijkstraShortestPath():
    """Smoke test: shortest paths from node 1 on a small directed graph."""
    graph = {
        1: [(4, 9), (5, 1), (2, 5)],
        2: [(5, 3)],
        3: [(2, 2)],
        4: [(3, 6)],
        5: [(4, 2)],
    }
    print(dijkstra(1, graph))
testDijkstraShortestPath()
import tensorflow as tf
import numpy
from sklearn.datasets import load_digits
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import LabelBinarizer
# Load the sklearn 8x8 digits dataset and one-hot encode the labels.
digits=load_digits()
x=digits.data
y=digits.target
y=LabelBinarizer().fit_transform(y)
# Hold out 30% of the samples for testing.
x_train,x_test,y_train,y_test=train_test_split(x,y,test_size=.3)
def add_layer(inputs,input_size,output_size,layer_name,activation_function=None):
    """Append a fully connected layer (with dropout) and return its output.

    Dropout uses the module-level `keep_prob` placeholder; a histogram
    summary of the outputs is recorded under `layer_name`.
    """
    weights = tf.Variable(tf.random_normal([input_size, output_size]))
    # Biases start slightly positive (0.1) rather than at zero.
    biases = tf.Variable(tf.zeros([1, output_size]) + 0.1)
    pre_activation = tf.matmul(inputs, weights) + biases
    # Drop a fraction of the pre-activations to regularize.
    pre_activation = tf.nn.dropout(pre_activation, keep_prob)
    if activation_function is None:
        outputs = pre_activation
    else:
        outputs = activation_function(pre_activation)
    tf.summary.histogram(layer_name + '/outputs', outputs)
    return outputs
# Placeholders: dropout keep-probability, 8x8 digit inputs, one-hot labels.
keep_prob=tf.placeholder(tf.float32)
xs=tf.placeholder(tf.float32,[None,64])#8x8
ys=tf.placeholder(tf.float32,[None,10])#ten classes
layer1=add_layer(xs,64,50,'layer1',activation_function=tf.nn.tanh)
prediction=add_layer(layer1,50,10,'layer2',activation_function=tf.nn.softmax)
# Cross-entropy between one-hot labels and the softmax predictions.
cross_entropy=tf.reduce_mean(-tf.reduce_sum(ys*tf.log(prediction),reduction_indices=[1]))
tf.summary.scalar('loss',cross_entropy)
train_step=tf.train.GradientDescentOptimizer(0.5).minimize(cross_entropy)
init = tf.global_variables_initializer()
with tf.Session() as sess:
    merged=tf.summary.merge_all()
    train_writer=tf.summary.FileWriter("logs/train",sess.graph)
    test_writer=tf.summary.FileWriter("logs/test",sess.graph)
    sess.run(init)
    for i in range(500):
        # Train with 50% dropout; summaries are evaluated with keep_prob=1.
        sess.run(train_step,feed_dict={xs:x_train,ys:y_train,keep_prob:0.5})
        if i%50==0:
            train_result=sess.run(merged,feed_dict={xs:x_train,ys:y_train,keep_prob:1})
            test_result=sess.run(merged,feed_dict={xs:x_test,ys:y_test,keep_prob:1})
            train_writer.add_summary(train_result,i)
            test_writer.add_summary(test_result,i)
|
import logging
from typing import Callable
backup_logger = logging.getLogger(__name__)
# Callbacks invoked (with the exception) for every logged error.
logger_callbacks = []
class ErrorLogger:
    """Static helper: log exceptions and fan them out to registered callbacks."""
    @staticmethod
    def add_logging_callback(callback: Callable):
        """
        Adds a callback for logging purposes.
        :param callback: A function that takes in an exception
        """
        logger_callbacks.append(callback)
    @staticmethod
    def log_error(exception: Exception, message: str = None, logger: logging.Logger = backup_logger):
        """
        Logs an exception that occurs in the case that we can not throw an error.
        This will show the stack trace along with the exception.
        Uses a default logger if none is provided.
        :param exception: The exception that occured.
        :param message: An optional message.
        :param logger: A logger to use. one is provided if nothing is used.
        :return:
        """
        if message is None:
            message = str(exception)
        logger.exception(message)
        # BUG FIX: the try used to wrap the whole loop, so one failing
        # callback silently skipped all remaining callbacks; each callback
        # is now isolated.
        for callback in logger_callbacks:
            try:
                callback(exception)
            except Exception as e:
                backup_logger.exception(e)
|
from django.contrib import admin
from file_keeper.models import File
class FileAdmin(admin.ModelAdmin):
    # Show the same three fields in both the edit form and the changelist.
    fields = ('hash', 'mime', 'base64')
    list_display = fields
# Register the File model with its custom admin.
admin.site.register(File, FileAdmin)
|
from django.db import models
# Create your models here.
class Articles(models.Model):
    # Blog article with a free-text body and a plain author-name field.
    title = models.CharField(max_length=256)
    created_at = models.DateTimeField()
    author = models.CharField(max_length=128, default='piroyoung')
    body = models.TextField()
class Categories(models.Model):
    # Category with a many-to-many link to Articles via an explicit
    # through model.
    name = models.CharField(max_length=32)
    # NOTE(review): through_fields should name the ForeignKey *fields* on
    # ArticleCategoryMap ('category', 'article'); the '_id'-suffixed names
    # look like DB column attnames — confirm Django accepts these.
    member = models.ManyToManyField(
        Articles,
        through='ArticleCategoryMap',
        through_fields=('category_id', 'article_id'),
    )
class ArticleCategoryMap(models.Model):
    # Join table for the Articles <-> Categories many-to-many relation.
    article = models.ForeignKey(Articles, on_delete=models.CASCADE)
    category = models.ForeignKey(Categories, on_delete=models.CASCADE)
|
from flask import Flask,render_template,request,redirect,flash,Response
import pandas as pd
from io import BytesIO
import base64
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import seaborn as sns
from numpy import exp, cos, linspace
from matplotlib.figure import Figure
import os, re
import numpy as np
from matplotlib.figure import Figure
import secrets
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
from sklearn.metrics import confusion_matrix
from sklearn.linear_model import LogisticRegression
from sklearn.svm import SVC
from sklearn.naive_bayes import GaussianNB
from sklearn.tree import DecisionTreeClassifier
from sklearn.linear_model import LinearRegression
from sklearn.svm import SVR
from sklearn.cluster import KMeans
from sklearn.metrics import r2_score
from sklearn.metrics import classification_report
from sklearn.metrics import accuracy_score
from sklearn.metrics import recall_score
from sklearn.metrics import precision_score
app = Flask(__name__)
# Random per-run secret key enabling flash()/session support.
secret = secrets.token_urlsafe(32)
app.secret_key = secret
# Module-level state shared across requests (single-user prototype; not
# safe under concurrent users).
csv_file = None           # raw uploaded file object
df = None                 # loaded / cleaned DataFrame
columns_all = None        # all column names of df
x = None                  # feature matrix (set from the "yas" column)
datas = None
bagimli_degisken = None   # dependent variable (target column name)
test = None               # test split fraction
algorithm = None          # selected algorithm key
@app.route("/")
def index():
    """Render the static landing page."""
    return render_template("index.html")
@app.route("/data", methods =["GET","POST"])
def data():
    """Multi-step data-analysis wizard, dispatched on which form posted.

    form1: upload + profile a CSV (describe/missing/corr tables, boxplot);
    form2/datasteptwo: cleaning (drop NA, mean-impute, IQR outliers);
    datastepthree: model configuration; datastepfour: train + score;
    datastepfive: final page.  All state lives in module-level globals.
    """
    if 'form1' in request.form:
        global datas
        global csv_file
        global df
        global columns_all
        global x
        df = request.files["myfile"]
        csv_file = request.files["myfile"]
        if datas:
            print(datas.head())
        if csv_file:
            file_name = csv_file.filename
            df = pd.read_csv(csv_file)
            describe = df.describe()
            columns_all = df.columns
            # Per-column missing-value counts rendered as a table.
            missing = df.isnull().sum()
            missing = missing.to_frame()
            missing = missing.rename(columns={0: 'Eksik Veri'})
            corr = df.corr()
            df_numeric = df.select_dtypes(include =['float64','int64'])
            columns = df_numeric.columns
            columns_all = df.columns
            fig = plt.figure()
            io = BytesIO()
            # Mean-impute each numeric column before plotting.
            for i in columns:
                df_numeric[i] = df_numeric[i].fillna(df_numeric[i].mean())
                # NOTE(review): flierprops is loop-invariant; defining it
                # inside the loop is harmless but redundant.
                flierprops = dict(marker='o', markerfacecolor='r', markersize=12,
                          linestyle='none', markeredgecolor='g')
            df_numeric = df_numeric.astype(int)
            columns = df_numeric.columns
            plt.boxplot(df_numeric[columns].values ,flierprops=flierprops)
            fig.savefig(io, format='png')
            # NOTE(review): base64.encodestring was deprecated and removed
            # in Python 3.9 — base64.encodebytes is the modern equivalent.
            data = base64.encodestring(io.getvalue()).decode('utf-8')
            html = 'data:image/png;base64,{}'
            number_of_rows = len(df)
            number_of_columns = len(df.columns)
            return render_template("data.html" ,html = html.format(data), len = len(columns), columns = columns,tables=[describe.to_html(classes='data')] ,tablestwo=[missing.to_html(classes='data')] , tablesthree=[corr.to_html(classes='data')] ,satir = number_of_rows, sutun = number_of_columns , file = file_name)
    if 'form2' in request.form:
        return render_template("dataSteptwo.html")
    if 'datasteptwo' in request.form:
        # Cleaning options chosen via checkboxes.
        if request.form.getlist('deletemissing'):
            df = df.dropna()
            flash('Eksik veriler silindi!')
        if request.form.getlist("meanmissing"):
            df = df.fillna(df.mean())
            df = df.fillna(method = "bfill")
            flash('Eksik veriler ortalama değer ile dolduruldu!')
        if request.form.getlist("deleteoutlier"):
            # IQR rule: drop rows outside [Q1 - 1.5*IQR, Q3 + 1.5*IQR].
            df_n = df.select_dtypes(["int64","float64"])
            Q1 = df_n.quantile(0.25)
            Q3 = df_n.quantile(0.75)
            IQR = Q3-Q1
            alt_sinir = Q1 - 1.5*IQR
            ust_sinir = Q3 + 1.5*IQR
            df_n = df_n[~((df_n < (alt_sinir )) | (df_n > (ust_sinir))).any(axis = 1)]
            df_n = pd.DataFrame(df_n)
            df[df_n.columns] = df_n
            flash('Aykırı veriler silindi!')
        print(df)
        # NOTE(review): the feature matrix is hard-coded to the "yas"
        # (age) column — this raises KeyError for CSVs without it.
        x = df["yas"]
        x= x.values.reshape(-1,1)
        return render_template("dataSteptwo.html")
    if 'datastepthree' in request.form:
        global algorithm
        global test
        global bagimli_degisken
        test = request.form.get('testt')
        bagimli_degisken = request.form.get('features')
        algorithm = request.form.get('algorithms')
        return render_template("dataStepthree.html" ,option_list = columns_all)
    if 'datastepfour' in request.form:
        test = float(test)
        print(algorithm)
        bagimli_degisken = df[bagimli_degisken]
        X_train, X_test, Y_train, Y_test = train_test_split(x,bagimli_degisken, test_size = test, random_state = 42)
        if algorithm == "knn":
            knn = KNeighborsClassifier(n_neighbors=1, metric='minkowski')
            knn.fit(X_train,Y_train)
            y_pred = knn.predict(X_test)
            rs = recall_score(Y_test, y_pred, average=None)
            ps = precision_score(Y_test, y_pred, average=None)
            acc = accuracy_score(Y_test, y_pred)
        if algorithm == "linear":
            lin_reg = LinearRegression()
            lin_reg.fit(X_train,Y_train)
            print('Linear R2 degeri')
            print(r2_score(Y_test, lin_reg.predict(X_test)))
        if algorithm == "svr":
            # NOTE(review): this fits on the TEST split — looks like a
            # bug; confirm before changing.
            svr_reg = SVR(kernel='rbf')
            svr_reg.fit(X_test,Y_test)
            print('SVR R2 degeri')
            print(r2_score(Y_test, svr_reg.predict(X_test)))
        if algorithm == "dt":
            dtc = DecisionTreeClassifier(criterion = 'entropy')
            dtc.fit(X_train,Y_train)
            y_pred = dtc.predict(X_test)
            cm = confusion_matrix(Y_test,y_pred)
            print(cm)
        if algorithm == "rf":
            print("burada")
        if algorithm == "logistic":
            logr = LogisticRegression(random_state=0)
            logr.fit(X_train,Y_train)
            y_pred = logr.predict(X_test)
            cm = confusion_matrix(Y_test,y_pred)
            print(cm)
        if algorithm == "naive":
            # NOTE(review): trains an SVC, not GaussianNB, despite the key.
            svc = SVC(kernel='rbf')
            svc.fit(X_train,Y_train)
            y_pred = svc.predict(X_test)
            cm = confusion_matrix(Y_test,y_pred)
            print(cm)
        if algorithm == "k-means":
            print("burada")
        if algorithm == "svm":
            svc = SVC(kernel='rbf')
            svc.fit(X_train,Y_train)
            y_pred = svc.predict(X_test)
            cm = confusion_matrix(Y_test,y_pred)
            print(cm)
        # NOTE(review): acc/rs/ps are only bound on the "knn" path; every
        # other algorithm raises NameError here — confirm intended flow.
        return render_template("dataStepfour.html" , acc = acc, rs=rs,ps=ps )
    if 'datastepfive' in request.form:
        return render_template("dataStepfive.html")
if __name__ == "__main__":
    # Run the Flask dev server (debug mode: auto-reload, verbose errors).
    app.run(debug = True)
|
#!env python3
# -*- coding: utf-8 -*-
# Trivial demo: string equality check against a module-level variable.
var1 = 'variable one'
if var1 == 'variable one':
    print('var1 is variable one')
|
# -*- coding: utf-8 -*-
import functools, types
import util
def login_check(check_func, failed_func=None):
    """Utility decorator to wrapped a method with login process

    check_func(*a, **ka) validates the request; when it returns falsy,
    failed_func (if given) is invoked, otherwise HttpRequestException(401)
    is raised.  On success the wrapped function runs normally.
    """
    if not check_func or not util.is_callable(check_func):
        raise Exception('Developer Error: check_func parameter must point to an existing validation function')
    # BUG FIX: this branch validated check_func a second time; it must
    # validate failed_func (only when one was supplied).
    if failed_func and not util.is_callable(failed_func):
        raise Exception('Developer Error: failed_func parameter must point to an existing function')
    def _decorate_func_of(wrapped_func):
        def _wrap_call_with(*a, **ka):
            _auth = check_func(*a, **ka)
            if not _auth:
                # BUG FIX: the original tested check_func here (always
                # truthy at this point), so failed_func was always called
                # and the 401 branch was unreachable.
                if failed_func:
                    failed_func(*a, **ka)
                else:
                    raise HttpRequestException("Page you are accessing requires successful Login", status=401)
            return wrapped_func(*a, **ka)
        # BUG FIX: wrap the inner wrapper, not the decorator itself.
        functools.update_wrapper(_wrap_call_with, wrapped_func, updated=[])
        # BUG FIX: the original returned the misspelled name
        # __wrap_call_with, a guaranteed NameError on first use.
        return _wrap_call_with
    return _decorate_func_of
|
import socket
# Send a Carbon Coder "JobList" API request over a raw TCP socket and
# print the server's reply.
ip = '127.0.0.1' #Machine IP - Carbon Coder Software
porta = 1120 #Default Carbon Coder port
sck = socket.socket()
sck.connect((ip,porta))
# Wire format: protocol tag, payload byte length, then the XML payload.
message = "CarbonAPIXML1 68 <?xml version =\"1.0\" encoding=\"UTF-8\"?><cnpsXML TaskType=\"JobList\"/>"
sck.send(message.encode("utf-8"))
data = sck.recv(4096).decode()
print ('Resposta do Servidor: ' + data)
sck.close()
print("socket closed")
# bia-rodrig
# Given an array and a value, remove all instances of that value in-place and return the new length.
#
# Do not allocate extra space for another array, you must do this by modifying the input array in-place with O(1) extra memory.
#
# The order of elements can be changed. It doesn't matter what you leave beyond the new length.
#
# Example:
#
# Given nums = [3,2,2,3], val = 3,
#
# Your function should r1200112eturn length = 2, with the first two elements of nums being 2.
class Solution(object):
    def removeElement(self, nums, val):
        """Remove all instances of val from nums in-place.

        Returns the new length k; the first k slots of nums hold the kept
        elements (order preserved).  O(n) time / O(1) extra space,
        replacing the original O(n^2) shift-values-to-the-end approach.
        """
        k = 0
        for item in nums:
            if item != val:
                # Compact kept elements toward the front.
                nums[k] = item
                k += 1
        return k
if __name__ == '__main__':
    s = Solution()
    # Python 2 print statement (this file predates Python 3).
    print "solution is: " + str(s.removeElement([3,2,2,1,2,3,4],3))
import numpy as np
import sys
# Lloyd's k-means: n points of dimension m, k clusters, at most t passes.
n, m, k, t = list(map(int, input().strip().split()))
# Each remaining stdin line: m coordinates followed by an initial cluster id.
data = np.array(list(map(lambda line: list(map(float, line.strip().split())), sys.stdin.readlines())))
points = data[:, :data.shape[1] - 1]
clusters = np.array(list(map(int, data[:, data.shape[1] - 1])))
for step in range(t):
    # print("step number: ", step)
    # Mean of each non-empty cluster; empty clusters get the zero vector.
    means = np.array([np.mean(points[np.where(clusters == cluster_id)[0]], axis=0)
                      if cluster_id in clusters else np.zeros(m) for cluster_id in range(k)])
    old_clusters = np.copy(clusters)
    # Reassign every point to its nearest cluster mean.
    for index, point in enumerate(points):
        min_norm = float("inf")
        for cluster_id in range(k):
            tmp = np.linalg.norm(means[cluster_id] - point)
            if tmp < min_norm:
                clusters[index] = cluster_id
                min_norm = tmp
    # Converged: no assignment changed this iteration.
    if (old_clusters == clusters).all():
        break
for cluster_id in clusters:
    print(f"{cluster_id}")
|
# -*- coding: utf-8 -*-
"""
ZigBee constants.
"""
# ========== MAC constants: ======================
#frame types (keyed by the 3-bit frame-type field, as a bit string):
TYPE_BCN = 'Beacon'
TYPE_DATA = 'Data'
TYPE_ACK = 'Ack.'
TYPE_CMD = 'Command'
FRAME_TYPE = {'000': TYPE_BCN,
              '001': TYPE_DATA,
              '010': TYPE_ACK,
              '011': TYPE_CMD}
#addressing modes (keyed by the 2-bit addressing-mode field):
MODE_NONE = 'PAN-ID and address field are not present'
MODE_RESERVED = 'Reserved'
MODE_16 = '16-bit addressing'
MODE_64 = '64-bit addressing'
ADDR_MODE = {'00': MODE_NONE,
             '01': MODE_RESERVED,
             '10': MODE_16,
             '11': MODE_64}
#command types (keyed by the command-identifier byte, as hex strings):
CMD_ASS_REQ = 'Association request'
CMD_ASS_RESP = 'Association response'
CMD_DIS_NOT = 'Disassociation notification'
CMD_DAT_REQ = 'Data request'
CMD_PAN_ID_CON = 'PAN ID conflict notification'
CMD_ORP_NOT = 'Orphan notification'
CMD_BCN_REQ = 'Beacon request'
CMD_CRD_REL = 'Coordinator realignment'
CMD_GTS_REQ = 'GTS request'
COMMAND_TYPE = {'0x1': CMD_ASS_REQ,
                '0x2': CMD_ASS_RESP,
                '0x3': CMD_DIS_NOT,
                '0x4': CMD_DAT_REQ,
                '0x5': CMD_PAN_ID_CON,
                '0x6': CMD_ORP_NOT,
                '0x7': CMD_BCN_REQ,
                '0x8': CMD_CRD_REL,
                '0x9': CMD_GTS_REQ}
import fresh_tomatoes
import media
# set up my six movie objects (title, poster image URL, trailer URL)
secret_of_my_success = media.Movie(
    "The Secret of My Success",
    ("https://upload.wikimedia.org/wikipedia/en/1/18/"
     "The_Secret_Of_My_Success.jpg"),
    "https://www.youtube.com/watch?v=rGHDATIJIX8")
john_wick = media.Movie(
    "John Wick",
    ("https://upload.wikimedia.org/wikipedia/en/9/98/"
     "John_Wick_TeaserPoster.jpg"),
    "https://www.youtube.com/watch?v=2AUmvWm5ZDQ")
christmas_story = media.Movie(
    "A Christmas Story",
    ("https://upload.wikimedia.org/wikipedia/en/6/65/"
     "A_Christmas_Story_film_poster.jpg"),
    "https://www.youtube.com/watch?v=LBe0Bl0wYHU")
blade_runner = media.Movie(
    "Blade Runner",
    "https://upload.wikimedia.org/wikipedia/en/5/53/Blade_Runner_poster.jpg",
    "https://www.youtube.com/watch?v=eogpIG53Cis")
guardians_of_the_galaxy = media.Movie(
    "Guardians of the Galaxy",
    "https://upload.wikimedia.org/wikipedia/en/8/8f/GOTG-poster.jpg",
    "https://www.youtube.com/watch?v=d96cjJhvlMA")
last_starfighter = media.Movie(
    "The Last Starfighter",
    "https://upload.wikimedia.org/wikipedia/en/4/4c/Last_starfighter_post.jpg",
    "https://www.youtube.com/watch?v=H7NaxBxFWSo")
# make a list of my movies to pass to web page generator
my_movies = [john_wick, blade_runner, christmas_story,
             last_starfighter, guardians_of_the_galaxy, secret_of_my_success]
# actually generate web page
fresh_tomatoes.open_movies_page(my_movies)
|
from interfaces.prediction_network import PredictionNetwork
from connect4.connect_four_state import ConnectFourState
import torch
class ConnectFourPredictionNetwork(PredictionNetwork):
    # Adapter between a raw network and MCTS for Connect Four: it masks
    # out illegal columns from the policy head and renormalizes.
    def __init__(self, network, cols):
        self._network = network   # underlying model exposing predict(tensor)
        self._cols = cols         # board width == size of the policy head
    def predict(self, state):
        """
        :param state: The ConnectFourState to predict for
        :return: a pair (action_probability_pairs, value)
        where action_probability_pairs is a list of (action, probability) pairs predicted by the network
        the value is the probability that the current player will win the game, predicted by the network
        """
        assert isinstance(state, ConnectFourState)
        state_tensor = state.convert_to_tensor()
        # NOTE(review): action_probabilities is assumed to be indexable by
        # column (one entry per column) — confirm the network's head shape.
        action_probabilities, value = self._network.predict(state_tensor)
        all_possible_actions = state.all_possible_actions()
        all_possible_actions_raw = [action.col for action in all_possible_actions]
        # Zero out columns that are full, then renormalize to a distribution.
        for col in range(self._cols):
            if col not in all_possible_actions_raw:
                action_probabilities[col] = 0
        action_probabilities = action_probabilities / sum(action_probabilities)
        action_probability_pairs = [(action, action_probabilities[action.col].item())
                                    for action in all_possible_actions]
        return action_probability_pairs, value.item()
    def translate_to_action_probabilities_tensor(self, action_mcts_probability_pairs):
        # Pack (action, probability) pairs into a 1 x cols training target.
        tensor = torch.zeros([1, self._cols], dtype=torch.double)
        for action, mcts_probability in action_mcts_probability_pairs:
            tensor[0, action.col] = mcts_probability
        return tensor
|
import matplotlib.pyplot as plt
import matplotlib.animation as animation
import time
import math
import socket
import os
class SimpleNetworkClient :
    # Live matplotlib plot of infant and incubator temperatures fetched
    # over UDP from two local sensor servers.  Each request must first be
    # authenticated (AUTH <pre-shared key> -> session token).
    def __init__(self, port1, port2) :
        self.fig, self.ax = plt.subplots()
        now = time.time()
        self.lastTime = now
        # 30 one-second time-axis labels ending at "now".
        self.times = [time.strftime("%H:%M:%S", time.localtime(now-i)) for i in range(30, 0, -1)]
        self.infTemps = [0]*30
        self.incTemps = [0]*30
        self.infLn, = plt.plot(range(30), self.infTemps, label="Infant Temperature")
        self.incLn, = plt.plot(range(30), self.incTemps, label="Incubator Temperature")
        plt.xticks(range(30), self.times, rotation=45)
        plt.ylim((20,50))
        plt.legend(handles=[self.infLn, self.incLn])
        self.infPort = port1
        self.incPort = port2
        # Auth tokens are fetched lazily on the first update tick.
        self.infToken = None
        self.incToken = None
        self.ani = animation.FuncAnimation(self.fig, self.updateInfTemp, interval=500)
        self.ani2 = animation.FuncAnimation(self.fig, self.updateIncTemp, interval=500)
    def updateTime(self) :
        # Slide the x-axis labels once per wall-clock second.
        now = time.time()
        if math.floor(now) > math.floor(self.lastTime) :
            t = time.strftime("%H:%M:%S", time.localtime(now))
            self.times.append(t)
            #last 30 seconds of data
            self.times = self.times[-30:]
            self.lastTime = now
            plt.xticks(range(30), self.times,rotation = 45)
            plt.title(time.strftime("%A, %Y-%m-%d", time.localtime(now)))
    def getTemperatureFromPort(self, p, tok) :
        # Send "<token>;GET_TEMP"; the reply is "<value> <unit>".
        s = socket.socket(family=socket.AF_INET, type=socket.SOCK_DGRAM)
        s.sendto(b"%s;GET_TEMP" % tok, ("127.0.0.1", p))
        msg, addr = s.recvfrom(1024)
        m = msg.decode("utf-8")
        m = m.split(' ')
        temperature = float(m[0].strip())
        unit_of_measure = m[1].strip()
        return (temperature, unit_of_measure)
    def authenticate(self, p, pw) :
        # Exchange the pre-shared key for a session token.
        s = socket.socket(family=socket.AF_INET, type=socket.SOCK_DGRAM)
        s.sendto(b"AUTH %s" % pw, ("127.0.0.1", p))
        msg, addr = s.recvfrom(1024)
        return msg.strip()
    def processInfTemp(self):
        # Read the infant sensor and normalize the unit to Celsius.
        temperature_set = self.getTemperatureFromPort(self.infPort, self.infToken)
        temperature = temperature_set[0]
        unit_of_measure = temperature_set[1]
        if unit_of_measure == 'C':
            return temperature
        if unit_of_measure == 'F':
            return (temperature - 32) / 1.800
        # NOTE(review): Kelvin offset uses 273, not 273.15 — confirm the
        # 0.15 degree error is acceptable here.
        return temperature - 273
    def updateInfTemp(self, frame) :
        self.updateTime()
        if self.infToken is None : #not yet authenticated
            self.infToken = self.authenticate(self.infPort, b"%s" % os.environ['PRE_SHARED_KEY'].encode())
        self.infTemps.append(self.processInfTemp())
        #self.infTemps.append(self.infTemps[-1] + 1)
        self.infTemps = self.infTemps[-30:]
        self.infLn.set_data(range(30), self.infTemps)
        return self.infLn,
    def processIncTemp(self):
        # Read the incubator sensor and normalize the unit to Celsius.
        temperature_set = self.getTemperatureFromPort(self.incPort, self.incToken)
        temperature = temperature_set[0]
        unit_of_measure = temperature_set[1]
        if unit_of_measure == 'C':
            return temperature
        if unit_of_measure == 'F':
            return (temperature - 32) / 1.800
        # NOTE(review): same 273 vs 273.15 approximation as processInfTemp.
        return temperature - 273
    def updateIncTemp(self, frame) :
        self.updateTime()
        if self.incToken is None : #not yet authenticated
            self.incToken = self.authenticate(self.incPort, b"%s" % os.environ['PRE_SHARED_KEY'].encode())
        self.incTemps.append(self.processIncTemp())
        #self.incTemps.append(self.incTemps[-1] + 1)
        self.incTemps = self.incTemps[-30:]
        self.incLn.set_data(range(30), self.incTemps)
        return self.incLn,
# Build the client against the two local sensor ports and show the live plot.
snc = SimpleNetworkClient(23456, 23457)
plt.grid()
plt.show()
|
import sys
sys.path.append("..")
import requests
from bs4 import BeautifulSoup
import time
from backend.common.connect import Database
from SpotifySongInfo import spotify_info
#if year divisible by 4, it's a leap year -> feb + 1
DAYS_IN_MONTH =[31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]  # non-leap year
WEEK = 7  # step size in days; Billboard charts are weekly
# Starting chart date for the backwards scrape: 2019-01-08.
year = 2019
month = 1
day = 8
# {
# JAN = 31
# FEB = 28
# MAR = 31
# APR = 30
# MAY = 31
# JUN = 30
# JUL = 31
# AUG = 31
# SEP = 30
# OCT = 31
# NOV = 30
# DEC = 31
# }
# Open the shared database connection used by the helpers below.
Database.connect()
# Walk backwards week by week from the starting date, scraping each week's
# Billboard Hot 100 chart page and recording songs + rankings.
while year >= 2019: #adjust this value to extend the history of data retrieval
    date = str(year)+'-'+str(month).zfill(2)+'-'+str(day).zfill(2)
    URL = 'https://www.billboard.com/charts/hot-100/'+date
    print(URL)
    page = requests.get(URL)
    soup = BeautifulSoup(page.content, 'html.parser')
    results = soup.find(class_='chart-list container')
    #pdb.set_trace()
    chart_elems = results.find_all(class_='chart-list__element display--flex')
    for chart_elem in chart_elems:
        # Pull rank / artist / title out of the chart entry markup.
        ranking = chart_elem.find('span', class_='chart-element__rank__number').text
        artist = chart_elem.find('span', class_='chart-element__information__artist text--truncate color--secondary').text
        song_title = chart_elem.find('span', class_='chart-element__information__song text--truncate color--primary').text
        # if ranking == '1' :
        print(ranking + ". " + song_title + " - " + artist)
        songID = Database.checkSongExists(song_title, artist)
        if songID == None:
            # New song: throttle the Spotify API, then fetch its audio features.
            time.sleep(3)
            try:
                spotifyData = spotify_info(artist, song_title)
                songData ={
                    'danceability' : spotifyData["danceability"], #how suitable for dancing the track is
                    'energy' : spotifyData["energy"], #measure of intensity and activity
                    'key' : spotifyData["key"], # overall key of the track
                    'loudness' : spotifyData["loudness"], # overall loudness in dB, typically from -60 to 0
                    'mode' : spotifyData["mode"], # major = 1, minor = 0
                    'speechiness' : spotifyData["speechiness"], #presence of spoken words in a track
                    'acousticness' : spotifyData["acousticness"], # confidence measure of whether track is acoustic
                    'instrumentalness' : spotifyData["instrumentalness"], #predicts whether a track contains no vocals
                    'liveness' : spotifyData["liveness"], # detects presence of an audience in recording
                    'valence' : spotifyData["valence"], # musical positiveness conveyed (1.0 being highest)
                    'tempo' : spotifyData["tempo"],
                    'time_signature' : spotifyData["time_signature"],
                    'duration' : spotifyData["duration_ms"],
                    'artist' : artist,
                    'ranking' : ranking,
                    'song_id' : spotifyData["id"],
                    'song_title' : song_title
                }
                Database.addSongToTable(songData, date)
            except:
                # Lookup failed; retry with the primary artist only
                # (strip "Featuring ..." and "& ..." suffixes).
                try:
                    spotifyData = spotify_info(artist.split('Featuring', 1)[0].split('&', 1)[0], song_title)
                    songData ={
                        'danceability' : spotifyData["danceability"], #how suitable for dancing the track is
                        'energy' : spotifyData["energy"], #measure of intensity and activity
                        'key' : spotifyData["key"], # overall key of the track
                        'loudness' : spotifyData["loudness"], # overall loudness in dB, typically from -60 to 0
                        'mode' : spotifyData["mode"], # major = 1, minor = 0
                        'speechiness' : spotifyData["speechiness"], #presence of spoken words in a track
                        'acousticness' : spotifyData["acousticness"], # confidence measure of whether track is acoustic
                        'instrumentalness' : spotifyData["instrumentalness"], #predicts whether a track contains no vocals
                        'liveness' : spotifyData["liveness"], # detects presence of an audience in recording
                        'valence' : spotifyData["valence"], # musical positiveness conveyed (1.0 being highest)
                        'tempo' : spotifyData["tempo"],
                        'time_signature' : spotifyData["time_signature"],
                        'duration' : spotifyData["duration_ms"],
                        'artist' : artist,
                        'ranking' : ranking,
                        'song_id' : spotifyData["id"],
                        'song_title' : song_title
                    }
                    Database.addSongToTable(songData, date)
                except:
                    # NOTE(review): bare except silently drops this chart entry
                    # after rolling back — consider logging the failure.
                    Database.rollback()
                    pass
        else:
            # Song already known: just record this week's ranking for it.
            try:
                Database.addRanking(ranking, songID, date)
            except:
                Database.rollback()
                pass
    # Step one week back, borrowing days from the previous month as needed.
    day -= WEEK
    if day < 1:
        month -= 1
        if month < 1:
            year -= 1
            month=12
        # Assumes a non-leap February — see the note above DAYS_IN_MONTH.
        day = DAYS_IN_MONTH[month-1] + day
    #time.sleep(10)
|
from django.shortcuts import render
from django.http import HttpResponseRedirect
from django.contrib.auth import authenticate
from django import forms
from .interface import interface
from product_mgr_app.models import Product, Rating
def index_view(request):
    """Render the trending page: the ten most-viewed products.

    The personalised recommendation path is currently disabled (see the
    commented call below); the unused `user = request.user` local that
    supported it has been removed.
    """
    products = Product.objects.all().order_by('-view')[:10]
    #recommend = interface(1, 10) # 10 is an arbitrary parameter
    table = products
    return render(request, 'trend_app/index.html', {'table': table})
def detail_view(request, product_id):
    """Render one product's detail page, counting the view and, for
    authenticated users, recording a rating.

    Raises Product.DoesNotExist if *product_id* is unknown (unchanged from
    the original behaviour).
    """
    user = request.user
    product = Product.objects.get(pk=product_id)
    # Bug fix: was `product.view += 0`, which saved the product without ever
    # incrementing the view counter that index_view sorts by.
    product.view += 1
    product.save()
    if user.is_authenticated:
        # NOTE(review): a fixed rate=2 rating is stored on every authenticated
        # view — presumably an implicit "viewed" signal; confirm the intent.
        rating = Rating(account=user.account, product=product, rate=2)
        rating.save()
    return render(request, 'trend_app/detail.html', {'product': product})
|
#https://leetcode-cn.com/problems/letter-combinations-of-a-phone-number/
class Solution(object):
    """LeetCode 17: letter combinations of a phone number."""
    # Telephone keypad mapping: digit -> candidate letters.
    # (Was built via eight separate item assignments; a literal is clearer.)
    reflect_table = {
        "2": ['a', 'b', 'c'],
        "3": ['d', 'e', 'f'],
        "4": ['g', 'h', 'i'],
        "5": ['j', 'k', 'l'],
        "6": ['m', 'n', 'o'],
        "7": ['p', 'q', 'r', 's'],
        "8": ['t', 'u', 'v'],
        "9": ['w', 'x', 'y', 'z'],
    }
    def letterCombinations(self, digits):
        """
        Return every letter string the digit sequence can spell.

        :type digits: str
        :rtype: List[str]
        """
        if digits == '':
            return []
        combos = []
        for digit in digits:
            letters = self.reflect_table.get(digit)
            if combos:
                # Extend every existing combination with each new letter,
                # preserving the original letter-major output order.
                # (Also drops the redundant result.clear() the original did
                # right before rebinding the list.)
                combos = [prefix + letter for letter in letters for prefix in combos]
            else:
                combos = list(letters)
        return combos
# Demo: the 3x3 = 9 combinations for "23".
solution = Solution()
#["ad", "ae", "af", "bd", "be", "bf", "cd", "ce", "cf"]
digits = "23"  # renamed from `input`, which shadowed the builtin
print(solution.letterCombinations(digits))
# -*- coding: utf-8 -*-
"""
Created on Wed Mar 6 17:59:56 2019
@author: e1077783
"""
import sklearn.datasets
import numpy
import matplotlib.pyplot as plt
import pandas as pd
from sklearn.preprocessing import LabelEncoder, OneHotEncoder, StandardScaler
from sklearn.impute import SimpleImputer
from sklearn.compose import ColumnTransformer, make_column_transformer
from sklearn.pipeline import make_pipeline, Pipeline
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LogisticRegression
def required_columns(df):
    """Return *df*'s feature columns, excluding the target ('Survived') and
    identifier-like / unused columns, preserving the frame's column order."""
    excluded = {'Survived', 'Name', 'PassengerId', 'Ticket', 'Cabin', 'SibSp', 'Parch'}
    return [column for column in df.columns if column not in excluded]
titanic = pd.read_csv("train.csv")
# .loc accepts a callable: required_columns is invoked with the frame and its
# return value (the feature column names) is used as the column selector.
X , Y = titanic.loc[:, required_columns], titanic.loc[:, titanic.columns == 'Survived']
X_train,X_test, Y_train,Y_test = train_test_split(X,Y, random_state=0)
col_with_nan = ['Age']    # numeric with missing values -> impute, then scale
col_to_scale = ['Fare']   # numeric and complete -> scale only
col_to_encode = ['Embarked', 'Sex', 'Pclass']#[0,1,4,5]
imputer = Pipeline(steps = [ ('imp', SimpleImputer(strategy='mean')), ('scaler', StandardScaler())])
scaler = Pipeline(steps = [('scaler', StandardScaler())])
one_hot_encoder = Pipeline(steps = [('imp2', SimpleImputer(strategy='most_frequent')),
                                    ('onhot_enc', OneHotEncoder(handle_unknown='ignore')) ])
# NOTE(review): this rebinding makes the `scaler` assignment above dead code.
scaler = Pipeline(steps = [('scale', StandardScaler())])
transformer = ColumnTransformer(
        transformers = [('imp_scale', imputer, col_with_nan), \
                        ('scaler', scaler, col_to_scale), \
                        ('imp_enc_only', one_hot_encoder, col_to_encode)],
        #sparse_threshold = 0,
        remainder = 'passthrough'
        )
#x_tranformed = transformer.fit_transform(X_train, Y_train)
# NOTE(review): the encoded labels are discarded; the pipeline below is fitted
# on the raw Y_train DataFrame — confirm this is intended.
label_encoder = LabelEncoder()
label_encoder.fit_transform(Y_train)
pipeline = Pipeline(steps=[('transformer', transformer), ('classified', LogisticRegression(solver='lbfgs'))])
pipeline.fit(X_train, Y_train)
print(pipeline.score(X_test,Y_test))
import numpy as np
import matplotlib.pyplot as plt
from tensorflow.keras.datasets import mnist
from sklearn.model_selection import train_test_split
from sklearn.decomposition import PCA
(x_train, _), (x_test, _) = mnist.load_data()
x = np.append(x_train, x_test, axis=0)   # combine train + test into one array
print(x.shape)
# Exercise:
# How many PCA components keep >= 0.95 of the cumulative explained variance?
# Apply everything covered about PCA and verify.
x = x.reshape(x.shape[0], x.shape[1] * x.shape[2])  # flatten 28x28 images to 784 features
pca = PCA()
x = pca.fit_transform(x)
cumsum = np.cumsum(pca.explained_variance_ratio_)
print(cumsum)
# Smallest component count whose cumulative explained variance reaches 0.95.
d = np.argmax(cumsum >= 0.95) + 1
print('축소된 차원 수 :', d)
plt.plot(cumsum)
plt.grid()
plt.show()
# Re-fit with the reduced dimensionality and split back into train/test.
pca = PCA(n_components=d)
x = pca.fit_transform(x)
print(x.shape)
x_train, x_test = train_test_split(x, test_size=1/7, random_state=45)
print(x_train.shape, x_test.shape)
|
# TA-Lib style indicator catalogues: function code -> human-readable name.
# Overlap studies (moving averages and price envelopes).
overlap_studies = {
    "BBANDS":"Bollinger Bands",
    "DEMA":"Double Exponential Moving Average",
    "EMA":"Exponential Moving Average",
    "HT_TRENDLINE":"Hilbert Transform - Instantaneous Trendline",
    "KAMA":"Kaufman Adaptive Moving Average",
    "MA":"Moving average",
    "MAMA":"MESA Adaptive Moving Average",
    "MAVP":"Moving average with variable period",
    "MIDPOINT":"MidPoint over period",
    "MIDPRICE":"Midpoint Price over period",
    "SAR":"Parabolic SAR",
    "SAREXT":"Parabolic SAR - Extended",
    "SMA":"Simple Moving Average",
    "T3":"Triple Exponential Moving Average (T3)",
    "TEMA":"Triple Exponential Moving Average",
    "TRIMA":"Triangular Moving Average",
    "WMA":"Weighted Moving Average"
}
# Momentum indicators (trend strength and rate-of-change oscillators).
momentum_indicators = {
    "ADX":"Average Directional Movement Index",
    "ADXR":"Average Directional Movement Index Rating",
    "APO":"Absolute Price Oscillator",
    "AROON":"Aroon",
    "AROONOSC":"Aroon Oscillator",
    "BOP":"Balance Of Power",
    "CCI":"Commodity Channel Index",
    "CMO":"Chande Momentum Oscillator",
    "DX":"Directional Movement Index",
    "MACD":"Moving Average Convergence/Divergence",
    "MACDEXT":"MACD with controllable MA type",
    "MACDFIX":"Moving Average Convergence/Divergence Fix 12/26",
    "MFI":"Money Flow Index",
    "MINUS_DI":"Minus Directional Indicator",
    "MINUS_DM":"Minus Directional Movement",
    "MOM":"Momentum",
    "PLUS_DI":"Plus Directional Indicator",
    "PLUS_DM":"Plus Directional Movement",
    "PPO":"Percentage Price Oscillator",
    "ROC":"Rate of change : ((price/prevPrice)-1)*100",
    "ROCP":"Rate of change Percentage: (price-prevPrice)/prevPrice",
    "ROCR":"Rate of change ratio: (price/prevPrice)",
    "ROCR100":"Rate of change ratio 100 scale: (price/prevPrice)*100",
    "RSI":"Relative Strength Index",
    "STOCH":"Stochastic",
    "STOCHF":"Stochastic Fast",
    "STOCHRSI":"Stochastic Relative Strength Index",
    "TRIX":"1-day Rate-Of-Change (ROC) of a Triple Smooth EMA",
    "ULTOSC":"Ultimate Oscillator",
    "WILLR":"Williams' %R"
}
# Volume indicators. NOTE(review): "volumne" is a typo, but the name is a
# module-level identifier other code may import — left unchanged.
volumne_indicators = {
    "AD":"Chaikin A/D Line",
    "ADOSC":"Chaikin A/D Oscillator",
    "OBV":"On Balance Volume",
}
# Volatility indicators (true-range family).
volatility_indicators = {
    "ATR":"Average True Range",
    "NATR":"Normalized Average True Range",
    "TRANGE":"True Range",
}
# Price transforms (composite prices from OHLC).
price_transform = {
    "AVGPRICE":"Average Price",
    "MEDPRICE":"Median Price",
    "TYPPRICE":"Typical Price",
    "WCLPRICE":"Weighted Close Price",
}
# Hilbert-transform cycle indicators.
cycle_indicators = {
    "HT_DCPERIOD":"Hilbert Transform - Dominant Cycle Period",
    "HT_DCPHASE":"Hilbert Transform - Dominant Cycle Phase",
    "HT_PHASOR":"Hilbert Transform - Phasor Components",
    "HT_SINE":"Hilbert Transform - SineWave",
    "HT_TRENDMODE":"Hilbert Transform - Trend vs Cycle Mode",
}
# Statistical functions over price series.
statistic_functions = {
    "BETA":"Beta",
    "CORREL":"Pearson's Correlation Coefficient (r)",
    "LINEARREG":"Linear Regression",
    "LINEARREG_ANGLE":"Linear Regression Angle",
    "LINEARREG_INTERCEPT":"Linear Regression Intercept",
    "LINEARREG_SLOPE":"Linear Regression Slope",
    "STDDEV":"Standard Deviation",
    "TSF":"Time Series Forecast",
    "VAR":"Variance"
}
#coverage.py
#encoding:utf8
from user_cf import user_cf
from operator import itemgetter
from settings import K
def coverage(train, test, W, N):
    """Coverage metric: the fraction of the full item catalogue that the
    recommender surfaces in any training user's top-N list.

    *test* is accepted for signature parity with the other metrics but unused.
    """
    recommended = set()
    catalogue = set()
    for user in train.keys():
        for entry in train[user]:
            catalogue.add(entry[1])
        scores = user_cf(user, train, W, K)  # getrecommendation(user,N)
        top_n = sorted(scores.items(), key=itemgetter(1), reverse=True)[0:N]
        recommended.update(item_id for item_id, _ in top_n)
    return len(recommended) / (len(catalogue) * 1.0)
|
from loaders import load_IAM, load_MNIST, load_split_MNIST
def get_loader(name, label=0):
    """Return the batch generator for dataset *name*.

    :param name: one of "IAM", "MNIST" or "split_MNIST"
    :param label: class label forwarded to the split_MNIST loader only
    :raises ValueError: for an unknown dataset name (the original silently
        returned None, which deferred the failure to the caller)
    """
    if name == "IAM":
        return load_IAM.batch_generator()
    if name == "MNIST":
        return load_MNIST.batch_generator()
    if name == "split_MNIST":
        return load_split_MNIST.batch_generator(label)
    raise ValueError("unknown loader name: %r" % (name,))
class Vehicle:
    """Base vehicle: a model name and a registration number, set via setters."""
    def modelname1(self,modelname):
        # Setter: stores the model name on the instance.
        self.modelname=modelname
    def Regnumber(self,Regno):
        # Setter: stores the registration number on the instance.
        self.Regno=Regno
class Bus(Vehicle):
    """Specialises Vehicle with a colour attribute and a print helper."""
    def colourname(self,colour):
        # Setter: stores the colour on the instance.
        self.colour=colour
    def printval(self):
        # Print registration number, model name and colour, space separated.
        # Assumes the Vehicle setters were called first (attrs exist).
        print(self.Regno,self.modelname,self.colour)
# Demo: build a bus, populate its attributes via the setters, and print them.
B=Bus()
B.modelname1('KSRTC')
B.Regnumber('KL45')
B.colourname('RED')
B.printval()
'''The MIT License (MIT)
Copyright (c) 2021, Demetrius Almada
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.'''
import numpy as np
import cv2
class YOLOObjectDetector:
    """Wrapper around an OpenCV-DNN YOLO network.

    Loads the Darknet config/weights named in *config*, runs detection on
    frames, and keeps the most recent boxes/confidences/class ids on the
    instance for the drawing helper.
    """
    def __init__(self, config):
        self.object_detected = False
        # Only labels listed here are reported; all other classes are ignored.
        self.object_list = config.OBJECT_LIST
        self.scale_factor = config.YOLO_SCALEFACTOR
        self.kernel = config.YOLO_KERNEL
        self.confidence = config.YOLO_CONFIDENCE
        self.threshold = config.YOLO_THRESHOLD
        self.thickness = config.BB_THICKNESS
        self.font = config.BB_FONTFACE
        self.font_scale = config.BB_FONTSCALE
        self.text_color = config.BB_TEXTCOLOR
        self.text_thickness = config.BB_TEXTTHICKNESS
        # Fix: the label file was opened without ever being closed.
        with open(config.COCO_LABELS_PATH) as label_file:
            self.labels = label_file.read().strip().split('\n')
        # One random (100-255, avoids very dark shades) colour per class label.
        self.colors = np.random.randint(
            100,
            255,
            size=(len(self.labels), 3),
            dtype='uint8'
        ).tolist()
        self.network = cv2.dnn.readNetFromDarknet(
            config.YOLO_CONFIG_PATH,
            config.YOLO_WEIGHTS_PATH
        )
        self.layer_names = None   # resolved lazily on the first predict()
        self.boxes = None
        self.confidences = None
        self.class_ids = None
    def get_layernames(self):
        """Cache the names of the network's unconnected (output) layers."""
        names = self.network.getLayerNames()
        # Bug fix: getUnconnectedOutLayers() returns 1-element arrays on older
        # OpenCV builds but plain scalars from 4.5.4 on, so the original
        # `i[0] - 1` indexing crashes on current OpenCV. Flattening through a
        # numpy array handles both layouts.
        self.layer_names = [
            names[i - 1] for i in
            np.array(self.network.getUnconnectedOutLayers()).flatten()
        ]
    def preprocess_image(self, image):
        """Convert a BGR frame to the network's input blob (RGB, scaled)."""
        blob = cv2.dnn.blobFromImage(
            image,
            self.scale_factor,
            self.kernel,
            swapRB=True,
            crop=False
        )
        return blob
    def predict(self, blob):
        """Run a forward pass and return the raw output-layer tensors."""
        self.network.setInput(blob)
        if self.layer_names is None:
            self.get_layernames()
        return self.network.forward(self.layer_names)
    def prune(self, boxes, confidences):
        """Apply non-maximum suppression; returns the surviving box indices."""
        return cv2.dnn.NMSBoxes(
            boxes,
            confidences,
            self.confidence,
            self.threshold
        )
    def detect_objects(self, image, height, width):
        """Detect configured objects in *image*.

        :param image: BGR frame
        :param height: frame height in pixels, used to de-normalise boxes
        :param width: frame width in pixels
        :return: (boxes, confidences, class_ids) after NMS; also stored on
            the instance, with self.object_detected reflecting the result
        """
        blob = self.preprocess_image(image)
        outputs = self.predict(blob)
        boxes = []
        confidences = []
        class_ids = []
        for output in outputs:
            for detection in output:
                scores = detection[5:]
                class_id = np.argmax(scores)
                if self.labels[class_id] not in self.object_list:
                    continue
                confidence = scores[class_id]
                if confidence > self.confidence:
                    # YOLO emits centre-relative normalised coords; convert to
                    # a pixel-space top-left box.
                    box = detection[0:4] * np.array(
                        [width, height, width, height]
                    )
                    box = box.astype('int')
                    (centerX, centerY, box_width, box_height) = box
                    x = int(centerX - (box_width / 2))
                    y = int(centerY - (box_height / 2))
                    boxes.append([x, y, int(box_width), int(box_height)])
                    confidences.append(float(confidence))
                    class_ids.append(class_id)
        indices = self.prune(boxes, confidences)
        if len(indices) > 0:
            self.object_detected = True
            indices = indices.flatten()
            boxes = [boxes[i] for i in indices]
            confidences = [confidences[i] for i in indices]
            class_ids = [class_ids[i] for i in indices]
        else:
            self.object_detected = False
        self.boxes = boxes
        self.confidences = confidences
        self.class_ids = class_ids
        return (boxes, confidences, class_ids)
def draw_boundingboxes(frame, object_detector):
    """Run detection on *frame* and draw labelled bounding boxes in place.

    :param frame: BGR image, modified in place
    :param object_detector: a YOLOObjectDetector instance
    """
    (boxes, confidences, class_ids) = object_detector.detect_objects(
        frame,
        frame.shape[0],
        frame.shape[1]
    )
    for i in range(len(boxes)):
        (x, y) = (boxes[i][0], boxes[i][1])
        (w, h) = (boxes[i][2], boxes[i][3])
        # Per-class colour chosen at detector construction time.
        color = object_detector.colors[class_ids[i]]
        cv2.rectangle(
            frame,
            (x, y),
            (x + w, y + h),
            color,
            object_detector.thickness
        )
        label = f'{object_detector.labels[class_ids[i]]}: {confidences[i]:.4f}'
        # Measure the label so its filled background exactly fits the text.
        ((lx, ly), _) = cv2.getTextSize(
            label,
            object_detector.font,
            object_detector.font_scale,
            object_detector.text_thickness
        )
        cv2.rectangle(
            frame,
            (x, y),
            (x + lx, y - ly),
            color,
            cv2.FILLED
        )
        cv2.putText(
            frame,
            label,
            (x, y),
            object_detector.font,
            object_detector.font_scale,
            object_detector.text_color,
            object_detector.text_thickness
        )
|
from django.conf.urls import patterns, include, url
# URL routes for the sharefile app, using string view paths resolved lazily.
# NOTE(review): django.conf.urls.patterns() was deprecated in Django 1.8 and
# removed in 1.10 — this module requires an older Django release.
urlpatterns = patterns('',
    url(r'^$', 'apps.sharefile.views.friendFiles', name='friendFiles'),
    url(r'sendMsg2cscServer/$', 'apps.sharefile.views.sendMyInfoToServer', name='sendMsg2cscServer'),
    url(r'^peerPort/$', 'apps.sharefile.views.peerPort', name='peerPort'),
)
|
#!/usr/kai/anaconda3/python
# -*- coding: utf-8 -*-
# Hilfs-Funktionen:
# zum Spielen von Go9x9 (Brettdrehung, Print)
# zur Konvertierung des NN Input Formats (B7)
# zur Speicherung des MCTS Trees
# V1: setzt auf auf V4: gameT3
# V2: b7 convert functions
# V3: testing
# V4: drehung
# V5: Performance: b, b1 und neues Int statt b7
# V6: timer
#
import torch, time
import numpy as np
PLAYER_BLACK = 1
PLAYER_WHITE = -1
size = 9 # board size
size2 = size*size
ANZ_POSITIONS = size2+1  # 81 board points plus the pass move
boardSchwarzNP = np.ones((size, size), dtype=float)   # all-ones plane: black to move
boardWeissNP = np.zeros((size, size), dtype=float)    # all-zeros plane: white to move
boardSchwarz = [[1] * size for i in range(size)]      # list-form black-to-move plane
boardWeiss = [[0] * size for i in range(size)]        # list-form white-to-move plane
b2Initial = [[[0] * size for i in range(size)] for j in range(2)]  # empty (current, previous) pair
# Board encoding: b = [[0] * size for i in range(size)]
# 9*9 matrix, (0,0) is top left
# black = PLAYER_BLACK, white = PLAYER_WHITE, empty = 0
# Board5 = board * 5 is the NN input (black/white each coded 1/0
# for the current and the previous board, plus a black-or-white plane)
# Board2 is a performance encoding as one int, combining 2**x delta bits and
# 3**x stone digits over cells (0,0), (0,1), ... (8,8) with values -1/0/1.
# The previous board is recovered from a 0/1 delta: 0=same stone, 1=placed or captured
class BoardValues:
    """
    Per-board2-state MCTS statistics: visit counts, accumulated values and
    prior probabilities (the running mean Q is derived from counts/values).
    """
    def __init__(self):
        # self.b maps an int-encoded board2 state to a three-element list:
        #   [0] N(s, a): visit count per action
        #   [1] W(s, a): accumulated value per action
        #   [2] P(s, a): prior probability per action
        self.b = {}
    def clear(self):
        self.b.clear()
    def __len__(self):
        return len(self.b)
    def expand(self, b2Int, probs):
        # Register a freshly expanded node with zeroed visit/value statistics.
        self.b[b2Int] = [[0] * ANZ_POSITIONS, [0.0] * ANZ_POSITIONS, probs]
    def backup(self, b2Int, action, val):
        # Propagate one simulation result: bump the count, accumulate the value.
        counts, values, _ = self.b[b2Int]
        counts[action] += 1
        values[action] += val
class GoTimer:
    """Accumulating stopwatch: start()/stop() may be paired repeatedly;
    timerPrint() reports the per-step average (and optionally the total)."""
    def __init__(self, routine, mitGesamt=False):
        self.t = 0          # timestamp of the most recent start()
        self.tUsed = 0      # total seconds accumulated over all start/stop pairs
        self.anz = 0        # number of completed start/stop pairs
        self.routine = routine      # label used in the printed report
        self.mitGesamt = mitGesamt  # also print the grand total, not just the average
    def start(self):
        self.t = time.time()
    def stop(self):
        self.tUsed += time.time() - self.t
        self.anz += 1
    def timerPrint(self):
        """Print the timing report. Does not modify the accumulated state."""
        if self.mitGesamt:
            stdUsed = self.tUsed // 3600
            minUsed = (self.tUsed - 3600 * stdUsed) // 60
            secUsed = (self.tUsed - 3600 * stdUsed) % 60
            print('Routine: '+self.routine+', Zeit insg.: %02d:%02d:%02d' % (stdUsed, minUsed, secUsed))
        # Bug fix: the original overwrote self.tUsed with the per-step average,
        # corrupting the accumulated total for later stop()/timerPrint() calls;
        # the average is now computed in a local variable.
        avg = round(self.tUsed / self.anz)
        minUsed = avg // 60
        secUsed = avg % 60
        print('Routine: '+self.routine+', Zeit pro Step: %02d:%02d' % (minUsed, secUsed))
def b5To2(b5):
    """Collapse the 5-plane NN input back into the (current, previous)
    -1/0/1 board pair. Plane 4 encodes which player moves and therefore
    which plane pair holds black vs. white stones."""
    current = [[0] * size for _ in range(size)]
    previous = [[0] * size for _ in range(size)]
    f01 = b5[4][0][0]
    for row in range(size):
        for col in range(size):
            if b5[1 - f01][row][col] == 1:
                current[row][col] = 1
            else:
                current[row][col] = -b5[f01][row][col]
            if b5[3 - f01][row][col] == 1:
                previous[row][col] = 1
            else:
                previous[row][col] = -b5[2 + f01][row][col]
    return current, previous
def b1To5(b, b1, whoMoves):
    """Expand the (current, previous) -1/0/1 boards *b*, *b1* into the
    5-plane NN input for player index *whoMoves* (0 or 1)."""
    planes = [[[0] * size for _ in range(size)] for _ in range(5)]
    # Plane 4 marks whose turn it is (shared module-level plane objects).
    planes[4] = boardWeiss if whoMoves == 0 else boardSchwarz
    for row in range(size):
        for col in range(size):
            cur = b[row][col]
            prev = b1[row][col]
            if cur == 1:
                planes[1 - whoMoves][row][col] = 1
            else:
                planes[whoMoves][row][col] = -cur
            if prev == 1:
                planes[3 - whoMoves][row][col] = 1
            else:
                planes[2 + whoMoves][row][col] = -prev
    return planes
def printBrett(b, istFlat=False, mitFloat=False):
    """Print a board to stdout.

    :param b: 9x9 nested list, or an 82-entry flat list when *istFlat*
    :param istFlat: treat *b* as flat (position 81 is the pass move)
    :param mitFloat: format cells as floats instead of right-aligned ints
    """
    for row in range(size):
        for col in range(size):
            cell = b[row * size + col] if istFlat else b[row][col]
            if mitFloat:
                print('%4.3f ' % (cell), end='')
            else:
                print(str(cell).rjust(3), end='')
        print('')
    if istFlat:
        print('Wert für pass: ', b[81])
def printBufferEntry(buf):
    """Dump one replay-buffer entry to stdout.

    *buf* is a list: [board2-int, whoMoves, probs, value].
    """
    print(buf[1], ' moves')
    print('Probs:')
    printBrett(buf[2], istFlat=True, mitFloat=True)
    print('Value: ', buf[3])
    print('current Board:')
    printBrett(intToB(buf[0])[0])
    print('')
def bToInt(board2):
    """Encode a (current, previous) 9x9 board pair as one int.

    Low 81 bits: delta mask (1 where the two boards differ); above that,
    base-3 digits of (current cell value + 1). Mirrors the encoding used
    by goSpielNoGraph; only needed for board rotations.
    """
    delta_bits = 0
    stone_digits = 0
    for row in range(9):
        for col in range(9):
            pos = 9 * row + col
            if board2[0][row][col] != board2[1][row][col]:
                delta_bits += 2 ** pos
            stone_digits += (board2[0][row][col] + 1) * 3 ** pos
    return stone_digits * 2 ** 81 + delta_bits
def intToB(num):
    # Convert the int encoding back into the (current, previous) 9x9 boards.
    # Inverse of bToInt: low size2 bits are the delta mask, the rest are
    # base-3 digits of (current cell value + 1).
    board2 = [[[0] * size for i in range(size)] for j in range(2)]
    bDelta = [[0] * 9 for i in range(size)]
    numDelta = num % 2**size2
    numB = num // 2**size2
    # Unpack the delta mask bit by bit.
    for row in range(size):
        for col in range(size):
            bDelta[row][col] = numDelta % 2
            numDelta = numDelta // 2
    # Unpack the current board digit by digit (-1/0/1 per cell).
    for row in range(size):
        for col in range(size):
            board2[0][row][col] = numB % 3 - 1
            numB = numB // 3
    # Find the colour of the last-placed stone: any changed, occupied cell.
    found = False
    for row in range(size):
        if found:
            break
        for col in range(size):
            if bDelta[row][col] == 1 and board2[0][row][col] != 0:
                farbeAct = board2[0][row][col]
                found = True
                break
    # Reconstruct the previous board from the delta mask.
    for row in range(size):
        for col in range(size):
            if bDelta[row][col] == 0:
                board2[1][row][col] = board2[0][row][col]
            elif board2[0][row][col] == 0:
                # Changed and now empty: a stone of the opposing colour was captured.
                # NOTE(review): if every changed cell is empty, farbeAct is never
                # bound and this line raises NameError — confirm that state
                # cannot be produced by the encoder.
                board2[1][row][col] = -farbeAct
    return board2
def _encode_list_state(dest_np, board2, whoMoves):
    """
    In-place encode a list-form game state into a zeroed numpy plane stack.
    :param dest_np: destination array, expected to be all zeros
    :param board2: (current, previous) boards in list form
    :param whoMoves: player index to move (0 or 1)
    """
    dest_np[4] = boardWeissNP if whoMoves == 0 else boardSchwarzNP
    for row in range(size):
        for col in range(size):
            cur = board2[0][row][col]
            prev = board2[1][row][col]
            if cur == 1:
                dest_np[1 - whoMoves][row][col] = 1
            else:
                dest_np[whoMoves][row][col] = -cur
            if prev == 1:
                dest_np[3 - whoMoves][row][col] = 1
            else:
                dest_np[2 + whoMoves][row][col] = -prev
def state_lists_to_batch(state_lists, whoMoves_lists, device="cpu"):
    """
    Convert a list of list-form states into a network input batch.
    :param state_lists: list of 'list states'
    :param whoMoves_lists: list of player indices to move, one per state
    :return: FloatTensor of shape (batch, 5, size, size) on *device*
    """
    batch = np.zeros((len(state_lists), 5, size, size), dtype=float)
    for idx, (state, mover) in enumerate(zip(state_lists, whoMoves_lists)):
        _encode_list_state(batch[idx], state, mover)
    return torch.FloatTensor(batch).to(device)
def dreh(reihe, spalte, drehung):
    # Return the mirrored/rotated (row, col) for symmetry augmentation.
    # NOTE(review): the *drehung* encoding mixes two schemes — multiples of 90
    # mean a rotation by that many degrees, while 1..4 select mirror (and
    # mirror-plus-rotate) variants; confirm against the callers' value range.
    if drehung % 90 == 0:
        # Rotate 90 degrees once per quarter turn.
        for i in range(drehung // 90):
            reiheAlt = reihe
            reihe = spalte
            spalte = size - 1 - reiheAlt
    elif drehung == 1:
        # Mirror: flip rows.
        reihe = size - 1 - reihe
    elif drehung == 2:
        # Mirror: flip columns.
        spalte = size - 1 - spalte
    elif drehung == 3:
        # Flip rows, then rotate 90 degrees.
        reihe = size - 1 - reihe
        reiheAlt = reihe
        reihe = spalte
        spalte = size - 1 - reiheAlt
    else: # drehung == 4
        # Flip columns, then rotate 90 degrees.
        spalte = size - 1 - spalte
        reiheAlt = reihe
        reihe = spalte
        spalte = size - 1 - reiheAlt
    return reihe, spalte
def drehPosition(position, drehung):
    """Return the flat position/probability list remapped under the
    mirror/rotation selected by *drehung*."""
    remapped = [0] * ANZ_POSITIONS
    remapped[81] = position[81]  # the pass move is unaffected by symmetry
    for idx in range(size2):
        reihe, spalte = dreh(idx // 9, idx % 9, drehung)
        remapped[reihe * size + spalte] = position[idx]
    return remapped
def drehB2(b2int, drehung):
    """Return the int-encoded board pair mirrored/rotated by *drehung*
    (0 is the identity and short-circuits the decode/encode round trip)."""
    if drehung == 0:
        return b2int
    boards = intToB(b2int)
    rotated = [[[0] * size for _ in range(size)] for _ in range(2)]
    for row in range(size):
        for col in range(size):
            reihe, spalte = dreh(row, col, drehung)
            rotated[0][reihe][spalte] = boards[0][row][col]
            rotated[1][reihe][spalte] = boards[1][row][col]
    return bToInt(rotated)
|
from django import forms
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
class LoginForm(forms.Form):
    """Username/password login form; labels hidden, placeholders used instead."""
    username = forms.CharField(widget=forms.TextInput(attrs={'placeholder':'username'}),label='')
    password = forms.CharField(widget = forms.PasswordInput(attrs={'placeholder':'password'}),label='')
class signupform(UserCreationForm):
    """Registration form extending Django's UserCreationForm with optional
    first/last name fields and a required email address."""
    first_name = forms.CharField(max_length=30, required=False)
    last_name = forms.CharField(max_length=30,required=False)
    email = forms.EmailField(max_length=254, help_text='Required')
    class Meta:
        model = User
        fields = ('username','first_name','last_name','email','password1','password2')
# class UserRegistrationFrom(forms.ModelForm):
# password = forms.CharField(label = 'password',
# widget = forms.PasswordInput)
# password2 = forms.CharField(label='repeat passowrd',
# widget=forms.PasswordInput)
# class Meta:
# model = User
# fields = ('username', 'first_name', 'email')
# def clean_password2(self):
# cd = self.cleaned_data
# if cd['password'] != cd['password2']:
# raise forms.ValidationError('Passwords don\'t match.')
# return cd['password2'] |
#-----Statement of Authorship----------------------------------------#
#
# This is an individual assessment item. By submitting this
# code I agree that it represents my own work. I am aware of
# the University rule that a student must not act in a manner
# which constitutes academic dishonesty as stated and explained
# in QUT's Manual of Policies and Procedures, Section C/5.3
# "Academic Integrity" and Section E/2.1 "Student Code of Conduct".
#
# Student no: n9965661
# Student name: Victor Wang
#
# NB: Files submitted without a completed copy of this statement
# will not be marked. All files submitted will be subjected to
# software plagiarism analysis using the MoSS system
# (http://theory.stanford.edu/~aiken/moss/).
#
#--------------------------------------------------------------------#
#-----Task Description-----------------------------------------------#
#
# TREASURE MAP
#
# This assignment tests your skills at processing data stored in
# lists, creating reusable code and following instructions to display
# a complex visual image. The incomplete Python program below is
# missing a crucial function, "follow_path". You are required to
# complete this function so that when the program is run it traces
# a path on the screen, drawing "tokens" to indicate discoveries made
# along the way, while using data stored in a list to determine the
# steps to be taken. See the instruction sheet accompanying this
# file for full details.
#
# Note that this assignment is in two parts, the second of which
# will be released only just before the final deadline. This
# template file will be used for both parts and you will submit
# your final solution as a single Python 3 file, whether or not you
# complete both parts of the assignment.
#
#--------------------------------------------------------------------#
#-----Preamble-------------------------------------------------------#
#
# This section imports necessary functions and defines constant
# values used for creating the drawing canvas. You should not change
# any of the code in this section.
#
# Import the functions needed to complete this assignment. You
# should not need to use any other modules for your solution. In
# particular, your solution must not rely on any non-standard Python
# modules that need to be downloaded and installed separately,
# because the markers will not have access to such modules.
from turtle import *
from math import *
from random import *
# Define constant values used in the main program that sets up
# the drawing canvas. Do not change any of these values.
grid_size = 100 # pixels
num_squares = 7 # to create a 7x7 map grid
margin = 50 # pixels, the size of the margin around the grid
legend_space = 400 # pixels, the space to leave for the legend
# Overall window dimensions derived from the grid geometry above.
window_height = grid_size * num_squares + margin * 2
window_width = grid_size * num_squares + margin + legend_space
font_size = 18 # size of characters for the coords
# Valid locations for a path's 'Start' instruction.
starting_points = ['Top left', 'Top right', 'Centre',
                   'Bottom left', 'Bottom right']
#
#--------------------------------------------------------------------#
#-----Functions for Creating the Drawing Canvas----------------------#
#
# The functions in this section are called by the main program to
# manage the drawing canvas for your image. You should not change
# any of the code in this section. (Very keen students are welcome
# to draw their own background, provided they do not change the map's
# grid or affect the ability to see it.)
#
# Set up the canvas and draw the background for the overall image
def create_drawing_canvas():
    """Create the turtle window, draw the labelled 7x7 grid, and reserve the
    legend area. (Assessment template code — behaviour must not change.)"""
    # Set up the drawing window with enough space for the grid and
    # legend
    setup(window_width, window_height)
    setworldcoordinates(-margin, -margin, window_width - margin,
                        window_height - margin)
    # Draw as quickly as possible
    tracer(False)
    # Choose a neutral background colour (if you want to draw your
    # own background put the code here, but do not change any of the
    # following code that draws the grid)
    bgcolor('light grey')
    # Get ready to draw the grid
    penup()
    color('slate grey')
    width(2)
    # Draw the horizontal grid lines
    setheading(0) # face east
    for y_coord in range(0, (num_squares + 1) * grid_size, grid_size):
        penup()
        goto(0, y_coord)
        pendown()
        forward(num_squares * grid_size)
    # Draw the vertical grid lines
    setheading(90) # face north
    for x_coord in range(0, (num_squares + 1) * grid_size, grid_size):
        penup()
        goto(x_coord, 0)
        pendown()
        forward(num_squares * grid_size)
    # Draw each of the labels on the x axis
    penup()
    y_offset = -27 # pixels
    for x_coord in range(0, (num_squares + 1) * grid_size, grid_size):
        goto(x_coord, y_offset)
        write(str(x_coord), align = 'center',
              font=('Arial', font_size, 'normal'))
    # Draw each of the labels on the y axis
    penup()
    x_offset, y_offset = -5, -10 # pixels
    for y_coord in range(0, (num_squares + 1) * grid_size, grid_size):
        goto(x_offset, y_coord + y_offset)
        write(str(y_coord), align = 'right',
              font=('Arial', font_size, 'normal'))
    # Mark the space for drawing the legend
    goto((num_squares * grid_size) + margin, (num_squares * grid_size) // 2)
    write(' Put your legend here', align = 'left',
          font=('Arial', 24, 'normal'))
    # Reset everything ready for the student's solution
    pencolor('black')
    width(1)
    penup()
    home()
    tracer(True)
# End the program and release the drawing canvas to the operating
# system. By default the cursor (turtle) is hidden when the
# program ends - call the function with False as the argument to
# prevent this.
def release_drawing_canvas(hide_cursor = True):
    """Flush any pending drawing and hand the canvas to the OS event loop.

    By default the turtle cursor is hidden before the window blocks in
    done(); pass False to keep it visible as a debugging aid.
    """
    # Re-enable tracing so anything drawn while it was off is rendered.
    tracer(True)
    if hide_cursor:
        hideturtle()
    # Enter the Tk main loop; returns only when the window is closed.
    done()
#
#--------------------------------------------------------------------#
#-----Test Data for Use During Code Development----------------------#
#
# The "fixed" data sets in this section are provided to help you
# develop and test your code. You can use them as the argument to
# the follow_path function while perfecting your solution. However,
# they will NOT be used to assess your program. Your solution will
# be assessed using the random_path function appearing below. Your
# program must work correctly for any data set that can be generated
# by the random_path function.
#
# Each of the data sets is a list of instructions expressed as
# triples. The instructions have two different forms. The first
# instruction in the data set is always of the form
#
# ['Start', location, token_number]
#
# where the location may be 'Top left', 'Top right', 'Centre',
# 'Bottom left' or 'Bottom right', and the token_number is an
# integer from 0 to 4, inclusive. This instruction tells us where
# to begin our treasure hunt and the token that we find there.
# (Every square we visit will yield a token, including the first.)
#
# The remaining instructions, if any, are all of the form
#
# [direction, number_of_squares, token_number]
#
# where the direction may be 'North', 'South', 'East' or 'West',
# the number_of_squares is a positive integer, and the token_number
# is an integer from 0 to 4, inclusive. This instruction tells
# us where to go from our current location in the grid and the
# token that we will find in the target square. See the instructions
# accompanying this file for examples.
#
# Some starting points - the following fixed paths just start a path
# with each of the five tokens in a different location
fixed_path_0 = [['Start', 'Top left', 0]]
fixed_path_1 = [['Start', 'Top right', 1]]
fixed_path_2 = [['Start', 'Centre', 2]]
fixed_path_3 = [['Start', 'Bottom left', 3]]
fixed_path_4 = [['Start', 'Bottom right', 4]]
# Some miscellaneous paths which encounter all five tokens once
fixed_path_5 = [['Start', 'Top left', 0], ['East', 1, 1], ['East', 1, 2],
                ['East', 1, 3], ['East', 1, 4]]
fixed_path_6 = [['Start', 'Bottom right', 0], ['West', 1, 1], ['West', 1, 2],
                ['West', 1, 3], ['West', 1, 4]]
fixed_path_7 = [['Start', 'Centre', 4], ['North', 2, 3], ['East', 2, 2],
                ['South', 4, 1], ['West', 2, 0]]
# A path which finds each token twice
fixed_path_8 = [['Start', 'Bottom left', 1], ['East', 5, 2],
                ['North', 2, 3], ['North', 4, 0], ['South', 3, 2],
                ['West', 4, 0], ['West', 1, 4],
                ['East', 3, 1], ['South', 3, 4], ['East', 1, 3]]
# Some short paths
fixed_path_9 = [['Start', 'Centre', 0], ['East', 3, 2],
                ['North', 2, 1], ['West', 2, 3],
                ['South', 3, 4], ['West', 4, 1]]
fixed_path_10 = [['Start', 'Top left', 2], ['East', 6, 3], ['South', 1, 0],
                 ['South', 1, 0], ['West', 6, 2], ['South', 4, 3]]
fixed_path_11 = [['Start', 'Top left', 2], ['South', 1, 0], ['East', 2, 4],
                 ['South', 1, 1], ['East', 3, 4], ['West', 1, 3],
                 ['South', 2, 0]]
# Some long paths
fixed_path_12 = [['Start', 'Top right', 2], ['South', 4, 0],
                 ['South', 1, 1], ['North', 3, 4], ['West', 4, 0],
                 ['West', 2, 0], ['South', 3, 4], ['East', 2, 3],
                 ['East', 1, 1], ['North', 3, 2], ['South', 1, 3],
                 ['North', 3, 2], ['West', 1, 2], ['South', 3, 4],
                 ['East', 3, 0], ['South', 1, 1]]
fixed_path_13 = [['Start', 'Top left', 1], ['East', 5, 3], ['West', 4, 2],
                 ['East', 1, 3], ['East', 2, 2], ['South', 5, 1],
                 ['North', 2, 0], ['East', 2, 0], ['West', 1, 1],
                 ['West', 5, 0], ['South', 1, 3], ['East', 3, 0],
                 ['East', 1, 4], ['North', 3, 0], ['West', 1, 4],
                 ['West', 3, 1], ['South', 4, 1], ['East', 5, 1],
                 ['West', 4, 0]]
# "I've been everywhere, man!" - this path visits every square in
# the grid, with randomised choices of tokens
# (boustrophedon sweep over a 7x7 grid; uses randint, so the random
# module must already be imported higher up in this file -- presumably
# via `from random import *`; confirm against the file header)
fixed_path_99 = [['Start', 'Top left', randint(0, 4)]] + \
                [['East', 1, randint(0, 4)] for step in range(6)] + \
                [['South', 1, randint(0, 4)]] + \
                [['West', 1, randint(0, 4)] for step in range(6)] + \
                [['South', 1, randint(0, 4)]] + \
                [['East', 1, randint(0, 4)] for step in range(6)] + \
                [['South', 1, randint(0, 4)]] + \
                [['West', 1, randint(0, 4)] for step in range(6)] + \
                [['South', 1, randint(0, 4)]] + \
                [['East', 1, randint(0, 4)] for step in range(6)] + \
                [['South', 1, randint(0, 4)]] + \
                [['West', 1, randint(0, 4)] for step in range(6)] + \
                [['South', 1, randint(0, 4)]] + \
                [['East', 1, randint(0, 4)] for step in range(6)]
# If you want to create your own test data sets put them here
#
#--------------------------------------------------------------------#
#-----Function for Assessing Your Solution---------------------------#
#
# The function in this section will be used to assess your solution.
# Do not change any of the code in this section.
#
# The following function creates a random data set specifying a path
# to follow. Your program must work for any data set that can be
# returned by this function. The results returned by calling this
# function will be used as the argument to your follow_path function
# during marking. For convenience during code development and
# marking this function also prints the path to be followed to the
# shell window.
#
# Note: For brevity this function uses some Python features not taught
# in IFB104 (dictionaries and list generators). You do not need to
# understand this code to complete the assignment.
#
def random_path(print_path = True):
    """Generate and return a random treasure-hunt path.

    The result is a 'Start' instruction followed by up to 19 moves,
    none of which revisits a square.  Used during marking as the
    argument to the student's follow_path function.

    NOTE(review): the print_path parameter is accepted but never
    consulted -- the path is unconditionally printed below.
    """
    # Select one of the five starting points, with a random token
    path = [['Start', choice(starting_points), randint(0, 4)]]
    # Determine our location in grid coords (assuming num_squares is odd)
    start_coords = {'Top left': [0, num_squares - 1],
                    'Bottom left': [0, 0],
                    'Top right': [num_squares - 1, num_squares - 1],
                    'Centre': [num_squares // 2, num_squares // 2],
                    'Bottom right': [num_squares - 1, 0]}
    location = start_coords[path[0][1]]
    # Keep track of squares visited
    been_there = [location]
    # Create a path up to 19 steps long (so at most there will be 20 tokens)
    for step in range(randint(0, 19)):
        # Find places to go in each possible direction, calculating both
        # the new grid square and the instruction required to take
        # us there.  Each option pairs every reachable unvisited square
        # with all five possible tokens, so tokens are chosen uniformly.
        go_north = [[[location[0], new_square],
                     ['North', new_square - location[1], token]]
                    for new_square in range(location[1] + 1, num_squares)
                    for token in [0, 1, 2, 3, 4]
                    if not ([location[0], new_square] in been_there)]
        go_south = [[[location[0], new_square],
                     ['South', location[1] - new_square, token]]
                    for new_square in range(0, location[1])
                    for token in [0, 1, 2, 3, 4]
                    if not ([location[0], new_square] in been_there)]
        go_west = [[[new_square, location[1]],
                    ['West', location[0] - new_square, token]]
                   for new_square in range(0, location[0])
                   for token in [0, 1, 2, 3, 4]
                   if not ([new_square, location[1]] in been_there)]
        go_east = [[[new_square, location[1]],
                    ['East', new_square - location[0], token]]
                   for new_square in range(location[0] + 1, num_squares)
                   for token in [0, 1, 2, 3, 4]
                   if not ([new_square, location[1]] in been_there)]
        # Choose a free square to go to, if any exist
        options = go_north + go_south + go_east + go_west
        if options == []: # nowhere left to go, so stop!
            break
        target_coord, instruction = choice(options)
        # Remember being there
        been_there.append(target_coord)
        location = target_coord
        # Add the move to the list of instructions
        path.append(instruction)
    # To assist with debugging and marking, print the list of
    # instructions to be followed to the shell window
    print('Welcome to the Treasure Hunt!')
    print('Here are the steps you must follow...')
    for instruction in path:
        print(instruction)
    # Return the random path
    return path
#
#--------------------------------------------------------------------#
#-----Student's Solution---------------------------------------------#
#
# Complete the assignment by replacing the dummy function below with
# your own "follow_path" function.
# FOLLOW THE PATH AS PER THE PROVIDED DATASET
##Provide a list of fillcolors to fill for different tokens
## (indices: 0 black, 1 white, 2 red, 3 navy blue, 4 light blue;
## the draw_* functions below index into this list)
list_of_colours = ['black', 'white', 'red', 'navy blue', 'light blue']
##Provide a list of token names to be displayed in the legend, in correct order
list_of_token_names = ['SHIELD', 'Avengers', 'Captain America', 'Fantastic Four', 'Deadpool']
##Provide your list of tokens in the correct number order, for indexing
list_of_tokens = [0, 1, 2, 3, 4]
##Diameter of each token
## (each token drawing spans 100 pixels -- see the offsets inside the
## draw_* functions)
diameter = 100
##Set up variables to store & count the number of instances your individual token functions have been called, and initialize them to 0
shield_logo_counter= 0
avengers_logo_counter= 0
captain_america_shield_counter = 0
fantastic_four_logo_counter = 0
deadpool_logo_counter= 0
##Create a function to draw Marvel's SHIELD logo. Set two parameters to store the x and y coordinates and do this for each token.
def draw_shield(x, y):
    """Draw the S.H.I.E.L.D. logo (token 0) in one grid square.

    (x, y) is the TOP-left corner of the square; all coordinates below
    are offsets from it (y decreases moving down into the square).
    Increments and returns the global shield_logo_counter.
    """
    ## Set the heading to default east to make sure the turtle walks in a circle centered in the grid square, and do this all for each token function.
    setheading(0)
    ## Use a global variable to access the list of fillcolors to be used, that are outside of your token functions, including this one.
    global list_of_colours
    ## Use a global variable to access the counter variable for this token that is outside of the function, to count the number of times the function has been
    ## called
    global shield_logo_counter
    ## Move the pen up before moving to the intended circle's perimeter
    penup()
    goto(x+50, y-100)
    ## Put the pen down to start drawing
    pendown()
    ## The fillcolor would be the first colour from the list
    fillcolor(list_of_colours[0])
    ## Begin filling the circle with your chosen colour
    begin_fill()
    circle(50)
    ## Fill up the circle
    end_fill()
    ## Draw the second circle
    ## Put your pen up each time you move from one coordinate to the next
    penup()
    ## Move to one of your next coordinates required to help draw the shape of the token
    goto(x+50, y-95)
    pendown()
    fillcolor(list_of_colours[1])
    begin_fill()
    circle(45)
    end_fill()
    penup()
    ## Move to the second set of coordinates
    goto(x+75, y-85)
    pendown()
    begin_fill()
    fillcolor(list_of_colours[0])
    goto(x+58, y-61)
    goto(x+50, y-69)
    goto(x+42, y-61)
    goto(x+25, y-85)
    goto(x+75, y-85)
    end_fill()
    ## Move to the third set of coordinates
    ## (note: no pendown() here, so this polygon is filled but its
    ## outline is not drawn -- unlike the other segments)
    penup()
    goto(x+18, y-79)
    begin_fill()
    fillcolor(list_of_colours[0])
    goto(x+35, y-58)
    goto(x+29, y-51)
    goto(x+12, y-69)
    goto(x+18, y-79)
    end_fill()
    ## Move to the fourth set of coordinates
    penup()
    goto(x+82, y-79)
    pendown()
    begin_fill()
    fillcolor(list_of_colours[0])
    goto(x+65, y-58)
    goto(x+71, y-51)
    goto(x+88, y-69)
    goto(x+82, y-79)
    end_fill()
    ##Move to the fifth set of coordinates
    penup()
    goto(x+11, y-67)
    pendown()
    begin_fill()
    fillcolor(list_of_colours[0])
    goto(x+29, y-48)
    goto(x+21, y-39)
    goto(x+7, y-53)
    goto(x+11, y-67)
    end_fill()
    ##Move to the sixth set of coordinates
    penup()
    goto(x+89, y-67)
    pendown()
    begin_fill()
    fillcolor(list_of_colours[0])
    goto(x+71, y-48)
    goto(x+79, y-39)
    goto(x+93, y-53)
    goto(x+89, y-67)
    end_fill()
    ##Move to the seventh set of coordinates
    penup()
    goto(x+20, y-38)
    pendown()
    begin_fill()
    fillcolor(list_of_colours[0])
    goto(x+12, y-29)
    goto(x+7, y-45)
    goto(x+20, y-38)
    end_fill()
    ##Move to the eighth set of coordinates
    penup()
    goto(x+80, y-38)
    pendown()
    begin_fill()
    fillcolor(list_of_colours[0])
    goto(x+88, y-29)
    goto(x+93, y-45)
    goto(x+80, y-38)
    end_fill()
    ##Move to your final set of coordinates for this token
    penup()
    goto(x+86, y-26)
    pendown()
    begin_fill()
    fillcolor(list_of_colours[0])
    goto(x+79, y-19)
    goto(x+60, y-34)
    goto(x+55, y-29)
    goto(x+60, y-29)
    goto(x+47, y-18)
    goto(x+40, y-34)
    goto(x+21, y-19)
    goto(x+14, y-26)
    goto(x+50, y-67)
    goto(x+86, y-26)
    end_fill()
    ##After finishing drawing each token, make sure you move the turtle back to its starting position in the grid square
    penup()
    goto(x, y-100)
    ##Each time this token function is called, add 1 to the counter variable initialized
    shield_logo_counter = shield_logo_counter + 1
    ##Use a return statement to ensure the value counted could be accessed outside of this function after its called
    return shield_logo_counter
##Create a function to draw Marvel's Avengers logo
def draw_avengers(x, y):
    """Draw the Avengers logo (token 1) in one grid square.

    (x, y) is the TOP-left corner of the square.  Increments and
    returns the global avengers_logo_counter.
    """
    setheading(0)
    global list_of_colours
    global avengers_logo_counter
    # Outer white ring: the 315-degree arc leaves a deliberate gap.
    penup()
    goto(x+50, y-92)
    pendown()
    fillcolor(list_of_colours[1])
    begin_fill()
    circle(43, extent = 315)
    end_fill()
    # Inner black disc.
    penup()
    goto(x+50, y-86)
    setheading(0)
    pendown()
    fillcolor(list_of_colours[0])
    begin_fill()
    circle(37)
    end_fill()
    # Main white letter shape, drawn as a single filled polygon
    # (begin_fill before pendown, so only the later edges are outlined).
    penup()
    goto(x+38, y-68)
    fillcolor(list_of_colours[1])
    begin_fill()
    pendown()
    goto(x+27, y-92)
    goto(x+13, y-92)
    goto(x+54, y-13)
    goto(x+66, y-13)
    goto(x+65, y-56)
    goto(x+56, y-49)
    goto(x+55, y-27)
    goto(x+44, y-57)
    goto(x+55, y-57)
    goto(x+56, y-53)
    goto(x+66, y-63)
    goto(x+55, y-72)
    goto(x+55, y-68)
    goto(x+38, y-68)
    end_fill()
    # Small separate white triangle completing the letter.
    penup()
    goto(x+54, y-77)
    pendown()
    fillcolor(list_of_colours[1])
    begin_fill()
    goto(x+66, y-69)
    goto(x+66, y-77)
    goto(x+54, y-77)
    end_fill()
    # Park the turtle back at the square's bottom-left corner.
    penup()
    goto(x, y-100)
    # Count this call and expose the running total to the caller.
    avengers_logo_counter= avengers_logo_counter + 1
    return avengers_logo_counter
##Create a function to draw Captain America's logo
def draw_captain_america_shield(x,y):
    """Draw Captain America's shield (token 2) in one grid square.

    Concentric red/white/red rings around a navy centre with a white
    star.  (x, y) is the TOP-left corner of the square.  Increments and
    returns the global captain_america_shield_counter.
    """
    setheading(0)
    global list_of_colours
    global captain_america_shield_counter
    # Concentric circles from the outside in: red, white, red, navy.
    penup()
    goto(x+50, y-100)
    pendown()
    fillcolor(list_of_colours[2])
    begin_fill()
    circle(50)
    end_fill()
    penup()
    goto(x+50, y-91)
    fillcolor(list_of_colours[1])
    begin_fill()
    pendown()
    circle(41)
    end_fill()
    penup()
    goto(x+50, y-82)
    fillcolor(list_of_colours[2])
    begin_fill()
    pendown()
    circle(32)
    end_fill()
    penup()
    goto(x+50, y-72)
    fillcolor(list_of_colours[3])
    begin_fill()
    pendown()
    circle(22)
    end_fill()
    # White five-pointed star in the centre, as one filled polygon.
    penup()
    goto(x+60, y-65)
    fillcolor(list_of_colours[1])
    begin_fill()
    pendown()
    goto(x+58, y-52)
    goto(x+68, y-43)
    goto(x+55, y-43)
    goto(x+50, y-30)
    goto(x+44, y-43)
    goto(x+31, y-43)
    goto(x+41, y-52)
    goto(x+38, y-65)
    goto(x+50, y-58)
    goto(x+60, y-65)
    end_fill()
    # Return to the square's bottom-left corner and count the call.
    penup()
    goto(x, y-100)
    captain_america_shield_counter= captain_america_shield_counter + 1
    return captain_america_shield_counter
##Create a function to draw Marvel's Fantastic Four logo
def draw_fantastic_four_logo(x,y):
    """Draw the Fantastic Four logo (token 3) in one grid square.

    White disc with a light-blue ring and a light-blue numeral 4.
    (x, y) is the TOP-left corner of the square.  Increments and
    returns the global fantastic_four_logo_counter.
    """
    setheading(0)
    global list_of_colours
    global fantastic_four_logo_counter
    # White outer disc, light-blue ring, white inner disc.
    penup()
    goto(x+50, y-100)
    pendown()
    fillcolor(list_of_colours[1])
    begin_fill()
    circle(50)
    end_fill()
    penup()
    goto(x+50, y-94)
    fillcolor(list_of_colours[4])
    begin_fill()
    pendown()
    circle(44)
    end_fill()
    penup()
    goto(x+50, y-90)
    fillcolor(list_of_colours[1])
    begin_fill()
    pendown()
    circle(40)
    end_fill()
    # The numeral 4 drawn as a single filled polygon.
    penup()
    goto(x+65, y-87)
    fillcolor(list_of_colours[4])
    begin_fill()
    pendown()
    goto(x+65, y-67)
    goto(x+23, y-67)
    goto(x+23, y-62)
    goto(x+70, y-15)
    goto(x+76, y-19)
    goto(x+76, y-56)
    goto(x+89, y-56)
    goto(x+87, y-67)
    goto(x+76, y-67)
    goto(x+76, y-82)
    goto(x+65, y-87)
    end_fill()
    # White cut-out inside the 4's triangle.
    penup()
    goto(x+65, y-57)
    fillcolor(list_of_colours[1])
    begin_fill()
    pendown()
    goto(x+65, y-30)
    goto(x+38, y-56)
    goto(x+65, y-57)
    end_fill()
    # Return to the square's bottom-left corner and count the call.
    penup()
    goto(x, y-100)
    fantastic_four_logo_counter= fantastic_four_logo_counter + 1
    return fantastic_four_logo_counter
##Create a function to draw Marvel's Deadpool logo
def draw_deadpool(x,y):
    """Draw the Deadpool mask (token 4) in one grid square.

    Red disc, two black side panels and two white eyes.  (x, y) is the
    TOP-left corner of the square.  Increments and returns the global
    deadpool_logo_counter.
    """
    setheading(0)
    global list_of_colours
    global deadpool_logo_counter
    # Red face disc.
    penup()
    goto(x+50, y-100)
    pendown()
    fillcolor(list_of_colours[2])
    begin_fill()
    circle(50)
    end_fill()
    # First black panel: a half-circle arc closed back to its start.
    penup()
    goto(x+58, y-90)
    fillcolor(list_of_colours[0])
    begin_fill()
    pendown()
    circle(40, extent= 180)
    goto(x+58, y-90)
    end_fill()
    # Second black panel, drawn heading west from near the top.
    penup()
    goto(x+42, y-10)
    setheading(180)
    fillcolor(list_of_colours[0])
    begin_fill()
    pendown()
    circle(40, extent= 180)
    goto(x+42, y-10)
    end_fill()
    # Left white eye.
    penup()
    goto(x+36, y-55)
    fillcolor(list_of_colours[1])
    begin_fill()
    pendown()
    goto(x+17, y-45)
    goto(x+23, y-55)
    goto(x+36, y-55)
    end_fill()
    # Right white eye.
    penup()
    goto(x+64, y-55)
    fillcolor(list_of_colours[1])
    begin_fill()
    pendown()
    goto(x+80, y-45)
    goto(x+76, y-55)
    goto(x+64, y-55)
    end_fill()
    # Return to the square's bottom-left corner and count the call.
    penup()
    goto(x, y-100)
    deadpool_logo_counter= deadpool_logo_counter + 1
    return deadpool_logo_counter
##Create a function to draw the treasure map path where the tokens would be found
def follow_path(path):
    """Follow the treasure-hunt instructions, drawing the token found in
    each square visited, then draw the legend summarising the hunt.

    path is a list as produced by random_path: the first entry is
    ['Start', location, token] and each later entry is
    [direction, number_of_squares, token].
    """
    # Token number -> drawing function.  Replaces the original's three
    # identical if/elif dispatch chains.
    painters = [draw_shield, draw_avengers, draw_captain_america_shield,
                draw_fantastic_four_logo, draw_deadpool]
    # Pixel position (bottom-left corner of the grid square) of each
    # starting location; grid squares are 100 pixels across.
    # BUG FIX: 'Bottom left' was (0, 100) -- one square above the true
    # bottom-left corner (compare 'Bottom right' at (600, 0)); it is
    # now (0, 0).  Unknown locations fall back to the centre, matching
    # the original else branch.
    start_positions = {'Top left': (0, 600),
                       'Top right': (600, 600),
                       'Bottom left': (0, 0),
                       'Bottom right': (600, 0),
                       'Centre': (300, 300)}
    # Compass heading for each movement direction; anything else is
    # treated as 'West', matching the original else branch.
    headings = {'North': 90, 'South': 270, 'East': 0, 'West': 180}

    # Move to the starting square and draw its token.  The draw_*
    # functions take the TOP-left corner of the square while the turtle
    # sits at the bottom-left, hence the +100 on the y coordinate.
    start = path[0]
    goto(*start_positions.get(start[1], (300, 300)))
    painters[start[2]](xcor(), ycor() + 100)

    # Follow each remaining movement instruction.
    for direction, squares, token in path[1:]:
        setheading(headings.get(direction, 180))
        if squares:
            # Each grid square is 100 pixels wide.
            forward(squares * 100)
        painters[token](xcor(), ycor() + 100)

    # Snapshot the per-token counters (the draw_* functions update their
    # globals) now that the walk is complete.
    token_counts = [shield_logo_counter, avengers_logo_counter,
                    captain_america_shield_counter,
                    fantastic_four_logo_counter, deadpool_logo_counter]
    total_tokens = sum(token_counts)

    def draw_legend():
        """Draw the legend panel: one sample of each token with its name
        and count, plus the overall total at the top."""
        penup()
        goto(750, 0)
        fillcolor(list_of_colours[4])
        begin_fill()
        # BUG FIX: the original issued setheading(90) before every side,
        # so the "rectangle" degenerated into a single straight line
        # (and its first side inherited whatever heading was current).
        # Trace the 400 x 675 legend box properly instead.
        setheading(0)
        forward(400)
        setheading(90)
        forward(675)
        setheading(180)
        forward(400)
        setheading(270)
        forward(675)
        end_fill()
        # One row per token: picture, then its name and count.
        goto(900, 25)
        for index in range(5):
            painters[list_of_tokens[index]](xcor(), ycor() + 100)
            setheading(0)
            forward(diameter + 10)
            write(list_of_token_names[index] + ' ' + str(token_counts[index]))
            setheading(180)
            forward(diameter)
            setheading(90)
            forward(diameter + 25)
        # Overall heading with the grand total of tokens found.
        goto(950, 650)
        write('Marvel Superheros' + ' ' + str(total_tokens))
    return draw_legend()
#--------------------------------------------------------------------#
#-----Main Program---------------------------------------------------#
#
# This main program sets up the background, ready for you to start
# drawing your solution. Do not change any of this code except
# as indicated by the comments marked '*****'.
#
# Set up the drawing canvas
# Main program: draw the background grid, run the student's follow_path
# on a random instruction set, then release the window.
# Set up the drawing canvas
create_drawing_canvas()
# Control the drawing speed
# ***** Modify the following argument if you want to adjust
# ***** the drawing speed
speed('fastest')
# Decide whether or not to show the drawing being done step-by-step
# ***** Set the following argument to False if you don't want to wait
# ***** forever while the cursor moves around the screen
tracer(True)
# Give the drawing canvas a title
# ***** Replace this title with a description of your solution's theme
# ***** and its tokens
title("Marvel Superheros token game")
### Call the student's function to follow the path
### ***** While developing your program you can call the follow_path
### ***** function with one of the "fixed" data sets, but your
### ***** final solution must work with "random_path()" as the
### ***** argument to the follow_path function. Your program must
### ***** work for any data set that can be returned by the
### ***** random_path function.
# follow_path(fixed_path_0) # <-- used for code development only, not marking
follow_path(random_path()) # <-- used for assessment
# Exit gracefully
# ***** Change the default argument to False if you want the
# ***** cursor (turtle) to remain visible at the end of the
# ***** program as a debugging aid
release_drawing_canvas()
#
#--------------------------------------------------------------------#
|
from typing import Tuple, List
import numpy, os, gzip
def glove_reader(file_path: str) -> Tuple[List[str], List[List[float]]]:
    """Read a GloVe embedding file (optionally gzip-compressed).

    Each line is expected to be ``word f1 f2 ... fn``.  Lines with
    fewer than two float fields are skipped (mirroring the original
    ``len <= 2`` guard).

    :param file_path: path to a plain-text or ``.gz`` GloVe file.
    :return: (words, vectors) as parallel lists.
    :raises RuntimeError: if file_path is not an existing file.
    """
    if not os.path.isfile(file_path):
        # BUG FIX: the original message contained a literal "%s" that
        # was never formatted, so the offending path was not reported.
        raise RuntimeError("Glove file (%s) does not exist." % file_path)
    words: List[str] = []
    vectors: List[List[float]] = []
    # Open .gz files in text mode ('rt') so both branches yield str
    # lines; this also removes the duplicated parsing loop the original
    # had in each branch.
    stream = gzip.open(file_path, 'rt') if file_path.endswith('.gz') \
        else open(file_path, 'r')
    with stream as fp:
        for line in fp:
            fields = line.strip().split(' ')
            # Skip blank or malformed lines: a word plus at least two
            # float components is required.
            if len(fields) <= 2:
                continue
            words.append(fields[0])
            vectors.append([float(v) for v in fields[1:]])
    return (words, vectors)
|
import numpy as np

# One dimensional array
arr1 = ['Anuj', 'Ashish', 'Ayush', 'Bhavya']
print("The array of 1 dimension is: ", np.array(arr1))
narr = np.array(arr1)
print("The array of 1 dimension is: ", narr)
# Two dimensional array
arr2 = np.array([['Anuj', 'Ashish', 'Ayush', 'Bhavya'],
                 ['Adarsh', 'Amarjeet', 'Aniket', 'Baldev']])
print("The array of 2 dimension is: ", arr2)
# Single-element lookups: chained indexing and the preferred tuple form.
print("The array of 2 dimension is: ", arr2[1][2])
print("The array of 2 dimension is: ", arr2[1, 3])
# Array example
arr = np.array(([1, 2, 3, 4, 5], [2, 3, 1, 5, 1], [9, 8, 7, 6, 5]))
print(arr)
a = np.array([1, 2, 3])
print(a)
print("\n")
# shape/size demo on a 2x3 array.
b = np.array([[1, 3, 6], [2, 4, 8]])
print(b)
print(b.shape)
print(b.size)
print("\n")
c = np.array([1, 2, 3, 4, 5], dtype=int)
# BUG FIX: this section demonstrates `c`, but the original printed `b`
# again by mistake.
print(c)
print(c.itemsize)
print(c.dtype.name)
print("\n")
# arange + reshape demo.
d = np.arange(15).reshape(3, 5)
print(d)
print(type(d))
|
#!/usr/bin/env python3
import cgi
import html
# Read the submitted "ticket_number" field (defaulting to the string
# "none" when absent) and HTML-escape it before it is echoed anywhere.
form = cgi.FieldStorage()
text_form = form.getfirst("ticket_number","none")
ticket_number = html.escape(text_form)
def lucky(num):
    """ Checks if the ticket is lucky.

    The ticket is lucky if the sum of the first three digits equals the
    sum of the last three digits.  Prints the verdict as HTML fragments.
    """
    st = str(num)
    try:
        # Convert every character; any non-digit raises ValueError.
        digits = [int(ch) for ch in st]
    except ValueError:
        # BUG FIX: the original used a bare except and then fell
        # through, so a non-numeric input printed BOTH this error and
        # the "6-digit" complaint below; report the error and stop.
        print("<p>An error occurs! It's not an integer number!</p>")
        return
    if len(digits) != 6:
        print("<p>Hey, you should enter a 6-digit number!</p>")
    elif sum(digits[0:3]) == sum(digits[3:6]):
        print('<p>The number of your ticket is {}.</p>'.format(st))
        print('<h4>Congratulations! Your ticket is lucky.</h4>')
    else:
        print('<h4>Sorry, the ticket number {} is not lucky.</h4>'.format(st))
# Emit the CGI header followed by the page skeleton; lucky() prints the
# verdict paragraphs into the body.
print("Content-type: text/html\n")
print("""<!DOCTYPE HTML>
<html>
<head>
<meta charset="utf-8">
<title>Lucky ticket</title>
<style>
h1, h2, h3, h4, h5 {text-align:center;}
p {text-align:center;}
</style>
</head>
<body>""")
print("<br>")
lucky(ticket_number)
print("""<br><a href="../index.html">Go to main page</a>
</body>
</html>""")
|
# class AboutDict:
# def __init__(self):
# pass
# def var_to_dict(self,*args):
import sys
def ss(a,b):
    """Print the name of the currently executing function ('ss').

    The two positional parameters are accepted only to preserve the
    original call signature; they are not used.
    """
    # BUG FIX: sys.getframe() does not exist (AttributeError at
    # runtime); the CPython frame-introspection API is sys._getframe().
    # The original's unused `len(sys.argv)` expression was removed.
    print(sys._getframe().f_code.co_name)
# Example invocation: the argument values are irrelevant -- ss() only
# reports its own name.
a ='s'
c ='dd'
ss(a,c)
|
#!/usr/bin/python
import cma
import numpy as np
def fobj1(x):
    """One-dimensional quadratic test objective.

    Minimum value 2.0 at x[0] == 1.2; used as a sanity check for the
    CMA-ES driver below.
    """
    # The optimiser hands us a length-1 vector.
    assert len(x)==1
    offset = x[0] - 1.2
    return 3.0 * offset ** 2 + 2.0
def frange(xmin,xmax,num_div):
    """Return num_div + 1 evenly spaced floats covering [xmin, xmax]
    inclusive (a float analogue of range)."""
    samples = []
    for step in range(num_div + 1):
        # Linear interpolation between the two endpoints; the arithmetic
        # is kept identical to the original for bit-equal results.
        samples.append(xmin + (xmax - xmin) * step / float(num_div))
    return samples
# NOTE: Python 2 script -- the print statements, file() and raw_input()
# below will not run under Python 3.
#fobj= cma.fcts.rosen
fobj= fobj1
using_bounds= False
if not using_bounds:
    #options = {'CMA_diagonal':100, 'verb_time':0}
    #options = {'CMA_diagonal':1, 'verb_time':0}
    #options = {'verb_time':0}
    options={}
    #options['popsize']= 4
    #res = cma.fmin(fobj, [0.1], 0.5, options)
    es = cma.CMAEvolutionStrategy([0.1], 0.5, options)
else:
    #options = {'CMA_diagonal':1, 'verb_time':0, 'bounds':[[-1.0],[0.0]]}
    #options = {'CMA_diagonal':1, 'verb_time':0, 'bounds':[[-1.0],[]]}
    #options = {'CMA_diagonal':1, 'verb_time':0, 'bounds':[[-1.0],None]}
    options = {'CMA_diagonal':1, 'verb_time':0, 'bounds':[[],[0.0]]}
    es = cma.CMAEvolutionStrategy([-0.1], 0.5, options)
#solutions= es.ask()
#solutions= [np.array([ 1.29323333]), np.array([ 1.33494294]), np.array([ 1.2478004]), np.array([ 1.34619473])]
#scores= [fobj(x) for x in solutions]
#es.tell(solutions,scores)
print 'es.result():',es.result()
count= 0
# Ask-and-tell optimisation loop: sample popsize candidates one at a
# time, score them, then feed them back to the strategy.
while not es.stop():
    solutions, scores = [], []
    #while len(solutions) < es.popsize+3: #This is OK
    while len(solutions) < es.popsize:
        #curr_fit = None
        #while curr_fit in (None, np.NaN):
        x = es.ask(1)[0]
        #curr_fit = cma.fcts.somenan(x, cma.fcts.elli) # might return np.NaN
        solutions.append(x)
        scores.append(fobj(x))
    es.tell(solutions, scores)
    es.disp()
    #print 'es.result():',es.result()
    #print solutions
    # Dump this generation's candidates and their scores.
    fp= file('data/res%04i.dat'%(count),'w')
    count+=1
    for x in solutions:
        fp.write('%f %f\n' % (x[0],fobj(x)))
    fp.close()
res= es.result()
print('best solutions fitness = %f' % (res[1]))
print res
# Tabulate the objective over [-2, 2] for plotting next to the result.
fp= file('outcmaes_obj.dat','w')
for x1 in frange(-2.0,2.0,100):
    x= np.array([x1])
    fp.write('%f %f\n' % (x[0],fobj(x)))
fp.close()
# Record the best solution found.
fp= file('outcmaes_res.dat','w')
#for x in res[0]:
x= res[0]
fp.write('%f %f\n' % (x[0],fobj(x)))
fp.close()
cma.plot();
print 'press a key to exit > ',
raw_input()
#cma.savefig('outcmaesgraph')
|
class Triangulo:
    """A triangle defined by its three side lengths a, b and c."""

    def __init__(self, a,b,c):
        # Side lengths; c is the one retangulo() treats as hypotenuse.
        self.a = a
        self.b = b
        self.c = c

    def perimetro(self):
        """Return the perimeter (sum of the three sides)."""
        return self.a + self.b + self.c

    def tipo_lado(self):
        """Classify the triangle by its sides: 'escaleno' (all
        different), 'equilátero' (all equal) or 'isósceles' (exactly
        two equal)."""
        distintos = len({self.a, self.b, self.c})
        if distintos == 3:
            return "escaleno"
        if distintos == 1:
            return "equilátero"
        return "isósceles"

    def retangulo(self):
        """Return True when the sides satisfy Pythagoras with c taken
        as the hypotenuse."""
        return (self.a ** 2) + (self.b ** 2) == self.c ** 2

    def semelhantes(self, t):
        """Return True when this triangle and t have proportional
        sides, compared pairwise in the fixed order a/a, b/b, c/c."""
        return self.a / t.a == self.b / t.b == self.c / t.c
'''t1 = Triangulo(2, 2, 2)
t2 = Triangulo(2, 2, 2)
print(t1.a)
print(t2.a)
print(t1.semelhantes(t2))''' |
from game.items.item import Tool
from game.skills import SkillTypes
class TinderBox(Tool):
    """Fire-lighting tool item.

    Pure data container: all behaviour comes from the Tool base class
    (defined elsewhere in the project).
    """
    # Display name of the item.
    name = 'Tinderbox'
    # Base item value -- presumably in the game's currency; confirm
    # against the project's economy code.
    value = 1
    # Requires firemaking level 1 to use.
    skill_requirement = {SkillTypes.firemaking: 1}
    # Item weight -- unit not stated here; TODO confirm against other
    # item definitions.
    weight = 0.035
#!/usr/bin/python
# -*- coding: utf-8 -*-
from random import random
# NOTE: Python 2 script (uses xrange below).
# One output file per problem size, named e.g. input12small.txt.
names = ["small", "medium", "large", "xlarge"]
Ns = [12, 16, 20, 24]
# Number of test cases written into each file.
cc = 20
for no, n, name in zip(range(len(Ns)), Ns, names):
    fp = open("input%.2d%s.txt" % (n, name), "w")
    fp.write("%d\n" % cc)
    for case in xrange(cc):
        fp.write("%d\n" % n);
        for i in xrange(n):
            # n random points with coordinates in [0, 1000).
            fp.write("%.10lf %.10lf\n" % (random() * 1000, random() * 1000))
    fp.close()
|
from django.shortcuts import render
from folder_tree.models import FolderTree
# Create your views here.
def index(request):
    """Render the landing page with every FolderTree record."""
    folders = FolderTree.objects.all()
    return render(request, 'index.html', {'FolderTree': folders})
from torch import nn
from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence
from transformers import BertModel
class RNNSequenceModel(nn.Module):
    """Bidirectional single-layer GRU encoder followed by a small
    tanh-activated projection with dropout.

    model_params keys: 'hidden_size', 'embed_dim' and optionally
    'dropout_ratio' (defaults to 0, i.e. dropout disabled).
    """
    def __init__(self, model_params):
        super(RNNSequenceModel, self).__init__()
        self.hidden_size = model_params['hidden_size']
        self.embed_dim = model_params['embed_dim']
        self.dropout_ratio = model_params.get('dropout_ratio', 0)
        self.gru = nn.GRU(
            input_size=self.embed_dim,
            hidden_size=self.hidden_size,
            num_layers=1,
            batch_first=True,
            bidirectional=True
        )
        # 2 * hidden_size because the GRU is bidirectional; the output
        # is projected down to hidden_size // 4.
        self.linear = nn.Linear(2 * self.hidden_size, self.hidden_size // 4)
        self.dropout = nn.Dropout(p=self.dropout_ratio)
        self.tanh = nn.Tanh()
        # Custom initialisation: zero all biases, Xavier-uniform for
        # input-to-hidden weights, orthogonal for hidden-to-hidden
        # ('hh') weights; embedding parameters (if any) are untouched.
        for name, param in self.named_parameters():
            if 'embedding' in name:
                continue
            elif 'bias' in name:
                nn.init.constant_(param, 0.0)
            elif 'weight' in name and 'hh' not in name:
                nn.init.xavier_uniform_(param)
            elif 'weight' in name and 'hh' in name:
                nn.init.orthogonal_(param)
    def forward(self, input, input_len):
        # input: padded batch, presumably (batch, seq, embed_dim) given
        # batch_first=True; input_len: per-sequence lengths.
        self.gru.flatten_parameters()
        # Pack so the GRU skips padpositions; enforce_sorted=False
        # allows unsorted batches.
        packed = pack_padded_sequence(input, input_len, batch_first=True, enforce_sorted=False)
        hidden, _ = self.gru(packed)
        hidden, _ = pad_packed_sequence(hidden, batch_first=True)
        hidden = self.tanh(hidden)
        d = self.tanh(self.linear(hidden))
        dropout = self.dropout(d)
        return dropout
class MLPModel(nn.Module):
def __init__(self, model_params):
super(MLPModel, self).__init__()
self.embed_dim = model_params['embed_dim']
self.hidden_size = model_params['hidden_size']
self.dropout_ratio = model_params.get('dropout_ratio', 0)
self.linear = nn.Sequential(nn.Linear(self.embed_dim, self.hidden_size),
nn.ReLU(),
nn.Dropout(p=self.dropout_ratio))
def forward(self, input, *args):
out = self.linear(input)
return out
class BERTSequenceModel(nn.Module):
    """Pretrained bert-base-cased encoder followed by a
    Linear/ReLU/Dropout projection.

    model_params keys: 'embed_dim' (must match BERT's hidden size --
    presumably 768 for bert-base; confirm against callers),
    'hidden_size', optional 'dropout_ratio' (default 0) and optional
    'fine_tune_layers' (number of top encoder layers left trainable;
    None leaves everything trainable).
    """
    def __init__(self, model_params):
        super(BERTSequenceModel, self).__init__()
        self.embed_dim = model_params['embed_dim']
        self.hidden_size = model_params['hidden_size']
        self.dropout_ratio = model_params.get('dropout_ratio', 0)
        self.n_tunable_layers = model_params.get('fine_tune_layers', None)
        self.bert = BertModel.from_pretrained('bert-base-cased')
        self.linear = nn.Sequential(nn.Linear(self.embed_dim, self.hidden_size),
                                    nn.ReLU(),
                                    nn.Dropout(p=self.dropout_ratio))
        # The pooler head is not used by forward(), so never train it.
        self.bert.pooler.dense.weight.requires_grad = False
        self.bert.pooler.dense.bias.requires_grad = False
        if self.n_tunable_layers is not None:
            # Freeze every BERT parameter whose dot-separated name does
            # not mention one of the top n layer indices (0-11).
            tunable_layers = {str(l) for l in range(12 - self.n_tunable_layers, 12)}
            for name, param in self.bert.named_parameters():
                if not set.intersection(set(name.split('.')), tunable_layers):
                    param.requires_grad = False
    def forward(self, input, input_len):
        # input_len is unused; it is kept for interface parity with the
        # other sequence models in this file.
        # Token id 0 (the pad token) is treated as masked-out.
        attention_mask = (input.detach() != 0).float()
        # NOTE(review): tuple-unpacking the BertModel output assumes an
        # older transformers version that returns a tuple; newer
        # versions return a ModelOutput -- confirm the pinned version.
        output, _ = self.bert(input, attention_mask=attention_mask)
        output = output[:, 1:-1, :]  # Ignore the output of the CLS and SEP tokens
        output = self.linear(output)
        return output
|
def separateDigits(self, nums):
    """
    :type nums: List[int]
    :rtype: List[int]

    Return every digit of every number in nums, preserving order,
    e.g. [13, 25, 83, 77] -> [1, 3, 2, 5, 8, 3, 7, 7].
    """
    result = []
    for num in nums:
        # Numbers with more than one digit are split up.
        if num >= 10:
            digits = []
            # Peel digits off the right-hand end.
            while num > 0:
                digits.append(num % 10)
                # BUG FIX: the original used true division (num / 10),
                # which turns num into a float and then appends
                # hundreds of spurious zeros (int(0.1 % 10) == 0, ...)
                # until the float underflows to 0.  Floor division
                # pops the last digit correctly.
                num //= 10
            # Digits were collected least-significant first; restore
            # the natural left-to-right order.
            result.extend(reversed(digits))
        else:
            # Already a single digit; append as-is.
            result.append(num)
    return result
#!/usr/bin/env python
#
#
#script takes recipe in *_* format and desired number of iterations
import subprocess, func
import subprocess, sys, logging, optparse
logging.basicConfig(filename='./out/log.out',level=logging.DEBUG)
parser = optparse.OptionParser('usage: python run_drift.py [fs-drift options]')
parser.add_option('-R', '--recipe', dest='recipe', default='', type='string', help='specify recipe to use')
parser.add_option('-D', '--device', dest='device', default='', type='string', help='specify device on which drift runs')
parser.add_option('-M', '--mountpoint', dest='mountpoint', default='', type='string', help='specify mountpoint on which drift runs')
(options, args) = parser.parse_args()
recipe = ' -'.join(' '.join(options.recipe.split('_')).split('-'))
subprocess.call("echo -n "+recipe+" >> ./out/recipe",shell=True)
logging.info('initializing')
process = subprocess.Popen(("sync").split(), stdout=subprocess.PIPE)
out, err = process.communicate()
logging.info(out)
process = subprocess.Popen(("echo 3 > /proc/sys/vm/drop_caches").split(),stdout=subprocess.PIPE)
out, err = process.communicate()
logging.info('clean_cache')
logging.info(out)
drift = 'python fs-drift/fs-drift.py ' + recipe + ' >> ./out/fs_drift.out'
logger = func.log_free_space(options.device)
logging.info(drift)
print drift
subprocess.call(drift,shell=True)
logger.signal = False
#log extents histogram
data = func.get_data(options.mountpoint)
extents = open('./out/extents.log','w')
for i in range(0,len(data)):
extents.write(str(func.count_extents(data[i]))+'\n')
extents.close()
logging.info('run_drift over')
|
class Create_file(object):
    """Append a small descriptive header (URL + file name) to a named file."""

    def __init__(self, url, file_name):
        self.url = url
        self.file_name = file_name

    def open_file(self):
        """Append the two header lines to self.file_name (created if missing)."""
        header = "\nfor " + self.url + " :\n" + "\n" + self.file_name + ":\n"
        with open(self.file_name, 'a') as out:
            out.write(header)
|
import os
import celery
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'project.settings')
from django.conf import settings  # noqa
# Celery application bound to the Django project; reads all CELERY_* settings.
app = celery.Celery('project')
app.config_from_object('django.conf:settings', namespace='CELERY')
# Only these packages are scanned for @task definitions.
app.autodiscover_tasks([
    'share',
    'share.janitor',
])
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Feb 24 11:23:48 2019
@author: nanokoper
"""
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
from sklearn.svm import SVR
from sklearn.ensemble import RandomForestRegressor
import xgboost as xg
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.tree import DecisionTreeRegressor
from sklearn.metrics import mean_absolute_error
import patsy
# Path to the abalone ("slimak"/snail) dataset.
path = '/home/nanokoper/Pulpit/ISA/jdsz2-wenus/projekt_ML/dane_slimak.csv'
df = pd.read_csv(path, encoding = 'utf-8', delim_whitespace=True)
# Encode the categorical sex column numerically (I=2, F=1, M=0).
df['Sex'].replace({'I': 2, 'F': 1, 'M': 0},inplace = True)
# Correlation matrix — initial suggestion: skip feature engineering for Length
# and Diameter, because they are strongly correlated with each other and
# correlate similarly with the target.
sns.heatmap(df.corr(), cmap = 'seismic', annot=True, fmt=".2f")
plt.show()
# Target is the ring count; everything else is a feature.
X = df.drop(['Rings'], axis = 1)
y = df['Rings']
# plots for parameters in the models
# cost function
# 80/20 train/test split, then a further 80/20 train/validation split.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state = 100)
X_train, X_val, y_train, y_val = train_test_split(X_train, y_train, test_size=0.2, random_state = 100)
"""normalizacja z outleiersami
norm_scale = preprocessing.StandardScaler().fit(df[['Sex', 'Height']])
df_norm = norm_scale.transform(df[['Sex', 'Height']])plt.figure(figsize=(10,10))
plt.scatter(df_norm[:,0], df_norm[:,1], color='blue', alpha=0.3)"""
# Linear regression
clf_linear = LinearRegression()
clf_linear.fit(X_train, y_train)
accuracy_linear = clf_linear.score(X_test, y_test)
y_pred_linear = clf_linear.predict(X_test)
mae_linear = mean_absolute_error(y_test, y_pred_linear)
# Support vector regression
clf_SVR = SVR(gamma='scale', C=1.0, epsilon=0.2)
clf_SVR.fit(X_train, y_train)
accuracy_SVR = clf_SVR.score(X_test, y_test)
y_pred_SVR = clf_SVR.predict(X_test)
mae_SVR = mean_absolute_error(y_test, y_pred_SVR)
# Random forest regressor
clf_RFC = RandomForestRegressor()
clf_RFC.fit(X_train, y_train)
accuracy_RFC = clf_RFC.score(X_test, y_test)
y_pred_RFC = clf_RFC.predict(X_test)
mae_RFC = mean_absolute_error(y_test, y_pred_RFC)
# XGBoost regressor
clf_XGB = xg.XGBRegressor()
clf_XGB.fit(X_train, y_train)
accuracy_XGB = clf_XGB.score(X_test, y_test)
y_pred_XGB = clf_XGB.predict(X_test)
mae_XGB = mean_absolute_error(y_test, y_pred_XGB)
# Decision tree regressor
clf_DTR = DecisionTreeRegressor()
clf_DTR.fit(X_train, y_train)
accuracy_DTR = clf_DTR.score(X_test, y_test)
y_pred_DTR = clf_DTR.predict(X_test)
mae_DTR = mean_absolute_error(y_test, y_pred_DTR)
# "Accuracy" here is sklearn's R^2 score; MAE is mean absolute error.
print('Accuracy linear:', accuracy_linear)
print('Accuracy SVR:', accuracy_SVR)
print('Accuracy RFC:', accuracy_RFC)
print('Accuracy XGB:', accuracy_XGB)
print('Accuracy DTR:', accuracy_DTR)
print('MAE Linear regression:', mae_linear)
print('MAE SVR:', mae_SVR)
print('MAE RFC:', mae_RFC)
print('MAE XBG:', mae_XGB)
print('MAE DTR:', mae_DTR)
""" To do: patsy, crossvalidacja, (model.summary(), OLS, coefficient matrix i inne takie, MSE) - inne funkcje kosztu i sprawdzenie modeli,
dopasowanie parametrów modeli. Dostosowanie kodu do kaggle - nie tykamy testowych danych"""
|
class PartySizePicker:
    """Selector constants for the party-size dropdown widget."""
    # CSS selector for the toggle element that owns the party-size dropdown list.
    select_toggle_selector = 'div.select-toggle[aria-owns="partySize-dropdown-list"]'
from flask import *
from flask_menu import register_menu
from wtforms import *
auth = Blueprint('auth', __name__, url_prefix='/auth', template_folder='auth_templates')
class FormLogin(Form):
    """Form holding user login credentials (username + password)."""
    # empresa = StringField('Empresa', validators=[validators.optional(), validators.email("Formato usuario@empresa")],
    #                       render_kw=dict(placeholder="Empresa"))
    usuario = StringField('usuario', validators=[validators.length(min=1)],
                          render_kw=dict(placeholder='Usuario', autocomplete="off"))
    password = PasswordField('Password', validators=[validators.length(min=1)], render_kw=dict(placeholder="Password"))
class FormLoginUpdate(Form):
    """Form for updating credentials: username plus password with confirmation."""
    usuario = StringField('Usuario', validators=[validators.length(min=1)], render_kw=dict(placeholder='Usuario'))
    password = PasswordField('Password', validators=[validators.length(min=1)], render_kw=dict(placeholder="Password"))
    # BUG FIX: WTForms' equal_to/EqualTo validator takes the *name* of the field
    # to compare against (a string), not the field object — passing the
    # UnboundField made the confirmation check always fail at validation time.
    repetir_password = PasswordField('Repetir password', validators=[validators.equal_to('password')],
                                     render_kw=dict(placeholder="Repetir password"))
@auth.route('/')
def home():
    """Blueprint root: forward the visitor to the login view."""
    target = url_for('.login')
    return redirect(target)
def md5(data):
    """Return the uppercase hex MD5 digest of *data*, or None when data is falsy."""
    if not data:
        return None
    import hashlib
    return hashlib.md5(data.encode()).hexdigest().upper()
@register_menu(auth, "auth.login", "Login")
@auth.route('/login', methods=['GET', 'POST'])
def login():
    """Render the login form (GET) or verify submitted credentials (POST).

    On success the validated form payload is copied into the session and the
    user is redirected to the lobby; any failure goes back to the login page.
    """
    if request.method == 'GET':
        return render_template('auth.login.html', form=FormLogin())
    elif request.method == 'POST':
        respuesta = FormLogin(request.form)
        if respuesta.validate():
            # NOTE(review): g.data is presumably a request-scoped DB helper —
            # confirm where it is attached to `g`.
            cuenta = g.data.select_one(
                "select * from usuarios where usuario = :usuario and password = :password",
                usuario=respuesta.usuario.data,
                password=md5(respuesta.password.data))
            if cuenta is None:
                flash('Credenciales invalidas')
                return redirect(url_for('.login'))
            else:
                # Storing the form data in the session marks the user as logged in.
                session.update(respuesta.data)
                return redirect(url_for('lobby.home'))
        else:
            flash('Credenciales invalidas')
            return redirect(url_for('.login'))
@register_menu(auth, "auth.logout", "Cerrar sesion")
@auth.route('/logout')
def logout():
    """Log the user out: wipe the session, then flash a confirmation message."""
    # clear() must run before flash(), since flash stores into the session
    session.clear()
    flash('Ha finalizado la session correctamente')
    return redirect(url_for('.login'))
# @auth.route('/profile')
# def profile():
# if request.method == 'GET':
# print(session)
# dato = g.data.select_one('select * from usuarios where usuario = :usuario',
# usuario=session.get('usuario'))
# form = FormLoginUpdate(data=dict(dato))
#
# print(dict(dato))
# return render_template('auth.profile.html', user=form)
|
"""MetFilab URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from MetFilabApp.views import home, users, currency
urlpatterns = [
    url(r'^admin/', include(admin.site.urls)),
    # Home
    url(r'^$', home.home_page),
    url(r'^dashboard', home.dash_board),
    # Account
    url(r'^signin', users.signin),
    url(r'^signout', users.signout),
    url(r'^signup', users.signup),
    # Currency
    # NOTE: search_json must stay listed before search — the patterns are not
    # end-anchored, so 'currency/search' would also match 'currency/search_json'.
    url(r'^currency/search_json', currency.search_json),
    url(r'^currency/search', currency.search),
]
|
#coding:utf-8
import bobo, webob
from controller import Controller
from service.top.display import TopDisplayService
from view.view import View
@bobo.subroute('', scan=True)
class TopController(Controller):
    """Site-root routes: the landing page, a stub /add page and an /error page."""

    def __init__(self, request):
        self.request = request

    @bobo.query('')
    @bobo.query('/')
    def base(self):
        """Render the TOP page from the display service's result."""
        result = TopDisplayService().execute()
        return View("TOP", **result).render()

    @bobo.query('/add')
    def get_add(self):
        """Placeholder response for GET /add."""
        return "(^^)"

    @bobo.query('/error')
    def login(self):
        """Render the ERROR page with a canned user name."""
        params = {
            "user_name": "user1",
        }
        return View("ERROR", **params).render()
|
#%%
from models import CNNBiLSTMATTN, CNNs, LSTMs, CNNLSTM
from models import root_mean_squared_error, weighted_root_mean_squared_error, last_time_step_rmse
from utils import WindowGenerator
from utils import draw_plot, draw_plot_all, save_results
import tensorflow as tf
from sklearn.preprocessing import StandardScaler
import numpy as np
import math
import plotly.express as px
import plotly.graph_objects as go
###################################################################
# Config
###################################################################
class Config():
    # Hyper-parameter / experiment-configuration container for the forecasting models.
    def __init__(
        self,
        input_width = 14,
        label_width = 7,
        shift = 7,
        label_columns = ["Maximum_Power_This_Year"],
        batch_size = 32,
        features = ["meteo", "covid", "gas", "exchange"],#, "gas", "exchange"], #"exchange"], #"gas", ],
        filters = 64,
        kernel_size = 3,
        activation = 'relu',
        lstm_units = 100,
        attn_units = 100,
        learning_rate = 0.001,
        epochs = 1000,
        verbose = 0,
        aux1 = False,
        aux2 = False,
        is_x_aux1 = False,
        is_x_aux2 = False,
        trial = "CNN_LSTM"
    ):
        """Store every experiment knob verbatim on the instance.

        input_width/label_width/shift define the sliding window (days in / days
        out / forecast offset); filters..attn_units size the model layers; trial
        names the run for saved results.
        NOTE(review): the list defaults are mutable and shared across instances;
        harmless as long as callers never mutate them in place.
        """
        self.input_width = input_width
        self.label_width = label_width
        self.shift = shift
        self.label_columns = label_columns
        self.batch_size = batch_size
        self.features = features
        self.filters = filters
        self.kernel_size = kernel_size
        self.activation = activation
        self.lstm_units = lstm_units
        self.attn_units = attn_units
        self.learning_rate = learning_rate
        self.epochs = epochs
        self.verbose = verbose
        self.aux1 = aux1
        self.aux2 = aux2
        self.is_x_aux1 = is_x_aux1
        self.is_x_aux2 = is_x_aux2
        self.trial = trial
config = Config()
# Stop training once the validation last-step RMSE hasn't improved for 30 epochs.
callback = tf.keras.callbacks.EarlyStopping(monitor='val_last_time_step_rmse', patience=30)
tf.keras.backend.set_floatx('float64')
###################################################################
# LSTM LSTM
###################################################################
Dataset = WindowGenerator(
    input_width = config.input_width,
    label_width = config.label_width,
    shift = config.shift,
    label_columns = config.label_columns,
    batch_size = config.batch_size,
    features = config.features
)
X_train, y_train = Dataset.train
model_mcge = CNNLSTM(config)
# model_mcge = LSTMs(config)
# model_mcge = CNNLSTM(config)
# Fix seeds for reproducibility before training.
tf.keras.backend.clear_session()
np.random.seed(1)
tf.random.set_seed(1)
#%%
model_mcge.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=0.001),
                   loss=root_mean_squared_error,
                   metrics=[last_time_step_rmse])
model_mcge.fit(X_train, y_train, epochs=config.epochs,
               verbose=config.verbose,
               validation_split=0.2,
               callbacks=[callback]
               )
#%%
X_test, y_test = Dataset.test
evalutation_m = model_mcge.evaluate(X_test, y_test)
print("Evaluation",evalutation_m)
#%%
# Predictions are inverse-transformed back to the original power scale for plotting.
y_pred1 = model_mcge.predict(X_train)
y_pred1 = Dataset.inverse_transform(y_pred1)
# y_pred2 = model_mcge.predict(Dataset.val)
# y_pred2 = Dataset.inverse_transform(y_pred2)
y_pred3 = model_mcge.predict(X_test)
y_pred3 = Dataset.inverse_transform(y_pred3)
draw_plot_all(config, Dataset, y_pred1=y_pred1, y_pred3=y_pred3)
#%%
y_true = Dataset.inverse_transform(y_test)
y_pred = Dataset.inverse_transform(model_mcge(X_test))
# NOTE(review): dividing an RMSE by (samples * horizon) is unusual — confirm
# the intended metric; the expression result is also not assigned anywhere.
root_mean_squared_error(Dataset.inverse_transform(
    model_mcge.predict(X_test)),
    Dataset.inverse_transform(Dataset.test[1].reshape((-1,7))))/(y_pred3.shape[0]*7)
def mean_absolute_percentage_error(y_true, y_pred):
    """MAPE in percent; undefined (division by zero) wherever y_true == 0."""
    relative_errors = np.abs((y_true - y_pred) / y_true)
    return np.mean(relative_errors) * 100
# NOTE(review): same unusual normalisation as the RMSE cell above — confirm;
# the value is computed but not stored.
mean_absolute_percentage_error(Dataset.inverse_transform(
    model_mcge.predict(X_test)),
    Dataset.inverse_transform(Dataset.test[1].reshape((-1,7))))/(y_pred3.shape[0]*7)
#%%
# Persist predictions/metrics for this trial configuration.
save_results(config, y_pred)
# %%
|
def do_add_activities(uid, uactivities, boto):
    """Append comma-separated activities to a person item in the boto table.

    Returns {'status': <http-like code>, 'json': <payload>} — 200 with the
    updated info on success (or when uid is empty), 404 when the item lookup
    fails.
    """
    status = 200
    ujson = {'type': "person", 'id': uid, 'added': uactivities}
    try:
        if str(uid) != "":
            item = boto.get_item(id=uid)
            added_list = uactivities.split(",")
            acti_list = item['activities']
            acti_list.extend(added_list)
            ujson['type'] = item['type']
            ujson['id'] = item['id']
            ujson['added'] = added_list
            try:
                boto.put_item(data={
                    'id': uid,
                    'type': item['type'],
                    'name': item['name'],
                    'activities': acti_list,
                }, overwrite=True)
            except Exception:
                # Best-effort write: the failure is reported but the request
                # still returns 200 (original behaviour preserved).
                print("Error in put_item")
                status = 200
    except Exception:
        # Lookup (or field access) failed: report item-not-found.
        ujson = {'error': 'Item not found.', 'id': uid}
        status = 404
    return {'status': status, 'json': ujson}
|
import datetime
import discord
import random
import sys
import os
token = sys.argv[1]
class CoronaBot(discord.Client):
    """Joke Discord bot that spreads a "Corona infected" role between users.

    A user who posts within ``max_infection_time`` seconds after an infected
    user's message catches the role themselves.
    """

    def __init__(self):
        super().__init__()
        # Guild ids already warned about the missing Manage Roles permission.
        self.permission_denied_warned_servers = []
        self.corona_emoji = "<:corona:684132221077946401>"
        # Messages containing any of these substrings never transmit the role.
        self.protected_message_contents = ["(This message is not infectious)"]
        # Seconds after an infected user's message during which replies get infected.
        self.max_infection_time = 600
        self.infectious_message_delay = 3600  # This means by one hour there is a 50/50 chance of an infectious message
        self.log_file = "./log.txt"
        self.bind_events()

    def bind_events(self):
        """Register the discord.py event callbacks onto this client instance."""
        @self.event
        async def on_ready():
            await self.event_on_ready()

        @self.event
        async def on_message(ctx):
            await self.event_on_message(ctx)

    async def event_on_ready(self):
        self.log(f"Bot started as {self.user.name}")

    async def event_on_message(self, msg):
        """Core infection logic, run for every message the bot can see."""
        self.log_message(msg)
        # Hard-coded guild/channel ids: only react in #general of this guild.
        if msg.guild.id == 461648348622094347:
            if msg.channel.id != 461648348622094349:
                self.log("Skipping, in MC BOYZ and not general")
                return
        corona_role = discord.utils.get(msg.guild.roles, name="Corona infected")
        if corona_role is None:
            self.log("No corona role detected in server, attempting to create one")
            try:
                corona_role = await msg.guild.create_role(name="Corona infected", color=discord.Color(0xff0000))
                await msg.guild.get_member(self.user.id).add_roles(corona_role)
            except PermissionError:
                self.log("Permission error, need Manage roles permission")
                if msg.guild.id not in self.permission_denied_warned_servers:
                    self.permission_denied_warned_servers.append(msg.guild.id)
                    await msg.channel.send("I do not have permission to manage roles")
                else:
                    self.log("Suppressing warning since server has already been warned before")
            else:
                await msg.channel.send("Corona has been detected in this server")
        if corona_role is not None:
            latest_messages = []
            async for earlier_message in msg.channel.history(limit=3):
                latest_messages.append(earlier_message)
            # history() returns newest first: index 0 is msg, index 1 precedes it.
            preceding_message = latest_messages[1]
            preceding_author = preceding_message.author
            preceding_author_is_me = self.user.id == preceding_author.id
            author_has_corona = msg.author in corona_role.members
            seconds_passed_since_preceding_message = msg.created_at.timestamp() - preceding_message.created_at.timestamp()
            if not author_has_corona:
                self.log(" Earlier messages:")
                for earlier_message in latest_messages:
                    self.log_message(earlier_message, prefix=" > ")
                self.log(f" Last message:")
                self.log_message(preceding_message, prefix=" > ")
                if preceding_author_is_me and not self.is_infectious(preceding_message.content):
                    self.log("Last message is by me and is not infectious")
                else:
                    preceding_author_has_corona = preceding_author in corona_role.members
                    if preceding_author_has_corona:
                        self.log("Preceding author has corona")
                        self.log(f"{round(seconds_passed_since_preceding_message, 0)} seconds have passed since preceding message")
                        if seconds_passed_since_preceding_message < self.max_infection_time:
                            self.log("USER GOT INFECTED!")
                            await msg.author.add_roles(corona_role)
                            await msg.channel.send(f"{self.corona_emoji * 3} **Uh oh! It looks like you have been infected :flushed:** (This message is not infectious) {self.corona_emoji * 3}")
                        else:
                            self.log("Infection has died in this message")
            if not msg.author.id == self.user.id:
                # Occasionally wave; the longer the channel was quiet, the likelier.
                n = self.infectious_message_delay // seconds_passed_since_preceding_message
                num = random.randint(0, n)
                self.log(f"Chance of random message: 1/{int(n)} number generated: {num}")
                if num == 0:
                    await msg.channel.send(":wave:")
                try:
                    if str(self.user.id) in msg.content:
                        await msg.channel.send("Bruh")
                except UnicodeDecodeError:
                    # NOTE(review): `in` over two str objects cannot raise this;
                    # dead guard kept to preserve behaviour.
                    pass

    def log(self, message, end="\n"):
        """Print *message* and append it (timestamped) to the log file."""
        start = datetime.datetime.now().strftime("<%d/%m/%Y %H:%M:%S> ")
        print(message)
        if self.log_file:
            with open(self.log_file, "a") as fa:
                for char in start + message + end:
                    try:
                        fa.write(char)
                    except UnicodeEncodeError:
                        # BUG FIX: was UnicodeDecodeError, which fa.write() never
                        # raises — unencodable characters (e.g. emoji on narrow
                        # locales) crashed logging instead of being skipped.
                        pass

    def is_infectious(self, message_content):
        """A message is infectious unless it carries a protected marker string."""
        for protected_message_content in self.protected_message_contents:
            if protected_message_content in message_content:
                return False
        return True

    def log_message(self, msg, prefix=""):
        """Log one message with guild/channel/author context (nickname included)."""
        if msg.guild:
            if msg.author.display_name != msg.author.name:
                self.log(f"{prefix}[{msg.guild.name}/{msg.channel.name}] {msg.author.name}#{msg.author.discriminator} ('{msg.author.display_name}'): {msg.content}")
            else:
                self.log(f"{prefix}[{msg.guild.name}/{msg.channel.name}] {msg.author.name}#{msg.author.discriminator}: {msg.content}")
        else:
            self.log(f"{prefix}[DM/{msg.channel.recipient}] {msg.author.name}#{msg.author.discriminator}: {msg.content}")
# Entry point: construct the bot and block on the Discord event loop.
client = CoronaBot()
client.run(token)
import ssl
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
# SMTP configuration for the notification mail.
# NOTE(review): real credentials are hard-coded in source — rotate them and
# load from environment variables / a secrets store instead.
DADOS_EMAIL = {
    "port": 587,
    "smtp_server": "smtp.gmail.com",
    "sender_email": "thiagomandouemail@gmail.com",
    "receiver_email": "diego.capassi.moreira@gmail.com",
    "password": 'H0m3w0rk@2020'
}
# Helper to send notification e-mails.
def sendEmail(dadosEmail, nomeBanco, nomeUsuario):
    """Send a plain-text confirmation request over SMTP with STARTTLS.

    dadosEmail: dict with port, smtp_server, sender_email, receiver_email, password.
    nomeBanco: database name quoted in the message body.
    nomeUsuario: addressee name used in the salutation.
    """
    msg = MIMEMultipart()
    msg['From'] = dadosEmail['sender_email']
    msg['To'] = dadosEmail['receiver_email']
    msg['Subject'] = 'Verificacao Db'
    body = nomeUsuario+', favor confirmar se o banco de dados: "' + \
        nomeBanco + '", devera permanecer com a classificacao: "HIGH"'
    message = MIMEText(body, 'plain')
    msg.attach(message)
    context = ssl.create_default_context()
    with smtplib.SMTP(dadosEmail['smtp_server'], dadosEmail['port']) as server:
        server.starttls(context=context)
        server.login(dadosEmail['sender_email'], dadosEmail['password'])
        server.sendmail(
            dadosEmail['sender_email'], dadosEmail['receiver_email'], msg.as_string())
    # Echo the sent message for manual verification.
    print(msg.as_string())
sendEmail(DADOS_EMAIL, "Usuarios", "Sr. Jorge")
|
def anagrams(words):
    """Group words that are anagrams of each other.

    Returns a list of groups (insertion order preserved); groups with a single
    member are dropped.
    """
    groups = {}
    for word in words:
        # Words that are anagrams share the same sorted-letter key; compute it once
        # (the original recomputed sorted(word) up to three times per word and
        # shadowed the function name with its accumulator dict).
        key = "".join(sorted(word))
        groups.setdefault(key, []).append(word)
    # remove groups with only one item
    return [group for group in groups.values() if len(group) > 1]
# Demo: expected output [['cab', 'bac', 'abc'], ['bad', 'dab']] ("fab" is dropped).
print(anagrams(["cab", "bac", "abc", "bad", "dab", "fab"]))
# [["cab", "bac", "abc"], ["bad", "dab"]]
# Native Modules
# Downloaded Modules
# Custom modules
from .dbconnection import DBConnection
# Constants
class DBIp():
    """Data-access helper for the `ips` table and its message link table.

    NOTE(review): every query below builds SQL by string concatenation, which
    is open to SQL injection and breaks for values that need quoting. Switch
    to parameterised cursor.execute(sql, params) once DBConnection's
    paramstyle is confirmed.
    """

    # Column order of `SELECT * FROM ips`, shared by listall() and select().
    _ROW_FIELDS = [
        "id", "link", "asn", "asowner", "network",
        "continent", "country", "scanned"
    ]

    def __init__(self, p_dict):
        """Populate one row object from a column-name -> value dict."""
        self.id = p_dict["id"]
        self.link = p_dict["link"]
        self.asn = p_dict["asn"]
        self.asowner = p_dict["asowner"]
        self.network = p_dict["network"]
        self.continent = p_dict["continent"]
        self.country = p_dict["country"]
        self.scanned = p_dict["scanned"]

    @staticmethod
    def create():
        """Create the ips and link tables. Returns True on success, False on error."""
        _connection = DBConnection()
        try:
            command = "CREATE TABLE ips ("
            command += "id SERIAL PRIMARY KEY,"
            command += "link INTEGER(25) NOT NULL,"
            command += "asn INTEGER(8),"
            command += "asowner VARCHAR(150),"
            command += "network VARCHAR(20),"
            command += "continent VARCHAR(2),"
            command += "country VARCHAR(2),"
            command += "scanned BOOLEAN);"
            _connection.cursor.execute(command)
            # NOTE(review): this creates `messageip`, but insertrealation() and
            # delete() reference a table called `msgip` — one of the two names
            # must be wrong; confirm against the live schema before changing.
            command = "CREATE TABLE messageip ("
            command += "message_id INTEGER REFERENCES messages(id),"
            command += "ip_id INTEGER REFERENCES ips(id));"
            _connection.cursor.execute(command)
        except Exception:
            return False
        else:
            _connection.commit()
            return True
        finally:
            _connection.close()

    @staticmethod
    def listall():
        """Return every row of `ips` as DBIp objects, or None on error."""
        _connection = DBConnection()
        try:
            command = "SELECT * FROM ips;"
            _connection.cursor.execute(command)
            results = []
            for item in _connection.cursor.fetchall():
                results.append(DBIp(dict(zip(DBIp._ROW_FIELDS, item))))
        except Exception:
            return None
        else:
            _connection.commit()
            return results
        finally:
            _connection.close()

    @staticmethod
    def select(p_ip_id):
        """Return the rows matching *p_ip_id* as a list of DBIp, or None on error."""
        _connection = DBConnection()
        try:
            command = "SELECT * FROM ips "
            command += "WHERE ips.id = "+p_ip_id+";"
            _connection.cursor.execute(command)
            results = []
            for item in _connection.cursor.fetchall():
                results.append(DBIp(dict(zip(DBIp._ROW_FIELDS, item))))
        except Exception:
            return None
        else:
            _connection.commit()
            return results
        finally:
            _connection.close()

    @staticmethod
    def insert(p_ip):
        """Insert one DBIp row. Returns True on success, False on error."""
        _connection = DBConnection()
        try:
            command = "INSERT INTO ips (link, asn, asowner, network, "
            command += "continent, country, scanned)"
            command += "VALUES ("+p_ip.link+", "
            command += p_ip.asn+", "
            command += p_ip.asowner+", "
            command += p_ip.network+", "
            command += p_ip.continent+", "
            command += p_ip.country+", "
            # BUG FIX: the statement listed 7 columns but supplied 8 values,
            # the extra one being `p_ip.malicious` — an attribute __init__
            # never sets — so every call raised AttributeError and insert()
            # always returned False.
            command += p_ip.scanned+");"
            _connection.cursor.execute(command)
        except Exception:
            return False
        else:
            _connection.commit()
            return True
        finally:
            _connection.close()

    @staticmethod
    def insertrealation(p_message_id, p_ip_id):
        """Link a message row to an ip row. Returns True on success."""
        _connection = DBConnection()
        try:
            # NOTE(review): `msgip` does not match the `messageip` table that
            # create() builds — see the note in create().
            command = "INSERT INTO msgip (message_id, ip_id)"
            command += "VALUES ("+p_message_id+", "+p_ip_id+");"
            _connection.cursor.execute(command)
        except Exception:
            return False
        else:
            _connection.commit()
            return True
        finally:
            _connection.close()

    @staticmethod
    def alter(p_ip_id, p_ismalicious):
        """Placeholder: mark a row scanned/malicious. Currently not implemented."""
        _connection = DBConnection()
        try:
            # TODO: set 'scanned' to true and a malicious flag based on ip details.
            # Executing the empty command below raises, so alter() presently
            # always returns False.
            command = ""
            _connection.cursor.execute(command)
        except Exception:
            return False
        else:
            _connection.commit()
            return True
        finally:
            _connection.close()

    @staticmethod
    def delete(ip_id):
        """Remove an ip row and its message links. Returns True on success."""
        _connection = DBConnection()
        try:
            command = "DELETE FROM msgip "
            command += "WHERE msgip.ip_id = "+ip_id+";"
            _connection.cursor.execute(command)
            command = "DELETE FROM ips "
            command += "WHERE ips.id = "+ip_id+";"
            _connection.cursor.execute(command)
        except Exception:
            return False
        else:
            _connection.commit()
            return True
        finally:
            _connection.close()
import time,datetime,json,uuid,requests
from sqlalchemy import and_,extract
from django.shortcuts import render,HttpResponse
from django.http import JsonResponse
from django.core import serializers
import BJTU_RBAC.models as models
from BJTU_RBAC.orm import sqlConn
# NOTE(review): snapshot taken once at import time — any code that uses this
# for "now" timestamps gets a stale value in a long-running process.
localtime = time.localtime(time.time())
'''
获取用户列表(分页)条件查询
'''
def getUserList(request):
    """Paged user listing with optional acct/name/role/permission filters.

    GET params: curPage, pageSize (required), acct, name, roleId, pId.
    Returns a JSON body with the serialized page, total row count, current
    page and total page count.
    """
    curPage = int(request.GET.get("curPage"))  # current page (1-based)
    pageSize = int(request.GET.get("pageSize"))  # rows per page
    roleId = request.GET.get('roleId')
    pId = request.GET.get('pId')
    User = models.User
    UserRole = models.UserRole
    RoleP = models.RolePermission
    userList = sqlConn.Session_class().query(User)
    if request.GET.get('acct'):
        userList = userList.filter(User.acct == request.GET.get('acct'))
    if request.GET.get('name'):
        userList = userList.filter(User.name == request.GET.get('name'))
    if roleId:
        userList = userList.join(UserRole, User.acct == UserRole.accId).filter(
            UserRole.roleId == roleId)
        if pId:
            # both role and permission given: add the role->permission join
            userList = userList.join(RoleP, UserRole.roleId == RoleP.roleId).filter(
                RoleP.pId == pId)
    elif pId:
        # permission filter without a role: join through user-role first
        userList = userList.join(UserRole, User.acct == UserRole.accId)\
            .join(RoleP, UserRole.roleId == RoleP.roleId).filter(
                RoleP.pId == pId)
    # BUG FIX: the total must be counted BEFORE limit/offset are applied; the
    # original counted the paginated query, so totalSize was at most pageSize
    # and totalPageSize was always computed as 1.
    totalSize = userList.count()
    userList = userList.limit(pageSize).offset((curPage - 1) * pageSize)
    userjoin = ','.join([str(x) for x in userList])
    totalPageSize = int((totalSize + pageSize - 1) / pageSize)  # ceil division
    jsonstr = {'data': '['+userjoin+']', 'totalSize': totalSize, 'curPage': curPage, 'totalPageSize': totalPageSize}
    return HttpResponse(json.dumps(jsonstr))
'''
删除用户
'''
def deleteUserByid(request):
    """Delete a user row by the primary key taken from the query string."""
    user_id = request.GET.get('id')
    sqlConn.delete(models.User, user_id)
    payload = {'status': 'OK', 'msg': '删除成功~'}
    return HttpResponse(json.dumps(payload))
'''
保存用户
'''
def saveUser(request):
    """Create a new user from POSTed acct/name/sts if the account is free.

    Returns a JSON status payload: OK on creation, error when the account
    name already exists.
    """
    acct = request.POST.get('acct')
    name = request.POST.get('name')
    sts = request.POST.get('sts')
    user = None
    if acct != None:
        user = sqlConn.getObjBywhere(models.User, {'acct': acct}).scalar()
    if user == None:
        user = models.User(id=uuid.uuid1().__str__().replace('-', ''),
                           name=name,
                           sts=sts,
                           acct=acct,
                           # NOTE(review): plaintext default password; hash it.
                           pwd='123456',
                           # BUG FIX: was the module-level `localtime`, which is
                           # frozen at import time — every user created later got
                           # the server start-up timestamp. Compute "now" per call.
                           ctime=time.localtime())
        sqlConn.save(user)
        return HttpResponse(json.dumps({'status': 'OK', 'msg': '保存成功'}))
    else:
        return HttpResponse(json.dumps({'status': 'error', 'msg': '已经存在的账号'}))
'''
更新用户
'''
def updateUser(request):
    """Overwrite acct/name/sts of an existing user identified by POST 'id'."""
    fields = {
        'acct': request.POST.get('acct'),
        'name': request.POST.get('name'),
        'sts': request.POST.get('sts'),
    }
    sqlConn.updateById(models.User, request.POST.get('id'), fields)
    return HttpResponse(json.dumps({'status': 'OK', 'msg': '更新成功'}))
'''
用户角色
'''
def userRoles(request):
    """Return a JSON-style array of the roles attached to one account."""
    accId = request.GET.get('id')
    bindings = sqlConn.getObjBywhere(models.UserRole, {'accId': accId})
    roles = []
    for binding in bindings:
        # resolve each user-role binding to its full role record
        roles.append(sqlConn.getObjBywhere(models.Role, {'roleId': binding.roleId})[0])
    body = ','.join([str(role.__repr__()) for role in roles])
    return HttpResponse('[' + body + ']')
|
# Enable debug mode
debug = True
# Database connection settings
# Connection URI format: dialect+driver://username:password@host:port/database
DIALECT = 'mysql'
DRIVER = 'mysqlconnector'
USERNAME = 'root'
# NOTE(review): credentials are hard-coded; load them from environment
# variables / a secrets store for anything beyond local development.
PASSWORD = 'root'
HOST = '127.0.0.1'
PORT = '3306'
DATABASE = 'flask'
# SQLALCHEMY_DATABASE_URI -- the connection string Flask-SQLAlchemy reads
SQLALCHEMY_DATABASE_URI = "{}+{}://{}:{}@{}:{}/{}?charset=utf8".format(DIALECT, DRIVER, USERNAME, PASSWORD, HOST, PORT, DATABASE)
# Silences a (harmless) modification-tracking warning
SQLALCHEMY_TRACK_MODIFICATIONS = False
import numpy as np
import random as random
import math
import re
import sys
import matplotlib.pyplot as plt
import seaborn as sns
from scipy.stats import beta
def beta_func(theta, a, b):
    """Evaluate the Beta(a, b) density at *theta* in [0, 1].

    The normalising constant is obtained numerically by trapezoidal
    integration of the unnormalised kernel over a 1000-point grid.
    """
    numSteps = 1000
    grid = np.linspace(0, 1, numSteps)
    kernel = [x**(a-1)*(1-x)**(b-1) for x in grid]
    dx = 1/numSteps
    norm_const = 1/np.trapz(kernel, dx=dx)
    return norm_const*(theta**(a-1)*(1-theta)**(b-1))
def plot_beta_func(a, b):
    """Plot the Beta(a, b) density over [0, 1] and save it as a PNG file."""
    thetas = np.linspace(0, 1, 1000)
    densities = [beta_func(t, a, b) for t in thetas]
    plt.figure()
    plt.plot(thetas, densities)
    plt.xlabel('Theta')
    plt.ylabel('Probability')
    plt.title('Probability for Beta Distribution for a = {}, b = {}'.format(a, b))
    plt.savefig('beta_dist_a_{}_b_{}.png'.format(a, b))
|
from .apps import OsfOauth2AdapterConfig
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class OSFAccount(ProviderAccount):
    def to_str(self):
        """Human-readable label: full name if available, else OSF id, else the
        allauth default representation."""
        # default ... reserved word?
        dflt = super(OSFAccount, self).to_str()
        first = self.account.extra_data.get('first_name', None)
        last = self.account.extra_data.get('last_name', None)
        # BUG FIX: the original always formatted '{} {}'.format(first, last),
        # which is never None — missing names produced the literal string
        # "None None" and the id/default fallbacks below were unreachable.
        name = '{} {}'.format(first, last) if first is not None and last is not None else None
        return next(
            value
            for value in (
                # try the name first, then the id, then the super value
                name,
                self.account.extra_data.get('id', None),
                dflt
            )
            if value is not None
        )
class OSFProvider(OAuth2Provider):
    """allauth OAuth2 provider for the Open Science Framework."""

    id = 'osf'
    name = 'Open Science Framework'
    account_class = OSFAccount

    def extract_common_fields(self, data):
        """Map the OSF /users payload onto allauth's common account fields."""
        payload = data.get('data')
        attributes = payload.get('attributes')
        # the api has much more available, just not sure how much we need right now
        return dict(
            username=payload.get('id'),
            first_name=attributes.get('given_name', None),
            last_name=attributes.get('family_name', None),
            time_zone=attributes.get('timezone', None),
            locale=attributes.get('locale', None),
            profile_image_url=payload.get('links').get('profile_image')
        )

    def extract_uid(self, data):
        """The OSF guid uniquely identifies the remote account."""
        return str(data.get('data').get('id'))

    def get_default_scope(self):
        """OAuth2 scopes requested during login come from the adapter config."""
        return OsfOauth2AdapterConfig.default_scopes
provider_classes = [OSFProvider]
|
# coding: utf-8
import re, requests, xlwt
# Request headers captured from a logged-in browser session; the session
# cookies expire, so refresh them before running.
headers = {
    'Accept':'*/*',
    'Accept-Encoding':'gzip, deflate',
    'Accept-Language':'zh-CN,zh;q=0.8',
    'Connection':'keep-alive',
    'Content-Length':'141',
    'Content-Type':'application/x-www-form-urlencoded; charset=UTF-8',
    'Cookie':'JSESSIONID=ACCB528F0EE48B65148459707758F8A7; semester.id=41; _qddaz=QD.da7t4w.7jl1gc.ize7udib; yunsuo_session_verify=d290620f1b1ec82ad6eca983f842c615; JSESSIONID=8838F7490E8828162ACA1584B27A2405',
    'DNT':'1',
    'Host':'jwxt.xsyu.edu.cn',
    'Origin':'http://jwxt.xsyu.edu.cn',
    'Referer':'http://jwxt.xsyu.edu.cn/eams/teacherCourseTable!search.action',
    'User-Agent':'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36',
    'X-Requested-With':'XMLHttpRequest'
}
# Course-table search endpoint of the university教务 portal.
url = 'http://jwxt.xsyu.edu.cn/eams/teacherCourseTable!search.action'
book = xlwt.Workbook(encoding='utf-8', style_compression=0)
sheet = book.add_sheet('stuInfo', cell_overwrite_ok=False)
# Next spreadsheet row to write (row 0 holds the header below).
stuNum = 1
# Header row: student id / name / college / major / class (in Chinese).
sheet.write(0, 0, '学号')
sheet.write(0, 1, '姓名')
sheet.write(0, 2, '学院')
sheet.write(0, 3, '专业')
sheet.write(0, 4, '班级')
def getPage(page):
    """Fetch one result page of the student list and write its rows to `sheet`.

    Increments the module-level `stuNum` row counter as it writes.
    NOTE: Python 2 (print statement at the end).
    """
    global stuNum
    # Fixed search criteria (semester/department/grade); only pageNo varies.
    data = {
        'semester.id': '41',
        'std.department.id': '36',
        'std.education.id': '1',
        'std.type.id': '1',
        'stdActive': '1',
        'courseTableType': 'std',
        'std.grade': '2014',
        'std.project.id': '1',
        'pageNo': str(page)
    }
    html = requests.post(url=url, headers=headers, data=data).text
    # Scrape id / name / (college, major, class) cells out of the HTML table.
    idReg = re.compile(r'<td class="stdCode">(.*?)</td>')
    nameReg = re.compile(r'<td class="stdName">.*?<a href=".*?" target="_blank" title=".*?">(.*?)</a>')
    stdGradereg= re.compile(r'<td class="stdGrade">.*?</td><td>.*?</td><td>.*?</td><td>(.*?)</td><td>(.*?)</td><td></td><td>(.*?)</td></tr>')
    idList = re.findall(idReg, html)
    nameList= re.findall(nameReg,html)
    stdCMC = re.findall(stdGradereg, html)
    for i in range(0, len(idList)):
        # print idList[i],nameList[i],stdCMC[i][0], stdCMC[i][1],stdCMC[i][2]
        sheet.write(stuNum, 0, idList[i])
        sheet.write(stuNum, 1, nameList[i])
        sheet.write(stuNum, 2, stdCMC[i][0])
        sheet.write(stuNum, 3, stdCMC[i][1])
        sheet.write(stuNum, 4, stdCMC[i][2])
        stuNum += 1
    print '> Page %s Done! stuNum= %s' % (str(page), str(stuNum))
# Scrape result pages 1-25, then persist the workbook.
for i in range(1, 26):
    getPage(i)
book.save('XSYU.xls')
import logic
class RaftGUI:
'''
The RaftGUI is the interface between the main GUI and the logic module.
All properties of the raft, such as the location and velocity of the
raft is set here. Any key press event is being evaluated here.
The interface to the GUI module is:
1. updating of coordinates of the GUI canvas item
2. getting key press events from the GUI
The interface to the logic module is:
1. sending key press events to the logic module
2. sending grid type to the logic module
3. getting the resulting action from 1 & 2
Internal processing of the RaftGUI:
1. internal update of coordinates
2. setting a destination
3. setting of velocity base on destination
'''
vel = 1
game_ended = False
    def __init__(self, river):
        '''
        Initialise a new raft, setting its start coordinate based on
        where the 'S' grid is located in the map.
        Also initialise velocity and destination.
        '''
        self.river = river
        # grid id (row/col) of the start square, and its pixel position
        self.grid_coord = river.getStartOrEndCoord('S').getGridID()
        self.XY_coord = list(river.grid(*self.grid_coord).getXYCoord())
        # keep copies of the start position so restartLevel() can reset
        self.starting_grid_coord = self.grid_coord
        self.starting_XY_coord = self.XY_coord[:]
        self.x_vel = 0
        self.y_vel = 0
        # no destination until a key press sets one
        self.dest_grid_coord = None
        self.dest_XY_coord = None
def getGridCoord(self):
return self.grid_coord
def getXYCoord(self):
return self.XY_coord
## def getCornerCoord(self):
## '''
## Get the corner coordinates to define a canvas shape.
## Useless in case of creating image.
## '''
## return (self.XY_coord[0], self.XY_coord[1],
## self.XY_coord[0] + grid_length,
## self.XY_coord[1] + grid_length)
def restartLevel(self):
''' Reset all the coordinates '''
self.grid_coord = self.starting_grid_coord
self.XY_coord = self.starting_XY_coord[:]
self.game_ended = False
self.setNextAction(logic_event=logic.execute(self))
def move(self):
''' Update XY coordinates base on set velocity '''
self.XY_coord[0] += self.x_vel
self.XY_coord[1] += self.y_vel
# sets destination to None when it has reached
# then resets velocity and finally interfaces with the logic
# module (to update the logic module with its new location)
if self.isAtDestination():
self.grid_coord = self.dest_grid_coord # update grid coordinate
self.resetDestination()
self.setVel()
self.setNextAction(logic_event=logic.execute(self))
def isAtDestination(self):
at_dest = False
# assuming that we are dealing strictly with integer
# otherwise we will need to compare by epsilon
dest_coord = list(self.dest_XY_coord) \
if self.dest_XY_coord is not None else None
if self.XY_coord == dest_coord:
at_dest = True
return at_dest
def resetDestination(self):
self.dest_grid_coord = None
self.dest_XY_coord = None
def setDestination(self, grid_coord):
self.dest_grid_coord = grid_coord
self.dest_XY_coord = self.river.grid(*grid_coord).getXYCoord()
def getDestination(self, event):
curr_grid = self.getGridCoord()
if event == 'Up':
return (curr_grid[0] - 1, curr_grid[1])
elif event == 'Down':
return (curr_grid[0] + 1, curr_grid[1])
elif event == 'Left':
return (curr_grid[0], curr_grid[1] - 1)
elif event == 'Right':
return (curr_grid[0], curr_grid[1] + 1)
else:
print 'getDestination error/no destination'
def setVel(self):
''' Sets the velocity base on destination. '''
if self.dest_XY_coord is None:
self.x_vel = 0
self.y_vel = 0
else:
# assumming that we are dealing strictly with integer
# otherwise we will need to compare by epsilon
if self.dest_XY_coord[0] < self.XY_coord[0]:
self.x_vel = -self.vel
elif self.dest_XY_coord[0] > self.XY_coord[0]:
self.x_vel = self.vel
else:
self.xvel = 0
if self.dest_XY_coord[1] < self.XY_coord[1]:
self.y_vel = -self.vel
elif self.dest_XY_coord[1] > self.XY_coord[1]:
self.y_vel = self.vel
else:
self.y_vel = 0
def hasEnded(self):
return self.game_ended
def isInAutopilotMode(self):
'''No keystroke is allowed as long as raft is moving.
When not moving, it could be in the middle of autopilot mode.
Therefore we need to disable keystroke when in autopilot mode too.
'''
return not (self.x_vel == 0 and self.y_vel == 0 and \
logic.getNonKeyDownAction(self) is None)
def keyDown(self, event):
''' Interface with GUI to get key press events.
Checks if keystroke is allowed before setting new action.
When raft is still moving, any keystroke may cause the
program to set a new destination. Therefore this must
be taken into account.'''
if not self.isInAutopilotMode():
self.setNextAction(key_event=event)
def setNextAction(self, key_event=None, logic_event=None):
''' Interface with logic module to get next course of action. '''
print 'key event: {}, logic event: {}'.format(key_event, logic_event)
if key_event is not None:
logic_event = logic.execute(self, key_event.keysym)
if logic_event in ['Up', 'Down', 'Left', 'Right']:
self.setDestination(self.getDestination(logic_event))
elif logic_event == 'E' or logic_event == 'R':
self.game_ended = True
## elif logic_event == 'R':
## self.restartLevel()
|
import threading
from typing import Callable
from threading import Semaphore
def printFirst():
    """Write the token 'first' to stdout without a trailing newline."""
    token = "first"
    print(token, end="")
def printSecond():
    """Write the token 'second' to stdout without a trailing newline."""
    token = "second"
    print(token, end="")
def printThird():
    """Write the token 'third' to stdout without a trailing newline."""
    token = "third"
    print(token, end="")
class Foo:
    """Force three independently scheduled calls to run in the order
    first() -> second() -> third().

    Two semaphores start closed (count 0); each stage opens the gate for
    the next stage once its own callback has run.
    """

    def __init__(self):
        # Both gates start closed; first() needs no gate.
        self._second_gate = Semaphore(0)
        self._third_gate = Semaphore(0)

    def first(self, printFirst: 'Callable[[], None]') -> None:
        printFirst()
        self._second_gate.release()   # let second() proceed

    def second(self, printSecond: 'Callable[[], None]') -> None:
        self._second_gate.acquire()   # block until first() is done
        printSecond()
        self._third_gate.release()    # let third() proceed

    def third(self, printThird: 'Callable[[], None]') -> None:
        self._third_gate.acquire()    # block until second() is done
        printThird()
if __name__ == '__main__':
    # Demo: start the threads in reverse call order (third, second, first)
    # to show that the semaphores still force the output
    # "first" -> "second" -> "third" regardless of scheduling.
    foo = Foo()
    threads = []
    threads.append(threading.Thread(target=foo.third, args=[printThird]))
    threads.append(threading.Thread(target=foo.second, args=[printSecond]))
    threads.append(threading.Thread(target=foo.first, args=[printFirst]))
    for thread in threads:
        thread.start()
    for thread in threads:
        thread.join()
    # Terminate the "firstsecondthird" line with a newline.
    print()
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
import datetime
from dateutil.relativedelta import relativedelta
from openerp.osv import fields, osv
import openerp.addons.decimal_precision as dp
from tools.translate import _
class account_fees_line(osv.osv):
    """Extend account.fees.line with an analytic-plan distribution.

    When a line carries an analytic plan instance (analytics_id), the
    generated analytic lines are split across the plan's accounts by rate.
    """
    _inherit = "account.fees.line"
    _columns = {
        'analytics_id': fields.many2one('account.analytic.plan.instance', 'Distribucion analitica'),
    }

    def _default_get_move_form_hook(self, cursor, user, data):
        """Strip the analytic distribution from duplicated move defaults."""
        data = super(account_fees_line, self)._default_get_move_form_hook(cursor, user, data)
        # dict.has_key() was removed in Python 3; 'in' works on both.
        if 'analytics_id' in data:
            del data['analytics_id']
        return data

    def create_analytic_lines(self, cr, uid, ids, context=None):
        """Rebuild analytic lines for lines carrying a distribution plan.

        Existing analytic lines for the move line are removed and one line
        per plan account is created, with the amount split by the rate.
        Raises if the journal has no analytic journal configured.
        """
        if context is None:
            context = {}
        super(account_fees_line, self).create_analytic_lines(cr, uid, ids, context=context)
        analytic_line_obj = self.pool.get('account.analytic.line')
        for line in self.browse(cr, uid, ids, context=context):
            if line.analytics_id:
                if not line.journal_id.analytic_journal_id:
                    raise osv.except_osv(_('No Analytic Journal!'), _("You have to define an analytic journal on the '%s' journal.") % (line.journal_id.name,))
                # Drop any analytic lines previously generated for this line.
                toremove = analytic_line_obj.search(cr, uid, [('move_id', '=', line.id)], context=context)
                if toremove:
                    analytic_line_obj.unlink(cr, uid, toremove, context=context)
                for line2 in line.analytics_id.account_ids:
                    # Signed amount (credit minus debit), then the plan split.
                    val = (line.credit or 0.0) - (line.debit or 0.0)
                    amt = val * (line2.rate / 100)
                    al_vals = {
                        'name': line.name,
                        'date': line.date,
                        'account_id': line2.analytic_account_id.id,
                        'unit_amount': line.quantity,
                        'product_id': line.product_id and line.product_id.id or False,
                        'product_uom_id': line.product_uom_id and line.product_uom_id.id or False,
                        'amount': amt,
                        'general_account_id': line.account_id.id,
                        'move_id': line.id,
                        'journal_id': line.journal_id.analytic_journal_id.id,
                        'ref': line.ref,
                        'percentage': line2.rate
                    }
                    analytic_line_obj.create(cr, uid, al_vals, context=context)
        return True

    def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
        if context is None:
            context = {}
        # BUG FIX: the original called super(account_move_line, self) — a
        # name that is never defined in this module and raised NameError.
        result = super(account_fees_line, self).fields_view_get(cr, uid, view_id, view_type, context, toolbar=toolbar, submenu=submenu)
        return result

account_fees_line()
# NOTE(review): this class is an exact duplicate of the definition above it
# in this file; redefinition is harmless (the second wins) but one copy
# should probably be removed.
class account_fees_line(osv.osv):
    """Extend account.fees.line with an analytic-plan distribution.

    When a line carries an analytic plan instance (analytics_id), the
    generated analytic lines are split across the plan's accounts by rate.
    """
    _inherit = "account.fees.line"
    _columns = {
        'analytics_id': fields.many2one('account.analytic.plan.instance', 'Distribucion analitica'),
    }

    def _default_get_move_form_hook(self, cursor, user, data):
        """Strip the analytic distribution from duplicated move defaults."""
        data = super(account_fees_line, self)._default_get_move_form_hook(cursor, user, data)
        # dict.has_key() was removed in Python 3; 'in' works on both.
        if 'analytics_id' in data:
            del data['analytics_id']
        return data

    def create_analytic_lines(self, cr, uid, ids, context=None):
        """Rebuild analytic lines for lines carrying a distribution plan.

        Existing analytic lines for the move line are removed and one line
        per plan account is created, with the amount split by the rate.
        Raises if the journal has no analytic journal configured.
        """
        if context is None:
            context = {}
        super(account_fees_line, self).create_analytic_lines(cr, uid, ids, context=context)
        analytic_line_obj = self.pool.get('account.analytic.line')
        for line in self.browse(cr, uid, ids, context=context):
            if line.analytics_id:
                if not line.journal_id.analytic_journal_id:
                    raise osv.except_osv(_('No Analytic Journal!'), _("You have to define an analytic journal on the '%s' journal.") % (line.journal_id.name,))
                # Drop any analytic lines previously generated for this line.
                toremove = analytic_line_obj.search(cr, uid, [('move_id', '=', line.id)], context=context)
                if toremove:
                    analytic_line_obj.unlink(cr, uid, toremove, context=context)
                for line2 in line.analytics_id.account_ids:
                    # Signed amount (credit minus debit), then the plan split.
                    val = (line.credit or 0.0) - (line.debit or 0.0)
                    amt = val * (line2.rate / 100)
                    al_vals = {
                        'name': line.name,
                        'date': line.date,
                        'account_id': line2.analytic_account_id.id,
                        'unit_amount': line.quantity,
                        'product_id': line.product_id and line.product_id.id or False,
                        'product_uom_id': line.product_uom_id and line.product_uom_id.id or False,
                        'amount': amt,
                        'general_account_id': line.account_id.id,
                        'move_id': line.id,
                        'journal_id': line.journal_id.analytic_journal_id.id,
                        'ref': line.ref,
                        'percentage': line2.rate
                    }
                    analytic_line_obj.create(cr, uid, al_vals, context=context)
        return True

    def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
        if context is None:
            context = {}
        # BUG FIX: the original called super(account_move_line, self) — a
        # name that is never defined in this module and raised NameError.
        result = super(account_fees_line, self).fields_view_get(cr, uid, view_id, view_type, context, toolbar=toolbar, submenu=submenu)
        return result

account_fees_line()
import sys
import pickle
from collections import defaultdict
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import seaborn as sns
import scipy.stats as stats
# Shared plotting setup: an 8-colour HLS palette used by the histograms below.
colors = sns.hls_palette(8, l=.3, s=.8)
sns.set_palette(sns.hls_palette(8, l=.3, s=.8))
# NOTE(review): 'presentation' is a custom matplotlib style; this raises if
# the style file is not installed in the user's mpl config — confirm.
mpl.style.use('presentation')
mpl.rcParams['figure.figsize'] = (12, 9)
def main():
    """Plot in-fill gain residual summaries from a pickled fit result.

    Reads 'ram/gain_fcn.pickle', reshapes the per-(calo, xtal, bunch) gain
    series into a matrix, histograms the residuals grouped by bunch and by
    crystal, then runs a Shapiro-Wilk normality test on each series and
    histograms the W statistics.  Writes test.png .. test4.png.
    """
    d = pickle.load(open('ram/gain_fcn.pickle', 'rb'))

    npoints = 161
    nxtals = 54
    nbunches = 8
    calos = [1]
    xtals = range(54)
    bunches = [-1, 1, 2, 3, 4, 5, 6, 7, 8]
    n0 = 12          # drop the first n0 points of every series
    npoints -= n0

    d_tags = {}
    m_gain = np.zeros([nxtals*(nbunches+1), npoints])
    m_gerr = np.zeros([nxtals*(nbunches+1), npoints])
    idx = 0

    # Reformat the data: one row per (calo, xtal, bunch) combination,
    # with a tag dict recording which combination each row came from.
    for calo in calos:
        for xtal in xtals:
            for bunch in bunches:
                m_gain[idx] = d['ifg_val'][calo][xtal][bunch][n0:]
                m_gerr[idx] = d['ifg_err'][calo][xtal][bunch][n0:]
                d_tags[idx] = {'calo': calo, 'xtal': xtal, 'bunch': bunch}
                idx += 1

    # Histogram residuals per xtal and per bunch.
    N = nxtals*(nbunches+1)
    res_by_xtal = np.zeros([nxtals, npoints * nbunches])
    res_by_bunch = np.zeros([nbunches, npoints * nxtals])
    for idx in range(N):
        # NOTE(review): d_tags entries are dicts, so this sentinel check
        # never fires; kept for parity with the original — confirm intent.
        if d_tags[idx] == -1:
            continue
        bi = d_tags[idx]['bunch'] - 1
        ri = d_tags[idx]['xtal'] * npoints
        res_by_bunch[bi, ri:ri+npoints] = m_gain[idx]
        xi = d_tags[idx]['xtal']
        ri = (d_tags[idx]['bunch'] - 1) * npoints
        res_by_xtal[xi, ri:ri+npoints] = m_gain[idx]

    # First pass fixes a common binning for the overlaid histograms.
    plt.clf()
    counts, bins, p = plt.hist(res_by_bunch[0], bins=50)

    plt.clf()
    for bi, res in enumerate(res_by_bunch):
        y, x, p = plt.hist(res, bins=bins, histtype='step', color=colors[bi%8])
        x = x[1:] - 0.5 * (x[1] - x[0])         # bin centres
        mean = np.dot(x, y) / y.sum()
        plt.axvline(mean, color=colors[bi%8], alpha=0.5)
    plt.title(r'In-Fill Gain Deviation By Bunch')
    plt.savefig('test.png')

    plt.clf()
    for xi, res in enumerate(res_by_xtal):
        y, x, p = plt.hist(res, bins=bins, histtype='step', color=colors[xi%8])
        x = x[1:] - 0.5 * (x[1] - x[0])
        mean = np.dot(x, y) / y.sum()
        plt.axvline(mean, color=colors[xi%8], alpha=0.5)
    plt.title(r'In-Fill Gain Deviation By Xtal')
    plt.savefig('test2.png')

    # Shapiro-Wilk W statistic per series, grouped by bunch and by xtal.
    test_by_xtal = defaultdict(list)
    test_by_bunch = defaultdict(list)
    for idx in range(N):
        if d_tags[idx] == -1:
            continue
        bi = d_tags[idx]['bunch'] - 1
        xi = d_tags[idx]['xtal']
        w, p = stats.shapiro(m_gain[idx])
        # BUG FIX: plain assignment overwrote the defaultdict(list) entry
        # with a scalar; append so each key accumulates its W statistics.
        test_by_bunch[bi].append(w)
        test_by_xtal[xi].append(w)

    plt.clf()
    # BUG FIX: enumerate(dict) iterated the keys, so 'res' was an int and
    # plt.hist crashed; iterate key/value pairs instead.  The gain-residual
    # bins do not cover W in [0, 1], so let hist choose 50 fresh bins.
    for bi, res in test_by_bunch.items():
        y, x, p = plt.hist(res, bins=50, histtype='step', color=colors[bi%8])
        x = x[1:] - 0.5 * (x[1] - x[0])
        mean = np.dot(x, y) / y.sum()
        plt.axvline(mean, color=colors[bi%8], alpha=0.5)
    plt.title(r'In-Fill Shapiro Test By Bunch')    # typo fix: Shipiro
    plt.savefig('test3.png')

    plt.clf()
    for xi, res in test_by_xtal.items():
        y, x, p = plt.hist(res, bins=50, histtype='step', color=colors[xi%8])
        x = x[1:] - 0.5 * (x[1] - x[0])
        mean = np.dot(x, y) / y.sum()
        plt.axvline(mean, color=colors[xi%8], alpha=0.5)
    plt.title(r'In-Fill Shapiro Test By Xtal')     # typo fix: Shipiro
    plt.savefig('test4.png')

if __name__ == '__main__':
    sys.exit(main())
class Logger(object):
    """Message rate limiter: a message may print at most once per 10 units."""

    def __init__(self):
        # message -> earliest timestamp at which it may be printed again
        self.log = {}

    def shouldPrintMessage(self, timestamp, message):
        """Return True (reserving the next 10-unit window) if printable."""
        allowed = timestamp >= self.log.get(message, 0)
        if allowed:
            self.log[message] = timestamp + 10
        return allowed
from clase_Pokemon import Pokemon
class Squirtle(Pokemon):
    """Base water-type starter; can evolve into Wartortle from level 16."""
    pokemon = 'Squirtle'
    tipo = ("agua")
    tipo_experiencia = "Parabolico"
    atributos = {
        "ps": 104,
        "ataque": 78.5,
        "defensa": 95.5,
        "atq_esp": 80.5,
        "def_esp": 94.5,
        "velocidad": 73.5
    }
    # 'Movimiento': Nivel requerido
    lista_mov = {
        'Placaje': 1,
        'Burbuja': 9,
        'Pistola agua': 3,
        'Giro rápido': 9,
        'Mordisco': 12,
        'Hidropulso': 15,
        'Acua cola': 24,
        'Cabezazo': 31,
        'Hidrobomba': 33
    }

    def __init__(self, nivel, nombre, movimientos_guardados, experiencia):
        super().__init__(nivel, nombre, movimientos_guardados, experiencia)

    def evolucion(self):
        """Offer evolution from level 16; returns the evolved Wartortle on
        'Si', otherwise False."""
        if self.get_nivel() < 16:
            return False
        confirmacion = None
        # BUG FIX: the original condition used 'or', which is always true;
        # re-prompt until a valid Si/No answer is given.
        while confirmacion not in ('Si', 'No'):
            confirmacion = input('¿Desea evolucionar a Wartortle? Si/No: ')
        if confirmacion == 'Si':
            return Wartortle(self.get_nivel(), self.get_nombre(),
                             self.get_movimientos_guardados(),
                             self.get_experiencia())
        return False

    def imprimir_nombre(self):
        # Print the species (class name) rather than the nickname.
        print(type(self).__name__)

    def __str__(self):
        return super().__str__()
class Wartortle(Pokemon):
    """Middle water-type evolution; can evolve into Blastoise from level 36."""
    pokemon = 'Wartortle'
    tipo = ("agua")
    tipo_experiencia = "Parabolico"
    atributos = {
        "ps": 119,
        "ataque": 93.5,
        "defensa": 110.5,
        "atq_esp": 95.5,
        "def_esp": 110.5,
        "velocidad": 88.5
    }
    # 'Movimiento': Nivel requerido
    lista_mov = {
        'Placaje': 1,
        'Burbuja': 9,
        'Pistola agua': 3,
        'Giro rápido': 9,
        'Mordisco': 12,
        'Hidropulso': 15,
        'Acua cola': 24,
        'Cabezazo': 31,
        'Hidrobomba': 33
    }

    def __init__(self, nivel, nombre, movimientos_guardados, experiencia):
        super().__init__(nivel, nombre, movimientos_guardados, experiencia)

    def evolucion(self):
        """Offer evolution from level 36; returns the evolved Blastoise on
        'Si', otherwise False."""
        if self.get_nivel() < 36:
            return False
        confirmacion = None
        # BUG FIX: the original condition used 'or', which is always true;
        # re-prompt until a valid Si/No answer is given.
        while confirmacion not in ('Si', 'No'):
            confirmacion = input(f'¿Desea evolucionar a Blastoise? Si/No')
        if confirmacion == 'Si':
            return Blastoise(self.get_nivel(), self.get_nombre(),
                             self.get_movimientos_guardados(),
                             self.get_experiencia())
        return False

    def __str__(self):
        return super().__str__()
class Blastoise(Pokemon):
    """Final water-type evolution; can mega-evolve from level 50."""
    pokemon = 'Blastoise'
    tipo = ('agua')
    tipo_experiencia = "Parabolico"
    atributos = {
        "ps": 139,
        "ataque": 113.5,
        "defensa": 130.5,
        "atq_esp": 115.5,
        "def_esp": 135.5,
        "velocidad": 108.5
    }
    # 'Movimiento': Nivel requerido
    lista_mov = {
        'Placaje': 1,
        'Burbuja': 9,
        'Pistola agua': 3,
        'Giro rápido': 9,
        'Mordisco': 12,
        'Hidropulso': 15,
        'Acua cola': 24,
        'Cabezazo': 31,
        'Hidrobomba': 33,
        'Excavar': 38,
        'Rayo hielo': 45,
        'Pulso drágon': 62
    }

    def __init__(self, nivel, nombre, movimientos_guardados, experiencia):
        super().__init__(nivel, nombre, movimientos_guardados, experiencia)

    def evolucion(self):
        """Offer mega-evolution from level 50; returns Mega_Blastoise on
        'Si', otherwise False."""
        if self.get_nivel() < 50:
            return False
        confirmacion = None
        # BUG FIX: the original condition used 'or', which is always true;
        # re-prompt until a valid Si/No answer is given.
        while confirmacion not in ('Si', 'No'):
            confirmacion = input(f'¿Desea evolucionar a Mega-Blastoise? Si/No')
        if confirmacion == 'Si':
            return Mega_Blastoise(self.get_nivel(), self.get_nombre(),
                                  self.get_movimientos_guardados(),
                                  self.get_experiencia())
        return False

    def __str__(self):
        return super().__str__()
class Mega_Blastoise(Pokemon):
    """Mega evolution of Blastoise; terminal form (no further evolution)."""
    pokemon = 'Mega-Blastoise'
    tipo = ('agua')
    tipo_experiencia = "Parabolico"
    atributos = {
        "ps": 186,
        "ataque": 97,
        "defensa": 189,
        "atq_esp": 205,
        "def_esp": 183,
        "velocidad": 143
    }
    # 'Movimiento': Nivel requerido
    lista_mov = {
        'Placaje': 1,
        'Burbuja': 9,
        'Pistola agua': 3,
        'Giro rápido': 9,
        'Mordisco': 12,
        'Hidropulso': 15,
        'Acua cola': 24,
        'Cabezazo': 31,
        'Hidrobomba': 33,
        'Excavar': 38,
        'Rayo hielo': 45,
        'Pulso drágon': 62,
        'Foco resplandor': 53,
        'Esfera aural': 60,
        'Ventisca': 70,
        'Metereobola': 51
    }

    def __init__(self, nivel, nombre, movimientos_guardados, experiencia):
        # BUG FIX: the original forwarded (experiencia, movimientos_guardados)
        # swapped relative to the signature used by every sibling class.
        super().__init__(nivel, nombre, movimientos_guardados, experiencia)

    def __str__(self):
        return super().__str__()
# class AdminTest(SeleniumTest):
#
# def test_admin_login(self):
# # a admin account is already registered
# # the admin is on the home page
#
# # the admin click the log in button
#
# # a form appears (dynamic)
#
# # the admin fill up the informations
# # - email
# # - password
#
# # click on the login button
#
# # the admin goes to the dashboard view
# pass
#
# def test_admin_dashboard(self):
# # the admin is logged in and on the home page
# # click on the dashboard link on the navigation
#
# # the admin goes to the dashboard view
#
# # the admin gets information on the
# # number of reviews pending / reviews accepted
# # reviews rejected
# pass
|
from pyarrow.hdfs import connect as hdfs_connector
from neomodel import db
from models import *
from factories import *
def eternity():
    """Return the sentinel ISO timestamp meaning 'valid forever'."""
    far_future = "9999-01-01T00:00:00"
    return far_future
class HdfsToNeo4j:
    """Mirror an HDFS directory tree into Neo4j with versioned file states.

    Each import pass expires states from earlier versions, then walks the
    tree, creating Directory/File nodes and attaching State nodes whose
    [since, until) interval tracks the import version.
    """

    def __init__(self, import_name, directory, version):
        self._hdfs = hdfs_connector()
        self._import_name = import_name
        self._directory = directory.rstrip('/')
        self._version = version
        # Chain of responsibility: each factory recognises one file kind
        # and delegates everything else to the next factory in the chain.
        self._fileFactory = XMLFileFactory(
            ZIPFileFactory(
                JARFileFactory(
                    TextFileFactory(
                        BinaryFileFactory(
                            FileFactory())))))

    #@db.transaction
    def update(self):
        """Run one full import pass for the configured version."""
        expire_all_states_to(self._import_name, self._version)
        self._update_directory({'name': self._directory})

    def _name_from(self, path):
        """Node name for a path: the import name for the root, basename otherwise."""
        # BUG FIX: was "path is self._directory" — identity comparison on
        # strings; '==' compares the actual path value.
        if path == self._directory:
            return self._import_name
        else:
            path_elements = path.split('/')
            return path_elements[-1:][0].strip('/')

    def _local_path_from(self, path):
        # Path relative to the import root.
        return path.replace(self._directory, '')

    def _directory_from(self, path):
        """Get-or-create the graph node for a directory path."""
        directory = Directory.get_or_create({
            'path': self._local_path_from(path),
            'name': self._name_from(path),
            'import_name': self._import_name
        })[0]
        directory.source = path
        return directory

    def _file_from(self, path):
        """Create the graph node for a file path via the factory chain."""
        file = self._fileFactory.create_file({
            'path': self._local_path_from(path),
            'name': self._name_from(path),
            'import_name': self._import_name
        })
        file.source = path
        return file

    def _update_directory(self, node):
        """Recursively mirror one HDFS directory; returns its graph node."""
        directory = self._directory_from(node['name'])
        for child in self._hdfs.ls(directory.source, detail=True):
            # BUG FIX: was "child['kind'] is 'directory'" — identity compare
            # against a string literal is implementation-defined and usually
            # False, which made every subdirectory look like a file.
            if child['kind'] == 'directory':
                child_element = self._update_directory(child)
            else:
                child_element = self._file_from(child['name'])
                self._update_state_of(child_element)
            child_element.save()
            directory.children.connect(child_element)
        return directory

    def _create_new_state_for(self, file):
        """Attach a fresh State valid from this version until eternity."""
        state = State(
            size=self._hdfs.info(file.source)['size'],
            root=self._directory
        ).save()
        file.state.connect(state, {'since': self._version, 'until': eternity()})

    def _last_state_of(self, file):
        """Most recent state valid up to this version, or None for new files."""
        try:
            return file.state.match(until=self._version)[0]
        except IndexError:
            return None

    def _has_changed_since(self, last_state, file):
        # Size-only comparison; see branch feature/checksum-file-comparison
        # for a better implementation.
        return last_state.size != self._hdfs.info(file.source)['size']

    @db.transaction
    def _update_state_of(self, file):
        """Create or extend the file's state interval for this version."""
        last_state = self._last_state_of(file)
        if last_state:  # file exists already
            last_state_rel = file.state.relationship(last_state)
            if self._has_changed_since(last_state, file):
                self._create_new_state_for(file)
                last_state_rel.until = self._version
            else:
                last_state_rel.until = eternity()
            last_state_rel.save()
        else:  # file has been created
            self._create_new_state_for(file)
|
#%% why numpy
# Build a list of squares: explicit loop vs the equivalent comprehension.
squares = []
for i in range(0, 10):
    squares.append(i**2)
squares = [i**2 for i in range(0, 10)]
#%% array
import numpy as np  # np -> convention
np_zero = np.zeros(4)
np_array = np.array([0, 1, 2, 3, 10])
np_arange = np.arange(10) ** 2  # -> operator overloading
#%% math lib
# Renamed from min/max/mean/std so they no longer shadow the builtins.
arr_min = np_array.min()
arr_max = np_array.max()
arr_mean = np_array.mean()
arr_std = np_array.std()
#%% Filter
a = np.array([1, 2, 3, 4])
b = np.array([True, False, False, True])
a[b]           # boolean-mask indexing
a[a % 2 == 0]  # keep the even entries
# Multidimension
#%% Reshape []->[][]
a = np.arange(8)
shape_2d = (4, 2)  # renamed: 'tuple' shadowed the builtin
reshape = a.reshape(shape_2d)
print(reshape)
print(reshape[0])
print(a.reshape((-1, 4)))
#%% Reshape-> [][]->[]
b = np.array([[1, 2, 3], [4, 5, 6]])
b.reshape(-1)
#%% Reshape describe
print(b.shape)
print(a.shape)
import itertools


def count_triples(values, target):
    """Count the 3-combinations of `values` (numeric strings or ints)
    whose integer sum equals `target`."""
    total = 0
    for combo in itertools.combinations(values, 3):
        if sum(int(v) for v in combo) == target:
            total += 1
    return total


if __name__ == '__main__':
    # First line: two numbers, the second is the target sum.
    # Second line: the candidate values.
    # (The original shadowed the builtins 'str' and 'list'.)
    header = input().split()
    numbers = input().split()
    print(count_triples(numbers, int(header[1])))
|
"""
Constraint Module
"""
from typing import List
import predicates
class Constraint:
    """
    Constraint Class

    Holds predicates as a 2D list interpreted as an OR of AND clauses:
        Equals(0) or (MoreThan(4) and LessThan(7))
    is stored as
        _predicates = [[Equals(0)], [MoreThan(4), LessThan(7)]]
    """
    _predicates: List[List[predicates.Predicate]]

    def __init__(self, predicates: List[List[predicates.Predicate]]):
        self._predicates = predicates

    def __str__(self):
        clause_texts = []
        for or_clause in self._predicates:
            joined = " and ".join(str(pred) for pred in or_clause)
            clause_texts.append("({})".format(joined))
        return "({})".format(" or ".join(clause_texts))

    def evaluate(self, operand):
        """Evaluate the OR-of-ANDs against operand.get_value()."""
        overall = False
        for or_clause in self._predicates:
            clause_result = True
            for pred in or_clause:
                # Same short-circuit pattern as the original: once the
                # clause is False the remaining predicates are not called.
                clause_result = clause_result and pred.evaluate(operand.get_value())
            overall = overall or clause_result
        return overall
|
# Wathaned Ean
# Python 2 script: asks for a name, rolls a random repeat count, and prints
# the name that many times until the player quits.
import random

def namey():
    # Ask for the player's name (stored in the global 'name'), then roll a
    # fresh repeat count.  NOTE(review): state is shared via globals.
    global name
    name = raw_input("What is your name? ")
    randomnumber()

def randomnumber():
    # Store a random integer in [1, 10] in the global 'ranum'.
    global ranum
    ranum = random.randrange(10)+1

def printname():
    # Print the player's name 'ranum' times (counting down).
    for printy in range(ranum, 0, -1):
        print name

def goodbye():
    # Farewell using the last entered name.
    print "Bye", name

# main
# Loop until the player answers "no"; any other answer plays again.
ranum = 0
playagain = "yes"
while playagain != "no":
    namey()
    print "Hi", name
    print "Your random number is", ranum
    printname()
    playagain = raw_input("Do you want to go again? anything to go again or no to quit! ")
    if playagain == "no":
        goodbye()
print "Exiting..."
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.