text stringlengths 8 6.05M |
|---|
def count_pairs_in_range(arr, low, high):
    """Count index pairs (i, j), i < j, whose sum lies in [low, high].

    The original loop tried to skip/advance by mutating the loop variables
    inside ``for`` (which has no effect in Python); this is the straight
    O(n^2) pair count the input format implies.
    """
    count = 0
    n = len(arr)
    for i in range(n - 1):
        for j in range(i + 1, n):
            if low <= arr[i] + arr[j] <= high:
                count += 1
    return count


if __name__ == "__main__":
    t = int(input())
    while t > 0:
        n, l, r = map(int, input().split())
        arr = list(map(int, input().strip().split()))[:n]
        print(count_pairs_in_range(arr, l, r))
        t -= 1
|
#!/usr/local/bin/python3
# -*- coding: utf-8 -*-
"""Auth API blueprint: groups all /api/auth endpoints."""
# NOTE: the coding declaration above was previously misspelled "conding",
# which made it inert (PEP 263 requires the exact token "coding").
from flask import Blueprint

auth_api = Blueprint('auth', __name__, url_prefix='/api/auth')

# Imported after auth_api is defined so the route decorators in views can
# reference the blueprint (deliberate late import, not an oversight).
from . import views  # noqa: E402,F401
# Generated by Django 2.2.1 on 2019-06-14 02:39
import bbs.save
from django.db import migrations, models


class Migration(migrations.Migration):
    # Alters userinfo.wechatimg: makes it optional, unique, and routes
    # uploads through the project's custom storage backend (bbs.save.newStorage).
    dependencies = [
        ('bbs', '0003_auto_20190614_1004'),
    ]
    operations = [
        migrations.AlterField(
            model_name='userinfo',
            name='wechatimg',
            # verbose_name is the Chinese label "WeChat QR code".
            field=models.ImageField(blank=True, default=None, null=True, storage=bbs.save.newStorage(), unique=True, upload_to='wechat/%Y%m/', verbose_name='微信二维码'),
        ),
    ]
|
#!/usr/bin/env python3
"""Memory-stress helper: allocate argv[1] MiB and hold it for one hour."""
from sys import argv
from time import sleep

# Appending the buffer to argv keeps a live reference so the allocation
# is never garbage-collected while the process sleeps.
argv.append(bytearray(1024 * 1024 * int(argv[1])))
# Keep the process (and its memory footprint) alive for observation.
sleep(3600)
|
from ..type import ComponentType
from asn1PERser.classes.templates.creator import template_filler
from asn1PERser.classes.module import already_filled_template
class SequenceOfType(ComponentType):
    """ASN.1 SEQUENCE OF type: a homogeneous collection of one component type."""

    def __init__(self):
        super(SequenceOfType, self).__init__()
        self._ComponentType = None
        self.typereference = self.__class__.__name__

    def fill_template(self, has_parent=False):
        """Render the code template for this SEQUENCE OF.

        Returns the rendered template text; when has_parent is True the
        component type is returned alongside it so the caller can recurse.
        An empty string is returned for templates already emitted (tracked
        in the module-level already_filled_template set).
        """
        type_name = self.__class__.__name__
        constraint_info = {'extensionMarker': self.constraint.extensionMarker,
                           'lowerEndpoint': self.constraint.lowerEndpoint,
                           'upperEndpoint': self.constraint.upperEndpoint}
        rendered = template_filler.fill(asn_type=type_name,
                                        class_name=self.template_class_name,
                                        class_type=type_name,
                                        field_type=self.ComponentType.template_field_type,
                                        constraint=constraint_info)
        if has_parent:
            return ([self.ComponentType], rendered)
        if rendered in already_filled_template:
            return ''
        already_filled_template.add(rendered)
        primitive_refs = ['IntegerType', 'BooleanType', 'OctetStringType', 'BitStringType']
        if self.ComponentType.typereference in primitive_refs:
            return rendered
        # Non-primitive component: emit its own template first, then ours.
        return self.ComponentType.fill_template() + rendered

    @property
    def ComponentType(self):
        return self._ComponentType

    @ComponentType.setter
    def ComponentType(self, ComponentType):
        self._ComponentType = ComponentType

    def __getitem__(self, item):
        # Expose the single component through list-style indexing.
        return [self.ComponentType][item]

    def __repr__(self):
        parts = [super(SequenceOfType, self).__repr__(), str(self.ComponentType)]
        return '\n\t'.join(parts)
|
from .desc_list import *
from .create import *
def process_commands(lines):
    """Process Add/Send/Delete commands and return the output lines.

    Commands are "Add->user", "Send->user->email", "Delete->user"; the
    literal line "Statistics" ends input, after which users are listed
    sorted by email count (descending) then name (ascending).
    """
    users = {}
    out = []
    for line in lines:
        if line == "Statistics":
            break
        command, username, *rest = line.split("->")
        if command == "Add":
            if username in users:
                out.append(f"{username} is already registered")
            else:
                users[username] = []
        elif command == "Send":
            # Sends to unknown users are silently ignored (original behavior).
            if username in users:
                users[username].append(rest[0])
        elif command == "Delete":
            if username in users:
                users.pop(username)
            else:
                out.append(f"{username} not found!")
    ordered = dict(sorted(users.items(), key=lambda kv: (-len(kv[1]), kv[0])))
    out.append(f"Users count: {len(ordered)}")
    for user, emails in ordered.items():
        out.append(user)
        out.extend(f" - {email}" for email in emails)
    return out


if __name__ == "__main__":
    def _stdin_lines():
        # Yield lines until process_commands hits the "Statistics" sentinel.
        while True:
            yield input()

    for output_line in process_commands(_stdin_lines()):
        print(output_line)
import os
import pymongo
import datetime
import string
import pytz
import sqlite3
import random
import traceback
import json
import click
from flask import current_app, g
from flask.cli import with_appcontext
def get_mongodb():
    """Open a MongoDB database handle using app config (dev) or environment.

    Returns (db_handle, error_obj): on success error_obj is {}; on failure
    db_handle is {} and error_obj describes the connection error.
    """
    ret_obj, error_obj = {}, {}
    try:
        ser = ""
        # Environment variable overrides the Flask config value.
        ser = current_app.config["SERVER"] if "SERVER" in current_app.config else ser
        ser = os.environ["SERVER"] if "SERVER" in os.environ else ser
        if ser == "dev":
            conn_str = current_app.config["MONGODB_CONNSTRING"]
            db_name = current_app.config["DB_NAME"]
        else:
            conn_str, db_name = os.environ['MONGODB_CONNSTRING'], os.environ['DB_NAME']
        client = pymongo.MongoClient(conn_str)
        client.server_info()  # force a round-trip so bad connections fail here
        ret_obj = client[db_name]
    except (pymongo.errors.ServerSelectionTimeoutError,
            pymongo.errors.OperationFailure) as err:
        # Both failure modes produce the same caller-facing error object.
        error_obj = {"status": 0, "error": "Connection to MongoDB failed", "details": err.details}
    return ret_obj, error_obj
def get_mongodb_old():
    """Legacy MongoDB connector using host/user/password config keys.

    Returns (db_handle, error_obj) with the same contract as get_mongodb().
    """
    ret_obj, error_obj = {}, {}
    try:  # Connect to mongodb
        client = pymongo.MongoClient(
            current_app.config["DB_HOST"],
            authSource=current_app.config["DB_NAME"],
            username=current_app.config["DB_USERNAME"],
            password=current_app.config["DB_PASSWORD"],
            authMechanism='SCRAM-SHA-1',
            serverSelectionTimeoutMS=10000
        )
        client.server_info()  # force a round-trip so bad connections fail here
        ret_obj = client[current_app.config["DB_NAME"]]
    except (pymongo.errors.ServerSelectionTimeoutError,
            pymongo.errors.OperationFailure) as err:
        # Both failure modes produce the same caller-facing error object.
        error_obj = {"status": 0, "error": "Connection to MongoDB failed", "details": err.details}
    return ret_obj, error_obj
def log_error(error_log):
    """Persist an error entry to the c_log collection.

    Returns an error object: on success it carries a short random id so the
    user-visible message can be correlated with the stored log entry.
    """
    mongo_dbh, error_obj = get_mongodb()
    if error_obj != {}:
        return error_obj
    eastern = pytz.timezone('US/Eastern')
    ts = datetime.datetime.now(eastern).strftime("%Y-%m-%d %H:%M:%S %Z%z")
    try:
        error_id = get_random_string(6)
        document = {"id": error_id, "log": error_log, "ts": ts}
        mongo_dbh["c_log"].insert_one(document)
        return {"status": 0, "error": "exception-error-" + error_id}
    except Exception as e:
        # Best-effort logging: never let log failures propagate to callers.
        return {"status": 0, "error": "Unable to log error!"}
def get_random_string(size=6, chars=string.ascii_uppercase + string.digits):
    """Return a random string of `size` characters drawn from `chars`."""
    picks = (random.choice(chars) for _ in range(size))
    return ''.join(picks)
def reset_sequence_value(coll_obj, sequence_name):
    """Reset the named counter document's sequence_value to 0 (upserting it).

    :param coll_obj: pymongo Collection holding the counter documents
    :param sequence_name: logical name of the counter to reset
    """
    # find_and_modify() was removed in PyMongo 3.x; find_one_and_update()
    # is the supported replacement with the same upsert semantics.
    coll_obj.find_one_and_update(
        {'sequence_name': sequence_name},
        {'$set': {'sequence_value': 0}},
        upsert=True,
        return_document=pymongo.ReturnDocument.AFTER
    )
    return
def next_sequence_value(coll_obj, sequence_name):
    """Atomically increment the named counter and return its new value.

    :param coll_obj: pymongo Collection holding the counter documents
    :param sequence_name: logical name of the counter to increment
    :return: the post-increment sequence value as an int
    """
    # find_and_modify() was removed in PyMongo 3.x; find_one_and_update()
    # with ReturnDocument.AFTER matches the old new=True behavior.
    seq_doc = coll_obj.find_one_and_update(
        {'sequence_name': sequence_name},
        {'$inc': {'sequence_value': 1}},
        upsert=True,
        return_document=pymongo.ReturnDocument.AFTER
    )
    return int(seq_doc["sequence_value"])
|
# -*- coding: utf-8 -*-
from odoo import models, fields, _, api
from datetime import datetime
from dateutil.relativedelta import relativedelta
import pytz
from odoo.exceptions import UserError
from .tzlocal import get_localzone
from odoo import tools
class IncapacidadesNomina(models.Model):
    """Payroll disability (incapacidad) record.

    Validating a record creates (or updates) a matching hr.leave entry named
    'Incapacidades_<name>' spanning the disability period; cancelling refuses
    that leave. Records can never be deleted, only cancelled.
    """
    _name = 'incapacidades.nomina'
    _description = 'IncapacidadesNomina'
    # Sequence-generated identifier; editable only while in draft state.
    name = fields.Char("Name", required=True, copy=False, readonly=True, states={'draft': [('readonly', False)]}, index=True, default=lambda self: _('New'))
    employee_id = fields.Many2one('hr.employee', string='Empleado')
    fecha = fields.Date('Fecha')
    # Insurance branch: work risk / general illness / maternity.
    ramo_de_seguro = fields.Selection([('Riesgo de trabajo', 'Riesgo de trabajo'), ('Enfermedad general', 'Enfermedad general'), ('Maternidad','Maternidad')], string='Ramo de seguro')
    tipo_de_riesgo = fields.Selection([('Accidente de trabajo', 'Accidente de trabajo'), ('Accidente de trayecto', 'Accidente de trayecto'), ('Enfermedad de trabajo','Enfermedad de trabajo')], string='Tipo de riesgo')
    secuela = fields.Selection([('Ninguna', 'Ninguna'), ('Incapacidad temporal', 'Incapacidad temporal'), ('Valuación inicial provisional','Valuación inicial provisional'), ('Valuación inicial definitiva', 'Valuación inicial definitiva')], string='Secuela')
    control = fields.Selection([('Unica', 'Unica'), ('Inicial', 'Inicial'), ('Subsecuente','Subsecuente'), ('Alta médica o ST-2', 'Alta médica o ST-2')], string='Control')
    control2 = fields.Selection([('01', 'Prenatal o ST-3'), ('02', 'Enalce'), ('03','Postnatal')], string='Control maternidad')
    # Number of disability days covered by the record.
    dias = fields.Integer("Dias")
    porcentaje = fields.Char('Porcentaje')
    descripcion = fields.Text('Descripción')
    state = fields.Selection([('draft', 'Borrador'), ('done', 'Hecho'), ('cancel', 'Cancelado')], string='Estado', default='draft')
    folio_incapacidad = fields.Char('Folio de incapacidad')

    @api.model
    def create(self, vals):
        """Assign the next 'incapacidades.nomina' sequence number as the name."""
        if vals.get('name', _('New')) == _('New'):
            vals['name'] = self.env['ir.sequence'].next_by_code('incapacidades.nomina') or _('New')
        result = super(IncapacidadesNomina, self).create(vals)
        return result

    # Disabled folio-length validation (7 characters); kept for reference.
    # @api.multi
    # @api.onchange('folio_incapacidad')
    # def _check_folio_length(self):
    #     if self.folio_incapacidad:
    #         if len(self.folio_incapacidad) != 7:
    #             raise UserError(_('La longitud del folio es incorrecto'))

    @api.multi
    def action_validar(self):
        """Validate the record: create/update the linked hr.leave and mark done.

        The leave period is [fecha, fecha + dias - 1] expressed in the user's
        timezone and then converted to UTC before being stored.
        """
        # Map the insurance branch to its configured leave type (may be None).
        leave_type = None
        if self.ramo_de_seguro=='Riesgo de trabajo':
            leave_type = self.env.ref('nomina_cfdi_extras_ee.hr_holidays_status_inc_rt', False)
        elif self.ramo_de_seguro=='Enfermedad general':
            leave_type = self.env.ref('nomina_cfdi_extras_ee.hr_holidays_status_inc_eg', False)
        elif self.ramo_de_seguro=='Maternidad':
            leave_type = self.env.ref('nomina_cfdi_extras_ee.hr_holidays_status_inc_mat', False)
        if self.fecha:
            date_from = self.fecha
            # dias - 1 because the start date itself counts as the first day.
            date_to = date_from + relativedelta(days=self.dias - 1)
            date_from = date_from.strftime("%Y-%m-%d") + ' 00:00:00'
            date_to = date_to.strftime("%Y-%m-%d") +' 23:59:59'
        else:
            # No start date: fall back to a same-day 06:00-20:00 window.
            date_from = datetime.today().strftime("%Y-%m-%d")
            date_to = date_from + ' 20:00:00'
            date_from += ' 06:00:00'
        # Resolve the user's timezone (context, then partner, then UTC).
        timezone = self._context.get('tz')
        if not timezone:
            timezone = self.env.user.partner_id.tz or 'UTC'
        #timezone = tools.ustr(timezone).encode('utf-8')
        local = pytz.timezone(timezone) #get_localzone()
        # Localize the naive timestamps and convert them to UTC for storage.
        naive_from = datetime.strptime (date_from, "%Y-%m-%d %H:%M:%S")
        local_dt_from = local.localize(naive_from, is_dst=None)
        utc_dt_from = local_dt_from.astimezone (pytz.utc)
        date_from = utc_dt_from.strftime ("%Y-%m-%d %H:%M:%S")
        naive_to = datetime.strptime (date_to, "%Y-%m-%d %H:%M:%S")
        local_dt_to = local.localize(naive_to, is_dst=None)
        utc_dt_to = local_dt_to.astimezone (pytz.utc)
        date_to = utc_dt_to.strftime ("%Y-%m-%d %H:%M:%S")
        # Reuse an existing linked leave if one was created before.
        nombre = 'Incapacidades_'+self.name
        registro_falta = self.env['hr.leave'].search([('name','=', nombre)], limit=1)
        if registro_falta:
            registro_falta.write({'date_from' : date_from,
                                  'date_to' : date_to,
                                  'employee_id' : self.employee_id.id,
                                  'holiday_status_id' : leave_type and leave_type.id,
                                  'state': 'validate',
                                  })
        else:
            holidays_obj = self.env['hr.leave']
            vals = {'date_from' : date_from,
                    'holiday_status_id' : leave_type and leave_type.id,
                    'employee_id' : self.employee_id.id,
                    'name' : 'Incapacidades_'+self.name,
                    'date_to' : date_to,
                    'state': 'confirm',}
            # Run the leave onchanges on a virtual record so derived fields
            # (duration, etc.) are computed before the real create().
            holiday = holidays_obj.new(vals)
            holiday._onchange_employee_id()
            holiday._onchange_leave_dates()
            vals.update(holiday._convert_to_write({name: holiday[name] for name in holiday._cache}))
            vals.update({'holiday_status_id' : leave_type and leave_type.id,})
            #holidays_obj.create(vals)
            incapacidad = self.env['hr.leave'].create(vals)
            incapacidad.action_validate()
        self.write({'state':'done'})
        return

    @api.multi
    def action_cancelar(self):
        """Cancel the record and refuse its linked hr.leave, if any."""
        self.write({'state':'cancel'})
        nombre = 'Incapacidades_'+self.name
        registro_falta = self.env['hr.leave'].search([('name','=', nombre)], limit=1)
        if registro_falta:
            registro_falta.action_refuse() #.write({'state':'cancel'})

    @api.multi
    def action_draft(self):
        """Return the record to draft state."""
        self.write({'state':'draft'})

    @api.multi
    def unlink(self):
        """Forbid deletion entirely; records must be cancelled instead."""
        raise UserError("Los registros no se pueden borrar, solo cancelar.")
## General output full path (note to user: you can change this variable)
# Backslashes are consistently escaped; the previous mix of "\C" / "\\r"
# only worked because Python leaves unrecognized escapes literal.
#output_filedir = "/mnt/XHDD/master_project_result/Threshold_test_newavg_nongpu"
output_filedir = "W:\\BRICIA\\resources\\Cesca_MSS2_sample_50\\yunhee_master_project_result"
#input_dir = "/mnt/XHDD/ADNI_20x3_2015_IAM"
input_dir = "W:\\BRICIA\\resources\\Cesca_MSS2_sample_50\\"
## Name of csv file (note to user: you can change this variable)
#csv_filename = "testset_data.csv"
csv_filename = "W:\\BRICIA\\resources\\Cesca_MSS2_sample_50\\data_list.csv"
## These parameters run each code
T_weighted_penalisation_algorithm = True
colour_channel_algorithm = True
## Size of source and target patches.
## Must be in the form of python's list data structure.
## Default: patch_size = [1,2,4,8]
patch_size = [1, 2, 4, 8]
## Weights for age map blending produced by different size of source/target patches
## Must be in the form of python's list data structure.
## Its length must be the same as 'patch_size' variable.
## Default: blending_weights = [0.65,0.2,0.1,0.05]
blending_weights = [0.65, 0.2, 0.1, 0.05]
## Used only for automatic calculation for all number of samples
## NOTE: Smaller number of samples makes computation faster (please refer to the manuscript).
## Samples used for IAM calculation
## Default: num_samples_all_param = [512]
num_samples_all_param = [512]
## Uncomment line below and comment line above if you want to run all different number of samples
# num_samples_all = [64, 128, 256, 512, 1024, 2048]
## Weight of distance function to blend maximum difference and average difference between source
## and target patches. Default: alpha=0.5. Input value should be between 0 and 1 (i.e. floating).
alpha = 0.5
## Threshold value for cutting of probability values of brain masks, if probability masks
## are given instead of binary masks.
bin_tresh = 0.5
## Threshold value for Age value penalisation approach
Ttrsh = 0.6
## Save JPEG outputs
save_jpeg = True
save_mat = True
## Delete all intermediary files/folders, saving some spaces in the hard disk drive.
delete_intermediary = False
|
# Bubble sort implementation
numbers = [3, 53, 65, 1, 321, 54, 76, 43, 2, 4, 66]


# O(n^2) worst-case time / O(1) extra space
def bubbleSort(array):
    """Sort `array` in place in ascending order using bubble sort.

    Each pass bubbles the largest remaining element to the end, so the
    sorted tail is excluded from later passes; the function exits early
    when a full pass performs no swaps (the list is already sorted).
    """
    length = len(array)
    for x in range(length):
        swapped = False
        # Elements beyond index length-1-x are already in final position.
        for j in range(length - 1 - x):
            if array[j] > array[j + 1]:
                array[j], array[j + 1] = array[j + 1], array[j]
                swapped = True
        if not swapped:
            break


bubbleSort(numbers)
print(numbers)
from django.apps import AppConfig


class CprofileConfig(AppConfig):
    """Django application configuration for the 'cprofile' app."""
    name = 'cprofile'
|
import copy
import pygame
import pytmx
from pytmx.util_pygame import load_pygame
from sound_play import Sound_play as sound
from com.wwa.main.levels import levels
from com.wwa.players.cowboy import Cowboy
from com.wwa.players.sun import Sun
# Asset paths (relative to the game's working directory).
TMW_DESERT_SPACING_PNG = '../map/tmw_desert_spacing.png'
PIC_ = '../pic/'
PIC_GAME_OVER_PNG = '%sgame_over.png' % PIC_
LEVEL_COMPLETED_PNG = '%slevel_completed.png' % PIC_
LEVEL_PNG = '%slevel.png' % PIC_
CLOCK_PNG = '%sclock.png' % PIC_
SCORES_BROWN_PNG = '%sscores_brown.png' % PIC_
# Screen coordinates for the end-of-level summary texts.
TIME_TAKEN_Y = 400
LAYER_DONE_PERCENT = 500
CACTUS_FINAL_Y = 300
FINAL_SCORES_X = 190 + 100
FONT_NAME = 'JOKERMAN'
# Names of the TMX map layers the game logic inspects.
BOXES = "boxes"
PIC_OBJS = 'pic_objs'
EXIT = 'exit'
TELEPORT_LEVEL = 'teleport'
# Window size and HUD element positions.
WIN_WIDTH = 800
WIN_HEIGHT = 800
SCORE_POS_X = 520
SCORE_POS_Y = 750
LEVEL_POS_X = 250
LEVEL_POS_Y = 400
SCORE_COUNT_POS_X = SCORE_POS_X + 220
SCORE_AND_CACTUS_POS_Y = SCORE_POS_Y + 5
CACTUS_COUNT_POS_X = SCORE_POS_X + 80
TIME_POS_X = 650
TIME_POS_Y = 15
# Indices into each levels[] entry (map name at 0, time budget at 1;
# index 2 holds sun spawn data — see Wwa.main_loop).
TIME_INDEX = 1
LEVEL_INDEX = 0
red = pygame.Color(153, 0, 0)
class Wwa():
    """Main game object: loads a level's TMX map and runs the game loop.

    Constructing a Wwa instance immediately starts the level (the
    constructor blocks in main_loop until the level or game ends).
    """

    def __init__(self, level, sound_on, godmode):
        pygame.init()
        # Remaining time budget for this level, counted down each frame.
        self.time = levels[level - 1][TIME_INDEX]
        # Grid coordinates of cactuses already picked up (blanked on redraw).
        self.rect = []
        self.suns = []
        self.level = level
        self.pic_obj_level = None
        self.clock = pygame.time.Clock()
        self.cactus_count = 0
        self.life = 100
        self.godmode = godmode
        if godmode:
            self.life = 10000
        self.game_display = pygame.display.set_mode((WIN_WIDTH, WIN_HEIGHT))
        self.teleports = None
        self.pytmx_map = load_pygame("../map//" + levels[level - 1][LEVEL_INDEX] + ".tmx")
        self.score_image = pygame.image.load(SCORES_BROWN_PNG)
        self.clock_image = pygame.image.load(CLOCK_PNG)
        self.level_image = pygame.image.load(LEVEL_PNG)
        self.finish_background = pygame.image.load(LEVEL_COMPLETED_PNG)
        self.game_over_pic = pygame.image.load(PIC_GAME_OVER_PNG)
        self.sound = sound(sound_on)
        self.sound_on = sound_on
        # Blocks until the level ends.
        self.main_loop()

    def put_text(self, t, font_name, font_size, x, y, color):
        """Render `t` with a system font and blit it at (x, y)."""
        font = pygame.font.SysFont(font_name, font_size)
        text = font.render(str(t), True, color)
        self.game_display.blit(text, (x, y))

    def redraw_pics(self):
        """Rebuild the background surface from the map's tile layers.

        Tiles whose grid position is in self.rect (picked-up cactuses) are
        replaced with a plain desert tile cut from the tileset image.
        """
        for layer in self.pytmx_map.visible_layers:
            if layer.name == PIC_OBJS:
                self.pic_obj_level = layer
            if isinstance(layer, pytmx.TiledTileLayer):
                for x in range(0, 40):
                    for y in range(0, 40):
                        image = self.pytmx_map.get_tile_image(x, y, 0)
                        if image != None and (x, y) not in self.rect:
                            self.pics.blit(image, (32 * x, 32 * y))
                        else:
                            # Patch the hole with a blank desert tile from the tileset.
                            surface_image = pygame.image.load(TMW_DESERT_SPACING_PNG)
                            self.pics.blit(surface_image, (32 * x, 32 * y), (5 * 32 + 6, 3 * 32 + 4, 32, 32))

    def show_level(self):
        """Show the 'Level N' splash for 3 seconds."""
        self.game_display.fill(pygame.Color(244, 215, 65))
        pygame.display.update()
        self.game_display.blit(self.level_image, (LEVEL_POS_X, LEVEL_POS_Y))
        self.put_text('Level ' + str(self.level), 'JOKERMAN', 25, LEVEL_POS_X + 100, LEVEL_POS_Y + 5, (255, 255, 255))
        pygame.display.update()
        pygame.time.delay(3000)

    def show_final_scores(self):
        """Draw the end-of-level summary: cactuses, time taken, % collected."""
        self.put_text('Cactuses ' + str(self.cactus_count), 'JOKERMAN', 25, FINAL_SCORES_X, CACTUS_FINAL_Y, (0, 0, 0))
        time_taken = levels[self.level - 1][TIME_INDEX] - self.time
        self.put_text('Time ' + str(time_taken), 'JOKERMAN', 25, FINAL_SCORES_X, TIME_TAKEN_Y, (0, 0, 0))
        # Share of the pic_objs layer's objects that were collected.
        percent_done = (float)(self.cactus_count) / int(len(self.pic_obj_level)) * 100
        self.put_text('Layer done on ' + str(format(percent_done, '.2f')) + "%", 'JOKERMAN', 25, FINAL_SCORES_X,
                      LAYER_DONE_PERCENT, (0, 0, 0))
        pygame.display.update()
        pygame.time.delay(50)

    def show_finish(self):
        """Show the level-completed screen, then start the next level.

        Starting the next level is done by constructing a new Wwa (whose
        constructor runs its own loop); this instance's loop then stops.
        """
        self.game_display.blit(self.finish_background, (190, 100))
        pygame.display.update()
        self.show_final_scores()
        pygame.time.delay(3000)
        self.level += 1
        if self.level > len(levels):
            self.loop = False
        else:
            Wwa(self.level, self.sound_on, self.godmode)
            self.loop = False

    def minus_life(self):
        """Decrement life, play the hit sound, and end the game at 0 life."""
        self.life -= 1
        self.put_text(self.life, FONT_NAME, 25, SCORE_COUNT_POS_X, SCORE_AND_CACTUS_POS_Y,
                      (255, 255, 255))
        self.sound.play_hit_sound()
        if self.life <= 0:
            self.show_game_over()
            self.loop = False

    def show_game_over(self):
        """Show the game-over screen with its sound for 3 seconds."""
        self.game_display.blit(self.game_over_pic, (190, 100))
        pygame.display.update()
        self.sound.play_game_over_sound()
        pygame.time.delay(3000)

    def update_sun(self):
        """Move and draw the suns, counter-scrolling with the cowboy's motion."""
        for s in self.suns:
            if not self.cowboy.is_step_back:
                s.update(-self.cowboy.movement_dict[self.cowboy.movement][0],
                         -self.cowboy.movement_dict[self.cowboy.movement][1])
            else:
                s.update(0, 0)
            s.draw(self.game_display)

    def check_sun_collide(self):
        """Subtract life for every sun currently touching the cowboy."""
        for s in self.suns:
            if self.cowboy.rect.colliderect(s.rect):
                self.minus_life()

    def main_loop(self):
        """Run the per-frame game loop until the level ends or life runs out."""
        # levels[...][2] holds the sun spawn data for this level.
        for s in levels[self.level-1][2]:
            self.suns.append(Sun(copy.copy(s)))
        self.cowboy = Cowboy()
        self.background = pygame.Surface((42 * 32, 42 * 32))
        self.pics = pygame.Surface((42 * 32, 42 * 32))
        self.loop = True
        self.event = None
        self.redraw_pics()
        self.show_level()
        while (self.loop):
            self.time -= 1;
            self.cowboy.is_step_back = False
            # NOTE(review): this drains the event queue but only the last
            # `event` survives for the key handling below; on a frame with no
            # events, `event` keeps its value from the previous frame (and is
            # unbound on the very first frame) — confirm intended.
            for event in pygame.event.get():
                pass
            layer_index = 0
            for layer in self.pytmx_map.visible_layers:
                layer_index += 1
                if isinstance(layer, pytmx.TiledObjectGroup):
                    # Object-group layers drive collisions: exit, cactuses, boxes.
                    if layer.name == EXIT:
                        for obj in layer:
                            if pygame.Rect(obj.x + self.cowboy.pos_x, obj.y + self.cowboy.pos_y, obj.width,
                                           obj.height).colliderect(self.cowboy.rect) == True:
                                self.show_finish()
                    if layer.name == PIC_OBJS:
                        for obj in layer:
                            if pygame.Rect(obj.x + self.cowboy.pos_x, obj.y + self.cowboy.pos_y, obj.width,
                                           obj.height).colliderect(self.cowboy.rect) == True:
                                # Grid cell of the touched cactus; collect it once.
                                cactus = (round(obj.x / 32), round(obj.y / 32))
                                if cactus not in self.rect:
                                    self.cactus_count += 1
                                    self.put_text(self.cactus_count, FONT_NAME, 25, CACTUS_COUNT_POS_X,
                                                  SCORE_AND_CACTUS_POS_Y, (255, 255, 255))
                                    self.rect.append(cactus)
                                    self.redraw_pics()
                                    self.sound.play_pick_sound()
                                break
                    if layer.name == BOXES:
                        for obj in layer:
                            if pygame.Rect(obj.x + self.cowboy.pos_x, obj.y + self.cowboy.pos_y, obj.width,
                                           obj.height).colliderect(self.cowboy.rect) == True:
                                # Walking into a box pushes the cowboy back and costs life.
                                self.cowboy.step_back()
                                self.cowboy.is_step_back = True
                                self.minus_life()
                                break
            self.check_sun_collide()
            # F3 toggles sound, F4 toggles god mode (life 10000 vs 100).
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_F3:
                    self.sound_on = not self.sound_on
                    self.sound = sound(self.sound_on)
                elif event.key == pygame.K_F4:
                    self.godmode = not self.godmode
                    if self.godmode:
                        self.life = 10000
                    else:
                        self.life = 100
            self.cowboy.update(event)
            # Draw world, HUD images, suns, HUD numbers, then the cowboy on top.
            self.game_display.blit(self.pics, (self.cowboy.pos_x, self.cowboy.pos_y))
            self.game_display.blit(self.score_image, (SCORE_POS_X - 20, SCORE_POS_Y - 10))
            self.update_sun()
            self.game_display.blit(self.clock_image, (TIME_POS_X - 100, TIME_POS_Y - 17))
            self.put_text(self.life, FONT_NAME, 25, SCORE_COUNT_POS_X, SCORE_AND_CACTUS_POS_Y, (255, 255, 255))
            self.put_text(self.cactus_count, FONT_NAME, 25, CACTUS_COUNT_POS_X, SCORE_AND_CACTUS_POS_Y,
                          (255, 255, 255))
            self.put_text(self.time, FONT_NAME, 25, TIME_POS_X, TIME_POS_Y,
                          (255, 255, 255))
            self.cowboy.draw(self.game_display)
            self.clock.tick(60)
            pygame.display.update()
|
import base64
import time
import bcrypt
import dataset
def extract_domain(email_address):
    """
    Given an email address, return the domain name: everything after the
    first '@' in the lowercased address. If the address contains no '@',
    the whole lowercased string is returned instead.
    :param email_address:
    :return:
    """
    lowered = email_address.lower()
    at_pos = lowered.find(u'@')
    if at_pos == -1:
        # no @ found, just use the whole string
        return lowered
    return lowered[at_pos + 1:]
def anonymize_email(email_address, domain=None):
    """
    Hash the email address securely and return it. We use bcrypt to do this and use the
    domain as the salt.
    :param email_address: the email address to hash
    :param domain: optional precomputed domain; derived from the address when None
    :return: the bcrypt hash of the lowercased address
    """
    email_address = email_address.lower()
    if domain is None:
        domain = extract_domain(email_address)
    # create a custom salt by base64 encoding the domain and then trimming the whole thing to 22
    # characters (which is bcrypt's required salt length). Note that we fill the right side of the
    # domain with dots to ensure it's at least 18 characters in length. This is necessary as we need
    # to ensure that the base64 encode result is at least 22 characters long and 18 is the minimum
    # input length necessary to create a base64 encoding result of at least 22 characters.
    # base64.b64encode requires bytes on Python 3 (the previous code passed a
    # str and raised TypeError), so encode first and decode the result back.
    encoded = base64.b64encode(domain.zfill(18).encode(u'utf-8')).decode(u'ascii')
    # NOTE(review): standard base64 may emit '+' or '/', which are outside
    # bcrypt's './A-Za-z0-9' salt alphabet — confirm against real domains.
    salt = u'$2b$12$' + encoded[:22]
    return bcrypt.hashpw(email_address.encode(u'utf-8'), salt.encode(u'utf-8'))
def anonymize_kwargs(kwargs):
    """
    Replace the value under the 'email' key of `kwargs` (if present and not
    None) with its anonymized hash. Changes are made in place; nothing is
    done when the key is absent or None.
    :param kwargs: a dict
    """
    # get (rather than `in`) so a present-but-None email is left untouched
    email = kwargs.get('email', None)
    if email is not None:
        kwargs['email'] = anonymize_email(email)
def statistics(database_url, anonymize):
    """Convenience factory returning a CkanPackagerStatistics instance.

    Handy for one-liners such as statistics(db).log_request(request).
    @param database_url: database url as per
        http://docs.sqlalchemy.org/en/latest/core/engines.html#database-urls
    @param anonymize: boolean indicating whether the email addresses in the
        database should be treated anonymously
    """
    tracker = CkanPackagerStatistics(database_url, anonymize)
    return tracker
class CkanPackagerStatistics(object):
    """Tracks download requests, errors, and aggregate totals in a database.

    Uses three tables: 'requests' and 'errors' hold timestamped rows; 'totals'
    holds per-resource counters, with resource_id '*' as the全-resources row.
    """

    def __init__(self, database_url, anonymize):
        """Class used to track application statistics.
        @param database_url: database url as per
        http://docs.sqlalchemy.org/en/latest/core/engines.html#database-urls
        @param anonymize: boolean indicating whether the email addresses in the database should be
        treated anonymously
        """
        self._db = dataset.connect(database_url)
        self.anonymize = anonymize

    def log_request(self, resource_id, email, count=None):
        """Log a new incoming request to the statistics
        @param resource_id: The resource id that was requested
        @param email: The email address that requested the resource
        @param count: optional record count associated with the request
        """
        # Domain is extracted before anonymization so it stays queryable.
        domain = extract_domain(email)
        if self.anonymize:
            email = anonymize_email(email, domain)
        # increase totals for all resources and the resource requested
        self._increase_totals('requests', resource_id='*')
        self._increase_totals('requests', resource_id=resource_id)
        # if there isn't already a request in the requests table from the email address we need to
        # increment the unique requesters count on all resources (*)
        if self._db['requests'].find_one(email=email) is None:
            self._increase_totals('emails', resource_id='*')
        # increase totals for that resource if the email address hasn't requested this resource
        # before
        resource_match = self._db['requests'].find_one(email=email, resource_id=resource_id)
        if resource_match is None:
            self._increase_totals('emails', resource_id=resource_id)
        # store request
        self._db['requests'].insert({
            u'count': count,
            u'domain': domain,
            u'email': email,
            u'resource_id': resource_id,
            u'timestamp': int(time.time()),
        })

    def log_error(self, resource_id, email, message):
        """Log a new error to the statistics
        @param resource_id: The resource id that was requested when the
        error happened
        @param email: The email address that requested the resource
        @param message: The error message
        """
        if self.anonymize:
            email = anonymize_email(email)
        # Increase totals
        self._increase_totals('errors', resource_id='*')
        # Increase totals for that resource
        self._increase_totals('errors', resource_id=resource_id)
        # Store timestamped error
        self._db['errors'].insert({
            'timestamp': int(time.time()),
            'resource_id': resource_id,
            'email': email,
            'message': message
        })

    def get_requests(self, start=0, count=100, **kwargs):
        """Return requests as a list of dictionaries
        @param start: start of the query
        @param count: Number of requests to return
        @param **kwargs: conditions
        @returns: List of rows (as dictionaries)
        """
        # Conditions on 'email' must be anonymized to match stored values.
        if self.anonymize:
            anonymize_kwargs(kwargs)
        result = []
        iterator = self._db['requests'].find(
            _offset=start,
            _limit=count,
            order_by='-timestamp',
            **kwargs
        )
        for row in iterator:
            # Internal primary key is not part of the public row shape.
            del row['id']
            result.append(row)
        return result

    def get_errors(self, start=0, count=100, **kwargs):
        """Return errors as a list of dicts
        @param start: start of the query
        @param count: number of requests to return
        @param **kwargs: conditions
        @returns: List of rows (as dictionaries)
        """
        if self.anonymize:
            anonymize_kwargs(kwargs)
        result = []
        iterator = self._db['errors'].find(
            _offset=start,
            _limit=count,
            order_by='-timestamp',
            **kwargs
        )
        for row in iterator:
            del row['id']
            result.append(row)
        return result

    def get_totals(self, **kwargs):
        """Return the overall stastitics (the totals)
        @param **kwargs: conditions on the totals table
        @returns: Dictionary of rows (as dictionaries), indexed by the resource
        id.
        """
        totals = {}
        for row in self._db['totals'].find(**kwargs):
            totals[row['resource_id']] = {
                'emails': row['emails'],
                'errors': row['errors'],
                'requests': row['requests']
            }
        return totals

    def _increase_totals(self, counter, **kwargs):
        """Increase the given counter
        @param counter: Name of the counter
        @param **kwargs: conditions identifying the totals row (e.g. resource_id)
        """
        r = self._db['totals'].find_one(**kwargs)
        if r is None:
            # Fresh row: zeroed counters; kwargs overwrite the '*' default.
            r = {
                'resource_id': '*',
                'errors': 0,
                'requests': 0,
                'emails': 0
            }
            for key in kwargs:
                r[key] = kwargs[key]
        r[counter] += 1
        # Upsert keyed on the condition columns so the row is updated in place.
        self._db['totals'].upsert(r, kwargs.keys())
|
#!/usr/bin/env python
class cPose:
    """Lightweight 2-D pose holding an x/y coordinate pair."""

    def __init__(self, x, y):
        '''
        Initialization function.
        Contains the x and y-coordinates in separate instance variables
        '''
        self.x = x
        self.y = y

    def __str__(self):
        '''String method for testing purposes'''
        return 'cPose({} , {})'.format(self.x, self.y)
|
import tensorflow as tf
import numpy as np
import keras
from spiking_models import SpikingReLU, Accumulate
from tensorflow.keras.utils import to_categorical
from tensorflow.keras.datasets import mnist
from operations_layers import SqueezeLayer, ExpandLayer, ExtractPatchesLayer, PositionalEncodingLayer
from weight_normalization import robust_weight_normalization
from utils import evaluate_conversion, evaluate_conversion_and_save_data
from multi_head_self_attention import multi_head_self_attention
def create_and_train_ann():
    """
    Definition and training of artificial neural network with defined architecture in a keras functional API way.

    Relies on module-level globals set in __main__ (d_model, num_patches,
    num_heads, mlp_dim, num_classes, x_train/y_train, etc.).
    :return: trained artificial neural network
    """
    inputs = tf.keras.layers.Input(shape=(28, 28, 1))
    # Split the image into patches and linearly project them to d_model.
    patches = ExtractPatchesLayer()(inputs)
    x = tf.keras.layers.Dense(d_model)(patches)
    x = PositionalEncodingLayer(d_model, num_patches)(x)
    out = x
    for _ in range(num_multi_head_attention_modules):
        out = multi_head_self_attention(out, num_heads, projection_dim, d_model)
    # Residual connection from the positionally-encoded patch embeddings.
    x = tf.keras.layers.Reshape([-1, d_model])(x)
    add = tf.keras.layers.Add()([out, x])
    # feedforward mlp
    out = tf.keras.layers.Dense(mlp_dim, activation="relu")(add)
    out = tf.keras.layers.Dense(d_model)(out)
    out = tf.keras.layers.Add()([out, add])
    x = tf.keras.layers.Flatten()(out)
    x = tf.keras.layers.Dense(mlp_dim, activation="relu")(x)
    # --------------------------------------------------
    x = tf.keras.layers.Dense(num_classes)(x)
    x = tf.keras.layers.Softmax()(x)
    ann = tf.keras.models.Model(inputs=inputs, outputs=x)
    ann.compile(
        optimizer="adam",
        loss="categorical_crossentropy",
        metrics=["accuracy"])
    ann.fit(
        x_train,
        y_train,
        validation_data=(x_test, y_test),
        batch_size=batch_size,
        epochs=epochs)
    return ann
def create_and_train_snn(weights, y_test):
    """
    Definition of spiking neural network. It copies ann network up to the dense layers with relu activation functions,
    which are translated into rnn layers with SpikingReLU cells (neurons). This network is not trained, it's weights
    are filled with normalized weights of artificial neural network.
    :param weights: normalized weights from ann
    :param y_test: test labels; only their count is used, to fix the batch size
    :return: the spiking model with the ANN weights loaded
    """
    # batch_size is pinned to the full test set because the RNN layers are
    # stateful (membrane potentials persist across timesteps).
    inputs = tf.keras.layers.Input(shape=(28, 28, 1), batch_size=y_test.shape[0])
    patches = ExtractPatchesLayer()(inputs)
    x = tf.keras.layers.Dense(d_model)(patches)
    x = PositionalEncodingLayer(d_model, num_patches)(x)
    out = x
    for _ in range(num_multi_head_attention_modules):
        out = multi_head_self_attention(out, num_heads, projection_dim, d_model)
    x = tf.keras.layers.Reshape([-1, d_model])(x)
    add = tf.keras.layers.Add()([out, x])
    # feedforward mlp
    # The ANN's ReLU dense layer is replaced by Dense + stateful SpikingReLU
    # RNN; reshapes flatten to one long vector and back around the RNN cell.
    out = tf.keras.layers.Dense(mlp_dim)(add)
    out = tf.keras.layers.Reshape([1, l * mlp_dim])(out)
    out = tf.keras.layers.RNN(SpikingReLU(l * mlp_dim), return_sequences=True, return_state=False,
                              stateful=True)(out)
    out = tf.keras.layers.Reshape([-1, mlp_dim])(out)
    out = tf.keras.layers.Dense(d_model)(out)
    out = tf.keras.layers.Add()([out, add])
    x = tf.keras.layers.Flatten()(out)
    x = ExpandLayer()(x)
    x = tf.keras.layers.Dense(mlp_dim)(x)
    x = tf.keras.layers.RNN(SpikingReLU(mlp_dim), return_sequences=True, return_state=False,
                            stateful=True)(x)
    # --------------------------------------------------
    # Accumulate output spikes over timesteps before the softmax readout.
    x = tf.keras.layers.Dense(num_classes)(x)
    x = tf.keras.layers.RNN(Accumulate(num_classes), return_sequences=True, return_state=False, stateful=True)(x)
    x = tf.keras.layers.Softmax()(x)
    x = SqueezeLayer()(x)
    spiking = tf.keras.models.Model(inputs=inputs, outputs=x)
    print("-" * 32 + "\n")
    spiking.compile(
        optimizer="adam",
        loss="categorical_crossentropy",
        metrics=["accuracy"])
    print(spiking.summary())
    # Not trained: weights come straight from the normalized ANN.
    spiking.set_weights(weights)
    return spiking
if __name__ == "__main__":
    tf.random.set_seed(1234)
    # Hyperparameters; used as module globals by create_and_train_ann
    # and create_and_train_snn.
    batch_size = 64
    epochs = 2
    d_model = 64
    mlp_dim = 128
    l = 50
    num_heads = 4
    num_classes = 10
    channels = 1
    image_size = 28
    patch_size = 4
    num_patches = (image_size // patch_size) ** 2
    patch_dim = channels * patch_size ** 2
    projection_dim = d_model // num_heads
    num_multi_head_attention_modules = 4
    timesteps = 50
    (x_train, y_train), (x_test, y_test) = mnist.load_data()
    # Normalize input so we can train ANN with it.
    # Will be converted back to integers for SNN layer.
    x_train = x_train / 255
    x_test = x_test / 255
    # One-hot encode target vectors.
    y_train = to_categorical(y_train, 10)
    y_test = to_categorical(y_test, 10)
    # Analog model
    ann = create_and_train_ann()
    print(ann.summary())
    _, testacc = ann.evaluate(x_test, y_test, batch_size=batch_size, verbose=0)
    # Rescale weights so ANN activations map onto spiking rates.
    model_normalized = robust_weight_normalization(ann, x_test, ppercentile=0.99)
    weights = model_normalized.get_weights()
    # Preprocessing for RNN
    # Add a channel dimension.
    axis = 1 if keras.backend.image_data_format() == 'channels_first' else -1
    x_train_expanded = np.expand_dims(x_train, axis)
    x_test_expanded = np.expand_dims(x_test, axis)
    # Conversion to spiking model
    snn = create_and_train_snn(weights, y_test)
    print("Simulating network")
    evaluate_conversion(snn, x_test_expanded, y_test, testacc, y_test.shape[0], timesteps)
|
import scrapy
from scrapy.spiders import Rule, CrawlSpider
from scrapy.selector import Selector
from scrapy.linkextractors import LinkExtractor
from rowpiece.items import RowpieceItem
import urllib.request
import struct
import zlib
from fontTools.ttLib import TTFont
import xml.dom.minidom as xmldom
import os
def getValue(node, attribute):
    """Return the string value of *attribute* on a minidom element *node*."""
    attr = node.attributes[attribute]
    return attr.value
def getTTGlyphList(xml_path):
    """Parse the font XML at *xml_path* and return all its <TTGlyph> elements."""
    absolute_path = os.path.abspath(xml_path)
    document_root = xmldom.parse(absolute_path).documentElement
    return document_root.getElementsByTagName('TTGlyph')
def isEqual(ttglyph_a, ttglyph_b):
    """Return True when two <TTGlyph> elements describe the same outline.

    Two glyphs are considered equal when they have the same number of <pt>
    children and every corresponding point matches on its 'x', 'y' and 'on'
    attributes.
    """
    pts_a = ttglyph_a.getElementsByTagName('pt')
    pts_b = ttglyph_b.getElementsByTagName('pt')
    if len(pts_a) != len(pts_b):
        return False
    for point_a, point_b in zip(pts_a, pts_b):
        for attr in ('x', 'y', 'on'):
            if point_a.attributes[attr].value != point_b.attributes[attr].value:
                return False
    return True
def refresh(dict, ttGlyphList_a, ttGlyphList_data):
    """Update *dict* with digit mappings learned by outline comparison.

    For every reference glyph (from the known data font) whose name appears in
    the hard-coded name->digit table, find the glyph in the freshly downloaded
    font with an identical outline and record its name -> digit in *dict*.

    NOTE: the first parameter shadows the builtin ``dict``; the name is kept
    for interface compatibility with existing callers.
    """
    data_dict = {"uniE184":"4","uniE80B":"3","uniF22E":"8","uniE14C":"0",
                 "uniF5FB":"6","uniEE59":"5","uniEBD3":"1","uniED85":"7","uniECB8":"2","uniE96A":"9"}
    for glyph_ref in ttGlyphList_data:
        if getValue(glyph_ref, 'name') not in data_dict:
            continue
        for glyph_new in ttGlyphList_a:
            if isEqual(glyph_new, glyph_ref):
                dict[getValue(glyph_new, 'name')] = data_dict[getValue(glyph_ref, 'name')]
                break
    return dict
def decode(decode_dict, code):
    """Translate a font-obfuscated string into its readable form.

    *code* contains private-use-area characters; ``repr`` escapes each of them
    as ``\\uXXXX``, which we split on to recover the glyph names ("uniXXXX").
    Literal characters between the escapes (e.g. '.') are kept as-is, and each
    token is mapped through *decode_dict* to produce the final text.
    """
    tokens = []
    for chunk in repr(code).split("\\u"):
        tokens.append("uni" + chunk[:4].upper())
        trailing = chunk[4:]
        if trailing:
            tokens.append(trailing)
    # Drop the artifacts produced by repr()'s surrounding quotes.
    tokens = tokens[1:-1]
    return "".join(str(decode_dict[token]) for token in tokens)
class RowpieceSpider(CrawlSpider):
    """Crawls maoyan.com cinema pages and scrapes show-time/price data.

    Prices on the page are obfuscated with a per-response custom web font;
    parse_item downloads that font, builds a glyph->digit decoding table by
    comparing outlines against a known reference font (data.xml), and decodes
    the prices before yielding a RowpieceItem.
    """
    name = 'rowpiece'
    # allowed_domains = ['http://maoyan.com/']
    start_urls = ['http://maoyan.com/cinemas?areaId=-1&districtId=740&offset=0']
    rules = (
        Rule(LinkExtractor(allow=(r'http://maoyan.com/cinema/\d+')), callback='parse_item'),
        Rule(LinkExtractor(allow=(r'http://maoyan.com/cinemas\?areaId=-1&districtId=740&offset=\d+')))
    )
    def parse_item(self, response):
        """Extract one cinema's schedule and decoded prices from *response*."""
        # print(response.body)
        sel = Selector(response)
        # online_moive = sel.xpath('//div[@class="movie-list"]//img/@src').extract()
        # address = sel.xpath('//div[@class="cinema-brief-container"]/div[1]/text()').extract_first()
        # telephone = sel.xpath('//div[@class="cinema-brief-container"]/div[2]/text()').extract_first()
        # img_url = sel.xpath('//div[@class="avatar-shadow"]/img/@src').extract_first()
        # Cinema name
        cinema_name = sel.xpath('//div[@class="cinema-brief-container"]/h3/text()').extract_first()
        # Movie titles
        movie_name = sel.xpath('//div[contains(@class, "show-list")]//h3/text()').extract()
        # Screening dates
        date = sel.xpath('//div[contains(@class, "show-list")]//span[contains(@class, "date-item")]/text()').extract()
        # Start / end times
        begin_time = sel.xpath('//div[contains(@class, "show-list")]//span[contains(@class, "begin-time")]/text()').extract()
        end_time = sel.xpath('//div[contains(@class, "show-list")]//span[contains(@class, "end-time")]/text()').extract()
        # Language
        language = sel.xpath('//div[contains(@class, "show-list")]//span[contains(@class, "lang")]/text()').extract()
        # Screening hall
        hall = sel.xpath('//div[contains(@class, "show-list")]//span[contains(@class, "hall")]/text()').extract()
        # Prices (digits rendered with the obfuscation font)
        price = sel.xpath('//div[contains(@class, "show-list")]//span[contains(@class, "sell-price")]/span/text()').extract()
        # Number of scheduled days for each movie
        date_count = []
        movie_count = len(movie_name)
        for i in range(movie_count):
            date_count.append(len(sel.xpath('//div[@data-index = "' + str(i) + '"]//div[@class="show-date"]/span').extract())-1)
        # Number of showings per movie per day (one <tr> per showing)
        show_count = []
        for i in range(movie_count):
            for j in range(date_count[i]):
                show_count.append(sel.xpath('//div[@data-index = "' + str(i) + '"]//tbody').extract()[j].count("</tr>"))
        # Download the per-response obfuscation font (woff) referenced in the inline <style>.
        font_url = sel.xpath('/html/head/style/text()').extract()[0]
        font_url = 'http:'+font_url[font_url.rfind('url')+5:font_url.find('woff')+4]
        print(font_url)
        woff_path = 'tmp.woff'
        f = urllib.request.urlopen(font_url)
        data = f.read()
        with open(woff_path, "wb") as code:
            code.write(data)
        # Build the decoding dictionary: dump the font to XML, then match glyph
        # outlines against the known reference font (data.xml) via refresh().
        font1 = TTFont('tmp.woff')
        font1.saveXML('tmp.xml')
        decode_dict = dict(enumerate(font1.getGlyphOrder()[2:]))
        decode_dict=dict(zip(decode_dict.values(),decode_dict.keys()))
        dataTTGlyphList = getTTGlyphList("data.xml")
        tmpTTGlyphList = getTTGlyphList("tmp.xml")
        decode_dict = refresh(decode_dict,tmpTTGlyphList,dataTTGlyphList)
        decode_dict['.'] = '.'
        # print(decode_dict)
        # Decode the obfuscated price strings into plain digits.
        for i in range(len(price)):
            price[i] = decode(decode_dict, price[i])
        item = RowpieceItem()
        item['cinema_name'] = cinema_name
        item['movie_name'] = movie_name
        item['date'] = date
        item['begin_time'] = begin_time
        item['end_time'] = end_time
        item['language'] = language
        item['hall'] = hall
        item['price'] = price
        item['date_count'] = date_count
        item['show_count'] = show_count
        yield item
# ============LICENSE_START=======================================================
# Copyright (c) 2019-2022 AT&T Intellectual Property. All rights reserved.
# ================================================================================
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============LICENSE_END=========================================================
import json
import os
import sys
import unittest
from unittest.mock import patch
from pathlib import Path
import trapd_get_cbs_config
class test_trapd_get_cbs_config(unittest.TestCase):
"""
Test the trapd_get_cbs_config mod
"""
snmptrap_dir = "/tmp/opt/app/snmptrap"
json_dir = snmptrap_dir + "/etc"
# fmt: off
pytest_json_data = json.loads(
'{'
'"snmptrapd": { '
' "version": "1.4.0", '
' "title": "ONAP SNMP Trap Receiver" }, '
'"protocols": { '
' "transport": "udp", '
' "ipv4_interface": "0.0.0.0", '
' "ipv4_port": 6162, '
' "ipv6_interface": "::1", '
' "ipv6_port": 6162 }, '
'"cache": { '
' "dns_cache_ttl_seconds": 60 }, '
'"publisher": { '
' "http_timeout_milliseconds": 1500, '
' "http_retries": 3, '
' "http_milliseconds_between_retries": 750, '
' "http_primary_publisher": "true", '
' "http_peer_publisher": "unavailable", '
' "max_traps_between_publishes": 10, '
' "max_milliseconds_between_publishes": 10000 }, '
'"streams_publishes": { '
' "sec_fault_unsecure": { '
' "type": "message_router", '
' "aaf_password": null, '
' "dmaap_info": { '
' "location": "mtl5", '
' "client_id": null, '
' "client_role": null, '
' "topic_url": "http://localhost:3904/events/ONAP-COLLECTOR-SNMPTRAP" }, '
' "aaf_username": null } }, '
'"files": { '
' "runtime_base_dir": "/tmp/opt/app/snmptrap", '
' "log_dir": "logs", '
' "data_dir": "data", '
' "pid_dir": "tmp", '
' "arriving_traps_log": "snmptrapd_arriving_traps.log", '
' "snmptrapd_diag": "snmptrapd_prog_diag.log", '
' "traps_stats_log": "snmptrapd_stats.csv", '
' "perm_status_file": "snmptrapd_status.log", '
' "eelf_base_dir": "/tmp/opt/app/snmptrap/logs", '
' "eelf_error": "error.log", '
' "eelf_debug": "debug.log", '
' "eelf_audit": "audit.log", '
' "eelf_metrics": "metrics.log", '
' "roll_frequency": "day", '
' "minimum_severity_to_log": 2 }, '
'"trap_config": { '
' "sw_interval_in_seconds": 60, '
' "notify_oids": { '
' ".1.3.6.1.4.1.9.0.1": { '
' "sw_high_water_in_interval": 102, '
' "sw_low_water_in_interval": 7, '
' "category": "logonly" }, '
' ".1.3.6.1.4.1.9.0.2": { '
' "sw_high_water_in_interval": 101, '
' "sw_low_water_in_interval": 7, '
' "category": "logonly" }, '
' ".1.3.6.1.4.1.9.0.3": { '
' "sw_high_water_in_interval": 102, '
' "sw_low_water_in_interval": 7, '
' "category": "logonly" }, '
' ".1.3.6.1.4.1.9.0.4": { '
' "sw_high_water_in_interval": 10, '
' "sw_low_water_in_interval": 3, '
' "category": "logonly" } } }, '
'"snmpv3_config": { '
' "usm_users": [ { '
' "user": "usr-sha-aes256", '
' "engineId": "8000000001020304", '
' "usmHMACSHAAuth": "authkey1", '
' "usmAesCfb256": "privkey1" }, '
' { "user": "user1", '
' "engineId": "8000000000000001", '
' "usmHMACMD5Auth": "authkey1", '
' "usmDESPriv": "privkey1" }, '
' { "user": "user2", '
' "engineId": "8000000000000002", '
' "usmHMACSHAAuth": "authkey2", '
' "usmAesCfb128": "privkey2" }, '
' { "user": "user3", '
' "engineId": "8000000000000003", '
' "usmHMACSHAAuth": "authkey3", '
' "usmAesCfb256": "privkey3" } '
'] } }'
)
# fmt: on
@classmethod
def setUpClass(cls):
""" set up the required directory tree """
try:
Path(test_trapd_get_cbs_config.snmptrap_dir + "/logs").mkdir(parents=True, exist_ok=True)
Path(test_trapd_get_cbs_config.snmptrap_dir + "/tmp").mkdir(parents=True, exist_ok=True)
Path(test_trapd_get_cbs_config.snmptrap_dir + "/etc").mkdir(parents=True, exist_ok=True)
except Exception as e:
print("Error while running %s : %s" % (os.path.basename(__file__), str(e.strerror)))
sys.exit(1)
def write_config(self, filename, config):
"""
write a config file
"""
# create snmptrapd.json for pytest
with open(filename, "w") as outfile:
json.dump(config, outfile)
@patch.dict(os.environ, {"CBS_SIM_JSON": json_dir + "/snmptrapd.json"})
def test_cbs_fallback_env_present(self):
"""
Test that CBS fallback env variable exists and we can get config
from fallback env var
"""
assert os.getenv("CBS_SIM_JSON") == test_trapd_get_cbs_config.json_dir + "/snmptrapd.json"
self.write_config(test_trapd_get_cbs_config.json_dir + "/snmptrapd.json", test_trapd_get_cbs_config.pytest_json_data)
self.assertTrue(trapd_get_cbs_config.get_cbs_config())
@patch.dict(os.environ, {"CBS_SIM_JSON": json_dir + "/snmptrapd.json"})
def test_cbs_fallback_env_present_bad_numbers(self):
"""
Test as in test_cbs_fallback_env_present(), but with
various values reset to be non-numeric.
"""
assert os.getenv("CBS_SIM_JSON") == test_trapd_get_cbs_config.json_dir + "/snmptrapd.json"
with patch.dict(test_trapd_get_cbs_config.pytest_json_data):
test_trapd_get_cbs_config.pytest_json_data["publisher"]["http_milliseconds_between_retries"] = "notanumber"
test_trapd_get_cbs_config.pytest_json_data["files"]["minimum_severity_to_log"] = "notanumber"
test_trapd_get_cbs_config.pytest_json_data["publisher"]["http_retries"] = "notanumber"
self.write_config(test_trapd_get_cbs_config.json_dir + "/snmptrapd.json",
test_trapd_get_cbs_config.pytest_json_data)
self.assertTrue(trapd_get_cbs_config.get_cbs_config())
@patch.dict(os.environ, {"CBS_SIM_JSON": json_dir + "/nosuchfile.json"})
def test_cbs_override_env_invalid(self):
""" """
assert os.getenv("CBS_SIM_JSON") == test_trapd_get_cbs_config.json_dir + "/nosuchfile.json"
with self.assertRaises(SystemExit) as exc:
result = trapd_get_cbs_config.get_cbs_config()
self.assertEqual(str(exc.exception), "1")
@patch.dict(os.environ, {"CONSUL_HOST": "localhost"})
def test_cbs_env_present(self):
"""
Test that CONSUL_HOST env variable exists but fails to
respond
"""
self.assertEqual(os.getenv("CONSUL_HOST"), "localhost")
del os.environ["CBS_SIM_JSON"]
self.assertNotIn("CBS_SIM_JSON", os.environ)
with self.assertRaises(SystemExit) as exc:
trapd_get_cbs_config.get_cbs_config()
@patch.dict(os.environ, {})
def test_cbs_override_env_undefined(self):
""" """
del os.environ["CBS_SIM_JSON"]
self.assertNotIn("CBS_SIM_JSON", os.environ)
with self.assertRaises(SystemExit) as exc:
trapd_get_cbs_config.get_cbs_config()
# Allow running this test module directly (pytest/CI normally drives it).
if __name__ == "__main__": # pragma: no cover
    unittest.main()
|
import pickle
import numpy as np
import pandas as pd
from sklearn import preprocessing, model_selection, ensemble
import matplotlib.pyplot as plt
from pandas.plotting import scatter_matrix
# Load the five pre-trained base classifiers from disk.
# Bug fix: the original opened five files and never closed them; use `with`.
with open('knn.pickle', 'rb') as f:
    knn = pickle.load(f)
with open('svm.pickle', 'rb') as f:
    svm = pickle.load(f)
with open('logisticsR.pickle', 'rb') as f:
    lr = pickle.load(f)
with open('gnb.pickle', 'rb') as f:
    gnb = pickle.load(f)
with open('gbc.pickle', 'rb') as f:
    gbc = pickle.load(f)
# Load the voice dataset and select the acoustic features the models expect.
df = pd.read_csv('C:\\Users\\Chaitanya\\Documents\\Gender\\dsp project\\voice.csv')
X = np.array(df[['meanfun','Q25','sd','IQR','sfm','meanfreq','mode']])
y = np.array(df['label'])
# Encode labels (male/female) as integers and standardize the features.
gender_encoder = preprocessing.LabelEncoder()
y = gender_encoder.fit_transform(y)
scaler = preprocessing.StandardScaler()
scaler.fit(X)
X = scaler.transform(X)
X_train, X_test, y_train, y_test = model_selection.train_test_split(X, y, test_size=0.2)
# Hard-voting ensemble of the five pre-trained classifiers.
clf = ensemble.VotingClassifier(estimators=[('lr', lr), ('knn', knn), ('svm', svm), ('gnb', gnb), ('gbc', gbc)], voting='hard')
# NOTE(review): the ensemble is fitted on ALL of X, so X_test below is not a
# held-out set — this score is effectively a training accuracy.
clf.fit(X, y)
print('the training accuracy is: ', end='')
print(clf.score(X_test, y_test)*100)
# Evaluate on the external test set (test.csv), preprocessed the same way.
df1 = pd.read_csv('test.csv')
x = np.array(df1[['meanfun','Q25','sd','IQR','sfm','meanfreq','mode']])
y1 = np.array(df1['label'])
# NOTE(review): fit_transform refits the encoder on the test labels; this only
# matches the training encoding if both classes appear — confirm intent.
y1 = gender_encoder.fit_transform(y1)
x = scaler.transform(x)
res = clf.predict(x)
for i in range(len(y1)):
    if res[i] == 0:
        print("Female", end=', ')
    else:
        print('Male', end=', ')
# Bug fix: this message previously said "training accuracy" although it
# reports the score on the external test set.
print('the testing accuracy is: ', end='')
print(clf.score(x, y1)*100)
|
# Copyright (c) 2011 Matthias Matousek <m@tou.io>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import socket, logging, sys, ircbotconf
from threading import Thread
class IrcBot(Thread):
    """Minimal threaded IRC bot: connects, registers, joins a room and then
    dispatches every chunk of received data to :meth:`handle`.

    Subclass and override :meth:`handle` to implement behaviour.
    """

    def __init__(self, server, nick, user, room, port=6667):
        """Set up the IrcBot by setting necessary fields.

        :param server: IRC server hostname
        :param nick:   nickname to register
        :param user:   USER command payload
        :param room:   channel to join (e.g. ``#example``)
        :param port:   server port, default 6667
        """
        Thread.__init__(self)
        # connection parameters
        self.server = server
        self.port = port
        self.nick = nick
        self.user = user
        self.room = room
        # we need a socket to connect to the irc server
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

    def run(self):
        """Starts the bot by connecting to the server and joining a room."""
        try:
            logging.info('trying to connect to %s:%d' % (self.server, self.port))
            self.sock.connect((self.server, self.port))
            logging.info('connected to %s:%d' % (self.server, self.port))
        except OSError:
            # Bug fix: was a bare `except:` that also swallowed SystemExit /
            # KeyboardInterrupt; socket failures raise OSError subclasses.
            logging.fatal('unable to connect to %s:%d' % (self.server, self.port))
            sys.exit(1)
        logging.debug('trying to register nick "%s"' % self.nick)
        self.send('NICK %s' % self.nick)
        logging.debug('trying to register user "%s"' % self.user)
        self.send('USER %s' % self.user)
        logging.debug('trying to join room "%s"' % self.room)
        self.send('JOIN %s' % self.room)
        # infinite loop handling data
        while True:
            data = self.sock.recv(4096)
            logging.debug('received %s' % data)
            # Bug fix: recv() returns bytes on Python 3, so the original
            # `data == ''` comparison was never true and the loop never
            # detected a disconnect. An empty buffer is falsy in both cases.
            if not data:
                logging.info('disconnected')
                break
            self.handle(data)

    def handle(self, data):
        """Handles received data (bytes). Override in subclasses."""
        pass

    def send(self, data):
        """Sends the given text line to the IRC server, CRLF-terminated."""
        # Bug fix: sockets require bytes on Python 3; sending str raised
        # TypeError. Encoding explicitly works on both Python 2 and 3.
        self.sock.send((data + '\r\n').encode('utf-8'))
        logging.debug('sent %s' % data)
if __name__ == '__main__':
    # Verbose logging, then run the bot with the settings from ircbotconf.
    logging.basicConfig(level=logging.DEBUG)
    bot = IrcBot(ircbotconf.server, ircbotconf.nick, ircbotconf.user, ircbotconf.room)
    bot.start()
|
from ._title import Title
from plotly.graph_objs.heatmapgl.colorbar import title
from ._tickformatstop import Tickformatstop
from ._tickfont import Tickfont
|
from KalturaClient import *
from KalturaClient.Plugins.Core import KalturaSessionType, KalturaCategory, \
KalturaPrivacyType, KalturaNullableBoolean, KalturaAppearInListType, \
KalturaInheritanceType, KalturaCategoryUserPermissionLevel, \
KalturaCategoryFilter,KalturaContributionPolicyType, KalturaAppearInListType
from utilityTestFunc import *
# This is class for KALTURA API
# Before use call for self.common.apiClientSession.startCurrentApiClientSession() to start session of current (under test) partner
# Or call startApiClientSession(partnerId, dcUrl, secret) for specific partner
class ApiClientSession:
    """Manages Kaltura API client sessions (KS) for a given partner.

    Call startCurrentApiClientSession() for the partner under test, or
    startApiClientSession(partnerId, dcUrl, secret) for a specific partner.
    """
    # class-level defaults, overwritten per instance
    driver = None
    clsCommon = None
    client = None
    def __init__(self, clsCommon, driver):
        self.driver = driver
        self.clsCommon = clsCommon
    def startApiClientSession(self, partnerId, dcUrl, secret, userId=None, impersonateID=None):
        """Store the partner credentials and open an admin session with
        entitlement disabled. Returns False if the session could not open."""
        self.partnerId = partnerId
        self.dcUrl = dcUrl
        self.secret = secret
        self.userId= userId
        self.impersonateID = impersonateID
        if self.client == None:
            self.client = self.openSession(None,None,"all:*,disableentitlement")
        if self.client == False:
            return False
        return True
    def startCurrentApiClientSession(self):
        # Get Partner Details from partnerDetails.csv file: partnerId,serverUrl,adminSecret
        serverUrl,adminSecret = getPartnerDetails(localSettings.LOCAL_SETTINGS_PARTNER)
        if serverUrl == None or adminSecret == None:
            writeToLog("INFO","FAILED to get partner details: service URL and Admin Secret")
            return False
        if self.startApiClientSession(localSettings.LOCAL_SETTINGS_PARTNER, serverUrl, adminSecret) == False:
            writeToLog("INFO","FAILED to start KALTURA API client session")
            return False
        return True
    def getKs(self,userType=2, privileges=None, userId=None):
        """Create a client and start a session, returning {1: client, 2: ks}.

        NOTE(review): userType=2 is passed straight to client.session.start;
        presumably KalturaSessionType.ADMIN — confirm against the SDK enum.
        """
        config = KalturaConfiguration(self.partnerId)
        if userId==None:
            userId = self.userId
        config.serviceUrl = self.dcUrl
        #=======================================================================
        # config.logger = self.logger
        #=======================================================================
        client = KalturaClient(config)
        result = client.session.start(self.secret, userId, userType, self.partnerId, None, privileges)
        if self.impersonateID != None:
            # impersonation: issue calls on behalf of another partner
            client.setPartnerId(self.impersonateID)
        dictKs = {1:client,2:result}
        return dictKs
    #Open a session
    def openSession(self, userID=None, userType=None, privileges=None):
        """Get a KS via getKs() and attach it to the client.
        Returns the ready client, or False on failure."""
        if userType!=None:
            dictKs = self.getKs(userType,privileges,userID)
        else:
            dictKs = self.getKs(2, privileges, userID)
        try:
            dictKs[1].setKs(dictKs[2])
        except Exception as exp:
            return False
        return dictKs[1]
    def startSession(self,privileges='scenario_default:* privileges',userType=0):
        """Start a raw session (USER by default, ADMIN for userType=1) and
        return the KS string without attaching it to a client."""
        userTypeDict = {0:KalturaSessionType.USER,
                        1: KalturaSessionType.ADMIN}
        config = KalturaConfiguration(self.partnerId)
        config.serviceUrl = self.dcUrl
        client = KalturaClient(config)
        userType = userTypeDict[userType]
        userId = None
        expiry = None
        return client.session.start(self.secret, userId, userType, self.partnerId, expiry, privileges)
class CategoryApi:
    """Thin wrapper around the Kaltura category.* services (create/list/delete)."""

    def __init__(self, publisherID, serverURL, userSecret):
        # NOTE(review): ApiClientSession.__init__ in this file takes
        # (clsCommon, driver), and openSession depends on fields set only by
        # startApiClientSession — this constructor looks inconsistent with
        # that class as written; confirm against the intended API.
        mySess = ApiClientSession(publisherID, serverURL, userSecret)
        self.client = mySess.openSession(None, None, "all:*,disableentitlement")

    ################################################# CATEGORY METHODS ############################################################
    # Flow example for create and delete category:
    # startApiClientSession(self, partnerId, dcUrl, secret, userId=None, impersonateID=None):
    # parentId = self.common.apiClientSession.getParentId('galleries')
    # self.common.apiClientSession.createCategory(parentId, 'python_automation', 'testCategory', 'description', 'tags')
    # self.common.apiClientSession.deleteCategory('testCategory')
    ###############################################################################################################################
    def createCategoryApi(self, client, parentId, owner, name, description=None, tags=None, privacy=KalturaPrivacyType.ALL, addContentToCategory=KalturaContributionPolicyType.ALL, whoCanSeeTheCategory=KalturaAppearInListType.PARTNER_ONLY):
        """Create a category under *parentId*; return its id, or -1 on failure."""
        category = KalturaCategory()
        category.parentId = parentId
        category.name = name
        category.description = description
        category.tags = tags
        category.owner = owner
        category.privacy = privacy
        category.moderation = KalturaNullableBoolean.FALSE_VALUE
        category.privacyContext = "public"
        category.inheritanceType = KalturaInheritanceType.MANUAL
        category.defaultPermissionLevel = KalturaCategoryUserPermissionLevel.MANAGER
        category.defaultOrderBy = None
        category.contributionPolicy = addContentToCategory
        # (the original also pre-assigned appearInList = PARTNER_ONLY here,
        # which was immediately overwritten — dead assignment removed)
        category.appearInList = whoCanSeeTheCategory
        try:
            result = client.category.add(category)
        except Exception as exp:
            # Bug fix: any non-duplicate error previously fell through to
            # `return result.id` with `result` unbound (UnboundLocalError).
            if "DUPLICATE_CATEGORY" in str(exp):
                print("DUPLICATE_CATEGORY")
            return -1
        return result.id

    def getCategoryByName(self, catName):
        """Return the id of the first category matching *catName*, or -1."""
        cat_filter = KalturaCategoryFilter()
        cat_filter.freeText = catName
        pager = None
        try:
            result = self.client.category.list(cat_filter, pager)
        except Exception as exp:
            # Bug fix: the original set result = -1 and then evaluated
            # len(result.objects), raising AttributeError on any API error.
            print(exp)
            return -1
        if len(result.objects) == 0:
            writeToLog("INFO","No category was found named: " + catName)
            return -1
        else:
            return result.objects[0].id

    def deleteCategory(self, categoryName):
        """Delete the category named *categoryName*, moving its entries to the
        parent category. Returns True on success, False otherwise."""
        moveEntriesToParentCategory = KalturaNullableBoolean.TRUE_VALUE
        categoryId = self.getCategoryByName(categoryName)
        if categoryId != -1:
            try:
                self.client.category.delete(categoryId, moveEntriesToParentCategory)
            except Exception as exp:
                writeToLog("INFO","FAILED to delete category")
                return False
            writeToLog("INFO","Category deleted: " + str(categoryName) + "; ID: " + str(categoryId))
            return True
        else:
            return False

    def createCategory(self, parentId, owner, name, description=None, tags=None, privacy=KalturaPrivacyType.ALL, addContentToCategory=KalturaContributionPolicyType.ALL, whoCanSeeTheCategory=KalturaAppearInListType.PARTNER_ONLY):
        """Create a category and log the outcome; returns True/False."""
        categoryId = self.createCategoryApi(self.client, parentId, owner, name, description, tags, privacy, addContentToCategory, whoCanSeeTheCategory)
        if categoryId == -1:
            writeToLog("INFO","FAILED to create category")
            return False
        else:
            writeToLog("INFO","Category created: " + str(name) + "; ID: " + str(categoryId))
            return True

    # If parent id not a number, then we got a parent category name, which we need to translate to category ID
    def getParentId(self, parentId):
        """Accepts either a numeric id string or a category name."""
        if parentId.isdigit():
            return parentId
        else:
            return self.getCategoryByName(parentId)
from fastapi.testclient import TestClient
from ..main import app
client = TestClient(app)
def test_login():
    """POST valid credentials to /login and expect a bearer-token response."""
    payload = {
        'username': 'user1@gmail.com',
        'password': 'user1'
    }
    response = client.post('/login', data=payload)
    body = response.json()
    assert response.status_code == 200
    assert 'access_token' in body
    assert 'token_type' in body
|
# Generated by Django 2.1.3 on 2019-02-20 18:54
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration: rename Product.title to Product.name."""
    dependencies = [
        ('api', '0008_auto_20190220_1834'),
    ]
    operations = [
        migrations.RenameField(
            model_name='product',
            old_name='title',
            new_name='name',
        ),
    ]
|
import string
import json
import xlsxwriter
def save_json(data, filename: str):
    """Serialize *data* as pretty-printed, key-sorted JSON to ``output/<filename>``.

    Bug fixes: the original f-string had no placeholder, so *filename* was
    ignored and every call overwrote the same hard-coded file; the handle was
    also never closed — use a context manager.
    """
    with open(f"output/{filename}", 'w') as file:
        file.write(json.dumps(data, indent=4, sort_keys=True))
def generate_xlsx(data, filename: str):
    """Write an .xlsx workbook to ``output/<filename>``, one worksheet per
    industry, each with a header row and one row per ticker.

    Bug fixes: the original f-string had no placeholder (the *filename*
    parameter was ignored), and the caught exception was bound to an unused
    local — it is now reported instead of silently discarded.
    """
    try:
        workbook = xlsxwriter.Workbook(f"output/{filename}")
        for industry in data:
            for industry_name in industry:
                worksheet = workbook.add_worksheet(industry_name)
                add_header(worksheet)
                add_tickers_data(worksheet, industry)
        workbook.close()
    except Exception as e:
        # Best-effort behaviour preserved (no re-raise), but make the failure visible.
        print(f"generate_xlsx failed: {e}")
def add_tickers_data(sheet, industries):
    """Write one worksheet row per ticker, starting at row 2.

    Column A holds the ticker symbol; columns B..U hold the metric values in
    the same order as the header row written by add_header().
    """
    fields = ('annualReportExpenseRatio', 'beta', 'beta3Year', 'dividendRate',
              'dividendYield', 'enterpriseToEbitda', 'fiftyDayAverage',
              'forwardPE', 'lastCapGain', 'lastFiscalYearEnd',
              'netIncomeToCommon', 'payoutRatio', 'pegRatio', 'profitMargins',
              'revenueQuarterlyGrowth', 'threeYearAverageReturn',
              'trailingAnnualDividendRate', 'trailingEps', 'trailingPE',
              'twoHundredDayAverage')
    columns = 'BCDEFGHIJKLMNOPQRSTU'
    row = 2
    for industry_type in industries:
        for tickers in industries[industry_type]:
            for ticker in tickers:
                ticker_data = tickers[ticker]
                sheet.write(f'A{row}', ticker)
                for column, field in zip(columns, fields):
                    sheet.write(f'{column}{row}', ticker_data[field])
                row += 1
def add_header(sheet):
    """Write the column-title row (row 1) of a worksheet: 'Ticker' in A1
    followed by the twenty metric names in columns B..U."""
    titles = ('Ticker', 'annualReportExpenseRatio', 'beta', 'beta3Year',
              'dividendRate', 'dividendYield', 'enterpriseToEbitda',
              'fiftyDayAverage', 'forwardPE', 'lastCapGain',
              'lastFiscalYearEnd', 'netIncomeToCommon', 'payoutRatio',
              'pegRatio', 'profitMargins', 'revenueQuarterlyGrowth',
              'threeYearAverageReturn', 'trailingAnnualDividendRate',
              'trailingEps', 'trailingPE', 'twoHundredDayAverage')
    for column, title in zip('ABCDEFGHIJKLMNOPQRSTU', titles):
        sheet.write(f'{column}1', title)
|
from django.contrib import admin
from .models import Visitor, Measurement
# Register your models here.
@admin.register(Visitor)
class VisitorAdmin(admin.ModelAdmin):
    """Admin list view for Visitor: show IP plus latitude/longitude columns."""
    list_display = ('ip', 'latitud', 'longitud')
@admin.register(Measurement)
class MeasurementAdmin(admin.ModelAdmin):
    """Default admin configuration for Measurement (no customization)."""
    pass
|
"""
Author: Juan M. Montoya
Class structure based on PacmanDQN_Agents.py
The Pacman AI projects were developed at UC Berkeley found at
http://ai.berkeley.edu/project_overview.html
This new version integrates the memory replay into the data flow.
Thus, not saving it into the disk.
In addition, added Rank-based Prioritized Experience Replay and shift option.
The shift option permits to turn the wide component off and on again.
"""
from util import *
# Pacman Game
from game import Agent
from pacman import GameState
from stable_baselines.common.schedules import LinearSchedule
from stable_baselines.deepq.replay_buffer import ReplayBuffer, PrioritizedReplayBuffer
from rankBasedReplay import RankBasedReplay
import pickle
# Neural nets
import tensorflow as tf
from WDQN import WDQN
import os
class PacmanPWDQN(Agent):
"""
Creates the Wide Deep Q-Network Agent that iterates with the environment
In addition, this agent can be set up to purely Linear or DQN Agent
"""
    def __init__(self, args):
        """Build the agent: load config, choose a replay buffer, create the
        Q- and target networks, and initialize bookkeeping/statistics.

        args: dict with 'path' (JSON config file), layout 'width'/'height',
        'numTraining', 'numGames' and 'seed'.
        """
        # Load parameters from user-given arguments
        self.params = json_to_dict(args["path"])
        os.environ["CUDA_VISIBLE_DEVICES"] = str(self.params["GPU"])
        self.params['width'] = args['width']
        self.params['height'] = args['height']
        self.params['num_training'] = args['numTraining']
        self.params['num_games'] = args['numGames']
        self.path_extra = ""
        self.params["seed"] = args['seed']
        # Dedicated RNG so exploration is reproducible for a given seed.
        self.random = np.random.RandomState(self.params["seed"])
        self.beta_schedule = None
        # time started
        self.general_record_time = time.strftime("%a_%d_%b_%Y_%H_%M_%S", time.localtime())
        self.start_time = time.time()
        self.rank_sort = None
        if self.params["prioritized"]: # For using PrioritizedReplayBuffer
            if self.params["ranked"]:
                # Precompute sampling quantiles for the rank-based buffer.
                N_list = [self.params["batch_size"]] + [int(x) for x in np.linspace(100, self.params["mem_size"], 5)]
                save_quantiles(N_list=N_list, k=self.params["batch_size"],
                               alpha=self.params["prioritized_replay_alpha"], name=self.params["save_file"])
                self.replay_buffer = RankBasedReplay(self.params["mem_size"],
                                                     self.params["prioritized_replay_alpha"],
                                                     name=self.params["save_file"])
                if self.params["sort_rank"] == None:  # For sorting rankbased buffer
                    # Default: re-sort after 1% of the buffer capacity.
                    self.rank_sort = int(self.params["mem_size"] * 0.01)
                else:
                    self.rank_sort = self.params["sort_rank"]
            else:
                self.replay_buffer = PrioritizedReplayBuffer(self.params["mem_size"],
                                                             self.params["prioritized_replay_alpha"])
            if self.params["prioritized_replay_beta_iters"] is None:
                prioritized_replay_beta_iters = self.params['num_training']
            else:
                prioritized_replay_beta_iters = self.params['prioritized_replay_beta_iters']
            # Anneal importance-sampling beta from beta0 to 1.0 over training.
            self.beta_schedule = LinearSchedule(prioritized_replay_beta_iters,
                                                initial_p=self.params['prioritized_replay_beta0'],
                                                final_p=1.0)
        else:
            # Plain uniform replay buffer.
            self.replay_buffer = ReplayBuffer(self.params["mem_size"])
            self.beta_schedule = None
        if self.params["only_dqn"]:
            print("Initialise DQN Agent")
        elif self.params["only_lin"]:
            print("Initialise Linear Approximative Agent")
        else:
            print("Initialise WDQN Agent")
        print(self.params["save_file"])
        if self.params["prioritized"]:
            if self.params["ranked"]:
                print("Using Rank-Based Experience Replay Buffer")
            else:
                print("Using Prioritized Experience Replay Buffer")
        if self.params["model_shift"]:
            print("Using Model Shift")
        print("seed", self.params["seed"])
        print("Starting time:", self.general_record_time)
        # Start Tensorflow session
        tf.reset_default_graph()
        tf.set_random_seed(self.params["seed"])
        self.qnet = WDQN(self.params, "model")  # Q-network
        self.tnet = WDQN(self.params, "target_model")  # Q-target-network
        self.saver = tf.train.Saver()
        self.sess = tf.Session()
        self.qnet.set_session(self.sess)
        self.tnet.set_session(self.sess)
        self.sess.run(tf.global_variables_initializer())
        # Q and cost
        self.Q_global = []
        # Stats
        # Global training step restored from the network's counter variable.
        self.cnt = self.qnet.sess.run(self.qnet.global_step_dqn)
        self.local_cnt = 0
        self.wins = 0
        self.best_int = self.params["shift_best"]
        self.numeps = 0
        self.model_eps = 0
        self.episodeStartTime = time.time()
        self.last_steps = 0
        # Maps between action indices and Pacman direction strings.
        self.get_direction = lambda k: ['North', 'South', 'East', 'West', 'Stop'][k]
        self.get_value = {'North': 0, 'South': 1, 'East': 2, 'West': 3, 'Stop': 4}
        self.lastWindowAccumRewards = 0.0
        self.Q_accumulative = 0.0
        self.accumTrainRewards = 0.0
        self.sub_dir = str(self.params["save_interval"])
    def registerInitialState(self, state):
        """Inspects the starting state.

        Resets per-episode bookkeeping, optionally toggles between the WDQN
        and pure-DQN modes ("model shift"), reloads the model weights, and
        increments the episode counter.
        """
        # Reset reward
        self.last_score = 0
        self.last_reward = 0.
        # Reset state
        self.last_state = None
        self.current_state = state
        # Reset actions
        self.last_action = None
        # Reset vars
        self.terminal = None
        self.won = True
        self.Q_global = []
        # Shift Model between WDQN and DQN during training
        if self.params["model_shift"] and (self.numeps + 1) <= self.params['num_training']:
            # Toggle every val_shift episodes once start_shift has been reached.
            if (self.numeps +1) >= self.params["start_shift"] and (self.numeps +1) % self.params["val_shift"] == 0:
                if self.params["only_dqn"]:
                    self.params["only_dqn"] = False
                    print("Using WDQN Agent starting from eps", (self.numeps + 1))
                else:
                    self.params["only_dqn"] = True
                    print("Using DQN Agent starting from eps", (self.numeps + 1))
            if self.params["model_shift"] and (self.numeps + 1) == self.params['num_training'] and not self.params["only_dqn"]: # Back to WDQN at the end
                self.params["only_dqn"] = False
                print("Back to WDQN Agent for testing")
        # Load model
        self.load_mod()
        # Next
        self.numeps += 1
    def getQvalues(self, model, dropout):
        """Access Q Values by using the model prediction of WDQN.py.

        Depending on configuration, predicts from the convolutional DQN head,
        the linear feature head, or the combined wide+deep network.
        map_state_mat / mat_features come from util (star import) — they
        presumably encode the game state as a matrix / feature vector.
        Returns the first (only) row of the model's prediction.
        """
        if self.params["only_dqn"]:
            return model.predict_dqn(map_state_mat(self.current_state), dropout)[0]
        elif self.params["only_lin"]:
            return model.predict_lin(mat_features(self.current_state, ftrs=self.params["feat_val"]), dropout)[0]
        else:
            return model.predict_wdqn(map_state_mat(self.current_state),
                                      mat_features(self.current_state, ftrs=self.params["feat_val"]), dropout)[0]
    def getPolicy(self, model, dropout=1.0):
        """Pick the greedy action among the legal moves (excluding 'Stop').

        Records the chosen max Q-value in self.Q_global for statistics and
        breaks ties uniformly at random using the seeded RNG.
        """
        qValues = self.getQvalues(model, dropout)
        # Restrict to legal actions; 'Stop' is never considered.
        qVal = {self.get_value[l]: qValues[self.get_value[l]] for l in self.current_state.getLegalActions(0) if
                not l == "Stop"}
        maxValue = max(qVal.values())
        self.Q_global.append(maxValue)
        # Random tie-break among all maximizing actions.
        return self.get_direction(self.random.choice([k for k in qVal.keys() if qVal[k] == maxValue]))
def getAction(self, state):
"""Exploit / Explore"""
if self.random.rand() > self.params['eps']:
# Exploit action
move = self.getPolicy(self.qnet) # dropout deactivated
else:
legal = [v for v in state.getLegalActions(0) if not v == "Stop"]
move = self.random.choice(legal)
# Save last_action
self.last_action = self.get_value[move]
return move
    def observationFunction(self, state):
        """Framework hook invoked on every non-terminal observation.

        Marks the step as non-terminal, performs the learning step, and
        returns the (unmodified) state to the game engine.
        """
        self.terminal = False
        self.observation_step(state)
        return state
    def observation_step(self, state):
        """
        Realize the observation step.

        Rewards are balanced in this part: the raw game-score delta is
        mapped onto a fixed shaped-reward scheme. The training occurs in
        this section: the transition is stored in the replay buffer, the
        target network is periodically synced, one training step runs,
        and epsilon is linearly annealed.
        """
        if self.last_action is not None:
            # Process current experience state
            self.last_state = self.current_state.deepCopy()
            self.current_state = state
            # Process current experience reward (raw score delta)
            reward = state.getScore() - self.last_score
            self.last_score = state.getScore()
            # Reward system: map score deltas onto shaped rewards.
            if reward > 20:
                self.last_reward = 50  # 0.1 # Eat ghost
            elif reward > 0:
                self.last_reward = 10  # 0.02 # Eat food
            elif reward < -10:
                self.last_reward = -500.  # -1 # Get eaten
                self.won = False
            elif reward < 0:
                self.last_reward = -1  # -0.002 # Punish time
            if (self.terminal and self.won):
                self.last_reward = 100  # 0.2 # Won
            if self.isInTraining():
                # Copy online weights to target network every
                # `target_update_network` steps once training started.
                if self.local_cnt % self.params["target_update_network"] == 0 \
                        and self.local_cnt > self.params['train_start']:
                    self.tnet.rep_network(self.qnet)
                    print("Copied model parameters to target network. total_t = %s, period = %s" % (
                        self.local_cnt, self.params["target_update_network"]))
                # Store last experience into memory (rank-based
                # prioritized replay takes a single tuple argument).
                if self.params["prioritized"] and self.params["ranked"]:
                    self.replay_buffer.add((self.last_state, self.last_action, float(self.last_reward),
                                            self.current_state,
                                            self.terminal))
                else:
                    self.replay_buffer.add(self.last_state, self.last_action, float(self.last_reward),
                                           self.current_state,
                                           self.terminal)
                # Train
                self.train()
        # Next step counter
        self.local_cnt += 1
        if self.local_cnt == self.params['train_start']:
            print("")
            print("Memory Replay populated")
            print("")
            self.model_eps = self.numeps
            # with open('data/lin_rb.pickle', 'wb') as handle:
            #     pickle.dump(self.replay_buffer, handle)
            #     print("Pickle Saved")
        # Linearly anneal epsilon down to eps_final.
        self.params['eps'] = max(self.params['eps_final'],
                                 1.00 - float(self.cnt) / float(self.params['eps_step']))
    def train(self):
        """Train different agents: WDQN, DQN and Linear.

        Once more than `train_start` transitions have been collected,
        samples a (prioritized) batch, runs one optimisation step on the
        online network, and — for prioritized replay — feeds |TD error|
        back as the new sample priorities.
        """
        if self.local_cnt > self.params['train_start']:
            if self.params["only_dqn"]:
                # DQN-only: matrix state batch and DQN target Q-values.
                batch_s_dqn, batch_a, batch_t, qt_dqn, batch_r, batch_idxes, weights = extract_batches_per(self.params,
                                                                                                           self.tnet,
                                                                                                           self.replay_buffer,
                                                                                                           self.beta_schedule,
                                                                                                           (self.numeps-self.model_eps))
                self.cnt, td_errors = self.qnet.train(batch_s_dqn, None, batch_a, batch_t, qt_dqn, None, None, batch_r,
                                                      self.params["dropout"],
                                                      self.params["only_dqn"],
                                                      self.params["only_lin"], weights)
            elif self.params["only_lin"]:
                # Linear-only: feature batch and linear target Q-values.
                batch_s_lin, batch_a, batch_t, qt_lin, batch_r, batch_idxes, weights = extract_batches_per(self.params,
                                                                                                           self.tnet,
                                                                                                           self.replay_buffer,
                                                                                                           self.beta_schedule,
                                                                                                           (self.numeps-self.model_eps))
                self.cnt, td_errors = self.qnet.train(None, batch_s_lin, batch_a, batch_t, None, qt_lin, None, batch_r,
                                                      self.params["dropout"],
                                                      self.params["only_dqn"],
                                                      self.params["only_lin"], weights)
            else:
                # Combined WDQN: both state representations and all targets.
                batch_s_dqn, batch_s_lin, batch_a, batch_t, qt_lin, qt_dqn, qt_wdqn, batch_r, batch_idxes, weights = extract_batches_per(
                    self.params, self.tnet,
                    self.replay_buffer, self.beta_schedule, (self.numeps-self.model_eps))
                self.cnt, td_errors = self.qnet.train(batch_s_dqn, batch_s_lin, batch_a, batch_t, qt_dqn, qt_lin,
                                                      qt_wdqn,
                                                      batch_r,
                                                      self.params["dropout"],
                                                      self.params["only_dqn"],
                                                      self.params["only_lin"], weights)
            if self.params["prioritized"]:
                # New priorities: |TD error| + eps keeps them strictly positive.
                new_priorities = np.abs(td_errors) + self.params["prioritized_replay_eps"]
                self.replay_buffer.update_priorities(batch_idxes, new_priorities)
                # Rank-based buffers are periodically re-sorted.
                if self.params["ranked"] and self.cnt % self.rank_sort == 0:
                    self.replay_buffer.sort()
def final(self, state):
"""Inspects the last state"""
# Do observation
self.terminal = True
self.observation_step(state)
NUM_EPS_UPDATE = 100
self.lastWindowAccumRewards += state.getScore() #
self.accumTrainRewards += state.getScore()
self.Q_accumulative += max(self.Q_global, default=float('nan'))
self.wins += self.won
if self.numeps % NUM_EPS_UPDATE == 0:
# Print stats
eps_time = time.time() - self.episodeStartTime
print('Reinforcement Learning Status:')
if self.numeps <= self.params['num_training']:
trainAvg = self.accumTrainRewards / float(self.numeps)
print('\tCompleted %d out of %d training episodes' % (
self.numeps, self.params['num_training']))
print('\tAverage Rewards over all training: %.2f' % (
trainAvg))
windowAvg = self.lastWindowAccumRewards / float(NUM_EPS_UPDATE)
windowQavg = self.Q_accumulative / float(NUM_EPS_UPDATE)
window_steps = (self.cnt - self.last_steps) / float(NUM_EPS_UPDATE)
print('\tAverage Rewards for last %d episodes: %.2f' % (
NUM_EPS_UPDATE, windowAvg))
print('\tEpisode took %.2f seconds' % (eps_time))
print('\tEpisilon is %.8f' % self.params["eps"])
print('\tLinear Decay learning Rate is %.8f' % self.sess.run(self.qnet.lr_lin))
if self.params["save_logs"]:
log_file = open('logs/' + self.params["save_file"] + "-" + str(self.general_record_time) + '-l-' + str(
(self.params["num_training"])) + '.log',
'a')
log_file.write("# %4d | s: %8d | t: %.2f | r: %12f | Q: %10f | won: %r \n" %
(self.numeps, window_steps, eps_time, windowAvg,
windowQavg, self.wins))
# Save Best Model
if windowAvg >= self.params["best_thr"]:
self.params["best_thr"] = windowAvg
self.last_steps = self.cnt
self.save_mod(best_mod=True)
print("Saving the model with:", self.params["best_thr"])
self.params["best_thr"] = self.params["best_thr"] + self.best_int
sys.stdout.flush()
self.lastWindowAccumRewards = 0
self.Q_accumulative = 0
self.last_steps = self.cnt
self.wins = 0
self.episodeStartTime = time.time()
if self.numeps >= self.params['num_training']:
eps_time = time.time() - self.episodeStartTime
if self.numeps == self.params['num_training']:
print("Starting Date of Training:", self.general_record_time)
print("Ending Date of Training:", time.strftime("%a_%d_%b_%Y_%H_%M_%S", time.localtime()))
print("Training time duration in minutes:", (time.time() - self.start_time) / 60)
print('Training Done (turning off epsilon)')
self.params["eps"] = 0.0 # no exploration
log_file = open(
'logs/testedModels/' + self.params["save_file"] + '-s-' + str(self.params["seed"]) + '-n-' + str(
self.params['num_games'] - self.params["num_training"]) + '.log',
'a')
log_file.write("# %4d | s: %8d | t: %.2f | r: %12f | Q: %10f | won: %r \n" %
(self.numeps, self.cnt - self.last_steps, eps_time, state.getScore(),
max(self.Q_global, default=float('nan'))
, int(self.won)))
self.last_steps = self.cnt
# save model
self.save_mod(best_mod=False)
def isInTraining(self):
"""Check is if agent is in training"""
return self.numeps < self.params["num_training"]
    def isInTesting(self):
        """Check if the agent is in the testing phase (training finished)."""
        return not self.isInTraining()
    def load_mod(self):
        """ Load data and model.

        Restores the TF checkpoint named by `save_file`-`load_file`
        (where `load_file` is an episode number or "best"), then the
        saved hyper-parameter bundle, and — when `load_data` is set —
        the pickled replay buffer. A handful of the current run's
        settings are preserved across the parameter restore. Finally the
        "load" flag is cleared so the restore happens only once.
        """
        if self.params["load"]:
            try:
                print("".join([self.path_extra, "model/", self.params["save_file"], "-", self.params["load_file"]]))
                self.saver.restore(self.sess,
                                   "".join([self.path_extra, "model/", self.params["save_file"], "-",
                                            self.params["load_file"]]))
                if not self.params["load_file"].lower() == "best":
                    print("Model Restored")
                else:
                    print("Best Model Restored")
                try:
                    load_path = "".join(
                        [self.path_extra, "parameters/", "params_", self.params["save_file"], "-",
                         self.params["load_file"].lower(), ".npy"])
                    # Parameters to be preserved for params when loading:
                    # stash the current run's values before np.load
                    # replaces self.params wholesale.
                    save, save_interval, num_tr, load_data, best_thr, eps_final, dropout, decay_lr, decay_lr_val, only_dqn, only_lin, load_file, num_games, seed, save_logs = \
                        self.params["save"], self.params["save_interval"], \
                        self.params["num_training"], self.params["load_data"], \
                        self.params["best_thr"], self.params["eps_final"], self.params["dropout"], self.params[
                            "dcy_lrl"], \
                        self.params["dcy_lrl_val"], self.params["only_dqn"], self.params["only_lin"], self.params[
                            "load_file"], self.params["num_games"], self.params["seed"], self.params["save_logs"]
                    # Load saved parameters and hyperparameters of the new starting point
                    self.last_steps, self.accumTrainRewards, self.numeps, self.params, self.local_cnt, self.cnt, self.sub_dir = np.load(
                        load_path)  #
                    orig_num_training = self.params["num_training"]
                    # Restore the preserved current-run values into params.
                    orig_save_int = self.params["save_interval"]
                    self.params["save"], self.params["save_interval"], self.params["num_training"], \
                    self.params["load_data"], self.params["best_thr"], self.params["eps_final"], self.params["dropout"], \
                    self.params["dcy_lrl"], self.params["dcy_lrl_val"], self.params["only_dqn"], self.params[
                        "only_lin"], self.params["load_file"], \
                    self.params["num_games"], self.params["seed"], self.params["save_logs"] = save, save_interval, \
                                                                                             num_tr, load_data, best_thr, eps_final, \
                                                                                             dropout, decay_lr, decay_lr_val, only_dqn, \
                                                                                             only_lin, load_file, num_games, seed, save_logs
                    if self.sub_dir == "best":
                        print("Best Parameters Restored")
                    else:
                        print("Parameters Restored")
                    if self.params["load_data"]:  # Load data and starts with correct data
                        self.params["num_training"] += orig_num_training
                        if not self.params["load_file"].lower() == "best":
                            src = "".join(
                                [self.path_extra, "data/mem_rep_", self.params["save_file"], "/",
                                 self.params["save_file"], "-", str(self.sub_dir), ".pickle"])
                            print("Interval Data to be Restored")
                        else:
                            src = "".join(
                                [self.path_extra, "data/mem_rep_", self.params["save_file"], "/",
                                 self.params["save_file"], "-", self.sub_dir, ".pickle"])
                            print("Best Data to be Restored")
                        with open(src, 'rb') as handle:
                            self.replay_buffer = pickle.load(handle)
                        print("Data Restored")
                except Exception as e:
                    print(e)
                    print("Parameters don't exist or could not be properly loaded")
            except:
                # NOTE(review): bare except swallows everything, even
                # KeyboardInterrupt — consider narrowing to Exception.
                print("Model don't exist or could not be properly loaded")
            self.params["load"] = False
    def save_mod(self, best_mod=False):
        """
        Saving model and parameters.
        Possibility of saving the best model: with best_mod=True the
        checkpoint, parameter bundle and replay buffer are written under
        the "best" suffix; otherwise interval saving triggers whenever
        numeps hits a multiple of `save_interval`.
        """
        if (self.numeps % self.params["save_interval"] == 0 and self.params["save"]) or (
                best_mod and self.params["save"]):
            self.params["global_step"] = self.cnt
            # Bundle of everything needed to resume training later.
            save_files = [self.last_steps, self.accumTrainRewards, self.numeps, self.params, self.local_cnt, self.cnt]
            try:
                if best_mod:
                    self.saver.save(self.sess,
                                    "".join([self.path_extra, "model/", self.params["save_file"], "-", "best"]))
                    print("Best Model Saved")
                elif not self.sub_dir == "best":
                    self.saver.save(self.sess, "".join(
                        [self.path_extra, "model/", self.params["save_file"], "-", str(self.numeps)]))
                    print("Model Saved")
            except Exception as e:
                print("Model could not be saved")
                print("Error", e)
            try:
                if str(self.numeps) == self.sub_dir:  # Save memory replay and parameters
                    dic = "".join(
                        [self.path_extra, "data/mem_rep_", self.params["save_file"], "/"])
                    f_name = "".join([self.params["save_file"], "-", str(self.sub_dir), ".pickle"])
                    # 1
                    save_files.append(self.sub_dir)
                    np.save("".join(
                        [self.path_extra, "parameters/", "params_", self.params["save_file"], "-", str(self.numeps)]),
                        save_files)
                    print("Pameters Saved")
                    save_rep_buf(self.replay_buffer, dic, f_name)
                    # Advance the sub_dir marker to the next interval.
                    self.sub_dir = str(self.numeps + self.params["save_interval"])
                    print("Memory Replay Saved")
                elif best_mod:  # Save memory replay of best model in directory "best" and parameters
                    dic = "".join(
                        [self.path_extra, "data/mem_rep_", self.params["save_file"], "/"])
                    f_name = "".join([self.params["save_file"], "-", "best", ".pickle"])
                    save_files.append("best")
                    np.save("".join([self.path_extra, "parameters/", "params_", self.params["save_file"], "-", "best"]),
                            save_files)
                    print("Best Pameters Saved")
                    save_rep_buf(self.replay_buffer, dic, f_name)
                    print("Best Memory Replay Saved")
            except Exception as e:
                print("Parameters could not be saved")
                print("Error", e)
|
class Solution:
    """Classic two-sum over a list of numbers."""

    def Sum(self, nums, target):
        """Return [i, j] with i < j such that nums[i] + nums[j] == target,
        or None when no such pair exists.

        Bug fix: the original had a stray `break` after the comparison
        (plus a `for/else: continue`), so only the pair (i, i + 1) was
        ever tested for each i; the inner loop now scans all j > i, and
        a miss returns None explicitly.
        """
        for i in range(len(nums)):
            for j in range(i + 1, len(nums)):
                if nums[i] + nums[j] == target:
                    return [i, j]
        return None
# Demo run. `result` replaces the original variable name `sum`,
# which shadowed the builtin of the same name.
s1 = Solution()
list1 = [1, 2, 3, 4]
target1 = 4
result = s1.Sum(list1, target1)
print(result)
|
from flask import Flask, render_template, send_file, request, jsonify
from pymongo import MongoClient
client = MongoClient('localhost', 27017)
db = client.myproject

# Bug fix: Flask must receive the module's __name__ variable, not the
# literal string '__name__', so template/static paths resolve correctly.
app = Flask(__name__)


# HTML을 불러온다.
@app.route('/')
def Home():
    """Render the landing page."""
    return render_template('index.html')
@app.route('/loginpage')
def Loginpage():
    """Render the login page."""
    return render_template('Login.html')


# Bug fix: the original reused the function name `Loginpage` for this
# route, which makes Flask raise "View function mapping is overwriting
# an existing endpoint function" at import time. Give the signup view
# its own endpoint name.
@app.route('/signup')
def Signuppage():
    # NOTE(review): still renders Login.html as the original did —
    # confirm whether a dedicated signup template was intended.
    return render_template('Login.html')
# join_us(POST) API
@app.route('/signup', methods=['POST'])
def save_info():
    """Validate the signup form and persist the new member document."""
    name_receive = request.form['name_give']
    email_receive = request.form['email_give']
    pass_receive = request.form['pass_give']
    # Validation guards, checked in the same order as before.
    if "@" not in email_receive:
        return jsonify({'msg': '이메일을 입력해주세요.'})
    if not (email_receive and pass_receive):
        return jsonify({'msg': '모두 입력해주세요'})
    if '.' not in email_receive:
        return jsonify({'msg': '이메일을 완성해주세요'})
    # 위의 과정을 전부 정상으로 받아들여졌을때 서버에 저장
    member = {
        'name': name_receive,
        'email': email_receive,
        'password': pass_receive,
    }
    db.project01.insert_one(member)
    return jsonify({'msg': '회원가입이 완료 되었습니다.'})
# login API
@app.route('/login', methods=['POST'])
def login():
    """Check submitted credentials against stored members.

    Bug fixes vs. the original:
    * `"@" and "." not in email` only tested for '.' because the string
      "@" is always truthy; both characters are now required.
    * `not a and b` parsed as `(not a) and b`; the emptiness check now
      covers both fields.
    * The loop returned a failure message after inspecting only the
      FIRST stored user; it now scans all users before failing.
    """
    user_email_receive = request.form['user_email_give']
    user_pass_receive = request.form['user_pass_give']
    user_data = list(db.project01.find({}, {'_id': False}))
    if "@" not in user_email_receive or "." not in user_email_receive:
        return jsonify({"msg": "이메일을 확인해주세요"})
    elif not (user_email_receive and user_pass_receive):
        return jsonify({'msg': '모두 입력해주세요'})
    else:
        for user in user_data:
            if user_email_receive == user['email'] and user_pass_receive == user['password']:
                return jsonify({'msg': '환영합니다.'})
        # No stored user matched both email and password.
        return jsonify({'msg': '입력하신 정보를 확인해주세요.'})
# Bug fix: the original compared the STRING '__name__' to '__main__',
# which is always False, so the server never started. The second
# app.run() call was also unreachable; keep the fully-configured one.
if __name__ == '__main__':
    app.run(host='0.0.0.0', port=5000)
|
import math

# Use the standard library's full-precision constant instead of a
# hand-rolled 5-decimal approximation.
pi = math.pi
radius = float(input("Digite o tamanho do raio do círculo: "))
print(f"A área do circulo é {pi * (radius ** 2)}")
|
import tkinter as tk  # Python 3 module name (the old `Tkinter` is Python 2)
from winsound import PlaySound, SND_FILENAME  # explicit names instead of wildcard import

root = tk.Tk()  # create tkinter window
# Plays Sound.wav (resolved against the current working directory)
# synchronously each time the button is pressed. Windows-only module.
play = lambda: PlaySound('Sound.wav', SND_FILENAME)
button = tk.Button(root, text='Play', command=play)
button.pack()
root.mainloop()
# AoC 2020 day 12 part 1: steer a ship with absolute moves (N/E/S/W),
# turns (L/R in multiples of 90 degrees) and forward moves (F) along
# the current heading. Position is tracked as [north, east].
f = open("test.txt", "r")
ins = [d.strip("\n") for d in f.readlines()]
f.close()
# NOTE(review): `dirs` is never used below.
dirs = {"N": 0, "E": 90, "S": 180, "W": 270}
facing = 90  # ship starts facing east (90 degrees)
pos = [0, 0]  # N, E
for i in ins:
    if i[0] == "R":
        # Turn right; keep heading normalised to [0, 360).
        facing += int(i[1:])
        facing = facing % 360
    elif i[0] == "L":
        facing -= int(i[1:])
        facing = facing % 360
    elif i[0] == "F":
        if facing % 180 == 0:
            # Facing north (0) or south (180): move on the N axis;
            # (90 - facing) / 90 evaluates to +1 north, -1 south.
            pos[0] += ((90 - facing) / 90) * int(i[1:])
        else:
            # Facing east (90) or west (270): move on the E axis;
            # (180 - facing) / 90 evaluates to +1 east, -1 west.
            pos[1] += ((180 - facing) / 90) * int(i[1:])
    elif i[0] == "N":
        pos[0] += int(i[1:])
    elif i[0] == "E":
        pos[1] += int(i[1:])
    elif i[0] == "S":
        pos[0] -= int(i[1:])
    elif i[0] == "W":
        pos[1] -= int(i[1:])
print(pos)
# Puzzle answer: Manhattan distance from the origin.
print(int(abs(pos[0])) + int(abs(pos[1])))
# Part 2: N/E/S/W and rotations move a waypoint relative to the ship;
# F moves the ship toward the waypoint. Layout is [north, east].
pos = [0, 0]
waypoint = [1, 10]  # waypoint starts 1 north, 10 east of the ship
for step in ins:
    op, arg = step[0], int(step[1:])
    if op == "R":
        # Each clockwise quarter-turn maps (n, e) -> (-e, n).
        for _ in range(int((arg % 360) / 90)):
            waypoint[0], waypoint[1] = -1 * waypoint[1], waypoint[0]
    elif op == "L":
        # Each counter-clockwise quarter-turn maps (n, e) -> (e, -n).
        for _ in range(int((arg % 360) / 90)):
            waypoint[0], waypoint[1] = waypoint[1], -1 * waypoint[0]
    elif op == "F":
        pos[0] += waypoint[0] * arg
        pos[1] += waypoint[1] * arg
    elif op == "N":
        waypoint[0] += arg
    elif op == "E":
        waypoint[1] += arg
    elif op == "S":
        waypoint[0] -= arg
    elif op == "W":
        waypoint[1] -= arg
print(pos)
# Puzzle answer: Manhattan distance from the origin.
print(int(abs(pos[0])) + int(abs(pos[1])))
|
#coding:utf-8
from subject import settings
import os
import xlrd
from exercise.dao import exerciseDao
def fileCon(req):
    """Save an uploaded Excel file, import its rows as exercises, then delete it.

    `req` is expected to carry 'filename', 'file' (a Django-style upload
    object with .chunks()) and 'userid'. Each data row must provide
    title/answer/tips in columns A-C. Returns a status message string.
    (Python 2 code: note the `except Exception, e` syntax.)
    """
    f_path = settings.MEDIA_ROOT + req['filename']
    # Persist the uploaded chunks under MEDIA_ROOT.
    with open(f_path,'wb+') as info:
        for chunk in req['file'].chunks():
            info.write(chunk)
    data=''
    tips = ''
    try:
        data = xlrd.open_workbook(f_path)
    except Exception,e:
        # NOTE(review): if opening fails, `data` is still the empty
        # string and data.sheets() below raises AttributeError.
        tips = str(e)
    table = data.sheets()[0]
    nrows = table.nrows  # number of rows (row 0 is treated as header)
    rs = []
    for i in range(1,nrows):
        cell_A1 = table.cell(i,0).value
        cell_A2 = table.cell(i,1).value
        cell_A3 = table.cell(i,2).value
        if cell_A1 and cell_A2 and cell_A3:
            rs.append({'title':cell_A1,
                       'answer':cell_A2,
                       'tips':cell_A3})
        else:
            # Any incomplete row flags the sheet as malformed.
            tips = "execl格式不正确"
    dao = exerciseDao({'userid':req['userid']})
    dao.insert_titles(rs)
    os.remove(f_path)
    # NOTE(review): this overwrites any earlier error/format message,
    # so the caller always sees success — confirm that is intended.
    tips = "添加成功"
    return tips
"""
Function: updateChEMBL
--------------------
Download and install the latest version of ChEMBL.
momo.sander@googlemail.com
"""
def updateChEMBL(release, user, pword, host, port):
    """Download a ChEMBL release dump and load it into a new MySQL database.

    Fetches chembl_<release>_mysql.tar.gz from the EBI FTP server —
    both the macOS `ftp` and the Linux `wget` client are attempted, and
    whichever exists on the host succeeds — then unpacks the archive,
    creates the `chembl_<release>` database and imports the dump.

    SECURITY NOTE: the arguments are interpolated directly into shell
    commands via os.system; call this only with trusted input.
    (Removed the unused `import sys` from the original.)
    """
    import os
    # On Mac...
    os.system("ftp ftp://ftp.ebi.ac.uk/pub/databases/chembl/ChEMBLdb/releases/chembl_%s/chembl_%s_mysql.tar.gz" %(release, release))
    # On Linux...
    os.system("wget ftp://ftp.ebi.ac.uk/pub/databases/chembl/ChEMBLdb/releases/chembl_%s/chembl_%s_mysql.tar.gz" %(release, release))
    os.system("tar -zxvf chembl_%s_mysql.tar.gz" % release)
    os.system("mysqladmin5 -u%s -p%s -h%s -P%s create chembl_%s" %(user, pword, host, port, release))
    os.system("mysql5 -u%s -p%s -h%s -P%s chembl_%s < chembl_%s_mysql/chembl_%s.mysqldump.sql" % ( user, pword, host, port, release, release, release))
if __name__ == '__main__':
    import sys
    import os
    # Positional CLI arguments: release user password host port.
    args = sys.argv
    release, user, pword, host, port = (str(args[k]) for k in range(1, 6))
    updateChEMBL(release, user, pword, host, port)
|
import tkinter
class Screen_Battle (tkinter.Frame):
    """A battle screen where the player and the computer trade attacks
    until one (or both) runs out of hit points."""

    def __init__ (self, master, player1, player2, call_on_next):
        super(Screen_Battle, self).__init__(master)
        # Save references to the two player objects
        self.player1 = player1
        self.player2 = player2
        # Store the maximum number of hit points which are needed on the screen display.
        self.player1_max_hp = player1.hit_points
        self.player2_max_hp = player2.hit_points
        # Save the method reference to which we return control after this page Exits.
        self.call_on_selected = call_on_next
        self.create_widgets()
        self.grid()

    def create_widgets (self):
        """Build the static layout: names, portraits, the Attack button,
        the two result labels and the HP readouts."""
        tkinter.Label(self, text="You").grid(row=3, column=0)
        tkinter.Label(self, text="Computer").grid(row=3, column=1)
        self.attack_button = tkinter.Button(self, text="Attack!", command=self.attack_clicked)
        self.attack_button.grid(row=0, column=0)
        # Keep a reference on each label (w.photo) so the PhotoImage is
        # not garbage-collected while it is displayed.
        imageLarge = tkinter.PhotoImage(file="images/" + self.player1.large_image)
        w = tkinter.Label (self, image=imageLarge)
        w.photo = imageLarge
        w.grid(row=4, column=0)
        imageLarge = tkinter.PhotoImage(file="images/" + self.player2.large_image)
        w = tkinter.Label (self, image=imageLarge)
        w.photo = imageLarge
        w.grid(row=4, column=1)
        self.youResult = tkinter.Label(self, text="")
        self.youResult.grid(row=0, column=1)
        self.cpuResult = tkinter.Label(self, text="")
        self.cpuResult.grid(row=1, column=1)
        tkinter.Label(self, text=str(self.player1.hit_points) + "/" + str(self.player1_max_hp) + "HP").grid(row=5, column=0)
        tkinter.Label(self, text=str(self.player2.hit_points) + "/" + str(self.player2_max_hp) + "HP").grid(row=5, column=1)

    def _finish_battle(self, message):
        """Announce the outcome and swap the Attack button for Exit.

        Extracted helper: the original repeated these three lines in
        each of the tie/victory branches of attack_clicked.
        """
        tkinter.Label(self, text=message).grid(row=2, column=1)
        self.attack_button.destroy()
        tkinter.Button(self, text="Exit", command=self.exit_clicked).grid(row=6, column=1, sticky=tkinter.E)

    def attack_clicked(self):
        ''' This method is called when the user presses the "Attack" button.
        This method does the following:
        1) Calls the character.attack method for both the player and the computer.
        2) Updates the labels on the top right with the result of the attacks.
        3) Determines if there is a victor.
        4) If there is a victor, removes the Attack button and replaces it with an Exit button.
        '''
        result = self.player1.attack(self.player2)
        self.youResult["text"] = result
        result = self.player2.attack(self.player1)
        self.cpuResult["text"] = result
        # NOTE(review): a player at exactly 0 HP fights on — the battle
        # only ends below 0. Preserved from the original; confirm intent.
        if self.player1.hit_points < 0 and self.player2.hit_points < 0:
            self._finish_battle("It is a tie!")
        elif self.player1.hit_points < 0:
            self._finish_battle(self.player2.name + " is victorious")
        elif self.player2.hit_points < 0:
            self._finish_battle(self.player1.name + " is victorious")
        # Refresh the HP readouts after every exchange.
        tkinter.Label(self, text=str(self.player1.hit_points) + "/" + str(self.player1_max_hp) + " HP").grid(row=5, column=0)
        tkinter.Label(self, text=str(self.player2.hit_points) + "/" + str(self.player2_max_hp) + " HP").grid(row=5, column=1)

    def exit_clicked(self):
        ''' This method is called when the Exit button is clicked.
        It passes control back to the callback method. '''
        self.call_on_selected()
|
from flask.blueprints import Blueprint
import logging
from flask_login import login_required, current_user
from waitlist.ts3.connection import send_poke
from flask import jsonify
bp = Blueprint('api_ts3', __name__)
logger = logging.getLogger(__name__)


@bp.route("/test_poke")
@login_required
def test_poke():
    """Send a test TS3 poke to the logged-in user and report success."""
    send_poke(current_user.get_eve_name(), "Test Poke")
    # Grammar fix in the user-facing message: "was send" -> "was sent".
    resp = jsonify(status_code=201, message="Poke was sent!")
    resp.status_code = 201
    return resp
|
'''
Created on 30. mar. 2017
@author: tsy
'''
class rules(object):
    '''
    Lookup tables mapping special-rule / item names to Python statement
    strings, presumably exec'd against a unit object to apply the
    modifier (every entry mutates `self.*` attributes).
    '''

    def __init__(self):
        '''
        Constructor: build the generic, per-faction and item rule tables.
        '''
        self.gen = {'Thunderous Charge':'self.S+=1',
                    'Frenzy':'self.A +=1',
                    'AP1':'self.bonus.armour +=1',
                    'Innate Defence (2+)':'self.AS-=5',
                    'Innate Defence (3+)':'self.AS-=4',
                    'Innate Defence (4+)':'self.AS-=3',
                    'Innate Defence (5+)':'self.AS-=2',
                    'Innate Defence (6+)':'self.AS-=2',
                    'Multiple Wounds (D3)':'self.special.multiple="D3"',
                    'Lightning Reflexes':'self.bonus.hit -=1',
                    'Mounts Protection (6+)': 'self.AS-=1'
                    }
        self.SE = {
                   'Forest Walker':'self.rerolls.wound=1',
                   'Blades of Cenyrn - attack':'self.A+=1',
                   'Sylvan Blades':'self.A+=1;self.bonus.armour+=1'
                   }
        self.SA = {
                   'Born Predator':'if self.rerolls.hit<1:self.rerolls.hit=1',
                   'audacity':'self.rerolls.hit = 7; self.rerolls.wound = 7'
                   }
        self.KOE = {
                    'Born Predator':'if self.rerolls.hit<1:self.rerolls.hit=1',
                    'audacity':'self.rerolls.hit = 7; self.rerolls.wound = 7',
                    'might':'self.A +=1;self.S+=1;self.special.extraAttacksOnWound =1',
                    'renown':'self.special.lethal = True;self.special.multipleWoundOnLethal ="d3+1" ',
                    'Oath: Questing Oath': 'self.special.multiple = 2',
                    'Oath: Grail Oath':'self.WS += 1',
                    'Blessing: Favour of the Grail':'if (self.bonus.armour > 0): self.WA = 5',
                    'Blessing: Favour of the King':'if (self.S >= 5): self.WA = 5',
                    'Blessed Sword':'self.rerolls.wound = 7;self.rerolls.ward = -1',
                    'Crusaders Helm':'self.AS -= 1; self.rerolls.armour = 7'
                    }
        self.magicItems = {
                           'Axe of Battle':'self.special.woundMin = 3;self.A = 6',
                           'Flesrender':'self.bonus.armour += 1;self.S += 2;self.I=0',
                           'Dragon Lance':'self.special.multiple = "D3";self.S += 2',
                           'Bluffers Helm':'self.AS -= 1;self.rerolls.wound = -1',
                           'Dragonscale Helm':'self.AS -= 1;self.special.fireborn=True',
                           'Dragon Mantle':'self.AS -= 2',
                           # Bug fix: the original ended in "else self.I -= 3",
                           # which is a SyntaxError once the string is exec'd
                           # (augmented assignment is not an expression).
                           'Hardened Shield':'self.AS -= 2; self.I = 1 if ((self.I - 3) < 1) else self.I - 3',
                           # Bug fix: was 'self.s+=2'; every other strength
                           # modifier in these tables uses the attribute S.
                           'Potion of Strength':'self.S+=2'
                           }
        self.mundaneItems = {
                             'Shield':'self.AS-=1',
                             'Halberd':'self.S+=1',  # WHAT ABOUT BOTH HANDS RULE
                             'Great Weapon':'self.S+=2;self.I=0',
                             'Lance':'self.S+=2',
                             'Barding':'self.AS-=1',
                             'spear':'self.bonus.armour +=1',
                             'Heavy Armor':'self.AS-=2'
                             }

    def makefullDict(self):
        """Merge all rule tables into `self.fullDict` (later tables win
        on duplicate keys, as in the original update order)."""
        fullList = dict(self.gen)
        fullList.update(self.SA)
        fullList.update(self.SE)
        fullList.update(self.KOE)
        fullList.update(self.magicItems)
        fullList.update(self.mundaneItems)
        self.fullDict = fullList
import math
from bitarray import bitarray
class octet_array(bitarray):
    """A bitarray subclass addressed in 8-bit (octet) units.

    Bits are least-significant-first within each octet (index 0 carries
    the 2**0 weight, as `from_val`/`val` show). Python 2 code: uses
    `xrange` and the third-party `bitarray` package.
    """
    def __init__(self, *args, **kwargs):
        super(octet_array, self).__init__(*args, **kwargs)

    @classmethod
    def from_val(cls, val):
        """Build a single 8-bit octet_array from integer `val`, LSB first."""
        new_array = octet_array(8)
        new_array.setall(0)
        shift_val = val
        for i in xrange(len(new_array)):
            # Peel off the lowest bit each iteration.
            new_array[i] = shift_val % 2
            shift_val = shift_val >> 1
        return new_array

    def get(self, index):
        """Return octet number `index` as a new 8-bit octet_array."""
        start = index*8
        end = (index+1)*8
        return octet_array(self[start:end])

    def set(self, index, val):
        """Overwrite octet number `index` with `val` (must be exactly 8 bits)."""
        if len(val) != 8:
            raise Exception("Value too long to add to octet_array")
        start = index*8
        end = (index+1)*8
        self[start:end] = val

    def val(self):
        """Return the integer value of the whole array, weighting bit i
        by 2**i (LSB-first across all octets)."""
        my_val = 0
        for i in xrange(len(self)):
            my_val += int((self[i] * math.pow(2, i)))
        return my_val
__all__ = [
'CheckRepositoryEvents',
'CheckRepositoryHook',
]
from random import randint
from limpyd_jobs import STATUSES
from gim.core.tasks.repository import RepositoryJob
class CheckRepositoryEvents(RepositoryJob):
    """
    Every minute, if the hook is not set, check the new events.
    """
    queue_name = 'check-repo-events'
    permission = 'read'

    def run(self, queue):
        """
        Get the last events of the repository to update data and fetch updated
        issues. Return the delay before a new fetch as told by github.
        Returns -1 when the job identifier is missing (job cancelled),
        None when the hook mode took over or the gh client is delayed.
        """
        super(CheckRepositoryEvents, self).run(queue)
        identifier = self.identifier.hget()
        if not identifier or identifier == 'None':
            self.status.hset(STATUSES.CANCELED)
            return -1
        # Do a check for repository existence before
        repository = self.repository
        if repository.hook_set or not repository.has_subscriptions():
            # now the hook seems set, stop going on the "check-events" mode,
            # we'll run on the "hook" mode
            # also, do not fetch events if no subscriptions for a repository
            self.status.hset(STATUSES.CANCELED)
            return
        gh = self.gh
        if not gh:
            return  # it's delayed !
        updated_issues_count, delay = repository.check_events(gh)
        # Never poll more often than once a minute.
        delay = max(delay or 60, 60)
        issues_events_count = repository.fetch_issues_events(gh)
        return updated_issues_count, issues_events_count, delay

    def on_success(self, queue, result):
        """
        Go check events again in the minimal delay given by github, but only if
        the hook is not set on this repository.
        This delay is passed as the result argument.
        """
        if result == -1:
            return
        if result:
            delay = result[2]
        else:
            delay = 60
        # Re-enqueue a clone of this job to keep polling.
        self.clone(delayed_for=delay)

    def success_message_addon(self, queue, result):
        """
        Display the count of updated issues.
        """
        updated_issues_count, issues_events_count, delay = result
        return ' [updated=%d, issues events=%d]' % (updated_issues_count, issues_events_count)
class CheckRepositoryHook(RepositoryJob):
    """
    Every 15 minutes (+-2mn), check if the hook is set and if None and if there
    is no job to fetch events every minute, create one.
    """
    queue_name = 'check-repo-hook'
    permission = 'admin'

    def run(self, queue):
        """
        Check if the hook exists for this model. If not, try to add a job to
        start checking events every minute (if one already exists, no new one
        will be added). Returns -1 on a missing identifier, None when the
        gh client is delayed, else whether the hook is set.
        """
        super(CheckRepositoryHook, self).run(queue)
        identifier = self.identifier.hget()
        if not identifier or identifier == 'None':
            self.status.hset(STATUSES.CANCELED)
            return -1
        # Do a check for repository existence before
        repository = self.repository
        gh = self.gh
        if not gh:
            return  # it's delayed !
        repository.check_hook(gh)
        return repository.hook_set

    def on_success(self, queue, result):
        """
        If the repository hook is not set, add a job to fetch events, and check
        the hook again in 15 +- 2mn.
        """
        if result is False:
            # no hook, we need to go on the "check-events" mode
            CheckRepositoryEvents.add_job(self.identifier.hget())
        elif result != -1:
            # we have a hook, stop checking events: cancel all queued
            # check-events jobs for this repository
            for j in CheckRepositoryEvents.collection(
                    queued=1, identifier=self.identifier.hget()).instances(skip_exist_test=True):
                try:
                    j.status.hset(STATUSES.CANCELED)
                except CheckRepositoryEvents.DoesNotExist:
                    continue
        if result != -1:
            # Re-check in 13 minutes plus a 0-4 minute jitter.
            self.clone(delayed_for=60 * 13 + randint(0, 60 * 4))
|
from email import Charset
from django.conf import settings
from django.core import mail
from django.core.exceptions import ValidationError
from django.core.urlresolvers import get_callable
from django.utils import translation
from django.utils.encoding import force_unicode
import commonware.log
import jingo
import tower
from emailer.models import Recipient
from html2text import html2text
from subscriptions.models import Subscription
log = commonware.log.getLogger('basket')

# Per-locale legacy encodings: mail for these languages is sent in the
# charset their common mail clients expect instead of the default.
charsets = {
    'ja': 'ISO-2022-JP',
    'it': 'ISO-8859-1',
    'de': 'ISO-8859-15',
    'fr': 'ISO-8859-15',
    'zh-CN': 'GB18030',
    'ko': 'EUC-KR',
    'cs': 'ISO-8859-2',
    'tr': 'ISO-8859-9',
}
# Register each charset with the email package so messages can be
# encoded with it.
for c in charsets.values():
    Charset.add_charset(c)
class Email(object):
    """Base class describing one campaign email.

    Subclasses override the class attributes below and are resolved by
    dotted path via `Email.get`. Rendering is localized: the template is
    rendered in `self.lang` and a legacy charset is chosen for languages
    listed in `charsets`.
    """
    # Defaults, overridden per concrete email subclass.
    id = 'email-id'
    subject = 'subject'
    lang = settings.LANGUAGE_CODE
    encoding = settings.DEFAULT_CHARSET
    from_email = settings.DEFAULT_FROM_EMAIL
    from_name = settings.DEFAULT_FROM_NAME
    reply_email = settings.DEFAULT_FROM_EMAIL
    emailer_class = 'emailer.Emailer'
    template = 'test'

    @classmethod
    def get(cls, name):
        """Resolve `name` (a dotted path) to an Email class and instantiate it."""
        email = get_callable(name)
        return email()

    @property
    def html(self):
        """Render the HTML body from emails/<template>.html in self.lang."""
        path = 'emails/{0}.html'.format(self.template)
        return jingo.env.get_template(path).render({'lang': self.lang})

    @property
    def text(self):
        """Plain-text alternative derived from the HTML body."""
        return html2text(self.html)

    def _activate_lang(self):
        # Activate translations for self.lang, then pick a charset:
        # exact language first, then language prefix, else the default.
        tower.activate(self.lang)
        lang = translation.get_language()
        if lang in charsets:
            self.encoding = charsets[lang]
        elif lang[:2] in charsets:
            self.encoding = charsets[lang[:2]]
        else:
            self.encoding = settings.DEFAULT_CHARSET

    def emailer(self, campaign, email, force=False):
        """Instantiate the configured emailer class for `campaign`."""
        emailer_class = get_callable(self.emailer_class)
        return emailer_class(campaign, email, force)

    def message(self, address):
        """Build a localized multipart (text + HTML) message for `address`."""
        self._activate_lang()
        d = {
            'subject': force_unicode(self.subject),
            'from_email': u'{0} <{1}>'.format(self.from_name, self.from_email),
            'body': self.text,
            'headers': {
                'Reply-To': self.reply_email,
                'X-Mailer': 'Basket Emailer %s' % (
                    '.'.join(map(str, settings.VERSION)))}
        }
        msg = mail.EmailMultiAlternatives(to=(address,), **d)
        msg.encoding = self.encoding
        msg.attach_alternative(self.html, 'text/html')
        return msg
class Emailer(object):
    """
    Base Emailer class.
    Given a template and a campaign, emails all active subscribers to that
    campaign that haven't received that email yet.
    Subclass and override to change behavior, such as excluding subscriptions
    based on complex criteria. For an example, check out
    lib/custom_emailers/*.py.
    (Python 2 code: note the `except ValidationError, e` syntax below.)
    """
    def __init__(self, campaign, email, force=False):
        """Initialize emailer with campaign name and email model instance."""
        self.campaign = campaign
        self.email = email
        # When force is True, subscribers who already received the email
        # are NOT excluded (see get_subscriptions).
        self.force = force

    def get_subscriptions(self):
        """
        Return all subscribers to the chosen campaign that are active and have
        not yet received this email.
        """
        subscriptions = Subscription.objects.filter(
            campaign=self.campaign, active=True)
        if not self.force:
            subscriptions = subscriptions.exclude(
                subscriber__received__email_id=self.email.id)
        return subscriptions

    def send_email(self):
        """Send out the email and record the subscriptions."""
        subscriptions = self.get_subscriptions()
        if not subscriptions:
            log.info('Nothing to do: List of subscriptions is empty.')
            return
        # One message per unique subscriber address, localized per
        # subscription locale.
        emails = dict((s.subscriber.email, s) for s in subscriptions)
        messages = []
        for (address, subscription) in emails.items():
            self.email.lang = subscription.locale
            msg = self.email.message(address)
            messages.append(msg)
        log.info('Establishing SMTP connection...')
        connection = mail.get_connection()
        connection.open()
        # We don't want to silence connection errors, but now we want to see
        # (success, failed) from send_messages).
        # NOTE(review): this relies on a backend whose send_messages
        # returns a (success, failed) pair — stock Django returns a count.
        connection.fail_silently = True
        success, failed = connection.send_messages(messages)
        log.info('%d failed messages' % len(failed))
        log.debug([x.to for x in failed])
        log.info('%d successful messages' % len(success))
        for msg in success:
            dest = msg.to[0]
            # Record that this subscriber received this email.
            sent = Recipient(subscriber_id=emails[dest].subscriber.id, email_id=self.email.id)
            try:
                sent.validate_unique()
            except ValidationError, e:
                # Already exists? Sending was probably forced.
                pass
            else:
                sent.save()
        for msg in failed:
            dest = msg.to[0]
            # Deactivate subscriptions whose delivery failed.
            email = emails[dest]
            email.active = False
            email.save()
        connection.close()
|
"""
=================== TASK 3 ====================
* Name: Area Of Circle
*
* Write a function `area_of_circle` that will
* return area enclosed by a circle of radius `r`.
* Consider that only float value for radius will
* be passed. Negative values should be considered
* as typo and function should ignore sign of a
* number.
*
* Note: Please describe in details possible cases
* in which your solution might not work.
*
* Use main() function to test your solution.
===================================================
"""
import math
def area_of_circle(r):
    """Return the area enclosed by a circle of radius ``r``.

    Accepts int or float; the sign of ``r`` is ignored (negative values are
    treated as typos per the task statement). For non-numeric input, prints
    a message and returns None.
    """
    # FIX: the original `type(r) != float` check rejected ints, so the call
    # area_of_circle(-2) in main() silently returned None. Accept any real
    # number; exclude bool, which is a subclass of int but not a radius.
    if not isinstance(r, (int, float)) or isinstance(r, bool):
        print("The radius value must be a number, try again!")
        return None
    return math.pi * abs(r) ** 2
def main():
    """Demo: compute the area for a negative (typo'd) radius and print it."""
    circle_area = area_of_circle(-2)
    print("The area of circle is", circle_area)
# FIX: guard the entry point so importing this module does not run the demo.
if __name__ == "__main__":
    main()
|
import db_functions
import email_functions  # NOTE(review): imported but unused in this script
from nova_config import config
# Initialize the DB layer from config, then print all courses and the
# Monday ('M') schedule as a quick smoke check.
db_functions.init(config)
print(db_functions.courses_all())
print(db_functions.courses_day('M'))
|
"""Write NUM_LABELS copies of LABEL, one per line, to OUT_FILE.

Usage: script.py LABEL NUM_LABELS OUT_FILE
"""
import sys
# FIX: replace `from numpy import *`, which shadowed builtins (e.g. `all`,
# `sum`) and obscured where names came from, with a named import.
import numpy as np
LABEL = sys.argv[1]
NUM_LABELS = int(sys.argv[2])
OUT_FILE = sys.argv[3]
# np.repeat builds the 1-D array of repeated labels directly.
labels = np.repeat(LABEL, NUM_LABELS)
np.savetxt(OUT_FILE, labels, fmt='%s')
|
# Candidate Wikipedia/Wikispecies article URLs for the random generator.
ur = [
    "https://en.wikipedia.org/wiki/Sachin_Tendulkar",
    "https://species.wikimedia.org/wiki/Heliconia_angusta",
    "https://en.wikipedia.org/wiki/India",
    "https://species.wikimedia.org/wiki/Agama_sinaita",
    "https://species.wikimedia.org/wiki/Phyllidia_varicosa",
    "https://species.wikimedia.org/wiki/Aepyceros_melampus",
    "https://en.wikipedia.org/wiki/Ancient_Aliens",
]
from tkinter import *
from tkinter.ttk import *  # themed widgets; overrides tkinter's Button/Label
import random
import webbrowser
# Root window of the generator UI.
master = Tk()
master.geometry("400x400")
master.title("WIKIPEDIA WEBPAGE GENERATOR")
def openNewWindow():
    """Open a child window offering a random article link from `ur`."""
    newWindow = Toplevel(master)
    newWindow.title("WIKIPEDIA WEBPAGE GENERATOR")
    newWindow.geometry("400x400")
    # FIX: random.randint(0, 5) could never select the last URL (index 6);
    # derive the range from the list length instead.
    num = random.randrange(len(ur))
    print(num)
    url = str(ur[num])
    newWindow.config(bg="black")
    def openweb():
        # Open the chosen article in the default browser.
        webbrowser.open(url)
    # url[35:] strips a fixed-length URL prefix to label the button;
    # NOTE(review): assumes every URL prefix is 35 chars — verify for new URLs.
    Btn = Button(newWindow, text=str(url[35:]) + " page", command=openweb)
    Btn.pack(padx=10, pady=19)
    btn2 = Button(newWindow, text="LOOK FOR ANOTHER ARTICLE", command=openNewWindow)
    btn2.pack(padx=10, pady=19)
    Btn3 = Button(newWindow, text="QUIT", command=newWindow.destroy)
    Btn3.pack(padx=10, pady=19)
# Main-window widgets: a caption plus a START button that spawns the
# article-picker window; mainloop() blocks until the UI is closed.
label = Label(master, text="Wikipedia Webpage Generator")
label.pack(pady=20, padx=20)
btn = Button(master, text="START", command=openNewWindow)
btn.pack(pady=10)
master.config(bg="black")
mainloop()
import pandas as pd
import seaborn as sns  # NOTE(review): imported but unused below
import csv  # NOTE(review): imported but unused below
# Load the Lang-8 learner-corpus sentence-pair CSV into a DataFrame.
data=pd.read_csv("lang-8-L1-all_original_english_sent.txt.sen.prd.line.sc_pair_furukawa_and_nishi.csv")
|
"""
绘图工具类
"""
from typing import List, Tuple
import matplotlib.pyplot as plt
import numpy as np
class Plot3:
    """Static helper for 3D line plotting with lazy one-time matplotlib setup."""
    INIT: bool = False  # True once the shared 3D axes have been created
    ax = None  # shared Axes3D instance
    @staticmethod
    def __init():
        # Configure fonts so Chinese labels and minus signs render correctly.
        plt.rcParams['font.sans-serif'] = ['SimHei']
        plt.rcParams['axes.unicode_minus'] = False
        fig = plt.figure()
        # FIX: fig.gca(projection='3d') was deprecated in matplotlib 3.4 and
        # removed in 3.6; add_subplot(projection='3d') is the replacement.
        Plot3.ax = fig.add_subplot(projection='3d')
        Plot3.ax.grid(False)
        Plot3.INIT = True
    @staticmethod
    def plot3d(lines: List[Tuple[np.ndarray, str]]) -> None:
        """Plot each (N, 3) array with its matplotlib format string."""
        if not Plot3.INIT:
            Plot3.__init()
        for lc in lines:
            x = lc[0][:, 0]
            y = lc[0][:, 1]
            z = lc[0][:, 2]
            Plot3.ax.plot(x, y, z, lc[1])
    @staticmethod
    def show():
        """Display the figure; plot3d() must have been called first."""
        if not Plot3.INIT:
            raise RuntimeError("Plot3::请在show前调用plot3d")
        plt.show()
class Plot2:
    """Static helper for 2D line plotting with lazy one-time matplotlib setup."""
    INIT = False  # True once the rcParams have been configured
    @staticmethod
    def __init():
        # Configure fonts so Chinese labels and minus signs render correctly.
        plt.rcParams['font.sans-serif'] = ['SimHei']
        plt.rcParams['axes.unicode_minus'] = False
        Plot2.INIT = True
    @staticmethod
    def plot2d(lines: List[Tuple[np.ndarray, str]]) -> None:
        """Plot each (N, 2) array with its matplotlib format string."""
        if not Plot2.INIT:
            Plot2.__init()
        for lc in lines:
            x = lc[0][:, 0]
            y = lc[0][:, 1]
            plt.plot(x, y, lc[1])
    @staticmethod
    def plot2d_xy(x: np.ndarray, y: np.ndarray, describe='r') -> None:
        """Plot y against x using matplotlib format string `describe`."""
        if not Plot2.INIT:
            Plot2.__init()
        plt.plot(x, y, describe)
    @staticmethod
    def show():
        """Display the figure; a plot call must come first."""
        if not Plot2.INIT:
            # FIX: message previously said "Plot3::...plot3d" (copy-paste bug).
            raise RuntimeError("Plot2::请在show前调用plot2d")
        plt.show()
|
import threading
import time
def descend(arr):
    """Bubble-sort a copy of `arr` into descending order, printing each pass."""
    data = list(arr)
    n = len(data)
    for rnd in range(n):
        # One bubble pass over the unsorted prefix.
        for k in range(n - rnd - 1):
            if data[k] < data[k + 1]:
                data[k], data[k + 1] = data[k + 1], data[k]
        print(data)
        time.sleep(0.02)
    print("finaldsc: ", data)
def ascend(arr):
    """Bubble-sort a copy of `arr` into ascending order, printing each pass."""
    data = list(arr)
    n = len(data)
    for rnd in range(n):
        # One bubble pass over the unsorted prefix.
        for k in range(n - rnd - 1):
            if data[k] > data[k + 1]:
                data[k], data[k + 1] = data[k + 1], data[k]
        print(data)
        time.sleep(0.01)
    print("finalasc: ", data)
if __name__ == "__main__":
    # FIX: the input() call previously sat outside the guard, so merely
    # importing this module blocked on stdin. Read it only when run directly.
    arr = [int(x) for x in input("Enter the list of numbers: ").split()]
    # Run both sorts concurrently; each works on its own copy of arr.
    t1 = threading.Thread(target=ascend, args=(arr,))
    t2 = threading.Thread(target=descend, args=(arr,))
    t1.start()
    t2.start()
    t1.join()
    t2.join()
|
from django.forms import ModelForm
from django.forms.utils import ErrorList
from bike_parts.models import BikeParts
# Сущность "запчасть" характеризуется следующими характеристиками:
# * название (обязательное поле)
# * марка (обязательное поле)
# * цена запчасти
# * телефон/email (обязательное поле)
class AddPart(ModelForm):
    """Form for creating a BikeParts entry; the `price` field is optional."""
    def __init__(self, *args, **kwargs):
        # FIX: forward all arguments instead of enumerating them. The copied
        # explicit signature silently dropped kwargs added in newer Django
        # versions (e.g. `use_required_attribute`, `renderer`).
        super().__init__(*args, **kwargs)
        # Price is optional per the spec; every other field stays required.
        self.fields['price'].required = False
    class Meta:
        model = BikeParts
        fields = ['name', 'brand', 'contact_info', 'price']
|
"""
Training function to train new GCE's with our own classifiers defined in
models.classifiers
"""
# import standard libraries
import argparse
import numpy as np
import scipy.io as sio
import os
import torch
# Import user defined libraries
from models import classifiers
from src.models.CVAE import Decoder, Encoder
import src.util as util
import src.plotting as plotting
from src.models import CNN_classifier
from src.GCE import GenerativeCausalExplainer
from src.load_mnist import *
def train_GCE(model_file, K, L, train_steps=5000,
              Nalpha=15, Nbeta=75, lam=0.05, batch_size=64,
              lr=5e-4, seed=1, retrain=False):
    """Train (or continue training) a GenerativeCausalExplainer for a classifier.

    model_file: name of a pretrained classifier checkpoint directory under
        models/classifiers/, in the format
        "<model_name>_<data_type>_<class_use>_classifier".
    K, L: number of causal / non-causal latent factors.
    train_steps, Nalpha, Nbeta, lam, batch_size, lr: GCE training parameters.
    seed: random seed (None to skip seeding).
    retrain: force training from scratch even if a saved GCE exists.

    Returns the training-info object produced by gce.train().
    Raises ValueError for a malformed model_file, or when a saved model has
    already trained for at least train_steps steps.
    """
    save_folder_root = "models/"
    # Gather params from model name
    model_params = model_file.split("_")
    if len(model_params) != 4:
        # FIX: was `raise InputError(...)` with InputError undefined anywhere,
        # which itself raised a NameError; ValueError is the right built-in.
        raise ValueError("model_file must be in the format: <model_name>_<data_type>_<class_use>_classifier and must be located in models/classifiers/")
    model_name = model_params[0]
    data = model_params[1]
    data_classes = np.array(list(model_params[2]), dtype=int)
    # Create path of GCE from other model
    gce_path = os.path.join(save_folder_root, "GCEs",
                            model_params[0] + "_" + model_params[1]
                            + "_" + model_params[2] + "_gce" +
                            "_K" + str(K) + "_L" + str(L) +
                            "_lambda" + str(lam).replace(".", ""))
    # Train from scratch when no previous checkpoint exists.
    if not os.path.exists(gce_path + "/model.pt"):
        retrain = True
    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    if seed is not None:
        np.random.seed(seed)
        torch.manual_seed(seed)
    ylabels = range(0, len(data_classes))
    # Load data
    if data.lower() == "mnist":
        X, Y, tridx = load_mnist_classSelect('train', data_classes, ylabels)
        vaX, vaY, vaidx = load_mnist_classSelect('val', data_classes, ylabels)
    elif data.lower() == "fmnist":
        X, Y, tridx = load_fashion_mnist_classSelect('train', data_classes, ylabels)
        vaX, vaY, vaidx = load_fashion_mnist_classSelect('val', data_classes, ylabels)
    elif data.lower() == 'cifar':
        from load_cifar import load_cifar_classSelect
        X, Y, _ = load_cifar_classSelect('train', data_classes, ylabels)
        vaX, vaY, _ = load_cifar_classSelect('val', data_classes, ylabels)
        # CIFAR pixels arrive as 0-255; normalize to [0, 1].
        X, vaX = X / 255, vaX / 255
    ntrain, nrow, ncol, c_dim = X.shape
    x_dim = nrow * ncol
    y_dim = len(data_classes)
    # Instantiate the classifier architecture named in model_file.
    if model_name.lower() == "inceptionnet":
        classifier = classifiers.InceptionNetDerivative(num_classes=y_dim).to(device)
    elif model_name.lower() == "resnet":
        classifier = classifiers.ResNetDerivative(num_classes=y_dim).to(device)
    elif model_name.lower() == "densenet":
        classifier = classifiers.DenseNetDerivative(num_classes=y_dim).to(device)
    elif model_name.lower() == "base":
        classifier = CNN_classifier.CNN(y_dim, c_dim, img_size=nrow).to(device)
    # Load previously trained classifier weights.
    checkpoint = torch.load('%s/model.pt' % (save_folder_root + "/classifiers/" + model_file), map_location=device)
    classifier.load_state_dict(checkpoint['model_state_dict_classifier'])
    # Train a new model
    if retrain:
        # Declare the GCE and its encoder/decoder networks.
        encoder = Encoder(K+L, c_dim, x_dim).to(device)
        decoder = Decoder(K+L, c_dim, x_dim).to(device)
        encoder.apply(util.weights_init_normal)
        decoder.apply(util.weights_init_normal)
        gce = GenerativeCausalExplainer(classifier, decoder, encoder, device,
                                        save_output=True, save_dir=gce_path + "/")
        traininfo = gce.train(X, K, L,
                              steps=train_steps,
                              Nalpha=Nalpha,
                              Nbeta=Nbeta,
                              lam=lam,
                              batch_size=batch_size,
                              lr=lr)
        torch.save({
            "model_state_dict_classifier": gce.classifier.state_dict(),
            "model_state_dict_encoder": gce.encoder.state_dict(),
            "model_state_dict_decoder": gce.decoder.state_dict(),
            "step": train_steps
        }, os.path.join(gce_path, 'model.pt'))
    # Continue training a partly trained model
    else:
        encoder = Encoder(K+L, c_dim, x_dim).to(device)
        decoder = Decoder(K+L, c_dim, x_dim).to(device)
        # Load GCE from stored model
        gce = GenerativeCausalExplainer(classifier, decoder, encoder, device,
                                        save_output=True, save_dir=gce_path + "/")
        checkpoint = torch.load(os.path.join(gce_path, 'model.pt'), map_location=device)
        gce.classifier.load_state_dict(checkpoint["model_state_dict_classifier"])
        gce.encoder.load_state_dict(checkpoint["model_state_dict_encoder"])
        gce.decoder.load_state_dict(checkpoint["model_state_dict_decoder"])
        if checkpoint["step"] < train_steps:
            print(f"Continuing training previous model from step: {checkpoint['step']}")
            traininfo = gce.train(X, K, L,
                                  steps=train_steps - checkpoint["step"],
                                  Nalpha=Nalpha,
                                  Nbeta=Nbeta,
                                  lam=lam,
                                  batch_size=batch_size,
                                  lr=lr)
            os.makedirs(gce_path, exist_ok=True)
            torch.save({
                "model_state_dict_classifier": gce.classifier.state_dict(),
                "model_state_dict_encoder": gce.encoder.state_dict(),
                "model_state_dict_decoder": gce.decoder.state_dict(),
                "step": train_steps + checkpoint["step"]
            }, os.path.join(gce_path, 'model.pt'))
        else:
            raise ValueError(f"Not continuing training previous model since {checkpoint['step']} >= {train_steps}")
    return traininfo
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--model_file", type=str, default="base_cifar_35_classifier",
                        help="Specification of path to model to be explained by GCE.")
    parser.add_argument("--batch_size", type=int, default=64,
                        help="Specification of batch size to be used.")
    parser.add_argument("--train_steps", type=int, default=3000,
                        help="Specification of training steps for GCE.")
    parser.add_argument("--lr", type=float, default=5e-4,
                        help="Specification of learning rate")
    parser.add_argument("--K", type=int, default=1,
                        help="Specification of number of causal Factors")
    parser.add_argument("--L", type=int, default=16,
                        help="Specification of number of non-causal Factors")
    parser.add_argument("--lam", type=float, default=0.05,
                        help="Specification of lambda parameter")
    parser.add_argument("--Nalpha", type=int, default=15,
                        help="Specification of number of samples to estimate alpha")
    parser.add_argument("--Nbeta", type=int, default=75,
                        help="Specification of number of samples to estimate beta")
    parser.add_argument("--seed", type=int, default=1,
                        help="Specification of random seed of this run")
    args = parser.parse_args()
    # Forward the parsed arguments directly rather than copying each one
    # into an intermediate local variable first.
    train_GCE(args.model_file, args.K, args.L, args.train_steps,
              args.Nalpha, args.Nbeta, args.lam, args.batch_size,
              args.lr, args.seed)
|
# Track liked meals per guest. Commands arrive on stdin as
# "Like-<guest>-<meal>" / "Unlike-<guest>-<meal>" until "Stop".
all_guests = {}
unliked_meals = 0
while True:
    tokens = input()
    if tokens == "Stop":
        break
    tokens = tokens.split("-")
    command = tokens[0]
    guest = tokens[1]
    meal = tokens[2]
    if command == "Like":
        if guest not in all_guests.keys():
            all_guests[guest] = []
        # Ignore duplicate likes of the same meal.
        if meal in all_guests[guest]:
            continue
        all_guests[guest].append(meal)
    elif command == "Unlike":
        if guest not in all_guests.keys():
            print(f"{guest} is not at the party.")
        elif meal not in all_guests[guest]:
            print(f"{guest} doesn't have the {meal} in his/her collection.")
        else:
            # Announce, then remove the meal and count the unlike.
            print(f"{guest} doesn't like the {meal}.")
            all_guests[guest].remove(meal)
            unliked_meals += 1
# Order guests by number of liked meals (descending), then by name.
all_guests = dict(sorted(all_guests.items(), key=lambda x: (-len(x[1]), x[0])))
for guest, meal in all_guests.items():
    print(f"{guest}: {', '.join(meal)}")
print(f"Unliked meals: {unliked_meals}")
# Copyright 2010 Alon Zakai ('kripken'). All rights reserved.
# This file is part of Syntensity/the Intensity Engine, an open source project. See COPYING.txt for licensing.
"""
Manages loading maps etc.
"""
import os, tarfile, re, httplib
import uuid
from _dispatch import Signal
from intensity.base import *
from intensity.logging import *
from intensity.utility import *
# Signals
map_load_start = Signal(providing_args=['activity_id', 'map_asset_id'])
map_load_finish = Signal() # Only sent if map loads successfully
# Globals
curr_activity_id = None ##< The activity ID of the current activity
curr_map_asset_id = None ##< The asset id of this map, whose location gives us the prefix, etc.
curr_map_prefix = None ##< Path prefix (under the asset dir) of the current map's files
def get_curr_activity_id():
    """Return the id of the currently loaded activity (None if unset)."""
    return curr_activity_id

def set_curr_activity_id(activity_id):
    """Record `activity_id` as the currently loaded activity."""
    global curr_activity_id
    curr_activity_id = activity_id

def get_curr_map_asset_id():
    """Return the asset id of the current map (None if unset)."""
    return curr_map_asset_id

def set_curr_map_asset_id(map_asset_id):
    """Record `map_asset_id` as the current map's asset id."""
    global curr_map_asset_id
    curr_map_asset_id = map_asset_id

def get_curr_map_prefix():
    """Return the path prefix of the current map's files (None if unset)."""
    return curr_map_prefix

def set_curr_map_prefix(prefix):
    """Record `prefix` as the current map's file path prefix."""
    global curr_map_prefix
    curr_map_prefix = prefix
class WorldClass:
    """Tracks the scenario (map session) currently in progress."""

    scenario_code = None  # unique id of the running scenario, None before any

    def start_scenario(self):
        """Begin a new scenario by rolling a fresh, distinct UUID code."""
        previous = self.scenario_code
        new_code = previous
        while new_code == previous:
            new_code = str(uuid.uuid4())
        self.scenario_code = new_code

    def running_map(self):
        """Return True once any scenario has been started."""
        return self.scenario_code is not None

## Singleton with current world info
World = WorldClass()
# Parses a URL to an activity, finding the activity ID, and then contacting the master to
# find the map asset id as well, for that activity
def autodiscover_activity(activity_id):
    """Resolve an activity id (or its URL form) to (activity_id, map_asset_id).

    Returns ('', '') when no master server is configured.
    NOTE: Python 2 module (httplib); performs a blocking HTTP request.
    """
    if get_config('Network', 'master_server', '') == '':
        return '', ''
    # Accept a full activity URL and extract the trailing id component.
    if '/' in activity_id:
        activity_id = re.search('/(\w+)/$', activity_id).group(1)
    # Get the map asset ID using a request to the master
    log(logging.DEBUG, 'Contacting master to find map asset ID for activity %s' % activity_id)
    conn = httplib.HTTPConnection(get_master_server())
    conn.request('GET', '/tracker/activity/view/%s/' % activity_id)
    response = conn.getresponse()
    assert(response.status == 200)
    data = response.read()
    conn.close()
    # Scrape the asset id out of the returned page.
    map_asset_id = re.search('asset/view/(\w+)/', data).group(1)
    return activity_id, map_asset_id
## Sets a map to be currently active, and starts a new scenario
## @param _map The asset id for the map (see curr_map_asset_id)
def set_map(activity_id, map_asset_id):
    """Load and activate a map for an activity; returns True on success.

    Heavy side effects: asset retrieval over the network, engine world load,
    network messages to clients, and updates to this module's globals.
    NOTE: Python 2 module (`except X, e` syntax below).
    """
    log(logging.DEBUG, "Setting the map to %s / %s" % (activity_id, map_asset_id))
    # Determine map activity and asset and get asset info
    need_lookup = True
    if Global.SERVER:
        forced_location = get_config('Activity', 'force_location', '')
        if forced_location != '':
            # A forced location skips master-server lookup entirely.
            need_lookup = False
            activity_id = '*FORCED*'
            map_asset_id = forced_location # Contains 'base/'
    else: # CLIENT
        parts = map_asset_id.split('/')
        if parts[0] == 'base':
            need_lookup = False
            set_config('Activity', 'force_location', map_asset_id)
    # If given a URL of an activity, or don't have the map asset id, autodiscover the activity and map asset ids
    # NOTE(review): `and` binds tighter than `or`, so the lookup also runs
    # when map_asset_id == '' even if need_lookup is False — confirm intended.
    if need_lookup and '/' in activity_id or map_asset_id == '':
        activity_id, map_asset_id = autodiscover_activity(activity_id)
    if need_lookup:
        try:
            asset_info = AssetManager.acquire(map_asset_id)
        except AssetRetrievalError, e:
            log(logging.ERROR, "Error in retrieving assets for map: %s" % str(e))
            if Global.CLIENT:
                CModule.show_message("Error", "Could not retrieve assets for the map: " + str(e))
                CModule.disconnect()
                CModule.logout()
            return False
    else:
        # Working entirely locally - use config location and run from there
        asset_info = AssetInfo('xyz', map_asset_id, '?', 'NONE', [], 'b')
    log(logging.DEBUG, "final setting values: %s / %s" % (activity_id, map_asset_id))
    map_load_start.send(None, activity_id=activity_id, map_asset_id=map_asset_id)
    World.start_scenario()
    # Server may take a while to load and set up the map, so tell clients
    if Global.SERVER:
        MessageSystem.send(ALL_CLIENTS, CModule.PrepareForNewScenario, World.scenario_code)
        CModule.force_network_flush() # Flush message immediately to clients
    # Set globals
    set_curr_activity_id(activity_id)
    set_curr_map_asset_id(map_asset_id)
    World.asset_info = asset_info
    curr_map_prefix = asset_info.get_zip_location() + os.sep # asset_info.location
    set_curr_map_prefix(curr_map_prefix)
    log(logging.DEBUG, "Map locations: %s -- %s ++ %s" % (asset_info.location, curr_map_prefix, AssetManager.get_full_location(asset_info)))
    # Load the geometry and map settings in the .ogz
    if not CModule.load_world(curr_map_prefix + "map"):
        log(logging.ERROR, "Could not load map %s" % curr_map_prefix)
        raise Exception("set_map failure")
    if Global.SERVER:
        # Create script entities for connected clients
        log(logging.DEBUG, "Creating scripting entities for map")
        CModule.create_scripting_entities()
        auth.InstanceStatus.map_loaded = True
        # Update master server - we are finished preparing
        auth.update_master({ 'finished_preparing': 1 })
        # Send map to all connected clients, if any
        send_curr_map(ALL_CLIENTS)
        # Initialize instance status for this new map
        auth.InstanceStatus.private_edit_mode = False
    map_load_finish.send(None)
    return True # TODO: Do something with this value
def restart_map():
    """Reload the current map from scratch, clearing cached assets first."""
    AssetManager.clear_cache() # Make sure we will load the latest assets
    set_map(get_curr_activity_id(), get_curr_map_asset_id())
## Returns the path to a file in the map script directory, i.e., a file is given in
## relative position to the current map, and we return the full path
def get_mapfile_path(relative_path):
    # Check first in the installation packages
    install_path = os.path.sep.join( os.path.join('packages', World.asset_info.get_zip_location(), relative_path).split('/') )
    if os.path.exists(install_path):
        return install_path
    # Fall back to the downloaded-asset location for this map.
    return os.path.join(World.asset_info.get_zip_location(AssetManager.get_full_location(World.asset_info)), relative_path)
## Reads a file for Scripting. Must be done safely. The path is under /packages,
## and we ensure that no attempt is made to 'break out'
def read_file_safely(name):
    """Return the contents of `name`, restricted to the asset/packages dirs.

    NOTE: Python 2 function (print statement below). The path-escape checks
    use assert, which vanishes under -O — best-effort sandboxing only.
    """
    assert(".." not in name)
    assert("~" not in name)
    assert(name[0] != '/')
    # TODO: More checks
    # Use relative paths, if asked for, or just a path under the asset dir
    if len(name) >= 2 and name[0:2] == './':
        path = get_mapfile_path(name[2:])
    else:
        path = os.path.join( get_asset_dir(), name )
    try:
        f = open(path, 'r')
    except IOError:
        try:
            install_path = os.path.join('packages', name)
            f = open(install_path, 'r') # Look under install /packages
        except IOError:
            print "Could not load file %s (%s, %s)" % (name, path, install_path)
            assert(0)
    data = f.read()
    f.close()
    return data
## Returns the path to the map script. TODO: As an option, other map script names?
def get_map_script_filename():
    """Return the full path of the current map's startup script (map.lua)."""
    return get_mapfile_path('map.lua')
## Runs the startup script for the current map. Called from worldio.loadworld
def run_map_script():
    """Read and execute the current map's startup script."""
    # FIX: the original `open(...).read()` never closed the file handle;
    # a context manager closes it deterministically.
    with open(get_map_script_filename(), "r") as f:
        script = f.read()
    log(logging.DEBUG, "Running map script...")
    CModule.run_script(script)
    log(logging.DEBUG, "Running map script complete..")
## Packages an asset for uploading, and handles some backups for internal files
## Recursively adds directories, but doesn't filter out BAK and ~ files in them, just in the root - FIXME
def upload_asset(asset_id, backup_postfix = None, num_backups = 0, num_backups_to_keep = 0):
    """Package a zipfile-style asset as a .tar.gz and upload it.

    backup_postfix: files ending with this get a timestamped .BAK copy.
    NOTE(review): relies on `shutil`/`time` coming from the star imports at
    the bottom of this module — confirm they are exported there.
    """
    asset_info = AssetManager.get_info( asset_id )
    full_location = AssetManager.get_full_location(asset_info)
    if asset_info.is_zipfile():
        prefix = asset_info.get_zip_location(full_location)
        # Create
        # (despite the variable name `zipfile`, this writes a gzipped tar)
        zip_name = prefix + ".tar.gz"
        zipfile = tarfile.open(zip_name, 'w:gz')
        filenames = os.listdir(prefix)
        total = len(filenames)
        counter = 0
        for inner_filename in filenames:
            CModule.render_progress(float(counter)/total, 'packaging archive asset...')
            if Global.CLIENT: CModule.intercept_key(0)
            counter += 1
            # Don't add backup files
            if inner_filename[-4:] != '.BAK' and inner_filename[-1] != '~':
                zipfile.add(prefix + os.sep + inner_filename, arcname = inner_filename)
                if backup_postfix is not None:
                    if inner_filename[-len(backup_postfix):] == backup_postfix:
                        shutil.copyfile(prefix + os.sep + inner_filename, prefix + os.sep + inner_filename + "." + str(time.time()) + ".BAK");
                        num_backups += 1
        zipfile.close()
        # Backups were created for the ogz and entities, do some cleaning up
        if num_backups_to_keep > 0:
            clean_up_backups(prefix, "BAK", num_backups * num_backups_to_keep)
    # Upload
    AssetManager.upload_asset(asset_info)
## @param location e.g. textures/mypack.tar.gz. No need for 'packages/'.
def upload_asset_by_location(location):
    """Upload an asset identified by its packages-relative path (Python 2)."""
    try:
        upload_asset(AssetMetadata.get_by_path('packages/' +location).asset_id)
        print "Asset %s uploaded successfully" % location
    except Exception, e:
        # Surface the failure in the client UI rather than crashing.
        CModule.show_message("Error", "Could not upload the asset to the asset server: " + str(e))
def upload_map():
    """Upload the current map asset (with backups), then ask the server to restart the map."""
    # Only upload when a master is configured and the location isn't forced.
    if get_config('Network', 'master_server', '') != '' and get_config('Activity', 'force_location', '') == '':
        try:
            upload_asset(
                get_curr_map_asset_id(),
                backup_postfix = '.js',
                num_backups = 2, # We already backed up the ogz and entities beforehand
                num_backups_to_keep = 3
            )
        except Exception, e:
            CModule.show_message("Error", "Could not upload the map to the asset server: " + str(e))
            return
    # Notify server
    MessageSystem.send(CModule.RestartMap)
def export_entities(filename):
    """Serialize the current scripting entities to `filename` inside the map dir, backing up any existing file."""
    full_path = os.path.join(get_asset_dir(), get_curr_map_prefix(), filename)
    data = CModule.run_script_string("saveEntities()")
    # Save backup, if needed
    if os.path.exists(full_path):
        try:
            shutil.copyfile(full_path, full_path + "." + str(time.time())[-6:].replace('.', '') + '.BAK')
        except:
            pass # No worries mate
    # Save new data
    out = open(full_path, 'w')
    out.write(data)
    out.close()
# Prevent loops
from intensity.asset import *
from intensity.message_system import *
from intensity.master import get_master_server
if Global.SERVER:
from intensity.server.persistence import *
|
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
from pandas import ExcelWriter
from sklearn.preprocessing import Imputer
from sklearn.datasets import load_breast_cancer
import statsmodels.api as sm
import statsmodels.formula.api as smf
from scipy.stats import t
from scipy import stats
# import CustStat as stat
from math import pow, sqrt
def clean():
    """Load the breast-cancer dataset, drop rows containing zeros, and save to Excel."""
    data = load_breast_cancer()
    df = pd.DataFrame(data.data, columns=data.feature_names)
    # Keep only rows where every column value is non-zero.
    df = df[(df != 0).all(1)]
    # FIX: ExcelWriter.save() was deprecated and removed in pandas 2.0;
    # use the writer as a context manager and pass sheet_name explicitly.
    with ExcelWriter('BreastCancer.xlsx') as writer:
        df.to_excel(writer, sheet_name='Sheet1')
def main():
    # Entry point: just run the clean/export step.
    clean()
if __name__ == "__main__":
    main()
|
#!/usr/bin/env python3
import dbus
try:
    # Look up the running Spotify player over the D-Bus session bus and
    # read its MPRIS metadata.
    session_bus = dbus.SessionBus()
    spotify_bus = session_bus.get_object("org.mpris.MediaPlayer2.spotify",
                                         "/org/mpris/MediaPlayer2")
    spotify_properties = dbus.Interface(spotify_bus,
                                        "org.freedesktop.DBus.Properties")
    metadata = spotify_properties.Get(
        "org.mpris.MediaPlayer2.Player", "Metadata")
    # The property Metadata behaves like a python dict; print "title - artist".
    print("{} - {}".format(metadata['xesam:title'], metadata['xesam:artist'][0]))
except Exception:
    # FIX: was a bare `except:`, which also swallowed KeyboardInterrupt and
    # SystemExit. Print an empty line (status-bar friendly) when Spotify
    # isn't running or the metadata keys are missing.
    print("")
|
# Project Euler #6: difference between the square of the sum and the
# sum of the squares of the first 100 natural numbers.
# FIX: modernized from a Python-2-only `print` statement and collapsed the
# two accumulator loops into sum() calls; the printed value is unchanged.
ssq = sum(i * i for i in range(1, 101))  # sum of squares 1^2 + ... + 100^2
sqs = sum(range(1, 101))                 # sum 1 + ... + 100
print(sqs ** 2 - ssq)
# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
import json
import logging
import os
import re
from dataclasses import asdict, dataclass
from functools import partial
from itertools import chain
from typing import Iterator, cast
from typing_extensions import Literal
# Re-exporting BuiltDockerImage here, as it has its natural home here, but has moved out to resolve
# a dependency cycle from docker_build_context.
from pants.backend.docker.package_types import BuiltDockerImage as BuiltDockerImage
from pants.backend.docker.registries import DockerRegistries, DockerRegistryOptions
from pants.backend.docker.subsystems.docker_options import DockerOptions
from pants.backend.docker.target_types import (
DockerBuildOptionFieldMixin,
DockerBuildOptionFieldMultiValueMixin,
DockerBuildOptionFieldValueMixin,
DockerBuildOptionFlagFieldMixin,
DockerImageContextRootField,
DockerImageRegistriesField,
DockerImageRepositoryField,
DockerImageSourceField,
DockerImageTags,
DockerImageTagsField,
DockerImageTagsRequest,
DockerImageTargetStageField,
)
from pants.backend.docker.util_rules.docker_binary import DockerBinary
from pants.backend.docker.util_rules.docker_build_context import (
DockerBuildContext,
DockerBuildContextRequest,
)
from pants.backend.docker.utils import format_rename_suggestion
from pants.core.goals.package import BuiltPackage, OutputPathField, PackageFieldSet
from pants.engine.addresses import Address
from pants.engine.fs import CreateDigest, Digest, FileContent
from pants.engine.process import FallibleProcessResult, Process, ProcessExecutionFailure
from pants.engine.rules import Get, MultiGet, collect_rules, rule
from pants.engine.target import Target, WrappedTarget, WrappedTargetRequest
from pants.engine.unions import UnionMembership, UnionRule
from pants.option.global_options import GlobalOptions, KeepSandboxes
from pants.util.strutil import bullet_list
from pants.util.value_interpolation import InterpolationContext, InterpolationError
logger = logging.getLogger(__name__)
class DockerImageTagValueError(InterpolationError):
    """Raised when interpolating a `docker_image` tag template fails."""
    pass
class DockerRepositoryNameError(InterpolationError):
    """Raised when interpolating the image repository name fails."""
    pass
class DockerBuildTargetStageError(ValueError):
    """Raised when the requested build target stage is not in the Dockerfile."""
    pass
class DockerImageOptionValueError(ValueError):
    """Raised when a docker build option field has an invalid value."""
    pass
@dataclass(frozen=True)
class DockerPackageFieldSet(PackageFieldSet):
    """Field set driving `pants package` for `docker_image` targets.

    Collects the target fields needed to compute the image refs
    (registry/repository:tag combinations) and the build context root.
    """

    required_fields = (DockerImageSourceField,)

    context_root: DockerImageContextRootField
    registries: DockerImageRegistriesField
    repository: DockerImageRepositoryField
    source: DockerImageSourceField
    tags: DockerImageTagsField
    target_stage: DockerImageTargetStageField
    output_path: OutputPathField

    def format_tag(self, tag: str, interpolation_context: InterpolationContext) -> str:
        """Interpolate placeholders in one tag template.

        Raises DockerImageTagValueError (carrying this target's address) on
        interpolation failure.
        """
        source = InterpolationContext.TextSource(
            address=self.address, target_alias="docker_image", field_alias=self.tags.alias
        )
        return interpolation_context.format(tag, source=source, error_cls=DockerImageTagValueError)

    def format_repository(
        self,
        default_repository: str,
        interpolation_context: InterpolationContext,
        registry: DockerRegistryOptions | None = None,
    ) -> str:
        """Resolve the repository name for this image.

        Template precedence: the registry's configured `repository`, then the
        target's `repository` field, then `default_repository`. The chosen
        template is interpolated with directory/name placeholders and
        lower-cased, since Docker requires lowercase repository names.
        """
        repository_context = InterpolationContext.from_dict(
            {
                "directory": os.path.basename(self.address.spec_path),
                "name": self.address.target_name,
                "parent_directory": os.path.basename(os.path.dirname(self.address.spec_path)),
                "default_repository": default_repository,
                "target_repository": self.repository.value or default_repository,
                **interpolation_context,
            }
        )
        if registry and registry.repository:
            repository_text = registry.repository
            source = InterpolationContext.TextSource(
                options_scope=f"[docker.registries.{registry.alias or registry.address}].repository"
            )
        elif self.repository.value:
            repository_text = self.repository.value
            source = InterpolationContext.TextSource(
                address=self.address, target_alias="docker_image", field_alias=self.repository.alias
            )
        else:
            repository_text = default_repository
            source = InterpolationContext.TextSource(options_scope="[docker].default_repository")
        return repository_context.format(
            repository_text, source=source, error_cls=DockerRepositoryNameError
        ).lower()

    def format_image_ref_tags(
        self,
        repository: str,
        tags: tuple[str, ...],
        interpolation_context: InterpolationContext,
        uses_local_alias: bool,
    ) -> Iterator[ImageRefTag]:
        """Yield an ImageRefTag per tag template, carrying its full image ref."""
        for tag in tags:
            formatted = self.format_tag(tag, interpolation_context)
            yield ImageRefTag(
                template=tag,
                formatted=formatted,
                # Skip empty parts so a repository-less ref has no stray ':'.
                full_name=":".join(s for s in [repository, formatted] if s),
                uses_local_alias=uses_local_alias,
            )

    def image_refs(
        self,
        default_repository: str,
        registries: DockerRegistries,
        interpolation_context: InterpolationContext,
        additional_tags: tuple[str, ...] = (),
    ) -> Iterator[ImageRefRegistry]:
        """The per-registry image refs: each returned element is a collection of the tags applied to
        the image in a single registry.
        In the Docker world, the term `tag` is used both for what we here prefer to call the image
        `ref`, as well as for the image version, or tag, that is at the end of the image name
        separated with a colon. By introducing the image `ref` we can retain the use of `tag` for
        the version part of the image name.
        This function returns all image refs to apply to the Docker image, grouped by
        registry. Within each registry, the `tags` attribute contains a metadata about each tag in
        the context of that registry, and the `full_name` attribute of each `ImageRefTag` provides
        the image ref, of the following form:
        [<registry>/]<repository-name>[:<tag>]
        Where the `<repository-name>` may contain any number of separating slashes `/`, depending on
        the `default_repository` from configuration or the `repository` field on the target
        `docker_image`.
        This method will always return at least one `ImageRefRegistry`, and there will be at least
        one tag.
        """
        image_tags = (self.tags.value or ()) + additional_tags
        registries_options = tuple(registries.get(*(self.registries.value or [])))
        if not registries_options:
            # The image name is also valid as image ref without registry.
            repository = self.format_repository(default_repository, interpolation_context)
            yield ImageRefRegistry(
                registry=None,
                repository=repository,
                tags=tuple(
                    self.format_image_ref_tags(
                        repository, image_tags, interpolation_context, uses_local_alias=False
                    )
                ),
            )
            return
        for registry in registries_options:
            repository = self.format_repository(default_repository, interpolation_context, registry)
            address_repository = "/".join([registry.address, repository])
            if registry.use_local_alias and registry.alias:
                alias_repository = "/".join([registry.alias, repository])
            else:
                alias_repository = None
            yield ImageRefRegistry(
                registry=registry,
                repository=repository,
                tags=(
                    *self.format_image_ref_tags(
                        address_repository,
                        image_tags + registry.extra_image_tags,
                        interpolation_context,
                        uses_local_alias=False,
                    ),
                    *(
                        self.format_image_ref_tags(
                            alias_repository,
                            image_tags + registry.extra_image_tags,
                            interpolation_context,
                            uses_local_alias=True,
                        )
                        if alias_repository
                        else []
                    ),
                ),
            )

    def get_context_root(self, default_context_root: str) -> str:
        """Examines `default_context_root` and `self.context_root.value` and translates that to a
        context root for the Docker build operation.
        That is, in the configuration/field value, the context root is relative to build root when
        in the form `path/..` (implies semantics as `//path/..` for target addresses) or the BUILD
        file when `./path/..`.
        The returned path is always relative to the build root.
        """
        if self.context_root.value is not None:
            context_root = self.context_root.value
        else:
            context_root = cast(
                str, self.context_root.compute_value(default_context_root, self.address)
            )
        if context_root.startswith("./"):
            # "./"-relative roots are anchored at the BUILD file's directory.
            context_root = os.path.join(self.address.spec_path, context_root)
        return os.path.normpath(context_root)
@dataclass(frozen=True)
class ImageRefRegistry:
    """All tags applied to the image within one registry (or no registry)."""
    registry: DockerRegistryOptions | None  # None => bare (registry-less) ref
    repository: str
    tags: tuple[ImageRefTag, ...]
@dataclass(frozen=True)
class ImageRefTag:
    """A single formatted tag plus the full image ref it produces."""
    template: str  # the tag before interpolation
    formatted: str  # the tag after interpolation
    full_name: str  # complete "repository:tag" image ref
    uses_local_alias: bool  # True when built from the registry's local alias
@dataclass(frozen=True)
class DockerInfoV1:
    """The format of the `$target_name.docker-info.json` file."""

    version: Literal[1]
    image_id: str
    # It'd be good to include the digest here (e.g. to allow 'docker run
    # registry/repository@digest'), but that is only known after pushing to a V2 registry
    registries: list[DockerInfoV1Registry]

    @staticmethod
    def serialize(image_refs: tuple[ImageRefRegistry, ...], image_id: str) -> bytes:
        """Encode the image refs and image id as the versioned JSON payload."""
        # make sure these are in a consistent order (the exact order doesn't matter
        # so much), no matter how they were configured
        sorted_refs = sorted(image_refs, key=lambda r: r.registry.address if r.registry else "")
        info = DockerInfoV1(
            version=1,
            image_id=image_id,
            registries=[
                DockerInfoV1Registry(
                    alias=r.registry.alias if r.registry and r.registry.alias else None,
                    address=r.registry.address if r.registry else None,
                    repository=r.repository,
                    tags=[
                        DockerInfoV1ImageTag(
                            template=t.template,
                            tag=t.formatted,
                            uses_local_alias=t.uses_local_alias,
                            name=t.full_name,
                        )
                        # consistent order, as above
                        for t in sorted(r.tags, key=lambda t: t.full_name)
                    ],
                )
                for r in sorted_refs
            ],
        )
        return json.dumps(asdict(info)).encode()
@dataclass(frozen=True)
class DockerInfoV1Registry:
    """One registry entry in the serialized docker-info JSON document."""

    # set if registry was specified as `@something`
    alias: str | None
    address: str | None
    repository: str
    tags: list[DockerInfoV1ImageTag]
@dataclass(frozen=True)
class DockerInfoV1ImageTag:
    """One tag entry in the serialized docker-info JSON document."""

    # The unrendered tag template and the tag after interpolation.
    template: str
    tag: str
    uses_local_alias: bool
    # for convenience, include the concatenated registry/repository:tag name (using this tag)
    name: str
def get_build_options(
    context: DockerBuildContext,
    field_set: DockerPackageFieldSet,
    global_target_stage_option: str | None,
    global_build_hosts_options: dict | None,
    global_build_no_cache_option: bool | None,
    target: Target,
) -> Iterator[str]:
    """Yield the extra command line options for the `docker build` invocation of `target`.

    Raises DockerBuildTargetStageError when the target's `target_stage` field names a
    stage that does not exist in the Dockerfile.
    """
    # Build options from target fields inheriting from DockerBuildOptionFieldMixin
    for field_type in target.field_types:
        if issubclass(field_type, DockerBuildOptionFieldMixin):
            # These fields support value interpolation; give them a pre-bound formatter.
            source = InterpolationContext.TextSource(
                address=target.address, target_alias=target.alias, field_alias=field_type.alias
            )
            format = partial(
                context.interpolation_context.format,
                source=source,
                error_cls=DockerImageOptionValueError,
            )
            yield from target[field_type].options(format, global_build_hosts_options)
        elif issubclass(field_type, DockerBuildOptionFieldValueMixin):
            yield from target[field_type].options()
        elif issubclass(field_type, DockerBuildOptionFieldMultiValueMixin):
            yield from target[field_type].options()
        elif issubclass(field_type, DockerBuildOptionFlagFieldMixin):
            yield from target[field_type].options()
    # Target stage: the global option wins when it names a stage present in the
    # Dockerfile; otherwise the field value is used and must name an existing stage.
    target_stage = None
    if global_target_stage_option in context.stages:
        target_stage = global_target_stage_option
    elif field_set.target_stage.value:
        target_stage = field_set.target_stage.value
        if target_stage not in context.stages:
            raise DockerBuildTargetStageError(
                f"The {field_set.target_stage.alias!r} field in `{target.alias}` "
                f"{field_set.address} was set to {target_stage!r}"
                + (
                    f", but there is no such stage in `{context.dockerfile}`. "
                    f"Available stages: {', '.join(context.stages)}."
                    if context.stages
                    else f", but there are no named stages in `{context.dockerfile}`."
                )
            )
    if target_stage:
        yield from ("--target", target_stage)
    if global_build_no_cache_option:
        yield "--no-cache"
@rule
async def build_docker_image(
    field_set: DockerPackageFieldSet,
    options: DockerOptions,
    global_options: GlobalOptions,
    docker: DockerBinary,
    keep_sandboxes: KeepSandboxes,
    union_membership: UnionMembership,
) -> BuiltPackage:
    """Build a Docker image using `docker build`."""
    # Fetch the build context and the wrapped target concurrently.
    context, wrapped_target = await MultiGet(
        Get(
            DockerBuildContext,
            DockerBuildContextRequest(
                address=field_set.address,
                build_upstream_images=True,
            ),
        ),
        Get(
            WrappedTarget,
            WrappedTargetRequest(field_set.address, description_of_origin="<infallible>"),
        ),
    )
    # Collect extra tags contributed by plugins via the DockerImageTagsRequest union.
    image_tags_requests = union_membership.get(DockerImageTagsRequest)
    additional_image_tags = await MultiGet(
        Get(DockerImageTags, DockerImageTagsRequest, image_tags_request_cls(wrapped_target.target))
        for image_tags_request_cls in image_tags_requests
        if image_tags_request_cls.is_applicable(wrapped_target.target)
    )
    image_refs = tuple(
        field_set.image_refs(
            default_repository=options.default_repository,
            registries=options.registries(),
            interpolation_context=context.interpolation_context,
            additional_tags=tuple(chain.from_iterable(additional_image_tags)),
        )
    )
    # Flatten all registries' tags into the full `registry/repo:tag` names to apply.
    tags = tuple(tag.full_name for registry in image_refs for tag in registry.tags)
    # Mix the upstream image ids into the env to ensure that Pants invalidates this
    # image-building process correctly when an upstream image changes, even though the
    # process itself does not consume this data.
    env = {
        **context.build_env.environment,
        "__UPSTREAM_IMAGE_IDS": ",".join(context.upstream_image_ids),
    }
    context_root = field_set.get_context_root(options.default_context_root)
    process = docker.build_image(
        build_args=context.build_args,
        digest=context.digest,
        dockerfile=context.dockerfile,
        context_root=context_root,
        env=env,
        tags=tags,
        extra_args=tuple(
            get_build_options(
                context=context,
                field_set=field_set,
                global_target_stage_option=options.build_target_stage,
                global_build_hosts_options=options.build_hosts,
                global_build_no_cache_option=options.build_no_cache,
                target=wrapped_target.target,
            )
        ),
    )
    result = await Get(FallibleProcessResult, Process, process)
    if result.exit_code != 0:
        # On failure, try to produce a hint about context-root / COPY mismatches
        # before surfacing the process failure itself.
        maybe_msg = format_docker_build_context_help_message(
            address=field_set.address,
            context_root=context_root,
            context=context,
            colors=global_options.colors,
        )
        if maybe_msg:
            logger.warning(maybe_msg)
        raise ProcessExecutionFailure(
            result.exit_code,
            result.stdout,
            result.stderr,
            process.description,
            keep_sandboxes=keep_sandboxes,
        )
    image_id = parse_image_id_from_docker_build_output(result.stdout, result.stderr)
    docker_build_output_msg = "\n".join(
        (
            f"Docker build output for {tags[0]}:",
            "stdout:",
            result.stdout.decode(),
            "stderr:",
            result.stderr.decode(),
        )
    )
    if options.build_verbose:
        logger.info(docker_build_output_msg)
    else:
        logger.debug(docker_build_output_msg)
    # Emit the metadata JSON file alongside the image (see DockerInfoV1).
    metadata_filename = field_set.output_path.value_or_default(file_ending="docker-info.json")
    metadata = DockerInfoV1.serialize(image_refs, image_id=image_id)
    digest = await Get(Digest, CreateDigest([FileContent(metadata_filename, metadata)]))
    return BuiltPackage(
        digest,
        (BuiltDockerImage.create(image_id, tags, metadata_filename),),
    )
def parse_image_id_from_docker_build_output(*outputs: bytes) -> str:
    """Extract the built image id from `docker build` output, or "<unknown>".

    Outputs are typically the stdout/stderr pair from the `docker build` process.
    """
    # NB: We use the extracted image id for invalidation. The short_id may theoretically
    # not be unique enough, although in a non adversarial situation, this is highly unlikely
    # to be an issue in practice.
    image_id_regexp = re.compile(
        # BuildKit output, then classic Docker output.
        r"(writing image (?P<digest>sha256:\S+) done)"
        r"|(Successfully built (?P<short_id>\S+))"
    )
    for output in outputs:
        # The id is printed near the end, so scan the lines back to front.
        for line in reversed(output.decode().split("\n")):
            found = image_id_regexp.search(line)
            if found:
                return found.group("digest") or found.group("short_id")
    return "<unknown>"
def format_docker_build_context_help_message(
    address: Address, context_root: str, context: DockerBuildContext, colors: bool
) -> str | None:
    """Build a help message for a failed docker build, or return None when no
    context/COPY issues were detected."""
    # Files found to be unreachable from `context_root` (filled in by the helper below).
    paths_outside_context_root: list[str] = []

    def _chroot_context_paths(paths: tuple[str, str]) -> tuple[str, str]:
        """Adjust the context paths in `copy_source_vs_context_source` for `context_root`."""
        instruction_path, context_path = paths
        if not context_path:
            return paths
        dst = os.path.relpath(context_path, context_root)
        if dst.startswith("../"):
            # The file is outside the configured context root, so it is unreachable.
            paths_outside_context_root.append(context_path)
            return ("", "")
        if instruction_path == dst:
            # Exact match: nothing to report for this pair.
            return ("", "")
        return instruction_path, dst

    # Adjust context paths based on `context_root`.
    copy_source_vs_context_source: tuple[tuple[str, str], ...] = tuple(
        filter(any, map(_chroot_context_paths, context.copy_source_vs_context_source))
    )
    if not (copy_source_vs_context_source or paths_outside_context_root):
        # No issues found.
        return None
    msg = f"Docker build failed for `docker_image` {address}. "
    # Pairs with a COPY source are "unsourced" copies that may be missing from the context.
    has_unsourced_copy = any(src for src, _ in copy_source_vs_context_source)
    if has_unsourced_copy:
        msg += (
            f"The {context.dockerfile} has `COPY` instructions for source files that may not have "
            f"been found in the Docker build context.\n\n"
        )
        renames = sorted(
            format_rename_suggestion(src, dst, colors=colors)
            for src, dst in copy_source_vs_context_source
            if src and dst
        )
        if renames:
            msg += (
                f"However there are possible matches. Please review the following list of "
                f"suggested renames:\n\n{bullet_list(renames)}\n\n"
            )
        unknown = sorted(src for src, dst in copy_source_vs_context_source if src and not dst)
        if unknown:
            msg += (
                f"The following files were not found in the Docker build context:\n\n"
                f"{bullet_list(unknown)}\n\n"
            )
    unreferenced = sorted(dst for src, dst in copy_source_vs_context_source if dst and not src)
    if unreferenced:
        msg += (
            f"There are files in the Docker build context that were not referenced by "
            f"any `COPY` instruction (this is not an error):\n\n{bullet_list(unreferenced, 10)}\n\n"
        )
    if paths_outside_context_root:
        unreachable = sorted({os.path.dirname(pth) for pth in paths_outside_context_root})
        # Suggest the narrowest context root that would cover every referenced file.
        context_paths = tuple(dst for src, dst in context.copy_source_vs_context_source if dst)
        new_context_root = os.path.commonpath(context_paths)
        msg += (
            "There are unreachable files in these directories, excluded from the build context "
            f"due to `context_root` being {context_root!r}:\n\n{bullet_list(unreachable, 10)}\n\n"
            f"Suggested `context_root` setting is {new_context_root!r} in order to include all "
            "files in the build context, otherwise relocate the files to be part of the current "
            f"`context_root` {context_root!r}."
        )
    return msg
def rules():
    """Return this backend's rules plus the `PackageFieldSet` union registration."""
    return [
        *collect_rules(),
        UnionRule(PackageFieldSet, DockerPackageFieldSet),
    ]
|
import numpy as np
import matplotlib.pyplot as plt
import skfuzzy as fuzz
def find_index(arr, value):
    """Return the index of the first element of `arr` equal to `value`, or -1.

    Uses enumerate() instead of Python-2-only xrange() + positional indexing, so
    it now works on both Python 2 and 3 and on any iterable, not only arrays.
    """
    for i, elem in enumerate(arr):
        if elem == value:
            return i
    return -1
def find_closest(arr, value):
    """Return the index of the element of `arr` nearest to `value`.

    np.argmin returns the first occurrence on ties, matching the strict `<`
    comparison of the original manual scan; the Python-2-only xrange() loop is
    gone, so this works under Python 3 as well.
    """
    return int(np.argmin(np.abs(arr - value)))
def eval_membership_functions(initial_values):
    '''Build the (low, high) membership-function pair for every species.

    All species share the same sampled universe, so the Gaussian low/high arrays
    are computed once for egf and aliased for the others instead of repeating
    identical fuzz.gaussmf calls.  Time uses s-/z-shaped memberships on
    initial_values[7].  Returns pairs in the order:
    egf, hrg, egfr, raf, pi3k, erk, akt, time.
    '''
    # Gaussian memberships centered at 1 (high) and 0 (low), sigma 0.1.
    egf_high = fuzz.gaussmf(initial_values[0], 1, 0.1)
    egf_low = fuzz.gaussmf(initial_values[0], 0, 0.1)
    hrg_high = egf_high
    hrg_low = egf_low
    egfr_high = egf_high
    egfr_low = egf_low
    erk_high = egfr_high
    erk_low = egfr_low
    pi3k_high = egfr_high
    pi3k_low = egfr_low
    akt_high = egfr_high
    akt_low = egfr_low
    raf_high = egfr_high
    raf_low = egfr_low
    # Time: s-shaped (rising) for high, z-shaped (falling) for low over [0, 1].
    time_high = fuzz.smf(initial_values[7], 0, 1)
    time_low = fuzz.zmf(initial_values[7], 0, 1)
    return ((egf_low, egf_high), (hrg_low, hrg_high), (egfr_low, egfr_high), (raf_low, raf_high), (pi3k_low, pi3k_high), (erk_low, erk_high), \
    (akt_low, akt_high), (time_low, time_high))
def compute_egfr(egf_value, hrg_value, time_value, initial_values, mfs ):
    """Fuzzy update of EGFR.

    Rules:
      if egf is high or hrg is high, and time is high, then egfr is high
      if egf is low and hrg is low, or time is low, then egfr is low

    `initial_values` is the (egf, hrg, egfr, time) universes and `mfs` the
    matching (low, high) membership pairs.  Returns the defuzzified EGFR value.
    """
    # Rule 1 antecedents: high memberships at the given input values.
    a1_1 = mfs[0][1][initial_values[0] == egf_value]  # egf_high at egf_value
    a1_2 = mfs[1][1][ initial_values[1] == hrg_value ]  # hrg_high at hrg_value
    a1_3 = mfs[3][1][ initial_values[3] == time_value]  # time_high at time_value
    # Value not on the sampled universe grid: use the closest sample instead.
    if( a1_1.size == 0):
        a1_1 = mfs[0][1][ find_closest(initial_values[0], egf_value)]
    if( a1_2.size == 0):
        a1_2 = mfs[1][1][ find_closest(initial_values[1], hrg_value)]
    if( a1_3.size == 0):
        a1_3 = mfs[3][1][ find_closest(initial_values[3], time_value)]
    # (egf high OR hrg high) AND time high.
    a1 = max( a1_1, a1_2)
    a1 = min(a1, a1_3 )
    # Clip the egfr-high consequent at the rule-1 activation level.
    c1 = np.fmin(np.linspace(a1, a1, 100), mfs[2][1])
    # Rule 2 antecedents: low memberships.
    a2_1 = mfs[0][0][initial_values[0] == egf_value]  # egf_low at egf_value
    a2_2 = mfs[1][0][initial_values[1] == hrg_value]  # hrg_low at hrg_value
    a2_3 = mfs[3][0][initial_values[3] == time_value]  # time_low at time_value
    if( a2_1.size == 0):
        a2_1 = mfs[0][0][ find_closest(initial_values[0], egf_value)]
    if( a2_2.size == 0):
        a2_2 = mfs[1][0][ find_closest(initial_values[1], hrg_value)]
    if( a2_3.size == 0):
        # Bug fix: the fallback previously searched the *time* universe for the
        # closest *hrg* value; it must use time_value like the direct lookup above.
        a2_3 = mfs[3][0][ find_closest(initial_values[3], time_value)]
    # (egf low AND hrg low) OR time low.
    a2 = min(a2_1, a2_2)
    a2 = max(a2, a2_3)
    c2 = np.fmin(np.linspace(a2,a2,100), mfs[2][0])
    # Aggregate both rule consequents and defuzzify by centroid.
    c_com = np.fmax(c1, c2)
    a = fuzz.defuzz(initial_values[2], c_com, 'centroid')
    return a
def compute_raf(egfr_value, akt_value, time_value, initial_values, mfs):
    """Rules---
    If egfr is high or akt is high and time is high then raf is high
    if egfr is low and akt is low or time is low then raf is low"""
    # Rule 1 antecedents: high memberships at the given input values.
    a1_1 = mfs[0][1][initial_values[0] == egfr_value]  # egfr_high at egfr_value
    a1_2 = mfs[1][1][initial_values[1] == akt_value]  # akt_high at akt_value
    a1_3 = mfs[3][1][initial_values[3] == time_value]  # time_high at time_value
    # Value not on the sampled universe grid: use the closest sample instead.
    if( a1_1.size == 0):
        a1_1 = mfs[0][1][ find_closest(initial_values[0], egfr_value)]
    if( a1_2.size == 0):
        a1_2 = mfs[1][1][ find_closest(initial_values[1], akt_value)]
    if(a1_3.size == 0):
        a1_3 = mfs[3][1][ find_closest(initial_values[3], time_value)]
    # (egfr high OR akt high) AND time high.
    a1 = max( a1_1 , a1_2 )
    a1 = min(a1_3, a1)
    #print egfr_value
    # Clip the raf-high consequent at the rule-1 activation level.
    c1 = np.fmin( np.linspace(a1, a1, 100), mfs[2][1])
    # Rule 2 antecedents: low memberships.
    a2_1 = mfs[0][0][initial_values[0] == egfr_value]  # egfr_low at egfr_value
    a2_2 = mfs[1][0][initial_values[1] == akt_value]  # akt_low at akt_value
    a2_3 = mfs[3][0][initial_values[3] == time_value]  # time_low at time_value
    if( a2_1.size == 0):
        a2_1 = mfs[0][0][ find_closest(initial_values[0], egfr_value)]
    if( a2_2.size == 0):
        a2_2 = mfs[1][0][ find_closest(initial_values[1], akt_value)]
    if( a2_3.size == 0):
        a2_3 = mfs[3][0][ find_closest(initial_values[3], time_value)]
    # (egfr low AND akt low) OR time low.
    a2 = min(a2_1 ,a2_2 )
    a2 = max(a2_3, a2)
    c2 = np.fmin( np.linspace(a2, a2, 100), mfs[2][0])
    # Aggregate both rule consequents and defuzzify over the raf universe.
    c_com = np.fmax(c1, c2)
    return fuzz.defuzz(initial_values[2], c_com, 'centroid')
def compute_pi3k(egfr_value, erk_value, time_value, initial_values, mfs):
    """Rules ---
    if egfr is high and erk is low and time is high then pi3k is high
    if egfr is low or erk is high or time is low then pi3k is low"""
    # Rule 1 antecedents: egfr-high, erk-low and time-high at the given inputs.
    a1_1 = mfs[0][1][initial_values[0] == egfr_value]  # egfr_high at egfr_value
    a1_2 = mfs[1][0][ initial_values[1] == erk_value]  # erk_low at erk_value
    a1_3 = mfs[3][1][ initial_values[3] == time_value]  # time_high at time_value
    # Value not on the sampled universe grid: use the closest sample instead.
    if( a1_1.size == 0):
        a1_1 = mfs[0][1][ find_closest(initial_values[0], egfr_value)]
    if( a1_2.size == 0):
        a1_2 = mfs[1][0][ find_closest(initial_values[1], erk_value)]
    if( a1_3.size == 0):
        a1_3 = mfs[3][1][ find_closest(initial_values[3], time_value)]
    # AND of all three antecedents.
    a1 = min(a1_1 , a1_2, a1_3)
    c1 = np.fmin( np.linspace(a1, a1, 100), mfs[2][1])
    # Rule 2 antecedents: egfr-low, erk-high and time-low.
    a2_1 = mfs[0][0][ initial_values[0] == egfr_value]  # egfr_low at egfr_value
    a2_2 = mfs[1][1][ initial_values[1] == erk_value]  # erk_high at erk_value
    a2_3 = mfs[3][0][ initial_values[3] == time_value]  # time_low at time_value
    if( a2_1.size == 0):
        a2_1 = mfs[0][0][ find_closest(initial_values[0], egfr_value)]
    if( a2_2.size == 0):
        a2_2 = mfs[1][1][ find_closest(initial_values[1], erk_value)]
    if( a2_3.size == 0):
        a2_3 = mfs[3][0][ find_closest(initial_values[3], time_value)]
    # OR of all three antecedents.
    a2 = max(a2_1 , a2_2, a2_3)
    c2 = np.fmin( np.linspace(a2, a2, 100), mfs[2][0] )
    # Aggregate both rule consequents and defuzzify over the pi3k universe.
    c_com = np.fmax(c1, c2)
    return fuzz.defuzz(initial_values[2], c_com, 'centroid')
def compute_erk(raf_value, time_value, initial_values, mfs):
    """Rules-
    If raf is high and time is high erk is high
    If raf is low or time is low then erk is low"""
    a1_1 = mfs[0][1][initial_values[0] == raf_value]  # raf_high at raf_value
    a1_2 = mfs[2][1][initial_values[2] == time_value]  # time_high at time_value
    # Value not on the sampled universe grid: use the closest sample instead.
    if( a1_1.size == 0):
        a1_1 = mfs[0][1][ find_closest(initial_values[0], raf_value)]
    if( a1_2.size == 0):
        a1_2 = mfs[2][1][ find_closest(initial_values[2], time_value)]
    # raf high AND time high.
    a1 = min(a1_1, a1_2)
    c1 = np.fmin( np.linspace(a1, a1, 100), mfs[1][1])
    a2_1 = mfs[0][0][initial_values[0] == raf_value]  # raf_low at raf_value
    a2_2 = mfs[2][0][initial_values[2] == time_value]  # time_low at time_value
    if( a2_1.size == 0):
        a2_1 = mfs[0][0][ find_closest(initial_values[0], raf_value)]
    if( a2_2.size == 0):
        a2_2 = mfs[2][0][ find_closest(initial_values[2], time_value)]
    # raf low OR time low.
    a2 = max(a2_1, a2_2)
    c2 = np.fmin( np.linspace(a2, a2, 100), mfs[1][0])
    # Aggregate both rule consequents and defuzzify over the erk universe.
    c_com = np.fmax(c1,c2)
    return fuzz.defuzz( initial_values[1], c_com, 'centroid')
def compute_akt(pi3k_value, time_value, initial_values, mfs):
    """Rules-
    If pi3k is high and time is high akt is high
    If pi3k is low or time is low then akt is low"""
    a1_1 = mfs[0][1][initial_values[0] == pi3k_value]  # pi3k_high at pi3k_value
    a1_2 = mfs[2][1][initial_values[2] == time_value]  # time_high at time_value
    # Value not on the sampled universe grid: use the closest sample instead.
    if( a1_1.size == 0):
        a1_1 = mfs[0][1][ find_closest(initial_values[0], pi3k_value)]
    if( a1_2.size == 0):
        a1_2 = mfs[2][1][ find_closest(initial_values[2], time_value)]
    # pi3k high AND time high.
    a1 = min(a1_1, a1_2)
    c1 = np.fmin( np.linspace(a1, a1, 100), mfs[1][1])
    a2_1 = mfs[0][0][initial_values[0] == pi3k_value]  # pi3k_low at pi3k_value
    a2_2 = mfs[2][0][initial_values[2] == time_value]  # time_low at time_value
    if( a2_1.size == 0):
        a2_1 = mfs[0][0][ find_closest(initial_values[0], pi3k_value)]
    if( a2_2.size == 0):
        a2_2 = mfs[2][0][ find_closest(initial_values[2], time_value)]
    # pi3k low OR time low.
    a2 = max(a2_1, a2_2)
    c2 = np.fmin( np.linspace(a2, a2, 100), mfs[1][0])
    # Aggregate both rule consequents and defuzzify over the akt universe.
    c_com = np.fmax(c1,c2)
    return fuzz.defuzz( initial_values[1], c_com, 'centroid')
def check_egfr(not_updated, prev_cond, initial_cond, time_indexes, initial_values, mfs):
    """Update EGFR (state index 2) when both of its inputs, EGF (0) and HRG (1),
    were unchanged last step; otherwise reset its elapsed-time counter to 1.
    Returns (new state vector, time_indexes)."""
    y = np.copy(initial_cond)
    if 0 in not_updated and 1 in not_updated:
        y[2] = compute_egfr(initial_cond[0], initial_cond[1], initial_values[7][ time_indexes[0] ], (initial_values[0], initial_values[1], initial_values[2],\
        initial_values[7]), (mfs[0], mfs[1], mfs[2], mfs[7]))
        time_indexes[0] = time_indexes[0] + 1
    else:
        time_indexes[0] = 1
    return (y, time_indexes)
def check_raf(y, not_updated, prev_cond, initial_cond, time_indexes, initial_values, mfs):
    """Update RAF (state index 3) from EGFR (2) and AKT (6), advancing or
    resetting its elapsed-time counter first.

    NOTE(review): this uses `or` where the analogous check_pi3k uses `and` for
    the not_updated test -- confirm which is intended; they likely should match.
    """
    if 2 in not_updated or 6 in not_updated:
        time_indexes[1] = time_indexes[1] + 1
    else:
        time_indexes[1] = 2
    y[3] = compute_raf(initial_cond[2], initial_cond[6], initial_values[7][ time_indexes[1] - 1], \
    (initial_values[2], initial_values[6], initial_values[3], initial_values[7]), (mfs[2], mfs[6], mfs[3], mfs[7]))
    return (y, time_indexes)
def check_pi3k(y, not_updated, prev_cond, initial_cond, time_indexes, initial_values, mfs ):
    """Update PI3K (state index 4) from EGFR (2) and ERK (5); the elapsed-time
    counter advances while both inputs are unchanged, else resets to 2."""
    if 2 in not_updated and 5 in not_updated:
        time_indexes[2] = time_indexes[2] + 1
    else:
        time_indexes[2] = 2
    y[4] = compute_pi3k(initial_cond[2], initial_cond[5], initial_values[7][ time_indexes[2] - 1], \
    (initial_values[2], initial_values[5], initial_values[4], initial_values[7]), (mfs[2], mfs[5], mfs[4], mfs[7]))
    return (y, time_indexes)
def check_erk(y, not_updated, prev_cond, initial_cond, time_indexes, initial_values, mfs ):
    """Update ERK (state index 5) from RAF (3); the elapsed-time counter
    advances while RAF is unchanged, else resets to 2."""
    if 3 in not_updated:
        time_indexes[3] = time_indexes[3] + 1
    else:
        time_indexes[3] = 2
    y[5] = compute_erk(initial_cond[3], initial_values[7][ time_indexes[3]-1], \
    (initial_values[3], initial_values[5], initial_values[7]), (mfs[3], mfs[5], mfs[7]))
    return (y, time_indexes)
def check_akt(y, not_updated, prev_cond, initial_cond, time_indexes, initial_values, mfs ):
    """Update AKT (state index 6) only while PI3K (4) is unchanged; otherwise
    reset its elapsed-time counter to 1 and leave y[6] as-is.

    NOTE(review): unlike check_raf/check_pi3k/check_erk, no update happens on
    the reset path here (mirroring check_egfr) -- confirm this asymmetry.
    """
    if 4 in not_updated:
        y[6] = compute_akt(initial_cond[4], initial_values[7][ time_indexes[4]], \
        (initial_values[4], initial_values[6], initial_values[7]), (mfs[4], mfs[6], mfs[7]))
        time_indexes[4] = time_indexes[4] + 1
    else:
        time_indexes[4] = 1
    return (y, time_indexes)
def rules(prev_cond, initial_cond, time_indexes, values_and_mfs):
    """Apply all species update rules for one simulation step.

    `values_and_mfs` is the (initial_values, mfs) pair; callers already pass a
    tuple, so unpacking it in the body keeps the call signature identical while
    removing the Python-2-only tuple-parameter syntax (a SyntaxError on py3).
    Returns (y, time_indexes) with the updated state vector and counters.
    """
    initial_values, mfs = values_and_mfs
    # Indices of species whose value did not change since the previous step.
    not_updated = []
    for i in range(len(prev_cond)):
        if prev_cond[i] == initial_cond[i]:
            not_updated.append(i)
    y, time_indexes = check_egfr(not_updated, prev_cond, initial_cond, time_indexes, initial_values, mfs )
    y, time_indexes = check_raf(y, not_updated, prev_cond, initial_cond, time_indexes, initial_values, mfs )
    y, time_indexes = check_pi3k(y, not_updated, prev_cond, initial_cond, time_indexes, initial_values, mfs )
    y, time_indexes = check_erk(y, not_updated, prev_cond, initial_cond, time_indexes, initial_values, mfs )
    y, time_indexes = check_akt(y, not_updated, prev_cond, initial_cond, time_indexes, initial_values, mfs )
    return (y,time_indexes)
def main():
    """Run the synchronous fuzzy simulation of the signalling network and plot
    the EGFR trajectory (state index 2) over time."""
    #Universal sets
    #c_egfr_aggregated = compute_initial_egfr(egf, hrg, egfr)
    #c_raf_aggregated = compute_initial_raf(egfr, akt, raf)
    #c_pi3k_aggregated = compute_initial_pi3k(egfr, erk, pi3k)
    # State vector: [egf, hrg, egfr, raf, pi3k, erk, akt].
    initial_cond = np.array([1, 1, 0, 0, 0, 0, 0], dtype = "float64")
    time_stop = 10
    y = np.copy(initial_cond)
    y.resize(1, 7)
    step = 1
    # Every species shares the same sampled universe [0, 1] (aliased arrays).
    egf = np.linspace(0, 1, 100)
    hrg = egf
    egfr = egf
    akt = egf
    raf = egf
    pi3k = egf
    erk = egf
    time = np.linspace(0, 10, 1000)
    vals = (egf, hrg, egfr, raf, pi3k, erk, akt, time)
    mfs = eval_membership_functions(vals)
    #print rules(y[0], 0, (vals,mfs))
    # Per-species elapsed-time counters used by the check_* functions.
    times = [ 1, 1, 1, 1, 1]
    for i in xrange(1, time.size):
        # NOTE(review): at i == 1, y[i-2] is y[-1] == y[0]; presumably intended.
        temp, times = rules(y[i-2], y[i - 1], times , (vals, mfs))
        #print times
        y = np.vstack((y, temp))
    #for i in xrange(290,400):
    #    print y[i,6]
    #    if i > 290 and i < 350:
    #        print times
    #print time[i ], y[i]
    #for i in xrange(200,400):
    #    print y[i]
    #for i in xrange(400):
    #    print y[i]
    # Plot the EGFR trajectory.
    plt.title("Synch")
    lines = plt.plot(time, y[:,2] )
    plt.legend(loc='upper right')
    plt.xlabel('Time')
    plt.ylabel('Species')
    plt.axis([-0.2,10.1,-0.05,1.2])
    plt.grid(True)
    plt.show()
# Script entry point.
if __name__ == "__main__":
    main()
from .settings_utils import get_root_prefix, get_scope_prefix
def get_quote(str):
    """Return the quote character that appears in the given string.

    If only one kind of quote occurs, return it; if both occur, return the one
    appearing first; if neither occurs, return None.
    """
    single = '\''
    double = '"'
    has_single = single in str
    has_double = double in str
    if has_double and not has_single:
        return double
    if has_single and not has_double:
        return single
    if has_single and has_double:
        # Both present: whichever occurs first wins.
        return str[min(str.find(single), str.find(double))]
    return None
def get_prefix(str):
    """Return the known prefix (root checked first, then scope) that the given
    string starts with, or None when it starts with neither."""
    for candidate in (get_root_prefix(), get_scope_prefix()):
        if str.startswith(candidate):
            return candidate
    return None
|
import os,sys,glob,pdb
import numpy as np
import scikits.audiolab as audiolab
import scikits.samplerate as samplerate
from gablab import *
'''
----------------
Processing function
_____________________
'''
def Process(z,nvar,flag):
    """Denoise signal `z` by sparse reconstruction over a Gabor dictionary.

    z: noisy input signal; nvar: error budget (noise variance) for the solver;
    flag: 'BPDN' or 'GBPDN' selects the objective.  Returns the reconstructed
    signal trimmed back to the input length.  (Solver internals live in gablab;
    parameter semantics assumed from names -- TODO confirm against gablab.)
    """
    L = len(z)
    D = GaborBlock(L,1024,4)
    z = np.hstack((z,np.zeros(D.M-L))) # pad to block boundary
    # Map coefficient indices into a (fftLen x frames) grid for the tone prior.
    tonemap = np.reshape(range(D.N),(D.N/D.fftLen,D.fftLen)).transpose()
    # choose objective function
    if flag=='BPDN':
        f,fgrad = BP_factory()
    elif flag=='GBPDN':
        f,fgrad = Tone_factory(tonemap,gamma=0.5)
    else: raise Exception('Unrecognized option')
    # Solve for sparse coefficients, synthesize, and keep the real part.
    xe = GBPDN_momentum(D,z,f,fgrad,maxerr=nvar,maxits=500,stoptol=1e-3,muinit=1e-1,momentum=0.9,smoothinit=1e-5,anneal=0.96)
    ye = np.real(D.dot(xe))
    return ye[:L]
def fail_usage():
    """Print command-line usage for this script.

    Fixes in the message text: the invocation was written backwards
    ("python.py denoising_experiment" for what the module docstring shows as
    "python denoising_experiment.py"), and the mode was misspelled "GPBDN"
    although the code checks for 'GBPDN'.  Single-argument print(...) is valid
    in both Python 2 and 3, unlike the former print statements.
    """
    print('usage:')
    print('  python denoising_experiment.py [dir] [mode]')
    print('  [dir] is the dataset directory')
    print('  [mode] can be either BPDN or GBPDN')
def get_dataset(root):
    """Download the denoising dataset into `root` if any files are missing.

    Creates one subdirectory per noise level (plus `Original`) and fetches each
    missing wav file with wget, mirroring the behavior of the former four
    copy-pasted directory/download stanzas in a single data-driven loop.
    """
    http = 'http://homepage.univie.ac.at/monika.doerfler/'
    nums = range(1, 7)
    # (subdirectory, filename pattern) for each signal group.
    groups = [
        ('Original', ['sig_%d.wav' % n for n in nums]),
        ('0dB', ['sig_n0_%d.wav' % n for n in nums]),
        ('10dB', ['sig_n10_%d.wav' % n for n in nums]),
        ('20dB', ['sig_n20_%d.wav' % n for n in nums]),
    ]
    for subdir, files in groups:
        target_dir = os.path.join(root, subdir)
        if not os.path.exists(target_dir):
            os.makedirs(target_dir)
        for f in files:
            dest = os.path.join(target_dir, f)
            if not os.path.exists(dest):
                # NOTE(review): shells out to wget with hard-coded filenames;
                # do not pass untrusted paths through `root`.
                os.system('wget %s%s -O %s' % (http, f, dest))
'''
------------
Main program
____________
This experiment requires data which may be downloaded from:
http://homepage.univie.ac.at/monika.doerfler/StrucAudio.html
This script will attempt to automatically download the dataset
if it is not detected (requires wget, see the get_dataset() function)
Example usage:
python denoising_experiment.py /Users/corey/School/Datasets/kai BPDN
'''
if __name__ == '__main__':
    # Fix: compare argv length with != rather than the identity check `is not`,
    # which relies on CPython small-int caching and is a SyntaxWarning on 3.8+.
    if len(sys.argv) != 3:
        fail_usage()
        exit()
    root = sys.argv[1]
    mode = sys.argv[2]
    dirs = ['0dB', '10dB', '20dB']  # files for each noise-level are kept in their own directory
    get_dataset(root)  # attempt to get dataset if missing
    # Ensure one output directory per noise level / mode combination.
    for d in dirs:
        output_dir = os.path.join(root, d + '_' + mode)
        if not os.path.exists(output_dir):
            os.makedirs(output_dir)
    origFiles = glob.glob(os.path.join(root, 'Original', '*.wav'))
    for d in dirs:
        noisyFiles = glob.glob(os.path.join(root, d, '*.wav'))
        # Pair each original with its noisy counterpart (glob order assumed to
        # match -- TODO confirm the filename ordering lines up).
        for a, b in zip(origFiles, noisyFiles):
            f = os.path.split(b)[1]
            # Single-argument print(...) works in both Python 2 and 3.
            print('Processing %s' % f)
            print('-----------------------------')
            y = audiolab.wavread(a)[0]
            z = audiolab.wavread(b)[0]
            r = y - z
            # Input SNR from the clean/noisy residual; use it as the error budget.
            inSNR = 10 * np.log10(y.dot(y) / r.dot(r))
            nvar = np.sum(np.abs(y) ** 2) / (10 ** (inSNR / 10))
            ye = Process(z, nvar, mode)
            r = y - ye
            outSNR = 10 * np.log10(y.dot(y) / r.dot(r))
            print('File: %s, Input SNR = %f, output SNR = %f' % (f, inSNR, outSNR))
            audiolab.wavwrite(ye, os.path.join(root, d + '_' + mode, 't_' + f), 44100.)
|
import sys, re, datetime
def _coalesce(dates):
_dates = []
for i in xrange(len(dates)):
added = False
for j in xrange(len(_dates)):
if _dates[j][0] > dates[i][1] or _dates[j][1] < dates[i][0]:
pass
else:
if _dates[j][0] <= dates[i][0] <= _dates[j][1]:
if dates[i][1] > _dates[j][1]:
_dates[j][1] = dates[i][1]
added = True
elif _dates[j][0] <= dates[i][1] <= _dates[j][1]:
if dates[i][0] < _dates[j][0]:
_dates[j][0] = dates[i][0]
added = True
elif _dates[j][0] < dates[i][0] and _dates[j][1] > dates[i][1]:
added = True
elif _dates[j][0] > dates[i][0] and _dates[j][1] < dates[i][1]:
_dates[j] = dates[i]
if not added:
_dates.append(dates[i])
return _dates
def coalesce(dates):
    """Repeatedly merge overlapping ranges until the list stops shrinking."""
    while True:
        size_before = len(dates)
        dates = _coalesce(dates)
        if len(dates) == size_before:
            return dates
def process(line):
    """Parse all 'Mon YYYY-Mon YYYY' ranges from `line`, coalesce overlaps, and
    print the total elapsed time as a timedelta.

    Fixes: the final print was missing its closing parenthesis (a SyntaxError),
    Python-2-only xrange() is replaced with range(), and the unused `odates`
    alias is removed.
    """
    dates = re.findall(r'(\w{3} \d{4})-(\w{3} \d{4})', line)
    dates = [[datetime.datetime.strptime(i, "%b %Y") for i in date] for date in dates]
    # Extend each end date from the first to the last day of its month, making
    # the ranges inclusive of the whole end month.
    for i in range(len(dates)):
        if dates[i][1].month == 12:
            dates[i][1] = dates[i][1].replace(day=31)
        else:
            dates[i][1] = dates[i][1].replace(month=dates[i][1].month + 1)
            dates[i][1] = dates[i][1] - datetime.timedelta(days=1)
    dates = coalesce(dates)
    # Accumulate the total duration as an offset from an arbitrary epoch.
    exp = datetime.datetime(1, 1, 1)
    for date in dates:
        exp += date[1] - date[0]
    print(exp - datetime.datetime(1, 1, 1))
# Driver: process each line of the file named by the first CLI argument.
with open(sys.argv[1],'r') as f:
    for line in f:
        process(line)
|
def solve(s):
    """Convert s to all-uppercase when uppercase letters strictly dominate,
    otherwise to all-lowercase (ties go to lowercase)."""
    upper_count = sum(1 for ch in s if ch.isupper())
    if upper_count > len(s) // 2:
        return s.upper()
    return s.lower()
'''
In this Kata, you will be given a string that may have mixed uppercase and lowercase
letters and your task is to convert that string to either lowercase only
or uppercase only based on:
make as few changes as possible.
if the string contains equal number of uppercase and lowercase letters,
convert the string to lowercase.
For example:
solve("coDe") = "code". Lowercase characters > uppercase. Change only the "D" to lowercase.
solve("CODe") = "CODE". Uppercase characters > lowercase. Change only the "e" to uppercase.
solve("coDE") = "code". Upper == lowercase. Change all to lowercase.
'''
|
from rest_framework.exceptions import NotAuthenticated
from rest_framework.authentication import BaseAuthentication
from rest_framework.permissions import BasePermission
class MMRestAuthentication(BaseAuthentication):
    """DRF authentication backend backed by the Django session."""

    def authenticate(self, request):
        """Return a (user, token) pair from the session, or raise NotAuthenticated
        when no user is recorded there."""
        session_user = request.session.get('user', None)
        if not session_user:  # check if logged in
            raise NotAuthenticated()
        return (session_user, None)
class MMRestPermission(BasePermission):
    """Grants access when the session user holds every required permission group."""

    # Subclasses may set this to a collection of required group names.
    required_permissions = None

    def _is_permission_restricted(self):
        """Truthy when a non-empty permission collection has been configured."""
        if self.required_permissions is None:
            return False
        return self.required_permissions

    def has_permission(self, request, view):
        """Allow anonymous-free access checks against the session user's groups."""
        session_user = request.session.get('user', None)
        if session_user is None:
            return False
        if not self._is_permission_restricted():
            # No restrictions configured: any logged-in user passes.
            return True
        return set(self.required_permissions).issubset(set(session_user.groups))
|
# Practice with if-statements: read one integer and report several facts
# about it, then read three scores and report pass/fail on their average.
import sys

value = int(sys.stdin.readline())

# Divisibility by 5.
print("5의 배수입니다" if value % 5 == 0 else "5의 배수 아님")

# Range check: strictly between 0 and 100.
print("정상" if 0 < value < 100 else "비정상")

# Divisible by both 2 and 3 — i.e. by 6 — echo the number itself.
if value % 6 == 0:
    print(value)

# Average of three scores; 60 or more passes.
score_a, score_b, score_c = map(int, sys.stdin.readline().split())
average = (score_a + score_b + score_c) / 3
print("합격" if average >= 60 else "불합격")
# if practice: ask for a name and report whether it is on the guest list.
names = ["laowang", "laowen", "laotian", "laomu", "laohao"]
name = input("pls input your name:")
if name not in names:
    print("who you're? you are not in list")
else:
    print("it's ok.you are in list")
|
# Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
# IMPORTANT:
# Before adding or updating dependencies, please review the documentation here:
# https://github.com/dart-lang/sdk/wiki/Adding-and-Updating-Dependencies
#
# Packages can be rolled to the latest version with `tools/manage_deps.dart`.
#
# For example
#
# dart tools/manage_deps.dart bump third_party/pkg/dart_style
allowed_hosts = [
'boringssl.googlesource.com',
'chrome-infra-packages.appspot.com',
'chromium.googlesource.com',
'dart.googlesource.com',
'dart-internal.googlesource.com',
'fuchsia.googlesource.com',
'llvm.googlesource.com',
]
vars = {
# The dart_root is the root of our sdk checkout. This is normally
# simply sdk, but if using special gclient specs it can be different.
"dart_root": "sdk",
# We use mirrors of all github repos to guarantee reproducibility and
# consistency between what users see and what the bots see.
# We need the mirrors to not have 100+ bots pulling github constantly.
# We mirror our github repos on Dart's git servers.
# DO NOT use this var if you don't see a mirror here:
# https://dart.googlesource.com/
"dart_git": "https://dart.googlesource.com/",
"dart_internal_git": "https://dart-internal.googlesource.com",
# If the repo you want to use is at github.com/dart-lang, but not at
# dart.googlesource.com, please file an issue
# on github and add the label 'area-infrastructure'.
# When the repo is mirrored, you can add it to this DEPS file.
# Chromium git
"chromium_git": "https://chromium.googlesource.com",
"fuchsia_git": "https://fuchsia.googlesource.com",
"llvm_git": "https://llvm.googlesource.com",
# Checked-in SDK version. The checked-in SDK is a Dart SDK distribution in a
# cipd package used to run Dart scripts in the build and test infrastructure,
# which is automatically built on the release commits.
# Use a dev commit because Windows ARM64 is not built on beta or stable.
"sdk_tag": "version:3.1.0-298.0.dev",
# co19 is a cipd package. Use update.sh in tests/co19[_2] to update these
# hashes.
"co19_rev": "910330408dd6af6b8f58c5d26464dbe0ce76e476",
# This line prevents conflicts when both packages are rolled simultaneously.
"co19_2_rev": "0454b178fdf6697e898b5e5c7ee553a9bc266faa",
# The internal benchmarks to use. See go/dart-benchmarks-internal
"benchmarks_internal_rev": "f048a4a853e3062056d39c3db100acdde42f16d6",
"checkout_benchmarks_internal": False,
# Checkout the flute benchmark only when benchmarking.
"checkout_flute": False,
# Checkout Android dependencies only on Mac and Linux.
"download_android_deps":
"host_os == mac or (host_os == linux and host_cpu == x64)",
# Checkout extra javascript engines for testing or benchmarking.
# d8, the V8 shell, is always checked out.
"checkout_javascript_engines": False,
"d8_tag": "version:11.6.145",
"jsshell_tag": "version:95.0",
# As Flutter does, we use Fuchsia's GN and Clang toolchain. These revision
# should be kept up to date with the revisions pulled by the Flutter engine.
# The list of revisions for these tools comes from Fuchsia, here:
# https://fuchsia.googlesource.com/integration/+/HEAD/toolchain
# If there are problems with the toolchain, contact fuchsia-toolchain@.
"clang_version": "git_revision:6d667d4b261e81f325756fdfd5bb43b3b3d2451d",
"gn_version": "git_revision:e3978de3e8dafb50a2b11efa784e08699a43faf8",
# Update from https://chrome-infra-packages.appspot.com/p/fuchsia/sdk/gn
"fuchsia_sdk_version": "version:12.20230407.0.1",
"download_fuchsia_deps": False,
# Ninja, runs the build based on files generated by GN.
"ninja_tag": "version:2@1.11.1.chromium.7",
# Scripts that make 'git cl format' work.
"clang_format_scripts_rev": "bb994c6f067340c1135eb43eed84f4b33cfa7397",
### /third_party/ dependencies
# Prefer to use hashes of binaryen that have been reviewed & rolled into g3.
"binaryen_rev" : "cdb7aeab40b4c522de20b242019f7e88641445d5",
"boringssl_gen_rev": "a468ba9fec3f59edf46a7db98caaca893e1e4d96",
"boringssl_rev": "74646566e93de7551bfdfc5f49de7462f13d1d05",
"browser-compat-data_tag": "ac8cae697014da1ff7124fba33b0b4245cc6cd1b", # v1.0.22
"devtools_rev": "acbc179425b4596b7c2ba7d9c4263077f2e18098",
"icu_rev": "81d656878ec611cb0b42d52c82e9dae93920d9ba",
"jinja2_rev": "2222b31554f03e62600cd7e383376a7c187967a1",
"libcxx_rev": "44079a4cc04cdeffb9cfe8067bfb3c276fb2bab0",
"libcxxabi_rev": "2ce528fb5e0f92e57c97ec3ff53b75359d33af12",
"libprotobuf_rev": "24487dd1045c7f3d64a21f38a3f0c06cc4cf2edb",
"markupsafe_rev": "8f45f5cfa0009d2a70589bcda0349b8cb2b72783",
"perfetto_rev": "b8da07095979310818f0efde2ef3c69ea70d62c5",
"ply_rev": "604b32590ffad5cbb82e4afef1d305512d06ae93",
"protobuf_gn_rev": "ca669f79945418f6229e4fef89b666b2a88cbb10",
"root_certificates_rev": "692f6d6488af68e0121317a9c2c9eb393eb0ee50",
"WebCore_rev": "bcb10901266c884e7b3740abc597ab95373ab55c",
"zlib_rev": "14dd4c4455602c9b71a1a89b5cafd1f4030d2e3f",
### /third_party/pkg dependencies
# 'tools/rev_sdk_deps.dart' can rev pkg dependencies to their latest; put an
# EOL comment after a dependency to disable this and pin it at its current
# revision.
"args_rev": "da56b18ebcb600e050bf57b9c1103b1d2a9fb2ff",
"async_rev": "b65622afa33c5bfc574ae6b34d5a61f18a98f83c",
"bazel_worker_rev": "c29d1620b1a935dc88d13a4eec0d9950d3e9df27",
"benchmark_harness_rev": "fde73cb8810b1f8efed41e1994bc7b8327047379",
"boolean_selector_rev": "303635d0262e679fb6a81686724a5dc1dbc850a7",
"browser_launcher_rev": "27ec600af41b0d0ebe9a3db6ad36e9ed11976b84",
"characters_rev": "ec844db851b940d9013719f8474f064e35a01d0f",
"cli_util_rev": "9b7ce784c2889d62be0d6f66022331cb1e53b5b6",
"clock_rev": "263e508a36ed90e4d85b60dd70552d20e71a9ae9",
"collection_rev": "1a9b7eb64be10a8ba4ced7eb36b4b265a49d5d41",
"convert_rev": "79ee174280149817f9925db0613983aadb46eeca",
"crypto_rev": "8b704c601f4843050624cd334e3b74f6c17315a4",
"csslib_rev": "7e91228c2c2428455e5bc63bbf89c7bf0f3401b0",
# Note: Updates to dart_style have to be coordinated with the infrastructure
# team so that the internal formatter `tools/sdks/dart-sdk/bin/dart format`
# matches the version here. Please follow this process to make updates:
#
# * Create a commit that updates the version here to the desired version and
# adds any appropriate CHANGELOG text.
# * Send that to eng-prod to review. They will update the checked-in SDK
# and land the review.
#
# For more details, see https://github.com/dart-lang/sdk/issues/30164.
"dart_style_rev": "2956b1a705953f880a5dae9d3a0969df0fc45e99", # disable rev_sdk_deps.dart
"dartdoc_rev": "5fda5eb2e004b6cf7c73fbcffbc246a71119be98",
"ecosystem_rev": "f777da70c65d158fa3b9dbfe7483bdc70b67c709",
"ffi_rev": "e2c01a960b84d1074b0a1849909ae2d269d004be",
"file_rev": "5d9a6027756b5846e8f5380f983390f61f564a75",
"fixnum_rev": "00fa1207768bd07d04c895cbe0f1fe99af14e727",
"flute_rev": "f42b09f77132210499ec8ed819a60c260af03db6",
"glob_rev": "5b243935154daf53c54981b98f625bace90b2112",
"html_rev": "4060496b0443451c38f8b789db2e44c0d7966171",
"http_rev": "cad7d609b18512d74cc30ef8ad9faf02d2ea4451",
"http_multi_server_rev": "aa128cfaf6ef1c9c1ace962ca2dcf6e5dddad441",
"http_parser_rev": "c14fbf6aa7ada5e8912eab4581eb26ff4d101452",
"intl_rev": "5d65e3808ce40e6282e40881492607df4e35669f",
"json_rpc_2_rev": "509f71eef90ec5afb5486b69dab7fed97b9f1eef",
"leak_tracker_rev": "098bafcf99a5220e3c352d895d991e163568ee03", # b/292240713
"lints_rev": "54cd7a033881ccfd9ec66133bf9a4f128870cb9e",
"logging_rev": "521498757ed3eeae151c2d4796404e8947baa04c",
"markdown_rev": "56e75df897ac01a886358e79124844977aa8157c",
"matcher_rev": "ce8f40934c90e12992071172795b3bca29fac295",
"mime_rev": "799b398140817fdb134f639d84e91c552e129136",
"mockito_rev": "f5abf11f8e21e61eebc2081e322bdfcab057e988",
"native_rev": "5a1361b6d98a84f8070c97872e3d3587fc0ba435",
"package_config_rev": "981c49dfec1e3e3e90f336dcd7c225923d2fd321",
"path_rev": "7c2324bdb4c75a17de8a3d1e6afe8cc0756ef5f9",
"pool_rev": "77001024a16126cc5718e654ea3e57bbf6e7fac3",
"protobuf_rev": "5e8f36b48f015532cd1165b47686b659fc8870da",
"pub_rev": "42819a1e10f803eb7f6296692c5a976e1c647360", # disable rev_sdk_deps.dart
"pub_semver_rev": "028b43506a3f7ec7f7b4673a78ba3da3d5fb138d",
"shelf_rev": "73edd2b6e18ee50afac57e4e224b8c714b81e66d",
"source_map_stack_trace_rev": "16e54fd9fc088961773340cb5c3688a089387135",
"source_maps_rev": "97c4833100b1bd8ea7e4a2fa1808383007e2d1e8",
"source_span_rev": "37735aecc5d8c0fb75ed61691bae056510b357bb",
"sse_rev": "8cc5b11aa0c82cd0d89758d20782221cc6ac6dec",
"stack_trace_rev": "4ddd86d5d22aad9a8e8e9a06fd0a6a6271736135",
"stream_channel_rev": "e54234f94da929153b012de2bba75c5246a52538",
"string_scanner_rev": "413b57a3b14fa273e8ed52578edfbe0446084795",
"sync_http_rev": "c3d6ad48ec997c56b7f076bc9f8b4134c4a9225c",
"term_glyph_rev": "423700a3c019dc67f93d2bd6578016a1402506f7",
"test_rev": "d0fc4bde2e05e62c75bc3ac7b3de3f510816ea44",
"test_descriptor_rev": "36d8617fafccbe36dfcf74ad4921c61911a6a411",
"test_process_rev": "b360784a9149b15888aed8d7cf167bb46fe733d5",
"test_reflective_loader_rev": "0bfaad91ed308ce9da11b48395c8210d7542c16b",
"tools_rev": "b72fae8673a5fa30b0eff4077005ac95f960dc9b",
"typed_data_rev": "a20be901e11eddcbd6e5735fb01b64d28c94c49d",
"usage_rev": "09bb8472fdafff2c48a19aabbcf57b3af0f43934",
"vector_math_rev": "88bada3c32ba3f1d53073a003085131d60b09213",
"watcher_rev": "7457413060ed7403b90b01533a61bd959932122e",
"web_socket_channel_rev": "4d1b5438d1bdfc6317bf99fd9d9c6e4edb7e9ec5",
"webdev_rev": "fc876cb0de59526160ed17efaa920557a6e2ba32", # https://github.com/dart-lang/webdev/issues/2201
"webdriver_rev": "20ec47f1976c5deaf5106f85f5bf4a025d2afb1e",
"webkit_inspection_protocol_rev": "39a3c297ff573635e7936b015ce4f3466e4739d6",
"yaml_rev": "7930148a3d03d7985ce2b53bc5eb2be9c878dab8",
"yaml_edit_rev": "87dcf31fcaada207ae7c3527f9885982534badce",
# Windows deps
"crashpad_rev": "bf327d8ceb6a669607b0dbab5a83a275d03f99ed",
"minichromium_rev": "8d641e30a8b12088649606b912c2bc4947419ccc",
"googletest_rev": "f854f1d27488996dc8a6db3c9453f80b02585e12",
# Pinned browser versions used by the testing infrastructure. These are not
# meant to be downloaded by users for local testing.
"download_chrome": False,
"chrome_tag": "115.0.5790.170+1",
"download_firefox": False,
"firefox_tag": "112.0.2",
# Emscripten is used in dart2wasm tests.
"download_emscripten": False,
"emsdk_rev": "e41b8c68a248da5f18ebd03bd0420953945d52ff",
"emsdk_ver": "3.1.3",
}
gclient_gn_args_file = Var("dart_root") + '/build/config/gclient_args.gni'
gclient_gn_args = [
]
deps = {
# Stuff needed for GN build.
Var("dart_root") + "/buildtools/clang_format/script":
Var("chromium_git") + "/chromium/llvm-project/cfe/tools/clang-format.git" +
"@" + Var("clang_format_scripts_rev"),
Var("dart_root") + "/benchmarks-internal": {
"url": Var("dart_internal_git") + "/benchmarks-internal.git" +
"@" + Var("benchmarks_internal_rev"),
"condition": "checkout_benchmarks_internal",
},
Var("dart_root") + "/tools/sdks/dart-sdk": {
"packages": [{
"package": "dart/dart-sdk/${{platform}}",
"version": Var("sdk_tag"),
}],
"dep_type": "cipd",
},
Var("dart_root") + "/third_party/d8": {
"packages": [{
"package": "dart/d8",
"version": Var("d8_tag"),
}],
"dep_type": "cipd",
},
Var("dart_root") + "/third_party/firefox_jsshell": {
"packages": [{
"package": "dart/third_party/jsshell/${{platform}}",
"version": Var("jsshell_tag"),
}],
"condition": "checkout_javascript_engines",
"dep_type": "cipd",
},
Var("dart_root") + "/third_party/devtools": {
"packages": [{
"package": "dart/third_party/flutter/devtools",
"version": "git_revision:" + Var("devtools_rev"),
}],
"dep_type": "cipd",
},
Var("dart_root") + "/tests/co19/src": {
"packages": [{
"package": "dart/third_party/co19",
"version": "git_revision:" + Var("co19_rev"),
}],
"dep_type": "cipd",
},
Var("dart_root") + "/tests/co19_2/src": {
"packages": [{
"package": "dart/third_party/co19/legacy",
"version": "git_revision:" + Var("co19_2_rev"),
}],
"dep_type": "cipd",
},
Var("dart_root") + "/third_party/markupsafe":
Var("chromium_git") + "/chromium/src/third_party/markupsafe.git" +
"@" + Var("markupsafe_rev"),
Var("dart_root") + "/third_party/babel": {
"packages": [{
"package": "dart/third_party/babel",
"version": "version:7.4.5",
}],
"dep_type": "cipd",
},
Var("dart_root") + "/third_party/zlib":
Var("chromium_git") + "/chromium/src/third_party/zlib.git" +
"@" + Var("zlib_rev"),
Var("dart_root") + "/third_party/libcxx":
Var("llvm_git") + "/llvm-project/libcxx" + "@" + Var("libcxx_rev"),
Var("dart_root") + "/third_party/libcxxabi":
Var("llvm_git") + "/llvm-project/libcxxabi" + "@" + Var("libcxxabi_rev"),
Var("dart_root") + "/third_party/boringssl":
Var("dart_git") + "boringssl_gen.git" + "@" + Var("boringssl_gen_rev"),
Var("dart_root") + "/third_party/boringssl/src":
"https://boringssl.googlesource.com/boringssl.git" +
"@" + Var("boringssl_rev"),
Var("dart_root") + "/third_party/binaryen/src" :
Var("chromium_git") + "/external/github.com/WebAssembly/binaryen.git" +
"@" + Var("binaryen_rev"),
Var("dart_root") + "/third_party/gsutil": {
"packages": [{
"package": "infra/3pp/tools/gsutil",
"version": "version:2@5.5",
}],
"dep_type": "cipd",
},
Var("dart_root") + "/third_party/root_certificates":
Var("dart_git") + "root_certificates.git" +
"@" + Var("root_certificates_rev"),
Var("dart_root") + "/third_party/emsdk":
Var("dart_git") + "external/github.com/emscripten-core/emsdk.git" +
"@" + Var("emsdk_rev"),
Var("dart_root") + "/third_party/jinja2":
Var("chromium_git") + "/chromium/src/third_party/jinja2.git" +
"@" + Var("jinja2_rev"),
Var("dart_root") + "/third_party/perfetto":
Var("fuchsia_git") +
"/third_party/android.googlesource.com/platform/external/perfetto" +
"@" + Var("perfetto_rev"),
Var("dart_root") + "/third_party/ply":
Var("chromium_git") + "/chromium/src/third_party/ply.git" +
"@" + Var("ply_rev"),
Var("dart_root") + "/build/secondary/third_party/protobuf":
Var("fuchsia_git") + "/protobuf-gn" +
"@" + Var("protobuf_gn_rev"),
Var("dart_root") + "/third_party/protobuf":
Var("fuchsia_git") + "/third_party/protobuf" +
"@" + Var("libprotobuf_rev"),
Var("dart_root") + "/third_party/icu":
Var("chromium_git") + "/chromium/deps/icu.git" +
"@" + Var("icu_rev"),
Var("dart_root") + "/third_party/WebCore":
Var("dart_git") + "webcore.git" + "@" + Var("WebCore_rev"),
Var("dart_root") + "/third_party/mdn/browser-compat-data/src":
Var('chromium_git') + '/external/github.com/mdn/browser-compat-data' +
"@" + Var("browser-compat-data_tag"),
Var("dart_root") + "/third_party/pkg/args":
Var("dart_git") + "args.git" + "@" + Var("args_rev"),
Var("dart_root") + "/third_party/pkg/async":
Var("dart_git") + "async.git" + "@" + Var("async_rev"),
Var("dart_root") + "/third_party/pkg/bazel_worker":
Var("dart_git") + "bazel_worker.git" + "@" + Var("bazel_worker_rev"),
Var("dart_root") + "/third_party/pkg/benchmark_harness":
Var("dart_git") + "benchmark_harness.git" + "@" +
Var("benchmark_harness_rev"),
Var("dart_root") + "/third_party/pkg/boolean_selector":
Var("dart_git") + "boolean_selector.git" +
"@" + Var("boolean_selector_rev"),
Var("dart_root") + "/third_party/pkg/browser_launcher":
Var("dart_git") + "browser_launcher.git" + "@" + Var("browser_launcher_rev"),
Var("dart_root") + "/third_party/pkg/characters":
Var("dart_git") + "characters.git" + "@" + Var("characters_rev"),
Var("dart_root") + "/third_party/pkg/cli_util":
Var("dart_git") + "cli_util.git" + "@" + Var("cli_util_rev"),
Var("dart_root") + "/third_party/pkg/clock":
Var("dart_git") + "clock.git" + "@" + Var("clock_rev"),
Var("dart_root") + "/third_party/pkg/collection":
Var("dart_git") + "collection.git" + "@" + Var("collection_rev"),
Var("dart_root") + "/third_party/pkg/convert":
Var("dart_git") + "convert.git" + "@" + Var("convert_rev"),
Var("dart_root") + "/third_party/pkg/crypto":
Var("dart_git") + "crypto.git" + "@" + Var("crypto_rev"),
Var("dart_root") + "/third_party/pkg/csslib":
Var("dart_git") + "csslib.git" + "@" + Var("csslib_rev"),
Var("dart_root") + "/third_party/pkg/dart_style":
Var("dart_git") + "dart_style.git" + "@" + Var("dart_style_rev"),
Var("dart_root") + "/third_party/pkg/dartdoc":
Var("dart_git") + "dartdoc.git" + "@" + Var("dartdoc_rev"),
Var("dart_root") + "/third_party/pkg/ecosystem":
Var("dart_git") + "ecosystem.git" + "@" + Var("ecosystem_rev"),
Var("dart_root") + "/third_party/pkg/ffi":
Var("dart_git") + "ffi.git" + "@" + Var("ffi_rev"),
Var("dart_root") + "/third_party/pkg/fixnum":
Var("dart_git") + "fixnum.git" + "@" + Var("fixnum_rev"),
Var("dart_root") + "/third_party/pkg/flute": {
"url": Var("dart_git") + "flute.git" + "@" + Var("flute_rev"),
"condition": "checkout_flute",
},
Var("dart_root") + "/third_party/pkg/file":
Var("dart_git") + "external/github.com/google/file.dart"
+ "@" + Var("file_rev"),
Var("dart_root") + "/third_party/pkg/glob":
Var("dart_git") + "glob.git" + "@" + Var("glob_rev"),
Var("dart_root") + "/third_party/pkg/html":
Var("dart_git") + "html.git" + "@" + Var("html_rev"),
Var("dart_root") + "/third_party/pkg/http":
Var("dart_git") + "http.git" + "@" + Var("http_rev"),
Var("dart_root") + "/third_party/pkg/http_multi_server":
Var("dart_git") + "http_multi_server.git" +
"@" + Var("http_multi_server_rev"),
Var("dart_root") + "/third_party/pkg/http_parser":
Var("dart_git") + "http_parser.git" + "@" + Var("http_parser_rev"),
Var("dart_root") + "/third_party/pkg/intl":
Var("dart_git") + "intl.git" + "@" + Var("intl_rev"),
Var("dart_root") + "/third_party/pkg/json_rpc_2":
Var("dart_git") + "json_rpc_2.git" + "@" + Var("json_rpc_2_rev"),
Var("dart_root") + "/third_party/pkg/leak_tracker":
Var("dart_git") + "leak_tracker.git" + "@" + Var("leak_tracker_rev"),
Var("dart_root") + "/third_party/pkg/lints":
Var("dart_git") + "lints.git" + "@" + Var("lints_rev"),
Var("dart_root") + "/third_party/pkg/logging":
Var("dart_git") + "logging.git" + "@" + Var("logging_rev"),
Var("dart_root") + "/third_party/pkg/markdown":
Var("dart_git") + "markdown.git" + "@" + Var("markdown_rev"),
Var("dart_root") + "/third_party/pkg/matcher":
Var("dart_git") + "matcher.git" + "@" + Var("matcher_rev"),
Var("dart_root") + "/third_party/pkg/mime":
Var("dart_git") + "mime.git" + "@" + Var("mime_rev"),
Var("dart_root") + "/third_party/pkg/mockito":
Var("dart_git") + "mockito.git" + "@" + Var("mockito_rev"),
Var("dart_root") + "/third_party/pkg/native":
Var("dart_git") + "native.git" + "@" + Var("native_rev"),
Var("dart_root") + "/third_party/pkg/package_config":
Var("dart_git") + "package_config.git" +
"@" + Var("package_config_rev"),
Var("dart_root") + "/third_party/pkg/path":
Var("dart_git") + "path.git" + "@" + Var("path_rev"),
Var("dart_root") + "/third_party/pkg/pool":
Var("dart_git") + "pool.git" + "@" + Var("pool_rev"),
Var("dart_root") + "/third_party/pkg/protobuf":
Var("dart_git") + "protobuf.git" + "@" + Var("protobuf_rev"),
Var("dart_root") + "/third_party/pkg/pub_semver":
Var("dart_git") + "pub_semver.git" + "@" + Var("pub_semver_rev"),
Var("dart_root") + "/third_party/pkg/pub":
Var("dart_git") + "pub.git" + "@" + Var("pub_rev"),
Var("dart_root") + "/third_party/pkg/shelf":
Var("dart_git") + "shelf.git" + "@" + Var("shelf_rev"),
Var("dart_root") + "/third_party/pkg/source_maps":
Var("dart_git") + "source_maps.git" + "@" + Var("source_maps_rev"),
Var("dart_root") + "/third_party/pkg/source_span":
Var("dart_git") + "source_span.git" + "@" + Var("source_span_rev"),
Var("dart_root") + "/third_party/pkg/source_map_stack_trace":
Var("dart_git") + "source_map_stack_trace.git" +
"@" + Var("source_map_stack_trace_rev"),
Var("dart_root") + "/third_party/pkg/sse":
Var("dart_git") + "sse.git" + "@" + Var("sse_rev"),
Var("dart_root") + "/third_party/pkg/stack_trace":
Var("dart_git") + "stack_trace.git" + "@" + Var("stack_trace_rev"),
Var("dart_root") + "/third_party/pkg/stream_channel":
Var("dart_git") + "stream_channel.git" +
"@" + Var("stream_channel_rev"),
Var("dart_root") + "/third_party/pkg/string_scanner":
Var("dart_git") + "string_scanner.git" +
"@" + Var("string_scanner_rev"),
Var("dart_root") + "/third_party/pkg/sync_http":
Var("dart_git") + "sync_http.git" + "@" + Var("sync_http_rev"),
Var("dart_root") + "/third_party/pkg/term_glyph":
Var("dart_git") + "term_glyph.git" + "@" + Var("term_glyph_rev"),
Var("dart_root") + "/third_party/pkg/test":
Var("dart_git") + "test.git" + "@" + Var("test_rev"),
Var("dart_root") + "/third_party/pkg/test_descriptor":
Var("dart_git") + "test_descriptor.git" + "@" + Var("test_descriptor_rev"),
Var("dart_root") + "/third_party/pkg/test_process":
Var("dart_git") + "test_process.git" + "@" + Var("test_process_rev"),
Var("dart_root") + "/third_party/pkg/test_reflective_loader":
Var("dart_git") + "test_reflective_loader.git" +
"@" + Var("test_reflective_loader_rev"),
Var("dart_root") + "/third_party/pkg/tools":
Var("dart_git") + "tools.git" + "@" + Var("tools_rev"),
Var("dart_root") + "/third_party/pkg/typed_data":
Var("dart_git") + "typed_data.git" + "@" + Var("typed_data_rev"),
Var("dart_root") + "/third_party/pkg/usage":
Var("dart_git") + "usage.git" + "@" + Var("usage_rev"),
Var("dart_root") + "/third_party/pkg/vector_math":
Var("dart_git") + "external/github.com/google/vector_math.dart.git" +
"@" + Var("vector_math_rev"),
Var("dart_root") + "/third_party/pkg/watcher":
Var("dart_git") + "watcher.git" + "@" + Var("watcher_rev"),
Var("dart_root") + "/third_party/pkg/webdev":
Var("dart_git") + "webdev.git" + "@" + Var("webdev_rev"),
Var("dart_root") + "/third_party/pkg/webdriver":
Var("dart_git") + "external/github.com/google/webdriver.dart.git" +
"@" + Var("webdriver_rev"),
Var("dart_root") + "/third_party/pkg/webkit_inspection_protocol":
Var("dart_git") + "external/github.com/google/webkit_inspection_protocol.dart.git" +
"@" + Var("webkit_inspection_protocol_rev"),
Var("dart_root") + "/third_party/pkg/web_socket_channel":
Var("dart_git") + "web_socket_channel.git" +
"@" + Var("web_socket_channel_rev"),
Var("dart_root") + "/third_party/pkg/yaml_edit":
Var("dart_git") + "yaml_edit.git" + "@" + Var("yaml_edit_rev"),
Var("dart_root") + "/third_party/pkg/yaml":
Var("dart_git") + "yaml.git" + "@" + Var("yaml_rev"),
# Keep consistent with pkg/test_runner/lib/src/options.dart.
Var("dart_root") + "/buildtools/linux-x64/clang": {
"packages": [
{
"package": "fuchsia/third_party/clang/linux-amd64",
"version": Var("clang_version"),
},
],
"condition": "host_cpu == x64 and host_os == linux",
"dep_type": "cipd",
},
Var("dart_root") + "/buildtools/mac-x64/clang": {
"packages": [
{
"package": "fuchsia/third_party/clang/mac-amd64",
"version": Var("clang_version"),
},
],
"condition": "host_os == mac", # On ARM64 Macs too because Goma doesn't support the host-arm64 toolchain.
"dep_type": "cipd",
},
Var("dart_root") + "/buildtools/win-x64/clang": {
"packages": [
{
"package": "fuchsia/third_party/clang/windows-amd64",
"version": Var("clang_version"),
},
],
"condition": "host_os == win", # On ARM64 Windows too because Fuchsia doesn't provide the host-arm64 toolchain.
"dep_type": "cipd",
},
Var("dart_root") + "/buildtools/linux-arm64/clang": {
"packages": [
{
"package": "fuchsia/third_party/clang/linux-arm64",
"version": Var("clang_version"),
},
],
"condition": "host_os == 'linux' and host_cpu == 'arm64'",
"dep_type": "cipd",
},
Var("dart_root") + "/buildtools/mac-arm64/clang": {
"packages": [
{
"package": "fuchsia/third_party/clang/mac-arm64",
"version": Var("clang_version"),
},
],
"condition": "host_os == 'mac' and host_cpu == 'arm64'",
"dep_type": "cipd",
},
Var("dart_root") + "/third_party/webdriver/chrome": {
"packages": [
{
"package": "dart/third_party/chromedriver/${{platform}}",
"version": "version:" + Var("chrome_tag"),
}
],
"condition": "download_chrome",
"dep_type": "cipd",
},
Var("dart_root") + "/buildtools": {
"packages": [
{
"package": "gn/gn/${{platform}}",
"version": Var("gn_version"),
},
],
"condition": "host_os != 'win'",
"dep_type": "cipd",
},
Var("dart_root") + "/buildtools/win": {
"packages": [
{
"package": "gn/gn/windows-amd64",
"version": Var("gn_version"),
},
],
"condition": "host_os == 'win'",
"dep_type": "cipd",
},
Var("dart_root") + "/buildtools/ninja": {
"packages": [{
"package": "infra/3pp/tools/ninja/${{platform}}",
"version": Var("ninja_tag"),
}],
"dep_type": "cipd",
},
Var("dart_root") + "/third_party/android_tools": {
"packages": [
{
"package": "flutter/android/sdk/all/${{os}}-amd64",
"version": "version:33v6"
}
],
"condition": "download_android_deps",
"dep_type": "cipd",
},
# TODO(38752): Confirm if mac sdk is necessary in dart.
Var("dart_root") + "/third_party/fuchsia/sdk/mac": {
"packages": [
{
"package": "fuchsia/sdk/gn/mac-amd64",
"version": Var("fuchsia_sdk_version"),
}
],
"condition":
'download_fuchsia_deps and host_os == "mac" and host_cpu == "x64"',
"dep_type": "cipd",
},
# TODO(38752): Migrate to core sdk, gn sdk is deprecating.
Var("dart_root") + "/third_party/fuchsia/sdk/linux": {
"packages": [
{
"package": "fuchsia/sdk/gn/linux-amd64",
"version": Var("fuchsia_sdk_version"),
}
],
"condition":
'download_fuchsia_deps and host_os == "linux" and host_cpu == "x64"',
"dep_type": "cipd",
},
Var("dart_root") + "/third_party/fuchsia/test_scripts": {
"packages": [
{
"package": "chromium/fuchsia/test-scripts/fuchsia",
"version": "version:2@0d97902a72c9bc224f64630177cf95cd632604a2",
}
],
"condition":
'download_fuchsia_deps and host_os == "linux" and host_cpu == "x64"',
"dep_type": "cipd",
},
Var("dart_root") + "/pkg/front_end/test/fasta/types/benchmark_data": {
"packages": [
{
"package": "dart/cfe/benchmark_data",
"version": "sha1sum:5b6e6dfa33b85c733cab4e042bf46378984d1544",
}
],
"dep_type": "cipd",
},
# TODO(37531): Remove these cipd packages and build with sdk instead when
# benchmark runner gets support for that.
Var("dart_root") + "/benchmarks/FfiBoringssl/native/out/": {
"packages": [
{
"package": "dart/benchmarks/ffiboringssl",
"version": "commit:a86c69888b9a416f5249aacb4690a765be064969",
},
],
"dep_type": "cipd",
},
Var("dart_root") + "/benchmarks/FfiCall/native/out/": {
"packages": [
{
"package": "dart/benchmarks/fficall",
"version": "ebF5aRXKDananlaN4Y8b0bbCNHT1MnkGbWqfpCpiND4C",
},
],
"dep_type": "cipd",
},
Var("dart_root") + "/benchmarks/NativeCall/native/out/": {
"packages": [
{
"package": "dart/benchmarks/nativecall",
"version": "w1JKzCIHSfDNIjqnioMUPq0moCXKwX67aUfhyrvw4E0C",
},
],
"dep_type": "cipd",
},
Var("dart_root") + "/third_party/browsers/chrome": {
"packages": [
{
"package": "dart/browsers/chrome/${{platform}}",
"version": "version:" + Var("chrome_tag"),
},
],
"condition": "download_chrome",
"dep_type": "cipd",
},
Var("dart_root") + "/third_party/browsers/firefox": {
"packages": [
{
"package": "dart/browsers/firefox/${{platform}}",
"version": "version:" + Var("firefox_tag"),
},
],
"condition": "download_firefox",
"dep_type": "cipd",
},
}
deps_os = {
"win": {
Var("dart_root") + "/third_party/cygwin":
Var("chromium_git") + "/chromium/deps/cygwin.git" + "@" +
"c89e446b273697fadf3a10ff1007a97c0b7de6df",
Var("dart_root") + "/third_party/crashpad/crashpad":
Var("chromium_git") + "/crashpad/crashpad.git" + "@" +
Var("crashpad_rev"),
Var("dart_root") + "/third_party/mini_chromium/mini_chromium":
Var("chromium_git") + "/chromium/mini_chromium" + "@" +
Var("minichromium_rev"),
Var("dart_root") + "/third_party/googletest":
Var("fuchsia_git") + "/third_party/googletest" + "@" +
Var("googletest_rev"),
}
}
hooks = [
{
    # Generate the .dart_tool/package_config.json file.
'name': 'Generate .dart_tool/package_confg.json',
'pattern': '.',
'action': ['python3', 'sdk/tools/generate_package_config.py'],
},
{
# Generate the sdk/version file.
'name': 'Generate sdk/version',
'pattern': '.',
'action': ['python3', 'sdk/tools/generate_sdk_version_file.py'],
},
{
'name': 'sysroot_arm',
'pattern': '.',
'condition': 'checkout_linux',
'action': ['python3', 'sdk/build/linux/sysroot_scripts/install-sysroot.py',
'--arch=arm'],
},
{
'name': 'sysroot_arm64',
'pattern': '.',
'condition': 'checkout_linux',
'action': ['python3', 'sdk/build/linux/sysroot_scripts/install-sysroot.py',
'--arch=arm64'],
},
{
'name': 'sysroot_x86',
'pattern': '.',
'condition': 'checkout_linux',
'action': ['python3', 'sdk/build/linux/sysroot_scripts/install-sysroot.py',
'--arch=x86'],
},
{
'name': 'sysroot_x64',
'pattern': '.',
'condition': 'checkout_linux',
'action': ['python3', 'sdk/build/linux/sysroot_scripts/install-sysroot.py',
'--arch=x64'],
},
{
'name': 'buildtools',
'pattern': '.',
'action': ['python3', 'sdk/tools/buildtools/update.py'],
},
{
# Update the Windows toolchain if necessary.
'name': 'win_toolchain',
'pattern': '.',
'action': ['python3', 'sdk/build/vs_toolchain.py', 'update'],
'condition': 'checkout_win'
},
  # Install and activate the Emscripten SDK.
{
'name': 'install_emscripten',
'pattern': '.',
'action': ['python3', 'sdk/third_party/emsdk/emsdk.py', 'install',
Var('emsdk_ver')],
'condition': 'download_emscripten'
},
{
'name': 'activate_emscripten',
'pattern': '.',
'action': ['python3', 'sdk/third_party/emsdk/emsdk.py', 'activate',
Var('emsdk_ver')],
'condition': 'download_emscripten'
},
{
'name': 'Download Fuchsia system images',
'pattern': '.',
'action': [
'python3',
'sdk/build/fuchsia/with_envs.py',
'sdk/third_party/fuchsia/test_scripts/update_product_bundles.py',
'terminal.qemu-x64',
],
'condition': 'download_fuchsia_deps'
},
]
|
# Generated by Django 2.2 on 2020-04-17 06:45
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import phone_field.models
class Migration(migrations.Migration):
    """Initial schema for the tutoring app.

    Creates the lookup tables (languages, subjects, time slots, notification
    types), the tutor/student profiles, class listings, payments, and class
    requests, then wires up the many-to-many / foreign-key relations.
    Auto-generated by Django 2.2 -- do not hand-edit field definitions.
    """

    # First migration for this app.
    initial = True

    dependencies = [
        # Profiles below hold OneToOneFields to the (swappable) user model.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        # Lookup table: languages a tutor can speak.
        migrations.CreateModel(
            name='languages',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('language', models.CharField(max_length=50)),
            ],
            options={
                'verbose_name_plural': 'Languages Spoken',
            },
        ),
        # A class offering; M2M/FK relations are added further below once the
        # referenced models exist.
        migrations.CreateModel(
            name='listings',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=100)),
                ('student_class', models.CharField(choices=[('6', 'Class 6'), ('7', 'Class 7'), ('8', 'Class 8'), ('9', 'Class 9'), ('10', 'Class 10'), ('11', 'Class 11'), ('12', 'Class 12')], max_length=2)),
                ('class_type', models.CharField(choices=[('1-1', 'One-to-One Class'), ('1-n', 'One-to-Many Class')], max_length=5)),
                ('hourly_rate', models.IntegerField()),
                ('methodology', models.TextField(blank=True, null=True)),
                ('details', models.TextField(blank=True, null=True)),
            ],
            options={
                'verbose_name_plural': 'Class Listings',
            },
        ),
        # Lookup table: kinds of notifications a student may subscribe to.
        migrations.CreateModel(
            name='notifications_type',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('notifications_title', models.CharField(max_length=20)),
            ],
            options={
                'verbose_name_plural': 'Notification Types',
            },
        ),
        # Lookup table: subjects that can be taught.
        migrations.CreateModel(
            name='subjects',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('subject_name', models.CharField(max_length=50)),
                ('sub_detail', models.CharField(blank=True, max_length=500, null=True)),
            ],
            options={
                'verbose_name_plural': 'Subjects',
            },
        ),
        # Lookup table: bookable time slots (times stored as plain strings).
        migrations.CreateModel(
            name='TimeSlots',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('start_time', models.CharField(max_length=10)),
                ('end_time', models.CharField(max_length=10)),
            ],
            options={
                'verbose_name_plural': 'Time Slots',
            },
        ),
        # Tutor profile, linked 1:1 to an auth user.
        migrations.CreateModel(
            name='TutorProfile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
                ('dob', models.DateField(blank=True, null=True)),
                ('gender', models.CharField(choices=[('M', 'Male'), ('F', 'Female'), ('O', 'Other')], max_length=1)),
                ('email', models.EmailField(max_length=254)),
                ('phone_number', phone_field.models.PhoneField(max_length=31)),
                ('skype_id', models.CharField(max_length=15)),
                ('profile_pic', models.ImageField(upload_to='')),
                ('active', models.BooleanField(default=1)),
                ('identity_document', models.FileField(upload_to='')),
                ('curriculum_vitae', models.TextField()),
                ('about', models.TextField()),
                ('languages_spoken', models.ManyToManyField(to='core.languages')),
                ('tutor', models.OneToOneField(default=1, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name_plural': 'Tutors',
            },
        ),
        # Student profile, linked 1:1 to an auth user; most contact fields optional.
        migrations.CreateModel(
            name='StudentProfile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
                ('dob', models.DateField(blank=True, null=True)),
                ('gender', models.CharField(choices=[('M', 'Male'), ('F', 'Female'), ('O', 'Other')], max_length=1)),
                ('email', models.EmailField(max_length=254)),
                ('phone_number', phone_field.models.PhoneField(blank=True, max_length=31, null=True)),
                ('skype_id', models.CharField(blank=True, max_length=15, null=True)),
                ('profile_pic', models.ImageField(blank=True, null=True, upload_to='')),
                ('active', models.BooleanField(default=1)),
                ('student_class', models.CharField(choices=[('6', 'Class 6'), ('7', 'Class 7'), ('8', 'Class 8'), ('9', 'Class 9'), ('10', 'Class 10'), ('11', 'Class 11'), ('12', 'Class 12')], max_length=5)),
                ('school', models.CharField(blank=True, max_length=100, null=True)),
                ('board', models.CharField(blank=True, max_length=50, null=True)),
                ('notifications', models.ManyToManyField(blank=True, to='core.notifications_type')),
                ('student', models.OneToOneField(default=1, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name_plural': 'Students',
            },
        ),
        # Payment record; SET_NULL keeps the row if listing/student is deleted.
        migrations.CreateModel(
            name='payment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date_time', models.DateTimeField()),
                ('payment_id', models.CharField(max_length=20)),
                ('amount', models.IntegerField()),
                ('listing', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='core.listings')),
                ('student', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='core.StudentProfile')),
            ],
            options={
                'verbose_name_plural': 'Payments',
            },
        ),
        # Relations on listings, added once TimeSlots/subjects/TutorProfile exist.
        migrations.AddField(
            model_name='listings',
            name='class_slot',
            field=models.ManyToManyField(to='core.TimeSlots'),
        ),
        migrations.AddField(
            model_name='listings',
            name='subject',
            field=models.ManyToManyField(to='core.subjects'),
        ),
        migrations.AddField(
            model_name='listings',
            name='tutor',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.TutorProfile'),
        ),
        # A student's request to join a listing at a given time slot.
        migrations.CreateModel(
            name='class_request',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date_time', models.DateTimeField()),
                ('accepted_status', models.BooleanField(default=0)),
                ('listing', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.listings')),
                ('student', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.StudentProfile')),
                ('time_slot', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.TimeSlots')),
            ],
            options={
                'verbose_name_plural': 'Class Requests',
            },
        ),
    ]
|
"""
Project: Forecasting graduate admissions with logistic regression
Name: Kevin Trinh
Date: 7/29/19
"""
from __future__ import print_function
import math
from IPython import display
from matplotlib import cm
from matplotlib import gridspec
from matplotlib import pyplot as plt
import numpy as np
import pandas as pd
from sklearn import metrics
import tensorflow as tf
from tensorflow.python.data import Dataset
# Silence TF 1.x INFO/WARNING logging; only errors are shown.
tf.logging.set_verbosity(tf.logging.ERROR)
# Keep pandas `describe()` output below compact.
pd.options.display.max_rows = 10
pd.options.display.float_format = '{:.1f}'.format
# read in the data set as a Pandas dataframe
admission_dataframe = pd.read_csv("Admission_Predict_Ver1.1.csv", sep=",")
# shuffle UCLA Graduate Admission data set so the sequential
# train/validation/test splits below are not biased by file order
admission_dataframe = admission_dataframe.reindex(
    np.random.permutation(admission_dataframe.index))
def preprocess_features(admission_dataframe):
    """Select the model's input features from the admissions data.

    Args:
        admission_dataframe: DataFrame of UCLA Graduate Admission rows.

    Returns:
        A new DataFrame restricted to the seven predictor columns; a copy,
        so mutating it does not touch the caller's frame.
    """
    feature_columns = [
        "GRE_Score",
        "TOEFL_Score",
        "University_Rating",
        "SOP",
        "LOR",
        "CGPA",
        "Research",
    ]
    return admission_dataframe[feature_columns].copy()
def preprocess_targets(admission_dataframe):
    """Build the binary classification label from the admissions data.

    Rows whose "Chance of Admit" exceeds 0.50 get label 1.0, otherwise 0.0.

    Args:
        admission_dataframe: DataFrame of UCLA Graduate Admission rows.

    Returns:
        A DataFrame with the single float column "Admission".
    """
    threshold = 0.50
    labels = pd.DataFrame()
    labels["Admission"] = (
        admission_dataframe["Chance of Admit"] > threshold).astype(float)
    return labels
# Choose the first 300 (out of 500) examples for training.
training_examples = preprocess_features(admission_dataframe.head(300))
training_targets = preprocess_targets(admission_dataframe.head(300))
# Choose the next 100 (out of 500) examples for validation.
validation_examples = preprocess_features(admission_dataframe[300:400])
validation_targets = preprocess_targets(admission_dataframe[300:400])
# Choose the last 100 (out of 500) examples for testing.
test_examples = preprocess_features(admission_dataframe.tail(100))
test_targets = preprocess_targets(admission_dataframe.tail(100))
# Double-check that we've done the right thing by printing summary stats.
print("Training examples summary:")
display.display(training_examples.describe())
print("Validation examples summary:")
display.display(validation_examples.describe())
print("Training targets summary:")
display.display(training_targets.describe())
print("Validation targets summary:")
display.display(validation_targets.describe())
def construct_feature_columns(input_features):
    """Construct the TensorFlow feature columns.

    Args:
        input_features: Names of the numerical input features to use.

    Returns:
        A set with one `tf.feature_column.numeric_column` per feature name.
    """
    return {tf.feature_column.numeric_column(feature_name)
            for feature_name in input_features}
def my_input_fn(features, targets, batch_size=1, shuffle=True, num_epochs=None):
    """Input function feeding (features, labels) batches to an Estimator.

    Args:
        features: pandas DataFrame of features
        targets: pandas DataFrame of targets
        batch_size: Size of batches to be passed to the model
        shuffle: Whether to shuffle the data before batching.
        num_epochs: How many times to repeat the data. None = repeat forever.

    Returns:
        Tuple of (features, labels) tensors for the next data batch.
    """
    # The Estimator API wants features as a dict of column-name -> ndarray.
    feature_arrays = {name: np.array(column)
                      for name, column in dict(features).items()}

    # Build the dataset, then batch/repeat. (from_tensor_slices has a 2GB limit.)
    dataset = Dataset.from_tensor_slices((feature_arrays, targets))
    dataset = dataset.batch(batch_size).repeat(num_epochs)

    if shuffle:
        dataset = dataset.shuffle(10000)

    # Hand back the next-batch tensors.
    batch_features, batch_labels = dataset.make_one_shot_iterator().get_next()
    return batch_features, batch_labels
def train_linear_classifier_model(
        learning_rate,
        steps,
        batch_size,
        training_examples,
        training_targets,
        validation_examples,
        validation_targets):
    """Trains a linear classification model.

    In addition to training, this function also prints training progress information,
    as well as a plot of the training and validation loss over time.

    Args:
      learning_rate: A `float`, the learning rate.
      steps: A non-zero `int`, the total number of training steps. A training step
        consists of a forward and backward pass using a single batch.
      batch_size: A non-zero `int`, the batch size.
      training_examples: A `DataFrame` containing one or more columns from
        `admission_dataframe` to use as input features for training.
      training_targets: A `DataFrame` containing exactly one column from
        `admission_dataframe` to use as target for training.
      validation_examples: A `DataFrame` containing one or more columns from
        `admission_dataframe` to use as input features for validation.
      validation_targets: A `DataFrame` containing exactly one column from
        `admission_dataframe` to use as target for validation.

    Returns:
      A `LinearClassifier` object trained on the training data.
    """
    # set number of periods to see evolution of our model
    periods = 15
    # NOTE(review): this is a float under Python 3 division; confirm
    # Estimator.train accepts a non-integer `steps` in the targeted TF version.
    steps_per_period = steps / periods
    # Create a linear classifier object; clip gradients to norm 5.0 to keep
    # SGD updates bounded.
    my_optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate)
    my_optimizer = tf.contrib.estimator.clip_gradients_by_norm(my_optimizer, 5.0)
    linear_classifier = tf.estimator.LinearClassifier(
        feature_columns=construct_feature_columns(training_examples),
        optimizer=my_optimizer
    )
    # Create input functions: one shuffled/repeating fn for training, and two
    # single-epoch, unshuffled fns for computing predictions.
    training_input_fn = lambda: my_input_fn(training_examples,
                                            training_targets["Admission"],
                                            batch_size=batch_size)
    predict_training_input_fn = lambda: my_input_fn(training_examples,
                                                    training_targets["Admission"],
                                                    num_epochs=1,
                                                    shuffle=False)
    predict_validation_input_fn = lambda: my_input_fn(validation_examples,
                                                      validation_targets["Admission"],
                                                      num_epochs=1,
                                                      shuffle=False)
    # Train the model, but do so inside a loop so that we can periodically assess
    # loss metrics.
    print("Training model...")
    print("LogLoss (on training data):")
    training_log_losses = []
    validation_log_losses = []
    for period in range (0, periods):
        # Train the model, starting from the prior state.
        linear_classifier.train(
            input_fn=training_input_fn,
            steps=steps_per_period
        )
        # Take a break and compute predictions (class probabilities).
        training_probabilities = linear_classifier.predict(input_fn=predict_training_input_fn)
        training_probabilities = np.array([item['probabilities'] for item in training_probabilities])
        validation_probabilities = linear_classifier.predict(input_fn=predict_validation_input_fn)
        validation_probabilities = np.array([item['probabilities'] for item in validation_probabilities])
        training_log_loss = metrics.log_loss(training_targets, training_probabilities)
        validation_log_loss = metrics.log_loss(validation_targets, validation_probabilities)
        # Occasionally print the current loss.
        print(" period %02d : %0.5f" % (period, training_log_loss))
        # Add the loss metrics from this period to our list.
        training_log_losses.append(training_log_loss)
        validation_log_losses.append(validation_log_loss)
    print("Model training finished.")
    # Output a graph of loss metrics over periods.
    plt.figure(1)
    plt.ylabel("LogLoss")
    plt.xlabel("Periods")
    plt.title("LogLoss vs. Periods")
    plt.tight_layout()
    plt.plot(training_log_losses, label="training")
    plt.plot(validation_log_losses, label="validation")
    plt.legend()
    return linear_classifier
# train our model and examine performance on validation set; loop so the user
# can retry with different hyperparameters until satisfied
execute_training = "y"
while (execute_training == "y"):
    # train our data
    # (redundant guard: the while condition already ensures this is true)
    if (execute_training == "y"):
        # prompt user for hyperparameters
        print("Enter value for learning rate (order of 0.000001 recommended):")
        learning_rate = input()
        learning_rate = float(learning_rate)
        print("Enter value for steps:")
        steps = input()
        steps = int(steps)
        print("Enter value for batch size:")
        batch_size = input()
        batch_size = int(batch_size)
        # train our model with logistic regression
        linear_classifier = train_linear_classifier_model(
            learning_rate,
            steps,
            batch_size,
            training_examples=training_examples,
            training_targets=training_targets,
            validation_examples=validation_examples,
            validation_targets=validation_targets)
        # examine model accuracy, ROC, and AUC on the validation split
        predict_validation_input_fn = lambda: my_input_fn(validation_examples,
                                                          validation_targets["Admission"],
                                                          num_epochs=1,
                                                          shuffle=False)
        evaluation_metrics = linear_classifier.evaluate(input_fn=predict_validation_input_fn)
        print("AUC on the validation set: %0.5f" % evaluation_metrics['auc'])
        print("Accuracy on the validation set: %0.5f" % evaluation_metrics['accuracy'])
        # prompt user to retrain data
        print("Would you like to retrain your data with new hyperparameters? (y/n)")
        execute_training = input()
# test model accuracy, ROC, and AUC on the held-out test split
predict_test_input_fn = lambda: my_input_fn(test_examples,
                                            test_targets["Admission"],
                                            num_epochs=1,
                                            shuffle=False)
evaluation_metrics = linear_classifier.evaluate(input_fn=predict_test_input_fn)
test_probabilities = linear_classifier.predict(input_fn=predict_test_input_fn)
print("AUC on the test set: %0.5f" % evaluation_metrics['auc'])
print("Accuracy on the test set: %0.5f" % evaluation_metrics['accuracy'])
# Compare our model against a random classifier via an ROC curve;
# [:, 1] takes the probability of the positive class.
test_probabilities = np.array([item['probabilities'][1] for item in test_probabilities])
false_positive_rate, true_positive_rate, thresholds = metrics.roc_curve(
    test_targets, test_probabilities)
plt.figure(2)
plt.plot(false_positive_rate, true_positive_rate, label="our model")
plt.plot([0, 1], [0, 1], label="random classifier")
plt.title("ROC")
plt.xlabel("False positive rate")
plt.ylabel("True positive rate")
plt.legend(loc=2)
class Circle():
    """A circle defined by its radius.

    Fix: the original used the hard-coded, imprecise constant 3.14 for pi;
    `math.pi` gives full double precision without changing the interface.
    """

    def __init__(self, radius):
        # Radius of the circle (same units as the caller supplies).
        self.radius = radius

    def area(self):
        """Return the area, pi * r**2."""
        import math  # local import keeps this class self-contained
        return self.radius ** 2 * math.pi

    def perimeter(self):
        """Return the circumference, 2 * pi * r."""
        import math
        return 2 * self.radius * math.pi
# Simple interactive driver: read an integer radius and report the
# circle's area and perimeter.
print("Enter the value of radius")
r = int(input())
desiredCircle = Circle(r)
print("AREA OF DESIRED CIRCLE")
print(desiredCircle.area())
print("PERIMETER OF DESIRED CIRCLE")
print(desiredCircle.perimeter())
|
#!/usr/bin/env python
# coding: utf-8
# Copyright (c) Qotto, 2019
""" Gen correlation id
Utils function
"""
from base64 import b64encode
from datetime import datetime, timezone
from secrets import token_urlsafe
__all__ = [
    'gen_correlation_id',
]

# Byte counts fed to token_urlsafe(); each 3 bytes yields 4 url-safe chars.
CORRELATION_ID_PREFIX_LENGTH = 6
CORRELATION_ID_TOKEN_LENGTH = 3


def gen_correlation_id(prefix: str = None) -> str:
    """Build a correlation ID of the form ``prefix:date:random``.

    - ``prefix`` is a caller-chosen fixed part (8 random chars when omitted)
    - ``date`` encodes the current UTC time (8 characters)
    - ``random`` is a fresh random token (4 characters)

    Generated parts only use the ``[a-zA-Z0-9_-]`` alphabet.
    """
    if prefix is None:
        prefix = token_urlsafe(CORRELATION_ID_PREFIX_LENGTH)

    # 6-byte timestamp: seconds since 2000-01-01 UTC at 1/65536 s resolution,
    # then base64 with '_-' as the two extra alphabet characters.
    now_ts = datetime.now(timezone.utc).timestamp()
    epoch_2000_ts = datetime(2000, 1, 1, tzinfo=timezone.utc).timestamp()
    stamp = int(65536 * (now_ts - epoch_2000_ts)).to_bytes(6, 'big')
    date_part = b64encode(stamp, b'_-').decode('ascii')

    random_part = token_urlsafe(CORRELATION_ID_TOKEN_LENGTH)
    return f'{prefix}:{date_part}:{random_part}'
|
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import math
import numpy as np
from gym import logger, spaces
from mtenv import MTEnv
from mtenv.utils import seeding
"""
Classic cart-pole system implemented based on Rich Sutton et al.
Copied from http://incompleteideas.net/sutton/book/code/pole.c
permalink: https://perma.cc/C9ZM-652R
"""
class MTCartPole(MTEnv):
    """A cartpole environment with varying physical values
    (see the self._mu_to_vars function)

    The 5-dim task vector (entries in [-1, 1]) parameterizes gravity, cart
    mass, pole mass, pole half-length and force magnitude.
    """

    metadata = {"render.modes": ["human", "rgb_array"], "video.frames_per_second": 50}

    def _mu_to_vars(self, mu):
        # Map the task vector mu onto the physical constants of the system.
        self.gravity = 9.8 + mu[0] * 5
        self.masscart = 1.0 + mu[1] * 0.5
        self.masspole = 0.1 + mu[2] * 0.09
        self.total_mass = self.masspole + self.masscart
        self.length = 0.5 + mu[3] * 0.3
        self.polemass_length = self.masspole * self.length
        self.force_mag = 10 * mu[4]
        if mu[4] == 0:
            # mu[4] == 0 would disable the actuator; fall back to the
            # classic force magnitude instead.
            self.force_mag = 10

    def __init__(self):
        # Angle limit set to 2 * theta_threshold_radians so failing observation is still within bounds
        self.x_threshold = 2.4
        self.theta_threshold_radians = 12 * 2 * math.pi / 360
        high = np.array(
            [
                self.x_threshold * 2,
                np.finfo(np.float32).max,
                self.theta_threshold_radians * 2,
                np.finfo(np.float32).max,
            ]
        )
        observation_space = spaces.Box(-high, high, dtype=np.float32)
        action_space = spaces.Discrete(2)
        # Task space: 5 values in [-1, 1], consumed by _mu_to_vars.
        high = np.array([1.0 for k in range(5)])
        task_space = spaces.Box(-high, high, dtype=np.float32)
        super().__init__(
            action_space=action_space,
            env_observation_space=observation_space,
            task_observation_space=task_space,
        )
        # Default (classic cartpole) physics; overwritten by _mu_to_vars
        # once a task state is set.
        self.gravity = 9.8
        self.masscart = 1.0
        self.masspole = 0.1
        self.total_mass = self.masspole + self.masscart
        self.length = 0.5  # actually half the pole's length
        self.polemass_length = self.masspole * self.length
        self.force_mag = 10.0
        self.tau = 0.02  # seconds between state updates
        self.kinematics_integrator = "euler"
        # Angle at which to fail the episode
        self.state = None
        self.steps_beyond_done = None
        self.task_state = None

    def step(self, action):
        """Advance the simulation one tick for the given discrete action (0/1)."""
        self.t += 1
        # Re-derive physics from the task each step (task may have changed).
        self._mu_to_vars(self.task_state)
        assert self.action_space.contains(action), "%r (%s) invalid" % (
            action,
            type(action),
        )
        state = self.state
        x, x_dot, theta, theta_dot = state
        force = self.force_mag if action == 1 else -self.force_mag
        costheta = math.cos(theta)
        sintheta = math.sin(theta)
        # Equations of motion as in the classic Sutton et al. pole.c code.
        temp = (
            force + self.polemass_length * theta_dot * theta_dot * sintheta
        ) / self.total_mass
        thetaacc = (self.gravity * sintheta - costheta * temp) / (
            self.length
            * (4.0 / 3.0 - self.masspole * costheta * costheta / self.total_mass)
        )
        xacc = temp - self.polemass_length * thetaacc * costheta / self.total_mass
        if self.kinematics_integrator == "euler":
            x = x + self.tau * x_dot
            x_dot = x_dot + self.tau * xacc
            theta = theta + self.tau * theta_dot
            theta_dot = theta_dot + self.tau * thetaacc
        else:  # semi-implicit euler
            x_dot = x_dot + self.tau * xacc
            x = x + self.tau * x_dot
            theta_dot = theta_dot + self.tau * thetaacc
            theta = theta + self.tau * theta_dot
        self.state = [x, x_dot, theta, theta_dot]
        # Episode ends when the cart leaves the track or the pole tips too far.
        done = (
            x < -self.x_threshold
            or x > self.x_threshold
            or theta < -self.theta_threshold_radians
            or theta > self.theta_threshold_radians
        )
        done = bool(done)
        reward = 0
        if not done:
            reward = 1.0
        elif self.steps_beyond_done is None:
            # Pole just fell!
            self.steps_beyond_done = 0
            reward = 1.0
        else:
            # Stepping after the episode already ended: warn, give 0 reward.
            if self.steps_beyond_done == 0:
                logger.warn(
                    "You are calling 'step()' even though this environment has already returned done = True. You should always call 'reset()' once you receive 'done = True' -- any further steps are undefined behavior."
                )
                print(
                    "You are calling 'step()' even though this environment has already returned done = True. You should always call 'reset()' once you receive 'done = True' -- any further steps are undefined behavior."
                )
            self.steps_beyond_done += 1
            reward = 0.0
        return (
            {"env_obs": self.state, "task_obs": self.get_task_obs()},
            reward,
            done,
            {},
        )

    def reset(self, **args):
        """Reset the pole near upright; requires env seed and task state set."""
        self.assert_env_seed_is_set()
        assert self.task_state is not None
        self._mu_to_vars(self.task_state)
        self.state = self.np_random_env.uniform(low=-0.05, high=0.05, size=(4,))
        self.steps_beyond_done = None
        self.t = 0
        return {"env_obs": self.state, "task_obs": self.get_task_obs()}

    def get_task_obs(self):
        # Task observation is the raw task vector itself.
        return self.task_state

    def get_task_state(self):
        return self.task_state

    def set_task_state(self, task_state):
        self.task_state = task_state

    def sample_task_state(self):
        """Draw a new 5-dim task vector uniformly from [-1, 1]^5."""
        self.assert_task_seed_is_set()
        super().sample_task_state()
        new_task_state = [
            self.np_random_task.uniform(-1, 1),
            self.np_random_task.uniform(-1, 1),
            self.np_random_task.uniform(-1, 1),
            self.np_random_task.uniform(-1, 1),
            self.np_random_task.uniform(-1, 1),
        ]
        return new_task_state

    def seed(self, env_seed):
        # Seed the environment RNG (initial-state sampling).
        self.np_random_env, seed = seeding.np_random(env_seed)
        return [seed]

    def seed_task(self, task_seed):
        # Seed the task RNG (task-vector sampling).
        self.np_random_task, seed = seeding.np_random(task_seed)
        return [seed]
class CartPole(MTCartPole):
    """Fixed-physics (classic) cartpole exposed through the MTEnv interface."""

    def __init__(self):
        super().__init__()

    def sample_task_state(self):
        """Always return the neutral task vector, i.e. the default physics."""
        return [0.0] * 5
if __name__ == "__main__":
    # Smoke test: run one random-policy episode and print observations.
    env = MTCartPole()
    env.seed(5)
    env.seed_task(15)
    # NOTE(review): reset_task_state() is not defined in this file --
    # presumably provided by the MTEnv base class; confirm.
    env.reset_task_state()
    obs = env.reset()
    print(obs)
    done = False
    while not done:
        obs, rew, done, _ = env.step(np.random.randint(env.action_space.n))
        print(obs)
|
#!/usr/bin/python
from django.core.management.base import BaseCommand
from django.conf import settings
from django.contrib.auth.models import User, Permission
from cms.models.permissionmodels import PageUserGroup, GlobalPagePermission
from zinnia.models import Category
class Command(BaseCommand):
    """Idempotent bootstrap of users, groups and blog categories for the
    Developer Portal (uses get_or_create throughout, so re-running is safe)."""

    help = "Make sure the Developer Portal database is set up properly."

    def handle(self, *args, **options):
        # Full permission set; granted wholesale to the admin group below.
        all_perms = Permission.objects.filter()
        print("Creating admin user.")
        admin, created = User.objects.get_or_create(username='system')
        admin.is_staff = True
        admin.is_superuser = True
        admin.save()
        # Admin group: every permission plus full global page permissions.
        if hasattr(settings, 'ADMIN_GROUP') and settings.ADMIN_GROUP != "":
            print("Configuring {} group.".format(settings.ADMIN_GROUP))
            admins, created = PageUserGroup.objects.get_or_create(
                name=settings.ADMIN_GROUP, defaults={'created_by': admin})
            admins.permissions.add(*list(all_perms))
            print("Configuring global permissions for group.")
            adminperms, created = GlobalPagePermission.objects.get_or_create(
                # who:
                group=admins,
                # what:
                defaults={
                    'can_change': True,
                    'can_add': True,
                    'can_delete': True,
                    'can_change_advanced_settings': True,
                    'can_publish': True,
                    'can_change_permissions': True,
                    'can_move_page': True,
                    'can_view': True,
                }
            )
            adminperms.sites.add(settings.SITE_ID)
        # Editor group: only CMS page permissions, and no advanced-settings
        # or permission-management rights.
        if hasattr(settings, 'EDITOR_GROUP') and settings.EDITOR_GROUP != "":
            print("Configuring {} group.".format(settings.EDITOR_GROUP))
            editors, created = PageUserGroup.objects.get_or_create(
                name=settings.EDITOR_GROUP, defaults={'created_by': admin})
            page_perms = Permission.objects.filter(
                content_type__app_label='cms', content_type__model='page')
            editors.permissions.add(*list(page_perms))
            print("Configuring global permissions for group.")
            editorsperms, created = GlobalPagePermission.objects.get_or_create(
                # who:
                group=editors,
                # what:
                defaults={
                    'can_change': True,
                    'can_add': True,
                    'can_delete': True,
                    'can_change_advanced_settings': False,
                    'can_publish': True,
                    'can_change_permissions': False,
                    'can_move_page': True,
                    'can_view': True,
                }
            )
            editorsperms.sites.add(settings.SITE_ID)
        # One zinnia blog category per configured language, slugged by the
        # language code; 'Simplified Chinese' is shortened to 'Chinese'.
        print('Adding zinnia categories for the following: {}.'.format(
            ', '.join([a[0] for a in settings.LANGUAGES])))
        for lang in settings.LANGUAGES:
            if lang[1] == 'Simplified Chinese':
                Category.objects.get_or_create(title='Chinese', slug=lang[0])
            else:
                Category.objects.get_or_create(title=lang[1], slug=lang[0])
|
# pylint: disable=g-bad-file-header
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Graph editor module allows to modify an existing graph in place.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.graph_editor import edit as edit
from tensorflow.contrib.graph_editor import select as select
from tensorflow.contrib.graph_editor import subgraph as subgraph
from tensorflow.contrib.graph_editor import transform as transform
from tensorflow.contrib.graph_editor import util as util
from tensorflow.contrib.graph_editor.edit import connect
# edit: detach
from tensorflow.contrib.graph_editor.edit import detach
from tensorflow.contrib.graph_editor.edit import detach_inputs
from tensorflow.contrib.graph_editor.edit import detach_outputs
from tensorflow.contrib.graph_editor.edit import remove
# edit: reroute
from tensorflow.contrib.graph_editor.edit import reroute
from tensorflow.contrib.graph_editor.edit import reroute_a2b
from tensorflow.contrib.graph_editor.edit import reroute_a2b_inputs
from tensorflow.contrib.graph_editor.edit import reroute_a2b_outputs
from tensorflow.contrib.graph_editor.edit import reroute_b2a
from tensorflow.contrib.graph_editor.edit import reroute_b2a_inputs
from tensorflow.contrib.graph_editor.edit import reroute_b2a_outputs
from tensorflow.contrib.graph_editor.edit import reroute_inputs
from tensorflow.contrib.graph_editor.edit import reroute_outputs
from tensorflow.contrib.graph_editor.edit import swap
from tensorflow.contrib.graph_editor.edit import swap_inputs
from tensorflow.contrib.graph_editor.edit import swap_outputs
from tensorflow.contrib.graph_editor.select import select_ops
from tensorflow.contrib.graph_editor.select import select_ts
from tensorflow.contrib.graph_editor.subgraph import SubGraphView
from tensorflow.contrib.graph_editor.transform import copy
from tensorflow.contrib.graph_editor.transform import Transformer
# TODO(fkp): Add unit tests for all the files.
# some useful aliases -- short names for the most commonly used helpers,
# kept as part of the module's public API surface
ph = util.make_placeholder_from_dtype_and_shape  # placeholder from dtype/shape
sgv = subgraph.make_view  # build a SubGraphView
ts = select.select_ts  # select tensors
ops = select.select_ops  # select operations
|
from django.conf.urls import patterns,include,url
from teaman.tea import models,views
# URL routes for the tea app. Uses the old string-view `patterns()` style
# (removed in Django 1.10 -- NOTE(review): modern Django needs url()/path()
# with view callables instead).
urlpatterns = patterns('teaman.tea.views',
    # Site index page.
    url(r'^$', 'index'),
    # Upload-cleaning action; <action> is exactly 5 word characters.
    url(r'^clean/(?P<action>\w{5})$', 'clean_upload'),
    # Download a template identified by numeric id.
    url(r'^template/(?P<pid>\d+)$', 'template_download'),
)
|
import socket
import uuid
import zmq.green as zmq
from basesocket import BaseSocket
class Client(BaseSocket):
    """ZeroMQ client: SUBscribes to server broadcasts on port+1 and
    PUSHes messages upstream on `port`."""

    def __init__(self, host='127.0.0.1', port=12305):
        # Unique client identity: hostname plus a time-based UUID.
        # Fix: UUID.get_hex() is Python-2 only (AttributeError on Py3);
        # the .hex property yields the identical string on both versions.
        self._id = socket.gethostname() + '_' + uuid.uuid1().hex
        context = zmq.Context()
        # Downstream broadcast channel (PUB/SUB), one port above `port`;
        # empty subscription filter = receive everything.
        self.receiver = context.socket(zmq.SUB)
        self.receiver.connect('tcp://%s:%i' % (host, port + 1))
        self.receiver.setsockopt(zmq.SUBSCRIBE, b'')
        # Upstream channel to the server (PUSH/PULL).
        self.sender = context.socket(zmq.PUSH)
        self.sender.connect('tcp://%s:%i' % (host, port))

    def get_id(self):
        """Return this client's unique identifier string."""
        return self._id
# A file hosting all globally referred parameters/sets/etc.
from pyomo import environ as pe
m = pe.ConcreteModel()
# Olefins CnH2n for n = 2..20; `ordered=True` so members can be indexed by position.
m.COMP_OLEFIN = pe.Set(initialize=['C{0}H{1}'.format(i,2*i) for i in range(2,21)],ordered=True)
# Paraffins CnH2n+2 for n = 1..56.
m.COMP_PARAFFIN = pe.Set(initialize=['C{0}H{1}'.format(i,2*i+2) for i in range(1,57)],ordered=True)
# Inorganic components.
m.COMP_INORG = pe.Set(initialize=['H2','CO','CO2','H2O'],ordered=True)
# Derived unions: organics and the full component slate.
m.COMP_ORG = m.COMP_OLEFIN | m.COMP_PARAFFIN
m.COMP_TOTAL = m.COMP_INORG | m.COMP_OLEFIN | m.COMP_PARAFFIN
# Feed: syngas plus a C30 paraffin.
m.COMP_FEED = pe.Set(initialize=['H2','CO','C30H62'],ordered=True)
# m.COMP_FEED = m.COMP_INORG | m.COMP_OLEFIN | m.COMP_PARAFFIN
|
from scipy.signal import butter, lfilter, resample
from tqdm import tqdm
import scipy.io as io
import numpy as np
import lib.utils as utils
import random
import os
import sys
sys.path.append('..')
from methods import pulse_noise
def bandpass(sig, band, fs):
    """Apply a 5th-order Butterworth band-pass filter along axis 0.

    `band` is (low, high) in Hz and `fs` the sampling rate; the cutoffs are
    normalised to the Nyquist frequency as scipy's `butter` expects.
    """
    normalized_band = np.array(band) / (fs / 2)
    numer, denom = butter(5, normalized_band, btype='bandpass')
    return lfilter(numer, denom, sig, axis=0)
# ---- Acquisition / epoching parameters ----
sample_freq = 512.0
epoc_window = 1.75 * sample_freq  # epoch length: 1.75 s worth of samples
start_time = 2.5  # seconds after the trial marker at which the epoch starts
subjects = ['01', '02', '03', '04', '05', '06', '07', '08', '09', 10, 11, 12, 13, 14]  # NOTE(review): entries >= 10 are ints, but '{}'.format renders them as "10"... so the file names still resolve
data_file = 'EEG_Data/MI/raw/'
file1 = 'S{}E.mat'
file2 = 'S{}T.mat'
# if not os.path.exists(save_dir):
#     os.makedirs(save_dir)
# NPP pulse parameters [amplitude scale, frequency, proportion]; passed to
# methods.pulse_noise below.
npp_params=[1, 5, 0.1]
# Accumulators for clean (cl) and poisoned (po) data; Ek_* flags mark
# whether the accumulator has been seeded yet.
X_cl=[]
Y_cl=[]
X_po=[]
Y_po=[]
Ek_cl=0
Ek_po=0
# ---- Pass 1: build the CLEAN dataset (clean=True, so no pulse injection) ----
for s in tqdm(range(len(subjects))):
    x = []
    e = []
    labels = []
    clean=True
    data = io.loadmat(data_file + file1.format(subjects[s]))
    for i in range(3):  # dataset layout: file1 (SxE) holds 3 sessions, file2 (SxT) holds 5; concatenate all of them
        s_data = data['data'][0][i]
        EEG, trial, y = s_data['X'][0][0], s_data['trial'][0][0], s_data['y'][0][0]
        trial, y = trial.squeeze(), y.squeeze() - 1
        labels.append(y)
        if not clean:  # never taken in this pass
            npp = pulse_noise([1, 15, int(epoc_window)], freq=npp_params[1], sample_freq=sample_freq,
                              proportion=npp_params[2])
            amplitude = np.mean(np.std(EEG, axis=0)) * npp_params[0]  # scale pulse by the mean channel std
            for _, idx in enumerate(trial):
                idx = int(idx)
                EEG[int(idx + start_time * sample_freq):int(idx + start_time * sample_freq + epoc_window),
                :] = np.transpose(
                    npp.squeeze() * amplitude,
                    (1, 0)) + EEG[
                              int(idx + start_time * sample_freq):int(
                                  idx + start_time * sample_freq + epoc_window),
                              :]
        # Band-pass 8-30 Hz, then cut one epoch per trial marker.
        sig_F = bandpass(EEG, [8.0, 30.0], sample_freq)
        for _, idx in enumerate(trial):
            idx = int(idx)
            s_EEG = EEG[int(idx + start_time * sample_freq):int(idx + start_time * sample_freq + epoc_window), :]
            s_sig = sig_F[int(idx + start_time * sample_freq):int(idx + start_time * sample_freq + epoc_window), :]
            s_sig = resample(s_sig, int(epoc_window * 128 / sample_freq))  # downsample the epoch to 128 Hz
            e.append(s_EEG)
            x.append(s_sig)
    data = io.loadmat(data_file + file2.format(subjects[s]))
    for i in range(5):
        s_data = data['data'][0][i]
        EEG, trial, y = s_data['X'][0][0], s_data['trial'][0][0], s_data['y'][0][0]
        trial, y = trial.squeeze(), y.squeeze() - 1
        labels.append(y)
        if not clean:
            npp = pulse_noise([1, 15, int(epoc_window)], freq=npp_params[1], sample_freq=sample_freq,
                              proportion=npp_params[2])
            amplitude = np.mean(np.std(EEG, axis=0)) * npp_params[0]
            for _, idx in enumerate(trial):
                idx = int(idx)
                EEG[int(idx + start_time * sample_freq):int(idx + start_time * sample_freq + epoc_window),
                :] = np.transpose(
                    npp.squeeze() * amplitude,
                    (1, 0)) + EEG[
                              int(idx + start_time * sample_freq):int(
                                  idx + start_time * sample_freq + epoc_window),
                              :]
        sig_F = bandpass(EEG, [8.0, 30.0], sample_freq)
        for _, idx in enumerate(trial):
            idx = int(idx)
            s_EEG = EEG[int(idx + start_time * sample_freq):int(idx + start_time * sample_freq + epoc_window), :]
            s_sig = sig_F[int(idx + start_time * sample_freq):int(idx + start_time * sample_freq + epoc_window), :]
            s_sig = resample(s_sig, int(epoc_window * 128 / sample_freq))
            e.append(s_EEG)
            x.append(s_sig)
    e = np.array(e)
    e = np.transpose(e, (0, 2, 1))  # (epochs, time, channels) -> (epochs, channels, time)
    x = np.array(x)
    x = np.transpose(x, (0, 2, 1))
    # NOTE(review): `s` (the subject loop index) is clobbered here and only
    # used by the commented-out savemat below.
    s = np.squeeze(np.array(s))
    labels = np.squeeze(np.array(labels))
    labels=labels.flatten()
    e = utils.standard_normalize(e)
    x = utils.standard_normalize(x)
    # io.savemat(save_file.format(s), {'eeg': e[:, np.newaxis, :, :],
    #                                  'x': x[:, np.newaxis, :, :], 'y': labels})
    if Ek_cl==0:  # np.concatenate cannot start from an empty list, so seed on the first subject
        X_cl=x
        Y_cl=labels
        Ek_cl=1
    else:
        X_cl= np.concatenate((X_cl, x), axis=0)
        Y_cl= np.concatenate((Y_cl, labels), axis=0)
# ---- Pass 2: build the POISONED dataset (clean=False, pulse injected before filtering) ----
for s in tqdm(range(len(subjects))):
    x = []
    e = []
    labels = []
    clean = False
    data = io.loadmat(data_file + file1.format(subjects[s]))
    for i in range(3):  # same session layout as pass 1
        s_data = data['data'][0][i]
        EEG, trial, y = s_data['X'][0][0], s_data['trial'][0][0], s_data['y'][0][0]
        trial, y = trial.squeeze(), y.squeeze() - 1
        labels.append(y)
        if not clean:
            npp = pulse_noise([1, 15, int(epoc_window)], freq=npp_params[1], sample_freq=sample_freq,
                              proportion=npp_params[2])
            amplitude = np.mean(np.std(EEG, axis=0)) * npp_params[0]  # scale pulse by the mean channel std
            for _, idx in enumerate(trial):
                idx = int(idx)
                EEG[int(idx + start_time * sample_freq):int(idx + start_time * sample_freq + epoc_window),
                :] = np.transpose(
                    npp.squeeze() * amplitude,
                    (1, 0)) + EEG[
                              int(idx + start_time * sample_freq):int(
                                  idx + start_time * sample_freq + epoc_window),
                              :]
        sig_F = bandpass(EEG, [8.0, 30.0], sample_freq)
        for _, idx in enumerate(trial):
            idx = int(idx)
            s_EEG = EEG[int(idx + start_time * sample_freq):int(idx + start_time * sample_freq + epoc_window), :]
            s_sig = sig_F[int(idx + start_time * sample_freq):int(idx + start_time * sample_freq + epoc_window), :]
            s_sig = resample(s_sig, int(epoc_window * 128 / sample_freq))
            e.append(s_EEG)
            x.append(s_sig)
    data = io.loadmat(data_file + file2.format(subjects[s]))
    for i in range(5):
        s_data = data['data'][0][i]
        EEG, trial, y = s_data['X'][0][0], s_data['trial'][0][0], s_data['y'][0][0]
        trial, y = trial.squeeze(), y.squeeze() - 1
        labels.append(y)
        if not clean:
            npp = pulse_noise([1, 15, int(epoc_window)], freq=npp_params[1], sample_freq=sample_freq,
                              proportion=npp_params[2])
            amplitude = np.mean(np.std(EEG, axis=0)) * npp_params[0]
            for _, idx in enumerate(trial):
                idx = int(idx)
                EEG[int(idx + start_time * sample_freq):int(idx + start_time * sample_freq + epoc_window),
                :] = np.transpose(
                    npp.squeeze() * amplitude,
                    (1, 0)) + EEG[
                              int(idx + start_time * sample_freq):int(
                                  idx + start_time * sample_freq + epoc_window),
                              :]
        sig_F = bandpass(EEG, [8.0, 30.0], sample_freq)
        for _, idx in enumerate(trial):
            idx = int(idx)
            s_EEG = EEG[int(idx + start_time * sample_freq):int(idx + start_time * sample_freq + epoc_window), :]
            s_sig = sig_F[int(idx + start_time * sample_freq):int(idx + start_time * sample_freq + epoc_window), :]
            s_sig = resample(s_sig, int(epoc_window * 128 / sample_freq))
            e.append(s_EEG)
            x.append(s_sig)
    e = np.array(e)
    e = np.transpose(e, (0, 2, 1))
    x = np.array(x)
    x = np.transpose(x, (0, 2, 1))
    s = np.squeeze(np.array(s))
    labels = np.squeeze(np.array(labels))
    labels=labels.flatten()
    e = utils.standard_normalize(e)
    x = utils.standard_normalize(x)
    # io.savemat(save_file.format(s), {'eeg': e[:, np.newaxis, :, :],
    #                                  'x': x[:, np.newaxis, :, :], 'y': labels})
    if Ek_po==0:  # seed the accumulator on the first subject (np.concatenate needs non-empty input)
        X_po=x
        Y_po=labels
        Ek_po=1
    else:
        X_po= np.concatenate((X_po, x), axis=0)
        Y_po= np.concatenate((Y_po, labels), axis=0)
# Add a singleton axis so epochs have shape (N, 1, channels, time).
X_cl=X_cl[:, np.newaxis, :, :]
X_po=X_po[:, np.newaxis, :, :]
# NOTE(review): 2240 is a hard-coded total epoch count; confirm it equals
# len(X_cl) before reusing with other subject sets.
idx_al=np.arange(0,2240)
# Split indices: ~86% clean-train pool, the remainder halved into a poison
# set and a held-out test set (presumed split_data semantics — confirm).
idx_cl,_, idx_po, _ = utils.split_data([idx_al, idx_al], split=0.86, shuffle=True)
idx_po,_,idx_test_po,_=utils.split_data([idx_po, idx_po], split=0.5, shuffle=True)
x_train=X_cl[idx_cl]
y_train=Y_cl[idx_cl]
x_poison=X_po[idx_po]
y_poison=Y_po[idx_po]
x_test=X_cl[idx_test_po]
y_test=Y_cl[idx_test_po]
x_test_poison=X_po[idx_test_po]
y_test_poison=Y_po[idx_test_po]
x_train, y_train, x_validation, y_validation = utils.split_data([x_train, y_train], split=0.8, shuffle=True)
save_dir = 'EEG_Data/MI/'
save_file = save_dir + 'data2-{}-{}-{}.mat'.format(npp_params[0], npp_params[1],npp_params[2])
io.savemat(save_file, {'x_train': x_train,'y_train': y_train, 'x_validation':x_validation,'y_validation':y_validation,
                       'x_poison': x_poison,'y_poison':y_poison,'x_test':x_test,'y_test':y_test ,
                       'x_test_poison':x_test_poison,'y_test_poison':y_test_poison})
from django.db import models
from helpers.director.model_func.cus_fields.cus_picture import PictureField
# Create your models here.
# Publication states for ZhanXunModel.status (0 = offline, 1 = online).
ZHANXUN_STATUS=(
    (0,'离线'),
    (1,'在线'),
)
class ZhanXunModel(models.Model):
    """An article record with title, abstract, cover image, body and status.

    Field verbose_names are user-facing and intentionally kept in Chinese.
    """
    title = models.CharField('标题',max_length=500)
    abstract = models.TextField('摘要',blank=True)
    cover = PictureField('封面',max_length=400,blank=True)
    content = models.TextField('内容',blank=True)
    status=models.IntegerField('状态',choices=ZHANXUN_STATUS,default=0)
    # auto_now: refreshed on every save
    update_time =models.DateTimeField('更新时间',auto_now=True)
|
#!/usr/bin/python
import pickle
import sys
import matplotlib.pyplot
sys.path.append("../tools/")
from feature_format import featureFormat, targetFeatureSplit
### read in data dictionary, convert to numpy array
data_dict = pickle.load( open("../Final Project/final_project_dataset_unix.pkl", "rb") )
# Drop the 'TOTAL' entry (flagged as the big outlier further below).
data_dict.pop('TOTAL', 0)
features = ["salary", "bonus"]
data = featureFormat(data_dict, features)
# Identify outliers by raw salary, then by joint salary/bonus.
# Some entries hold the string 'NaN' instead of a number, so the comparison
# can raise TypeError on Python 3, and missing keys raise KeyError; catch
# only those instead of a bare `except` that would hide real bugs.
for p in data_dict:
    try:
        if data_dict[p]['salary'] > 2.5e7:
            print(p, data_dict[p])
    except (KeyError, TypeError):
        pass
# The scan above returns the 'TOTAL' aggregation row (popped earlier).
for p in data_dict:
    try:
        if data_dict[p]['salary'] > 1e6 and data_dict[p]['bonus'] > 5e6:
            print(p)
    except (KeyError, TypeError):
        pass
# Scatter-plot salary vs bonus for the cleaned records.
for point in data:
    salary = point[0]
    bonus = point[1]
    matplotlib.pyplot.scatter(salary, bonus)
matplotlib.pyplot.xlabel("salary")
matplotlib.pyplot.ylabel("bonus")
matplotlib.pyplot.show()
|
from fastai.collab import Module, Embedding, sigmoid_range
import torch
import torch.nn as nn
class DotProduct(Module):
    """Biased matrix-factorisation recommender.

    Prediction = dot(user_factors, anime_factors) + user_bias + anime_bias,
    squashed into `y_range` via fastai's scaled sigmoid.
    """
    def __init__(self, n_users, n_animes, n_factors, y_range=(0, 10.5)):
        self.user_factors = Embedding(n_users, n_factors)
        self.anime_factors = Embedding(n_animes, n_factors)
        self.user_bias = Embedding(n_users, 1)
        self.anime_bias = Embedding(n_animes, 1)
        self.y_range = y_range
    def forward(self, x):
        # x holds (user_id, anime_id) pairs, one per row.
        user_ids, anime_ids = x[:, 0], x[:, 1]
        u = self.user_factors(user_ids)
        a = self.anime_factors(anime_ids)
        score = (u * a).sum(dim=1, keepdim=True)
        score = score + self.user_bias(user_ids) + self.anime_bias(anime_ids)
        return sigmoid_range(score, *self.y_range)
class CollabNN(Module):
    """Neural collaborative-filtering model: concatenated user/item
    embeddings fed through an MLP, output squashed into `y_range`."""
    def __init__(self, user_sz, item_sz, y_range=(0, 10)):
        self.user_factors = Embedding(*user_sz)
        self.item_factors = Embedding(*item_sz)
        input_width = user_sz[1] + item_sz[1]
        self.layers = nn.Sequential(
            nn.Linear(input_width, 256), nn.BatchNorm1d(256), nn.ReLU(), nn.Dropout(.25),
            nn.Linear(256, 128), nn.BatchNorm1d(128), nn.ReLU(),
            nn.Linear(128, 64), nn.BatchNorm1d(64), nn.ReLU(),
            nn.Linear(64, 1),
        )
        self.y_range = y_range
    def forward(self, x):
        user_emb = self.user_factors(x[:, 0])
        item_emb = self.item_factors(x[:, 1])
        out = self.layers(torch.cat((user_emb, item_emb), dim=1))
        return sigmoid_range(out, *self.y_range)
|
import requests
from bs4 import BeautifulSoup
import csv
def scrape_ether():
    """Fetch the Coinbase Ethereum page and return the displayed price text.

    NOTE(review): this relies on a generated CSS class name
    ('ChartPriceHeader__BigAmount-sc-...'), which breaks whenever Coinbase
    redeploys; `find` then returns None and `.get_text()` raises
    AttributeError.
    """
    response = requests.get("https://www.coinbase.com/price/ethereum")
    soup = BeautifulSoup(response.text, "html.parser")
    price_node = soup.find(class_="ChartPriceHeader__BigAmount-sc-9ry7zl-4 dKeshi")
    return price_node.get_text()
if __name__ == "__main__":
    # Previously the function was called unconditionally at import time and
    # the result discarded; run only as a script and show the price.
    print(scrape_ether())
# coding: utf-8
from setuptools import setup, find_packages
# Packaging metadata for the thumbor_logdrain_metrics plugin; thumbor is the
# only runtime dependency.
setup(
    name='thumbor_logdrain_metrics',
    version="0.0.1",
    description='Thumbor Heroku Logdrain Metrics extensions',
    author='Peter Schröder',
    author_email='peter.schroeder@jimdo.com',
    zip_safe=False,
    include_package_data=True,
    packages=find_packages(),
    install_requires=['thumbor']
)
|
#!/bin/python3
import math
import os
import random
import re
import sys
from collections import Counter
# Given a collection of input strings, count the occurrences of
# each query string
# run from command line
# $ python3 sparse_arrays.py < sparse_arrays.data
# Discovered Counter from collections
# A dict subclass for counting hashable objects
# Complete the matchingStrings function below.
def matchingStrings(strings, queries):
    """Return, for each query string, how many times it occurs in `strings`.

    Counter(iterable) replaces the manual counting loop; absent keys read
    as 0, so unseen queries naturally map to 0.
    """
    occurrences = Counter(strings)
    return [occurrences[query] for query in queries]
if __name__ == '__main__':
    # Read the HackerRank input format from stdin: a count followed by that
    # many strings, then a count followed by that many queries.
    # fptr = open(os.environ['OUTPUT_PATH'], 'w')
    fptr = sys.stdout
    strings_count = int(input())
    strings = []
    for _ in range(strings_count):
        strings_item = input()
        strings.append(strings_item)
    queries_count = int(input())
    queries = []
    for _ in range(queries_count):
        queries_item = input()
        queries.append(queries_item)
    res = matchingStrings(strings, queries)
    fptr.write('\n'.join(map(str, res)))
    fptr.write('\n')
    # NOTE(review): when fptr is sys.stdout this closes stdout — harmless at
    # process exit, but surprising if more output were added later.
    fptr.close()
|
from django.forms import ModelForm
from django import forms
from .models import *
from ckeditor_uploader.widgets import CKEditorUploadingWidget
class TheoryTagForm(ModelForm):
    """ModelForm for Theory objects.

    userId is excluded from the form — presumably filled in server-side by
    the view; confirm against the view that uses this form.
    """
    class Meta:
        model=Theory
        exclude=["userId"]
#!/usr/bin/env python
import sys
import math
import collections
import heapq
import bz2
import gzip
import argparse
import logging
from functools import partial
import util
# Module logger: stream handler with timestamped records. The level is left
# at the default here and raised to DEBUG by --verbose in __main__.
logger = logging.getLogger("transform")
log_handler = logging.StreamHandler()
log_handler.setFormatter(logging.Formatter("%(asctime)s %(name)s %(levelname)-8s %(message)s"))
logger.addHandler(log_handler)
# general utilities
# ------------------------------------------------------------------------------
def log2(x):
    """Base-2 logarithm of x."""
    return math.log(x, 2)
# vectorspace stuff
# ------------------------------------------------------------------------------
class VectorSpace(object):
    """Lazily streamed sparse vector space of (target, context, value) triples.

    `files` are tab-separated text files with one `target<TAB>context<TAB>value`
    triple per line; blank lines and zero values are skipped. Transformations
    registered via `add_transformation` are generator functions applied, in
    registration order, on every iteration of the space.
    """
    def __init__(self, files):
        self.files = files
        # BUGFIX: this used to be a *class* attribute, so every VectorSpace
        # instance shared (and mutated) the same transformation list; make
        # it per-instance.
        self.transformations = []
    def __true_iter(self):
        # Re-read all files from the beginning each time the space is iterated.
        logger.info("Resetting all files.")
        for infile in self.files:
            infile.seek(0)
            for line in infile:
                line = line.strip()
                if not line:
                    continue
                target, context, value = line.split("\t")
                value = float(value)
                if value:
                    yield target, context, value
    def add_transformation(self, func):
        """Append a generator transformation applied on each iteration."""
        self.transformations.append(func)
    def __iter__(self):
        iterator = self.__true_iter()
        for func in self.transformations:
            iterator = func(iterator)
        return iterator
def count_masses(vectorspace):
    """One pass over the space: total mass plus per-target and per-context
    marginal masses. Returns (total_mass, targets, contexts)."""
    logger.info("Counting mass.")
    target_mass = collections.defaultdict(int)
    context_mass = collections.defaultdict(int)
    grand_total = 0
    for target, context, value in vectorspace:
        target_mass[target] += value
        context_mass[context] += value
        grand_total += value
    logger.info("Total mass: %f" % grand_total)
    return grand_total, target_mass, context_mass
def mutual_information(mode, counted_masses, vectorspace):
    """Generator yielding PMI- or LMI-transformed triples.

    mode: 'pmi' yields pointwise mutual information; 'lmi' weights PMI by the
    pair's relative frequency. `counted_masses` is the (total, targets,
    contexts) tuple from count_masses, computed in a first pass.
    """
    logger.info("Computing mutual information (%s)" % mode)
    # needs two passes, so you need to pass it the "same" vectorspace
    # twice.
    # PMI = log [ p(x,y)/(p(x)*p(y)) ]
    # = log p(x,y) - (log p(x) + log p(y))
    # = log [ c(x,y) / c(*,*) ] - { log [ c(x,*)/c(*,*) ] + log [ c(*,y)/c(*,*) ]
    # = log c(x,y) - log c(*,*) - { log c(x,*) - log c(*,*) + log c(*,y) - log c(*,*) }
    # = log c(x,y) - log c(*,*) - log c(x,*) + log c(*,*) - log c(*,y) + log c(*,*)
    # = log c(x,y) + log(*,*) - log c(x,*) - log c(*,y)
    total_mass, targets, contexts = counted_masses
    tm_log = log2(total_mass)
    processed_targets = set()
    for target, context, value in vectorspace:
        pmi = log2(value) + tm_log - log2(targets[target]) - log2(contexts[context])
        if mode == 'lmi':
            freq = value / total_mass
            transformed_value = freq * pmi
        elif mode == 'pmi':
            transformed_value = pmi
        else:
            raise ValueError("'%s' is not a valid mode for mutual_information." % mode)
        # Log progress once per distinct target.
        if target not in processed_targets:
            processed_targets.add(target)
            logger.info("Transforming '%s' (%d/%d)" % (target, len(processed_targets), len(targets)))
        yield target, context, transformed_value
def positive(vectorspace):
    """Drop every triple whose value is not strictly positive
    (e.g. negative PMI scores)."""
    logger.info("Removing nonpositive values.")
    return ((target, context, value)
            for target, context, value in vectorspace if value > 0)
def find_top(n, vectorspace):
    """Return the n context dimensions carrying the most total mass.

    BUGFIX: heapq.nlargest over a dict iterates its *keys*, so the original
    returned the n lexicographically largest context names; rank contexts by
    their accumulated mass instead.
    """
    logger.info("Keeping only the top %d dimensions." % n)
    total_mass, targets, contexts = count_masses(vectorspace)
    top_contexts = heapq.nlargest(n, contexts, key=contexts.get)
    return top_contexts
def keep_contexts(words, vectorspace):
    """Keep only triples whose context dimension is in `words`."""
    wanted = set(words)
    logger.info("Keeping dimensions: %s" % wanted)
    return ((target, context, value)
            for target, context, value in vectorspace if context in wanted)
def remove_contexts(words, vectorspace):
    """Drop triples whose context dimension is in `words` (e.g. stopwords)."""
    banned = set(words)
    logger.info("Removing dimensions: %s" % banned)
    return ((target, context, value)
            for target, context, value in vectorspace if context not in banned)
def output_pairs(outfile, vectorspace):
    """Write the space as 'target<TAB>context<TAB>value' lines, values with
    25 decimal places."""
    logger.info("Outputting as pairs.")
    for target, context, value in vectorspace:
        outfile.write("%s\t%s\t%.25f\n" % (target, context, value))
def norm1(counted_masses, vectorspace):
    """L1-normalise each target row: divide every value by that target's
    total mass (from count_masses)."""
    _, target_mass, _ = counted_masses
    return ((target, context, value / target_mass[target])
            for target, context, value in vectorspace)
def prob(vectorspace, total_mass):
    """Turn raw counts into probabilities by dividing by the total mass."""
    return ((target, context, value / total_mass)
            for target, context, value in vectorspace)
def neglogprob(vectorspace, total_mass):
    """Surprisal transform: value -> -log2(value / total_mass)."""
    log_total = log2(total_mass)
    return ((target, context, - log2(value) + log_total)
            for target, context, value in vectorspace)
def parse_args():
    """Build and parse the CLI.

    Returns an argparse.Namespace with: stopwords (word list or None),
    keepn (int or None), output (writable file, default stdout),
    outformat ('pairs'), transformation (list of method names),
    verbose (bool), input (list of opened files).
    """
    # this is a complicated system for argument parsing, let's go.
    parser = argparse.ArgumentParser(description="Transform a tab separated, pairs vectorspace")
    # allow input from stdin or a file (compressed or otherwise)
    parser.add_argument("--stopwords", "-s", type=util.readfile, metavar="STOPFILE",
                        help="Removes context which appear in the list of stopwords.")
    parser.add_argument("--keepn", "-n", type=int, metavar="N",
                        help="Keeps only the top N contexts.")
    parser.add_argument("--output", "-o", type=argparse.FileType("w"), default=sys.stdout,
                        help="Output to the given filename." )
    parser.add_argument("--outformat", "-O", metavar="OUTPUT FORMAT",
                        default="pairs", choices=["pairs"], #, "stripes", "dense", "contexts"],
                        help="Output format. Currently only 'pairs' is supported.")
    parser.add_argument("transformation", metavar="METHOD", nargs="+",
                        choices=["nop", "pmi", "lmi", "tfidf", "prob", "neglogprob", "norm1", "positive"],
                        help="Transformation method. Possible values are: %(choices)s.")
    parser.add_argument("--verbose", "-v", action="store_true",
                        help="Show logger information.")
    parser.add_argument("--input", "-i", action="append", type=util.openfile, metavar="FILE",
                        help=("The input vector space. Multiple files may be specified with "
                              "multiple -i's, but target-contexts are assumed to be unique."))
    return parser.parse_args()
def main(args):
    """Wire up the transformation pipeline from parsed CLI args and stream
    the result to the chosen output.

    Each transformation that needs marginal counts triggers a fresh pass
    over the (already partially transformed) space via count_masses.
    """
    # load up the vectorspace
    vectorspace = VectorSpace(args.input)
    # filter stopwords
    if args.stopwords:
        vectorspace.add_transformation(partial(remove_contexts, args.stopwords))
    if args.keepn:
        top_dimensions = find_top(args.keepn, vectorspace)
        vectorspace.add_transformation(partial(keep_contexts, top_dimensions))
    for transformation in args.transformation:
        if transformation == "nop":
            continue
        # otherwise, we require context and target counts. compute them.
        counted_masses = count_masses(vectorspace)
        if transformation == "lmi" or transformation == "pmi":
            vectorspace.add_transformation(partial(mutual_information, transformation, counted_masses))
        elif transformation == "norm1":
            vectorspace.add_transformation(partial(norm1, counted_masses))
        elif transformation == "prob":
            vectorspace.add_transformation(partial(prob, total_mass=counted_masses[0]))
        elif transformation == "neglogprob":
            vectorspace.add_transformation(partial(neglogprob, total_mass=counted_masses[0]))
        elif transformation == "positive":
            vectorspace.add_transformation(positive)
        else:
            # 'tfidf' is accepted by argparse but not implemented here.
            raise NotImplementedError("Transformation '%s' not supported yet." % transformation)
    if args.outformat == 'pairs':
        output_pairs(args.output, vectorspace)
    else:
        raise NotImplementedError("Can't output as %s" % args.outformat)
if __name__ == '__main__':
    # Parse the CLI, optionally raise log verbosity, then run the pipeline.
    args = parse_args()
    if args.verbose:
        logger.setLevel(logging.DEBUG)
        logger.info("Verbose mode enabled..")
    logger.info("Command line options: %s" % args)
    main(args)
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2019-03-07 15:49
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: adds two nullable cumulative fields to the
    progress_analyzer models.

    NOTE: produced by `makemigrations`; avoid hand-editing beyond comments.
    """
    dependencies = [
        ('progress_analyzer', '0025_auto_20190301_1025'),
    ]
    operations = [
        migrations.AddField(
            model_name='alcoholcumulative',
            name='cum_alcohol_drink_consumed',
            field=models.FloatField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='metacumulative',
            name='cum_inputs_reported_days_count',
            field=models.IntegerField(blank=True, null=True),
        ),
    ]
|
def zero_fuel(d, m, f):
    """Return True when a car `d` units from the pump can reach it.

    Range is miles-per-gallon `m` times fuel left `f`; the comparison is
    already a boolean, so the `True if ... else False` wrapper was redundant.
    """
    return m * f >= d
|
import json, requests, polyline
import pandas as pd
import numpy as np
# import databaseAPI
from multiprocessing import Pool
import sys
def fetch_route(start="State+College,PA", end="New+York,NY"):
    """Query the Google Directions API and return the decoded JSON response.

    Spaces in the endpoint names are converted to '+' for the query string.
    SECURITY NOTE(review): the API key is hard-coded below — move it to
    configuration/environment before publishing this code.
    """
    url = "https://maps.googleapis.com/maps/api/directions/json"
    params = dict(
        origin=start.replace(" ", "+"),
        destination=end.replace(" ", "+"),
        waypoints='',
        sensor='false',
        key = 'AIzaSyC-1ZUi1jWmmoVd98MNuFsBBkM_tTykrDs'
    )
    resp = requests.get(url=url, params=params)
    data = json.loads(resp.text)
    return data
def fetch_locationinfo(point):
    """Resolve a (lat, lon) tuple to an AccuWeather location record (JSON)."""
    lat, lon = point
    url = "http://dataservice.accuweather.com/locations/v1/cities/geoposition/search?" \
          "apikey=HackPSU2017&q=%s%%2C%s&language=en-us" % (lat, lon)
    resp = requests.get(url=url)
    data = json.loads(resp.text)
    return data
def get_locationkey(locationinfo):
    """Extract the AccuWeather location key from a geoposition response."""
    return locationinfo["Key"]
def get_locationname(locationinfo):
    """Extract the English city name from a geoposition response."""
    return locationinfo["EnglishName"]
def fetch_weather(key):
    """Fetch the 12-hour hourly forecast (JSON list) for a location key."""
    url = "http://dataservice.accuweather.com/forecasts/v1/hourly/12hour/%s?apikey=HackPSU2017&detail=true" % key
    resp = requests.get(url=url)
    data = json.loads(resp.text)
    return data
def decode(string):
    """Decode an encoded Google polyline string into (lat, lon) tuples."""
    return polyline.decode(string)
def parse(json):
    """Total driving time in seconds over all legs of the first route.

    NOTE: the parameter name shadows the `json` module inside this function;
    kept for caller compatibility.
    """
    legs = json['routes'][0]['legs']
    return sum(leg['duration']['value'] for leg in legs)
def get_polyline(json):
    """Return the overview_polyline object of the first route."""
    return json['routes'][0]['overview_polyline']
def remove_duplicate(list):
    """Return `list` with duplicates removed, keeping first occurrences.

    NOTE: the parameter name shadows the builtin `list`; kept for caller
    compatibility.
    """
    seen = set()
    result = []
    for item in list:
        if item not in seen:
            seen.add(item)
            result.append(item)
    return result
def fetch_coords(start, end):
    """Build a DataFrame of route points with location info and time offsets.

    Fetches the route, decodes its polyline, resolves each point to an
    AccuWeather location (8 worker processes), and interpolates an integer
    hour offset from trip start for every point.
    Columns: lat, lon, Key, EnglishName, time_offset.
    """
    route = fetch_route(start, end)
    poly = get_polyline(route)['points']
    fullpoints = decode(poly)
    pool = Pool(8)
    locations = pool.map(fetch_locationinfo, fullpoints)
    df_name = pd.DataFrame(locations)[['Key', 'EnglishName']]
    df = pd.DataFrame(fullpoints)
    df.columns = ('lat', 'lon')
    df_full = pd.concat([df, df_name], axis = 1)
    # First point at offset 0, last at total trip time; interpolate between.
    time_series = pd.Series([np.nan for x in range(len(df_full))])
    time_series[0] = 0
    totaltime = parse(route)
    # Clamp nonpositive durations to zero.
    if not totaltime > 0:
        totaltime = 0
    time_series[len(time_series) - 1] = totaltime
    time_series = time_series/3600
    time_series = time_series.interpolate()
    time_series = time_series.apply(int)
    df_full['time_offset'] = time_series
    return df_full
def fetch_city_weather(df_full):
    """Fetch hourly weather for each distinct city on the route.

    Deduplicates by location Key, pulls the 12-hour forecast per city (8
    worker processes), selects the forecast hour matching the city's
    time_offset (offsets outside 0..11 fall back to hour 0), and appends
    DateTime/temperature/precipitation columns.
    """
    df_nondup = df_full.drop_duplicates(subset= 'Key')
    df_nondup.reset_index(inplace=True)
    keys = list(df_nondup['Key'])
    pool = Pool(8)
    weathers = pool.map(fetch_weather, keys)
    offsets = list(df_nondup['time_offset'])
    hourweathers = []
    for (offset, weather) in zip(offsets, weathers):
        if (offset < 0) or (offset > 11):
            offset = 0
        hourweathers.append(weather[offset])
    df_weather = pd.DataFrame(hourweathers)
    df_weather = df_weather[['DateTime', 'EpochDateTime', 'IconPhrase', 'PrecipitationProbability', 'Temperature', 'WeatherIcon']]
    # print(df_weather)
    # Temperature comes as {'Value': ..., 'Unit': ...}; keep the integer value.
    df_weather['Temperature'] = [int(x['Value']) for x in df_weather['Temperature']]
    # AccuWeather icon codes that indicate some form of precipitation.
    precip = [12,13,14,15,16,17,18,19,20,21,22,23,25,26,29,39,40,41,42,43,44]
    df_weather['Precipitation'] = (df_weather['WeatherIcon'].isin(precip))
    df_nondup = pd.concat([df_nondup, df_weather], axis=1)
    # print (df_nondup)
    return df_nondup
def fetch_images_info(key):
    """Fetch the radar/satellite imagery metadata (JSON) for a location key."""
    url = "http://dataservice.accuweather.com/imagery/v1/maps/radsat/1024x1024/%s?apikey=HackPSU2017&detail=true" % key
    resp = requests.get(url=url)
    data = json.loads(resp.text)
    return data
def get_images_url(json):
    """List the radar image URLs contained in an imagery API response."""
    return [image['Url'] for image in json['Radar']['Images']]
def first_rain(df):
    """Flag the start of each rain spell along the route.

    A row is flagged True when it has Precipitation and no True flag was
    emitted within the previous 10 rows; rows inside an ongoing spell get
    False. Returns a single-column boolean DataFrame aligned with `df`.
    NOTE(review): the suppression condition also requires some
    Precipitation in the trailing window — confirm the intended spell
    semantics against drive()/main().
    """
    res = []
    for index, row in df.iterrows():
        lower_bound = max(0, index-10)
        if (df.loc[lower_bound:index]['Precipitation']).any() and (True in res[lower_bound:index]):
            res.append(False)
        elif row['Precipitation']:
            res.append(True)
        else:
            res.append(False)
    return pd.DataFrame(res)
def drive(start, end):
    """Fetch route + per-city weather and add the RainAlert column.

    Returns (df_full, df_nondup): all route points, and the deduplicated
    per-city frame with weather and rain-alert flags.
    """
    df_full = fetch_coords(start, end)
    df_nondup = fetch_city_weather(df_full)
    df_nondup['RainAlert'] = first_rain(df_nondup)
    return df_full, df_nondup
def main(start, end):
    """End-to-end pipeline: route, weather, rain alerts, radar image URLs.

    Returns the database instance key — currently always "" because the
    dump() call is commented out.
    """
    instance_key = ""
    df_full = fetch_coords(start, end)
    df_nondup = fetch_city_weather(df_full)
    df_nondup['RainAlert'] = first_rain(df_nondup)
    # Radar imagery for the first point on the route (currently unused).
    urls = get_images_url(fetch_images_info(df_full.loc[0]['Key']))
    # print(urls)
    # instance_key = dump(start, end, df_full, df_nondup, urls)
    return instance_key
def dump(start, end, df_full, df_nondup, urls):
    """Persist the trip (endpoints, per-city weather rows, first radar URL).

    NOTE(review): `databaseAPI` is only imported in a commented-out line at
    the top of this file, so calling dump() as-is raises NameError —
    re-enable that import before use.
    """
    startloc = df_full.loc[0][['lat', 'lon']]
    endloc = df_full.loc[len(df_full)-1][['lat', 'lon']]
    df = df_nondup[['lat', 'lon', 'EnglishName', 'Temperature', 'DateTime', 'PrecipitationProbability', 'WeatherIcon', 'Precipitation', 'RainAlert']]
    # Booleans stored as 0/1 integers.
    df['Precipitation'] = df['Precipitation'].astype(int)
    df['RainAlert'] = df['RainAlert'].astype(int)
    table = []
    for i, row in df.iterrows():
        table.append(list(row))
    # print(table)
    instance_key = databaseAPI.insert(start, startloc['lat'], startloc['lon'],
                                      end, endloc['lat'], endloc['lon'], urls[0], table)
    return instance_key
if __name__ == "__main__":
    # CLI: python script.py <start> <end>; empty arguments fall back to
    # default endpoints.
    start = sys.argv[1]
    end = sys.argv[2]
    if len(start) == 0:
        start = "State College,PA"
    if len(end) == 0:
        end = "New York"
    print(main(start, end))
    sys.stdout.flush()
from django.contrib import admin
# Register your models here.
from .models import SixJars
# Expose SixJars in the Django admin with the default ModelAdmin options.
admin.site.register(SixJars)
import argparse
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import torchvision
from torchvision import transforms
# from models import *
import numpy as np
import attack_generator as attack
import os
class ResidualBlock(nn.Module):
    """Basic ResNet block: two 3x3 conv+BN stages with an identity (or 1x1
    projection) shortcut, followed by ReLU.

    Attribute names `left`/`shortcut` are kept so saved state_dicts load.
    """
    def __init__(self, inchannel, outchannel, stride=1):
        super(ResidualBlock, self).__init__()
        self.left = nn.Sequential(
            nn.Conv2d(inchannel, outchannel, kernel_size=3, stride=stride, padding=1, bias=False),
            nn.BatchNorm2d(outchannel),
            nn.ReLU(inplace=True),
            nn.Conv2d(outchannel, outchannel, kernel_size=3, stride=1, padding=1, bias=False),
            nn.BatchNorm2d(outchannel)
        )
        # A 1x1 projection is only needed when the spatial size or channel
        # count changes; otherwise the shortcut is the identity.
        if stride != 1 or inchannel != outchannel:
            projection = nn.Sequential(
                nn.Conv2d(inchannel, outchannel, kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(outchannel)
            )
        else:
            projection = nn.Sequential()
        self.shortcut = projection
    def forward(self, x):
        main_path = self.left(x)
        residual = self.shortcut(x)
        return F.relu(main_path + residual)
class ResNet(nn.Module):
    """CIFAR-style ResNet: a 3x3 stem, four residual stages (64/128/256/512
    channels, stages 2-4 downsample), 4x4 average pool, and a linear head.

    Attribute names (conv1, layer1..layer4, fc) are kept so saved
    state_dicts load unchanged.
    """
    def __init__(self, ResidualBlock, num_blocks, num_classes=10):
        super(ResNet, self).__init__()
        self.inchannel = 64
        self.conv1 = nn.Sequential(
            nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False),
            nn.BatchNorm2d(64),
            nn.ReLU(),
        )
        self.layer1 = self.make_layer(ResidualBlock, 64, num_blocks[0], stride=1)
        self.layer2 = self.make_layer(ResidualBlock, 128, num_blocks[1], stride=2)
        self.layer3 = self.make_layer(ResidualBlock, 256, num_blocks[2], stride=2)
        self.layer4 = self.make_layer(ResidualBlock, 512, num_blocks[3], stride=2)
        self.fc = nn.Linear(512, num_classes)
    def make_layer(self, block, channels, num_blocks, stride):
        """Stack `num_blocks` blocks; only the first may downsample."""
        stage = []
        for block_stride in [stride] + [1] * (num_blocks - 1):
            stage.append(block(self.inchannel, channels, block_stride))
            self.inchannel = channels
        return nn.Sequential(*stage)
    def forward(self, x):
        out = self.conv1(x)
        for layer in (self.layer1, self.layer2, self.layer3, self.layer4):
            out = layer(out)
        out = F.avg_pool2d(out, 4)
        out = out.view(out.size(0), -1)
        return self.fc(out)
def ResNet18():
    """Build an 18-layer ResNet (four stages of 2 residual blocks each)."""
    return ResNet(ResidualBlock, [2,2,2,2])
def ResNet34():
    """Build a 34-layer ResNet (stage depths 3/4/6/3)."""
    return ResNet(ResidualBlock, [3,4,6,3])
# CLI for the white-box attack evaluation script.
parser = argparse.ArgumentParser(description='PyTorch White-box Adversarial Attack Test')
parser.add_argument('--net', type=str, default="resnet18", help="decide which network to use,choose from resnet18, resnet34")
parser.add_argument('--dataset', type=str, default="cifar10", help="choose from cifar10,svhn")
parser.add_argument('--drop_rate', type=float,default=0.0, help='WRN drop rate')
parser.add_argument('--attack_method', type=str,default="dat", help = "choose form: dat and trades")
parser.add_argument('--model_path', default='./Res18_model/net_150.pth', help='model for white-box attack evaluation')
parser.add_argument('--method',type=str,default='dat',help='select attack setting following DAT or TRADES')
args = parser.parse_args()
# No normalisation: attacks operate directly in [0, 1] pixel space.
transform_test = transforms.Compose([transforms.ToTensor(),])
print('==> Load Test Data')
if args.dataset == "cifar10":
    testset = torchvision.datasets.CIFAR10(root='./data', train=False, download=True, transform=transform_test)
    test_loader = torch.utils.data.DataLoader(testset, batch_size=128, shuffle=False, num_workers=0)
if args.dataset == "svhn":
    testset = torchvision.datasets.SVHN(root='./data', split='test', download=True, transform=transform_test)
    test_loader = torch.utils.data.DataLoader(testset, batch_size=128, shuffle=False, num_workers=0)
print('==> Load Model')
if args.net == "resnet18":
    model = ResNet18().cuda()
    net = "resnet18"
if args.net == "resnet34":
    model = ResNet34().cuda()
    net = "resnet34"
# Load pretrained weights and freeze into eval mode for attack generation.
ckpt = torch.load(args.model_path)
model.load_state_dict(ckpt)
model.eval()
print('==> Generate adversarial sample')
PATH_DATA = './adv/Adv_data/cifar10/RN18'
# Create the output directory before the expensive PGD run so a path error
# cannot waste the whole generation; exist_ok makes re-runs idempotent
# (bare os.makedirs raises FileExistsError the second time).
os.makedirs(PATH_DATA, exist_ok=True)
# PGD-20 attack, L-inf epsilon 8/255, step size epsilon/10, cross-entropy loss.
X_adv = attack.adv_generate(model, test_loader, perturb_steps=20, epsilon=8./255, step_size=8./255 / 10, loss_fn="cent", category="Madry", rand_init=True)
np.save(os.path.join(PATH_DATA, 'Adv_cifar_PGD20_eps8.npy'), X_adv)
|
from collections import Counter
# Advent of Code 2020 day 21, part 1: count occurrences of ingredients that
# cannot contain any listed allergen.
with open('data/21.txt') as f:  # `with` closes the file (was left open); `entries` no longer shadows builtin `input`
    entries = [line.split(' (contains ') for line in f.read().split('\n')]
allergens = {}
cnt = Counter()
for ing_part, allergen_part in entries:
    ingredients = set(ing_part.split())
    for allergen in allergen_part[:-1].split(', '):
        if allergen in allergens:
            # Candidate ingredients for an allergen must appear in every
            # recipe that lists it.
            allergens[allergen] &= ingredients
        else:
            # BUGFIX: store a copy — previously two allergens on the same
            # line aliased one set object, so `&=` on one silently narrowed
            # the other.
            allergens[allergen] = set(ingredients)
    cnt.update(ingredients)
# Remove every ingredient still suspected of carrying some allergen; pop
# with a default is a no-op when it was already removed via another allergen.
for candidates in allergens.values():
    for ingredient in candidates:
        cnt.pop(ingredient, None)
print(sum(cnt.values()))
|
import unittest
from katas.kyu_8.return_negative import make_negative
class MakeNegativeTestCase(unittest.TestCase):
    """make_negative must return the non-positive version of its input."""
    def test_equals(self):
        # a positive number becomes negative
        self.assertEqual(make_negative(42), -42)
    def test_equals_2(self):
        # an already-negative number is unchanged
        self.assertEqual(make_negative(-9), -9)
    def test_equals_3(self):
        # zero stays zero
        self.assertEqual(make_negative(0), 0)
|
# -*- coding: utf-8 -*-
"""
Created on Mon Oct 17 15:38:02 2016
@author: imchugh
"""
import os
import pandas as pd
import datetime as dt
import numpy as np
import pdb
from scipy.optimize import curve_fit
import DataIO as io
import datetime_functions as dtf
def LRF_2(data_d, alpha, beta, theta, gamma_0, gamma_1):
    """Light-response curve with temperature-dependent respiration.

    NEE = GPP(PAR; alpha, beta, theta) - ER(Ts; gamma_0, gamma_1), where GPP
    is the smaller root of the non-rectangular hyperbola and ER grows
    exponentially with soil temperature. `data_d` must provide 'PAR' and 'Ts'.
    """
    par = data_d['PAR']
    linear = alpha * par + beta
    discriminant = linear ** 2 - 4 * alpha * beta * theta * par
    nee = (1 / (2 * theta) * (linear - np.sqrt(discriminant)) -
           gamma_0 * np.exp(data_d['Ts'] * gamma_1))
    return nee
def LRF(data_d, alpha, beta, gamma):
    """Light-response curve with a constant respiration offset `gamma`.

    `data_d` must provide 'PAR'; the 2000 in the denominator is the PAR
    value at which the saturation correction term reaches 1.
    """
    par = data_d['PAR']
    nee = (alpha * par) / (1 - (par / 2000) + (alpha * par / beta)) + gamma
    return nee
# QC thresholds: minimum friction velocity and the Fsd level separating
# day from night records.
ustar_threshold = 0.1
noct_threshold = 10
# Load the 2015 and 2016 processed OzFlux files for Gatum Pasture.
path1 = '/home/ian/OzFlux/Sites/GatumPasture/Data/Processed/2015/'
name1 = 'GatumPasture_2015_L4.nc'
f_name1 = os.path.join(path1, name1)
df1 = io.OzFluxQCnc_to_data_structure(f_name1,
                                      var_list=['Fc', 'Ta', 'Ts', 'Fsd', 'Sws',
                                                'ustar'],
                                      output_structure='pandas')
path2 = '/home/ian/OzFlux/Sites/GatumPasture/Data/Processed/2016/'
name2 = 'GatumPasture_2016_L4.nc'
f_name2 = os.path.join(path2, name2)
df2 = io.OzFluxQCnc_to_data_structure(f_name2,
                                      var_list=['Fc', 'Ta', 'Ts', 'Fsd', 'Sws',
                                                'ustar'],
                                      output_structure='pandas')
# Unit conversion for the 2016 Fc series (factor 1000/44 — presumably
# mg CO2 to umol; confirm against the source files).
df2.Fc = df2.Fc * 1000 / 44
df = pd.concat([df1, df2])
#df = df1
# Reindex onto a continuous 30-minute grid spanning both years.
start_date = dt.datetime(df1.index[0].year, 1, 1)
end_date = dt.datetime(df2.index[-1].year, 3, 31, 23, 30)
new_index = pd.date_range(start_date, end_date, freq='30T')
df = df.reindex(new_index)
data_dict = {this_col: np.array(df[this_col]) for this_col in df.columns}
data_dict['date_time'] = np.array([dt.datetime(this_date.year, this_date.month,
                                               this_date.day, this_date.hour,
                                               this_date.minute) for this_date in df.index])
# PAR from shortwave-down: 0.46 PAR fraction x 4.6 umol/J.
data_dict['PAR'] = data_dict['Fsd'] * 0.46 * 4.6
# 3-day moving windows stepped by 1 day, keyed by window datetime.
window_dict = dtf.get_moving_window(data_dict, 'date_time', 3, 1)
# NOTE(review): dict.keys() followed by .sort() is Python-2-only (keys()
# returns a view on Python 3); this file also uses py2 except/print syntax.
sorted_datetime = window_dict.keys()
sorted_datetime.sort()
vars_list = [var for var in data_dict.keys() if not var == 'date_time']
# Per-window result accumulators (several are currently unused).
alpha_list = []
beta_list = []
gamma_list = []
dates_list = []
theta_list = []
Amax_list = []
gamma0_list = []
gamma1_list = []
# NaN-filled template array; one fitted value per window and parameter.
dummy_arr = np.empty(len(sorted_datetime))
dummy_arr[:] = np.nan
params_list = ['alpha', 'beta', 'gamma']
results_dict = {var: dummy_arr.copy() for var in params_list}
# Fit the light-response curve in each moving window with enough valid
# daytime, high-turbulence records.
for i, datetime in enumerate(sorted_datetime):
    this_dict = window_dict[datetime]
    total_recs = len(this_dict['Fc'])
    # Build per-variable validity masks plus the ustar and daytime filters.
    nan_list = []
    for var in vars_list:
        nan_list.append(~np.isnan(this_dict[var]))
    nan_list.append(np.array(this_dict['ustar'] > ustar_threshold))
    nan_list.append(np.array(this_dict['Fsd'] > noct_threshold))
    all_nan_array = np.tile(True, len(this_dict[var]))
    for l in nan_list:
        all_nan_array = all_nan_array & l
    # NOTE(review): after this loop `l` is only the LAST mask (Fsd filter),
    # yet it is used below for avail_recs and for masking the fit data;
    # the combined `all_nan_array` is computed but never used — confirm
    # whether the combined mask was intended.
    avail_recs = len(l[l])
    pct_avail_recs = np.round(float(avail_recs) / total_recs * 100, 1)
    if pct_avail_recs > 20:
        driver_dict = {var: this_dict[var][l] for var in ['PAR', 'Ts']}
        response_arr = this_dict['Fc'][l]
        p0 = [-0.1, -10, 1]
        try:
            params, cov = curve_fit(LRF, driver_dict, response_arr,
                                    p0 = p0)
        except Exception, e:
            print 'Fail!'
            continue
        # Reject implausible beta values or fits stuck at the initial guess.
        if params[1] > 100 or params[1] < -100 or np.all(p0==np.array(params)):
            print 'Aaaaargggghhhh!'
        else:
            for j, var in enumerate(params_list):
                results_dict[var][i] = params[j]
#    try:
#        params, cov = curve_fit(LRF_2, driver_dict, response_arr * -1,
#                                p0 = [0.01, 10, 1, 1, 1])
#    except Exception, e:
#        print e
#        continue
#
#    if not params[1] > 100:
#        alpha_list.append(params[0])
#        beta_list.append(params[1])
#        theta_list.append(params[2])
#        gamma0_list.append(params[3])
#        gamma1_list.append(params[4])
#        dates_list.append(datetime)
    print ('For {0}, {1}% of all records were available'
           .format(dt.datetime.strftime(datetime, '%Y-%m-%d'),
                   str(pct_avail_recs)))
# Winter subset used for a (currently commented-out) single bulk fit.
sub_df = df.loc['2015-06-30':'2015-08-30']
sub_df['PAR'] = sub_df.Fsd * 4.6 * 0.46
sub_df.dropna(inplace=True)
#params, cov = curve_fit(LRF_2, sub_df, sub_df['Fc'] * -1,
#                        p0 = [0.01, 10, 1, 1, 1])
|
#coding:utf-8
# Paint tool: draw filled rectangles or circles on a canvas with the mouse.
import cv2 as cv
import numpy as np
drawing = False  # True while the left mouse button is held down
mode = True      # True -> draw rectangles, False -> draw circles ('m' toggles)
ix,iy = -1,-1    # canvas position where the current drag started
# Mouse-click callback function
def draw_circle(event, x, y, flags, param):
    """Mouse callback: drag with the left button to paint on the global img.

    While the button is held, a filled green rectangle (mode == True) or
    red circles (mode == False) follow the cursor; releasing the button
    draws the final shape and ends the drag.
    """
    global ix, iy, drawing, mode
    if event == cv.EVENT_LBUTTONDOWN:
        drawing = True
        ix, iy = x, y
    elif event == cv.EVENT_MOUSEMOVE:
        if drawing == True:
            print(f'drawing:{drawing}+mode:{mode}')
            if mode == True:
                cv.rectangle(img, (ix, iy), (x, y), (0, 255, 0), -1)
            else:
                cv.circle(img, (x, y), 5, (0, 0, 255), -1)
    elif event == cv.EVENT_LBUTTONUP:
        # BUG FIX: this branch tested EVENT_LBUTTONDOWN again, which made it
        # unreachable (the first branch always matched); button release is
        # EVENT_LBUTTONUP.
        drawing = False  # BUG FIX: was `drawing == False`, a no-op comparison
        print(f'drawing:{drawing}+mode:{mode}')
        if mode == True:
            cv.rectangle(img, (ix, iy), (x, y), (0, 255, 0), -1)
        else:
            cv.circle(img, (x, y), 5, (0, 0, 255), -1)
# Blank 512x512 BGR canvas; the mouse callback above paints into it.
img = np.zeros((512, 512, 3), np.uint8)
cv.namedWindow('image')
cv.setMouseCallback('image', draw_circle)
while True:
    cv.imshow('image', img)
    key = cv.waitKey(1) & 0xFF
    if key == ord('m'):
        # toggle between rectangle and circle drawing
        mode = not mode
    elif key == 27:
        # ESC quits the tool
        break
cv.destroyAllWindows()
|
"""
Simple Flask Backend.

Accepts image uploads over HTTPS, converts them to grayscale and serves
the converted files back from the uploads folder.
"""
from flask import Flask, request, send_from_directory, redirect
#from flask_cors import CORS
from werkzeug.utils import secure_filename
import os
# instantiate the app and load configuration
app = Flask(__name__)
app.config.from_pyfile("config.py")
# enable CORS
#CORS(app, resources={r"/*": {"origins":app.config["CORS_ORIGINS"]}})
# only allow https (always redirect)
@app.before_request
def before_request():
    """Permanently redirect any plain-HTTP request to its HTTPS equivalent."""
    if request.is_secure:
        return None
    secure_url = request.url.replace('http://', 'https://', 1)
    return redirect(secure_url, code=301)
# check if stream is indeed an image
import imghdr
def validate_image(stream):
    """Sniff the image format from the first 512 bytes of *stream*.

    The stream is rewound afterwards. Returns the normalized extension
    (".jpg" for JPEG, "." + format otherwise) or None when the header is
    not a recognized image format.
    """
    header = stream.read(512)
    stream.seek(0)
    detected = imghdr.what(None, header)
    if not detected:
        return None
    return ".jpg" if detected == "jpeg" else "." + detected
# convert image to sw (black-and-white / grayscale)
from PIL import Image
def convert2sw(path):
    """Create a grayscale ("sw", schwarz-weiss) copy of the image at *path*.

    The copy is saved next to the original with "_sw" inserted before the
    file extension. Returns 0 on success, 1 on any failure (callers test
    the result for truthiness).
    """
    try:
        image_file = Image.open(path)
        image_file = image_file.convert("L")  # "L" = 8-bit grayscale
        idxLastDot = path.rfind(".")
        newPath = path[:idxLastDot] + "_sw" + path[idxLastDot:]
        image_file.save(newPath)
    except Exception:
        # BUG FIX: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit. Any conversion error still maps
        # to the error code 1 the callers expect.
        return 1
    return 0
# payloads size exceeds MAX_CONTENT_LENGTH
@app.errorhandler(413)
def too_large(e):
    """413 handler: the request body exceeded MAX_CONTENT_LENGTH."""
    message = "Payload is too large"
    return message, 413
# accept multiple files for uploading
@app.route("/", methods=["POST"])
def upload_files():
    """
    Since we are handling multiple files at the same time
    we return the status for each file separately
    {"status":"ok / error", "message":"Link2ConvertedFile / Reason"}
    reasons: file not an image, ...
    """
    status = {}
    readyForProcessing = []
    # iterate over sent files and save them
    for filename in list(request.files):
        # make filename secure (also rebinds the loop variable to the
        # sanitized name of the file object, which is used from here on)
        file = request.files[filename]
        filename = secure_filename(file.filename)
        # Has a file been selected? (ignore if not)
        if filename != "":
            # check file size by seeking to the end of the stream
            file.seek(0, os.SEEK_END)
            if file.tell() > app.config["MAX_IMG_SIZE"]:
                status[filename] = {"status":"error", "message":"Datei ist zu groß."}
                continue
            file.seek(0)
            # Is file an image? The extension must be allowed AND match
            # the format sniffed from the actual bytes.
            file_ext = os.path.splitext(filename)[1]
            file_ext = ".jpg" if file_ext == ".jpeg" else file_ext
            if file_ext not in app.config["ALLOWED_EXTENSIONS"] or file_ext != validate_image(file.stream):
                status[filename] = {"status":"error", "message":"Dateiformat ist nicht zulässig."}
                continue
            # save file into the upload folder
            temp_path = os.path.join(app.config["UPLOAD_PATH"], filename)
            file.save(temp_path)
            readyForProcessing.append({"filename":filename, "path":temp_path})
    # produce grayscale images for every successfully saved upload
    for ele in readyForProcessing:
        if convert2sw(ele["path"]):
            # ERROR: conversion failed
            status[ele["filename"]] = {"status":"error", "message":"Fehler bei der Umwandlung."}
        else:
            # SUCCESS: build the download link for the "_sw" variant.
            # NOTE(review): assumes request.base_url ends with "/" (true for
            # the "/" route) — verify if this handler is ever remounted.
            idxLastDot = ele["filename"].rfind(".")
            downloadLink = request.base_url+"uploads/"+ele["filename"][:idxLastDot]+str("_sw")+ele["filename"][idxLastDot:]
            status[ele["filename"]] = {"status":"ok", "message":downloadLink}
    return status, 200
# provide images from folder uploads
@app.route("/uploads/<filename>")
def upload(filename):
    """Serve a previously uploaded or converted file, or 404 if absent."""
    target = app.config["UPLOAD_PATH"] + "/" + filename
    if not os.path.isfile(target):
        return "File does not exist", 404
    return send_from_directory(app.config["UPLOAD_PATH"], filename), 200
# serve frontend
#
## Achtung: Only temporarily. Use Apache2 or nginx to serve static content.
#
@app.route("/")
def index():
    """Serve the frontend entry point (temporary static hosting)."""
    frontend_dir = app.config["FRONTEND_PATH"]
    return send_from_directory(frontend_dir, "index.html"), 200
@app.route("/<path:path>")
def serve_ressource(path):
    """Serve any other frontend asset (temporary static hosting)."""
    frontend_dir = app.config["FRONTEND_PATH"]
    return send_from_directory(frontend_dir, path), 200
#!/usr/bin/python3.4
# -*-coding:Utf-8 -
def afficher(*values, sep=' ', end='\n'):
    """Re-implementation of print(): format *values* and emit them.

    Each value is converted with str(), the pieces are joined with *sep*
    and *end* is appended; the resulting single string is handed to
    print() with end='' so nothing extra is added.

    BUG FIX: the original mutated the *values* tuple inside a loop that
    shadowed its own iterable (TypeError), referenced an undefined name
    `fin`, and never called print() as its contract required.
    """
    chaine = sep.join(str(value) for value in values) + end
    print(chaine, end='')
|
# Copyright 2015 datawire. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import versioneer
# Directory containing this setup.py; used to resolve files below.
ROOT_DIR = os.path.dirname(__file__)
from setuptools import setup
# Execute forge/_metadata.py and collect its module-level names
# (__title__, __summary__, __author__, ...) into the metadata dict.
metadata = {}
with open(os.path.join(ROOT_DIR, "forge/_metadata.py")) as fp:
    exec(fp.read(), metadata)
# Parse requirements.txt, skipping blank lines and '#' comments.
with open(os.path.join(ROOT_DIR, "requirements.txt")) as fp:
    install_requirements = [i.strip() for i in list(fp)
                            if i.strip() and not i.strip().startswith("#")]
def recursive_hack(dir):
    """Return glob patterns matching every depth from 1 to 9 below *dir*."""
    patterns = []
    for depth in range(1, 10):
        patterns.append(os.path.join(dir, *(['*'] * depth)))
    return patterns
# Package definition: name/description/author come from forge/_metadata.py,
# the version and build commands from versioneer.
setup(name=metadata["__title__"],
      version=versioneer.get_version(),
      cmdclass=versioneer.get_cmdclass(),
      description=metadata["__summary__"],
      author=metadata["__author__"],
      author_email=metadata["__email__"],
      url=metadata["__uri__"],
      license=metadata["__license__"],
      packages=['forge'],
      include_package_data=True,
      install_requires=install_requirements,
      entry_points={"console_scripts": ["forge = forge.cli:call_main"]},
      keywords=['Deployment', 'Kubernetes', 'service', 'microservice'],
      classifiers=[
          'Development Status :: 3 - Alpha',
          'Environment :: Console',
          'Intended Audience :: Developers',
          'License :: OSI Approved :: Apache Software License',
          'Operating System :: MacOS',
          'Operating System :: OS Independent',
          'Operating System :: POSIX',
          'Topic :: Software Development'
      ]
      )
|
from flask import Flask, render_template, request, jsonify, redirect, url_for, flash, session
import json, gc
from functools import wraps
from passlib.handlers.sha2_crypt import sha256_crypt
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy import func
from forex_python.converter import CurrencyRates, CurrencyCodes
import datetime
from sqlalchemy.ext.declarative import declarative_base
from flask_script import Manager
from flask_migrate import Migrate, MigrateCommand
import jinja2
app = Flask(__name__)
# NOTE(review): database credentials and the session secret key are
# hard-coded here — move them to environment variables or a config file.
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://postgres:root@localhost/kyan'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
app.config['SESSION_TYPE'] = 'filesystem'
app.config['SECRET_KEY'] = 'super secret key'
db = SQLAlchemy(app)
migrate = Migrate(app, db)
# flask_script manager exposing `db` migration commands on the CLI.
manager = Manager(app)
manager.add_command('db', MigrateCommand)
def datetimeformat(value, format='%Y/%m'):
    """Jinja filter: render a date/datetime with *format* (default 'YYYY/MM')."""
    formatted = value.strftime(format)
    return formatted
# Register the filter globally so templates can use `|datetimeformat`.
jinja2.filters.FILTERS['datetimeformat'] = datetimeformat
class User(db.Model):
    """Registered dashboard user; password holds a sha256_crypt hash."""
    __tablename__ = "users"
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(80), unique = True)
    email = db.Column(db.String(120), unique=True)
    password = db.Column(db.String(100))
    def __init__(self, username, email, password):
        self.username = username
        self.email = email
        self.password = password
    def __repr__(self):
        return '<Username %r>' % self.username
class Bet365(db.Model):
    """Daily Bet365 affiliate report row (one record per report date)."""
    __tablename__ = "bet365s"
    id = db.Column(db.Integer, primary_key=True)
    dateto = db.Column(db.Date, unique = True)
    click = db.Column(db.Integer)
    nsignup = db.Column(db.Integer)
    ndepo = db.Column(db.Integer)
    valdepo = db.Column(db.Float)
    numdepo = db.Column(db.Integer)
    spotsturn = db.Column(db.Float)
    numsptbet = db.Column(db.Integer)
    acsptusr = db.Column(db.Integer)
    sptnetrev = db.Column(db.Float)
    casinonetrev = db.Column(db.Float)
    pokernetrev = db.Column(db.Float)
    bingonetrev = db.Column(db.Float)
    netrev = db.Column(db.Float)
    afspt = db.Column(db.Float)
    afcasino = db.Column(db.Float)
    afpoker = db.Column(db.Float)
    afbingo = db.Column(db.Float)
    commission = db.Column(db.Float)
    def __init__(self, dateto, click, nsignup, ndepo, valdepo, numdepo, spotsturn, numsptbet, acsptusr, sptnetrev, casinonetrev, pokernetrev, bingonetrev, netrev, afspt, afcasino, afpoker, afbingo, commission):
        self.dateto = dateto
        self.click = click
        self.nsignup = nsignup
        self.ndepo = ndepo
        self.valdepo = valdepo
        self.numdepo = numdepo
        self.spotsturn = spotsturn
        self.numsptbet = numsptbet
        self.acsptusr = acsptusr
        self.sptnetrev = sptnetrev
        self.casinonetrev = casinonetrev
        self.pokernetrev = pokernetrev
        self.bingonetrev = bingonetrev
        self.netrev = netrev
        self.afspt = afspt
        self.afcasino = afcasino
        self.afpoker = afpoker
        self.afbingo = afbingo
        self.commission = commission
class Bet365Other(db.Model):
    """Second Bet365 account/feed; structurally identical to Bet365."""
    __tablename__ = "bet365others"
    id = db.Column(db.Integer, primary_key=True)
    dateto = db.Column(db.Date, unique = True)
    click = db.Column(db.Integer)
    nsignup = db.Column(db.Integer)
    ndepo = db.Column(db.Integer)
    valdepo = db.Column(db.Float)
    numdepo = db.Column(db.Integer)
    spotsturn = db.Column(db.Float)
    numsptbet = db.Column(db.Integer)
    acsptusr = db.Column(db.Integer)
    sptnetrev = db.Column(db.Float)
    casinonetrev = db.Column(db.Float)
    pokernetrev = db.Column(db.Float)
    bingonetrev = db.Column(db.Float)
    netrev = db.Column(db.Float)
    afspt = db.Column(db.Float)
    afcasino = db.Column(db.Float)
    afpoker = db.Column(db.Float)
    afbingo = db.Column(db.Float)
    commission = db.Column(db.Float)
    def __init__(self, dateto, click, nsignup, ndepo, valdepo, numdepo, spotsturn, numsptbet, acsptusr, sptnetrev, casinonetrev, pokernetrev, bingonetrev, netrev, afspt, afcasino, afpoker, afbingo, commission):
        self.dateto = dateto
        self.click = click
        self.nsignup = nsignup
        self.ndepo = ndepo
        self.valdepo = valdepo
        self.numdepo = numdepo
        self.spotsturn = spotsturn
        self.numsptbet = numsptbet
        self.acsptusr = acsptusr
        self.sptnetrev = sptnetrev
        self.casinonetrev = casinonetrev
        self.pokernetrev = pokernetrev
        self.bingonetrev = bingonetrev
        self.netrev = netrev
        self.afspt = afspt
        self.afcasino = afcasino
        self.afpoker = afpoker
        self.afbingo = afbingo
        self.commission = commission
class Eight88(db.Model):
    """888 affiliate stats: current values plus wk/pre/to period buckets."""
    __tablename__ = "eight88s"
    id = db.Column(db.Integer, primary_key=True)
    impression = db.Column(db.Integer)
    click = db.Column(db.Integer)
    registration = db.Column(db.Integer)
    lead = db.Column(db.Integer)
    money_player = db.Column(db.Integer)
    balance = db.Column(db.Float)
    prebalance = db.Column(db.Float)
    imprwk = db.Column(db.Integer)
    cliwk = db.Column(db.Integer)
    regwk = db.Column(db.Integer)
    leadwk = db.Column(db.Integer)
    mpwk = db.Column(db.Integer)
    imprpre = db.Column(db.Integer)
    clipre = db.Column(db.Integer)
    regpre = db.Column(db.Integer)
    leadpre = db.Column(db.Integer)
    mppre = db.Column(db.Integer)
    imprto = db.Column(db.Integer)
    clito = db.Column(db.Integer)
    regto = db.Column(db.Integer)
    leadto = db.Column(db.Integer)
    mpto = db.Column(db.Integer)
    def __init__(self, impression, click, registration, lead, money_player, balance, prebalance, imprwk, cliwk, regwk, leadwk, mpwk, imprpre, clipre, regpre, leadpre, mppre, imprto, clito, regto, leadto, mpto):
        self.impression = impression
        self.click = click
        self.registration = registration
        self.lead = lead
        self.money_player = money_player
        self.balance = balance
        self.prebalance = prebalance
        self.imprwk = imprwk
        self.cliwk = cliwk
        self.regwk = regwk
        self.leadwk = leadwk
        self.mpwk = mpwk
        self.imprpre = imprpre
        self.clipre = clipre
        self.regpre = regpre
        self.leadpre = leadpre
        self.mppre = mppre
        self.imprto = imprto
        self.clito = clito
        self.regto = regto
        self.leadto = leadto
        self.mpto = mpto
class Bet10(db.Model):
    """10Bet affiliate stats: current, year-to-date (ytd) and total (to)."""
    __tablename__ = "bet10s"
    id = db.Column(db.Integer, primary_key=True)
    merchant = db.Column(db.String(80))
    impression = db.Column(db.Integer)
    click = db.Column(db.Integer)
    registration = db.Column(db.Integer)
    new_deposit = db.Column(db.Integer)
    commission = db.Column(db.Float)
    impreytd = db.Column(db.Integer)
    cliytd = db.Column(db.Integer)
    regytd = db.Column(db.Integer)
    ndytd = db.Column(db.Integer)
    commiytd = db.Column(db.Float)
    impreto = db.Column(db.Integer)
    clito = db.Column(db.Integer)
    regto = db.Column(db.Integer)
    ndto = db.Column(db.Integer)
    commito = db.Column(db.Float)
    dateto = db.Column(db.Date, unique = True)
    def __init__(self, merchant, impression, click, registration, new_deposit, commission, impreytd, cliytd, regytd, ndytd, commiytd, impreto, clito, regto, ndto, commito, dateto):
        self.merchant = merchant
        self.impression = impression
        self.click = click
        self.registration = registration
        self.new_deposit = new_deposit
        self.commission = commission
        self.impreytd = impreytd
        self.cliytd = cliytd
        self.regytd = regytd
        self.ndytd = ndytd
        self.commiytd = commiytd
        self.impreto = impreto
        self.clito = clito
        self.regto = regto
        self.ndto = ndto
        self.commito = commito
        self.dateto = dateto
class RealDeal(db.Model):
    """RealDeal affiliate stats; same shape as Bet10 (note `regiytd` spelling)."""
    __tablename__ = "realdeals"
    id = db.Column(db.Integer, primary_key=True)
    merchant = db.Column(db.String(80))
    impression = db.Column(db.Integer)
    click = db.Column(db.Integer)
    registration = db.Column(db.Integer)
    new_deposit = db.Column(db.Integer)
    commission = db.Column(db.Float)
    impreytd = db.Column(db.Integer)
    cliytd = db.Column(db.Integer)
    regiytd = db.Column(db.Integer)
    ndytd = db.Column(db.Integer)
    commiytd = db.Column(db.Float)
    impreto = db.Column(db.Integer)
    clito = db.Column(db.Integer)
    regto = db.Column(db.Integer)
    ndto = db.Column(db.Integer)
    commito = db.Column(db.Float)
    dateto = db.Column(db.Date, unique = True)
    def __init__(self, merchant, impression, click, registration, new_deposit, commission, impreytd, cliytd, regiytd, ndytd, commiytd, impreto, clito, regto, ndto, commito, dateto):
        self.merchant = merchant
        self.impression = impression
        self.click = click
        self.registration = registration
        self.new_deposit = new_deposit
        self.commission = commission
        self.impreytd = impreytd
        self.cliytd = cliytd
        self.regiytd = regiytd
        self.ndytd = ndytd
        self.commiytd = commiytd
        self.impreto = impreto
        self.clito = clito
        self.regto = regto
        self.ndto = ndto
        self.commito = commito
        self.dateto = dateto
class LadBroke(db.Model):
    """Ladbrokes account: only the running balance is tracked."""
    __tablename__ = "ladbrokes"
    id = db.Column(db.Integer, primary_key=True)
    balance = db.Column(db.Float)
    def __init__(self, balance):
        self.balance = balance
class BetFred(db.Model):
    """Betfred affiliate stats; same shape as Bet10."""
    __tablename__ = "betfreds"
    id = db.Column(db.Integer, primary_key=True)
    merchant = db.Column(db.String(80))
    impression = db.Column(db.Integer)
    click = db.Column(db.Integer)
    registration = db.Column(db.Integer)
    new_deposit = db.Column(db.Integer)
    commission = db.Column(db.Float)
    impreytd = db.Column(db.Integer)
    cliytd = db.Column(db.Integer)
    regytd = db.Column(db.Integer)
    ndytd = db.Column(db.Integer)
    commiytd = db.Column(db.Float)
    impreto = db.Column(db.Integer)
    clito = db.Column(db.Integer)
    regto = db.Column(db.Integer)
    ndto = db.Column(db.Integer)
    commito = db.Column(db.Float)
    dateto = db.Column(db.Date, unique = True)
    def __init__(self, merchant, impression, click, registration, new_deposit, commission, impreytd, cliytd, regytd, ndytd, commiytd, impreto, clito, regto, ndto, commito, dateto):
        self.merchant = merchant
        self.impression = impression
        self.click = click
        self.registration = registration
        self.new_deposit = new_deposit
        self.commission = commission
        self.impreytd = impreytd
        self.cliytd = cliytd
        self.regytd = regytd
        self.ndytd = ndytd
        self.commiytd = commiytd
        self.impreto = impreto
        self.clito = clito
        self.regto = regto
        self.ndto = ndto
        self.commito = commito
        self.dateto = dateto
class Paddy(db.Model):
    """Paddy Power account: only the running balance is tracked."""
    __tablename__ = "paddyies"
    id = db.Column(db.Integer, primary_key=True)
    balance = db.Column(db.Float)
    def __init__(self, balance):
        self.balance = balance
class NetBet(db.Model):
    """NetBet account: only the running balance is tracked."""
    __tablename__ = "netbets"
    id = db.Column(db.Integer, primary_key=True)
    balance = db.Column(db.Float)
    def __init__(self, balance):
        self.balance = balance
class TitanBet(db.Model):
    """TitanBet account: only the running balance is tracked."""
    __tablename__ = "titanbets"
    id = db.Column(db.Integer, primary_key=True)
    balance = db.Column(db.Float)
    def __init__(self, balance):
        self.balance = balance
class Stan(db.Model):
    """StanJames affiliate stats; same shape as Bet10 (note `impr*` spelling)."""
    __tablename__ = "stans"
    id = db.Column(db.Integer, primary_key=True)
    merchant = db.Column(db.String(80))
    impression = db.Column(db.Integer)
    click = db.Column(db.Integer)
    registration = db.Column(db.Integer)
    new_deposit = db.Column(db.Integer)
    commission = db.Column(db.Float)
    imprytd = db.Column(db.Integer)
    cliytd = db.Column(db.Integer)
    regytd = db.Column(db.Integer)
    ndytd = db.Column(db.Integer)
    commiytd = db.Column(db.Float)
    imprto = db.Column(db.Integer)
    clito = db.Column(db.Integer)
    regto = db.Column(db.Integer)
    ndto = db.Column(db.Integer)
    commito = db.Column(db.Float)
    dateto = db.Column(db.Date, unique = True)
    def __init__(self, merchant, impression, click, registration, new_deposit, commission, imprytd, cliytd, regytd, ndytd, commiytd, imprto, clito, regto, ndto, commito, dateto):
        self.merchant = merchant
        self.impression = impression
        self.click = click
        self.registration = registration
        self.new_deposit = new_deposit
        self.commission = commission
        self.imprytd = imprytd
        self.cliytd = cliytd
        self.regytd = regytd
        self.ndytd = ndytd
        self.commiytd = commiytd
        self.imprto = imprto
        self.clito = clito
        self.regto = regto
        self.ndto = ndto
        self.commito = commito
        self.dateto = dateto
class Coral(db.Model):
    """Coral affiliate stats; same shape as Bet10."""
    __tablename__ = "corals"
    id = db.Column(db.Integer, primary_key=True)
    merchant = db.Column(db.String(80))
    impression = db.Column(db.Integer)
    click = db.Column(db.Integer)
    registration = db.Column(db.Integer)
    new_deposit = db.Column(db.Integer)
    commission = db.Column(db.Float)
    impreytd = db.Column(db.Integer)
    cliytd = db.Column(db.Integer)
    regytd = db.Column(db.Integer)
    ndytd = db.Column(db.Integer)
    commiytd = db.Column(db.Float)
    impreto = db.Column(db.Integer)
    clito = db.Column(db.Integer)
    regto = db.Column(db.Integer)
    ndto = db.Column(db.Integer)
    commito = db.Column(db.Float)
    dateto = db.Column(db.Date, unique = True)
    def __init__(self, merchant, impression, click, registration, new_deposit, commission, impreytd, cliytd, regytd, ndytd, commiytd, impreto, clito, regto, ndto, commito, dateto):
        self.merchant = merchant
        self.impression = impression
        self.click = click
        self.registration = registration
        self.new_deposit = new_deposit
        self.commission = commission
        self.impreytd = impreytd
        self.cliytd = cliytd
        self.regytd = regytd
        self.ndytd = ndytd
        self.commiytd = commiytd
        self.impreto = impreto
        self.clito = clito
        self.regto = regto
        self.ndto = ndto
        self.commito = commito
        self.dateto = dateto
class William(db.Model):
    """William Hill account: only the running balance is tracked."""
    __tablename__ = "williams"
    id = db.Column(db.Integer, primary_key=True)
    balance = db.Column(db.Float)
    def __init__(self, balance):
        self.balance = balance
class SkyBet(db.Model):
    """SkyBet affiliate stats; same shape as Bet10 (note `regi*` spelling)."""
    __tablename__ = "skybets"
    id = db.Column(db.Integer, primary_key=True)
    merchant = db.Column(db.String(80))
    impression = db.Column(db.Integer)
    click = db.Column(db.Integer)
    registration = db.Column(db.Integer)
    new_deposit = db.Column(db.Integer)
    commission = db.Column(db.Float)
    impreytd = db.Column(db.Integer)
    cliytd = db.Column(db.Integer)
    regiytd = db.Column(db.Integer)
    ndytd = db.Column(db.Integer)
    commiytd = db.Column(db.Float)
    impreto = db.Column(db.Integer)
    clito = db.Column(db.Integer)
    regito = db.Column(db.Integer)
    ndto = db.Column(db.Integer)
    commito = db.Column(db.Float)
    dateto = db.Column(db.Date, unique = True)
    def __init__(self, merchant, impression, click, registration, new_deposit, commission, impreytd, cliytd, regiytd, ndytd, commiytd, impreto, clito, regito, ndto, commito, dateto):
        self.merchant = merchant
        self.impression = impression
        self.click = click
        self.registration = registration
        self.new_deposit = new_deposit
        self.commission = commission
        self.impreytd = impreytd
        self.cliytd = cliytd
        self.regiytd = regiytd
        self.ndytd = ndytd
        self.commiytd = commiytd
        self.impreto = impreto
        self.clito = clito
        self.regito = regito
        self.ndto = ndto
        self.commito = commito
        self.dateto = dateto
class Victor(db.Model):
    """BetVictor affiliate stats; same shape as Bet10."""
    __tablename__ = "victors"
    id = db.Column(db.Integer, primary_key=True)
    merchant = db.Column(db.String(80))
    impression = db.Column(db.Integer)
    click = db.Column(db.Integer)
    registration = db.Column(db.Integer)
    new_deposit = db.Column(db.Integer)
    commission = db.Column(db.Float)
    impreytd = db.Column(db.Integer)
    cliytd = db.Column(db.Integer)
    regytd = db.Column(db.Integer)
    ndytd = db.Column(db.Integer)
    commiytd = db.Column(db.Float)
    impreto = db.Column(db.Integer)
    clito = db.Column(db.Integer)
    regto = db.Column(db.Integer)
    ndto = db.Column(db.Integer)
    commito = db.Column(db.Float)
    dateto = db.Column(db.Date, unique = True)
    def __init__(self, merchant, impression, click, registration, new_deposit, commission, impreytd, cliytd, regytd, ndytd, commiytd, impreto, clito, regto, ndto, commito, dateto):
        self.merchant = merchant
        self.impression = impression
        self.click = click
        self.registration = registration
        self.new_deposit = new_deposit
        self.commission = commission
        self.impreytd = impreytd
        self.cliytd = cliytd
        self.regytd = regytd
        self.ndytd = ndytd
        self.commiytd = commiytd
        self.impreto = impreto
        self.clito = clito
        self.regto = regto
        self.ndto = ndto
        self.commito = commito
        self.dateto = dateto
def login_required(f):
    """View decorator: allow only logged-in sessions, else bounce to login."""
    @wraps(f)
    def wrap(*args, **kwargs):
        if 'logged_in' not in session:
            flash("You need to login first.")
            return redirect(url_for('login'))
        return f(*args, **kwargs)
    return wrap
@app.route('/logout/')
@login_required
def logout():
    """End the current session and return the user to the login page."""
    # Clear first, then flash: the flash message is stored in the fresh session.
    session.clear()
    flash('You have been logged out.')
    gc.collect()
    login_url = url_for('login')
    return redirect(login_url)
#login form - redirects to the dashboard on success.
@app.route('/login/', methods = ['GET', 'POST'])
def login():
    """Authenticate a user against the users table and start a session.

    GET renders the login form; POST verifies the sha256_crypt password
    hash and, on success, marks the session as logged in and redirects
    to the dashboard. Any failure re-renders the form with an error.
    """
    error = ''
    try:
        if request.method == 'POST':
            username = request.form['username']
            data = db.session.query(User).filter_by(username = username).first()
            if data is None:
                # BUG FIX: the original fell through and crashed on
                # data.password for unknown users, relying on the broad
                # except below to render the error page.
                error = "Invalid credentials, try again."
            elif sha256_crypt.verify(request.form['password'], data.password):
                session['logged_in'] = True
                session['username'] = username
                flash('You are logged in!')
                return redirect(url_for('dashboard'))
            else:
                error = "Invalid credentials, try again."
            gc.collect()
        return render_template('pages/user_sys/login.html', error = error)
    except Exception as e:
        return render_template('pages/user_sys/login.html', error = error)
#register form
@app.route('/register/', methods = ['GET', 'POST'])
def register():
    """Create a new user account; GET just renders the registration form."""
    try:
        if request.method == 'POST':
            username = request.form['username']
            email = request.form['email']
            # NOTE(review): sha256_crypt.encrypt is a deprecated passlib
            # alias for .hash — consider updating.
            password = sha256_crypt.encrypt((str(request.form['password'])))
            user = db.session.query(User).filter_by(username = username).first()
            if not user:
                result = User(username, email, password)
                db.session.add(result)
                db.session.commit()
                flash('Thanks for registering!')
                gc.collect()
                session['logged_in'] = True
                session['username'] = username
                # NOTE(review): redirects back to /register/ after success —
                # presumably 'dashboard' was intended; confirm.
                return redirect(url_for('register'))
            else:
                flash('That username is already taken, please choose another.')
        return render_template('pages/user_sys/register.html')
    except Exception as e:
        # Broad catch returns the raw error text to the client — debug aid only.
        return (str(e))
@app.route('/', methods = ['GET', 'POST'])
def landing():
    """Root URL: drop any existing session and show the login form."""
    session.clear()
    flash("You need to login first.")
    login_template = '/pages/user_sys/login.html'
    return render_template(login_template)
@app.route('/dashboard/', methods = ['GET', 'POST'])
@login_required
def dashboard():
bet365 = db.session.query(Bet365).order_by(Bet365.id.desc()).first()
eight88 = db.session.query(Eight88).order_by(Eight88.id.desc()).first()
bet10 = db.session.query(Bet10).order_by(Bet10.id.desc()).first()
realDeal = db.session.query(RealDeal).order_by(RealDeal.id.desc()).first()
ladBroke = db.session.query(LadBroke).order_by(LadBroke.id.desc()).first()
betFred = db.session.query(BetFred).order_by(BetFred.id.desc()).first()
paddy = db.session.query(Paddy).order_by(Paddy.id.desc()).first()
netBet = db.session.query(NetBet).order_by(NetBet.id.desc()).first()
titanBet = db.session.query(TitanBet).order_by(TitanBet.id.desc()).first()
stan = db.session.query(Stan).order_by(Stan.id.desc()).first()
coral = db.session.query(Coral).order_by(Coral.id.desc()).first()
william = db.session.query(William).order_by(William.id.desc()).first()
skyBet = db.session.query(SkyBet).order_by(SkyBet.id.desc()).first()
bet365other = db.session.query(Bet365Other).order_by(Bet365Other.id.desc()).first()
victor = db.session.query(Victor).order_by(Victor.id.desc()).first()
currency = CurrencyRates()
sg_cur = CurrencyCodes()
eur = float(currency.get_rate('EUR', 'USD'))
gbp = float(currency.get_rate('GBP', 'USD'))
sg_usd = sg_cur.get_symbol('USD')
sg_eur = sg_cur.get_symbol('EUR')
sg_gbp = sg_cur.get_symbol('GBP')
valSg = [sg_usd, sg_eur, sg_gbp]
currency = CurrencyRates()
sg_cur = CurrencyCodes()
eur = float(currency.get_rate('EUR', 'USD'))
gbp = float(currency.get_rate('GBP', 'USD'))
sg_usd = sg_cur.get_symbol('USD')
sg_eur = sg_cur.get_symbol('EUR')
sg_gbp = sg_cur.get_symbol('GBP')
valSg = [sg_usd, sg_eur, sg_gbp]
if request.method == 'GET':
bet365Data = db.session.execute("""SELECT
SUM(click)::int as click,
SUM(nSignup)::int as nsignup,
SUM(nDepo)::int as ndepo,
SUM(valDepo)::int as valdepo,
SUM(numDepo)::int as numdepo,
SUM(spotsTurn)::int as spotsturn,
SUM(numsptbet)::int as numsptbet,
SUM(acsptusr)::int as acsptusr,
SUM(sptnetrev)::int as sptnetrev,
SUM(casinonetrev)::int as casinonetrev,
SUM(pokernetrev)::int as pokernetrev,
SUM(bingonetrev)::int as bingonetrev,
SUM(netrev)::int as netrev,
SUM(afspt)::int as afspt,
SUM(afcasino)::int as afcasino,
SUM(afpoker)::int as afpoker,
SUM(afbingo)::int as afbingo,
SUM(commission)::int as commission,
EXTRACT(YEAR FROM dateto)::text || '/' || EXTRACT(MONTH FROM dateto)::text AS datefield
FROM bet365s
GROUP BY datefield
ORDER By datefield DESC LIMIT 1;""").first()
bet365otherData = db.session.execute("""SELECT
SUM(click)::int as click,
SUM(nSignup)::int as nsignup,
SUM(nDepo)::int as ndepo,
SUM(valDepo)::int as valdepo,
SUM(numDepo)::int as numdepo,
SUM(spotsTurn)::int as spotsturn,
SUM(numsptbet)::int as numsptbet,
SUM(acsptusr)::int as acsptusr,
SUM(sptnetrev)::int as sptnetrev,
SUM(casinonetrev)::int as casinonetrev,
SUM(pokernetrev)::int as pokernetrev,
SUM(bingonetrev)::int as bingonetrev,
SUM(netrev)::int as netrev,
SUM(afspt)::int as afspt,
SUM(afcasino)::int as afcasino,
SUM(afpoker)::int as afpoker,
SUM(afbingo)::int as afbingo,
SUM(commission)::int as commission,
EXTRACT(YEAR FROM dateto)::text || '/' || EXTRACT(MONTH FROM dateto)::text AS datefield
FROM bet365others
GROUP BY datefield
ORDER By datefield DESC LIMIT 1;""").first()
data = [bet365Data, eight88, bet10, realDeal, ladBroke, betFred, paddy, titanBet, stan, coral, eur, gbp, william, skyBet, netBet, bet365otherData, valSg, victor]
return render_template('home.html', data = data)
if request.method == 'POST':
val = request.json['val']
state = request.json['state']
if state == "1":
dateStr = request.json['val']
fromDate = dateStr.split("-")[0].strip(" ")
toDate = dateStr.split("-")[1].strip(" ")
startDate = datetime.datetime.strptime(fromDate, '%m/%d/%Y').date()
endDate = datetime.datetime.strptime(toDate, '%m/%d/%Y').date()
bet365 = db.session.execute("""SELECT
SUM(click)::int as click,
SUM(nSignup)::int as nsignup,
SUM(nDepo)::int as ndepo
FROM bet365s
WHERE dateto >='%s' AND dateto <= '%s'""" % (startDate, toDate)).first()
bet10 = db.session.execute("""SELECT
SUM(clito)::int as click,
SUM(regto)::int as registration,
SUM(commito)::float as commission
FROM bet10s
WHERE dateto >='%s' AND dateto <= '%s'""" % (startDate, toDate)).first()
realDeal = db.session.execute("""SELECT
SUM(clito)::int as click,
SUM(regto)::int as registration,
SUM(commito)::float as commission
FROM realdeals
WHERE dateto >='%s' AND dateto <= '%s'""" % (startDate, toDate)).first()
betFred = db.session.execute("""SELECT
SUM(clito)::int as click,
SUM(regto)::int as registration,
SUM(commito)::float as commission
FROM betfreds
WHERE dateto >='%s' AND dateto <= '%s'""" % (startDate, toDate)).first()
stan = db.session.execute("""SELECT
SUM(clito)::int as click,
SUM(regto)::int as registration,
SUM(commito)::float as commission
FROM stans
WHERE dateto >='%s' AND dateto <= '%s'""" % (startDate, toDate)).first()
coral = db.session.execute("""SELECT
SUM(clito)::int as click,
SUM(regto)::int as registration,
SUM(commito)::float as commission
FROM corals
WHERE dateto >='%s' AND dateto <= '%s'""" % (startDate, toDate)).first()
skyBet = db.session.execute("""SELECT
SUM(clito)::int as click,
SUM(regito)::int as registration,
SUM(commito)::float as commission
FROM skybets
WHERE dateto >='%s' AND dateto <= '%s'""" % (startDate, toDate)).first()
bet365other = db.session.execute("""SELECT
SUM(click)::int as click,
SUM(nSignup)::int as nsignup,
SUM(nDepo)::int as ndepo
FROM bet365others
WHERE dateto >='%s' AND dateto <= '%s'""" % (startDate, toDate)).first()
victor = db.session.execute("""SELECT
SUM(clito)::int as click,
SUM(regto)::int as registration,
SUM(commito)::float as commission
FROM victors
WHERE dateto >='%s' AND dateto <= '%s'""" % (startDate, toDate)).first()
tB3Odollar = bet365other.ndepo * 100
tB3dollar = bet365.ndepo * 100
tB10dollar = "%.2f" % round(bet10.commission * eur, 2)
tRealdollar = "%.2f" % round(realDeal.commission * eur, 2)
tSkydollar = "%.2f" % round(skyBet.commission * gbp, 2)
tStandollar = stan.commission
tBFdollar = "%.2f" % round(betFred.commission * gbp, 2)
tWildollar = "%.2f" % round(william.balance * eur, 2)
tLadollar = "%.2f" % round(ladBroke.balance * gbp, 2)
tPadollar = "%.2f" % round(paddy.balance * eur, 2)
tNetdollar = "%.2f" % round(netBet.balance * eur, 2)
tVidollar = "%.2f" % round(victor.commission * gbp, 2)
jsonData = []
jsonData.append({
"tB3Oclick" : bet365other.click,
"tB3Osignup" : bet365other.nsignup,
"tB3Odepo" : bet365other.ndepo,
"tB3Odollar" : tB3Odollar,
"tB3click" : bet365.click,
"tB3signup" : bet365.nsignup,
"tB3depo" : bet365.ndepo,
"tB3dollar" : tB3dollar,
"t8click" : eight88.clito,
"t8register" : eight88.regto,
"t8balance" : eight88.balance,
"t8dollar" : eight88.balance,
"tB10click" : bet10.click,
"tB10register" : bet10.registration,
"tB10commission" : bet10.commission,
"tB10dollar" : tB10dollar,
"tRealclick" : realDeal.click,
"tRealregister" : realDeal.registration,
"tRealcommission" : realDeal.commission,
"tRealdollar" : tRealdollar,
"tSkyclick" : skyBet.click,
"tSkyregister" : skyBet.registration,
"tSkycommission" : skyBet.commission,
"tSkydollar": tSkydollar,
"tWildollar" : tWildollar,
"tLadollar" : tLadollar,
"tPadollar" : tPadollar,
"tNetdollar" : tNetdollar,
"tTidollar" : titanBet.balance,
"tStanclick" : stan.click,
"tStanregister" : stan.registration,
"tStancommission" : stan.commission,
"tStandollar" : tStandollar,
"tCoralclick" : coral.click,
"tCoralregister" : coral.registration,
"tCoralcommission" : coral.commission,
"tCoraldollar" : coral.commission,
"tBFclick" : betFred.click,
"tBFregister" : betFred.registration,
"tBFcommission" : betFred.commission,
"tBFdollar" : tBFdollar,
"tViclick" : victor.click,
"tViregister" : victor.registration,
"tVicommission" : victor.commission,
"tVidollar" : tVidollar,
})
return jsonify(status = True, jsonData = jsonData)
if state == "2":
if val == "1":
bet365Data = db.session.execute("""SELECT
SUM(click)::int as click,
SUM(nSignup)::int as nsignup,
SUM(nDepo)::int as ndepo,
SUM(valDepo)::int as valdepo,
SUM(numDepo)::int as numdepo,
SUM(spotsTurn)::int as spotsturn,
SUM(numsptbet)::int as numsptbet,
SUM(acsptusr)::int as acsptusr,
SUM(sptnetrev)::int as sptnetrev,
SUM(casinonetrev)::int as casinonetrev,
SUM(pokernetrev)::int as pokernetrev,
SUM(bingonetrev)::int as bingonetrev,
SUM(netrev)::int as netrev,
SUM(afspt)::int as afspt,
SUM(afcasino)::int as afcasino,
SUM(afpoker)::int as afpoker,
SUM(afbingo)::int as afbingo,
SUM(commission)::int as commission,
EXTRACT(YEAR FROM dateto)::text || '/' || EXTRACT(MONTH FROM dateto)::text AS datefield
FROM bet365s
GROUP BY datefield
ORDER By datefield DESC LIMIT 1;""").first()
bet365otherData = db.session.execute("""SELECT
SUM(click)::int as click,
SUM(nSignup)::int as nsignup,
SUM(nDepo)::int as ndepo,
SUM(valDepo)::int as valdepo,
SUM(numDepo)::int as numdepo,
SUM(spotsTurn)::int as spotsturn,
SUM(numsptbet)::int as numsptbet,
SUM(acsptusr)::int as acsptusr,
SUM(sptnetrev)::int as sptnetrev,
SUM(casinonetrev)::int as casinonetrev,
SUM(pokernetrev)::int as pokernetrev,
SUM(bingonetrev)::int as bingonetrev,
SUM(netrev)::int as netrev,
SUM(afspt)::int as afspt,
SUM(afcasino)::int as afcasino,
SUM(afpoker)::int as afpoker,
SUM(afbingo)::int as afbingo,
SUM(commission)::int as commission,
EXTRACT(YEAR FROM dateto)::text || '/' || EXTRACT(MONTH FROM dateto)::text AS datefield
FROM bet365others
GROUP BY datefield
ORDER By datefield DESC LIMIT 1;""").first()
bet365Date = bet365Data.datefield
bet365OtherDate = bet365otherData.datefield
tB3Odollar = bet365otherData.ndepo * 100
tB3dollar = bet365Data.ndepo * 100
tB10dollar = "%.2f" % round(bet10.commission * eur, 2)
tRealdollar = "%.2f" % round(realDeal.commission * eur, 2)
tSkydollar = "%.2f" % round(skyBet.commission * gbp, 2)
tStandollar = stan.commission
tBFdollar = "%.2f" % round(betFred.commission * gbp, 2)
tWildollar = "%.2f" % round(william.balance * eur, 2)
tLadollar = "%.2f" % round(ladBroke.balance * gbp, 2)
tPadollar = "%.2f" % round(paddy.balance * eur, 2)
tNetdollar = "%.2f" % round(netBet.balance * eur, 2)
tVidollar = "%.2f" % round(victor.commission * eur, 2)
jsonData = []
jsonData.append({
"tB3Odate" : bet365OtherDate,
"tB3Oclick" : bet365otherData.click,
"tB3Osignup" : bet365otherData.nsignup,
"tB3Odepo" : bet365otherData.ndepo,
"tB3Odollar" : tB3Odollar,
"tB3date" : bet365Date,
"tB3click" : bet365Data.click,
"tB3signup" : bet365Data.nsignup,
"tB3depo" : bet365Data.ndepo,
"tB3dollar" : tB3dollar,
"t8click" : eight88.click,
"t8register" : eight88.registration,
"t8balance" : eight88.balance,
"t8dollar" : eight88.balance,
"tB10click" : bet10.click,
"tB10register" : bet10.registration,
"tB10commission" : bet10.commission,
"tB10dollar" : tB10dollar,
"tRealclick" : realDeal.click,
"tRealregister" : realDeal.registration,
"tRealcommission" : realDeal.commission,
"tRealdollar" : tRealdollar,
"tSkyclick" : skyBet.click,
"tSkyregister" : skyBet.registration,
"tSkycommission" : skyBet.commission,
"tSkydollar": tSkydollar,
"tWildollar" : tWildollar,
"tLadollar" : tLadollar,
"tPadollar" : tPadollar,
"tNetdollar" : tNetdollar,
"tTidollar" : titanBet.balance,
"tStanclick" : stan.click,
"tStanregister" : stan.registration,
"tStancommission" : stan.commission,
"tStandollar" : tStandollar,
"tCoralclick" : coral.click,
"tCoralregister" : coral.registration,
"tCoralcommission" : coral.commission,
"tCoraldollar" : coral.commission,
"tBFclick" : betFred.click,
"tBFregister" : betFred.registration,
"tBFcommission" : betFred.commission,
"tBFdollar" : tBFdollar,
"tViclick" : victor.click,
"tViregister" : victor.registration,
"tVicommission" : victor.commission,
"tVidollar" : tVidollar,
# "total" : total
})
return jsonify(status = True, jsonData = jsonData)
elif val == "2":
bet365Data = db.session.execute("""SELECT
SUM(click)::int as click,
SUM(nSignup)::int as nsignup,
SUM(nDepo)::int as ndepo,
SUM(valDepo)::int as valdepo,
SUM(numDepo)::int as numdepo,
SUM(spotsTurn)::int as spotsturn,
SUM(numsptbet)::int as numsptbet,
SUM(acsptusr)::int as acsptusr,
SUM(sptnetrev)::int as sptnetrev,
SUM(casinonetrev)::int as casinonetrev,
SUM(pokernetrev)::int as pokernetrev,
SUM(bingonetrev)::int as bingonetrev,
SUM(netrev)::int as netrev,
SUM(afspt)::int as afspt,
SUM(afcasino)::int as afcasino,
SUM(afpoker)::int as afpoker,
SUM(afbingo)::int as afbingo,
SUM(commission)::int as commission,
EXTRACT(YEAR FROM dateto)::text AS datefield
FROM bet365s
GROUP BY datefield
ORDER By datefield DESC LIMIT 1;""").first()
bet365otherData = db.session.execute("""SELECT
SUM(click)::int as click,
SUM(nSignup)::int as nsignup,
SUM(nDepo)::int as ndepo,
SUM(valDepo)::int as valdepo,
SUM(numDepo)::int as numdepo,
SUM(spotsTurn)::int as spotsturn,
SUM(numsptbet)::int as numsptbet,
SUM(acsptusr)::int as acsptusr,
SUM(sptnetrev)::int as sptnetrev,
SUM(casinonetrev)::int as casinonetrev,
SUM(pokernetrev)::int as pokernetrev,
SUM(bingonetrev)::int as bingonetrev,
SUM(netrev)::int as netrev,
SUM(afspt)::int as afspt,
SUM(afcasino)::int as afcasino,
SUM(afpoker)::int as afpoker,
SUM(afbingo)::int as afbingo,
SUM(commission)::int as commission,
EXTRACT(YEAR FROM dateto)::text AS datefield
FROM bet365others
GROUP BY datefield
ORDER By datefield DESC LIMIT 1;""").first()
bet365Date = bet365Data.datefield
bet365OtherDate = bet365otherData.datefield
tB3Odollar = bet365otherData.ndepo * 100
tB3dollar = bet365Data.ndepo * 100
tB10dollar = "%.2f" % round(bet10.commiytd * eur, 2)
tRealdollar = "%.2f" % round(realDeal.commiytd * eur, 2)
tSkydollar = "%.2f" % round(skyBet.commiytd * gbp, 2)
tStandollar = stan.commiytd
tBFdollar = "%.2f" % round(betFred.commiytd * gbp, 2)
tWildollar = "%.2f" % round(william.balance * eur, 2)
tLadollar = "%.2f" % round(ladBroke.balance * gbp, 2)
tPadollar = "%.2f" % round(paddy.balance * eur, 2)
tNetdollar = "%.2f" % round(netBet.balance * eur, 2)
tVidollar = "%.2f" % round(victor.commiytd * eur, 2)
jsonData = []
jsonData.append({
"tB3Odate" : bet365OtherDate,
"tB3Oclick" : bet365otherData.click,
"tB3Osignup" : bet365otherData.nsignup,
"tB3Odepo" : bet365otherData.ndepo,
"tB3Odollar" : tB3Odollar,
"tB3date" : bet365Date,
"tB3click" : bet365Data.click,
"tB3signup" : bet365Data.nsignup,
"tB3depo" : bet365Data.ndepo,
"tB3dollar" : tB3dollar,
"t8click" : eight88.click,
"t8register" : eight88.registration,
"t8balance" : eight88.balance,
"t8dollar" : eight88.balance,
"tB10click" : bet10.cliytd,
"tB10register" : bet10.regytd,
"tB10commission" : bet10.commiytd,
"tB10dollar" : tB10dollar,
"tRealclick" : realDeal.cliytd,
"tRealregister" : realDeal.regiytd,
"tRealcommission" : realDeal.commiytd,
"tRealdollar" : tRealdollar,
"tSkyclick" : skyBet.cliytd,
"tSkyregister" : skyBet.regiytd,
"tSkycommission" : skyBet.commiytd,
"tSkydollar": tSkydollar,
"tWildollar" : tWildollar,
"tLadollar" : tLadollar,
"tPadollar" : tPadollar,
"tNetdollar" : tNetdollar,
"tTidollar" : titanBet.balance,
"tStanclick" : stan.cliytd,
"tStanregister" : stan.regytd,
"tStancommission" : stan.commiytd,
"tStandollar" : tStandollar,
"tCoralclick" : coral.cliytd,
"tCoralregister" : coral.regytd,
"tCoralcommission" : coral.commiytd,
"tCoraldollar" : coral.commiytd,
"tBFclick" : betFred.cliytd,
"tBFregister" : betFred.regytd,
"tBFcommission" : betFred.commiytd,
"tBFdollar" : tBFdollar,
"tViclick" : victor.cliytd,
"tViregister" : victor.regytd,
"tVicommission" : victor.commiytd,
"tVidollar" : tVidollar,
# "total" : total
})
return jsonify(status = True, jsonData = jsonData)
@app.route('/summary/', methods = ['GET', 'POST'])
def summary():
    """Affiliate summary dashboard.

    GET  -- render the summary page with the latest snapshot row of every
            affiliate table, the latest-month aggregates for bet365 /
            bet365other, and live EUR/GBP -> USD conversion rates.
    POST -- AJAX endpoint.  The JSON body carries ``state`` and ``val``:
              state "1": ``val`` is a custom "mm/dd/yyyy - mm/dd/yyyy" range;
                         totals over that period are returned.
              state "2": ``val`` "1" -> latest calendar month,
                         ``val`` "2" -> latest calendar year.

    Returns rendered HTML (GET) or ``jsonify(status=True, jsonData=[...])``.
    """
    # Latest snapshot per affiliate (tables are append-only; highest id = newest).
    bet365 = db.session.query(Bet365).order_by(Bet365.id.desc()).first()
    eight88 = db.session.query(Eight88).order_by(Eight88.id.desc()).first()
    bet10 = db.session.query(Bet10).order_by(Bet10.id.desc()).first()
    realDeal = db.session.query(RealDeal).order_by(RealDeal.id.desc()).first()
    ladBroke = db.session.query(LadBroke).order_by(LadBroke.id.desc()).first()
    betFred = db.session.query(BetFred).order_by(BetFred.id.desc()).first()
    paddy = db.session.query(Paddy).order_by(Paddy.id.desc()).first()
    netBet = db.session.query(NetBet).order_by(NetBet.id.desc()).first()
    titanBet = db.session.query(TitanBet).order_by(TitanBet.id.desc()).first()
    stan = db.session.query(Stan).order_by(Stan.id.desc()).first()
    coral = db.session.query(Coral).order_by(Coral.id.desc()).first()
    william = db.session.query(William).order_by(William.id.desc()).first()
    skyBet = db.session.query(SkyBet).order_by(SkyBet.id.desc()).first()
    bet365other = db.session.query(Bet365Other).order_by(Bet365Other.id.desc()).first()
    victor = db.session.query(Victor).order_by(Victor.id.desc()).first()
    # Live FX rates (as floats) and display symbols for USD/EUR/GBP.
    currency = CurrencyRates()
    sg_cur = CurrencyCodes()
    eur = float(currency.get_rate('EUR', 'USD'))
    gbp = float(currency.get_rate('GBP', 'USD'))
    sg_usd = sg_cur.get_symbol('USD')
    sg_eur = sg_cur.get_symbol('EUR')
    sg_gbp = sg_cur.get_symbol('GBP')
    valSg = [sg_usd, sg_eur, sg_gbp]
    if request.method == 'GET':
        # Latest-month aggregates for bet365.
        # BUGFIX: ``datefield`` is text, so ORDER BY datefield DESC picked
        # e.g. '2020/9' over '2020/10'; order by MAX(dateto) instead so the
        #真 latest month wins.
        bet365Data = db.session.execute("""SELECT
            SUM(click)::int as click,
            SUM(nSignup)::int as nsignup,
            SUM(nDepo)::int as ndepo,
            SUM(valDepo)::int as valdepo,
            SUM(numDepo)::int as numdepo,
            SUM(spotsTurn)::int as spotsturn,
            SUM(numsptbet)::int as numsptbet,
            SUM(acsptusr)::int as acsptusr,
            SUM(sptnetrev)::int as sptnetrev,
            SUM(casinonetrev)::int as casinonetrev,
            SUM(pokernetrev)::int as pokernetrev,
            SUM(bingonetrev)::int as bingonetrev,
            SUM(netrev)::int as netrev,
            SUM(afspt)::int as afspt,
            SUM(afcasino)::int as afcasino,
            SUM(afpoker)::int as afpoker,
            SUM(afbingo)::int as afbingo,
            SUM(commission)::int as commission,
            EXTRACT(YEAR FROM dateto)::text || '/' || EXTRACT(MONTH FROM dateto)::text AS datefield
            FROM bet365s
            GROUP BY datefield
            ORDER By MAX(dateto) DESC LIMIT 1;""").first()
        # Same latest-month aggregate for the second bet365 account.
        bet365otherData = db.session.execute("""SELECT
            SUM(click)::int as click,
            SUM(nSignup)::int as nsignup,
            SUM(nDepo)::int as ndepo,
            SUM(valDepo)::int as valdepo,
            SUM(numDepo)::int as numdepo,
            SUM(spotsTurn)::int as spotsturn,
            SUM(numsptbet)::int as numsptbet,
            SUM(acsptusr)::int as acsptusr,
            SUM(sptnetrev)::int as sptnetrev,
            SUM(casinonetrev)::int as casinonetrev,
            SUM(pokernetrev)::int as pokernetrev,
            SUM(bingonetrev)::int as bingonetrev,
            SUM(netrev)::int as netrev,
            SUM(afspt)::int as afspt,
            SUM(afcasino)::int as afcasino,
            SUM(afpoker)::int as afpoker,
            SUM(afbingo)::int as afbingo,
            SUM(commission)::int as commission,
            EXTRACT(YEAR FROM dateto)::text || '/' || EXTRACT(MONTH FROM dateto)::text AS datefield
            FROM bet365others
            GROUP BY datefield
            ORDER By MAX(dateto) DESC LIMIT 1;""").first()
        # Positional data list consumed by the template -- keep the order stable.
        data = [bet365Data, eight88, bet10, realDeal, ladBroke, betFred, paddy, titanBet, stan, coral, eur, gbp, william, skyBet, netBet, bet365otherData, valSg, victor]
        return render_template('pages/summary.html', data = data)
    if request.method == 'POST':
        val = request.json['val']
        state = request.json['state']
        if state == "1":
            # Custom date range: "mm/dd/yyyy - mm/dd/yyyy".
            dateStr = request.json['val']
            fromDate = dateStr.split("-")[0].strip(" ")
            toDate = dateStr.split("-")[1].strip(" ")
            startDate = datetime.datetime.strptime(fromDate, '%m/%d/%Y').date()
            endDate = datetime.datetime.strptime(toDate, '%m/%d/%Y').date()
            # BUGFIX: the queries previously interpolated the raw "mm/dd/yyyy"
            # string (``toDate``), whose interpretation depends on the server's
            # DateStyle; ``endDate`` was parsed but never used.  Interpolating
            # the parsed date yields unambiguous ISO 'YYYY-MM-DD' text.
            # Interpolation is safe here only because strptime has already
            # validated both endpoints as dates.
            bet365 = db.session.execute("""SELECT
                SUM(click)::int as click,
                SUM(nSignup)::int as nsignup,
                SUM(nDepo)::int as ndepo
                FROM bet365s
                WHERE dateto >='%s' AND dateto <= '%s'""" % (startDate, endDate)).first()
            bet10 = db.session.execute("""SELECT
                SUM(clito)::int as click,
                SUM(regto)::int as registration,
                SUM(commito)::float as commission
                FROM bet10s
                WHERE dateto >='%s' AND dateto <= '%s'""" % (startDate, endDate)).first()
            realDeal = db.session.execute("""SELECT
                SUM(clito)::int as click,
                SUM(regto)::int as registration,
                SUM(commito)::float as commission
                FROM realdeals
                WHERE dateto >='%s' AND dateto <= '%s'""" % (startDate, endDate)).first()
            betFred = db.session.execute("""SELECT
                SUM(clito)::int as click,
                SUM(regto)::int as registration,
                SUM(commito)::float as commission
                FROM betfreds
                WHERE dateto >='%s' AND dateto <= '%s'""" % (startDate, endDate)).first()
            stan = db.session.execute("""SELECT
                SUM(clito)::int as click,
                SUM(regto)::int as registration,
                SUM(commito)::float as commission
                FROM stans
                WHERE dateto >='%s' AND dateto <= '%s'""" % (startDate, endDate)).first()
            coral = db.session.execute("""SELECT
                SUM(clito)::int as click,
                SUM(regto)::int as registration,
                SUM(commito)::float as commission
                FROM corals
                WHERE dateto >='%s' AND dateto <= '%s'""" % (startDate, endDate)).first()
            # NOTE: skybets spells its registration column "regito".
            skyBet = db.session.execute("""SELECT
                SUM(clito)::int as click,
                SUM(regito)::int as registration,
                SUM(commito)::float as commission
                FROM skybets
                WHERE dateto >='%s' AND dateto <= '%s'""" % (startDate, endDate)).first()
            bet365other = db.session.execute("""SELECT
                SUM(click)::int as click,
                SUM(nSignup)::int as nsignup,
                SUM(nDepo)::int as ndepo
                FROM bet365others
                WHERE dateto >='%s' AND dateto <= '%s'""" % (startDate, endDate)).first()
            victor = db.session.execute("""SELECT
                SUM(clito)::int as click,
                SUM(regto)::int as registration,
                SUM(commito)::float as commission
                FROM victors
                WHERE dateto >='%s' AND dateto <= '%s'""" % (startDate, endDate)).first()
            # Dollar conversions: bet365 pays a flat 100 per depositor; the
            # others convert commission/balance from EUR or GBP to USD.
            tB3Odollar = bet365other.ndepo * 100
            tB3dollar = bet365.ndepo * 100
            tB10dollar = "%.2f" % round(bet10.commission * eur, 2)
            tRealdollar = "%.2f" % round(realDeal.commission * eur, 2)
            tSkydollar = "%.2f" % round(skyBet.commission * gbp, 2)
            tStandollar = stan.commission
            tBFdollar = "%.2f" % round(betFred.commission * gbp, 2)
            tWildollar = "%.2f" % round(william.balance * eur, 2)
            tLadollar = "%.2f" % round(ladBroke.balance * gbp, 2)
            tPadollar = "%.2f" % round(paddy.balance * eur, 2)
            tNetdollar = "%.2f" % round(netBet.balance * eur, 2)
            tVidollar = "%.2f" % round(victor.commission * gbp, 2)
            jsonData = []
            jsonData.append({
                "tB3Oclick" : bet365other.click,
                "tB3Osignup" : bet365other.nsignup,
                "tB3Odepo" : bet365other.ndepo,
                "tB3Odollar" : tB3Odollar,
                "tB3click" : bet365.click,
                "tB3signup" : bet365.nsignup,
                "tB3depo" : bet365.ndepo,
                "tB3dollar" : tB3dollar,
                "t8click" : eight88.clito,
                "t8register" : eight88.regto,
                "t8balance" : eight88.balance,
                "t8dollar" : eight88.balance,
                "tB10click" : bet10.click,
                "tB10register" : bet10.registration,
                "tB10commission" : bet10.commission,
                "tB10dollar" : tB10dollar,
                "tRealclick" : realDeal.click,
                "tRealregister" : realDeal.registration,
                "tRealcommission" : realDeal.commission,
                "tRealdollar" : tRealdollar,
                "tSkyclick" : skyBet.click,
                "tSkyregister" : skyBet.registration,
                "tSkycommission" : skyBet.commission,
                "tSkydollar": tSkydollar,
                "tWildollar" : tWildollar,
                "tLadollar" : tLadollar,
                "tPadollar" : tPadollar,
                "tNetdollar" : tNetdollar,
                "tTidollar" : titanBet.balance,
                "tStanclick" : stan.click,
                "tStanregister" : stan.registration,
                "tStancommission" : stan.commission,
                "tStandollar" : tStandollar,
                "tCoralclick" : coral.click,
                "tCoralregister" : coral.registration,
                "tCoralcommission" : coral.commission,
                "tCoraldollar" : coral.commission,
                "tBFclick" : betFred.click,
                "tBFregister" : betFred.registration,
                "tBFcommission" : betFred.commission,
                "tBFdollar" : tBFdollar,
                "tViclick" : victor.click,
                "tViregister" : victor.registration,
                "tVicommission" : victor.commission,
                "tVidollar" : tVidollar,
            })
            return jsonify(status = True, jsonData = jsonData)
        if state == "2":
            if val == "1":
                # Latest calendar month.  BUGFIX: order by MAX(dateto), not the
                # text datefield (text sort put '2020/9' after '2020/10').
                bet365Data = db.session.execute("""SELECT
                    SUM(click)::int as click,
                    SUM(nSignup)::int as nsignup,
                    SUM(nDepo)::int as ndepo,
                    SUM(valDepo)::int as valdepo,
                    SUM(numDepo)::int as numdepo,
                    SUM(spotsTurn)::int as spotsturn,
                    SUM(numsptbet)::int as numsptbet,
                    SUM(acsptusr)::int as acsptusr,
                    SUM(sptnetrev)::int as sptnetrev,
                    SUM(casinonetrev)::int as casinonetrev,
                    SUM(pokernetrev)::int as pokernetrev,
                    SUM(bingonetrev)::int as bingonetrev,
                    SUM(netrev)::int as netrev,
                    SUM(afspt)::int as afspt,
                    SUM(afcasino)::int as afcasino,
                    SUM(afpoker)::int as afpoker,
                    SUM(afbingo)::int as afbingo,
                    SUM(commission)::int as commission,
                    EXTRACT(YEAR FROM dateto)::text || '/' || EXTRACT(MONTH FROM dateto)::text AS datefield
                    FROM bet365s
                    GROUP BY datefield
                    ORDER By MAX(dateto) DESC LIMIT 1;""").first()
                bet365otherData = db.session.execute("""SELECT
                    SUM(click)::int as click,
                    SUM(nSignup)::int as nsignup,
                    SUM(nDepo)::int as ndepo,
                    SUM(valDepo)::int as valdepo,
                    SUM(numDepo)::int as numdepo,
                    SUM(spotsTurn)::int as spotsturn,
                    SUM(numsptbet)::int as numsptbet,
                    SUM(acsptusr)::int as acsptusr,
                    SUM(sptnetrev)::int as sptnetrev,
                    SUM(casinonetrev)::int as casinonetrev,
                    SUM(pokernetrev)::int as pokernetrev,
                    SUM(bingonetrev)::int as bingonetrev,
                    SUM(netrev)::int as netrev,
                    SUM(afspt)::int as afspt,
                    SUM(afcasino)::int as afcasino,
                    SUM(afpoker)::int as afpoker,
                    SUM(afbingo)::int as afbingo,
                    SUM(commission)::int as commission,
                    EXTRACT(YEAR FROM dateto)::text || '/' || EXTRACT(MONTH FROM dateto)::text AS datefield
                    FROM bet365others
                    GROUP BY datefield
                    ORDER By MAX(dateto) DESC LIMIT 1;""").first()
                bet365Date = bet365Data.datefield
                bet365OtherDate = bet365otherData.datefield
                tB3Odollar = bet365otherData.ndepo * 100
                tB3dollar = bet365Data.ndepo * 100
                tB10dollar = "%.2f" % round(bet10.commission * eur, 2)
                tRealdollar = "%.2f" % round(realDeal.commission * eur, 2)
                tSkydollar = "%.2f" % round(skyBet.commission * gbp, 2)
                tStandollar = stan.commission
                tBFdollar = "%.2f" % round(betFred.commission * gbp, 2)
                tWildollar = "%.2f" % round(william.balance * eur, 2)
                tLadollar = "%.2f" % round(ladBroke.balance * gbp, 2)
                tPadollar = "%.2f" % round(paddy.balance * eur, 2)
                tNetdollar = "%.2f" % round(netBet.balance * eur, 2)
                # NOTE(review): victor is converted with gbp here but other
                # summary code paths in this file use eur -- confirm which
                # currency the Victor programme actually pays in.
                tVidollar = "%.2f" % round(victor.commission * gbp, 2)
                jsonData = []
                jsonData.append({
                    "tB3Odate" : bet365OtherDate,
                    "tB3Oclick" : bet365otherData.click,
                    "tB3Osignup" : bet365otherData.nsignup,
                    "tB3Odepo" : bet365otherData.ndepo,
                    "tB3Odollar" : tB3Odollar,
                    "tB3date" : bet365Date,
                    "tB3click" : bet365Data.click,
                    "tB3signup" : bet365Data.nsignup,
                    "tB3depo" : bet365Data.ndepo,
                    "tB3dollar" : tB3dollar,
                    "t8click" : eight88.click,
                    "t8register" : eight88.registration,
                    "t8balance" : eight88.balance,
                    "t8dollar" : eight88.balance,
                    "tB10click" : bet10.click,
                    "tB10register" : bet10.registration,
                    "tB10commission" : bet10.commission,
                    "tB10dollar" : tB10dollar,
                    "tRealclick" : realDeal.click,
                    "tRealregister" : realDeal.registration,
                    "tRealcommission" : realDeal.commission,
                    "tRealdollar" : tRealdollar,
                    "tSkyclick" : skyBet.click,
                    "tSkyregister" : skyBet.registration,
                    "tSkycommission" : skyBet.commission,
                    "tSkydollar": tSkydollar,
                    "tWildollar" : tWildollar,
                    "tLadollar" : tLadollar,
                    "tPadollar" : tPadollar,
                    "tNetdollar" : tNetdollar,
                    "tTidollar" : titanBet.balance,
                    "tStanclick" : stan.click,
                    "tStanregister" : stan.registration,
                    "tStancommission" : stan.commission,
                    "tStandollar" : tStandollar,
                    "tCoralclick" : coral.click,
                    "tCoralregister" : coral.registration,
                    "tCoralcommission" : coral.commission,
                    "tCoraldollar" : coral.commission,
                    "tBFclick" : betFred.click,
                    "tBFregister" : betFred.registration,
                    "tBFcommission" : betFred.commission,
                    "tBFdollar" : tBFdollar,
                    "tViclick" : victor.click,
                    "tViregister" : victor.registration,
                    "tVicommission" : victor.commission,
                    "tVidollar" : tVidollar,
                })
                return jsonify(status = True, jsonData = jsonData)
            elif val == "2":
                # Latest calendar year; the snapshot objects' *ytd columns
                # supply the non-bet365 figures.
                bet365Data = db.session.execute("""SELECT
                    SUM(click)::int as click,
                    SUM(nSignup)::int as nsignup,
                    SUM(nDepo)::int as ndepo,
                    SUM(valDepo)::int as valdepo,
                    SUM(numDepo)::int as numdepo,
                    SUM(spotsTurn)::int as spotsturn,
                    SUM(numsptbet)::int as numsptbet,
                    SUM(acsptusr)::int as acsptusr,
                    SUM(sptnetrev)::int as sptnetrev,
                    SUM(casinonetrev)::int as casinonetrev,
                    SUM(pokernetrev)::int as pokernetrev,
                    SUM(bingonetrev)::int as bingonetrev,
                    SUM(netrev)::int as netrev,
                    SUM(afspt)::int as afspt,
                    SUM(afcasino)::int as afcasino,
                    SUM(afpoker)::int as afpoker,
                    SUM(afbingo)::int as afbingo,
                    SUM(commission)::int as commission,
                    EXTRACT(YEAR FROM dateto)::text AS datefield
                    FROM bet365s
                    GROUP BY datefield
                    ORDER By MAX(dateto) DESC LIMIT 1;""").first()
                bet365otherData = db.session.execute("""SELECT
                    SUM(click)::int as click,
                    SUM(nSignup)::int as nsignup,
                    SUM(nDepo)::int as ndepo,
                    SUM(valDepo)::int as valdepo,
                    SUM(numDepo)::int as numdepo,
                    SUM(spotsTurn)::int as spotsturn,
                    SUM(numsptbet)::int as numsptbet,
                    SUM(acsptusr)::int as acsptusr,
                    SUM(sptnetrev)::int as sptnetrev,
                    SUM(casinonetrev)::int as casinonetrev,
                    SUM(pokernetrev)::int as pokernetrev,
                    SUM(bingonetrev)::int as bingonetrev,
                    SUM(netrev)::int as netrev,
                    SUM(afspt)::int as afspt,
                    SUM(afcasino)::int as afcasino,
                    SUM(afpoker)::int as afpoker,
                    SUM(afbingo)::int as afbingo,
                    SUM(commission)::int as commission,
                    EXTRACT(YEAR FROM dateto)::text AS datefield
                    FROM bet365others
                    GROUP BY datefield
                    ORDER By MAX(dateto) DESC LIMIT 1;""").first()
                bet365Date = bet365Data.datefield
                bet365OtherDate = bet365otherData.datefield
                tB3Odollar = bet365otherData.ndepo * 100
                tB3dollar = bet365Data.ndepo * 100
                tB10dollar = "%.2f" % round(bet10.commiytd * eur, 2)
                tRealdollar = "%.2f" % round(realDeal.commiytd * eur, 2)
                tSkydollar = "%.2f" % round(skyBet.commiytd * gbp, 2)
                tStandollar = stan.commiytd
                tBFdollar = "%.2f" % round(betFred.commiytd * gbp, 2)
                tWildollar = "%.2f" % round(william.balance * eur, 2)
                tLadollar = "%.2f" % round(ladBroke.balance * gbp, 2)
                tPadollar = "%.2f" % round(paddy.balance * eur, 2)
                tNetdollar = "%.2f" % round(netBet.balance * eur, 2)
                # NOTE(review): gbp vs eur for victor -- see note in val == "1".
                tVidollar = "%.2f" % round(victor.commiytd * gbp, 2)
                jsonData = []
                jsonData.append({
                    "tB3Odate" : bet365OtherDate,
                    "tB3Oclick" : bet365otherData.click,
                    "tB3Osignup" : bet365otherData.nsignup,
                    "tB3Odepo" : bet365otherData.ndepo,
                    "tB3Odollar" : tB3Odollar,
                    "tB3date" : bet365Date,
                    "tB3click" : bet365Data.click,
                    "tB3signup" : bet365Data.nsignup,
                    "tB3depo" : bet365Data.ndepo,
                    "tB3dollar" : tB3dollar,
                    "t8click" : eight88.click,
                    "t8register" : eight88.registration,
                    "t8balance" : eight88.balance,
                    "t8dollar" : eight88.balance,
                    "tB10click" : bet10.cliytd,
                    "tB10register" : bet10.regytd,
                    "tB10commission" : bet10.commiytd,
                    "tB10dollar" : tB10dollar,
                    "tRealclick" : realDeal.cliytd,
                    "tRealregister" : realDeal.regiytd,
                    "tRealcommission" : realDeal.commiytd,
                    "tRealdollar" : tRealdollar,
                    "tSkyclick" : skyBet.cliytd,
                    "tSkyregister" : skyBet.regiytd,
                    "tSkycommission" : skyBet.commiytd,
                    "tSkydollar": tSkydollar,
                    "tWildollar" : tWildollar,
                    "tLadollar" : tLadollar,
                    "tPadollar" : tPadollar,
                    "tNetdollar" : tNetdollar,
                    "tTidollar" : titanBet.balance,
                    "tStanclick" : stan.cliytd,
                    "tStanregister" : stan.regytd,
                    "tStancommission" : stan.commiytd,
                    "tStandollar" : tStandollar,
                    "tCoralclick" : coral.cliytd,
                    "tCoralregister" : coral.regytd,
                    "tCoralcommission" : coral.commiytd,
                    "tCoraldollar" : coral.commiytd,
                    "tBFclick" : betFred.cliytd,
                    "tBFregister" : betFred.regytd,
                    "tBFcommission" : betFred.commiytd,
                    "tBFdollar" : tBFdollar,
                    "tViclick" : victor.cliytd,
                    "tViregister" : victor.regytd,
                    "tVicommission" : victor.commiytd,
                    "tVidollar" : tVidollar,
                })
                return jsonify(status = True, jsonData = jsonData)
def _bet365_rows(rows):
    """Serialize bet365 result rows into the JSON shape the page's chart expects."""
    return [{
        "dateto" : perDay.datefield,
        "click" : perDay.click,
        "nSignup" : perDay.nsignup,
        "nDepo" : perDay.ndepo,
        "valDepo" : perDay.valdepo,
        "numDepo" : perDay.numdepo,
        "spotsTurn" : perDay.spotsturn,
        "numSptBet" : perDay.numsptbet,
        "acSptUsr" : perDay.acsptusr,
        "sptNetRev" : perDay.sptnetrev,
        "casinoNetRev" : perDay.casinonetrev,
        "pokerNetRev" : perDay.pokernetrev,
        "bingoNetRev" : perDay.bingonetrev,
        "netRev" : perDay.netrev,
        "afSpt" : perDay.afspt,
        "afCasino" : perDay.afcasino,
        "afPoker" : perDay.afpoker,
        "afBingo" : perDay.afbingo,
        "commission" : perDay.commission
    } for perDay in rows]

@app.route('/bet365/', methods = ['GET', 'POST'])
def bet365():
    """bet365 detail page / data endpoint.

    GET  -- render today's bet365 rows.
    POST -- JSON body carries ``period`` ("mm/dd/yyyy - mm/dd/yyyy") and
            ``optVal``: "0"/"1" per-day rows, "2" weekly totals, "3" monthly
            totals.  Returns ``jsonify(jsonData=[...])``.
    """
    data = {}
    if request.method == 'GET':
        now = datetime.datetime.now()
        today = now.date()
        data = db.session.query(Bet365).filter(Bet365.dateto == today)
        return render_template('pages/bet365.html', data = data)
    elif request.method == 'POST':
        period = request.json['period']
        optVal = request.json['optVal']
        # Dates are validated by strptime, so interpolating their ISO text
        # into the SQL below cannot inject.
        fromDate = datetime.datetime.strptime(period.split('-')[0].strip(), '%m/%d/%Y').date()
        toDate = datetime.datetime.strptime(period.split('-')[1].strip(), '%m/%d/%Y').date()
        if (optVal == '0') or (optVal == '1'):
            # BUGFIX: order by the real date column -- ordering on the text
            # datefield sorted e.g. '2020-1-10' before '2020-1-9'.
            data = db.session.execute("""SELECT
                *,
                EXTRACT(YEAR FROM dateto)::text || '-' ||EXTRACT(MONTH FROM dateto)::text || '-' || EXTRACT(DAY FROM dateto)::text AS datefield
                FROM bet365s
                WHERE dateto >= '%s' AND dateto <= '%s'
                ORDER By dateto;""" % (fromDate, toDate))
            return jsonify(jsonData = _bet365_rows(data))
        elif optVal == '2':
            # Weekly totals.  BUGFIX: ORDER BY MIN(dateto) -- the text
            # datefield sorted '2020/10(...)' before '2020/9(...)'.
            data = db.session.execute("""SELECT
                SUM(click) as click,
                SUM(nSignup) as nsignup,
                SUM(nDepo) as ndepo,
                SUM(valDepo) as valdepo,
                SUM(numDepo) as numdepo,
                SUM(spotsTurn) as spotsturn,
                SUM(numsptbet) as numsptbet,
                SUM(acsptusr) as acsptusr,
                SUM(sptnetrev) as sptnetrev,
                SUM(casinonetrev) as casinonetrev,
                SUM(pokernetrev) as pokernetrev,
                SUM(bingonetrev) as bingonetrev,
                SUM(netrev) as netrev,
                SUM(afspt) as afspt,
                SUM(afcasino) as afcasino,
                SUM(afpoker) as afpoker,
                SUM(afbingo) as afbingo,
                SUM(commission) as commission,
                EXTRACT(YEAR FROM dateto)::text || '/' ||EXTRACT(MONTH FROM dateto)::text || '(' || EXTRACT(WEEK FROM dateto)::text || 'wk.' || ')' AS datefield
                FROM bet365s
                WHERE dateto >= '%s' AND dateto <= '%s'
                GROUP BY datefield
                ORDER By MIN(dateto);""" % (fromDate, toDate))
        elif optVal == '3':
            # Monthly totals; same ordering fix as above.
            data = db.session.execute("""SELECT
                SUM(click) as click,
                SUM(nSignup) as nsignup,
                SUM(nDepo) as ndepo,
                SUM(valDepo) as valdepo,
                SUM(numDepo) as numdepo,
                SUM(spotsTurn) as spotsturn,
                SUM(numsptbet) as numsptbet,
                SUM(acsptusr) as acsptusr,
                SUM(sptnetrev) as sptnetrev,
                SUM(casinonetrev) as casinonetrev,
                SUM(pokernetrev) as pokernetrev,
                SUM(bingonetrev) as bingonetrev,
                SUM(netrev) as netrev,
                SUM(afspt) as afspt,
                SUM(afcasino) as afcasino,
                SUM(afpoker) as afpoker,
                SUM(afbingo) as afbingo,
                SUM(commission) as commission,
                EXTRACT(YEAR FROM dateto)::text || '/' || EXTRACT(MONTH FROM dateto)::text AS datefield
                FROM bet365s
                WHERE dateto >= '%s' AND dateto <= '%s'
                GROUP BY datefield
                ORDER By MIN(dateto);""" % (fromDate, toDate))
        # Unknown optVal leaves ``data`` as the empty dict, so this yields [].
        return jsonify(jsonData = _bet365_rows(data))
def _bet365other_rows(rows):
    """Serialize bet365other result rows into the JSON shape the page's chart expects."""
    return [{
        "dateto" : perDay.datefield,
        "click" : perDay.click,
        "nSignup" : perDay.nsignup,
        "nDepo" : perDay.ndepo,
        "valDepo" : perDay.valdepo,
        "numDepo" : perDay.numdepo,
        "spotsTurn" : perDay.spotsturn,
        "numSptBet" : perDay.numsptbet,
        "acSptUsr" : perDay.acsptusr,
        "sptNetRev" : perDay.sptnetrev,
        "casinoNetRev" : perDay.casinonetrev,
        "pokerNetRev" : perDay.pokernetrev,
        "bingoNetRev" : perDay.bingonetrev,
        "netRev" : perDay.netrev,
        "afSpt" : perDay.afspt,
        "afCasino" : perDay.afcasino,
        "afPoker" : perDay.afpoker,
        "afBingo" : perDay.afbingo,
        "commission" : perDay.commission
    } for perDay in rows]

@app.route('/bet365other/', methods = ['GET', 'POST'])
def bet365other():
    """Second bet365 account detail page / data endpoint.

    GET  -- render today's rows.
    POST -- JSON body carries ``period`` ("mm/dd/yyyy - mm/dd/yyyy") and
            ``optVal``: "0"/"1" per-day rows, "2" weekly totals, "3" monthly
            totals.  Returns ``jsonify(jsonData=[...])``.
    """
    data = {}
    if request.method == 'GET':
        now = datetime.datetime.now()
        today = now.date()
        data = db.session.query(Bet365Other).filter(Bet365Other.dateto == today)
        return render_template('pages/bet365other.html', data = data)
    elif request.method == 'POST':
        period = request.json['period']
        optVal = request.json['optVal']
        # strptime validates both endpoints, so the interpolated ISO dates
        # below cannot inject.
        fromDate = datetime.datetime.strptime(period.split('-')[0].strip(), '%m/%d/%Y').date()
        toDate = datetime.datetime.strptime(period.split('-')[1].strip(), '%m/%d/%Y').date()
        if (optVal == '0') or (optVal == '1'):
            # BUGFIX: order by the real date column -- ordering on the text
            # datefield sorted e.g. '2020-1-10' before '2020-1-9'.
            data = db.session.execute("""SELECT
                *,
                EXTRACT(YEAR FROM dateto)::text || '-' ||EXTRACT(MONTH FROM dateto)::text || '-' || EXTRACT(DAY FROM dateto)::text AS datefield
                FROM bet365others
                WHERE dateto >= '%s' AND dateto <= '%s'
                ORDER By dateto;""" % (fromDate, toDate))
            return jsonify(jsonData = _bet365other_rows(data))
        elif optVal == '2':
            # Weekly totals.  BUGFIX: ORDER BY MIN(dateto) -- the text
            # datefield sorted '2020/10(...)' before '2020/9(...)'.
            data = db.session.execute("""SELECT
                SUM(click) as click,
                SUM(nSignup) as nsignup,
                SUM(nDepo) as ndepo,
                SUM(valDepo) as valdepo,
                SUM(numDepo) as numdepo,
                SUM(spotsTurn) as spotsturn,
                SUM(numsptbet) as numsptbet,
                SUM(acsptusr) as acsptusr,
                SUM(sptnetrev) as sptnetrev,
                SUM(casinonetrev) as casinonetrev,
                SUM(pokernetrev) as pokernetrev,
                SUM(bingonetrev) as bingonetrev,
                SUM(netrev) as netrev,
                SUM(afspt) as afspt,
                SUM(afcasino) as afcasino,
                SUM(afpoker) as afpoker,
                SUM(afbingo) as afbingo,
                SUM(commission) as commission,
                EXTRACT(YEAR FROM dateto)::text || '/' ||EXTRACT(MONTH FROM dateto)::text || '(' || EXTRACT(WEEK FROM dateto)::text || 'wk.' || ')' AS datefield
                FROM bet365others
                WHERE dateto >= '%s' AND dateto <= '%s'
                GROUP BY datefield
                ORDER By MIN(dateto);""" % (fromDate, toDate))
        elif optVal == '3':
            # Monthly totals; same ordering fix as above.
            data = db.session.execute("""SELECT
                SUM(click) as click,
                SUM(nSignup) as nsignup,
                SUM(nDepo) as ndepo,
                SUM(valDepo) as valdepo,
                SUM(numDepo) as numdepo,
                SUM(spotsTurn) as spotsturn,
                SUM(numsptbet) as numsptbet,
                SUM(acsptusr) as acsptusr,
                SUM(sptnetrev) as sptnetrev,
                SUM(casinonetrev) as casinonetrev,
                SUM(pokernetrev) as pokernetrev,
                SUM(bingonetrev) as bingonetrev,
                SUM(netrev) as netrev,
                SUM(afspt) as afspt,
                SUM(afcasino) as afcasino,
                SUM(afpoker) as afpoker,
                SUM(afbingo) as afbingo,
                SUM(commission) as commission,
                EXTRACT(YEAR FROM dateto)::text || '/' || EXTRACT(MONTH FROM dateto)::text AS datefield
                FROM bet365others
                WHERE dateto >= '%s' AND dateto <= '%s'
                GROUP BY datefield
                ORDER By MIN(dateto);""" % (fromDate, toDate))
        # Unknown optVal leaves ``data`` as the empty dict, so this yields [].
        return jsonify(jsonData = _bet365other_rows(data))
@app.route('/eight88/', methods = ['GET', 'POST'])
def eight88():
    """888 affiliate page.

    GET renders the latest snapshot row; POST returns the same row serialized
    as ``jsonify(status=True, jsonData=[...])``.
    """
    latest = db.session.query(Eight88).order_by(Eight88.id.desc()).first()
    if request.method == 'GET':
        return render_template('pages/eight88.html', data = latest)
    if request.method == 'POST':
        # Every JSON key matches the model attribute of the same name.
        fields = (
            "impression", "click", "registration", "lead", "money_player",
            "balance", "prebalance",
            "imprwk", "cliwk", "regwk", "leadwk", "mpwk",
            "imprpre", "clipre", "regpre", "leadpre", "mppre",
            "imprto", "clito", "regto", "leadto", "mpto",
        )
        payload = {name: getattr(latest, name) for name in fields}
        return jsonify(status = True, jsonData = [payload])
@app.route('/bet10/', methods = ['GET', 'POST'])
def bet10():
    """10Bet affiliate page.

    GET renders the latest snapshot row.  POST dispatches on ``state``:
    "1" returns the latest row, "2" returns the row for the requested
    "mm/dd/yyyy" date (or status=False when no such row exists).
    """
    if request.method == 'GET':
        latest = db.session.query(Bet10).order_by(Bet10.id.desc()).first()
        return render_template('pages/bet10.html', data = latest)
    if request.method == 'POST':
        state = request.json["state"]
        if state == "1":
            latest = db.session.query(Bet10).order_by(Bet10.id.desc()).first()
            # JSON keys match the model attribute names.
            fields = ("merchant", "impression", "click", "registration",
                      "new_deposit", "commission", "impreytd", "cliytd",
                      "regytd", "ndytd", "commiytd")
            payload = {name: getattr(latest, name) for name in fields}
            return jsonify(status = True, jsonData = [payload])
        elif state == "2":
            dateVal = datetime.datetime.strptime(request.json['val'], '%m/%d/%Y').date()
            row = db.session.query(Bet10).filter_by(dateto = dateVal).first()
            if not row:
                return jsonify(status = False, message = "There is no data in your database...?")
            fields = ("merchant", "impreto", "clito", "regto", "ndto", "commito")
            payload = {name: getattr(row, name) for name in fields}
            return jsonify(status = True, jsonData = [payload])
@app.route('/realDeal/', methods = ['GET', 'POST'])
def realDeal():
    """RealDeal merchant dashboard: render the page (GET) or serve stats as JSON (POST).

    POST expects a JSON body with "state":
      "1" -> newest row with year-to-date figures
      "2" -> totals for the "dateto" date given in "val" (MM/DD/YYYY)
    NOTE(review): any other state falls through and returns None (a 500 in
    Flask) -- confirm whether that is intended.
    """
    data = {}
    if request.method == 'GET':
        data = db.session.query(RealDeal).order_by(RealDeal.id.desc()).first()
        return render_template('pages/realDeal.html', data = data)
    if request.method == 'POST':
        state = request.json["state"]
        if state == "1":
            # Most recent row in the RealDeal table.
            data = db.session.query(RealDeal).order_by(RealDeal.id.desc()).first()
            jsonData = []
            jsonData.append({
                "merchant" : data.merchant,
                "impression" : data.impression,
                "click" : data.click,
                "registration" : data.registration,
                "new_deposit" : data.new_deposit,
                "commission" : data.commission,
                "impreytd" : data.impreytd,
                "cliytd" : data.cliytd,
                # NOTE(review): sibling routes read data.regytd here; "regiytd"
                # may be a typo -- confirm against the RealDeal model columns.
                "regytd" : data.regiytd,
                "ndytd" : data.ndytd,
                "commiytd" : data.commiytd
            })
            return jsonify(status = True, jsonData = jsonData)
        elif state == "2":
            # Look up totals for an exact "dateto" date supplied as MM/DD/YYYY.
            dateStr = request.json['val']
            dateVal = datetime.datetime.strptime(dateStr, '%m/%d/%Y').date()
            data = db.session.query(RealDeal).filter_by(dateto = dateVal).first()
            if not data:
                return jsonify(status = False, message = "There is no data in your database...?")
            else:
                jsonData = []
                jsonData.append({
                    "merchant" : data.merchant,
                    "impreto" : data.impreto,
                    "clito" : data.clito,
                    "regto" : data.regto,
                    "ndto" : data.ndto,
                    "commito" : data.commito
                })
                return jsonify(status = True, jsonData = jsonData)
@app.route('/ladBroke/')
def ladBroke():
    """Render the LadBroke stats page from the newest LadBroke row."""
    latest = db.session.query(LadBroke).order_by(LadBroke.id.desc()).first()
    return render_template('pages/ladBroke.html', data=latest)
@app.route('/betFred/', methods = ['GET', 'POST'])
def betFred():
    """BetFred merchant dashboard: HTML page on GET, JSON stats on POST.

    POST body carries "state": "1" for the newest row with year-to-date
    figures, "2" for totals on the "dateto" date in "val" (MM/DD/YYYY).
    """
    if request.method == 'GET':
        latest = db.session.query(BetFred).order_by(BetFred.id.desc()).first()
        return render_template('pages/betFred.html', data=latest)
    if request.method == 'POST':
        state = request.json["state"]
        if state == "1":
            # Current period + year-to-date figures from the newest row.
            row = db.session.query(BetFred).order_by(BetFred.id.desc()).first()
            payload = {
                "merchant": row.merchant,
                "impression": row.impression,
                "click": row.click,
                "registration": row.registration,
                "new_deposit": row.new_deposit,
                "commission": row.commission,
                "impreytd": row.impreytd,
                "cliytd": row.cliytd,
                "regytd": row.regytd,
                "ndytd": row.ndytd,
                "commiytd": row.commiytd,
            }
            return jsonify(status=True, jsonData=[payload])
        elif state == "2":
            # Totals for a specific "dateto" date, supplied as MM/DD/YYYY.
            parsed_date = datetime.datetime.strptime(request.json['val'], '%m/%d/%Y').date()
            row = db.session.query(BetFred).filter_by(dateto=parsed_date).first()
            if not row:
                return jsonify(status=False, message="There is no data in your database...?")
            payload = {
                "merchant": row.merchant,
                "impreto": row.impreto,
                "clito": row.clito,
                "regto": row.regto,
                "ndto": row.ndto,
                "commito": row.commito,
            }
            return jsonify(status=True, jsonData=[payload])
@app.route('/paddy/')
def paddy():
    """Render the Paddy stats page from the newest Paddy row."""
    latest = db.session.query(Paddy).order_by(Paddy.id.desc()).first()
    return render_template('pages/paddy.html', data=latest)
@app.route('/netBet/')
def netBet():
    """Render the NetBet stats page from the newest NetBet row."""
    latest = db.session.query(NetBet).order_by(NetBet.id.desc()).first()
    return render_template('pages/netBet.html', data=latest)
@app.route('/titanBet/')
def titanBet():
    """Render the TitanBet stats page from the newest TitanBet row."""
    latest = db.session.query(TitanBet).order_by(TitanBet.id.desc()).first()
    return render_template('pages/titanBet.html', data=latest)
@app.route('/stan/', methods = ['GET', 'POST'])
def stan():
    """Stan merchant dashboard: render the page (GET) or serve stats as JSON (POST).

    POST expects a JSON body with "state":
      "1" -> newest row with year-to-date figures
      "2" -> totals for the "dateto" date given in "val" (MM/DD/YYYY)
    NOTE(review): any other state falls through and returns None (a 500 in
    Flask) -- confirm whether that is intended.
    """
    data = {}
    if request.method == 'GET':
        data = db.session.query(Stan).order_by(Stan.id.desc()).first()
        return render_template('pages/stan.html', data = data)
    if request.method == 'POST':
        state = request.json["state"]
        if state == "1":
            # Most recent row in the Stan table.
            data = db.session.query(Stan).order_by(Stan.id.desc()).first()
            jsonData = []
            jsonData.append({
                "merchant" : data.merchant,
                "impression" : data.impression,
                "click" : data.click,
                "registration" : data.registration,
                "new_deposit" : data.new_deposit,
                "commission" : data.commission,
                # NOTE(review): sibling routes read data.impreytd; "imprytd"
                # may be a typo -- confirm against the Stan model columns.
                "impreytd" : data.imprytd,
                "cliytd" : data.cliytd,
                "regytd" : data.regytd,
                "ndytd" : data.ndytd,
                "commiytd" : data.commiytd
            })
            return jsonify(status = True, jsonData = jsonData)
        elif state == "2":
            # Look up totals for an exact "dateto" date supplied as MM/DD/YYYY.
            dateStr = request.json['val']
            dateVal = datetime.datetime.strptime(dateStr, '%m/%d/%Y').date()
            data = db.session.query(Stan).filter_by(dateto = dateVal).first()
            if not data:
                return jsonify(status = False, message = "There is no data in your database...?")
            else:
                jsonData = []
                jsonData.append({
                    "merchant" : data.merchant,
                    # NOTE(review): sibling routes read data.impreto; "imprto"
                    # may be a typo -- confirm against the Stan model columns.
                    "impreto" : data.imprto,
                    "clito" : data.clito,
                    "regto" : data.regto,
                    "ndto" : data.ndto,
                    "commito" : data.commito
                })
                return jsonify(status = True, jsonData = jsonData)
@app.route('/coral/', methods = ['GET', 'POST'])
def coral():
    """Coral merchant dashboard.

    GET  -> render the dashboard page with the most recent Coral row.
    POST -> JSON API; the request body carries a "state" field:
            "1" -> newest row (current period + year-to-date figures)
            "2" -> totals for the "dateto" date given in "val" (MM/DD/YYYY)
    """
    if request.method == 'GET':
        data = db.session.query(Coral).order_by(Coral.id.desc()).first()
        return render_template('pages/coral.html', data = data)
    if request.method == 'POST':
        state = request.json["state"]
        if state == "1":
            data = db.session.query(Coral).order_by(Coral.id.desc()).first()
            # Guard against an empty table (previously raised AttributeError).
            if not data:
                return jsonify(status = False, message = "There is no data in your database...?")
            jsonData = [{
                "merchant" : data.merchant,
                "impression" : data.impression,
                "click" : data.click,
                "registration" : data.registration,
                "new_deposit" : data.new_deposit,
                "commission" : data.commission,
                "impreytd" : data.impreytd,
                "cliytd" : data.cliytd,
                "regytd" : data.regytd,
                "ndytd" : data.ndytd,
                "commiytd" : data.commiytd
            }]
            return jsonify(status = True, jsonData = jsonData)
        elif state == "2":
            dateStr = request.json['val']
            dateVal = datetime.datetime.strptime(dateStr, '%m/%d/%Y').date()
            data = db.session.query(Coral).filter_by(dateto = dateVal).first()
            if not data:
                return jsonify(status = False, message = "There is no data in your database...?")
            jsonData = [{
                "merchant" : data.merchant,
                "impreto" : data.impreto,
                "clito" : data.clito,
                "regto" : data.regto,
                "ndto" : data.ndto,
                "commito" : data.commito
            }]
            return jsonify(status = True, jsonData = jsonData)
        # Any other state previously fell through and returned None, which
        # Flask turns into a 500; report the bad request explicitly instead.
        return jsonify(status = False, message = "Unknown state value...?")
@app.route('/skyBet/', methods = ['GET', 'POST'])
def skyBet():
    """SkyBet merchant dashboard: render the page (GET) or serve stats as JSON (POST).

    POST expects a JSON body with "state":
      "1" -> newest row with year-to-date figures
      "2" -> totals for the "dateto" date given in "val" (MM/DD/YYYY)
    NOTE(review): any other state falls through and returns None (a 500 in
    Flask) -- confirm whether that is intended.
    """
    data = {}
    if request.method == 'GET':
        data = db.session.query(SkyBet).order_by(SkyBet.id.desc()).first()
        return render_template('pages/skyBet.html', data = data)
    if request.method == 'POST':
        state = request.json["state"]
        if state == "1":
            # Most recent row in the SkyBet table.
            data = db.session.query(SkyBet).order_by(SkyBet.id.desc()).first()
            jsonData = []
            jsonData.append({
                "merchant" : data.merchant,
                "impression" : data.impression,
                "click" : data.click,
                "registration" : data.registration,
                "new_deposit" : data.new_deposit,
                "commission" : data.commission,
                "impreytd" : data.impreytd,
                "cliytd" : data.cliytd,
                # NOTE(review): sibling routes read data.regytd here; "regiytd"
                # may be a typo -- confirm against the SkyBet model columns.
                "regytd" : data.regiytd,
                "ndytd" : data.ndytd,
                "commiytd" : data.commiytd
            })
            return jsonify(status = True, jsonData = jsonData)
        elif state == "2":
            # Look up totals for an exact "dateto" date supplied as MM/DD/YYYY.
            dateStr = request.json['val']
            dateVal = datetime.datetime.strptime(dateStr, '%m/%d/%Y').date()
            data = db.session.query(SkyBet).filter_by(dateto = dateVal).first()
            if not data:
                return jsonify(status = False, message = "There is no data in your database...?")
            else:
                jsonData = []
                jsonData.append({
                    "merchant" : data.merchant,
                    "impreto" : data.impreto,
                    "clito" : data.clito,
                    # NOTE(review): sibling routes read data.regto; "regito"
                    # may be a typo -- confirm against the SkyBet model columns.
                    "regto" : data.regito,
                    "ndto" : data.ndto,
                    "commito" : data.commito
                })
                return jsonify(status = True, jsonData = jsonData)
@app.route('/william/')
def william():
    """Render the William stats page from the newest William row."""
    latest = db.session.query(William).order_by(William.id.desc()).first()
    return render_template('pages/william.html', data=latest)
@app.route('/victor/', methods = ['GET', 'POST'])
def victor():
    """Victor merchant dashboard.

    GET  -> render the dashboard page with the most recent Victor row.
    POST -> JSON API; the request body carries a "state" field:
            "1" -> newest row (current period + year-to-date figures)
            "2" -> totals for the "dateto" date given in "val" (MM/DD/YYYY)
    """
    if request.method == 'GET':
        data = db.session.query(Victor).order_by(Victor.id.desc()).first()
        return render_template('pages/victor.html', data = data)
    if request.method == 'POST':
        state = request.json["state"]
        if state == "1":
            data = db.session.query(Victor).order_by(Victor.id.desc()).first()
            # Guard against an empty table (previously raised AttributeError).
            if not data:
                return jsonify(status = False, message = "There is no data in your database...?")
            jsonData = [{
                "merchant" : data.merchant,
                "impression" : data.impression,
                "click" : data.click,
                "registration" : data.registration,
                "new_deposit" : data.new_deposit,
                "commission" : data.commission,
                "impreytd" : data.impreytd,
                "cliytd" : data.cliytd,
                "regytd" : data.regytd,
                "ndytd" : data.ndytd,
                "commiytd" : data.commiytd
            }]
            return jsonify(status = True, jsonData = jsonData)
        elif state == "2":
            dateStr = request.json['val']
            dateVal = datetime.datetime.strptime(dateStr, '%m/%d/%Y').date()
            data = db.session.query(Victor).filter_by(dateto = dateVal).first()
            if not data:
                return jsonify(status = False, message = "There is no data in your database...?")
            jsonData = [{
                "merchant" : data.merchant,
                "impreto" : data.impreto,
                "clito" : data.clito,
                "regto" : data.regto,
                "ndto" : data.ndto,
                "commito" : data.commito
            }]
            return jsonify(status = True, jsonData = jsonData)
        # Any other state previously fell through and returned None, which
        # Flask turns into a 500; report the bad request explicitly instead.
        return jsonify(status = False, message = "Unknown state value...?")
if __name__ == '__main__':
    # manager.run()
    # NOTE(review): debug mode is hard-coded on -- disable before deploying,
    # since the Werkzeug debugger allows arbitrary code execution.
    app.debug = True
    app.run()
|
from django.urls import include, path
from rest_framework.routers import DefaultRouter
from .views import TestViewSet
# DRF router auto-generates the standard list/detail CRUD routes
# for the viewset under the "tests" prefix.
router = DefaultRouter()
router.register("tests", TestViewSet, basename="tests")
# All API routes are namespaced under /v1/.
urlpatterns = [
    path("v1/", include(router.urls)),
]
|
from os import environ
import os

# if you set a property in SESSION_CONFIG_DEFAULTS, it will be inherited by all configs
# in SESSION_CONFIGS, except those that explicitly override it.
# the session config can be accessed from methods in your apps as self.session.config,
# e.g. self.session.config['participation_fee']

# the environment variable OTREE_PRODUCTION controls whether Django runs in
# DEBUG mode. If OTREE_PRODUCTION==1, then DEBUG=False
# Force development mode; remove this line (or set OTREE_PRODUCTION=1) to deploy.
environ['OTREE_PRODUCTION'] = '0'
if environ.get('OTREE_PRODUCTION') not in {None, '', '0'}:
    DEBUG = False
else:
    DEBUG = True

SESSION_CONFIG_DEFAULTS = {
    'real_world_currency_per_point': 1.00,
    'participation_fee': 0.00,
    'doc': ""
}

# Disabled session config kept for reference:
# dict(
#     name='hl_mpl',
#     display_name='Risk Lottery',
#     num_demo_participants=10,
#     app_sequence=['hl_mpl'],
#     num_choices=8,
#     multiplier=10,
# ),

SESSION_CONFIGS = [
    {
        'name':'CTB',
        'display_name': 'encuesta',
        'num_demo_participants':1,
        'app_sequence': ['CTB'],
        'Rounds':None,
        'doc':"""
    """
    },
    {
        'name':'otdm_master',
        'display_name': 'otdm',
        'num_demo_participants':1,
        'app_sequence': ['otdm_master'],
        'Rounds':None,
        'doc':"""
    """
    },
    {
        'name': 'mpl',
        'display_name': 'MultiplePriceList (Holt/Laury)',
        'num_demo_participants': 1,
        'app_sequence': ['mpl']
    },
    {
        'name': 'otime',
        'display_name': 'otime',
        'num_demo_participants': 1,
        'app_sequence': ['otime']
    },
    {
        'name': 'BRET',
        'display_name': 'BRET',
        'num_demo_participants': 1,
        'app_sequence': ['BRET']
    },
    {
        'name': 'torneo',
        'display_name': 'Juego de encriptación',
        'num_demo_participants': 4,
        'app_sequence': ['torneo'],
        'observabilidad': False,
        'meritocracia': False,
    }
]

# ISO-639 code
# for example: de, fr, ja, ko, zh-hans
LANGUAGE_CODE = 'es'

# e.g. EUR, GBP, CNY, JPY
REAL_WORLD_CURRENCY_CODE = 'COP'
USE_POINTS = True

ROOMS = []

ADMIN_USERNAME = 'Ferley Rincon'
# for security, best to set admin password in an environment variable
ADMIN_PASSWORD = environ.get('OTREE_ADMIN_PASSWORD')

DEMO_PAGE_INTRO_HTML = """ """

# SECURITY(review): this key is committed to source control; rotate it and
# load it from an environment variable before any public deployment.
SECRET_KEY = 'jtq+07qbt-tvcu(si_j6-&2m2x-*d6btl0qbwss*(pkv6l#$p0'

# if an app is included in SESSION_CONFIGS, you don't need to list it here
INSTALLED_APPS = ['otree']

# variables for the otdm game
#: The total number of weeks
NUM_WEEKS = 52
#: The gain to be paid out per week
GAIN_PER_WEEK = 20

STATIC_URL = '/static/'
import unittest
from katas.kyu_6.same_array import same
class SameTestCase(unittest.TestCase):
    """Exercises same() on arrays of two-element pairs.

    As these cases show, same() treats inputs as equal regardless of the
    order of the pairs or of the elements within each pair, and unequal
    when the pairings themselves differ.
    """

    def test_true(self):
        self.assertTrue(same([], []))

    def test_true_2(self):
        left = [[2, 5], [3, 6]]
        right = [[5, 2], [3, 6]]
        self.assertTrue(same(left, right))

    def test_true_3(self):
        left = [[2, 5], [3, 6]]
        right = [[6, 3], [5, 2]]
        self.assertTrue(same(left, right))

    def test_true_4(self):
        left = [[2, 5], [3, 6]]
        right = [[6, 3], [2, 5]]
        self.assertTrue(same(left, right))

    def test_true_5(self):
        left = [[2, 5], [3, 5], [6, 2]]
        right = [[2, 6], [5, 3], [2, 5]]
        self.assertTrue(same(left, right))

    def test_true_6(self):
        left = [[2, 5], [3, 5], [6, 2]]
        right = [[3, 5], [6, 2], [5, 2]]
        self.assertTrue(same(left, right))

    def test_false(self):
        # Same element values overall, but paired up differently.
        self.assertFalse(same([[2, 3], [3, 4]], [[4, 3], [2, 4]]))

    def test_false_2(self):
        # Different lengths are never the same.
        self.assertFalse(same([[2, 3], [3, 2]], [[2, 3]]))
|
from tkinter import*
class LibraryManagementSystem:
    """Top-level tkinter window for the library management UI."""

    def __init__(self, root):
        """Build the title banner and the main content frame inside *root*."""
        self.root = root
        self.root.title("Library Management System")
        self.root.geometry("1920x1080+0+0")  # width x height + x/y offsets

        # Title banner stretched across the top of the window.
        title_label = Label(
            self.root,
            text="LIBRARY MANAGEMENT SYSTEM",
            bg="White",
            fg="#427bff",
            bd=20,
            relief=RIDGE,
            font=("times new roman", 50, "bold"),
            padx=2,
            pady=6,
        )
        title_label.pack(side=TOP, fill=X)

        # Main content frame placed below the banner.
        main_frame = Frame(self.root, bd=12, relief=RIDGE, padx=20, bg="white")
        main_frame.place(x=0, y=130, width=1530, height=400)
if __name__ == "__main__":
    # Launch the app: create the root window, attach the UI, start the loop.
    root=Tk()
    obj=LibraryManagementSystem(root)
    root.mainloop()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.