from util import _get_hash, lcm
from math import gcd
class GodelHashSet:
def __init__(self, iter_vals = []):
if not isinstance(iter_vals, (str, list)):
raise ValueError("value must be string or list")
self.hash = 1
for val in iter_vals:
val_hash = _get_hash(val)
if not self._contains(val_hash):
self.hash *= val_hash
def _contains(self, val_hash: int) -> bool:
return self.hash % val_hash == 0
def contains(self, val) -> bool:
val_hash = _get_hash(val)
return self._contains(val_hash)
def add(self, val) -> bool:
val_hash = _get_hash(val)
if not self._contains(val_hash):
self.hash *= val_hash
return True
else:
return False
def remove(self, val):
val_hash = _get_hash(val)
if self._contains(val_hash):
self.hash //= val_hash
return True
else:
return False
def intersect(self, other_set):
new_hash = gcd(self.hash, other_set.hash)
new_set = GodelHashSet()
new_set.hash = new_hash
return new_set
def union(self, other_set):
new_hash = lcm(self.hash, other_set.hash)
new_set = GodelHashSet()
new_set.hash = new_hash
return new_set
def diff(self, other_set):
new_hash = self.hash // gcd(self.hash, other_set.hash)
new_set = GodelHashSet()
new_set.hash = new_hash
return new_set
if __name__ == '__main__':
empty_set = GodelHashSet()
print(empty_set.hash)
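# Usage sketch (assumption: util._get_hash maps each distinct value to a unique
# prime number, which is what makes the divisibility test in _contains valid):
#
#     s = GodelHashSet(["a", "b"])
#     s.contains("a")               # True
#     s.add("c")                    # True (newly inserted)
#     t = GodelHashSet(["b", "c"])
#     s.union(t).contains("c")      # True  -> lcm keeps every prime factor
#     s.intersect(t).contains("b")  # True  -> gcd keeps only the shared factors
#     s.diff(t).contains("a")       # True  -> dividing by the gcd removes shared members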
|
import os,sys
import numpy as np
import ROOT
from root_numpy import root2array, root2rec, tree2rec, array2root
import pandas as pd
from badchtable import get_badchtable
from pulsed_list import get_pulsed_channel_list
def import_data( ptree_filename ):
"""Imports pulse tree and outputs list of numpy arrays"""
arr = root2array( ptree_filename, 'ptree' )
return arr
def get_channel_groups( df ):
chs = df.groupby( ('crate','slot','femch') )
return chs
def get_run_data( run ):
filename = 'data/pulser_run0%d.root'%(run)
if not os.path.exists(filename):
print "Did not Find: ",filename
return None
arr = import_data( filename )
df = pd.DataFrame(arr)
return df
def sponge_run( run, use_pulsed_list=False, remake=False ):
out_npz = 'output/run%03d.npz'%(run)
if os.path.exists(out_npz) and not remake:
print "NPZ for Run ",run," already made"
return
print "PROCESSING: ",out_npz
# crate slot femch ped_mean ped_rms amp charge
df = get_run_data(run)
if df is None:
print "No data for run: ",a
return
# get bad channel table
badch = get_badchtable()
badch_list = badch[['Crate','Slot','FEM Channel']].values.tolist()
# pulsed channel list
if use_pulsed_list:
pulsed_df = get_pulsed_channel_list(run)
print pulsed_df
pulsed_list = pulsed_df[['crate','slot','femch']].values.tolist()
print "LOADED PULSED CHANNELS RECORDED: ",len(pulsed_list)
chs = get_channel_groups( df[ df['amp']>10 ] )
print "Number of groups: ",len(chs)
# get max, mean of each column for each group
maxch = chs.aggregate(np.max)
meanch = chs.aggregate(np.mean)
max_cols = []
for col in maxch.columns:
max_cols.append( "max_%s"%(col) )
maxch.columns = max_cols
mean_cols = []
for col in meanch.columns:
mean_cols.append( "mean_%s"%(col) )
meanch.columns = mean_cols
dfmax = pd.DataFrame( maxch )
dfmean = pd.DataFrame( meanch )
outdf = dfmax.join( dfmean ).reset_index()
# if adc is above a threshold, then mark it as a pulsed channel
if use_pulsed_list:
outdf['pulsed'] = np.vectorize( lambda x,y,z: 1 if [x,y,z] in pulsed_list else 0 )( outdf['crate'], outdf['slot'], outdf['femch'] )
else:
outdf['pulsed'] = outdf['max_amp'].apply( lambda x: 1 if x>700.0 else 0 )
# tag bad channels
outdf['badch'] = np.vectorize( lambda x,y,z: 1 if [x,y,z] in badch_list else 0 )( outdf['crate'], outdf['slot'], outdf['femch'] )
print "NPULSED: ",len( outdf[ outdf['pulsed']==1 ] )
print "NPULSED and BADCH: ",len( outdf.query( '(pulsed==1) & (badch==1)' ) )
print "Writing ",out_npz
np.savez( out_npz, outdf=outdf.to_records() )
array2root( outdf.to_records(), 'output/run%03d.root'%(run),'maxamp' )
if __name__=="__main__":
for x in xrange(83,85):
sponge_run( x, use_pulsed_list=False, remake=True )
|
from django import forms
from django.contrib.auth.models import User
from django.forms.models import inlineformset_factory
from .models import Comment, Post
from .models import Profile
class NewComment(forms.ModelForm):
class Meta:
model = Comment
fields = ('body',)
class PostCreateForm(forms.ModelForm):
title = forms.CharField(label='عنوان التدوينة')
content = forms.CharField(label='نص التدوينة', widget=forms.Textarea)
onlyMe = forms.BooleanField(help_text="اذا كنت تريد الاحتفاظ بهذه التدوينة لنفسك فقط , اضغط على المربع",
label=" انا فقط",required=False)
stuff = forms.URLField(label='رابط ملحق',help_text="* يجب ان يكون الرابط صالحاً",required=False)
image = forms.ImageField(label= 'إضافة صورة', required=False)
class Meta:
model = Post
fields = ['title', 'content', 'onlyMe','stuff','image']
class UserCreationForm(forms.ModelForm):
username = forms.CharField(label='اسم المستخدم', max_length=30,
help_text='اسم المستخدم يجب أن لا يحتوي على مسافات')
email = forms.EmailField(label="البريد الإلكتروني")
first_name = forms.CharField(label='الاسم الأول')
last_name = forms.CharField(label='الاسم الأخير')
password1 = forms.CharField(
label='كلمة المرور', widget=forms.PasswordInput, min_length=8)
password2 = forms.CharField(
label='تأكيد كلمة المرور ', widget=forms.PasswordInput, min_length=8)
class Meta:
model = User
fields = ('username', 'email', 'first_name',
'last_name', 'password1', 'password2')
def clean_password2(self):
cd = self.cleaned_data
if cd['password1'] != cd['password2']:
raise forms.ValidationError('كلمة المرور غير متطابقة')
return cd['password2']
def clean_username(self):
cd = self.cleaned_data
if User.objects.filter(username=cd['username']).exists():
raise forms.ValidationError('يوجد مستخدم مسجل بهذا الاسم')
return cd['username']
class LoginForm(forms.ModelForm):
username = forms.CharField(label='اسم المستخدم')
password = forms.CharField(label='كلمة المرور', widget=forms.PasswordInput)
class Meta:
model = User
fields = ('username', 'password')
class UserUpdateForm(forms.ModelForm):
first_name = forms.CharField(label='الاسم الأول')
last_name = forms.CharField(label='الاسم الأخير')
email = forms.EmailField(label="البريد الإلكتروني")
class Meta:
model = User
fields = ('first_name', 'last_name', "email")
class ProfileUpdateForm(forms.ModelForm):
class Meta:
model = Profile
fields = ('image', )
|
# Task scheduling
import ordenador
from random import randint
# The escalonar function keeps only the tasks that are compatible with each other.
def escalonar(lista):
# We pass the list to the ordenador function, which is responsible for sorting
# all tasks in non-decreasing order of their finishing time.
listaOrdenada = ordenador.ordenador(lista)
# Declare an empty list to store the values produced by the process below.
listaFinal = []
# Use the first task as the reference when comparing.
tarefaBase = listaOrdenada[0]
listaFinal.append(tarefaBase)
# In the loop below we compare each following task in the list with the reference task:
# if its start time is greater than or equal to the reference task's end time, it passes the test
# and enters the final list. Tasks that are not compatible are discarded.
for i in range(1, len(listaOrdenada), 1):
if(tarefaBase.fim <= listaOrdenada[i].inicio):
listaFinal.append(listaOrdenada[i])
tarefaBase = listaOrdenada[i]
# Below we walk through the list and record the selected results.
# Here we choose the file name
numero = randint(0,10000000)
# Path where the file with the recorded data will be saved
arquivo = open("escalonados/" + str(numero) + ".txt", "w")
# Open the file responsible for storing how many elements were scheduled this time
execucoes = open("dados.txt", "a")
# Loop that saves the selected tasks
for i in range(0, len(listaFinal), 1):
arquivo.write("A tarefa: " + listaFinal[i].nome + "\nCom início em: " + str(listaFinal[i].inicio) + "\nE final em: " + str(listaFinal[i].fim) + "\n\n")
# Finish writing the data and then close access to the file
execucoes.write(str(len(listaFinal)) + " ")
execucoes.close()
# Close the file responsible for the selected tasks
print("Gravando as palavras selecionadas no arquivo!")
arquivo.close()
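# Sketch (assumption, not part of this file): the Tarefa objects and the ordenador
# module used above are not shown here. Something like the following would satisfy
# the greedy interval-scheduling logic, which only needs nome/inicio/fim attributes
# and tasks sorted by non-decreasing end time.
class TarefaExemplo:
    def __init__(self, nome, inicio, fim):
        self.nome = nome      # task name
        self.inicio = inicio  # start time
        self.fim = fim        # end time

def ordenador_exemplo(lista):
    # sort tasks by finishing time (non-decreasing)
    return sorted(lista, key=lambda tarefa: tarefa.fim)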
|
# Python course: basics and applications
# Problem 1, block 2.2. Working with code: modules and imports
'''
The first line contains three numbers describing a date -- year, month and day.
The second line contains one number, days -- a number of days.
Compute and print the year, month and day of the date that occurs once days days have passed since the original date.
Note:
Use the standard datetime module to solve this problem.
The datetime.date class (for storing a date) and the datetime.timedelta class (for adding days to a date) will be useful.
'''
# Solution
import datetime
date_raw = list(map(int, input().split(' ')))
delta = datetime.timedelta(int(input()))
date_raw = datetime.date(*date_raw)
date = date_raw + delta
print(date.year, date.month, date.day)
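# Worked example: with a first input line of "2016 4 20" and a second line of "14",
# the date 2016-04-20 plus 14 days is 2016-05-04, so the program prints "2016 5 4".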
|
from operator import and_ as AND
from operator import or_ as OR
from dataclasses import dataclass
@dataclass
class Rule():
conditions: list
actions: list
@dataclass
class RuleCondition():
pass
@dataclass
class RuleAction():
pass
@dataclass
class MatchCondition(RuleCondition):
fieldname: str
values: list
operator: object
regex: str = None
@dataclass
class MatchNumericCondition(RuleCondition):
fieldname: str
value: list
operator: object
absolute: bool = False
@dataclass
class ValueSetter(RuleAction):
fieldname: str
get_value: callable
wrap: callable
@dataclass
class ValueAdder(RuleAction):
fieldname: str
values: list
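# Usage sketch (assumption; the evaluation logic lives elsewhere): a rule whose
# condition matches a "status" field against a set of values combined with the
# imported or_ operator, and whose action sets a "priority" field.
#
#     example_rule = Rule(
#         conditions=[MatchCondition(fieldname="status", values=["open", "pending"], operator=OR)],
#         actions=[ValueSetter(fieldname="priority", get_value=lambda record: "high", wrap=str)],
#     )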
|
# input=input("Enter answer")
#
# def if_yes_fun(input):
#
# if input.lower() == 'yes':
# print("correct")
#
# else:
# print("Wrong")
#
# if_yes_fun(input)
# firstNo=int(input("Enter a number"))
# secNo=int(input("Enter a second number"))
# thirdNo=int(input("Enter a third number"))
#
# def largestNum(firstNo,secNo,thirdNo):
# if(firstNo>secNo) and (firstNo>thirdNo):
# print('The largest no is {}'.format(firstNo))
#
# elif(secNo>firstNo) and (secNo>thirdNo):
# print('The largest no is {}'.format(secNo))
# elif(thirdNo>firstNo)and(thirdNo>secNo):
# print('The largest no is {}'.format(thirdNo))
# largestNum(firstNo,secNo,thirdNo)
a=[8,9,8,78,987,78]
def picklastandFirst(a):
first = a[0]
last = a[-1]
print("first value is", first, "last val is", last)
return first, last
picklastandFirst(a)
myDict={
"Name":"stan",
"age":34
}
bl=[]
for each in myDict.items():
c=list(each)
bl.append(c)
print(bl)
profit={
"cost_price":32.67,
"sell_price":45.00,
"inventory":1200
}
# profit({
# "cost_price": 32.67,
# "sell_price": 45.00,
# "inventory": 1200
# })
def cal_profit(profit):
profit=(profit['sell_price']-profit['cost_price'])*profit['inventory']
return profit
prof=(cal_profit(profit))
print("%.2f"%prof)
|
from urllib.request import urlretrieve
src = "https://movie-phinf.pstatic.net/20190116_187/15476220698637Uv7t_JPEG/movie_image.jpg?type=m203_290_2"
urlretrieve(src,"poster.png")
|
import unittest
from min_max_diff import *
class TestMinMaxDiff(unittest.TestCase):
def test_min_max_diff(self):
numbers = [1, 2, 3, 4, 5]
self.assertEqual(min_max_diff(numbers), 4)
def test_min_max_diff_empty_list(self):
numbers = []
self.assertEqual(min_max_diff(numbers), None)
def test_min_max_diff_zero(self):
numbers = [0]
self.assertEqual(min_max_diff(numbers), 0)
if __name__ == '__main__':
unittest.main()
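# Sketch (assumption): an implementation of min_max_diff consistent with the tests
# above -- None for an empty list, otherwise the difference between max and min.
#
#     def min_max_diff(numbers):
#         if not numbers:
#             return None
#         return max(numbers) - min(numbers)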
|
# -*- coding: utf-8 -*-
import xmlrpclib
import base64
from pprint import pprint as pp
url = "http://shoprrt.com/xmlrpc/object"
db = "rimreadytires"
uid = 1
password = "Antonio230"
sock = xmlrpclib.ServerProxy(url)
model = "product.template"
real_list = []
count = 0
products_ids = sock.execute(db, uid, password, model, 'search', [])
products = sock.execute(db, uid, password, model, 'read', products_ids)
avoid_list = {}
for product in products:
result = "Este no existe"
if product["name"].startswith("U-PLT-A"):
result = sock.execute(db, uid, password, model, 'write', product["id"], {"categ_id": 9})
elif product["name"].startswith("U-PLT-B"):
result = sock.execute(db, uid, password, model, 'write', product["id"], {"categ_id": 4})
elif product["name"].startswith("U-PLT-C"):
result = sock.execute(db, uid, password, model, 'write', product["id"], {"categ_id": 5})
elif product["name"].startswith("U-PLT-T"):
result = sock.execute(db, uid, password, model, 'write', product["id"], {"categ_id": 6})
elif product["name"].startswith("U-PLT-S"):
result = sock.execute(db, uid, password, model, 'write', product["id"], {"categ_id": 8})
print result
# if product["name"].startswith("U-PLT") and product["qty_available"] > 0:
# result = sock.execute(db, uid, password, model, 'write', product["id"], {"website_published": True})
# else:
# result = sock.execute(db, uid, password, model, 'write', product["id"], {"website_published": False})
# print result
pp(avoid_list)
|
import unittest
from katas.beta.small_enough_beginner import small_enough
class SmallEnoughTestCase(unittest.TestCase):
def test_true_1(self):
self.assertTrue(small_enough([66, 101], 200))
def test_true_2(self):
self.assertTrue(small_enough([101, 45, 75, 105, 99, 107], 107))
def test_true_3(self):
self.assertTrue(
small_enough([80, 117, 115, 104, 45, 85, 112, 115], 120)
)
def test_true_4(self):
self.assertTrue(small_enough([1, 2, 3, 4, 5, 6, 7, 8, 9], 10))
def test_true_5(self):
self.assertTrue(
small_enough([12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12], 12)
)
def test_false_1(self):
self.assertFalse(
small_enough([78, 117, 110, 99, 104, 117, 107, 115], 100)
)
def test_false_2(self):
self.assertFalse(small_enough([1, 1, 1, 1, 1, 2], 1))
def test_false_3(self):
self.assertFalse(small_enough([78, 33, 22, 44, 88, 9, 6], 87))
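# Sketch (assumption): an implementation of small_enough consistent with the tests
# above -- True when every element is less than or equal to the limit.
#
#     def small_enough(array, limit):
#         return all(x <= limit for x in array)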
|
from .vol_data import VolSurfaceData, VolSmileData
import numpy as np
from datetime import timedelta
def augment_vol_data(data, n_rows, spot_perturb, time_perturb, vol_perturb):
exis_len = len(data)
augmented_data = []
for i in range(0, n_rows, exis_len):
for vol_data in data:
perturbed_data = perturb_gauss(vol_data, spot_perturb, time_perturb, vol_perturb) if not i == 0 else vol_data
augmented_data.append(perturbed_data)
return augmented_data
def perturb(vol_data, strike_perturb, time_perturb, vol_perturb):
# Only the vol is perturbed as we want to see the effect of vol.
# So the liquid instruments' strike and expiry are kept constant
# strike_perturb and time_perturb are ideally 0
strikes_per = list(map(lambda x: x + strike_perturb * np.random.randn(), vol_data.strikes))
spot_per = vol_data.spot + strike_perturb * np.random.randn()
smiles_per = []
for smile in vol_data.smiles:
pillar_per = smile.pillar_date + timedelta(days = int(time_perturb * np.random.randn()))
pert = vol_perturb * 2 * (np.random.random() - 0.5)
vol_per = list(map(lambda x: x + pert, smile.vols))
smile_per = VolSmileData(pillar_per, smile.rd, smile.rf, vol_per)
smiles_per.append(smile_per)
return VolSurfaceData(vol_data.underlying_name, vol_data.market_date,
vol_data.spot_date, spot_per, strikes_per, smiles_per)
# Subtracts bivariate gaussian, centered at random point, from input data
def perturb_gauss(vol_data, strike_perturb, time_perturb, vol_perturb, n_gauss = 5):
# Only the vol is perturbed as we want to see the effect of vol.
# So the liquid instruments' strike and expiry are kept constant
# strike_perturb and time_perturb are ideally 0
strikes_per = list(map(lambda x: x + strike_perturb * np.random.randn(), vol_data.strikes))
spot_per = vol_data.spot + strike_perturb * np.random.randn()
n_strikes = len(vol_data.strikes)
n_mats = len(vol_data.smiles)
vols_coll = np.zeros((n_mats, n_strikes))
i = 0
for smile in vol_data.smiles:
vols_coll[i] = smile.vols
i += 1
for j in range(n_gauss):
z = get_bivariate_normal(n_strikes, n_mats, vol_perturb)
vols_coll = vols_coll - z
smiles_per = []
i = 0
for smile in vol_data.smiles:
pillar_per = smile.pillar_date + timedelta(days = int(time_perturb * np.random.randn()))
vol_per = vols_coll[i]
smile_per = VolSmileData(pillar_per, smile.rd, smile.rf, vol_per)
smiles_per.append(smile_per)
i += 1
return VolSurfaceData(vol_data.underlying_name, vol_data.market_date,
vol_data.spot_date, spot_per, strikes_per, smiles_per)
def get_bivariate_normal(n_strikes, n_mats, vol_perturb):
x, y = np.meshgrid(np.linspace(-1, 1, n_strikes), np.linspace(-1, 1, n_mats))
mu_x = 2 * (np.random.random() - 0.5)
mu_y = 2 * (np.random.random() - 0.5)
p = 2 * (np.random.random() - 0.5)
z = bivariate_normal(x, y, 1.0, 1.0, mu_x, mu_y, sigmaxy=p)
z = (z - z.mean()) / z.std() * vol_perturb * 0.3 #Rescale pdf so that vol is in reasonable range (-5% to 5%)
return z
def bivariate_normal(X, Y, sigmax=1.0, sigmay=1.0,
mux=0.0, muy=0.0, sigmaxy=0.0):
"""
Bivariate Gaussian distribution for equal shape *X*, *Y*.
See `bivariate normal
<http://mathworld.wolfram.com/BivariateNormalDistribution.html>`_
at mathworld.
"""
Xmu = X-mux
Ymu = Y-muy
rho = sigmaxy/(sigmax*sigmay)
z = Xmu**2/sigmax**2 + Ymu**2/sigmay**2 - 2*rho*Xmu*Ymu/(sigmax*sigmay)
denom = 2*np.pi*sigmax*sigmay*np.sqrt(1-rho**2)
return np.exp(-z/(2*(1-rho**2))) / denom
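# Usage sketch (assumption): generate one random perturbation surface for a
# 5-strike x 4-maturity grid with a 5% vol scale and inspect its spread.
#
#     z = get_bivariate_normal(n_strikes=5, n_mats=4, vol_perturb=0.05)
#     z.shape    # (4, 5) -- rows are maturities, columns are strikes
#     z.std()    # exactly 0.05 * 0.3 = 0.015 by construction of the rescaling above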
|
#!/usr/bin/env python
# gscholar - Get bibtex entries from Google Scholar
# Copyright (C) 2011 Bastian Venthur <venthur at debian org>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""Library to query Google Scholar.
Call the method query with a string which contains the full search string.
Query will return a list of bibtex items.
"""
import hashlib
import logging
import optparse
import os
import random
import re
import subprocess
import sys
import urllib.error
import urllib.parse
import urllib.request
from html.entities import name2codepoint
# fake google id (looks like a 16-character hex string)
google_id = hashlib.md5(str(random.random()).encode()).hexdigest()[:16]
GOOGLE_SCHOLAR_URL = "http://scholar.google.com"
# the cookie looks normally like:
# 'Cookie' : 'GSP=ID=%s:CF=4' % google_id }
# where CF is the format (e.g. bibtex). since we don't know the format yet, we
# have to append it later
HEADERS = {'User-Agent': 'Mozilla/5.0',
'Cookie': 'GSP=ID=%s' % google_id}
FORMAT_BIBTEX = 4
FORMAT_ENDNOTE = 3
FORMAT_REFMAN = 2
FORMAT_WENXIANWANG = 5
def query(search_string: str, out_format: int, all_results=False):
"""Return a list of bibtex items.
:param search_string: string to be search in gscholar
:param out_format: int to indicate what format to output, see FORMAT_* consts
:param all_results: if output all results or just the first one
"""
logging.debug("Query: %s" % search_string)
search_str = '/scholar?q=' + urllib.parse.quote(search_string)
url = GOOGLE_SCHOLAR_URL + search_str
header = HEADERS
header['Cookie'] += u":CF={0:d}".format(out_format)
request = urllib.request.Request(url, headers=header)
response = urllib.request.urlopen(request)
html = response.read()
html = html.decode('ascii', 'ignore')
# grab the links
tmp = get_links(html, out_format)
# follow the bibtex links to get the bibtex entries
result = list()
if all_results is False and len(tmp) != 0:
tmp = [tmp[0]]
for link in tmp:
url = GOOGLE_SCHOLAR_URL + link
request = urllib.request.Request(url, headers=header)
response = urllib.request.urlopen(request)
bib = response.read()
# print
# print
# print bib
result.append(bib)
return result
def get_links(html: str, out_format: int) -> list:
"""Return a list of reference links from the html.
:param html: the html page acquired from gscholar
:param out_format: format of output
"""
if out_format == FORMAT_BIBTEX:
ref_re = re.compile(r'<a href="(/scholar\.bib\?[^>]*)">')
elif out_format == FORMAT_ENDNOTE:
ref_re = re.compile(r'<a href="(/scholar\.enw\?[^>]*)">')
elif out_format == FORMAT_REFMAN:
ref_re = re.compile(r'<a href="(/scholar\.ris\?[^>]*)">')
elif out_format == FORMAT_WENXIANWANG:
ref_re = re.compile(r'<a href="(/scholar\.ral\?[^>]*)">')
else:
return None
ref_list = ref_re.findall(html)
# escape html entities
ref_list = [re.sub(u'&({0:s});'.format('|'.join(name2codepoint)), lambda m: chr(name2codepoint[m.group(1)]), s) for
s in ref_list]
return ref_list
def convert_pdf_to_txt(pdf: str) -> str:
"""Convert a pdf file to text and return the text.
This method requires pdftotext to be installed.
:param pdf: path of the pdf file to be converted
"""
stdout = subprocess.Popen(["pdftotext", "-q", pdf, "-"], stdout=subprocess.PIPE).communicate()[0]
return stdout.decode("utf-8", "ignore")
def pdf_lookup(pdf: str, all_results: bool, out_format: int) -> list:
"""Look a pdf up on google scholar and return bibtex items.
:param pdf: pdf file path to be looked up
:param all_results: if return all results or just first
:param out_format: output format of bibliography"""
txt = convert_pdf_to_txt(pdf)
# remove all non alphanumeric characters
txt = re.sub("\W", " ", txt)
words = txt.strip().split()[:20]
gs_query = " ".join(words)
bibtex_list = query(gs_query, out_format, all_results)
return bibtex_list
def _get_bib_element(bib_item, element):
"""Return element from bib_item or None."""
item_list = [item.strip() for item in bib_item.split("\n")]
for item in item_list:
if item.startswith(element):
value = item.split("=", 1)[-1]
value = value.strip()
while value.endswith(','):
value = value[:-1]
while value.startswith('{') or value.startswith('"'):
value = value[1:-1]
return value
return None
def rename_file(pdf, bib_item):
"""Attempt to rename pdf according to bib_item.
:param pdf: path of pdf file to rename
:param bib_item: list of bib_items"""
year = _get_bib_element(bib_item, "year")
author = _get_bib_element(bib_item, "author")
if author:
author = author.split(",")[0]
title = _get_bib_element(bib_item, "title")
l = [x for x in (year, author, title) if x]
filename = " - ".join(l) + ".pdf"
new_file = pdf.replace(os.path.basename(pdf), filename)
print()
print("Will rename:")
print()
print((" %s" % pdf))
print()
print("to")
print()
print((" %s" % new_file))
print()
print("Proceed? [y/N]")
answer = input()
if answer == 'y':
print(("Renaming %s to %s" % (pdf, new_file)))
os.rename(pdf, new_file)
else:
print("Aborting.")
def main():
usage = 'Usage: %prog [options] {pdf | "search terms"}'
parser = optparse.OptionParser(usage)
parser.add_option("-a", "--all", action="store_true", dest="all",
default="False", help="show all bibtex results")
parser.add_option("-d", "--debug", action="store_true", dest="debug",
default="False", help="show debugging output")
parser.add_option("-r", "--rename", action="store_true", dest="rename",
default="False", help="rename file (asks before doing it)")
# noinspection SpellCheckingInspection
parser.add_option("-f", "--outputformat", dest='output',
default="bibtex",
help="Output format. Available formats are: "
"bibtex, endnote, refman, wenxianwang [default: %default]")
(options, args) = parser.parse_args()
if options.debug is True:
logging.basicConfig(level=logging.DEBUG)
if options.output == 'bibtex':
out_format = FORMAT_BIBTEX
elif options.output == 'endnote':
out_format = FORMAT_ENDNOTE
elif options.output == 'refman':
out_format = FORMAT_REFMAN
elif options.output == 'wenxianwang':
out_format = FORMAT_WENXIANWANG
else:
return
if len(args) != 1:
parser.error("No argument given, nothing to do.")
sys.exit(1)
args = args[0]
pdf_mode = False
if os.path.exists(args):
logging.debug("File exist, assuming you want me to lookup the pdf: %s." % args)
pdf_mode = True
bib_list = pdf_lookup(args, options.all, out_format)
else:
logging.debug("Assuming you want me to lookup the query: %s." % args)
bib_list = query(args, out_format, options.all)
if len(bib_list) < 1:
print("No results found, try again with a different query!")
sys.exit(1)
if options.all:
logging.debug("All results:")
for i in bib_list:
print(i)
else:
logging.debug("First result:")
print((bib_list[0]))
if options.rename:
if not pdf_mode:
print("You asked me to rename the pdf but didn't tell me which file to rename, aborting.")
sys.exit(1)
else:
rename_file(args, bib_list[0])
if __name__ == "__main__":
main()
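# Usage sketch (assumption): querying for a title and printing the first BibTeX
# entry, using this module's own query() function and FORMAT_BIBTEX constant.
#
#     entries = query("a singularly valuable decomposition", FORMAT_BIBTEX)
#     if entries:
#         print(entries[0])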
|
import sys
import time
import socket
import struct
from types import LongType, FloatType
import libweb100
import Web100 as PyWeb100
from pathlib import *
################################################################
# report generation
# stdout and/or a .txt file
stdouttype="T"
tfile=None
def runlog(lvl, msg):
"Prepare to deprecate this report generation"
global tfile, stdouttype, tfmt
if (not lvl in tfmt["T"]) or (not lvl in tfmt["H"]):
lvl="X"
if lvl=="X":
pass # XXX exceptional = True
# fixup the html character encoding
# hmsg = msg.replace(">","&gt;")
# if hmsg[1]==" ":
# hmsg="&nbsp;"+hmsg
# stdout first
if stdouttype=="T":
print tfmt[stdouttype][lvl]%msg,
else:
print tfmt[stdouttype][lvl]%hmsg,
sys.stdout.flush()
# and then the file
if tfile:
tfile.write(tfmt["T"][lvl]%msg)
#def html(str):
# """ Unformated html output """
# global stdouttype
# if stdouttype=="H":
# print str
tfmt={}
tfmt["T"]={}
tfmt["H"]={}
# tester information
# X Unknown
# H heading (Big, black)
# T Tester problem (yellow)
# I Misc info, progress reports (grey)
# Test information
# M measurements and targets or goals (black)
# P pass (green)
# W warn, test may be inaccurate (orange)
# F fail (red)
# N novice - (blue)
# WA, FA *Action (bold, ital)
# X - undefined entropy
tfmt["T"]["X"]="XXX: %s\n"
tfmt["H"]["X"]="XXX: %s<br>\n"
# H Heading (h1 black)
tfmt["T"]["H"]="%s\n"
tfmt["H"]["H"]="<H1>%s</H1>\n"
# T Tester problem/limitation (yellow)
tfmt["T"]["T"]="%s\n"
tfmt["H"]["T"]="<font color=\"#C0C000\">%s</font><br>\n"
# E Tester error (Red)
tfmt["T"]["E"]="%s\n"
tfmt["H"]["E"]="<font color=\"#FF0000\">%s</font><br>\n"
# I Misc info, progress reports (grey)
tfmt["T"]["I"]="%s\n"
tfmt["H"]["I"]="<font color=\"#C0C0C0\">%s</font><br>\n"
# M Measurement and targets or goals (black)
tfmt["T"]["M"]="%s\n"
tfmt["H"]["M"]="<font color=\"#000000\">%s</font><br>\n"
# P Pass (Green)
tfmt["T"]["P"]="%s\n"
tfmt["H"]["P"]="<font color=\"#00FF00\">%s</font><br>\n"
# W Warn, moderate problem with path or tester (Orange)
tfmt["T"]["W"]="%s\n"
tfmt["H"]["W"]="<font color=\"#FFC000\">%s</font><br>\n"
# WA Warn Action (BI Orange)
tfmt["T"]["WA"]="%s\n"
tfmt["H"]["WA"]="<font color=\"#FFC000\"><b><i>%s</i></b></font><br>\n"
# F Fail (Red)
tfmt["T"]["F"]="%s\n"
tfmt["H"]["F"]="<font color=\"#FF0000\">%s</font><br>\n"
# FA FailAction (BI Red)
tfmt["T"]["FA"]="%s\n"
tfmt["H"]["FA"]="<font color=\"#FF0000\"><b><i>%s</i></b></font><br>\n"
# N Novice - overly helpful (Blue)
tfmt["T"]["N"]="%s\n"
tfmt["H"]["N"]="<font color=\"#0000FF\">%s</font><br>\n"
def setupRunlog(opts, fmt):
global tfile, stdouttype
tfile = None
fmt=fmt.upper()
stdouttype=fmt[0]
if stdouttype != "H":
stdouttype="T"
fmt=fmt[1:]
if "T" in fmt:
tfile=open(opts.logbase+".txt", "w")
################################################################
# Setup and pre-test
def setupParse(p):
# Host/connection selection
p.add_option("-H", "--host",
help="Host to connect to",
type="string", default="", dest="host")
p.add_option("-C", "--cid",
help="CID of TCP connection to use",
type="int", default=-1, dest="cid")
p.add_option("-F", "--fd",
help="File descriptor of an open TCP connection (int)",
type="int", default=-1, dest="fd")
p.add_option("-x", "--xmit",
help="Transmit data on specidifed FD (with -F only)",
action="store_true", default=False, dest="ixmit")
p.add_option("-L", "--listen",
help="Listen for an incoming connection",
action="store_true", default=False, dest="listen_opt")
p.add_option("-P", "--port",
help="Port (default is 56117)",
type="int", default=56117, dest="port")
# override low level properties
p.add_option("-B", "--bully",
help="Stiffen TCP (beyond AIMD)",
type="int", dest="bully")
p.add_option("-U", "--mss",
help="Set MSS (down only)",
type="int", default=0, dest="set_mss")
p.add_option("", "--tos",
help="Set the TOS byte (8 bits)",
type="int", default=-1, dest="set_tos") # NB: -2 means error
# server only stuff
p.add_option("", "--queclient",
type="string", default="")
p.add_option("", "--maxtime",
help="Running time limit (5 minute default)",
type="int", default=300) # 10 minutes
def queClient(opts):
"""
Send a message on the "control channel" that we are ready for an active client.
This is really part of a diagnostic server, but it makes life so much easier.
"""
if opts.queclient:
# this has to be a naked print to stdout. It may be incompatible with --format=h??
print "%s %d\n"%(opts.queclient, opts.port)
sys.stdout.flush()
timelimit=0
def setupTCP(ev, opts):
"""
Setup the TCP connection for a test.
"""
# Set up the internal soft watchdog
global maxtime
maxtime = time.time() + opts.maxtime - 5 # deduct 5 seconds for slop
# Set up web100 access
global ag
ag = libweb100.web100_attach(libweb100.WEB100_AGENT_TYPE_LOCAL, None)
if ag == None:
runlog("F","Web100 not initialized/not installed")
sys.exit(1)
try:
cvar.gread = libweb100.web100_group_find(ag,"read")
cvar.gtune = libweb100.web100_group_find(ag,"tune")
except Exception, e:
runlog("F","Web100 setup error %s"%(e))
sys.exit(1)
# Find/create the connection to test
global cid, pid, ixmit, datasock
cid = -1
ixmit = opts.ixmit # -x
if (opts.host): # -H Hostname
for addrinfo in socket.getaddrinfo(opts.host, opts.port, socket.AF_UNSPEC, socket.SOCK_STREAM):
(af, socktype, proto, canonname, sa) = addrinfo
try:
if opts.verbose:
gn=socket.getnameinfo(sa, 1)
runlog("I", "Trying %s Port %s"%(gn[0], gn[1]))
datasock = socket.socket(af, socktype, proto)
except Exception, e:
runlog("F", "Failed to create socket: %s"%(e))
sys.exit(2)
try:
datasock.connect(sa)
except Exception, e:
gn=socket.getnameinfo(sa, 1)
runlog("F", "Failed to establish connection to %s on port %s, %s"%(gn[0], gn[1], e))
sys.exit(2)
try:
cvar.conn = libweb100.web100_connection_from_socket(ag, datasock.fileno())
except Exception, e:
runlog("F", "Failed to get web100 conn %s"%(e))
sys.exit(2)
if cvar.conn == 0:
runlog("F", "Failed to find the connection %s"%(e))
sys.exit(2)
try:
cid = libweb100.web100_get_connection_cid(cvar.conn)
except Exception, e:
runlog("F", "Failed to find web100 cid %s"%(e))
sys.exit(2)
ixmit = 1
elif (opts.cid <> -1): # -C cid
cid = opts.cid
if opts.verbose:
runlog("I", "Using existing CID %d"%(cid))
try:
cvar.conn = libweb100.web100_connection_lookup(ag, cid)
except Exception, e:
runlog("F", "Failed to get web100 conn %s"%(e))
sys.exit(2)
ixmit = 0
elif (opts.fd <> -1): # -F fd
if opts.verbose:
runlog("I", "Using existing file descriptor from parent %d"%(opts.fd))
try:
cvar.conn = libweb100.web100_connection_from_socket(ag, opts.fd);
except Exception, e:
runlog("F", "Failed to get web100 conn %s"%(e))
sys.exit(2)
try:
cid = libweb100.web100_get_connection_cid(cvar.conn)
except Exception, e:
runlog("F", "Failed to get web100 cid %s"%(e))
sys.exit(2)
try:
datasock = socket.fromfd(opts.fd, socket.AF_INET, socket.SOCK_STREAM)
except Exception, e:
runlog("F", "Failed to use file descriptor %s passed in from parent process"%(opts.fd))
sys.exit(2)
# ixmit from -x option
elif (opts.listen_opt): # -L (isten)
if opts.verbose:
runlog("I", "Listening for a connection on port %d"%(opts.port))
try:
lsock = socket.socket()
except Exception, e:
runlog("F", "Couldn't create socket: %s"%(e))
sys.exit(2)
try:
lsock.bind(("", int(opts.port)))
except Exception, e:
runlog("F", "Couldn't bind port %d: %s"%(opts.port, e))
sys.exit(2)
try:
lsock.listen(1)
except Exception, e:
runlog("F", "Couldn't \"listen\" on port %d: %s"%(opts.port, e))
sys.exit(2)
queClient(opts)
try:
datasock, addr = lsock.accept()
except Exception, e:
runlog("F", "Failed to accept connection on port %d: %s"%(opts.port, e))
sys.exit(2)
try:
cvar.conn = libweb100.web100_connection_from_socket(ag, datasock.fileno())
except Exception, e:
runlog("F", "Failed to get web100 conn %s"%(e))
sys.exit(2)
try:
cid = libweb100.web100_get_connection_cid(cvar.conn)
except Exception, e:
runlog("F", "Failed to get web100 cid %s"%(e))
sys.exit(2)
ixmit = 1
else:
runlog("F", "Must specify on of -H, -C, -F or -L on command line")
sys.exit(2)
if cid == -1:
runlog("F", "Failed to establish or locate connection, cid = -1")
sys.exit(2)
if ixmit:
pid = pumpsegs(datasock.fileno(), 1000000) # XXX - fixed buffer/write size
# note: pid_t has no destructor, so this is technically a leak.
# therefore we need -DSWIG_PYTHON_SILENT_MEMLEAK to silence the rt complaint
# try to set the TOS byte (note this includes 6 bits DSCP + 2 bits ECN)
# 0x20 is the correct value for scavenger/LBE service
if opts.set_tos >= 0:
i = -2
runlog("W", "Setting TOS to 0x%x"%(opts.set_tos))
try:
datasock.setsockopt(socket.IPPROTO_IP, socket.IP_TOS, opts.set_tos)
except Exception, e:
runlog("W", "Set TOS failed: %s"%(e))
else:
try:
i=struct.unpack("b", datasock.getsockopt(socket.IPPROTO_IP, socket.IP_TOS, 1))[0]
if i != opts.set_tos:
runlog("W", "getsockopt reports incorrect TOS=0x%x (should be 0x%x)"%(i, opts.set_tos))
except Exception, e:
runlog("W", "Get TOS failed: %s"%(e))
ev["set_tos"] = i # Make the report summarize the status, -2 means there were errors
# Set the MSS
# we do no validity checking here - we rely on the kernel
# Later checks will catch problems
if opts.set_mss:
runlog("W", "Set MSS to %d"%(opts.set_mss))
write_web100_var(cvar.conn, cvar.gtune, "CurMSS", opts.set_mss)
cvar.baseMSS = opts.set_mss
def pretuneTCP(opts):
""" Make any pre-test adjustments to TCP. """
# If bully mode we replace AIMD with our own
if opts.bully:
one, ssthresh=1, 2*cvar.baseMSS
vWADNoAI=libweb100.web100_var_find(cvar.gtune, "WAD_NoAI")
libweb100.web100_raw_write(vWADNoAI, cvar.conn, one)
try:
vLimSsthresh=libweb100.web100_var_find(cvar.gtune, "LimSsthresh")
except Exception, e:
runlog("F", "No LimSsthresh - Check for a newer Web100 kernel!")
sys.exit(1)
libweb100.web100_raw_write(vLimSsthresh, cvar.conn, ssthresh)
runlog("W", "Set bully mode %d"%(libweb100.web100_raw_read(vLimSsthresh, cvar.conn)))
################################################################
# run_ something
################################################################
allRuns = [] # strictly chronological
rowcount = 0
def init_elapsed_sample(ctl):
"""
Gather the "zeroth" data sample,
from the current state of the connection
All samples (including this one) are archived in chronological order in allRuns.
"""
global ns, rowcount, allRuns
ns = libweb100.web100_snapshot_alloc(cvar.gread, cvar.conn)
libweb100.web100_snap(ns)
r={}
r["tctrl"]=ctl
r["rawsnap"]=ns
r.update(mkdict(ctl, ns, None))
r["row"] = rowcount
rowcount += 1
allRuns.append(r)
cvar.baseMSS = r["CurMSS"]
def run_elapsed_sample(ctl):
"""
Gather a data sample.
All of the arguments specified in ctl are passed to the c code to actually
manipulate TCP and collect data, without the overhead of the python interpreter.
All samples are archived in chronological order in allRuns.
"""
global os, ns, rowcount, allRuns
os, ns = ns, watch_elapsed_sample(cvar.conn, ctl) # beware: alters ctl
r={}
r["tctrl"]=ctl
r["rawsnap"]=ns
r.update(mkdict(ctl, ns, os))
r["row"] = rowcount
rowcount += 1
if time.time() > maxtime:
r["maxtime"]=True
allRuns.append(r)
return(r)
################################################################
# Interim dictionary definitions
################################################################
overflow = 4294967296L # 2^32 to force unconditional unsigned long
mask = overflow-1
def rntohl(a):
global overflow, mask
a = (a+overflow) & mask
if socket.ntohl(1) != 1: # Snaps are little endian
return(a)
r = ((a & 0x0FF) << 24) | \
((a & 0x0FF00) << 8) | \
((a & 0x0FF0000) >> 8) | \
((a >> 24) & 0x0FF)
return(r)
overflow64=18446744073709551616L
mask64=overflow64-1
def rntohll(a):
global overflow64, mask64
a = (a+overflow64) & mask64
if socket.ntohl(1) != 1: # Snaps are little endian
return(a)
r = ((a & 0x0FFL) << 56) | \
((a & 0x0FF00L) << 40) | \
((a & 0x0FF0000L) << 24) | \
((a & 0x0FF000000L) << 8) | \
((a & 0x0FF00000000L) >> 8) | \
((a & 0x0FF0000000000L) >> 24) | \
((a & 0x0FF000000000000L) >> 40) | \
((a >> 56) & 0x0FF)
return(r)
# test rntohll()
def testrntohll():
i = overflow64
while i:
i = i >> 1
r = rntohll(i)
print "%x %x"%(i, r)
sys.exit()
# testrntohll()
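# Worked example: on a big-endian host (socket.ntohl(1) == 1) the little-endian
# snap value is byte-swapped, e.g. rntohl(0x12345678) -> 0x78563412; on a
# little-endian host the value is returned unchanged, since the snaps are
# already in host byte order there.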
def mkdict(ctl, new, old):
"""
Convert a tctrl and two snaps into a dictionary. This parallels John Heffner's
new python API for Web100, but handles deltas differently.
Deltas are not computed if there is no prior (old) sample.
"""
r = {}
# process the control structure
# eeeewww clearly this is lame XXX
r["flag"]=ctl.flag
r["basemss"]=ctl.basemss
r["win"]=ctl.win
r["burstwin"]=ctl.burstwin
r["duration"]=ctl.duration
r["obswin"]=ctl.obswin
r["SSbursts"]=ctl.SSbursts
r["SSbully"]=ctl.SSbully
r["SSbullyStall"]=ctl.SSbullyStall
r["SSsumAwnd"]=ctl.SSsumAwnd
r["SScntAwnd"]=ctl.SScntAwnd
r["SSpoll"]=ctl.SSpoll
# Process the snaps
# This interim version violates clean layering:
# it uses a hybrid of libweb100 and PyWeb100
v = libweb100.web100_var_head(cvar.gread)
while v:
vv = PyWeb100.Web100Var(v, cvar.gread) # ugly
n = libweb100.web100_get_var_name(v)
t = libweb100.web100_get_var_type(v)
if libweb100.web100_snap_read(v, new, cvar.nbuf) != libweb100.WEB100_ERR_SUCCESS:
raise "snap read fail"
if old:
if libweb100.web100_snap_read(v, old, cvar.obuf) != libweb100.WEB100_ERR_SUCCESS:
raise "snap read fail"
if ( t == libweb100.WEB100_TYPE_COUNTER32 ):
r["abs_"+n]=rntohl(vv.val(cvar.nbuf)) # abs_ is absolute magnitude
if old:
delta = rntohl(vv.val(cvar.nbuf)) - rntohl(vv.val(cvar.obuf))
if delta < 0:
delta = delta + 4294967296
r[n] = delta
elif ( t == libweb100.WEB100_TYPE_COUNTER64 ):
r["abs_"+n]=rntohll(vv.val(cvar.nbuf)) # abs_ is absolute magnitude
if old:
# rntohl() is wrong
delta = rntohll(vv.val(cvar.nbuf)) - rntohll(vv.val(cvar.obuf))
if delta < 0:
delta = delta + 18446744073709551616
r[n] = delta
elif ( t == libweb100.WEB100_TYPE_GAUGE32 or
t == libweb100.WEB100_TYPE_INTEGER32 or
t == libweb100.WEB100_TYPE_UNSIGNED32 or
t == libweb100.WEB100_TYPE_TIME_TICKS or
t == libweb100.WEB100_TYPE_INTEGER ):
r[n] = rntohl(vv.val(cvar.nbuf))
else:
r[n] = vv.val(cvar.nbuf)
v = libweb100.web100_var_next(v)
# vv.free()
return(r)
################################################################
# File saving and restoring code
################################################################
def write_events(ev, f):
for e in ev:
if isinstance(ev[e], int):
f.write("%s event_int %d\n"%(e, ev[e]))
elif isinstance(ev[e], str):
f.write("%s event_str %s\n"%(e, ev[e]))
elif type(ev[e]) == LongType:
f.write("%s event_long %Ld\n"%(e, ev[e]))
elif type(ev[e]) == FloatType:
f.write("%s event_float %s\n"%(e, ev[e]))
else:
try:
row=ev[e]["row"]
f.write("%s event_row %d\n"%(e, row))
except:
f.write("%s event_other %s\n"%(e, str(ev[e])))
def diff_events(nev, oev, f):
for e in nev:
if e in oev:
try:
nrow=nev[e]["row"]
orow=oev[e]["row"]
if nrow != orow:
f.write(" %s different rows: %d %d\n"%(e, nrow, orow))
except:
if nev[e] != oev[e]:
f.write(" %s new: %s\n"%(e, nev[e]))
f.write(" old: %s\n"%oev[e])
def write_stats(ev, lbase):
"""
Write test parameters and results to a pair of files.
The test parameters and control information (struct tctrl) are
written to the ascii file <name>.ctrl
Binary web100 snaps are written to <name>.log
"""
global allRuns
fver = "20060411"
# setup control log and write (almost) all events
logf = open(lbase + ".ctrl", 'w')
logf.write("version %s\n"%fver)
# setup snap log
vlog = libweb100.web100_log_open_write(lbase + ".log", cvar.conn, cvar.gread)
for r in allRuns:
c, s = r["tctrl"], r["rawsnap"]
logf.write("tctrl 20050126 %d %d %d %d %d %d %d %d %d %d %d %d\n"%(
c.flag, c.basemss, c.win, c.burstwin, c.duration, c.obswin,
c.SSbursts, c.SSbully, c.SSbullyStall, c.SSsumAwnd, c.SScntAwnd, c.SSpoll))
libweb100.web100_log_write(vlog, s)
write_events(ev, logf)
logf.close()
libweb100.web100_log_close_write(vlog)
def old_write_stats(lbase, plist):
"""
Write test parameters and results to a pair of files.
The test parameters and control information (struct tctrl) are
written to the ascii file <name>.ctrl
Binary web100 snaps are written to <name>.log
"""
# setup control log and write test parameters
logf = open(lbase + ".ctrl", 'w')
for p in plist:
logf.write("%d "%p)
logf.write("\n")
# setup snap log
vlog = libweb100.web100_log_open_write(lbase + ".log", cvar.conn, cvar.gread)
for r in allRuns:
c, s = r["tctrl"], r["rawsnap"]
logf.write("10 20050126 : %d %d %d %d %d %d %d %d %d %d %d %d\n"%(
c.flag, c.basemss, c.win, c.burstwin, c.duration, c.obswin,
c.SSbursts, c.SSbully, c.SSbullyStall, c.SSsumAwnd, c.SScntAwnd, c.SSpoll))
libweb100.web100_log_write(vlog, s)
logf.close()
libweb100.web100_log_close_write(vlog)
def read_stats(lbase):
"""
Read previously saved test results written by write_stats
"""
global ag, allRuns
# Open the logs with the appropriate tools
logf = open(lbase + ".ctrl", 'r')
vlog = libweb100.web100_log_open_read(lbase + ".log")
ag = libweb100.web100_get_log_agent(vlog)
cvar.gread = libweb100.web100_get_log_group(vlog)
cvar.conn = libweb100.web100_get_log_connection(vlog)
# parse test parameters (first line)
firstline = logf.readline().split(' ')
# old format always started with a digit
if firstline[0].isdigit():
return(old_read_stats(firstline, logf, vlog))
if firstline[0] != "version":
raise "malformed ctrl file version"
fver = firstline[1]
ev,ar={},[]
# new format is keyword driven
for line in logf.readlines():
w=line.split()
name = w[0]
if w[1] == "event_str":
ev[name] = " ".join(w[2:])
elif w[1] == "event_int":
ev[name] = int(w[2])
elif w[1] == "event_long":
ev[name] = long(w[2])
elif w[1] == "event_float":
ev[name] = float(w[2])
elif w[1] == "event_row":
ev[name] = ar[int(w[2])]
elif w[1] == "event_other":
ev[name] = None # XXX - not supported
elif w[1] == "20050126":
c=parse_tctrl_20050126(2, w)
s = libweb100.web100_snapshot_alloc( cvar.gread, cvar.conn )
libweb100.web100_snap_from_log(s, vlog)
d={}
d["tctrl"]=c
d["rawsnap"]=s
ar.append( d )
else:
raise "unknown .ctrl line: "+w[1]
allRuns=ar
return ev,ar
def parse_tctrl_20050126(i, w):
c = tctrl()
i, c.flag = i+1, int(w[i])
i, c.basemss = i+1, int(w[i])
i, c.win = i+1, int(w[i])
i, c.burstwin = i+1, int(w[i])
i, c.duration = i+1, int(w[i])
i, c.obswin = i+1, int(w[i])
i, c.SSbursts = i+1, int(w[i])
i, c.SSbully = i+1, int(w[i])
i, c.SSbullyStall = i+1, int(w[i])
i, c.SSsumAwnd = i+1, int(w[i])
i, c.SScntAwnd = i+1, int(w[i])
i, c.SSpoll = i+1, int(w[i])
if (i != len(w)):
raise "missaligned format error"
return(c)
def old_read_stats(firstline, logf, vlog):
"""
Read old logfiles,
note that this is not verbatim old code
"""
global allRuns
ev,rr={},[] # return result (deprecated)
for w in firstline:
if w.isdigit():
rr.append(int(w))
ev["target_rate"] = rr[0]
ev["target_rtt"] = rr[1]
# parse control parameters and web100 snaps in tandem for individual tests
ar = [] # scan result
for l in logf:
w=l.split(' ')
if w[0:3] == [ "10", "20050126", ":" ] and len(w) == 15:
c = parse_tctrl_20050126(3, w)
elif w[0:3] == [ "10", "20040330", ":" ] and len(w) == 13:
raise "deprecate this format" # XXX nuke (all of) this
c = tctrl()
i=3
i, c.flag = i+1, int(w[i])
i, c.basemss = i+1, int(w[i])
i, c.win = i+1, int(w[i])
i, c.burstwin = i+1, int(w[i])
i, c.duration = i+1, int(w[i])
i, c.obswin = i+1, int(w[i])
i, c.SSbursts = i+1, int(w[i])
i, c.SSbully = i+1, int(w[i])
i, c.SSsumAwnd = i+1, int(w[i])
i, c.SScntAwnd = i+1, int(w[i])
i, c.SSpoll = i+1, -1
i, c.SSbullyStall = i+1, -1 # reordered
if (i != SizeOfTctrl):
raise "missaligned format error"
else:
print len(w), w
raise "format error"
s = libweb100.web100_snapshot_alloc( cvar.gread, cvar.conn )
libweb100.web100_snap_from_log(s, vlog)
d={}
d["tctrl"]=c
d["rawsnap"]=s
ar.append( d )
allRuns=ar
return ev,ar
|
class Vector2:
def __init__(self, _x=0, _y=0):
self.x = _x
self.y = _y
@staticmethod
def zero():
v = Vector2(0,0)
return v
@staticmethod
def one():
v = Vector2(1,1)
return v
def __str__(self):
return '({0},{1})'.format(self.x,self.y)
class Vector3(Vector2):
def __init__(self, _x=0, _y=0, _z=0):
super().__init__(_x,_y)
self.z = _z
@staticmethod
def zero():
v = Vector3(0,0,0)
return v
@staticmethod
def one():
v = Vector3(1,1,1)
return v
def __str__(self):
return '({0},{1},{2})'.format(self.x,self.y,self.z)
v = Vector3(2,8,6)
print(v)
|
p="I am praveen"
if(p=="I am praveen"):
print "it works"
else:
print 'not working'
for let in p:
print "Current letter ", let
st=p[5:12]
print st
|
import random
board=[[1,2,3],[4,5,6],[7,8,9]]
RandomNumberList=random.sample(range(1, 10), 9)
FreeSquareList=[]
tempcomparion=0
global StatusGame
PlayAgain=""
def DisplayBoard(board):
#
# the function accepts one parameter containing the board's current status
# and prints it out to the console
#
print("+-------+-------+-------+")
for i in range(3):
print("| | | |")
print("|",end="")
for j in range(3):
print(" ",board[i][j], " |",end="")
print("")
print("| | | |")
print("+-------+-------+-------+")
return board
def EnterMove(board):
GetFreeList=MakeListOfFreeFields(board)
print("It's Your Turn, Check Available Fields", GetFreeList)
TakeInputFromUser=input("Please Enter A Number From The Above Available Fields ")
while TakeInputFromUser.isdigit()!=True:
print("It's Your Turn, Check Available Fields", GetFreeList)
TakeInputFromUser=input("Please Enter A Number From The Above Available Fields ")
else:
TakeInputFromUser = int(TakeInputFromUser)
while TakeInputFromUser not in GetFreeList:
print("Check Available Fields are", GetFreeList)
TakeInputFromUser=int(input("Please Enter A Number From The Above Available Fields Only "))
for i in range(3):
for j in range(3):
if board[i][j]==TakeInputFromUser:
board[i][j]="O"
break
DisplayBoard(board)
StatusOfTheGame=VictoryFor(board, "O")
if StatusOfTheGame=="O":
print("you have the won Game")
return board
elif StatusOfTheGame==True:
DrawMove(board)
else:
print("thank you")
# the function accepts the board current status, asks the user about their move,
# checks the input and updates the board according to the user's decision
#
def MakeListOfFreeFields(board):
#
# the function browses the board and builds a list of all the free squares;
# the list consists of tuples, while each tuple is a pair of row and column numbers
#
FreeSquareList.clear()
for i in range(3):
for j in range(3):
if board[i][j]!="X" and board[i][j]!="O":
FreeSquareList.append(board[i][j])
return FreeSquareList
def VictoryFor(board, sign):
if board[0][0]==board[0][1]==board[0][2]==sign:
print(sign," has won the game")
return sign
elif board[1][0]==board[1][1]==board[1][2]==sign:
print(sign," has won the game")
return sign
elif board[2][0]==board[2][1]==board[2][2]==sign:
print(sign," has won the game")
return sign
elif board[0][0]==board[1][0]==board[2][0]==sign:
print(sign," has won the game")
return sign
elif board[0][1]==board[1][1]==board[2][1]==sign:
print(sign," has won the game")
return sign
elif board[0][2]==board[1][2]==board[2][2]==sign:
print(sign," has won the game")
return sign
elif board[0][0]==board[1][1]==board[2][2]==sign:
print(sign," has won the game")
return sign
elif board[0][2]==board[1][1]==board[2][0]==sign:
print(sign," has won the game")
return sign
elif len(MakeListOfFreeFields(board))>=1:
return True
else:
print("game is tie")
#
# the function analyzes the board status in order to check if
# the player using 'O's or 'X's has won the game
#
def DrawMove(board):
global StatusOfTheGame
global tempcomparion
FreeSquareList=MakeListOfFreeFields(board)
for item in RandomNumberList:
if item in FreeSquareList:
tempcomparion= item
FreeSquareList.remove(item)
break
for i in range(3):
for j in range(3):
if board[i][j]==tempcomparion:
board[i][j]="X"
break
DisplayBoard(board)
StatusOfTheGame=VictoryFor(board, "X")
if StatusOfTheGame=="X":
print("Computer won the won Game")
return board
elif StatusOfTheGame==True:
EnterMove(board)
else:
print("Thank you")
#
# the function draws the computer's move and updates the board
#
while True:
board=DrawMove(board)
PlayAgain=input("do you wish play again, y to continue and any other key to exit")
if PlayAgain.upper()=="Y":
board=[[1,2,3],[4,5,6],[7,8,9]]
RandomNumberList=random.sample(range(1, 10), 9)
else:
print("Bye!")
break
|
y=int(input())
while y>0 :
print("Hello")
y=y-1
|
#python 3.8
# WebQA JSON parsing program
import json
import pymysql
conn = pymysql.connect(
host='localhost', # MySQL server address
port=3306, # port number
user='root', # username
passwd='root', # password
db='faq', # database name
charset='utf8', # connection encoding, set as needed
)
cur = conn.cursor() # create and return a cursor
questionList = open(r"E:\gongzhonghao\robot\WebQA.v1.0(1)\WebQA.v1.0\me_train.json", "r",encoding='UTF-8')
questionList = json.load(questionList)
num=1
for lineQuestion in questionList:
questionDict=questionList[lineQuestion]
question = questionDict['question']
evidences = questionDict['evidences']
for ed in evidences:
if evidences[ed]['answer'][0]!='no_answer':
answer=evidences[ed]['answer'][0]+" "+evidences[ed]['evidence']
insertSql = " insert into faq (fid,question,answer)VALUES(%s,%s,%s)"
#insertSql = " insert into faq (fid,question,answer)VALUES('1','2','3')"
cur.execute(insertSql,(str(num),question,answer))
conn.commit()
print(str(num))
num+=1
break
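# Layout of me_train.json as assumed by the loop above (sketch inferred from the code):
# {
#   "<question-id>": {
#     "question": "...",
#     "evidences": {
#       "<evidence-id>": { "answer": ["..."], "evidence": "..." },
#       ...
#     }
#   },
#   ...
# }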
|
'''
Canetti-Halevi-Katz Public Key Encryption, IBE-to-PKE transform (generic composition of IBE+signature -> PKE)
| From: "R. Canneti, S. Halevi, J. Katz: Chosen-Ciphertext Security from Identity-Based Encryption"
| Published in: CRYPTO 2004
| Available from: http://eprint.iacr.org/2003/182
| Notes:
* type: encryption (public key)
* setting: n/a --- requires a selective-ID secure IBE scheme and an EU-CMA one-time signature (OTS) scheme
* assumption: n/a --- dependent on the underlying primitives
:Authors: J. Ayo Akinyele
:Date: 1/2011
'''
from charm.toolbox.PKEnc import PKEnc
from charm.toolbox.pairinggroup import PairingGroup,ZR,G1,G2,GT,pair
debug = False
class CHK04(PKEnc):
"""
>>> from charm.adapters.ibenc_adapt_identityhash import HashIDAdapter
>>> from charm.schemes.ibenc.ibenc_bb03 import IBE_BB04
>>> from charm.schemes.pksig.pksig_bls04 import IBSig
>>> group = PairingGroup('SS512')
>>> ibe = IBE_BB04(group)
>>> hash_ibe = HashIDAdapter(ibe, group)
>>> ots = IBSig(group)
>>> pkenc = CHK04(hash_ibe, ots, group)
>>> (public_key, secret_key) = pkenc.keygen(0)
>>> msg = group.random(GT)
>>> cipher_text = pkenc.encrypt(public_key, msg)
>>> decrypted_msg = pkenc.decrypt(public_key, secret_key, cipher_text)
>>> decrypted_msg == msg
True
"""
def __init__(self, ibe_scheme, ots_scheme, groupObj):
global ibe, ots, group
ibe = ibe_scheme
ots = ots_scheme
group = groupObj
def keygen(self, secparam):
# Run the IBE Setup routine to generate (mpk, msk)
(mpk, msk) = ibe.setup()
pk = { 'mpk' : mpk, 'secparam':secparam }
return (pk, msk)
def encrypt(self, pk, message):
# Generate a random keypair for the OTS
(svk, ssk) = ots.keygen(pk['secparam'])
# print("pub identity enc =>", _id)
# Encrypt message with the IBE scheme under 'identity' vk
C = ibe.encrypt(pk['mpk'],svk['identity'] , message)
# Sign the resulting ciphertext with sk
sigma = ots.sign(ssk['x'], C)
return { 'vk' : svk, 'C' : C, 'sigma' : sigma }
# NOTE: need to transform c['vk'] into a string to use as key
def decrypt(self, pk, sk, c):
# Given a ciphertext (vk, C, sigma), verify that sigma is a signature on C under public key vk
if not ots.verify(c['vk'], c['sigma'], c['C']):
return False
identity = c['vk']['identity']
# print("identity in dec =>", identity)
# Otherwise, extract an IBE key for identity 'vk' under the master secret params
dk = ibe.extract(sk, identity)
# Return the decryption of the ciphertext element "C" under key dk
return ibe.decrypt(pk, dk, c['C'])
|
# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
import logging
import os
import re
from dataclasses import dataclass
from pathlib import PurePath
from typing import Iterable
from pants.backend.python.dependency_inference.module_mapper import module_from_stripped_path
from pants.backend.python.macros.pipenv_requirements import parse_pipenv_requirements
from pants.backend.python.macros.poetry_requirements import PyProjectToml, parse_pyproject_toml
from pants.backend.python.macros.python_requirements import (
parse_pyproject_toml as parse_pep621_pyproject_toml,
)
from pants.backend.python.subsystems.setup import PythonSetup
from pants.backend.python.target_types import (
PexBinary,
PexEntryPointField,
PythonSourcesGeneratorTarget,
PythonTestsGeneratingSourcesField,
PythonTestsGeneratorTarget,
PythonTestUtilsGeneratingSourcesField,
PythonTestUtilsGeneratorTarget,
ResolvedPexEntryPoint,
ResolvePexEntryPointRequest,
)
from pants.base.specs import AncestorGlobSpec, RawSpecs
from pants.core.goals.tailor import (
AllOwnedSources,
PutativeTarget,
PutativeTargets,
PutativeTargetsRequest,
)
from pants.core.target_types import ResourceTarget
from pants.engine.fs import DigestContents, FileContent, PathGlobs, Paths
from pants.engine.internals.selectors import Get, MultiGet
from pants.engine.rules import collect_rules, rule
from pants.engine.target import Target, UnexpandedTargets
from pants.engine.unions import UnionRule
from pants.source.filespec import FilespecMatcher
from pants.source.source_root import SourceRootsRequest, SourceRootsResult
from pants.util.dirutil import group_by_dir
from pants.util.logging import LogLevel
from pants.util.requirements import parse_requirements_file
logger = logging.getLogger(__name__)
@dataclass(frozen=True)
class PutativePythonTargetsRequest(PutativeTargetsRequest):
pass
def classify_source_files(paths: Iterable[str]) -> dict[type[Target], set[str]]:
"""Returns a dict of target type -> files that belong to targets of that type."""
tests_filespec_matcher = FilespecMatcher(PythonTestsGeneratingSourcesField.default, ())
test_utils_filespec_matcher = FilespecMatcher(PythonTestUtilsGeneratingSourcesField.default, ())
path_to_file_name = {path: os.path.basename(path) for path in paths}
test_file_names = set(tests_filespec_matcher.matches(list(path_to_file_name.values())))
test_util_file_names = set(
test_utils_filespec_matcher.matches(list(path_to_file_name.values()))
)
test_files = {
path for path, file_name in path_to_file_name.items() if file_name in test_file_names
}
test_util_files = {
path for path, file_name in path_to_file_name.items() if file_name in test_util_file_names
}
library_files = set(paths) - test_files - test_util_files
return {
PythonTestsGeneratorTarget: test_files,
PythonTestUtilsGeneratorTarget: test_util_files,
PythonSourcesGeneratorTarget: library_files,
}
# The order "__main__" == __name__ would also technically work, but is very
# non-idiomatic, so we ignore it.
_entry_point_re = re.compile(rb"^if __name__ +== +['\"]__main__['\"]: *(#.*)?$", re.MULTILINE)
def is_entry_point(content: bytes) -> bool:
# Identify files that look like entry points. We use a regex for speed, as it will catch
# almost all correct cases in practice, with extremely rare false positives (we will only
# have a false positive if the matching code is in a multiline string indented all the way
# to the left). Looking at the ast would be more correct, technically, but also more laborious,
# trickier to implement correctly for different interpreter versions, and much slower.
return _entry_point_re.search(content) is not None
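# Illustrative examples (not part of the original file):
#   is_entry_point(b"if __name__ == '__main__':\n    main()\n")  # True
#   is_entry_point(b"print('no main guard here')\n")             # False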
async def _find_resource_py_typed_targets(
py_typed_files_globs: PathGlobs, all_owned_sources: AllOwnedSources
) -> list[PutativeTarget]:
"""Find resource targets that may be created after discovering any `py.typed` files."""
all_py_typed_files = await Get(Paths, PathGlobs, py_typed_files_globs)
unowned_py_typed_files = set(all_py_typed_files.files) - set(all_owned_sources)
putative_targets = []
for dirname, filenames in group_by_dir(unowned_py_typed_files).items():
putative_targets.append(
PutativeTarget.for_target_type(
ResourceTarget,
kwargs={"source": "py.typed"},
path=dirname,
name="py_typed",
triggering_sources=sorted(filenames),
)
)
return putative_targets
async def _find_source_targets(
py_files_globs: PathGlobs, all_owned_sources: AllOwnedSources, python_setup: PythonSetup
) -> list[PutativeTarget]:
result = []
check_if_init_file_empty: dict[str, tuple[str, str]] = {} # full_path: (dirname, filename)
all_py_files = await Get(Paths, PathGlobs, py_files_globs)
unowned_py_files = set(all_py_files.files) - set(all_owned_sources)
classified_unowned_py_files = classify_source_files(unowned_py_files)
for tgt_type, paths in classified_unowned_py_files.items():
for dirname, filenames in group_by_dir(paths).items():
name: str | None
if issubclass(tgt_type, PythonTestsGeneratorTarget):
name = "tests"
elif issubclass(tgt_type, PythonTestUtilsGeneratorTarget):
name = "test_utils"
else:
name = None
if (
python_setup.tailor_ignore_empty_init_files
and tgt_type == PythonSourcesGeneratorTarget
and filenames in ({"__init__.py"}, {"__init__.pyi"})
):
f = next(iter(filenames))
check_if_init_file_empty[os.path.join(dirname, f)] = (dirname, f)
else:
result.append(
PutativeTarget.for_target_type(
tgt_type, path=dirname, name=name, triggering_sources=sorted(filenames)
)
)
if check_if_init_file_empty:
init_contents = await Get(DigestContents, PathGlobs(check_if_init_file_empty.keys()))
for file_content in init_contents:
if not file_content.content.strip():
continue
d, f = check_if_init_file_empty[file_content.path]
result.append(
PutativeTarget.for_target_type(
PythonSourcesGeneratorTarget, path=d, name=None, triggering_sources=[f]
)
)
return result
@rule(level=LogLevel.DEBUG, desc="Determine candidate Python targets to create")
async def find_putative_targets(
req: PutativePythonTargetsRequest,
all_owned_sources: AllOwnedSources,
python_setup: PythonSetup,
) -> PutativeTargets:
pts = []
all_py_files_globs: PathGlobs = req.path_globs("*.py", "*.pyi")
if python_setup.tailor_source_targets:
source_targets = await _find_source_targets(
all_py_files_globs, all_owned_sources, python_setup
)
pts.extend(source_targets)
if python_setup.tailor_py_typed_targets:
all_py_typed_files_globs: PathGlobs = req.path_globs("py.typed")
resource_targets = await _find_resource_py_typed_targets(
all_py_typed_files_globs, all_owned_sources
)
pts.extend(resource_targets)
if python_setup.tailor_requirements_targets:
# Find requirements files.
(
all_requirements_files,
all_pipenv_lockfile_files,
all_pyproject_toml_contents,
) = await MultiGet(
Get(DigestContents, PathGlobs, req.path_globs("*requirements*.txt")),
Get(DigestContents, PathGlobs, req.path_globs("Pipfile.lock")),
Get(DigestContents, PathGlobs, req.path_globs("pyproject.toml")),
)
def add_req_targets(files: Iterable[FileContent], alias: str, target_name: str) -> None:
contents = {i.path: i.content for i in files}
unowned_files = set(contents) - set(all_owned_sources)
for fp in unowned_files:
path, name = os.path.split(fp)
try:
validate(fp, contents[fp], alias, target_name)
except Exception as e:
logger.warning(
f"An error occurred when validating `{fp}`: {e}.\n\n"
"You'll need to create targets for its contents manually.\n"
"To silence this error in future, see "
"https://www.pantsbuild.org/docs/reference-tailor#section-ignore-paths \n"
)
continue
pts.append(
PutativeTarget(
path=path,
name=target_name,
type_alias=alias,
triggering_sources=[fp],
owned_sources=[name],
kwargs=(
{}
if alias != "python_requirements" or name == "requirements.txt"
else {"source": name}
),
)
)
def validate(path: str, contents: bytes, alias: str, target_name: str) -> None:
if alias == "python_requirements":
if path.endswith("pyproject.toml"):
return validate_pep621_requirements(path, contents)
return validate_python_requirements(path, contents)
elif alias == "pipenv_requirements":
return validate_pipenv_requirements(contents)
elif alias == "poetry_requirements":
return validate_poetry_requirements(contents)
def validate_python_requirements(path: str, contents: bytes) -> None:
for _ in parse_requirements_file(contents.decode(), rel_path=path):
pass
def validate_pep621_requirements(path: str, contents: bytes) -> None:
list(parse_pep621_pyproject_toml(contents.decode(), rel_path=path))
def validate_pipenv_requirements(contents: bytes) -> None:
parse_pipenv_requirements(contents)
def validate_poetry_requirements(contents: bytes) -> None:
p = PyProjectToml(PurePath(), PurePath(), contents.decode())
parse_pyproject_toml(p)
add_req_targets(all_requirements_files, "python_requirements", "reqs")
add_req_targets(all_pipenv_lockfile_files, "pipenv_requirements", "pipenv")
add_req_targets(
{fc for fc in all_pyproject_toml_contents if b"[tool.poetry" in fc.content},
"poetry_requirements",
"poetry",
)
def pyproject_toml_has_pep621(fc) -> bool:
try:
return (
len(list(parse_pep621_pyproject_toml(fc.content.decode(), rel_path=fc.path)))
> 0
)
except Exception:
return False
add_req_targets(
{fc for fc in all_pyproject_toml_contents if pyproject_toml_has_pep621(fc)},
"python_requirements",
"reqs",
)
if python_setup.tailor_pex_binary_targets:
# Find binary targets.
# Get all files whose content indicates that they are entry points or are __main__.py files.
digest_contents = await Get(DigestContents, PathGlobs, all_py_files_globs)
all_main_py = await Get(Paths, PathGlobs, req.path_globs("__main__.py"))
entry_points = [
file_content.path
for file_content in digest_contents
if is_entry_point(file_content.content)
] + list(all_main_py.files)
# Get the modules for these entry points.
src_roots = await Get(
SourceRootsResult, SourceRootsRequest, SourceRootsRequest.for_files(entry_points)
)
module_to_entry_point = {}
for entry_point in entry_points:
entry_point_path = PurePath(entry_point)
src_root = src_roots.path_to_root[entry_point_path]
stripped_entry_point = entry_point_path.relative_to(src_root.path)
module = module_from_stripped_path(stripped_entry_point)
module_to_entry_point[module] = entry_point
# Get existing binary targets for these entry points.
entry_point_dirs = {os.path.dirname(entry_point) for entry_point in entry_points}
possible_existing_binary_targets = await Get(
UnexpandedTargets,
RawSpecs(
ancestor_globs=tuple(AncestorGlobSpec(d) for d in entry_point_dirs),
description_of_origin="the `pex_binary` tailor rule",
),
)
possible_existing_binary_entry_points = await MultiGet(
Get(ResolvedPexEntryPoint, ResolvePexEntryPointRequest(t[PexEntryPointField]))
for t in possible_existing_binary_targets
if t.has_field(PexEntryPointField)
)
possible_existing_entry_point_modules = {
rep.val.module for rep in possible_existing_binary_entry_points if rep.val
}
unowned_entry_point_modules = (
module_to_entry_point.keys() - possible_existing_entry_point_modules
)
# Generate new targets for entry points that don't already have one.
for entry_point_module in unowned_entry_point_modules:
entry_point = module_to_entry_point[entry_point_module]
path, fname = os.path.split(entry_point)
name = os.path.splitext(fname)[0]
pts.append(
PutativeTarget.for_target_type(
target_type=PexBinary,
path=path,
name=name,
triggering_sources=tuple(),
kwargs={"entry_point": fname},
)
)
return PutativeTargets(pts)
def rules():
return [
*collect_rules(),
UnionRule(PutativeTargetsRequest, PutativePythonTargetsRequest),
]
|
# Module with all the functions needed to build and evaluate K-Means
import numpy as np
import matplotlib.pyplot as plt
# K-Means method
class KMeans():
def __init__(self, n_clusters=2, max_iter=50):
self.n_clusters = n_clusters
self.max_iter = max_iter
def euclidean_distance(self, Xi, Xj):
return np.sqrt(np.sum((Xi - Xj)**2))
# Initialize the starting centroids at random positions within the data bounds
def initial_centers(self, X, k):
centroids = []
n = X.shape[1]
min_ = np.min(X, axis=0)
max_ = np.max(X, axis=0)
for i in range(k):
centroids.append(np.random.uniform(min_, max_, n))
return np.array(centroids)
# Compute the index of the nearest centroid for each point in the dataset
def nearest_centroids(self, X, cluster_centers):
nearest_indexes = []
for i in range(X.shape[0]):
dist = [self.euclidean_distance(X[i], center) for center in cluster_centers]
nearest_index = [index for index, val in enumerate(dist) if val==min(dist)]
nearest_indexes.append(nearest_index[0])
return nearest_indexes
# Sum of squared distances from each sample to its nearest cluster center
def inertia(self, X, cluster_centers, nearest_indexes):
return np.sum([self.euclidean_distance(X[i], cluster_centers[nearest_indexes[i]])**2 for i in range(0, len(X))])
# Update the centroids
def update_centroids(self, X, nearest_indexes):
D = max(np.unique(nearest_indexes)) + 1 # number of clusters actually assigned
sum_ = np.zeros((D, X.shape[1]))
total = np.zeros(D)
for i in range(0, len(X)):
sum_[nearest_indexes[i]] += X[i]
total[nearest_indexes[i]] += 1
cluster_centers = [np.divide(sum_[i], total[i], out=np.zeros_like(sum_[i]), where=total[i] != 0) for i in range(0, D)]
return np.array(cluster_centers)
# Compute the cluster centers and predict the cluster index for each sample
def fit_predict(self, X):
# Initialize the centroids
cluster_centers = self.initial_centers(X, self.n_clusters)
# Compute the cluster of each sample
cluster_indexes = self.nearest_centroids(X, cluster_centers)
# Compute the initial inertia
init_inertia = self.inertia(X, cluster_centers, cluster_indexes)
for i in range(0, self.max_iter):
cluster_centers = self.update_centroids(X, cluster_indexes)
cluster_indexes = self.nearest_centroids(X, cluster_centers)
inertia_ = self.inertia(X, cluster_centers, cluster_indexes)
if(init_inertia == inertia_):
break
else:
init_inertia = inertia_
# self.model = {'cluster_centers': cluster_centers, 'labels': cluster_indexes, 'inertia': inertia}
self.cluster_centers_ = cluster_centers
self.labels_ = cluster_indexes
self.inertia_ = inertia_
return self.labels_
# def fit_predict(self, X):
# self.fit(X)
# return self.model
def kmeans_elbow_visualizer(X, k_range=(2,), max_iter=50):
inertia_list = []
# Size the subplot grid to the number of K values instead of a fixed 2x2 grid.
n_rows = max(1, int(np.ceil(len(k_range) / 2)))
fig, axs = plt.subplots(n_rows, 2, sharex=True, sharey=True, figsize=(16, 6 * n_rows))
for i, K in enumerate(k_range):
kmeans = KMeans(n_clusters=K, max_iter=max_iter)
kmeans.fit_predict(X)
labels = np.unique(kmeans.labels_)
y_kmeans = kmeans.labels_
inertia_list.append(kmeans.inertia_)
colors = plt.cm.Set1(np.linspace(0, 0.9, labels.shape[0]))
axs.flat[i].set_title("K = {}; Inertia = {}".format(K, kmeans.inertia_), fontsize=14)
for j, label in enumerate(labels):
axs.flat[i].scatter(X[y_kmeans==label, 0], X[y_kmeans==label, 1], s=100, marker='o', color=colors[j])
# for k in range(K):
axs.flat[i].scatter(kmeans.cluster_centers_[:, 0], kmeans.cluster_centers_[:, 1], s=100, facecolors='w', edgecolors='k', linewidth=3)
axs.flat[i].scatter(kmeans.cluster_centers_[:, 0], kmeans.cluster_centers_[:, 1], s=20, color='k')
# axs.flat[i].set_xlim(xlim)
# axs.flat[i].set_ylim(ylim)
plt.figure(figsize=(10, 5))
plt.rcParams.update({'font.size': 14})
plt.plot(k_range, inertia_list, '-k', color='firebrick')
plt.xlabel('Number of clusters (k)', fontsize=14)
plt.ylabel('Inertia (sum of squared distances)', fontsize=14)
plt.show()
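# Minimal usage sketch (not part of the original module; the two synthetic Gaussian
# blobs and the names X_demo/km/rng below are illustrative only): cluster the blobs
# with the KMeans class above and report the resulting inertia and centers.
if __name__ == '__main__':
    rng = np.random.default_rng(0)
    blob_a = rng.normal(loc=[0.0, 0.0], scale=0.5, size=(100, 2))
    blob_b = rng.normal(loc=[3.0, 3.0], scale=0.5, size=(100, 2))
    X_demo = np.vstack([blob_a, blob_b])
    km = KMeans(n_clusters=2, max_iter=50)
    km.fit_predict(X_demo)
    print("Inertia:", km.inertia_)
    print("Cluster centers:\n", km.cluster_centers_)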
|
from django.urls import path
from . import views
app_name = "poll"
urlpatterns = [
path('',views.PollListView.as_view(),name="poll_list"),
path('<int:pk>/',views.PollDetailView.as_view(),name="poll_detail"),
path('vote/<int:question_id>/',views.vote,name="vote"),
path('result/<int:pk>/',views.PollResultView.as_view(),name="poll_result"),
]
|
def search(nums, target):
# Binary search over a sorted list that returns the element closest to target.
start = 0
end = len(nums) - 1
while start <= end:
mid = (start + end) // 2
if nums[mid] == target:
return nums[mid]
if nums[mid] > target:
end = mid - 1
else:
start = mid + 1
# After the loop start > end; clamp both indices so targets outside the
# array's range do not index past its ends.
start = min(start, len(nums) - 1)
end = max(end, 0)
if abs(nums[start] - target) < abs(nums[end] - target):
return nums[start]
else:
return nums[end]
print(search([10, 15, 20, 35, 60, 70, 100], 45))
|
# 1548 - "Fila do recreio" (recess line): count the students already standing in the correct position once grades are sorted in descending order.
T = int(input())
while (T > 0):
alunos = int(input())
notas = input().split()
for id, i in enumerate(notas):
notas[id] = int(notas[id])
tot = 0
notas_ord = sorted(notas) # sort the grades
notas_ord.reverse()
for id, i in enumerate(notas):
if (notas[id] == notas_ord[id]):
tot = tot + 1
print(tot)
T = (T - 1)
|
from flask_sqlalchemy import SQLAlchemy
from flask import Flask, render_template, flash, request, redirect, url_for, logging, session
from wtforms import Form, StringField, TextAreaField, PasswordField, IntegerField, validators
from passlib.hash import sha256_crypt
from functools import wraps
# Config Application
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql+pymysql://root:123456@127.0.0.1/sdsc5003'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db = SQLAlchemy(app)
connection = db.engine.raw_connection()
cur = connection.cursor()
# Create registration form
class RegisterForm(Form):
username = StringField('Username', [validators.Length(min=4, max=25)])
email = StringField('Email', [validators.Length(min=6, max=50)])
password = PasswordField('Password', [validators.DataRequired(),
validators.EqualTo('confirm', message='Passwords do not match.')])
confirm = PasswordField('Confirm password')
# Registration Page
@app.route('/register.html', methods=['GET', 'POST'])
def register():
form = RegisterForm(request.form)
if request.method == 'POST' and form.validate():
username = form.username.data
email = form.email.data
password = sha256_crypt.hash(str(form.password.data))  # .hash replaces the deprecated .encrypt
# Execute MySQL
cur.execute("SELECT * FROM users WHERE username = %s;", username)
result = cur.fetchone()
if result is not None:
flash('Username Existed.', 'danger')
return render_template('register.html', form=form)
db.engine.execute("INSERT INTO users(username, email, password) VALUES(%s, %s, %s);",
(username, email, password))
flash('Registered Successfully. Please login.', 'success')
return redirect(url_for('login'))
return render_template('register.html', form=form)
# Login Page
@app.route('/', methods=['GET', 'POST'])
def login():
session['logged_in'] = False
if request.method == 'POST':
username = request.form['username']
password_candidate = request.form['password']
cur.execute("SELECT * FROM users WHERE username = %s;", username)
result = cur.fetchone()
print(result)
if result is not None:
# Get stored hash
password = result[3]
# Compare Passwords
if sha256_crypt.verify(password_candidate, password):
session['logged_in'] = True
session['username'] = username
return redirect(url_for('index'))
else:
flash('Wrong Password.', 'danger')
else:
flash('Wrong Username.', 'danger')
return render_template('login.html')
# Forget Password Page
@app.route('/forgot-password.html')
def forgot_password():
return render_template('forgot-password.html')
def is_logged_in(f):
@wraps(f)
def wrap(*args, **kwargs):
if session.get('logged_in'):
return f(*args, **kwargs)
else:
flash('Unauthorized, Please Login!', 'danger')
return redirect(url_for('login'))
return wrap
# Index
@app.route('/index.html')
@is_logged_in
def index():
return render_template('index.html')
# hrForm Class, Please add more attributes here! including dept's attr and contract's attr
class HRForm(Form):
eid = IntegerField('eid', [validators.DataRequired()])
ename = StringField('Name', [validators.Length(min=1)])
gender = StringField('Gender', [validators.Length(min=1)])
age = IntegerField('Age', [validators.DataRequired()])
salary = IntegerField('Salary', [validators.DataRequired()])
department = StringField('Department', [validators.Length(min=1)])
# Recruitment Page (for addition and deletion of employees)
@app.route('/recruitment.html', methods=['GET', 'POST'])
@is_logged_in
def recruitment():
cur = db.engine.raw_connection().cursor()
cur.execute("SELECT * FROM employees;")
employees = cur.fetchall()
return render_template('recruitment.html', employees=employees)
# Emp Addition
@app.route('/add_employee', methods=['POST'])
@is_logged_in
def add_employee():
form = HRForm(request.form)
if request.method == 'POST' and form.validate():
eid = form.eid.data
name = form.ename.data
gender = form.gender.data
age = form.age.data
salary = form.salary.data
department = form.department.data
try:
db.engine.execute("INSERT INTO employees(eid, name, gender, age, salary, department) VALUES (%s, %s, %s, %s, %s, %s)",
(eid, name, gender, age, salary, department))
except Exception:
flash('Insert Unsuccessfully, Please Check Again!', 'danger')
return redirect(url_for('recruitment'))
flash('Insert Successfully!', 'success')
return redirect(url_for('recruitment'))
return render_template('recruitment.html', form=form)
# Emp Deletion
@app.route('/recruitment.html/<string:eid>', methods=['GET', 'POST'])
@is_logged_in
def delete_employee(eid):
if request.method == 'POST':
db.engine.execute("DELETE FROM employees WHERE eid = %s", [eid])
flash('Delete Successfully!', 'success')
return redirect(url_for('recruitment'))
# Employee Page (Blue One)
@app.route('/employee.html', methods=['GET', 'POST'])
@is_logged_in
def employee():
cur = db.engine.raw_connection().cursor()
cur.execute("SELECT * FROM employees;")
employees = cur.fetchall()
return render_template('employee.html', employees=employees)
# Selection of Employee
@app.route('/search_employee', methods=['POST', 'GET'])
@is_logged_in
def search_employee():
form = HRForm(request.form)
eid = form.eid.data
name = form.ename.data
print(name)
gender = form.gender.data
age = form.age.data
salary = form.salary.data
department = form.department.data
print(department)
cur = db.engine.raw_connection().cursor()
if eid is not None:
cur.execute("select * from employees where eid =%d;" % eid)
else:
cur.execute("select * from employees \
where name like '%%%s%%'\
and gender like '%%%s%%'\
and department like '%%%s%%';"
% (name, gender, department))
employees = cur.fetchall()
flash('Search Successfully!', 'success')
return render_template('employee.html', employees=employees)
# Department Page (Yellow One)
@app.route('/department.html', methods=['GET', 'POST'])
@is_logged_in
def department():
return render_template('department.html')
if __name__ == "__main__":
app.secret_key = '123'
print("Server is running...")
app.run(debug=True)
# @app.route('/charts.html')
# @is_logged_in
# def charts():
# return render_template('charts.html')
# @app.route('/tables.html')
# @is_logged_in
# def tables():
# return render_template('tables.html')
# # Article Form Class
# class ArticleForm(Form):
# title = StringField('Title', [validators.Length(min=1)])
# body = TextAreaField('Body', [validators.Length(min=1)])
#
#
# @app.route('/table1.html', methods=['GET', 'POST'])
# @is_logged_in
# def table1():
# cur = db.engine.raw_connection().cursor()
# cur.execute("SELECT * FROM articles;")
# articles = cur.fetchall()
# return render_template('table1.html', articles=articles)
#
#
# @app.route('/add_article', methods=['POST'])
# @is_logged_in
# def add_article():
# form = ArticleForm(request.form)
# if request.method == 'POST' and form.validate():
# title = form.title.data
# body = form.body.data
# db.engine.execute("INSERT INTO articles(title, body, transactor) VALUES (%s, %s, %s)",
# (title, body, session['username']))
# flash('Insert Successfully!', 'success')
# return redirect(url_for('table1'))
# return render_template('table1.html', form=form)
|
from __future__ import division, print_function
import random
import unittest
import numpy
# noinspection PyUnresolvedReferences
from six.moves import range
from smqtk.utils import bit_utils
class TestBitUtils (unittest.TestCase):
def test_int_to_bit_vector_large_0(self):
# Need at least one bit to represent 0.
numpy.testing.assert_array_equal(
bit_utils.int_to_bit_vector_large(0),
[False]
)
# Force 5 bits.
numpy.testing.assert_array_equal(
bit_utils.int_to_bit_vector_large(0, 5),
[False, False, False, False, False]
)
def test_int_to_bit_vector_large_1(self):
numpy.testing.assert_array_equal(
bit_utils.int_to_bit_vector_large(1),
[True]
)
numpy.testing.assert_array_equal(
bit_utils.int_to_bit_vector_large(1, 7),
([False] * 6) + [True]
)
def test_int_to_bit_vector_large_large(self):
# Try large integer bit vectors
int_val = (2**256) - 1
expected_vector = [True] * 256
numpy.testing.assert_array_equal(
bit_utils.int_to_bit_vector_large(int_val),
expected_vector
)
int_val = (2**512)
expected_vector = [True] + ([False] * 512)
numpy.testing.assert_array_equal(
bit_utils.int_to_bit_vector_large(int_val),
expected_vector
)
def test_int_to_bit_vector_large_invalid_bits(self):
# Cannot represent 5 in binary using 1 bit.
self.assertRaises(
ValueError,
bit_utils.int_to_bit_vector_large,
5, 1
)
def test_popcount(self):
self.assertEqual(bit_utils.popcount(1), 1)
self.assertEqual(bit_utils.popcount(2), 1)
self.assertEqual(bit_utils.popcount(3), 2)
self.assertEqual(bit_utils.popcount(2 ** 16), 1)
self.assertEqual(bit_utils.popcount(2**16 - 1), 16)
self.assertEqual(bit_utils.popcount(2 ** 32), 1)
self.assertEqual(bit_utils.popcount(2 ** 32 - 1), 32)
def test_popcount_0(self):
self.assertEqual(bit_utils.popcount(0), 0)
def test_popcount_limits(self):
# Make sure documented integer limit is truthful.
c = 10000
for _ in range(c):
# noinspection PyUnresolvedReferences
v = random.randint(0, bit_utils.popcount.v_max)
# Known method to always work based on counting python's binary
# string representation.
v_bin_count = bin(v).count('1')
# Test method
v_pop_count = bit_utils.popcount(v)
self.assertEqual(v_pop_count, v_bin_count,
'popcount failed for integer %d' % v)
|
"""Unit test for GravitySpy
"""
__author__ = 'Scott Coughlin <scott.coughlin@ligo.org>'
import os
import unittest2
class GravitySpyTests(unittest2.TestCase):
"""`TestCase` for the GravitySpy
"""
def test_api(self):
self.assertEqual(1,1)
|
import re
import sys
from clang.cindex import TranslationUnit, Cursor, CursorKind
from ipc_parser import parse_file
# --------------------------------------------------------------------------- #
class message:
def __init__(self, facade, method):
self.facade = facade
self.method = method
def args(method):
return method.args or []
def methods(struct):
return struct.methods or []
def is_facade(struct):
"""
Returns whether the struct name ends with the word `facade`.
This function works with both snake_case and CamelCase patterns.
"""
return re.match(r'.*(?:_f|F)acade$', struct.name) is not None
def is_listener(struct):
"""
Returns whether the struct name either starts with `on` or ends with
`listener`.
This function works with both snake_case and CamelCase patterns.
"""
matches = re.match(r'(?:^[oO]n[_A-Z].*|.*(?:_l|L)istener$)', struct.name)
return matches is not None
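# Quick illustration (hypothetical struct names, shown doctest-style):
#   >>> from types import SimpleNamespace as S
#   >>> is_facade(S(name='mouse_facade')), is_facade(S(name='MouseFacade'))
#   (True, True)
#   >>> is_listener(S(name='on_click')), is_listener(S(name='ClickListener'))
#   (True, True)
#   >>> is_listener(S(name='online'))  # an 'on' prefix alone is not enough
#   False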
# parse all structs
# find facades
# for each facade, generate_boilerplate(facade, method)
def generate_boilerplate(facade):
"""Emit IPC boilerplate for a facade struct (as returned by ipc_parser); not yet implemented."""
raise NotImplementedError
def main():
file = sys.argv[1] if len(sys.argv) >= 2 else 'example/mouse_facade.hpp'
parser_result = parse_file(file)
structs = parser_result['index'].values()
facades = filter(is_facade, structs)
messages = [message(f, m) for f in facades for m in methods(f)]
print(messages)
if __name__ == '__main__':
main()
|
from random import randint
import pyperclip
import sys
path = r'C:\Users\Martin\Documents\bombpartycheat\top10000'
wordlistone = open(path)
stringone = wordlistone.read()
def complexity(word):
points = 0
letters = ['w','k','j','q','b','g']
for letter in letters:
if letter in word:
points = points + 1
if len(word) >5: points = points + 1
if len(word) >8: points = points + 1
if len(word) >12: points = points + 1
# print (f"word: {word} points: {points}")
return points
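# Sanity-check examples for the scorer (illustrative words): 'quickly' contains the
# rare letters 'q' and 'k' and is longer than 5 characters, while 'tree' scores nothing.
assert complexity("quickly") == 3
assert complexity("tree") == 0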
while True:
arrayOfWords = []
letters = input("Please enter part of a word: ")
for word in stringone.splitlines():
if letters in word:
arrayOfWords.append(word)
if not arrayOfWords:
print("No matching word found.")
continue
matchingWord = max(arrayOfWords, key=complexity)
# matchingWord = arrayOfWords[randint(0,len(arrayOfWords))]
pyperclip.copy(matchingWord)
print (matchingWord)
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'C:\Users\QC\Desktop\IonTrap-WIPM-master\IonTrap-WIPM-master\GUI_Material\QC2_0TEST.ui'
#
# Created by: PyQt5 UI code generator 5.13.2
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(539, 822)
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
MainWindow.setFont(font)
MainWindow.setLayoutDirection(QtCore.Qt.LeftToRight)
MainWindow.setIconSize(QtCore.QSize(30, 30))
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.line = QtWidgets.QFrame(self.centralwidget)
self.line.setGeometry(QtCore.QRect(50, 80, 441, 20))
self.line.setFrameShape(QtWidgets.QFrame.HLine)
self.line.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line.setObjectName("line")
self.label_20 = QtWidgets.QLabel(self.centralwidget)
self.label_20.setGeometry(QtCore.QRect(50, 20, 491, 61))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setItalic(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_20.setFont(font)
self.label_20.setObjectName("label_20")
self.label_10 = QtWidgets.QLabel(self.centralwidget)
self.label_10.setGeometry(QtCore.QRect(60, 430, 131, 41))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(10)
font.setBold(True)
font.setItalic(False)
font.setWeight(75)
font.setStrikeOut(False)
self.label_10.setFont(font)
self.label_10.setObjectName("label_10")
self.label_33 = QtWidgets.QLabel(self.centralwidget)
self.label_33.setGeometry(QtCore.QRect(50, 100, 131, 41))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(10)
font.setBold(True)
font.setItalic(False)
font.setWeight(75)
font.setStrikeOut(False)
self.label_33.setFont(font)
self.label_33.setObjectName("label_33")
self.radioButton_rabi = QtWidgets.QRadioButton(self.centralwidget)
self.radioButton_rabi.setGeometry(QtCore.QRect(350, 490, 151, 19))
self.radioButton_rabi.setObjectName("radioButton_rabi")
self.radioButton_zeeman = QtWidgets.QRadioButton(self.centralwidget)
self.radioButton_zeeman.setGeometry(QtCore.QRect(350, 550, 141, 19))
self.radioButton_zeeman.setObjectName("radioButton_zeeman")
self.radioButton_cust = QtWidgets.QRadioButton(self.centralwidget)
self.radioButton_cust.setGeometry(QtCore.QRect(350, 610, 115, 19))
self.radioButton_cust.setObjectName("radioButton_cust")
self.pushButton = QtWidgets.QPushButton(self.centralwidget)
self.pushButton.setGeometry(QtCore.QRect(370, 680, 101, 51))
self.pushButton.setObjectName("pushButton")
self.radioButton_off = QtWidgets.QRadioButton(self.centralwidget)
self.radioButton_off.setGeometry(QtCore.QRect(350, 640, 115, 19))
self.radioButton_off.setChecked(True)
self.radioButton_off.setObjectName("radioButton_off")
self.Setting = QtWidgets.QTabWidget(self.centralwidget)
self.Setting.setGeometry(QtCore.QRect(50, 150, 441, 271))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.Setting.setFont(font)
self.Setting.setObjectName("Setting")
self.DPL_Cooling = QtWidgets.QWidget()
self.DPL_Cooling.setObjectName("DPL_Cooling")
self.doubleSpinBox_DPL = QtWidgets.QDoubleSpinBox(self.DPL_Cooling)
self.doubleSpinBox_DPL.setGeometry(QtCore.QRect(120, 20, 71, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.doubleSpinBox_DPL.setFont(font)
self.doubleSpinBox_DPL.setLayoutDirection(QtCore.Qt.LeftToRight)
self.doubleSpinBox_DPL.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.doubleSpinBox_DPL.setMaximum(999999.99)
self.doubleSpinBox_DPL.setObjectName("doubleSpinBox_DPL")
self.label_36 = QtWidgets.QLabel(self.DPL_Cooling)
self.label_36.setGeometry(QtCore.QRect(200, 20, 31, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_36.setFont(font)
self.label_36.setObjectName("label_36")
self.label_37 = QtWidgets.QLabel(self.DPL_Cooling)
self.label_37.setGeometry(QtCore.QRect(20, 20, 131, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_37.setFont(font)
self.label_37.setObjectName("label_37")
self.Laser_397_1_DPL = QtWidgets.QCheckBox(self.DPL_Cooling)
self.Laser_397_1_DPL.setGeometry(QtCore.QRect(140, 120, 91, 19))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.Laser_397_1_DPL.setFont(font)
self.Laser_397_1_DPL.setChecked(True)
self.Laser_397_1_DPL.setObjectName("Laser_397_1_DPL")
self.Laser_397_2_DPL = QtWidgets.QCheckBox(self.DPL_Cooling)
self.Laser_397_2_DPL.setGeometry(QtCore.QRect(210, 120, 91, 19))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.Laser_397_2_DPL.setFont(font)
self.Laser_397_2_DPL.setChecked(True)
self.Laser_397_2_DPL.setObjectName("Laser_397_2_DPL")
self.Laser_397_3_DPL = QtWidgets.QCheckBox(self.DPL_Cooling)
self.Laser_397_3_DPL.setGeometry(QtCore.QRect(280, 120, 91, 19))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.Laser_397_3_DPL.setFont(font)
self.Laser_397_3_DPL.setChecked(True)
self.Laser_397_3_DPL.setObjectName("Laser_397_3_DPL")
self.Laser_397_main_DPL = QtWidgets.QCheckBox(self.DPL_Cooling)
self.Laser_397_main_DPL.setGeometry(QtCore.QRect(20, 120, 91, 19))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setItalic(False)
font.setUnderline(False)
font.setWeight(50)
font.setStrikeOut(False)
self.Laser_397_main_DPL.setFont(font)
self.Laser_397_main_DPL.setChecked(True)
self.Laser_397_main_DPL.setObjectName("Laser_397_main_DPL")
self.Laser_866_DPL = QtWidgets.QCheckBox(self.DPL_Cooling)
self.Laser_866_DPL.setGeometry(QtCore.QRect(20, 150, 91, 19))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setUnderline(True)
font.setWeight(50)
font.setStrikeOut(False)
self.Laser_866_DPL.setFont(font)
self.Laser_866_DPL.setChecked(True)
self.Laser_866_DPL.setObjectName("Laser_866_DPL")
self.Laser_854_DPL = QtWidgets.QCheckBox(self.DPL_Cooling)
self.Laser_854_DPL.setGeometry(QtCore.QRect(20, 180, 91, 19))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setUnderline(True)
font.setWeight(50)
font.setStrikeOut(False)
self.Laser_854_DPL.setFont(font)
self.Laser_854_DPL.setChecked(True)
self.Laser_854_DPL.setObjectName("Laser_854_DPL")
self.line_2 = QtWidgets.QFrame(self.DPL_Cooling)
self.line_2.setGeometry(QtCore.QRect(10, 60, 411, 20))
self.line_2.setFrameShape(QtWidgets.QFrame.HLine)
self.line_2.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_2.setObjectName("line_2")
self.label_42 = QtWidgets.QLabel(self.DPL_Cooling)
self.label_42.setGeometry(QtCore.QRect(20, 80, 131, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_42.setFont(font)
self.label_42.setObjectName("label_42")
self.line_4 = QtWidgets.QFrame(self.DPL_Cooling)
self.line_4.setGeometry(QtCore.QRect(100, 120, 20, 81))
self.line_4.setFrameShape(QtWidgets.QFrame.VLine)
self.line_4.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_4.setObjectName("line_4")
self.label_44 = QtWidgets.QLabel(self.DPL_Cooling)
self.label_44.setGeometry(QtCore.QRect(140, 150, 131, 21))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_44.setFont(font)
self.label_44.setObjectName("label_44")
self.label_45 = QtWidgets.QLabel(self.DPL_Cooling)
self.label_45.setGeometry(QtCore.QRect(140, 180, 131, 21))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_45.setFont(font)
self.label_45.setObjectName("label_45")
self.Setting.addTab(self.DPL_Cooling, "")
self.tab_6 = QtWidgets.QWidget()
self.tab_6.setObjectName("tab_6")
self.label_41 = QtWidgets.QLabel(self.tab_6)
self.label_41.setGeometry(QtCore.QRect(20, 20, 131, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_41.setFont(font)
self.label_41.setObjectName("label_41")
self.label_40 = QtWidgets.QLabel(self.tab_6)
self.label_40.setGeometry(QtCore.QRect(180, 20, 31, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_40.setFont(font)
self.label_40.setObjectName("label_40")
self.doubleSpinBox_OP = QtWidgets.QDoubleSpinBox(self.tab_6)
self.doubleSpinBox_OP.setGeometry(QtCore.QRect(100, 20, 71, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.doubleSpinBox_OP.setFont(font)
self.doubleSpinBox_OP.setLayoutDirection(QtCore.Qt.LeftToRight)
self.doubleSpinBox_OP.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.doubleSpinBox_OP.setMaximum(999999.99)
self.doubleSpinBox_OP.setObjectName("doubleSpinBox_OP")
self.Laser_729_1_OP = QtWidgets.QCheckBox(self.tab_6)
self.Laser_729_1_OP.setGeometry(QtCore.QRect(140, 170, 91, 21))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.Laser_729_1_OP.setFont(font)
self.Laser_729_1_OP.setObjectName("Laser_729_1_OP")
self.Laser_729_3_OP = QtWidgets.QCheckBox(self.tab_6)
self.Laser_729_3_OP.setGeometry(QtCore.QRect(280, 170, 91, 21))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.Laser_729_3_OP.setFont(font)
self.Laser_729_3_OP.setObjectName("Laser_729_3_OP")
self.Laser_729_2_OP = QtWidgets.QCheckBox(self.tab_6)
self.Laser_729_2_OP.setGeometry(QtCore.QRect(210, 170, 91, 21))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.Laser_729_2_OP.setFont(font)
self.Laser_729_2_OP.setObjectName("Laser_729_2_OP")
self.Laser_729_4_OP = QtWidgets.QCheckBox(self.tab_6)
self.Laser_729_4_OP.setGeometry(QtCore.QRect(350, 170, 91, 21))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.Laser_729_4_OP.setFont(font)
self.Laser_729_4_OP.setObjectName("Laser_729_4_OP")
self.Laser_854_OP = QtWidgets.QCheckBox(self.tab_6)
self.Laser_854_OP.setGeometry(QtCore.QRect(20, 200, 91, 19))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setUnderline(True)
font.setWeight(50)
font.setStrikeOut(False)
self.Laser_854_OP.setFont(font)
self.Laser_854_OP.setCheckable(True)
self.Laser_854_OP.setChecked(True)
self.Laser_854_OP.setObjectName("Laser_854_OP")
self.line_3 = QtWidgets.QFrame(self.tab_6)
self.line_3.setGeometry(QtCore.QRect(10, 110, 411, 20))
self.line_3.setFrameShape(QtWidgets.QFrame.HLine)
self.line_3.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_3.setObjectName("line_3")
self.Laser_729_main_OP = QtWidgets.QCheckBox(self.tab_6)
self.Laser_729_main_OP.setGeometry(QtCore.QRect(20, 170, 91, 21))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setItalic(False)
font.setUnderline(True)
font.setWeight(50)
font.setStrikeOut(False)
self.Laser_729_main_OP.setFont(font)
self.Laser_729_main_OP.setChecked(True)
self.Laser_729_main_OP.setObjectName("Laser_729_main_OP")
self.label_43 = QtWidgets.QLabel(self.tab_6)
self.label_43.setGeometry(QtCore.QRect(20, 130, 131, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_43.setFont(font)
self.label_43.setObjectName("label_43")
self.line_5 = QtWidgets.QFrame(self.tab_6)
self.line_5.setGeometry(QtCore.QRect(100, 170, 20, 51))
self.line_5.setFrameShape(QtWidgets.QFrame.VLine)
self.line_5.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_5.setObjectName("line_5")
self.label_46 = QtWidgets.QLabel(self.tab_6)
self.label_46.setGeometry(QtCore.QRect(140, 200, 131, 21))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_46.setFont(font)
self.label_46.setObjectName("label_46")
self.doubleSpinBox_SB = QtWidgets.QDoubleSpinBox(self.tab_6)
self.doubleSpinBox_SB.setGeometry(QtCore.QRect(320, 20, 71, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.doubleSpinBox_SB.setFont(font)
self.doubleSpinBox_SB.setLayoutDirection(QtCore.Qt.LeftToRight)
self.doubleSpinBox_SB.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.doubleSpinBox_SB.setMaximum(999999.99)
self.doubleSpinBox_SB.setObjectName("doubleSpinBox_SB")
self.label_53 = QtWidgets.QLabel(self.tab_6)
self.label_53.setGeometry(QtCore.QRect(400, 20, 31, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_53.setFont(font)
self.label_53.setObjectName("label_53")
self.label_54 = QtWidgets.QLabel(self.tab_6)
self.label_54.setGeometry(QtCore.QRect(240, 20, 81, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_54.setFont(font)
self.label_54.setObjectName("label_54")
self.doubleSpinBox_OPSBrepeat = QtWidgets.QDoubleSpinBox(self.tab_6)
self.doubleSpinBox_OPSBrepeat.setGeometry(QtCore.QRect(100, 70, 71, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.doubleSpinBox_OPSBrepeat.setFont(font)
self.doubleSpinBox_OPSBrepeat.setLayoutDirection(QtCore.Qt.LeftToRight)
self.doubleSpinBox_OPSBrepeat.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.doubleSpinBox_OPSBrepeat.setMaximum(999999.99)
self.doubleSpinBox_OPSBrepeat.setObjectName("doubleSpinBox_OPSBrepeat")
self.label_56 = QtWidgets.QLabel(self.tab_6)
self.label_56.setGeometry(QtCore.QRect(180, 70, 41, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_56.setFont(font)
self.label_56.setObjectName("label_56")
self.label_57 = QtWidgets.QLabel(self.tab_6)
self.label_57.setGeometry(QtCore.QRect(20, 70, 81, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_57.setFont(font)
self.label_57.setObjectName("label_57")
self.Setting.addTab(self.tab_6, "")
self.tab_7 = QtWidgets.QWidget()
self.tab_7.setObjectName("tab_7")
self.Laser_729_4_opreating = QtWidgets.QCheckBox(self.tab_7)
self.Laser_729_4_opreating.setGeometry(QtCore.QRect(350, 120, 91, 21))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.Laser_729_4_opreating.setFont(font)
self.Laser_729_4_opreating.setObjectName("Laser_729_4_opreating")
self.Laser_729_main_opreating = QtWidgets.QCheckBox(self.tab_7)
self.Laser_729_main_opreating.setGeometry(QtCore.QRect(20, 120, 91, 21))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setItalic(False)
font.setUnderline(True)
font.setWeight(50)
font.setStrikeOut(False)
self.Laser_729_main_opreating.setFont(font)
self.Laser_729_main_opreating.setChecked(True)
self.Laser_729_main_opreating.setObjectName("Laser_729_main_opreating")
self.Laser_729_1_opreating = QtWidgets.QCheckBox(self.tab_7)
self.Laser_729_1_opreating.setGeometry(QtCore.QRect(140, 120, 91, 21))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.Laser_729_1_opreating.setFont(font)
self.Laser_729_1_opreating.setObjectName("Laser_729_1_opreating")
self.Laser_854_opreating = QtWidgets.QCheckBox(self.tab_7)
self.Laser_854_opreating.setGeometry(QtCore.QRect(20, 150, 91, 19))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setUnderline(True)
font.setWeight(50)
font.setStrikeOut(False)
self.Laser_854_opreating.setFont(font)
self.Laser_854_opreating.setChecked(True)
self.Laser_854_opreating.setObjectName("Laser_854_opreating")
self.line_6 = QtWidgets.QFrame(self.tab_7)
self.line_6.setGeometry(QtCore.QRect(100, 120, 20, 51))
self.line_6.setFrameShape(QtWidgets.QFrame.VLine)
self.line_6.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_6.setObjectName("line_6")
self.Laser_729_2_opreating = QtWidgets.QCheckBox(self.tab_7)
self.Laser_729_2_opreating.setGeometry(QtCore.QRect(210, 120, 91, 21))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.Laser_729_2_opreating.setFont(font)
self.Laser_729_2_opreating.setObjectName("Laser_729_2_opreating")
self.label_47 = QtWidgets.QLabel(self.tab_7)
self.label_47.setGeometry(QtCore.QRect(180, 20, 31, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_47.setFont(font)
self.label_47.setObjectName("label_47")
self.doubleSpinBox_opreating = QtWidgets.QDoubleSpinBox(self.tab_7)
self.doubleSpinBox_opreating.setGeometry(QtCore.QRect(100, 20, 71, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.doubleSpinBox_opreating.setFont(font)
self.doubleSpinBox_opreating.setLayoutDirection(QtCore.Qt.LeftToRight)
self.doubleSpinBox_opreating.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.doubleSpinBox_opreating.setMaximum(999999.99)
self.doubleSpinBox_opreating.setObjectName("doubleSpinBox_opreating")
self.Laser_729_3_opreating = QtWidgets.QCheckBox(self.tab_7)
self.Laser_729_3_opreating.setGeometry(QtCore.QRect(280, 120, 91, 21))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.Laser_729_3_opreating.setFont(font)
self.Laser_729_3_opreating.setObjectName("Laser_729_3_opreating")
self.label_48 = QtWidgets.QLabel(self.tab_7)
self.label_48.setGeometry(QtCore.QRect(20, 20, 71, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_48.setFont(font)
self.label_48.setObjectName("label_48")
self.label_49 = QtWidgets.QLabel(self.tab_7)
self.label_49.setGeometry(QtCore.QRect(140, 150, 131, 21))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_49.setFont(font)
self.label_49.setObjectName("label_49")
self.label_50 = QtWidgets.QLabel(self.tab_7)
self.label_50.setGeometry(QtCore.QRect(20, 80, 131, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_50.setFont(font)
self.label_50.setObjectName("label_50")
self.line_7 = QtWidgets.QFrame(self.tab_7)
self.line_7.setGeometry(QtCore.QRect(10, 60, 411, 20))
self.line_7.setFrameShape(QtWidgets.QFrame.HLine)
self.line_7.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_7.setObjectName("line_7")
self.Setting.addTab(self.tab_7, "")
self.tab_8 = QtWidgets.QWidget()
self.tab_8.setObjectName("tab_8")
self.doubleSpinBox_PMT = QtWidgets.QDoubleSpinBox(self.tab_8)
self.doubleSpinBox_PMT.setGeometry(QtCore.QRect(100, 20, 71, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.doubleSpinBox_PMT.setFont(font)
self.doubleSpinBox_PMT.setLayoutDirection(QtCore.Qt.LeftToRight)
self.doubleSpinBox_PMT.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.doubleSpinBox_PMT.setMaximum(999999.99)
self.doubleSpinBox_PMT.setObjectName("doubleSpinBox_PMT")
self.label_51 = QtWidgets.QLabel(self.tab_8)
self.label_51.setGeometry(QtCore.QRect(20, 20, 81, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setUnderline(True)
font.setWeight(50)
font.setStrikeOut(False)
self.label_51.setFont(font)
self.label_51.setObjectName("label_51")
self.label_52 = QtWidgets.QLabel(self.tab_8)
self.label_52.setGeometry(QtCore.QRect(180, 20, 31, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_52.setFont(font)
self.label_52.setObjectName("label_52")
self.Setting.addTab(self.tab_8, "")
self.tab_9 = QtWidgets.QWidget()
self.tab_9.setObjectName("tab_9")
self.label_55 = QtWidgets.QLabel(self.tab_9)
self.label_55.setGeometry(QtCore.QRect(30, 20, 341, 81))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_55.setFont(font)
self.label_55.setObjectName("label_55")
self.Setting.addTab(self.tab_9, "")
self.tabWidget_2 = QtWidgets.QTabWidget(self.centralwidget)
self.tabWidget_2.setGeometry(QtCore.QRect(50, 480, 281, 271))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.tabWidget_2.setFont(font)
self.tabWidget_2.setIconSize(QtCore.QSize(20, 20))
self.tabWidget_2.setUsesScrollButtons(False)
self.tabWidget_2.setObjectName("tabWidget_2")
self.tab_3 = QtWidgets.QWidget()
self.tab_3.setObjectName("tab_3")
self.label_18 = QtWidgets.QLabel(self.tab_3)
self.label_18.setGeometry(QtCore.QRect(200, 190, 41, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_18.setFont(font)
self.label_18.setObjectName("label_18")
self.label_5 = QtWidgets.QLabel(self.tab_3)
self.label_5.setGeometry(QtCore.QRect(200, 35, 31, 21))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_5.setFont(font)
self.label_5.setObjectName("label_5")
self.doubleSpinBox_rabistep = QtWidgets.QDoubleSpinBox(self.tab_3)
self.doubleSpinBox_rabistep.setGeometry(QtCore.QRect(120, 130, 71, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.doubleSpinBox_rabistep.setFont(font)
self.doubleSpinBox_rabistep.setLayoutDirection(QtCore.Qt.LeftToRight)
self.doubleSpinBox_rabistep.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.doubleSpinBox_rabistep.setMaximum(999999.99)
self.doubleSpinBox_rabistep.setObjectName("doubleSpinBox_rabistep")
self.label_9 = QtWidgets.QLabel(self.tab_3)
self.label_9.setGeometry(QtCore.QRect(20, 190, 91, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_9.setFont(font)
self.label_9.setObjectName("label_9")
self.doubleSpinBox_rabistart = QtWidgets.QDoubleSpinBox(self.tab_3)
self.doubleSpinBox_rabistart.setGeometry(QtCore.QRect(120, 30, 71, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.doubleSpinBox_rabistart.setFont(font)
self.doubleSpinBox_rabistart.setLayoutDirection(QtCore.Qt.LeftToRight)
self.doubleSpinBox_rabistart.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.doubleSpinBox_rabistart.setMaximum(999999.99)
self.doubleSpinBox_rabistart.setObjectName("doubleSpinBox_rabistart")
self.label_7 = QtWidgets.QLabel(self.tab_3)
self.label_7.setGeometry(QtCore.QRect(200, 136, 31, 20))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_7.setFont(font)
self.label_7.setObjectName("label_7")
self.label_4 = QtWidgets.QLabel(self.tab_3)
self.label_4.setGeometry(QtCore.QRect(20, 130, 91, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_4.setFont(font)
self.label_4.setObjectName("label_4")
self.label_6 = QtWidgets.QLabel(self.tab_3)
self.label_6.setGeometry(QtCore.QRect(200, 86, 31, 20))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_6.setFont(font)
self.label_6.setObjectName("label_6")
self.doubleSpinBox_rabiend = QtWidgets.QDoubleSpinBox(self.tab_3)
self.doubleSpinBox_rabiend.setGeometry(QtCore.QRect(120, 80, 71, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.doubleSpinBox_rabiend.setFont(font)
self.doubleSpinBox_rabiend.setLayoutDirection(QtCore.Qt.LeftToRight)
self.doubleSpinBox_rabiend.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.doubleSpinBox_rabiend.setMaximum(999999.99)
self.doubleSpinBox_rabiend.setObjectName("doubleSpinBox_rabiend")
self.spinBox_rabirepeat = QtWidgets.QSpinBox(self.tab_3)
self.spinBox_rabirepeat.setGeometry(QtCore.QRect(120, 190, 71, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.spinBox_rabirepeat.setFont(font)
self.spinBox_rabirepeat.setObjectName("spinBox_rabirepeat")
self.label_2 = QtWidgets.QLabel(self.tab_3)
self.label_2.setGeometry(QtCore.QRect(20, 35, 91, 21))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_2.setFont(font)
self.label_2.setObjectName("label_2")
self.label_3 = QtWidgets.QLabel(self.tab_3)
self.label_3.setGeometry(QtCore.QRect(20, 80, 91, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_3.setFont(font)
self.label_3.setObjectName("label_3")
self.tabWidget_2.addTab(self.tab_3, "")
self.tab_4 = QtWidgets.QWidget()
self.tab_4.setObjectName("tab_4")
self.label_19 = QtWidgets.QLabel(self.tab_4)
self.label_19.setGeometry(QtCore.QRect(220, 190, 41, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_19.setFont(font)
self.label_19.setObjectName("label_19")
self.spinBox_zeemanrepeat = QtWidgets.QSpinBox(self.tab_4)
self.spinBox_zeemanrepeat.setGeometry(QtCore.QRect(140, 190, 71, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.spinBox_zeemanrepeat.setFont(font)
self.spinBox_zeemanrepeat.setObjectName("spinBox_zeemanrepeat")
self.label_12 = QtWidgets.QLabel(self.tab_4)
self.label_12.setGeometry(QtCore.QRect(220, 130, 31, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_12.setFont(font)
self.label_12.setObjectName("label_12")
self.label_13 = QtWidgets.QLabel(self.tab_4)
self.label_13.setGeometry(QtCore.QRect(20, 190, 91, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_13.setFont(font)
self.label_13.setObjectName("label_13")
self.label_15 = QtWidgets.QLabel(self.tab_4)
self.label_15.setGeometry(QtCore.QRect(20, 130, 141, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_15.setFont(font)
self.label_15.setObjectName("label_15")
self.doubleSpinBox_zeemanend = QtWidgets.QDoubleSpinBox(self.tab_4)
self.doubleSpinBox_zeemanend.setGeometry(QtCore.QRect(140, 80, 71, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.doubleSpinBox_zeemanend.setFont(font)
self.doubleSpinBox_zeemanend.setLayoutDirection(QtCore.Qt.LeftToRight)
self.doubleSpinBox_zeemanend.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.doubleSpinBox_zeemanend.setMaximum(999999.99)
self.doubleSpinBox_zeemanend.setObjectName("doubleSpinBox_zeemanend")
self.doubleSpinBox_zeemanstep = QtWidgets.QDoubleSpinBox(self.tab_4)
self.doubleSpinBox_zeemanstep.setGeometry(QtCore.QRect(140, 130, 71, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.doubleSpinBox_zeemanstep.setFont(font)
self.doubleSpinBox_zeemanstep.setLayoutDirection(QtCore.Qt.LeftToRight)
self.doubleSpinBox_zeemanstep.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.doubleSpinBox_zeemanstep.setMaximum(999999.99)
self.doubleSpinBox_zeemanstep.setObjectName("doubleSpinBox_zeemanstep")
self.label_14 = QtWidgets.QLabel(self.tab_4)
self.label_14.setGeometry(QtCore.QRect(20, 80, 131, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_14.setFont(font)
self.label_14.setObjectName("label_14")
self.doubleSpinBox_zeemanstart = QtWidgets.QDoubleSpinBox(self.tab_4)
self.doubleSpinBox_zeemanstart.setGeometry(QtCore.QRect(140, 30, 71, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.doubleSpinBox_zeemanstart.setFont(font)
self.doubleSpinBox_zeemanstart.setLayoutDirection(QtCore.Qt.LeftToRight)
self.doubleSpinBox_zeemanstart.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.doubleSpinBox_zeemanstart.setMaximum(999999.99)
self.doubleSpinBox_zeemanstart.setObjectName("doubleSpinBox_zeemanstart")
self.label_16 = QtWidgets.QLabel(self.tab_4)
self.label_16.setGeometry(QtCore.QRect(220, 30, 31, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_16.setFont(font)
self.label_16.setObjectName("label_16")
self.label_17 = QtWidgets.QLabel(self.tab_4)
self.label_17.setGeometry(QtCore.QRect(220, 80, 31, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_17.setFont(font)
self.label_17.setObjectName("label_17")
self.label_11 = QtWidgets.QLabel(self.tab_4)
self.label_11.setGeometry(QtCore.QRect(20, 30, 131, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_11.setFont(font)
self.label_11.setObjectName("label_11")
self.tabWidget_2.addTab(self.tab_4, "")
self.tab_5 = QtWidgets.QWidget()
self.tab_5.setObjectName("tab_5")
self.label_28 = QtWidgets.QLabel(self.tab_5)
self.label_28.setGeometry(QtCore.QRect(180, 30, 31, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_28.setFont(font)
self.label_28.setObjectName("label_28")
self.label_21 = QtWidgets.QLabel(self.tab_5)
self.label_21.setGeometry(QtCore.QRect(20, 80, 91, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_21.setFont(font)
self.label_21.setObjectName("label_21")
self.doubleSpinBox_custTime = QtWidgets.QDoubleSpinBox(self.tab_5)
self.doubleSpinBox_custTime.setGeometry(QtCore.QRect(100, 80, 71, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.doubleSpinBox_custTime.setFont(font)
self.doubleSpinBox_custTime.setLayoutDirection(QtCore.Qt.LeftToRight)
self.doubleSpinBox_custTime.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.doubleSpinBox_custTime.setMaximum(999999.99)
self.doubleSpinBox_custTime.setObjectName("doubleSpinBox_custTime")
self.doubleSpinBox_custF = QtWidgets.QDoubleSpinBox(self.tab_5)
self.doubleSpinBox_custF.setGeometry(QtCore.QRect(100, 30, 71, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.doubleSpinBox_custF.setFont(font)
self.doubleSpinBox_custF.setLayoutDirection(QtCore.Qt.LeftToRight)
self.doubleSpinBox_custF.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.doubleSpinBox_custF.setMaximum(999999.99)
self.doubleSpinBox_custF.setObjectName("doubleSpinBox_custF")
self.label_29 = QtWidgets.QLabel(self.tab_5)
self.label_29.setGeometry(QtCore.QRect(20, 140, 91, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_29.setFont(font)
self.label_29.setObjectName("label_29")
self.label_23 = QtWidgets.QLabel(self.tab_5)
self.label_23.setGeometry(QtCore.QRect(180, 80, 31, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_23.setFont(font)
self.label_23.setObjectName("label_23")
self.spinBox_custreapeat = QtWidgets.QSpinBox(self.tab_5)
self.spinBox_custreapeat.setGeometry(QtCore.QRect(100, 140, 71, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.spinBox_custreapeat.setFont(font)
self.spinBox_custreapeat.setObjectName("spinBox_custreapeat")
self.label_22 = QtWidgets.QLabel(self.tab_5)
self.label_22.setGeometry(QtCore.QRect(20, 30, 131, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_22.setFont(font)
self.label_22.setObjectName("label_22")
self.label_27 = QtWidgets.QLabel(self.tab_5)
self.label_27.setGeometry(QtCore.QRect(180, 140, 41, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_27.setFont(font)
self.label_27.setObjectName("label_27")
self.tabWidget_2.addTab(self.tab_5, "")
self.label_34 = QtWidgets.QLabel(self.centralwidget)
self.label_34.setGeometry(QtCore.QRect(440, 30, 51, 41))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(10)
font.setBold(True)
font.setItalic(False)
font.setWeight(75)
font.setStrikeOut(False)
self.label_34.setFont(font)
self.label_34.setObjectName("label_34")
self.radioButton_zeeman_2 = QtWidgets.QRadioButton(self.centralwidget)
self.radioButton_zeeman_2.setGeometry(QtCore.QRect(350, 580, 141, 19))
self.radioButton_zeeman_2.setObjectName("radioButton_zeeman_2")
self.radioButton_rabi_2 = QtWidgets.QRadioButton(self.centralwidget)
self.radioButton_rabi_2.setGeometry(QtCore.QRect(350, 520, 181, 19))
self.radioButton_rabi_2.setObjectName("radioButton_rabi_2")
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtWidgets.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 539, 23))
self.menubar.setObjectName("menubar")
self.menuMain = QtWidgets.QMenu(self.menubar)
self.menuMain.setObjectName("menuMain")
self.menuAdvance = QtWidgets.QMenu(self.menubar)
self.menuAdvance.setObjectName("menuAdvance")
MainWindow.setMenuBar(self.menubar)
self.statusbar = QtWidgets.QStatusBar(MainWindow)
self.statusbar.setObjectName("statusbar")
MainWindow.setStatusBar(self.statusbar)
self.actionreset = QtWidgets.QAction(MainWindow)
self.actionreset.setObjectName("actionreset")
self.actionClear_Settings = QtWidgets.QAction(MainWindow)
self.actionClear_Settings.setObjectName("actionClear_Settings")
self.actionClear_Data = QtWidgets.QAction(MainWindow)
self.actionClear_Data.setObjectName("actionClear_Data")
self.actionPaulse_Shaping = QtWidgets.QAction(MainWindow)
self.actionPaulse_Shaping.setObjectName("actionPaulse_Shaping")
self.actionDDS_Controller = QtWidgets.QAction(MainWindow)
self.actionDDS_Controller.setObjectName("actionDDS_Controller")
self.menuMain.addAction(self.actionreset)
self.menuMain.addAction(self.actionClear_Settings)
self.menuMain.addAction(self.actionClear_Data)
self.menuAdvance.addAction(self.actionPaulse_Shaping)
self.menuAdvance.addAction(self.actionDDS_Controller)
self.menubar.addAction(self.menuMain.menuAction())
self.menubar.addAction(self.menuAdvance.menuAction())
self.retranslateUi(MainWindow)
self.Setting.setCurrentIndex(1)
self.tabWidget_2.setCurrentIndex(0)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
self.label_20.setText(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:16pt; font-weight:600; color:#000000;\">Ion Trap QC Control System 2.1</span></p></body></html>"))
self.label_10.setText(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:11pt; color:#002800;\">Mode</span></p></body></html>"))
self.label_33.setText(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:11pt; font-weight:600; color:#002800;\">Basic Setting</span></p></body></html>"))
self.radioButton_rabi.setText(_translate("MainWindow", "Rabi Scan (Not shaped)"))
self.radioButton_zeeman.setText(_translate("MainWindow", "Zeeman Scan (DDS)"))
self.radioButton_cust.setText(_translate("MainWindow", "Customized"))
self.pushButton.setText(_translate("MainWindow", "Submit"))
self.radioButton_off.setText(_translate("MainWindow", "OFF"))
self.label_36.setText(_translate("MainWindow", "us"))
self.label_37.setText(_translate("MainWindow", "Doppler Cooling"))
self.Laser_397_1_DPL.setText(_translate("MainWindow", "397_1"))
self.Laser_397_2_DPL.setText(_translate("MainWindow", "397_2"))
self.Laser_397_3_DPL.setText(_translate("MainWindow", "397_3"))
self.Laser_397_main_DPL.setText(_translate("MainWindow", "397 Main"))
self.Laser_866_DPL.setText(_translate("MainWindow", "866"))
self.Laser_854_DPL.setText(_translate("MainWindow", "854"))
self.label_42.setText(_translate("MainWindow", "Laser Selected"))
self.label_44.setText(_translate("MainWindow", "NA"))
self.label_45.setText(_translate("MainWindow", "NA"))
self.Setting.setTabText(self.Setting.indexOf(self.DPL_Cooling), _translate("MainWindow", "DPL Cooling"))
self.label_41.setText(_translate("MainWindow", "Optical Pump"))
self.label_40.setText(_translate("MainWindow", "us"))
self.Laser_729_1_OP.setText(_translate("MainWindow", "729_1"))
self.Laser_729_3_OP.setText(_translate("MainWindow", "729_3"))
self.Laser_729_2_OP.setText(_translate("MainWindow", "729_2"))
self.Laser_729_4_OP.setText(_translate("MainWindow", "729_4"))
self.Laser_854_OP.setText(_translate("MainWindow", "854"))
self.Laser_729_main_OP.setText(_translate("MainWindow", "729 Main"))
self.label_43.setText(_translate("MainWindow", "Laser Selected"))
self.label_46.setText(_translate("MainWindow", "NA"))
self.label_53.setText(_translate("MainWindow", "us"))
self.label_54.setText(_translate("MainWindow", "SB Cooling"))
self.label_56.setText(_translate("MainWindow", "times"))
self.label_57.setText(_translate("MainWindow", "Repeat"))
self.Setting.setTabText(self.Setting.indexOf(self.tab_6), _translate("MainWindow", "Opt Pump SB Cooling"))
self.Laser_729_4_opreating.setText(_translate("MainWindow", "729_4"))
self.Laser_729_main_opreating.setText(_translate("MainWindow", "729 Main"))
self.Laser_729_1_opreating.setText(_translate("MainWindow", "729_1"))
self.Laser_854_opreating.setText(_translate("MainWindow", "854 1"))
self.Laser_729_2_opreating.setText(_translate("MainWindow", "729_2"))
self.label_47.setText(_translate("MainWindow", "us"))
self.Laser_729_3_opreating.setText(_translate("MainWindow", "729_3"))
        self.label_48.setText(_translate("MainWindow", "Operating"))
self.label_49.setText(_translate("MainWindow", "NA"))
self.label_50.setText(_translate("MainWindow", "Laser Selected"))
        self.Setting.setTabText(self.Setting.indexOf(self.tab_7), _translate("MainWindow", "Operating"))
self.label_51.setText(_translate("MainWindow", "Detecting"))
self.label_52.setText(_translate("MainWindow", "us"))
self.Setting.setTabText(self.Setting.indexOf(self.tab_8), _translate("MainWindow", "Detecting"))
        self.label_55.setText(_translate("MainWindow", "<html><head/><body><p>Temporarily we couple the operation period to the frequency </p><p>of 50 Hz. If you want to change or cancel it, please change </p><p>the code responsible for running (not in the GUI file).</p></body></html>"))
self.Setting.setTabText(self.Setting.indexOf(self.tab_9), _translate("MainWindow", "Gap"))
self.label_18.setText(_translate("MainWindow", "times"))
self.label_5.setText(_translate("MainWindow", "us"))
self.label_9.setText(_translate("MainWindow", "Repeat"))
self.label_7.setText(_translate("MainWindow", "us"))
self.label_4.setText(_translate("MainWindow", "Time for Step"))
self.label_6.setText(_translate("MainWindow", "us"))
self.label_2.setText(_translate("MainWindow", "Time to Start"))
self.label_3.setText(_translate("MainWindow", "Time to End"))
self.tabWidget_2.setTabText(self.tabWidget_2.indexOf(self.tab_3), _translate("MainWindow", "Rabi Scan"))
self.label_19.setText(_translate("MainWindow", "times"))
self.label_12.setText(_translate("MainWindow", "Hz"))
self.label_13.setText(_translate("MainWindow", "Repeat"))
self.label_15.setText(_translate("MainWindow", "Frequency for Step"))
self.label_14.setText(_translate("MainWindow", "Frequency of End"))
self.label_16.setText(_translate("MainWindow", "Hz"))
self.label_17.setText(_translate("MainWindow", "Hz"))
self.label_11.setText(_translate("MainWindow", "Frequency of Start"))
self.tabWidget_2.setTabText(self.tabWidget_2.indexOf(self.tab_4), _translate("MainWindow", "Zeeman Scan"))
self.label_28.setText(_translate("MainWindow", "Hz"))
self.label_21.setText(_translate("MainWindow", "Rabi Time"))
self.label_29.setText(_translate("MainWindow", "Repeat"))
self.label_23.setText(_translate("MainWindow", "us"))
self.label_22.setText(_translate("MainWindow", "Frequency "))
self.label_27.setText(_translate("MainWindow", "times"))
        self.tabWidget_2.setTabText(self.tabWidget_2.indexOf(self.tab_5), _translate("MainWindow", "Customized"))
self.label_34.setText(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:11pt; font-style:italic; color:#000000;\">WIPM </span></p></body></html>"))
self.radioButton_zeeman_2.setText(_translate("MainWindow", "Zeeman Scan (AWG)"))
self.radioButton_rabi_2.setText(_translate("MainWindow", "Rabi Scan (shaped)"))
self.menuMain.setTitle(_translate("MainWindow", "Main"))
self.menuAdvance.setTitle(_translate("MainWindow", "Advance"))
self.actionreset.setText(_translate("MainWindow", "Introduction"))
self.actionClear_Settings.setText(_translate("MainWindow", "Clear Settings"))
self.actionClear_Data.setText(_translate("MainWindow", "Clear Data"))
        self.actionPaulse_Shaping.setText(_translate("MainWindow", "Pulse Shaping"))
self.actionDDS_Controller.setText(_translate("MainWindow", "DDS Controller"))
|
from urllib.parse import urlencode
url = "http://in-stjoseph-assessor.governmax.com/propertymax/search_property.asp?l_nm=owner&user=guest_in-stjoseph-assessor&pass=manatron&sid={}"
query = {
'l_nm': 'owner',
'user': 'guest_in-stjoseph-assessor',
'pass': 'manatron',
'sid': 'F83D36C5F0094B86B4BE9307649DA66F'
}
query_encoded = urlencode(query)
new_url = "http://in-stjoseph-assessor.governmax.com/propertymax/search_property.asp?" + query_encoded
print(new_url)
sid = 'F83D36C5F0094B86B4BE9307649DA66F'
full_name = "Michael+Jason"
query = {
'sid': sid,
'l_nm': 'owner',
'l_wc': '|name_search=CONTAINS|n.own_name='+full_name,
'name_search': 'CONTAINS',
'n.own_name': full_name
}
query_encoded = urlencode(query).replace("%7C", "|").replace('%3D', '=').replace('%2B', "+")
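# urlencode percent-encodes '|' (%7C), '=' (%3D) and '+' (%2B) inside the l_wc value;
# the replace() calls above restore the literal characters in the final URL.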
new_url = "http://in-stjoseph-assessor.governmax.com/propertymax/search_property.asp?" + query_encoded
print(new_url)
|
def maximum_number(*args):
max_num = args[0]
for i in args:
if i > max_num:
max_num = i
return max_num
print(maximum_number(-10,-4,-7,-10,-30))
|
from src.Model import Model
class DefaultInput(object):
def __init__(self, model):
if not isinstance(model, Model):
raise TypeError("Passed wrong object to DefaultInput")
self.model = model
def up(self):
self.model.stepUp()
def down(self):
self.model.stepDown()
def start(self):
pass
|
import matplotlib.pyplot as plt
import numpy as np
import uncertainties.unumpy as unp
import scipy.constants as con
from scipy.optimize import curve_fit
from scipy import stats
from uncertainties import ufloat
from matplotlib.ticker import (MultipleLocator, FormatStrFormatter,
AutoMinorLocator)
########## Fit function
def f(x, a, b):
return a * x + b
########## BETA emitter
# Background measurement (null effect)
T=900
N=586
errN = np.sqrt(N)
N /= T
errN = errN/T
N0 = ufloat(N, errN)
print('Nullrate /s: ', N0)
################################################################################
print('\n\n'+'BETA')
t, N, d, errd = np.genfromtxt('data/beta.txt', unpack=True)
###### Mass per unit area
rho = 2.7 # g/cm^3; density of aluminium; https://www.chemie.de/lexikon/Aluminium.html
d = unp.uarray(d, errd)
d *= 10**(-4) # cm
R = rho * d # g/cm^2
print('Massenbelegung in g/cm^2: \n',R)
######
N /= t # events per second
N = N - unp.nominal_values(N0) # correct for the background rate
print(N)
# Skip index 4, since the value is < 0
mask = (N>=0)
R = R[mask]
N = N[mask]
# Uncertainty of N
errN = np.sqrt(N)
Events = unp.uarray(N, errN)
print('\nEvents per second korrigiert: \n', Events)
print(np.log(N))
# Fit 1
params, cov = curve_fit(f, unp.nominal_values(R[0:5]), np.log(N[0:5]))
err = np.sqrt(np.diag(cov))
# Print fit parameters 1
print('\nFitparameter:')
print('a = ', params[0], r'\pm', err[0])
print('b = ', params[1], r'\pm', err[1])
# For the exponential form, B = e^b
print('B = ',np.e**params[1], r'\pm', np.e**err[1])
# Fit 2
params2, cov2 = curve_fit(f, unp.nominal_values(R[4:10]), np.log(N[4:10]))
err2 = np.sqrt(np.diag(cov2))
# Print fit parameters 2
print('\nFitparameter:')
print('a = ', params2[0], r'\pm', err2[0])
print('b = ', params2[1], r'\pm', err2[1])
# For the exponential form, B = e^b
print('B = ',np.e**params2[1], r'\pm', np.e**err2[1])
#R_MAX
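# Intersection of the two linear fits: A1*R + B1 = A2*R + B2  =>  R_MAX = (B2 - B1) / (A1 - A2);
# EMAX then follows from the empirical range-energy relation E_max = 1.92 * sqrt(R_max^2 + 0.22 * R_max) in MeV.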
A1=ufloat(params[0],err[0])
A2=ufloat(params2[0],err2[0])
B1=ufloat(params[1],err[1])
B2=ufloat(params2[1],err2[1])
RMAX=(B2-B1)/(A1-A2)
EMAX=1.92* (RMAX**2+0.22*RMAX)**(0.5)
print('RMAX= ',RMAX,'\nEMAX= /MeV',EMAX)
# linspace for the fits
x = np.linspace(0.03, 0.135, 1000)
fig = plt.figure()
ax1 = fig.add_axes([0.1, 0.13, 0.8, 0.83])
ax1b = fig.add_axes([0.17, 0.20, 0.20, 0.19])
for axis in ['top','bottom','left','right']:
ax1.spines[axis].set_linewidth(0.3)
ax1b.spines[axis].set_linewidth(0.3)
# Measured values and errors
ax1.errorbar(unp.nominal_values(R), np.log(N), xerr=unp.std_devs(R), yerr=np.log(errN), fmt='o', color='#d84d0d', markersize=2, elinewidth=0.7, label='Messwerte')
# Fits
ax1.plot(x, f(x, *params), linewidth=0.7, color='b', label='Linearer Fit (Bereich 1)')
ax1.plot(x, f(x, *params2), linewidth=0.7, color='g', label='Linearer Fit (Bereich 2)')
# Plot parameters
ax1.set_xlabel(r'$R \:/\: \si{\gram\centi\meter^{-2}}$')
ax1.set_ylabel(r'$\ln[(N - N_0) \:/\: \si{\per\second}]$')
# ax1.set_yscale('log')
ax1.grid(linestyle='dotted', linewidth=0.3)
leg1 = ax1.legend(loc='best', fancybox=False, fontsize='small', edgecolor='k')
leg1.get_frame().set_linewidth(0.3)
ax1.set_xlim(0.03, 0.135)
ax1.set_ylim(-5.5, 3)
ax1.xaxis.set_minor_locator(AutoMinorLocator())
ax1.yaxis.set_minor_locator(AutoMinorLocator())
ax1.tick_params(axis='both', direction='in')
ax1.tick_params(which='major', direction='in', length=7, width=0.3)
ax1.tick_params(which='minor', direction='in', length=4, width=0.3)
# subplot
sp = unp.nominal_values(RMAX)
x1b = np.linspace(sp-0.004, sp+0.004, 1000)
ax1b.plot(x1b, f(x1b, *params), color='b', linewidth=0.7)
ax1b.plot(x1b, f(x1b, *params2), color='g', linewidth=0.7)
ax1b.vlines(sp, -2.5, -1, linewidth=0.5, alpha=0.5)
# ax1b.set_xlabel(r'$R \:/\: \si{\gram\centi\meter^{-2}}$', fontsize=7)
# ax1b.set_ylabel(r'$\ln[(N - N_0) \:/\: \si{\per\second}]$', fontsize=7)
ax1b.set_xlim(sp-0.004, sp+0.004)
ax1b.set_ylim(-2.5, -1)
ax1b.set_title('Schnittpunkt' + '\n' + 'der beiden Fits', fontsize=7)
ax1b.xaxis.set_minor_locator(AutoMinorLocator())
ax1b.yaxis.set_minor_locator(AutoMinorLocator())
ax1b.tick_params(axis='both', direction='in')
ax1b.tick_params(which='major', direction='in', length=7, width=0.3, labelsize=6)
ax1b.tick_params(which='minor', direction='in', length=4, width=0.3, labelsize=6)
ax1b.grid(linestyle='dotted', linewidth=0.3)
# unfortunately not (yet) possible via matplotlibrc
plt.tight_layout(pad=0, h_pad=1.08, w_pad=1.08)
plt.savefig('plots/beta.pdf')
plt.close()
########## GAMMA emitter
# Background measurement (null effect)
print('\n\n'+'Nullrate GAMMA')
T=900
N=1049
errN = np.sqrt(N)
N /= T
errN = errN/T
N0 = ufloat(N, errN)
print('Nullrate /s: ', N0)
################################ LEAD ##########################################
# Absorption law
# def ff(x, mu, N0):
# return N0*np.e**(mu*x)
print('\n\n'+'BLEI')
db, tb, Nb = np.genfromtxt('data/datenBlei.txt', unpack=True)
db *= 10**(-2)
errNb = np.sqrt(Nb)/tb
Nb /= tb # events per second
Nb = Nb - unp.nominal_values(N0) # correct for the background rate
print(Nb)
# N = ufloat(N, errN)
Eventsb = unp.uarray(Nb, errNb)
print('\nEvents per second korrigiert: ', Eventsb)
# Fit for lead
paramsb, covb = curve_fit(f, db, np.log(Nb))
# paramsb, covb = curve_fit(ff, db, Nb)
errb = np.sqrt(np.diag(covb))
# Print fit parameters for lead
print('\nFitparameter:')
print('a = ', paramsb[0], r'\pm', errb[0])
print('N0 = ', paramsb[1], r'\pm', errb[1])
print('\nmu = ', -paramsb[0], r'\pm', errb[0])
# linspace (for lead and iron)
x2 = np.linspace(0.002, 0.052, 1000)
fig = plt.figure()
ax2 = fig.add_axes([0.1, 0.13, 0.8, 0.83])
for axis in ['top','bottom','left','right']:
ax2.spines[axis].set_linewidth(0.3)
# Measured values and errors
ax2.errorbar(db, np.log(Nb), yerr=np.log(errNb), fmt='o', color='#d84d0d', markersize=2, elinewidth=0.7, label='Messwerte')
# Fit
ax2.plot(x2, f(x2, *paramsb), linewidth=0.7, label='Regressionskurve')
# Plot parameters
ax2.set_xlabel(r'$d \:/\: \si{\meter}$')
ax2.set_ylabel(r'$\ln[(N - N_0) \:/\: \si{\per\second}]$')
# ax1.set_yscale('log')
ax2.grid(linestyle='dotted', linewidth=0.3)
leg2 = ax2.legend(loc=3, fancybox=False, fontsize='small', edgecolor='k')
leg2.get_frame().set_linewidth(0.3)
ax2.set_xlim(0.002, 0.052)
ax2.xaxis.set_minor_locator(AutoMinorLocator())
ax2.yaxis.set_minor_locator(AutoMinorLocator())
ax2.tick_params(axis='both', direction='in')
ax2.tick_params(which='major', direction='in', length=7, width=0.3)
ax2.tick_params(which='minor', direction='in', length=4, width=0.3)
# unfortunately not (yet) possible via matplotlibrc
plt.tight_layout(pad=0, h_pad=1.08, w_pad=1.08)
plt.savefig('plots/Blei.pdf')
plt.close()
############################### IRON ##########################################
print('\n\n'+'EISEN')
de, te, Ne = np.genfromtxt('data/datenEisen.txt', unpack=True)
de *= 10**(-2)
errNe = np.sqrt(Ne)/te
Ne /= te # events per second
Ne = Ne - unp.nominal_values(N0) # correct for the background rate
print(Ne)
# N = ufloat(N, errN)
Eventse = unp.uarray(Ne, errNe)
print('\nEvents per second korrigiert: ', Eventse)
# Fit for iron
paramse, cove = curve_fit(f, de, np.log(Ne))
# paramse, cove = curve_fit(ff, de, Ne)
erre = np.sqrt(np.diag(cove))
# Print fit parameters for iron
print('\nFitparameter:')
print('a = ', paramse[0], r'\pm', erre[0])
print('N_0 = ', paramse[1], r'\pm', erre[1])
print('\nmu = ', -paramse[0], r'\pm', erre[0])
fig = plt.figure()
ax3 = fig.add_axes([0.1, 0.13, 0.8, 0.83])
for axis in ['top','bottom','left','right']:
ax3.spines[axis].set_linewidth(0.3)
# Measured values and errors
ax3.errorbar(de, np.log(Ne), yerr=np.log(errNe), fmt='o', color='#d84d0d', markersize=2, elinewidth=0.7, label='Messwerte')
# Fit
ax3.plot(x2, f(x2, *paramse), linewidth=0.7, label='Regressionskurve')
# Plot parameters
ax3.set_xlabel(r'$d \:/\: \si{\meter}$')
ax3.set_ylabel(r'$\ln[(N - N_0) \:/\: \si{\per\second}]$')
# ax1.set_yscale('log')
ax3.grid(linestyle='dotted', linewidth=0.3)
leg3 = ax3.legend(loc=3, fancybox=False, fontsize='small', edgecolor='k')
leg3.get_frame().set_linewidth(0.3)
ax3.set_xlim(0.002, 0.052)
ax3.xaxis.set_minor_locator(AutoMinorLocator())
ax3.yaxis.set_minor_locator(AutoMinorLocator())
ax3.tick_params(axis='both', direction='in')
ax3.tick_params(which='major', direction='in', length=7, width=0.3)
ax3.tick_params(which='minor', direction='in', length=4, width=0.3)
# unfortunately not (yet) possible via matplotlibrc
plt.tight_layout(pad=0, h_pad=1.08, w_pad=1.08)
plt.savefig('plots/Eisen.pdf')
plt.close()
# Theory
r = 2.82e-15 # m
eps = 1.295
# Nl = 2.686780111e19 # cm^-3; no idea what this is, but we do not need it anyway. Or is it supposed to be the Avogadro constant?
rhoB = 11.34 * 10**6 #g/m^3
rhoE = 7.874 * 10**6 #g/m^3
ZE = 26
ZB = 82
# ME = 9.273e-20 # gram; not sure how these values were obtained, the "correct" ones are given further below
# MB = 3.441e-21 #gram
sig = 2 * np.pi * r**2 * (((1 + eps) / eps**2) * (((2 * (1 + eps)) / (1 + 2 * eps)) - 1 / eps * np.log(1 + 2 * eps)) + 1 / (2* eps) * np.log(1 + 2 * eps) - (1 + 3 * eps) / (1 + 2 * eps)**2 )
# sigma = 2 * np.pi * r**2 * ((1 + eps) / eps**2 * ((2 * (1 + eps) / (1 + 2 * eps) - 1 / eps * np.log(1+2*eps) ) + 1/(2*eps)*np.log(1+2*eps) - (1+3*eps)/(1+2*eps)**2 ))
# muComB = ZB * Nl * rhoB / MB * sigma
# muComE = ZE * Nl * rhoE / ME * sigma
# print('\nTheorie\nsigma: ',sigma,'\nmuB: ',muComB,'\nmuE: ',muComE)
MB = 207.2 # g/mol; https://www.chemie.de/lexikon/Blei.html
ME = 55.845 # g/mol; https://www.chemie.de/lexikon/Eisen.html
muB = ZB * con.N_A * rhoB * sig / MB # Equation 2 in the theory section
muE = ZE * con.N_A * rhoE * sig / ME
print('\nTheorie\nsigma: ', sig, '\nmuB: ', muB, ' 1/m', '\nmuE: ', muE, ' 1/m')
##################### DISCUSSION ###############################################
EMAX_THEO = ufloat(0.295062, 0.001133) # MeV
DE = (EMAX - EMAX_THEO) / EMAX_THEO
print('Abweichung EMAX = ', DE*100, '%')
muBex = ufloat(-paramsb[0], errb[0])
muEex = ufloat(-paramse[0], erre[0])
dMb = (muBex - muB) / muB
dMe = (muEex - muE) / muE
print('Abweichung muB = ', dMb *100)
print('Abweichung muE = ', dMe *100)
|
from django.conf.urls import patterns, url
from bank import views
from helper_functions import PionerAutocomplete
urlpatterns = patterns('',
url(r'^$', views.index, name='index'),
url(r'^logout/$', 'django.contrib.auth.views.logout_then_login', name='logout'),
url(r'^login/$', 'django.contrib.auth.views.login', {'template_name': 'bank/login.html',},
name='login'),
url(r'^my_trans/$', views.show_my_trans, name='my_trans'),
url(r'^my_att/$', views.show_my_att, name='my_att'),
url(r'^all_acc$', views.all_pioner_accounts, name='all_acc'),
url(r'^all_acc_ped$', views.all_ped_accounts, name='all_acc_ped'),
url(r'^add_trans/special/$', views.add_special, name='add_special'),
url(r'^add_trans/mass_special/$', views.add_mass_special, name='add_mass_special'),
url(r'^add_trans/zaryadka/(?P<meta_link_pk>[0-9]+)', views.add_zaryadka, name='add_zaryadka'),
url(r'^add_trans/zaryadka/', views.add_zaryadka, name='add_zaryadka'),
url(r'^add_trans/sem/', views.add_sem, name='add_sem'),
url(r'^add_trans/p2p/', views.add_p2p, name='add_p2p'),
url(r'^add_trans/fac/', views.add_fac, name='add_fac'),
url(r'^add_trans/lab/', views.add_lab, name='add_lab'),
url(r'^add_trans/activity/', views.add_activity, name='add_activity'),
url(r'^add_trans/fine/', views.add_fine, name='add_fine'),
url(r'^add_trans/lec/', views.add_lec, name='add_lec'),
url(r'^add_trans/fac_att/', views.add_fac_att, name='add_fac_att'),
url(r'^add_trans/exam/(?P<meta_link_pk>[0-9]+)', views.add_exam, name='add_exam'),
url(r'^add_trans/exam/', views.add_exam, name='add_exam'),
url(r'^dec_trans/(?P<trans_id>[0-9]+)/$', views.dec_trans, name='trans_dec'),
url(r'^dec_trans_ok/(?P<trans_id>[0-9]+)/$', views.dec_trans_ok, name='trans_dec_ok'),
url(r'^trans_list/(?P<username>.+)/$', views.trans_list, name='trans_list'),
url(r'^meta_list/(?P<trans_id>.+)/$', views.meta_list, name='meta_list'),
url(r'^trans_red/(?P<trans_id>.+)/$', views.trans_red, name='trans_red'),
url(r'manage_p2p', views.manage_p2p, name='manage_p2p'),
url(r'^pioner-autocomplete/$', PionerAutocomplete.as_view(), name='pioner-autocomplete'),
url(r'^super_table/$', views.super_table, name='super_table'),
url(r'^media/$', views.media, name='media'),
)
|
from django.shortcuts import render, redirect
def index(request):
if 'nr_submits' not in request.session:
request.session['nr_submits'] = 0
return render( request, "survey_form/index.html" )
def submit(request):
if request.method == "POST":
        print(request.POST)
request.session['nr_submits'] += 1
request.session['name'] = request.POST['name'][:]
request.session['location'] = request.POST['location'][:]
request.session['fav_lang'] = request.POST['fav_lang'][:]
request.session['comments'] = request.POST['comments'][:]
return redirect ( '/result' )
else:
return redirect ( '/' )
def result(request):
return render( request, "survey_form/result.html" )
|
import sys
sys.path.append('../../python')
import caffe
from caffe import surgery, score
import numpy as np
import os
weights = sys.argv[1]
caffe.set_phase_train()
caffe.set_mode_gpu()
caffe.set_device(1)
solver = caffe.SGDSolver('solver.prototxt')
solver.net.copy_from(weights)
score.boundary_eval(solver, ('bsds', 'val'), layer='prob')
|
def max_product(lst, n_largest_elements):
output = 1
lst.sort()
lst.reverse()
nums = []
for i in range(0, n_largest_elements):
nums.append(lst[i])
for i in nums:
output *= i
return output
print(max_product([4, 3, 5], 2))
|
import time
# DEVELOPER: https://github.com/undefinedvalue0103/nullcore-1.0/
vk = None
utils = None
config = None
logging = None
root = None
def handle(message):
act = message['action']
return ''
|
class GameStats():
"""Track statistics for Cha vs. Krtek"""
def __init__(self, ai_settings):
"""Initialize statistics."""
self.ai_settings = ai_settings
self.reset_stats()
# Start Cha vs. Krtek in an inactive state.
self.game_active = False
def reset_stats(self):
"""Initialize statistics that can reset during the game."""
self.chas_left = self.ai_settings.cha_limit
|
import os
NEBULOUSLABS_GIT_BASEURL = 'https://gitlab.com/NebulousLabs/'
SIACOINCLASSIC_GIT_BASEURL = 'git@github.com:SiacoinClassic/'
REPOSITORYS_DIR = os.path.join(os.getcwd(), 'repositorys')
DEPENDENCIES = {
'demotemutex',
'fastrand',
'merkletree',
'bolt',
'entropy-mnemonics',
'errors',
'go-upnp',
'ratelimit',
'threadgroup',
'writeaheadlog',
'glyphcheck'
}
|
import numpy as np
value = np.random.randint(0,100, 10)
print(value)
condition = value %2 == 0
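# element-wise comparison yields a boolean mask; indexing with it keeps only the even values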
print(condition)
print(value[condition])
|
from flask import Flask, render_template, url_for, request, redirect
from flask_table import Table, Col
from flask_mysqldb import MySQL
import mysql.connector
import functools
import datetime
# import yaml
app = Flask(__name__)
#Configuring DB
# db = yaml.load(open('db.yaml'), Loader=yaml.FullLoader )
app.config['MYSQL_HOST'] = 'localhost'
app.config['MYSQL_USER'] = 'root'
app.config['MYSQL_PASSWORD'] = 'admin'
app.config['MYSQL_DB'] = 'questionbank'
mysql = MySQL(app)
# app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///test.db'
# db = SQLAlchemy(app)
# class Todo(db.Model):
# id = db.Column(db.integer, )
@app.route('/')
def index():
return render_template('index.html')
# count = cur.execute("SELECT MAX(id) FROM questions")
# count = cur.fetchone()
# count = functools.reduce(lambda sub, ele: sub * 10 + ele, count)
    # return 'str(results)' #the str is used for integer values
@app.route('/generate', methods=['POST', 'GET'])
def generate():
cur = mysql.connection.cursor()
if request.method == 'POST':
try:
mail = request.form['mail']
subject = request.form['subject']
file1 = open('output.txt', 'r', encoding="utf8")
Lines = file1.readlines()
count = 0
count = cur.execute("SELECT MAX(id) FROM questions")
count = cur.fetchone()
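            # fetchone() returns a 1-tuple such as (42,); reduce() over a single-element
            # sequence returns that element unchanged, so this collapses the tuple to the bare max id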
count = functools.reduce(lambda sub, ele: sub * 10 + ele, count)
print(count)
if(count==None):
count=0
count = int(count)+1
print(count)
# Strips the newline character
for line in Lines:
print(line.strip())
currentDT = datetime.datetime.now()
#mycursor.execute("insert into questions(id,subject,questions,doc,faculty)values(count,'general',line.strip(),currentDT,'salehin@gmail.com');")
query = "insert into questions(id,subject,questions,doc,faculty)values (%s,%s,%s,%s,%s)"
cur.execute(query, (count,subject,line.strip(),currentDT,mail))
mysql.connection.commit() #commiting the push
print(count, line.strip())
count=count+1
except Exception as e:
print("This is the Error" + e)
return e
return render_template('generate.html');
else:
return render_template('generate.html')
@app.route('/about')
def about():
# return render_template('about.html')
# count = cur.execute("SELECT MAX(id) FROM questions")
# count = cur.fetchone()
# count = functools.reduce(lambda sub, ele: sub * 10 + ele, count)
return 'str(results)' #the str is used for integer values
@app.route('/input')
def input():
# return render_template('about.html')
# count = cur.execute("SELECT MAX(id) FROM questions")
# count = cur.fetchone()
# count = functools.reduce(lambda sub, ele: sub * 10 + ele, count)
return 'str(results)' #the str is used for integer values
@app.route('/show')
def show():
# return render_template('show.html')
cur = mysql.connection.cursor()
# count = cur.execute("SELECT MAX(id) FROM questions")
# count = cur.fetchone()
# count = functools.reduce(lambda sub, ele: sub * 10 + ele, count)
cur.execute('''SELECT * FROM questions''')
results = cur.fetchall()
cur.close()
print (results) #prints in console
#return results
return render_template('show.html', output = results) #the str is used for integer values
if __name__== "__main__":
app.run(debug=True)
|
import os
from player import Player
from actions import Actions
from data import Data
class Game:
def __init__(self, players_turn, feature_length, label_length):
self.players_turn = players_turn
self.game_over = False
self.user = Player('user')
self.opponent = Player('opponent')
self.game_actions = Actions()
self.player_training_data = Data(feature_length, label_length)
self.opponent_training_data = Data(feature_length, label_length)
def int_try_parse(self, value):
try:
return int(value), True
except ValueError:
return value, False
def run(self, opponents_action):
if self.players_turn:
self.user.print_health()
self.game_actions.display_player_actions(self.user)
print('5. Exit')
user_input = input('Action (1-5)')
players_action, is_valid = self.int_try_parse(user_input)
os.system('cls')
if is_valid and players_action > 0 and players_action <= 5:
if players_action == 5:
self.game_over = True
else:
self.player_training_data.record(players_action, self.user, self.opponent, True)
self.game_actions.perfrom(self.user, self.opponent, players_action)
self.game_actions.display_player_chosen_action(self.user, players_action)
self.players_turn = False
else:
print('Please enter a valid option from 1-5')
else: #AI's turn
#print('opponent\'s choice number: {}'.format(opponents_action))
self.opponent_training_data.record(opponents_action, self.user, self.opponent, False)
self.opponent.print_health()
self.game_actions.display_ai_chosen_action(self.opponent, opponents_action)
self.game_actions.perfrom(self.opponent, self.user, opponents_action)
self.players_turn = True
if self.user.alive is False or self.opponent.alive is False:
os.system('cls')
self.game_over = True
if self.user.alive is False:
print('You lost')
return False
#return True, self.players_turn, self.user, self.opponent, self.opponent_training_data
else:
print('You Won')
return True
#return True, self.players_turn, self.user, self.opponent, self.player_training_data
return None
#return self.game_over, self.players_turn, self.user, self.opponent, None
|
from config.VarConfig import iePath, chromePath
from util.DirAndTime import DirAndTime
from util.ObjectMap import get_element
from util.WaitUntil import WaitUnit
from selenium import webdriver
driver = None
waitUtil = None
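# Module-level WebDriver and wait helper: both are initialized by open_browser()
# and then shared by the keyword-style helper functions below.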
# Open the browser
def open_browser(browser):
global driver, waitUtil
try:
if browser.lower() == 'ie':
#driver = webdriver.Ie(executable_path=iePath)
driver = webdriver.Ie()
elif browser.lower() == 'chrome':
driver = webdriver.Chrome(executable_path=chromePath)
#driver = webdriver.Chrome()
else:
# driver = webdriver.Firefox(executable_path=fireFox)
driver = webdriver.Firefox()
except Exception as e:
raise e
else:
        waitUtil = WaitUnit(driver)  # after the driver is created, create the wait helper instance
# Maximize the browser window
def maximize_browser():
try:
driver.maximize_window()
except Exception as e:
raise e
# Load a URL
def load_url(url):
try:
driver.get(url)
except Exception as e:
raise e
# Hard wait (sleep for a fixed number of seconds)
def sleep(num):
try:
import time
time.sleep(num)
except Exception as e:
raise e
# Clear the contents of an input box
def clear(by, locator):
try:
get_element(driver, by, locator).clear()
except Exception as e:
raise e
# Type a value into an input box
def input_value(by, locator, value):
try:
element = get_element(driver, by, locator)
# element.click()
element.send_keys(value)
except Exception as e:
raise e
# Click action
def click_btn(by, locator):
try:
get_element(driver, by, locator).click()
except Exception as e:
raise e
# Assert the page title
def assert_title(title):
try:
assert title in driver.title, "%s not found in title!" % title
except AssertionError as e:
raise AssertionError(e)
except Exception as e:
raise e
# Assert that the target string is contained in the page source
def assert_string_in_page_source(string):
try:
assert string in driver.page_source, "%s not found in page source!" % string
except AssertionError as e:
raise AssertionError(e)
except Exception as e:
raise e
def assert_error_info(by, locator, string):
element = get_element(driver, by, locator)
text = element.text
assert text == string
# Get the title of the current page
def get_title():
try:
return driver.title
except Exception as e:
raise e
# Get the page source
def get_page_source():
try:
return driver.page_source
except Exception as e:
raise e
# Switch into a frame
def switch_to_frame(by, locator):
try:
driver.switch_to.frame(get_element(driver, by, locator))
except Exception as e:
raise e
# Switch back to the default content
def switch_to_default():
try:
driver.switch_to.default_content()
except Exception as e:
raise e
# Take a screenshot
def save_screen_shot():
picture_name = DirAndTime.create_picture_path() + '\\' + DirAndTime.get_current_time() + '.png'
try:
driver.get_screenshot_as_file(picture_name)
except Exception as e:
raise e
else:
return picture_name
def wait_presence_of_element_located(by, locator):
"""显示等待页面元素出现在DOM中,单并不一定可见"""
waitUtil.presence_of_element_located(by, locator)
def wait_frame_to_be_available_and_switch_to_it(by, locator):
"""检查frame是否存在,存在就切换到frame中"""
waitUtil.frame_to_be_available_and_switch_to_it(by, locator)
def wait_visibility_of_element_located(by, locator):
"""显示等待页面元素出现在DOM中,并且可见"""
waitUtil.visibility_of_element_located(by, locator)
# Close the browser
def quit_browser():
try:
driver.quit()
except Exception as e:
raise e
if __name__ == '__main__':
open_browser('chrome')
load_url('http://www.baidu.com')
# inputValue('id', 'kw','python')
# clear('id', 'kw')
# inputValue('id', 'kw', 'python')
# clickBtn('id', 'su')
# sleep(3)
# title = getTitle()
# print(title)
# assertTitle('python')
# assert_string_in_page_source('python')
    input_value('id', 'kw', 'python')
|
"""
Time Complexity: O(logN)
"""
"""
Recursive Binary Search
"""
def recursiveBinarySearch(inputArray, low, high, toSearch):
if high >= low:
mid = (high + low) // 2
if inputArray[mid] == toSearch:
return mid
        elif inputArray[mid] > toSearch:
return recursiveBinarySearch(inputArray, low, mid - 1, toSearch)
else:
            return recursiveBinarySearch(inputArray, mid + 1, high, toSearch)
else:
return -1
"""
Iterative Binary Search
"""
def iterativeBinarySearch(inputArray, toSearch):
low = 0
high = len(inputArray) - 1
mid = 0
while low <= high:
mid = (high + low) // 2
if inputArray[mid] < toSearch:
low = mid + 1
elif inputArray[mid] > toSearch:
high = mid - 1
else:
return mid
return -1
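# Minimal usage sketch (illustrative only): both searches assume the input list is
# already sorted in ascending order and return the index of the match, or -1 if absent.
if __name__ == '__main__':
    data = [2, 3, 4, 10, 40]
    print(recursiveBinarySearch(data, 0, len(data) - 1, 10))  # -> 3
    print(iterativeBinarySearch(data, 10))                    # -> 3
    print(iterativeBinarySearch(data, 5))                     # -> -1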
|
from pdfminer.pdfdocument import PDFDocument, PDFTextExtractionNotAllowed
from pdfminer.pdfparser import PDFParser
from pdfminer.pdfinterp import PDFResourceManager, PDFPageInterpreter
from pdfminer.converter import PDFPageAggregator
from pdfminer.layout import LTTextBoxHorizontal, LAParams, LTTextLineHorizontal, LTFigure, LTRect, LTLine, LTCurve
# file object
pd_file = open("extract/国家医疗保障DRG(CHS-DRG)分组方案.pdf", "rb")
# PDF parser object
parser = PDFParser(pd_file)
# print(parser)
# PDF document object
document = PDFDocument(parser)
parser.set_document(document)
document.set_parser(parser)
# initialize the document password
document.initialize()
if document.is_extractable:
print(True)
else:
raise PDFTextExtractionNotAllowed
# stores shared document resources
src = PDFResourceManager()
# device object
device = PDFPageAggregator(src, laparams=LAParams())
# interpreter object
inter = PDFPageInterpreter(src, device)
pages = document.get_pages()
for page in pages:
# print(page.contents)
inter.process_page(page)
layout = device.get_result()
for x in layout:
if isinstance(x, LTTextBoxHorizontal):
print(str(x.get_text()))
# t = dir(x)
# print(t)
# print(type(x))
|
from __future__ import absolute_import
import apache_beam as beam
import os, datetime
from apache_beam import pvalue
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.io import ReadFromText
from apache_beam.io import WriteToText
# extracting song titles
class ExtractFn(beam.DoFn):
def process(self, element):
name = element
title = name.get('Title')
return [(title,1)]
# summing song appearances on Billboard charts
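# after 'Group by song' each element looks like ('Song Title', [1, 1, ...]); counting the 1s gives the number of chart appearances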
class SumBillboardFn(beam.DoFn):
def process(self, element):
name, counts_obj = element
counts = list(counts_obj)
sum_counts = len(counts)
return [(name, sum_counts)]
# creating BQ record
class MakeBQRecordFn(beam.DoFn):
def process(self, element):
name, total_appearances = element
record = {'Title' : name, 'Years_on_chart' : total_appearances}
return [record]
PROJECT_ID = 'coherent-server-252621'
BUCKET = 'gs://shawnrussell2019-imdb'
DIR_PATH_IN = BUCKET + '/input/'
DIR_PATH_OUT = BUCKET + '/output/' + datetime.datetime.now().strftime('%Y_%m_%d_%H_%M_%S') + '/'
# Project ID is needed for BigQuery data source, even for local execution.
options = {
'runner': 'DataflowRunner',
'job_name': 'year-cluster',
'project': PROJECT_ID,
'temp_location': BUCKET + '/temp',
'staging_location': BUCKET + '/staging',
}
opts = beam.pipeline.PipelineOptions(flags=[], **options)
# Create a Pipeline using a local runner for execution.
with beam.Pipeline('DataflowRunner', options=opts) as p:
# create a PCollection from the file contents.
in_pcoll = p | 'Read from BigQuery' >> beam.io.Read(beam.io.BigQuerySource(query='SELECT Title from billboard_modeled.Yearly_Ratings'))
# write PCollection to log file
in_pcoll | 'Write to input' >> WriteToText('input.txt')
# apply a ParDo to the PCollection
extract_pcoll = in_pcoll | 'Extract songs' >> beam.ParDo(ExtractFn())
# write PCollections to files
extract_pcoll | 'Write to extract' >> WriteToText('extract.txt')
# apply GroupByKey
grouped_pcoll = extract_pcoll | 'Group by song' >> beam.GroupByKey()
# write PCollections to files
grouped_pcoll | 'Write to grouped' >> WriteToText('grouped.txt')
# Sum number of appearances in Billboard charts
summed_pcoll = grouped_pcoll | 'Sum up Billboard appearances' >> beam.ParDo(SumBillboardFn())
# write PCollections to files
summed_pcoll | 'Write to summed' >> WriteToText('summed.txt')
# make BQ records
bq_summed_pcoll = summed_pcoll | 'Make BQ Record' >> beam.ParDo(MakeBQRecordFn())
# write BQ records to files
bq_summed_pcoll | 'Write BQ records' >> WriteToText('bq_records.txt')
title_table_name = PROJECT_ID + ':Yearly_Ratings_workflow_modeled.Years_Charting'
table_schema = 'Title:STRING,Years_on_chart:INTEGER'
# write Pcoll to BQ tables
bq_summed_pcoll | 'Write Years Charting table' >> beam.io.Write(beam.io.BigQuerySink(title_table_name,
schema=table_schema,
create_disposition=beam.io.BigQueryDisposition.CREATE_IF_NEEDED,
write_disposition=beam.io.BigQueryDisposition.WRITE_TRUNCATE))
|
age = 17
if age==0:
print("zero")
elif age==1:
print("one")
else:
print("old")
|
import numpy as np
import cv2
# from keras.preprocessing.image import ImageDataGenerator
from keras.utils.data_utils import Sequence
from imgaug import augmenters as iaa
import random
import utils
import glob
class OneShotTrainingSequence(Sequence):
def __init__(self, image_dir, mask_dir, da=False, batch_size=1, image_width=64, image_height=64):
self.image_width = image_width
self.image_height = image_height
self.da = da
self.image_list, self.mask_list, self.file_name_list = \
self.load_image_and_mask(image_dir, mask_dir)
self.batch_size = batch_size
self.channel_number = 3
self.seq = iaa.Sequential([
iaa.Add((-10, 10))])
def load_image_and_mask(self, image_dir, mask_dir):
image_list = []
mask_list = []
file_name_list = []
for img_name in glob.glob(image_dir + '/*.png'):
pure_name = img_name.split('/')[-1]
pure_name = pure_name.split('.')[0]
file_name_list.append(pure_name)
im = cv2.imread(img_name, cv2.IMREAD_COLOR)
im = cv2.resize(im, (self.image_width, self.image_height))
# aug_image = cv2.equalizeHist(im_gray)
aug_image = im
image_list.append(aug_image)
mask = cv2.imread(mask_dir + '/{}.png'.format(pure_name), cv2.IMREAD_GRAYSCALE)
mask = cv2.resize(mask, (self.image_height, self.image_width), interpolation=cv2.INTER_NEAREST)
mask_list.append(mask)
return np.asarray(image_list), np.asarray(mask_list), file_name_list
def __len__(self):
return self.image_list.shape[0]#int(self.image_list.shape[0] / float(self.batch_size))
def __getitem__(self, idx):
batch_x = np.zeros(shape=(self.batch_size, self.image_width, self.image_height, 3), dtype=np.float32)
batch_y = np.zeros(shape=(self.batch_size, self.image_width, self.image_height, 1), dtype=np.uint8)
for i in range(self.batch_size):
if self.da:
idx = random.randint(0,self.image_list.shape[0]-1)
#augmentation
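                # note: random.random() is in [0, 1), so the "< -0.7" and "< -0.5" tests below can
                # never be True (angle and scale are therefore always randomized) and "> -0.5" is
                # always True (the shear terms stay zero); the alternative branches are effectively disabled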
if random.random() < -0.7:
angle = 0
else:
#angle = random.uniform(-25, 25)
angle = random.uniform(-10, 10)
if random.random() < -0.5:
scale = 1.0
else:
#scale = random.uniform(0.9, 1.1)
scale = random.uniform(0.95, 1.05)
tmp_rotation_matrix = cv2.getRotationMatrix2D((self.image_width/2, self.image_height/2),
angle=angle, scale=scale)
rotation_matrix = np.eye(3, dtype=np.float32)
rotation_matrix[0:2, :] = tmp_rotation_matrix
shearing_matrix = np.eye(3, dtype=np.float32)
if random.random() > -0.5:
shearing_matrix[0,1] = 0.0
shearing_matrix[1,0] = 0.0
else:
shearing_matrix[0,1] = random.uniform(-0.005, 0.005)
shearing_matrix[1,0] = random.uniform(-0.005, 0.005)
translation_matrix = np.eye(3, dtype=np.float32)
translation_matrix[0,2] = random.randint(-10, 10)
translation_matrix[1,2] = random.randint(-10, 10)
transform_matrix = np.matmul(translation_matrix, np.matmul(shearing_matrix, rotation_matrix))
transformed_image = cv2.warpPerspective(self.image_list[idx], transform_matrix, (self.image_width, self.image_height),\
flags=cv2.INTER_LINEAR, borderValue = (255,255,255))
transformed_mask = np.zeros((self.image_height, self.image_width), dtype = np.uint8)
temp_mask = cv2.warpPerspective(self.mask_list[idx], transform_matrix, (self.image_width, self.image_height),\
flags=cv2.INTER_NEAREST, borderValue = (0))
transformed_mask[temp_mask>100] = 255
##aug_image = self.seq.augment_image(transformed_image)
aug_image = transformed_image
#show_mask = utils.drawMultiRegionMultiChannel(transformed_mask)
##aug_image = cv2.equalizeHist(aug_image)
#cv2.imwrite('../data/augmentation/{}_img.png'.format(i), aug_image)
#cv2.imwrite('../data/augmentation/{}_mask.png'.format(i), show_mask)
batch_x[i] = aug_image
batch_y[i,:,:,0] = transformed_mask
#batch_x[i] = self.image_list[idx]
#batch_y[i,:,:,0] = self.mask_list[idx]
else:
batch_x[i] = self.image_list[idx]
batch_y[i,:,:,0] = self.mask_list[idx]
batch_x = batch_x/255.0
batch_y[batch_y<100] = 0
batch_y[batch_y>=100] = 1
return batch_x, batch_y
|
#! /usr/local/bin/python
import json
import netCDF4 as nc
import os
files_dir = '/Users/michaesm/Downloads/test/'
def read_json(open_file, var_name):
qp = nc.chartostring((open_file.variables[var_name][:]))
try:
parsed_json = json.loads(qp[0])
return parsed_json
except:
parsed_json = 'n/a'
return parsed_json
# def print_provenance(json_obj, sp_len):
# # get json dictionary keys
# # keys = json_obj.keys()
#
# for key in json_obj.keys():
# print key + ": " + str(json_obj[key]) + "\n"
def parse_json_response(content):
for key, value in content.iteritems():
print str(key) + ": " + str(value)
if type(value) is dict:
parse_json_response(value)
for root, dirs, files in os.walk(files_dir):
for f in files:
if f.endswith(".nc"):
nc_file = os.path.join(root,f)
# print ncFile
print "Printing provenance information for " + nc_file
print "key : Value"
file_load = nc.Dataset(nc_file)
prov_list = [s for s in file_load.variables if 'provenance' in s]
            prov_list = [s for s in prov_list if 'keys' not in s]
for var in prov_list:
p_json = read_json(file_load, var)
outfile = open(nc_file + "-" + var + ".json", "w")
json.dump(p_json, outfile)
outfile.close()
# if p_json == 'n/a':
# continue
# else:
# parse_json_response(p_json)
# json.dump(p_json)
# cp = nc.chartostring(f.variables['computed_provenance'][:])
# cp_parsed_json = json.loads(cp[0])
# cpK = cp_parsed_json.keys()
#
# ip = nc.chartostring(f.variables['instrument_provenance'][:])
# ip_parsed_json = json.loads(ip[0])
# ipK = ip_parsed_json.keys()
|
from queries import DELETE_USER, READ_USER, UPDATE_USER
from queries import INSERT_INTO_DATABASE
from flask import Flask,render_template,request,redirect
from queries import *
from decouple import config
from flask_mysqldb import MySQL
from dotenv import load_dotenv
load_dotenv()
import MySQLdb
import os
#app = Flask(__name__)
#app.secret_key = 'ashwinikumar'
#mysql = MySQL(app)
def create_user(username, password, email, confirmemail, uniqueid):
db = MySQLdb.connect(os.getenv('MYSQL_HOST'),os.getenv('MYSQL_USER'),os.getenv('MYSQL_PASSWORD'),os.getenv('MYSQL_DB') )
cursor = db.cursor()
print("inside create messsage")
cursor.execute( INSERT_INTO_DATABASE, (username,password,email,confirmemail,uniqueid))
db.commit()
def read_user(uniqueid):
db = MySQLdb.connect(os.getenv('MYSQL_HOST'),os.getenv('MYSQL_USER'),os.getenv('MYSQL_PASSWORD'),os.getenv('MYSQL_DB') )
cursor = db.cursor()
cursor.execute( READ_USER , (uniqueid, ))
account = cursor.fetchone()
return account
def delete_user(uniqueid):
db = MySQLdb.connect(os.getenv('MYSQL_HOST'),os.getenv('MYSQL_USER'),os.getenv('MYSQL_PASSWORD'),os.getenv('MYSQL_DB') )
cursor = db.cursor()
cursor.execute( DELETE_USER, (uniqueid, ))
db.commit()
def update_user(username, password, email, confirmemail, uniqueid):
db = MySQLdb.connect(os.getenv('MYSQL_HOST'),os.getenv('MYSQL_USER'),os.getenv('MYSQL_PASSWORD'),os.getenv('MYSQL_DB') )
cursor = db.cursor()
print("update given messsage")
cursor.execute( UPDATE_USER,(username,password,email,confirmemail,uniqueid,))
db.commit()
|
import json
import os
import sys
import pandas as pd
from PyQt5.QtCore import (QAbstractTableModel, QRegExp, QSortFilterProxyModel,
Qt)
from PyQt5.QtGui import QPixmap
from PyQt5.QtWidgets import (QApplication, QComboBox, QFrame, QGroupBox,
QHBoxLayout, QLabel, QLineEdit, QListView,
QMainWindow, QMessageBox, QPushButton, QSpinBox,
QTabWidget, QVBoxLayout, QWidget)
import constants
from recommender import Recommender
from recommender import global_animes as animes
IMG_DIR = 'images/covers/'
user_ratings = {}
recommender = Recommender()
class PandasModel(QAbstractTableModel):
def __init__(self, data, parent=None):
QAbstractTableModel.__init__(self, parent)
self._data = data
def rowCount(self, parent=None):
return len(self._data.values)
def columnCount(self, parent=None):
return self._data.columns.size
def data(self, index, role=Qt.DisplayRole):
if index.isValid():
if role == Qt.DisplayRole:
return str(self._data.values[index.row()][index.column()])
elif role == Qt.UserRole:
return self._data.values[index.row()]
return None
class SearchGroupBox(QGroupBox):
def __init__(self, model, parent=None):
super(SearchGroupBox, self).__init__(parent)
self.setupUi()
self.proxyModel = QSortFilterProxyModel()
self.proxyModel.setSourceModel(model)
self.proxyModel.setFilterKeyColumn(1) # column 1 is anime name
self.listView.setModel(self.proxyModel)
self.listView.setModelColumn(1)
self.searchBox.textChanged.connect(self.filterList)
def setupUi(self):
self.setFixedWidth(600)
self.setTitle("Anime List")
self.searchBox = QLineEdit()
self.listView = QListView()
layout = QVBoxLayout()
layout.addWidget(self.searchBox)
layout.addWidget(self.listView)
self.setLayout(layout)
def filterList(self, text):
self.proxyModel.setFilterRegExp(
QRegExp(text, Qt.CaseInsensitive, QRegExp.Wildcard))
class CardGroupBox(QGroupBox):
COVER_WIDTH = 225
COVER_HEIGHT = 319
COVER_FACTOR = 1.2
def __init__(self, parent=None):
super(CardGroupBox, self).__init__(parent)
self.fixedGenreText = "Genre: "
self.fixedTypeText = "Type: "
self.fixedEpisodesText = "Episodes: "
self.fixedRatingText = "Rating: "
self.currentAnimeId = None
self.setupUi()
self.rateComboBox.currentIndexChanged.connect(self.changeButtonState)
self.ratePushButton.clicked.connect(self.rate)
self.rateComboBox.addItems([
'Select',
'(10) Masterpiece',
'(9) Great',
'(8) Very Good',
'(7) Good',
'(6) Fine',
'(5) Average',
'(4) Bad',
'(3) Very Bad',
'(2) Horrible',
'(1) Appalling',
])
def changeButtonState(self, comboBoxIndex):
self.ratePushButton.setEnabled(comboBoxIndex != 0)
def rate(self):
rating = int(self.rateComboBox.currentText()[1:].split(')')[0])
global user_ratings
user_ratings[self.currentAnimeId] = rating
okMessageBox = QMessageBox()
okMessageBox.setText(f"Thanks for rating {self.titleLabel.text()}!")
okMessageBox.exec()
def setupUi(self):
self.setFixedWidth(300)
self.setTitle("Info")
self.titleLabel = QLabel()
self.titleLabel.setStyleSheet("QLabel { font-size: 18px }")
self.titleLabel.setWordWrap(True)
self.cover = QLabel()
        self.cover.setFixedWidth(int(self.COVER_WIDTH // self.COVER_FACTOR))
        self.cover.setFixedHeight(int(self.COVER_HEIGHT // self.COVER_FACTOR))
self.genre = QLabel()
self.genre.setWordWrap(True)
self.type = QLabel()
self.type.setWordWrap(True)
self.episodes = QLabel()
self.episodes.setWordWrap(True)
self.rating = QLabel()
self.rateLabel = QLabel("Your rate: ")
self.rateComboBox = QComboBox()
self.ratePushButton = QPushButton("Rate!")
rateLayout = QHBoxLayout()
rateLayout.addWidget(self.rateLabel)
rateLayout.addWidget(self.rateComboBox)
rateLayout.addWidget(self.ratePushButton)
lines = []
for i in range(2):
line = QFrame()
line.setFrameShape(QFrame.HLine)
line.setFrameShadow(QFrame.Sunken)
lines.append(line)
layout = QVBoxLayout()
layout.addWidget(self.titleLabel)
layout.addWidget(lines[0])
layout.addWidget(self.cover)
layout.addWidget(self.genre)
layout.addWidget(self.type)
layout.addWidget(self.episodes)
layout.addWidget(self.rating, 1, Qt.AlignTop)
layout.addWidget(lines[1])
layout.addLayout(rateLayout)
self.setLayout(layout)
def setData(self, data):
if data is None:
return
self.currentAnimeId = str(data[0])
self.titleLabel.setText(data[1])
cover_path = os.path.join(IMG_DIR, f'{data[0]}.jpg')
cover_pixmap = QPixmap(cover_path)
        cover_pixmap = cover_pixmap.scaled(
            int(self.COVER_WIDTH // self.COVER_FACTOR),
            int(self.COVER_HEIGHT // self.COVER_FACTOR))
self.cover.setPixmap(cover_pixmap)
self.genre.setText(self.fixedGenreText + data[2])
self.type.setText(self.fixedTypeText + data[3])
self.episodes.setText(self.fixedEpisodesText + data[4])
self.rating.setText(self.fixedRatingText + str(data[5]))
global user_ratings
try:
self.rateComboBox.setCurrentIndex(
11 - user_ratings[self.currentAnimeId])
except Exception as e:
self.rateComboBox.setCurrentIndex(0)
print(e)
class RatingWidget(QWidget):
def __init__(self, model, parent=None):
super(RatingWidget, self).__init__(parent)
self.model = model
self.setupUi()
self.searchGroupBox.listView.selectionModel().currentChanged.connect(
self.selectItem)
self.searchGroupBox.listView.selectionModel().currentChanged.emit(
self.model.index(0, 0), self.model.index(0, 0))
def setupUi(self):
self.searchGroupBox = SearchGroupBox(self.model)
self.cardGroupBox = CardGroupBox()
layout = QHBoxLayout()
layout.addWidget(self.searchGroupBox)
layout.addWidget(self.cardGroupBox)
self.setLayout(layout)
def selectItem(self, index):
data = index.data(Qt.UserRole)
self.cardGroupBox.setData(data)
class RecommendationWidget(QWidget):
def __init__(self, parent=None):
super(RecommendationWidget, self).__init__(parent)
self.setupUi()
self.metricsComboBox.addItems([
constants.PEARSON,
constants.EUCLIDEAN,
constants.COSINE,
constants.MANHATTAN
])
self.knnSpinBox.setRange(2, 10)
self.knnSpinBox.setValue(3)
self.recommendPushButton.clicked.connect(self.recommend)
def recommend(self):
global recommender, user_ratings
recommender.new_user = user_ratings
print(recommender.new_user)
metric = self.metricsComboBox.currentText()
k = self.knnSpinBox.value()
recommendations = recommender.get_recommendation(metric, k)
recommendations = [r[1] for r in recommendations[:10]]
global animes
df = animes[animes['name'].isin(recommendations)]
self.ratingWidget.searchGroupBox.proxyModel.setSourceModel(PandasModel(df))
def setupUi(self):
df = pd.read_csv('anime.csv').sample(5)
self.metricsLabel = QLabel("Select the distance metric: ")
self.metricsComboBox = QComboBox()
self.knnLabel = QLabel("Select the number of neighboors: ")
self.knnSpinBox = QSpinBox()
self.recommendPushButton = QPushButton("Get your recommendation!")
recommendation_layout = QHBoxLayout()
recommendation_layout.addWidget(self.metricsLabel)
recommendation_layout.addWidget(self.metricsComboBox)
recommendation_layout.addWidget(self.knnLabel)
recommendation_layout.addWidget(self.knnSpinBox)
recommendation_layout.addWidget(self.recommendPushButton)
self.ratingWidget = RatingWidget(PandasModel(df))
layout = QVBoxLayout()
layout.addLayout(recommendation_layout)
layout.addWidget(self.ratingWidget)
self.setLayout(layout)
class MainWindow(QMainWindow):
def __init__(self, df, parent=None):
super(MainWindow, self).__init__(parent)
self.df = df
self.setupUi()
def setupUi(self):
self.setFixedHeight(600)
self.ratingWidget = RatingWidget(PandasModel(self.df))
self.recommendationWidget = RecommendationWidget()
self.tabWidget = QTabWidget()
self.tabWidget.addTab(self.ratingWidget, "Animes")
self.tabWidget.addTab(self.recommendationWidget, "Recommendations")
self.setCentralWidget(self.tabWidget)
def main():
df = pd.read_csv('anime.csv').head(100)
app = QApplication(sys.argv)
app.setApplicationName("Anime Recommender")
mainWindow = MainWindow(df)
mainWindow.show()
app.exec_()
if __name__ == '__main__':
main()
|
# Copyright 2023 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
import json
from collections import defaultdict
from dataclasses import dataclass
from pants.backend.python.subsystems.setup import PythonSetup
from pants.backend.python.target_types import InterpreterConstraintsField
from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints
from pants.backend.python.util_rules.pex_environment import PythonExecutable
from pants.base.specs import FileGlobSpec, RawSpecs
from pants.engine.fs import AddPrefix, CreateDigest, Digest, FileContent, MergeDigests
from pants.engine.internals.selectors import Get, MultiGet
from pants.engine.process import Process, ProcessResult
from pants.engine.rules import collect_rules, rule
from pants.engine.target import (
HydratedSources,
HydrateSourcesRequest,
SourcesField,
Target,
Targets,
)
from pants.util.frozendict import FrozenDict
from pants.util.resources import read_resource
@dataclass(frozen=True)
class DjangoApps:
label_to_name: FrozenDict[str, str]
def add_from_json(self, json_bytes: bytes) -> "DjangoApps":
apps = dict(self.label_to_name, **json.loads(json_bytes.decode()))
return DjangoApps(FrozenDict(sorted(apps.items())))
_script_resource = "scripts/app_detector.py"
@rule
async def detect_django_apps(python_setup: PythonSetup) -> DjangoApps:
# A Django app has a "name" - the full import path to the app ("path.to.myapp"),
# and a "label" - a short name, usually the last segment of the import path ("myapp").
#
# An app provides this information via a subclass of AppConfig, living in a
# file named apps.py. Django loads this information into an app registry at runtime.
#
# Some parts of Django, notably migrations, use the label to reference apps. So to do custom
# Django dep inference, we need to know the label -> name mapping.
#
# The only truly correct way to enumerate Django apps is to run the Django app registry code.
# However we can't do this until after dep inference has completed, and even then it would be
# complicated: we wouldn't know which settings.py to use, or whether it's safe to run Django
# against that settings.py. Instead, we do this statically via parsing the apps.py file.
#
# NB: Legacy Django apps may not have an apps.py, in which case the label is assumed to be
# the name of the app dir, but the recommendation for many years has been to have it, and
# the Django startapp tool creates it for you. If an app does not have such an apps.py,
# then we won't be able to infer deps on that app unless we find other ways of detecting it.
# We should only do that if that case turns out to be common, and for some reason users can't
# simply create an apps.py to fix the issue.
#
# NB: Right now we only detect first-party apps in repo. We assume that third-party apps will
# be dep-inferred as a whole via the full package path in settings.py anyway.
# In the future we may find a way to map third-party apps here as well.
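    #
    # For illustration only (hypothetical app, not from this repo): an apps.py containing
    #
    #     from django.apps import AppConfig
    #
    #     class MyAppConfig(AppConfig):
    #         name = "path.to.myapp"
    #         label = "myapp"
    #
    # would contribute {"myapp": "path.to.myapp"} to the mapping. If label is omitted,
    # Django defaults it to the last segment of name.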
django_apps = DjangoApps(FrozenDict())
targets = await Get(
Targets,
RawSpecs,
RawSpecs.create(
specs=[FileGlobSpec("**/apps.py")], description_of_origin="Django app detection"
),
)
if not targets:
return django_apps
script_file_content = FileContent(
"script/__visitor.py", read_resource(__name__, _script_resource)
)
script_digest = await Get(Digest, CreateDigest([script_file_content]))
apps_sandbox_prefix = "_apps_to_detect"
# Partition by ICs, so we can run the detector on the appropriate interpreter.
ics_to_tgts: dict[InterpreterConstraints, list[Target]] = defaultdict(list)
for tgt in targets:
ics = InterpreterConstraints(
tgt[InterpreterConstraintsField].value_or_global_default(python_setup)
)
ics_to_tgts[ics].append(tgt)
for ics, tgts in ics_to_tgts.items():
sources = await MultiGet( # noqa: PNT30: requires triage
[Get(HydratedSources, HydrateSourcesRequest(tgt[SourcesField])) for tgt in tgts]
)
apps_digest = await Get( # noqa: PNT30: requires triage
Digest, MergeDigests([src.snapshot.digest for src in sources])
)
prefixed_apps_digest = await Get( # noqa: PNT30: requires triage
Digest, AddPrefix(apps_digest, apps_sandbox_prefix)
)
input_digest = await Get( # noqa: PNT30: requires triage
Digest, MergeDigests([prefixed_apps_digest, script_digest])
)
python_interpreter = await Get( # noqa: PNT30: requires triage
PythonExecutable, InterpreterConstraints, ics
)
process_result = await Get( # noqa: PNT30: requires triage
ProcessResult,
Process(
argv=[
python_interpreter.path,
script_file_content.path,
apps_sandbox_prefix,
],
input_digest=input_digest,
description="Detect Django apps",
),
)
django_apps = django_apps.add_from_json(process_result.stdout or b"{}")
return django_apps
def rules():
return [
*collect_rules(),
]
|
from django.conf.urls import include
from django.conf.urls import url
from blog.views import *
from rest_framework.urlpatterns import format_suffix_patterns
from blog.myviews import *
from rest_framework.routers import DefaultRouter
from django.conf import settings
from blog.upload import upload_image
urlpatterns = [
url(r'^archive/$',archive, name='archive'),
url(r'^blog/$',index, name='index'),
url(r'^article/$',article,name='article'),
url(r'^comment_post/$',comment_post,name='comment_post'),
url(r'^login/$',do_login,name='login'),
url(r'^logout/$',do_logout,name='logout'),
url(r'^reg/$',do_reg,name='reg'),
url(r'^tag_article/$',tag_to_article,name='tag_article'),
url(r'^admin/upload/(?P<dir_name>[^/]+)$',upload_image,name='upload_image'),
url(r'^reply/$',reply,name='reply'),
]
urlpatterns = format_suffix_patterns(urlpatterns)
# serialization
router = DefaultRouter()
router.register(r'articles', ArticleViewSet)
router.register(r'users', UserViewSet)
router.register(r'comments',CommentViewSet)
router.register(r'tags',TagViewSet)
router.register(r'Categorys',CategoryViewSet)
urlpatterns += [
url(r'^api-auth/', include('rest_framework.urls',namespace='rest_framework')),
url(r'^api/',include(router.urls)),
url(r'^Articles/',ArticleList.as_view()),
]
if settings.DEBUG:
urlpatterns += [
url(r'^uploads/(?P<path>.*)$',\
'django.views.static.serve',\
{'document_root':settings.MEDIA_ROOT,}),
]
'''
article_list = ArticleViewSet.as_view({
'get': 'list',
'post': 'create'
})
article_detail = ArticleViewSet.as_view({
'get': 'retrieve',
'put': 'update',
'patch': 'partial_update',
'delete': 'destroy'
})
user_list = UserViewSet.as_view({
'get': 'list'
})
user_detail = UserViewSet.as_view({
'get': 'retrieve'
})
url(r'^articles/$',articlelist.as_view(),name='article-list'),
url(r'^articles/(?P<pk>(\d+))/$',articledetail.as_view(),name='article-detail'),
url(r'^users/$',UserList.as_view(),name='user-list'),
url(r'^users/(?P<pk>(\d+))/$',UserDetail.as_view(),name='user-detail'),
'''
|
import requests
import json
import csv
from time import sleep
url = "https://www.mcdonalds.com.cn/ajaxs/search_by_point"
headers = {
'Connection': 'Keep-Alive',
'Accept': '*/*',
'Accept-Language': 'zh-CN,zh;q=0.8',
'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.100 Safari/537.36',
'Accept-Encoding': 'gzip, deflate,br'
}
latlon = []
name = []
address = []
csv_file = csv.reader(open('mdl-latlonlist.csv', 'r', encoding='utf-8'))
for stu in csv_file:
latlon.append(stu)
f = open('mdl171124.csv', "w", encoding="utf-8")
try:
for lg in range(len(latlon)):
sleep(5)
response = requests.post(url, data={'point': latlon[lg]}, verify=False)
sleep(5)
print(response.text)
data = json.loads(response.text)
print(data)
s = data['datas']
print(s)
print(type(s))
print(len(s))
        if len(s) != 0:  # the response listed at least one store
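            # Note on the two passes below: the first pass writes a single header row from
            # the keys of the first store record and then breaks; the second pass writes
            # one data row per store.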
for i in s:
f.write('sellatlon')
f.write(',')
for k, v in i.items():
f.write(k)
f.write(',')
print(k)
print(v)
f.write('\n')
break
for i in s:
sellatlon = str(latlon[lg]).replace(',', ',')
f.write(sellatlon)
f.write(',')
for k, v in i.items():
v = str(v).replace(',', ',')
f.write(v)
f.write(',')
f.write('\n')
f.close()
except Exception as err:
    print('error:', err)
|
# Generated by Django 2.1.4 on 2018-12-13 01:32
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('UserProfile', '0002_auto_20181212_1731'),
]
operations = [
migrations.AlterModelTable(
name='profile',
table='UserProfile',
),
]
|
class ResourceNotFound(Exception):
description: str = "Occurs when a customer with a specific Id does not exist"
def __str__(self):
return "The requested resource not found"
|
import torch
import logging
import os
import io
import array
import six
from tqdm import tqdm
from torchtext.vocab import Vectors
logger = logging.getLogger("data")
class Crosslingual(Vectors):
def __init__(self, name, language='en', **kwargs):
self.name = name
self.language = language
super(Crosslingual, self).__init__(self.name, **kwargs)
def cache(self, name, cache=None, url=None, language='en', **kwargs):
path = name
if not os.path.isfile(path):
raise RuntimeError('no vectors found at {}'.format(name))
# str call is necessary for Python 2/3 compatibility, since
# argument must be Python 2 str (Python 3 bytes) or
# Python 3 str (Python 2 unicode)
itos, vectors, dim = [], array.array(str('d')), None
# Try to read the whole file with utf-8 encoding.
binary_lines = False
try:
with io.open(path, encoding="utf8") as f:
lines = [line for line in f]
# If there are malformed lines, read in binary mode
# and manually decode each word from utf-8
except BaseException:
logger.warning("Could not read {} as UTF8 file, "
"reading file as bytes and skipping "
"words with malformed UTF8.".format(path))
with open(path, 'rb') as f:
lines = [line for line in f]
binary_lines = True
logger.info("Loading vectors from {}".format(path))
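        # Each remaining line is assumed (from the parsing below) to look like
        # "<lang>:<word> v1 v2 ... vN", e.g. "en:dog 0.12 -0.05 ...", so only lines
        # whose language prefix matches self.language are kept.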
        lang_sep = b':' if binary_lines else ':'
        lang_prefix = self.language.encode('utf-8') if binary_lines else self.language
        lines = [line for line in lines if line.split(lang_sep)[0] == lang_prefix]
for line in tqdm(lines, unit_scale=True, miniters=1, desc=name):
# Explicitly splitting on " " is important, so we don't
# get rid of Unicode non-breaking spaces in the vectors.
entries = line.rstrip().split(b" " if binary_lines else " ")
            word, entries = entries[0].split(lang_sep)[-1], entries[1:]
if dim is None and len(entries) > 1:
dim = len(entries)
elif len(entries) == 1:
logger.warning(
"Skipping token {} with 1-dimensional "
"vector {}; likely a header".format(
word, entries))
continue
elif dim != len(entries):
raise RuntimeError(
"Vector for token {} has {} dimensions, but previously "
"read vectors have {} dimensions. All vectors must have "
"the same number of dimensions.".format(
word, len(entries), dim))
if binary_lines:
try:
if isinstance(word, six.binary_type):
word = word.decode('utf-8')
except BaseException:
logger.info(
"Skipping non-UTF8 token {}".format(repr(word)))
continue
vectors.extend(float(x) for x in entries)
itos.append(word)
self.itos = itos
self.stoi = {word: i for i, word in enumerate(itos)}
self.vectors = torch.Tensor(vectors).view(-1, dim)
self.dim = dim
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Oct 24 15:58:02 2017
@author: dgratz
"""
import numpy as np
from glob import glob
from readFile import readFile
import re
from ParameterSensitivity import ParamSensetivity
import matplotlib.pyplot as plt
from calcSync import calcTimeSync, calcSyncVarLen
cons = np.logspace(-4,-1,10)*3.87138
pvars = list(readFile('D:/synchrony-data/2SAN1RandLogNormal/0/cell_0_0_dss0_pvars.tsv').keys())
datadir = 'D:/synchrony-data/600Sims10Conns1Rand/'
filesPvars = glob(datadir+'*/*_pvars.tsv')
numData = 616
pvarsVals = [np.zeros((2,numData,len(pvars))) for con in range(len(cons))]
s = re.compile('/')
u = re.compile('_')
for file in filesPvars:
temp = readFile(file)
fnames = s.split(file)
uparts = u.split(fnames[-1])
(row,col) = tuple(map(lambda x: int(x),filter(lambda x: x.isdigit(),uparts)))
num = int(fnames[-2])
for i,pvar in enumerate(pvars):
pvarsVals[num//numData][col,num%numData,i] = temp[pvar]
props = ['vOld/peak','vOld/cl','vOld/min','caI/peak','caI/min',
'vOld/ddr']
filesProps = glob(datadir+'*/*dss0.tsv')
propsVals = [np.zeros((2,numData,len(props))) for con in range(len(cons))]
for file in filesProps:
temp = readFile(file)
fnames = s.split(file)
uparts = u.split(fnames[-1])
(row,col) = tuple(map(lambda x: int(x),filter(lambda x: x.isdigit(),uparts)))
num = int(fnames[-2])
for i,prop in enumerate(props):
propsVals[num//numData][col,num%numData,i] = temp['cell'+str(row)+'_'+str(col)+'/'+prop]
'''
Synchrony Params Sens
'''
filesProps = glob(datadir+'*/*dt0.tsv')
dtPropsVals = [np.zeros((1,2,numData,2),dtype='object') for con in range(len(cons))]
bad = [set() for con in range(len(cons))]
for file in filesProps:
temp = readFile(file)
fnames = s.split(file)
uparts = u.split(fnames[-1])
(row,col) = tuple(map(lambda x: int(x),filter(lambda x: x.isdigit(),uparts)))
num = int(fnames[-2])
dtPropsVals[num//numData][0,col,num%numData,0] = temp['cell'+str(row)+'_'+str(col)+'/vOld/peak']
dtPropsVals[num//numData][0,col,num%numData,1] = temp['cell'+str(row)+'_'+str(col)+'/vOld/maxt']
cls = temp['cell'+str(row)+'_'+str(col)+'/vOld/cl']
if np.std(cls[-10:-1]) > 100:
bad[num//numData].add(num%numData)
propsValsSync = [np.zeros((numData,2)) for con in range(len(cons))]
for i in range(numData):
for co in range(10):
times,syncT,syncV = calcSyncVarLen(dtPropsVals[co][:,:,i,0],dtPropsVals[co][:,:,i,1])
propsValsSync[co][i,0] = np.nanmean(syncT[-30:len(syncT)])
if np.isnan(propsValsSync[co][i,0]):
bad[co].add(i)
propsValsSync[co][i,1] = np.nanmean(syncV[-30:len(syncV)])
# if propsValsSync[i,0] > 50:
# bad.add(i)
syncs = ['syncT','syncV']
bad = list(bad)
for co in range(len(bad)):
pvarsVals[co] = np.delete(pvarsVals[co],list(bad[co]),axis=1)
propsValsSync[co] = np.delete(propsValsSync[co],list(bad[co]),axis=0)
propsVals[co] = np.delete(propsVals[co],list(bad[co]),axis=1)
coefs = np.zeros(shape=(10,len(pvars),len(props)))
for i in range(10):
coefs[i,:,:] = ParamSensetivity(pvarsVals[i][1,:,:], propsVals[i][1,:,:])
for pr in range(coefs.shape[2]):
plt.figure()
plt.xscale('log')
for pv in range(coefs.shape[1]):
plt.scatter(cons,coefs[:,pv,pr],label=pvars[pv])
plt.title(props[pr])
plt.legend()
syncCoefs = np.zeros(shape=(10,len(pvars),len(syncs)))
for co in range(10):
syncCoefs[co,:,:] = ParamSensetivity(pvarsVals[co][1,:,:],propsValsSync[co][:,:])
for sy in range(syncCoefs.shape[2]):
plt.figure()
plt.xscale('log')
for pv in range(syncCoefs.shape[1]):
plt.scatter(cons,syncCoefs[:,pv,sy],label=pvars[pv])
plt.title(syncs[sy])
plt.legend()
|
from itertools import permutations
skup = set()
for it in permutations('123456789', 9):
for i in xrange(1, 4):
for j in xrange(3,5):
a, b, c = int(''.join(it[0:i])), int(''.join(it[i:i+j])), int(''.join(it[i+j:]))
if a*b == c: skup.add(a*b)
print sum(skup)
|
from .BaseEditor import BaseEditor
from .ScrubSpinBox import IntScrubSpinBox, MinVal, MaxVal
from PyQt5 import QtCore
class IntegerEditor(BaseEditor):
def __init__(self, parent, item, model):
BaseEditor.__init__(self, parent, item, model)
self.spinBox = IntScrubSpinBox(self)
self.spinBox.setRange(MinVal, MaxVal)
self.spinBox.valueChanged.connect(self.__valueChanged)
self.layout().addWidget(self.spinBox)
def __valueChanged(self, val):
self.setModelData(self.model, self.item.index())
def setEditorData(self, index):
self.spinBox.blockSignals(True)
self.spinBox.setValue(int(self.getItemData()))
self.spinBox.blockSignals(False)
def setModelData(self, model, index):
model.setData(index, str(self.spinBox.value()), QtCore.Qt.EditRole)
|
import grpc
from grpcService import data_pb2, data_pb2_grpc
def run():
with grpc.insecure_channel('127.0.0.1:5000') as channel:
stub = data_pb2_grpc.LearnBoarStub(channel)
response_agent = stub.CreateAgent(data_pb2.AgentData(
env_shape = int(4),
num_actions = int(5)
))
for i in range(20):
response_action = stub.SendData(data_pb2.EnvData(
isAlive = True,
food_x = 0.0,
food_z = 0.0,
hp = 100.0,
satiety = 36.0,
reward = 1.
))
print(i)
if __name__ == "__main__":
run()
|
# Generated by Django 2.1.3 on 2019-02-25 19:31
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('api', '0010_variation_barcode'),
]
operations = [
migrations.AlterField(
model_name='variation',
name='image',
field=models.CharField(blank=True, default='', max_length=400),
),
migrations.AlterField(
model_name='variation',
name='price',
field=models.CharField(blank=True, default='', max_length=400),
),
]
|
#coding:utf8
import pylab as P
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
def carrega_dados(file_name):
with open(file_name) as f:
nomes = f.readline().strip().split(',')
dados = np.genfromtxt(file_name, delimiter=',', skip_header=1)
print nomes, dados.size
if __name__ == "__main__":
#carrega_dados("comida.csv")
df = pd.read_csv('comida.csv')
ax = df.plot(legend=False)
patches, labels = ax.get_legend_handles_labels()
ax.legend(patches, labels, loc='right')
plt.show()
|
from flask import Flask, render_template,request,abort
app = Flask(__name__)
@app.route('/',methods=["GET"])
def inicio():
datos=[
{"valor":1,"texto":"Windows"},
{"valor":2,"texto":"Linux"},
{"valor":3,"texto":"MacOs"}
]
seleccionado="Linux"
return render_template("inicio.html",datos=datos,seleccionado=seleccionado)
@app.route('/procesar',methods=["POST"])
def procesar():
return render_template("datos.html", datos=request.form)
app.run(debug=True)
|
from math import ceil
print('Loja de tintas\n')
area_a_ser_pintada = float(input('Informe o tamanho em metros quadrados da área a ser pintada: '))
um_litro_pinta = 3
quantidade_de_uma_lata = 18
preco_de_cada_lata = 80.00
litros_necessarios = area_a_ser_pintada / um_litro_pinta
latas_necessarias = int(ceil(litros_necessarios / quantidade_de_uma_lata))
valor_total = latas_necessarias * preco_de_cada_lata
print('Você vai precisar de {} latas de tinta a serem compradas'.format(latas_necessarias))
print('Preço total: R$ {:.2f}'.format(valor_total))
|
# -*- coding: utf-8 -*-
"""
Created on Mon Sep 4 15:40:18 2017
@author: zx621293
"""
import numpy as np
import matplotlib.pyplot as plt
from time import time
from sklearn import manifold

########################using t-SNE##################################
def myTSNE(X,label_refine,label,perplexity):
n = X.shape[0]
    if len(label_refine) != n:
        label_refine = [0]*n
        label = ['no ground truth']
        print('No ground truth provided in this dataset')
    label_refine = np.asarray(label_refine)  # needed for the boolean indexing below
    (n_sample,n_protein) = X.shape
l = len(perplexity)
k = len(label)
fig = plt.figure(figsize=(30,(5*l)))
plt.suptitle("t-SNE with accepted %i experiments, each with %i covariates. \nClasses: %s "
% (X.shape[0],X.shape[1],label), fontsize=24)
number = np.linspace(1,l,l)
YY = list()
for i,perp in zip(number,perplexity):
t0 = time()
tsne = manifold.TSNE(n_components=2, init='pca', random_state=0, perplexity=perp)
Y = tsne.fit_transform(X)
YY.append(Y)
t1 = time()
print("t-SNE with perpexity %2.f: %.2g sec" % (perp,t1 - t0))
fig.add_subplot(l/2,2,i)
for j,lab in zip(np.linspace(0,k-1,k),label):
plt.scatter(Y[label_refine==j, 0], Y[label_refine==j, 1],cmap=plt.cm.Spectral,label=lab)
plt.title("t-SNE with perpexity %.2f (%.2g sec)" % (perp,t1 - t0))
plt.legend(loc=4)
plt.axis()
plt.show()
    return YY  # list of 2-D t-SNE embeddings, one per perplexity value
|
# -*- coding: utf-8 -*-
"""
ytelapi
This file was automatically generated by APIMATIC v2.0 ( https://apimatic.io ).
"""
class AudioDirectionEnum(object):
"""Implementation of the 'AudioDirection' enum.
The direction the audio effect should be placed on. If IN, the effects
will occur on the incoming audio stream. If OUT, the effects will occur on
the outgoing audio stream.
Attributes:
        ENUM_IN: Apply the audio effect to the incoming audio stream.
        OUT: Apply the audio effect to the outgoing audio stream.
"""
ENUM_IN = 'in'
OUT = 'out'
|
import sys
import numpy as np
a=[]
with open(sys.argv[1], 'r') as f:
for line in f:
cols = line.split()
if len(cols) == 4 or len(cols) == 3:
a.append(float(cols[2]))
a=np.asarray(a)
a*=1e-3 # convert to milliseconds
print(f'avg={np.average(a):.4f} max={np.max(a):.4f} min={np.min(a):.4f} std={np.std(a):.4f}')
|
from resource import getrusage,RUSAGE_SELF
def array_test():
test = {}
test['1'] = ['A']
#test['1'] = 'A'
test['2'] = 'B'
test['1'].append('C')
print (test)
print (getrusage(RUSAGE_SELF).ru_maxrss)
if __name__ == "__main__":
import sys
array_test()
|
from router_solver import *
import compilador.objects.symbol
from compilador.objects.symbol import *
# MEMORY SEGMENT CLASS
# Object that stores symbols in a single memory segment
class MemorySegment(object):
####################### INITS #######################
def __init__(self, name, size, initial_position):
        self.name = name # Name of this memory instance
        self.size = size # Size of this memory instance
        self.__memory = dict() # Memory dictionary holding symbols
        self.__memory_values = dict() # Memory dictionary holding the real values
        self.__subsegment_size = size // 7 # Size of each per-type subsegment
        self.initial_position = initial_position # Global starting address
        self.ints = 0 # Starting address of INTs
        self.flts = self.__subsegment_size # Starting address of FLTs
        self.strs = self.__subsegment_size * 2 # Starting address of STRs
        self.chars = self.__subsegment_size * 3 # Starting address of CHARs
        self.bools = self.__subsegment_size * 4 # Starting address of BOOLs
        self.nulls = self.__subsegment_size * 5 # Starting address of NULLs
        self.frogs = self.__subsegment_size * 6 # Starting address of FROGs
        # Remaining free memory in each per-type subsegment
        self.spare_memory_ints = self.__subsegment_size # INT subsegment
        self.spare_memory_flts = self.__subsegment_size # FLT subsegment
        self.spare_memory_strs = self.__subsegment_size # STR subsegment
        self.spare_memory_chars = self.__subsegment_size # CHAR subsegment
        self.spare_memory_bools = self.__subsegment_size # BOOL subsegment
        self.spare_memory_nulls = self.__subsegment_size # NULL subsegment
        self.spare_memory_frogs = self.__subsegment_size # FROG subsegment
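        # Illustrative layout (assumed size of 700, i.e. subsegments of 100 addresses):
        #   INT 0-99, FLT 100-199, STR 200-299, CHAR 300-399,
        #   BOOL 400-499, NULL 500-599, FROG 600-699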
####################### SETS #######################
    # Validations before assigning a memory address to the symbol
def insert_symbol(self, symbol):
s_type = symbol.type
        # Get the starting address of the type's subsegment
        initial_position = self.__get_memory_inital_direction(s_type)
        # Compute the address within the memory segment
        symbol_position = self.__get_symbol_position(s_type)
        # Compute the size it will take up in memory
        s_size = symbol.memory_size()
        # Check that the memory it needs does not exceed what is available
        if symbol_position + s_size - 1 < initial_position + self.__subsegment_size:
            # Assign the symbol to memory
            self.__assign_memory(symbol, symbol_position)
            # Subtract the memory it needs from the available memory
            self.__substract_memory(symbol)
            return True
        print("ERROR: Memory exceeded in " + self.name + " for type " + s_type)
        sys.exit()
    # Assigns memory to a variable
def __assign_memory(self, symbol, symbol_position):
if type(symbol) == Symbol:
            # Assign the local segment address
            symbol.segment_direction = symbol_position
            # Assign the global address
            symbol.global_direction = self.initial_position + symbol_position
            # Store the symbol in memory
            self.__memory[symbol_position] = symbol
            # Store the value in memory
            self.__memory_values[symbol_position] = symbol.value
            # If it is a constant or of type FROG, its value is its name
if self.name == "Constant Segment" or symbol.type == "FROG":
symbol.value = symbol.name
self.__memory_values[symbol_position] = symbol.name
else:
pass
    # Subtracts the symbol's size from the available memory of its type's subsegment
def __substract_memory(self, symbol):
s_type = symbol.type
s_size = symbol.memory_size()
if s_type == "INT":
self.spare_memory_ints -= s_size
elif s_type == "FLT":
self.spare_memory_flts -= s_size
elif s_type == "STR":
self.spare_memory_strs -= s_size
elif s_type == "CHAR":
self.spare_memory_chars -= s_size
elif s_type == "BOOL":
self.spare_memory_bools -= s_size
elif s_type == "NULL":
self.spare_memory_nulls -= s_size
elif s_type == "FROG":
self.spare_memory_frogs -= s_size
####################### GETS #######################
    # Returns the starting address of the subsegment for a type
def __get_memory_inital_direction(self, s_type):
type_inital_position = {
"INT": self.ints,
"FLT": self.flts,
"STR": self.strs,
"CHAR": self.chars,
"BOOL": self.bools,
"NULL": self.nulls,
"FROG": self.frogs,
}
return type_inital_position[s_type]
    # Returns the remaining memory in the subsegment for a type
def __get_spare_memory(self, s_type):
left_memory = {
"INT": self.spare_memory_ints,
"FLT": self.spare_memory_flts,
"STR": self.spare_memory_strs,
"CHAR": self.spare_memory_chars,
"BOOL": self.spare_memory_bools,
"NULL": self.spare_memory_nulls,
"FROG": self.spare_memory_frogs,
}
return left_memory[s_type]
    # Returns the local address that will be assigned to a symbol, depending on its type
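    # Illustrative example (assumed numbers): with a subsegment size of 100, INT base
    # address 0 and 90 INT slots still free, the next INT symbol lands at 100 - 90 + 0 = 10.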
def __get_symbol_position(self, s_type):
return (
self.__subsegment_size
- self.__get_spare_memory(s_type)
+ self.__get_memory_inital_direction(s_type)
)
####################### SEARCH #######################
    # Returns the symbol at an address
def search_symbol(self, direction):
direction = direction - self.initial_position
return self.__memory.get(direction, None)
    # Returns the value at an address
def search_value(self, direction):
direction = direction - self.initial_position
return self.__memory_values.get(direction, None)
####################### MODIFY #######################
    # Modifies the value at an address
def modify_value(self, direction, value):
direction = direction - self.initial_position
self.__memory_values[direction] = value
    # Assigns an address to an array[index] symbol and stores its value in memory
def modify_address(self, symbol, address):
if address not in self.__memory.keys():
self.__assign_memory(symbol, address)
####################### SAVE MEMORY #######################
    # Returns a temporary dictionary with the value stored at each address
def save_local_memory(self):
local_data = {}
for space in self.__memory:
local_data[space] = self.__memory[space].value
return local_data
    # Clears the symbol's address and erases the value
def erase_local_memory(self):
for space in self.__memory:
self.__memory[space].segment_direction = None
self.__memory[space].global_direction = None
self.__memory_values[space] = None
    # Restores memory to what it held when it was frozen
def backtrack_memory(self, frozen_memory):
for k, v in frozen_memory.items():
self.__memory[k].value = v
self.__memory[k].segment_direction = k
self.__memory[k].global_direction = k + self.initial_position
####################### PRINTS #######################
    # Prints the memory segment
def print_memory_segment(self):
print("##### MEMORY ", self.name, " ##########")
for space in self.__memory:
self.__memory[space].print_symbol()
print("SAVED_VALUE:", self.__memory_values[space])
print("................")
|
import xgboost as xgb
import sys
import os
import numpy as np
svmFile = sys.argv[1]
pairGenesFile = sys.argv[2]
modelNam = sys.argv[3]
treesCount = int(sys.argv[4])
pairsOfGenesArr = []
f = open(pairGenesFile,"r")
for line in f:
line = line.strip('\n')
pairsOfGenesArr.append(line)
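# Assumption implied by the loop further below: the i-th line of pairGenesFile corresponds
# to the i-th row of the svmlight file, since predictions are matched to gene pairs by index.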
dval = xgb.DMatrix(svmFile)
bst = xgb.Booster({'nthread':10}) #init model
bst.load_model(modelNam)
ypred = bst.predict(dval,ntree_limit=treesCount)
i = 0
for val in ypred:
genes = pairsOfGenesArr[i]
print(genes+'\t'+str(val))
i=i+1
f.close()
|
import json
import logging
from threading import Thread
def flatten(api_dictionary, separator='.'):
"""
    Flatten a nested API dictionary, merging the keys of each nested level with the
    given separator (default is '.').
    :param api_dictionary: nested api dictionary object with handlers
    :param separator: string with which dictionary keys will be merged, default is '.'
:return: flattened dictionary with only one level of nest
"""
for key, value in list(api_dictionary.items()):
if type(value) == dict:
            flatten(value, separator)
api_dictionary.pop(key)
for key_2, value_2 in value.items():
api_dictionary[key + separator + key_2] = value_2
return api_dictionary
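# Example (illustrative): flatten({'user': {'get': get_handler, 'set': set_handler}})
# returns {'user.get': get_handler, 'user.set': set_handler}.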
class Stream:
def __init__(self, igor_server, client, stream_id, on_close_callback):
self.client = client
self.__igor_server = igor_server
self.__on_close_callback = on_close_callback
self.stream_id = stream_id
self.closed = False
def send(self, data=None):
if self.closed:
raise Exception('Cannot write to closed stream')
serialized_message = json.dumps({
'streamId': self.stream_id,
'data': data
})
self.__igor_server.server.send_message(self.client, serialized_message)
def send_error(self, error):
if self.closed:
raise Exception('Cannot write to closed stream')
serialized_message = json.dumps({
'streamId': self.stream_id,
'error': str(error)
})
self.__igor_server.server.send_message(self.client, serialized_message)
def close(self):
if self.closed:
raise Exception('Stream already closed')
self.closed = True
self.__igor_server.server.send_message(self.client, json.dumps({
'streamId': self.stream_id,
'close': True
}))
self.__on_close_callback(self.__igor_server, self.stream_id)
class ProcessOutput:
"""
Output stream class for processes
"""
def __init__(self, process_id, igor_server, on_finish_callback):
self.__igor_server = igor_server
self.process_id = process_id
self.on_message_received = lambda *args: None
self.__on_finish_callback = on_finish_callback
self.closed = False
def send(self, action, data=None, client_id=None):
if self.closed:
raise Exception('Cannot write to closed output')
serialized_message = json.dumps({
'streamId': self.process_id,
'action': action,
'data': data
})
        if client_id is None:
self.__igor_server.server.send_message_to_all(serialized_message)
else:
client = self.__igor_server.clients.get(client_id, None)
            if client is None:
raise Exception('No client with given id exist. Id= "' + client_id + '"')
else:
self.__igor_server.server.send_message(client, serialized_message)
def send_error(self, error, client_id=None):
if self.closed:
raise Exception('Cannot write to closed output')
serialized_message = json.dumps({
'streamId': self.process_id,
'error': str(error)
})
if client_id is None:
self.__igor_server.server.send_message_to_all(serialized_message)
else:
client = self.__igor_server.clients.get(client_id, None)
if client is None:
raise Exception('No client with given id exist. Id= "' + client_id + '"')
else:
self.__igor_server.server.send_message(client, serialized_message)
def finish(self):
if self.closed:
raise Exception('Output already closed')
self.closed = True
self.__igor_server.server.send_message_to_all(json.dumps({
'streamId': self.process_id,
'close': True
}))
self.__on_finish_callback(self.__igor_server, self.process_id)
async def handler_wrapper(handler_function, stream, data, session, scope):
"""
Wrapper around handler function. Automatically closes stream and handles errors
"""
try:
handler_function(stream, data, session=session, scope=scope)
stream.close()
except SystemExit as system_exit:
        # System exit exceptions should be passed higher up
stream.close()
raise system_exit
except Exception as error:
logging.error(error)
stream.send_error(str(error))
stream.close()
raise error
class IgorProcess(Thread):
"""
    Base class for all processes
"""
scope = None
process_id = None
output = None
def __init__(self):
Thread.__init__(self)
|
"""
"""
import logging
import pickle
import numpy as np
import quantities as pq
try:
import h5py
except ImportError as err:
HAVE_H5PY = False
else:
HAVE_H5PY = True
from neo.core import (objectlist, Block, Segment, AnalogSignal, SpikeTrain,
Epoch, Event, IrregularlySampledSignal, ChannelIndex,
Unit)
from neo.io.baseio import BaseIO
from neo.core.baseneo import MergeError
logger = logging.getLogger('Neo')
def disjoint_groups(groups):
"""`groups` should be a list of sets"""
groups = groups[:] # copy, so as not to change original
for group1 in groups:
for group2 in groups:
if group1 != group2:
if group2.issubset(group1):
groups.remove(group2)
elif group1.issubset(group2):
groups.remove(group1)
return groups
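# Example (illustrative): disjoint_groups([{1, 2}, {1, 2, 3}, {4}]) drops the subset
# group and returns [{1, 2, 3}, {4}].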
class NeoHdf5IO(BaseIO):
"""
Class for reading HDF5 format files created by Neo version 0.4 or earlier.
Writing to HDF5 is not supported by this IO; we recommend using NixIO for this.
"""
supported_objects = objectlist
readable_objects = objectlist
name = 'NeoHdf5 IO'
extensions = ['h5']
mode = 'file'
is_readable = True
is_writable = False
def __init__(self, filename):
if not HAVE_H5PY:
raise ImportError("h5py is not available")
BaseIO.__init__(self, filename=filename)
self._data = h5py.File(filename, 'r')
self.object_refs = {}
def read_all_blocks(self, lazy=False, merge_singles=True, **kargs):
"""
Loads all blocks in the file that are attached to the root (which
happens when they are saved with save() or write_block()).
If `merge_singles` is True, then the IO will attempt to merge single channel
`AnalogSignal` objects into multichannel objects, and similarly for single `Epoch`,
`Event` and `IrregularlySampledSignal` objects.
"""
assert not lazy, 'Do not support lazy'
self.merge_singles = merge_singles
blocks = []
for name, node in self._data.items():
if "Block" in name:
blocks.append(self._read_block(node))
return blocks
def read_block(self, lazy=False, **kargs):
"""
Load the first block in the file.
"""
assert not lazy, 'Do not support lazy'
return self.read_all_blocks(lazy=lazy)[0]
def _read_block(self, node):
attributes = self._get_standard_attributes(node)
if "index" in attributes:
attributes["index"] = int(attributes["index"])
block = Block(**attributes)
for name, child_node in node['segments'].items():
if "Segment" in name:
block.segments.append(self._read_segment(child_node, parent=block))
if len(node['recordingchannelgroups']) > 0:
for name, child_node in node['recordingchannelgroups'].items():
if "RecordingChannelGroup" in name:
block.channel_indexes.append(
self._read_recordingchannelgroup(child_node, parent=block))
self._resolve_channel_indexes(block)
elif self.merge_singles:
# if no RecordingChannelGroups are defined, merging
# takes place here.
for segment in block.segments:
if hasattr(segment, 'unmerged_analogsignals'):
segment.analogsignals.extend(
self._merge_data_objects(segment.unmerged_analogsignals))
del segment.unmerged_analogsignals
if hasattr(segment, 'unmerged_irregularlysampledsignals'):
segment.irregularlysampledsignals.extend(
self._merge_data_objects(segment.unmerged_irregularlysampledsignals))
del segment.unmerged_irregularlysampledsignals
return block
def _read_segment(self, node, parent):
attributes = self._get_standard_attributes(node)
segment = Segment(**attributes)
signals = []
for name, child_node in node['analogsignals'].items():
if "AnalogSignal" in name:
signals.append(self._read_analogsignal(child_node, parent=segment))
if signals and self.merge_singles:
segment.unmerged_analogsignals = signals # signals will be merged later
signals = []
for name, child_node in node['analogsignalarrays'].items():
if "AnalogSignalArray" in name:
signals.append(self._read_analogsignalarray(child_node, parent=segment))
segment.analogsignals = signals
irr_signals = []
for name, child_node in node['irregularlysampledsignals'].items():
if "IrregularlySampledSignal" in name:
irr_signals.append(self._read_irregularlysampledsignal(child_node, parent=segment))
if irr_signals and self.merge_singles:
segment.unmerged_irregularlysampledsignals = irr_signals
irr_signals = []
segment.irregularlysampledsignals = irr_signals
epochs = []
for name, child_node in node['epochs'].items():
if "Epoch" in name:
epochs.append(self._read_epoch(child_node, parent=segment))
if self.merge_singles:
epochs = self._merge_data_objects(epochs)
for name, child_node in node['epocharrays'].items():
if "EpochArray" in name:
epochs.append(self._read_epocharray(child_node, parent=segment))
segment.epochs = epochs
events = []
for name, child_node in node['events'].items():
if "Event" in name:
events.append(self._read_event(child_node, parent=segment))
if self.merge_singles:
events = self._merge_data_objects(events)
for name, child_node in node['eventarrays'].items():
if "EventArray" in name:
events.append(self._read_eventarray(child_node, parent=segment))
segment.events = events
spiketrains = []
for name, child_node in node['spikes'].items():
raise NotImplementedError('Spike objects not yet handled.')
for name, child_node in node['spiketrains'].items():
if "SpikeTrain" in name:
spiketrains.append(self._read_spiketrain(child_node, parent=segment))
segment.spiketrains = spiketrains
segment.block = parent
return segment
def _read_analogsignalarray(self, node, parent):
attributes = self._get_standard_attributes(node)
# todo: handle channel_index
sampling_rate = self._get_quantity(node["sampling_rate"])
t_start = self._get_quantity(node["t_start"])
signal = AnalogSignal(self._get_quantity(node["signal"]),
sampling_rate=sampling_rate, t_start=t_start,
**attributes)
signal.segment = parent
self.object_refs[node.attrs["object_ref"]] = signal
return signal
def _read_analogsignal(self, node, parent):
return self._read_analogsignalarray(node, parent)
def _read_irregularlysampledsignal(self, node, parent):
attributes = self._get_standard_attributes(node)
signal = IrregularlySampledSignal(times=self._get_quantity(node["times"]),
signal=self._get_quantity(node["signal"]),
**attributes)
signal.segment = parent
return signal
def _read_spiketrain(self, node, parent):
attributes = self._get_standard_attributes(node)
t_start = self._get_quantity(node["t_start"])
t_stop = self._get_quantity(node["t_stop"])
# todo: handle sampling_rate, waveforms, left_sweep
spiketrain = SpikeTrain(self._get_quantity(node["times"]),
t_start=t_start, t_stop=t_stop,
**attributes)
spiketrain.segment = parent
self.object_refs[node.attrs["object_ref"]] = spiketrain
return spiketrain
def _read_epocharray(self, node, parent):
attributes = self._get_standard_attributes(node)
times = self._get_quantity(node["times"])
durations = self._get_quantity(node["durations"])
labels = node["labels"].value.astype('U')
epoch = Epoch(times=times, durations=durations, labels=labels, **attributes)
epoch.segment = parent
return epoch
def _read_epoch(self, node, parent):
return self._read_epocharray(node, parent)
def _read_eventarray(self, node, parent):
attributes = self._get_standard_attributes(node)
times = self._get_quantity(node["times"])
labels = node["labels"].value.astype('U')
event = Event(times=times, labels=labels, **attributes)
event.segment = parent
return event
def _read_event(self, node, parent):
return self._read_eventarray(node, parent)
def _read_recordingchannelgroup(self, node, parent):
# todo: handle Units
attributes = self._get_standard_attributes(node)
channel_indexes = node["channel_indexes"].value
channel_names = node["channel_names"].value
if channel_indexes.size:
if len(node['recordingchannels']):
raise MergeError("Cannot handle a RecordingChannelGroup which both has a "
"'channel_indexes' attribute and contains "
"RecordingChannel objects")
raise NotImplementedError("todo") # need to handle node['analogsignalarrays']
else:
channels = []
for name, child_node in node['recordingchannels'].items():
if "RecordingChannel" in name:
channels.append(self._read_recordingchannel(child_node))
channel_index = ChannelIndex(None, **attributes)
channel_index._channels = channels
# construction of the index is deferred until we have processed
# all RecordingChannelGroup nodes
units = []
for name, child_node in node['units'].items():
if "Unit" in name:
units.append(self._read_unit(child_node, parent=channel_index))
channel_index.units = units
channel_index.block = parent
return channel_index
def _read_recordingchannel(self, node):
attributes = self._get_standard_attributes(node)
analogsignals = []
irregsignals = []
for name, child_node in node["analogsignals"].items():
if "AnalogSignal" in name:
obj_ref = child_node.attrs["object_ref"]
analogsignals.append(obj_ref)
for name, child_node in node["irregularlysampledsignals"].items():
if "IrregularlySampledSignal" in name:
obj_ref = child_node.attrs["object_ref"]
irregsignals.append(obj_ref)
return attributes['index'], analogsignals, irregsignals
def _read_unit(self, node, parent):
attributes = self._get_standard_attributes(node)
spiketrains = []
for name, child_node in node["spiketrains"].items():
if "SpikeTrain" in name:
obj_ref = child_node.attrs["object_ref"]
spiketrains.append(self.object_refs[obj_ref])
unit = Unit(**attributes)
unit.channel_index = parent
unit.spiketrains = spiketrains
return unit
def _merge_data_objects(self, objects):
if len(objects) > 1:
merged_objects = [objects.pop(0)]
while objects:
obj = objects.pop(0)
try:
combined_obj_ref = merged_objects[-1].annotations['object_ref']
merged_objects[-1] = merged_objects[-1].merge(obj)
merged_objects[-1].annotations['object_ref'] = combined_obj_ref + \
"-" + obj.annotations[
'object_ref']
except MergeError:
merged_objects.append(obj)
for obj in merged_objects:
self.object_refs[obj.annotations['object_ref']] = obj
return merged_objects
else:
return objects
def _get_quantity(self, node):
value = node.value
unit_str = [x for x in node.attrs.keys() if "unit" in x][0].split("__")[1]
units = getattr(pq, unit_str)
return value * units
def _get_standard_attributes(self, node):
"""Retrieve attributes"""
attributes = {}
for name in ('name', 'description', 'index', 'file_origin', 'object_ref'):
if name in node.attrs:
attributes[name] = node.attrs[name]
for name in ('rec_datetime', 'file_datetime'):
if name in node.attrs:
attributes[name] = pickle.loads(node.attrs[name], encoding='bytes')
annotations = pickle.loads(node.attrs['annotations'], encoding='bytes')
attributes.update(annotations)
# avoid "dictionary changed size during iteration" error
attribute_names = list(attributes.keys())
for name in attribute_names:
if isinstance(attributes[name], (bytes, np.bytes_)):
attributes[name] = attributes[name].decode('utf-8')
if isinstance(name, bytes):
attributes[name.decode('utf-8')] = attributes[name]
attributes.pop(name)
return attributes
def _resolve_channel_indexes(self, block):
def disjoint_channel_indexes(channel_indexes):
channel_indexes = channel_indexes[:]
for ci1 in channel_indexes:
# this works only on analogsignals
signal_group1 = {tuple(x[1]) for x in ci1._channels}
for ci2 in channel_indexes: # need to take irregularly sampled signals
signal_group2 = {tuple(x[1]) for x in ci2._channels} # into account too
if signal_group1 != signal_group2:
if signal_group2.issubset(signal_group1):
channel_indexes.remove(ci2)
elif signal_group1.issubset(signal_group2):
channel_indexes.remove(ci1)
return channel_indexes
principal_indexes = disjoint_channel_indexes(block.channel_indexes)
for ci in principal_indexes:
ids = []
by_segment = {}
for (index, analogsignals, irregsignals) in ci._channels:
# note that what was called "index" in Neo 0.3/0.4 is "id" in Neo 0.5
ids.append(index)
for signal_ref in analogsignals:
signal = self.object_refs[signal_ref]
segment_id = id(signal.segment)
if segment_id in by_segment:
by_segment[segment_id]['analogsignals'].append(signal)
else:
by_segment[segment_id] = {'analogsignals': [signal], 'irregsignals': []}
for signal_ref in irregsignals:
signal = self.object_refs[signal_ref]
segment_id = id(signal.segment)
if segment_id in by_segment:
by_segment[segment_id]['irregsignals'].append(signal)
else:
by_segment[segment_id] = {'analogsignals': [], 'irregsignals': [signal]}
assert len(ids) > 0
if self.merge_singles:
ci.channel_ids = np.array(ids)
ci.index = np.arange(len(ids))
for seg_id, segment_data in by_segment.items():
# get the segment object
segment = None
for seg in ci.block.segments:
if id(seg) == seg_id:
segment = seg
break
assert segment is not None
if segment_data['analogsignals']:
merged_signals = self._merge_data_objects(segment_data['analogsignals'])
assert len(merged_signals) == 1
merged_signals[0].channel_index = ci
merged_signals[0].annotations['object_ref'] = "-".join(
obj.annotations['object_ref']
for obj in segment_data['analogsignals'])
segment.analogsignals.extend(merged_signals)
ci.analogsignals = merged_signals
if segment_data['irregsignals']:
merged_signals = self._merge_data_objects(segment_data['irregsignals'])
assert len(merged_signals) == 1
merged_signals[0].channel_index = ci
merged_signals[0].annotations['object_ref'] = "-".join(
obj.annotations['object_ref']
for obj in segment_data['irregsignals'])
segment.irregularlysampledsignals.extend(merged_signals)
ci.irregularlysampledsignals = merged_signals
else:
raise NotImplementedError() # will need to return multiple ChannelIndexes
# handle non-principal channel indexes
for ci in block.channel_indexes:
if ci not in principal_indexes:
ids = [c[0] for c in ci._channels]
for cipr in principal_indexes:
if ids[0] in cipr.channel_ids:
break
ci.analogsignals = cipr.analogsignals
ci.channel_ids = np.array(ids)
ci.index = np.where(np.in1d(cipr.channel_ids, ci.channel_ids))[0]
|
import requests
from flask import *
from flask_bcrypt import Bcrypt
from flask_login import LoginManager, UserMixin, login_user, current_user, logout_user, login_required
from flask_bootstrap import Bootstrap
from forms import RegistrationForm, LoginForm, ContactForm, HelpForm
import psycopg2
from passwords2 import psql_user, psql_pw, psql_host, psql_port, psql_database, email_auth, email_data
app = Flask(__name__)
app.config["SECRET_KEY"] = "enter-a-hard-to-guess-string"
bcrypt = Bcrypt(app)
bootstrap = Bootstrap(app)
login_manager = LoginManager(app)
login_manager.login_view = "login"
@login_manager.user_loader
def load_user(username):
res = query("SELECT * FROM users WHERE username = '{}'".format(username))
return User(res[0])
class User(UserMixin):
def __init__(self, res_row):
self.id = res_row[0]
self.user_handle = res_row[0]
self.password_hash = res_row[1]
###############################################################################
# Accessible Routes #
###############################################################################
@app.route('/', methods=["GET", "POST"])
def index():
""" Index Page with a Search Bar that allows partial reloading by Jquery. """
search = [1, ""] # default id for search to enable partial Jquery reloading
form = HelpForm()
if form.validate_on_submit():
help_name = form.help_name.data
help_email = form.help_email.data
help_comment = form.help_comment.data
send_help_email(help_name, help_email, help_comment)
return redirect(url_for('index'))
return render_template("index.html", search=search, form = form)
@app.route('/query')
@login_required
def query():
""" Query Page with a Search Bar that allows partial reloading by Jquery and an overview of all saved queries that allow partial reloading as well. """
search = [1, ""] # default id for search to enable partial Jquery reloading
saved = query("SELECT id, words, che FROM search_words WHERE username='{}'".format(current_user.user_handle))
matched = query("SELECT che FROM search_words WHERE EXISTS (SELECT che FROM liquidations WHERE liquidations.che = search_words.che AND username='{}')".format(current_user.user_handle))
matches = []
matches_liq_detail = []
for match in matched:
matches.append(match[0])
if current_user.is_authenticated:
matched_liq_detail = query("SELECT * FROM liquidations WHERE EXISTS (SELECT che FROM search_words WHERE search_words.che = liquidations.che AND username='{}')".format(current_user.user_handle))
for match1 in matched_liq_detail:
matches_liq_detail.append(match1)
return render_template("query.html", search=search, matches=matches, saved=saved, matches_liq_detail = matches_liq_detail)
@app.route("/contact", methods=["GET", "POST"])
@login_required
def contact():
""" Contact Page with all entered contacts that allows adding a new contact or edit/delete existing contacts (partial reloading). """
form = ContactForm()
if form.validate_on_submit():
if form.new_sms_contact.data == "" and form.new_email_contact.data == "":
flash("You need to enter at least one sms or email address")
else:
query_no_fetch("INSERT INTO contacts VALUES (default, '{}', '{}', '{}', '{}')".format(current_user.user_handle, make_safe(str(form.new_sms_contact.data)),make_safe(form.new_email_contact.data), make_safe(form.new_role_contact.data)))
contacts = query("SELECT * FROM contacts WHERE username='{}'".format(current_user.user_handle))
return redirect(url_for("contact"))
contacts = query("SELECT * FROM contacts WHERE username='{}'".format(current_user.user_handle))
return render_template("contact.html", form=form, contacts = contacts)
@app.route("/register", methods=["GET", "POST"])
def register():
if current_user.is_authenticated:
return redirect(url_for("index"))
form = RegistrationForm()
if form.validate_on_submit():
registration_worked = register_user(form)
if registration_worked:
return redirect(url_for("login"))
return render_template("register.html", form=form)
@app.route("/login", methods=["GET", "POST"])
def login():
if current_user.is_authenticated:
return redirect(url_for("query"))
form = LoginForm()
if form.validate_on_submit():
if is_login_successful(form) == "correct_credentials":
return redirect(url_for("query"))
elif is_login_successful(form) == "username_not_found":
flash("Username not found!")
elif is_login_successful(form) == "wrong_password":
flash("Wrong password!")
return render_template("login.html", form=form)
@app.route("/logout")
def logout():
logout_user()
return redirect(url_for("index"))
@app.route("/impressum")
def impressum():
return render_template("impressum.html")
###############################################################################
# Inaccessible Helper Routes #
###############################################################################
@app.route("/newquerysearch", methods=['POST'])
def newquerysearch():
""" Helper route for partial reloading of a new company search (section 3.html).
Uses zefix.ch to match the entry and then reads CHE from the response to query in the liquidation page. """
id = request.form["id"]
search = request.form["search"]
if (len(search) >= 2):
results = []
res2 = []
try:
r = requests.post("https://www.zefix.ch/ZefixREST/api/v1/firm/search.json", json={"name": search,"languageKey":"de","maxEntries":4})
x = r.json()["list"]
for y in x:
name = y["name"]
uid = y["uid"]
che = uid[0:3] + "-" + uid[3:6] + "." + uid[6:9] + "." + uid[9:12]
if len(res2) < 1:
res = query("SELECT * FROM liquidations WHERE che='{}'".format(che))
if len(res) > 0:
res2.append(res)
else:
results.append([name, che])
else:
results.append([name, che])
except:
pass
if len(res2) > 0:
results.append(res2[0])
if len(results) == 0:
search = [id, search, [], False, 0]
return render_template("section3.html", search=search)
if isinstance(results[-1][0], str):
r = False
else:
r = True
search = [id, search, results, r, len(results)]
else:
search = [id, search, [], False, 0]
return render_template("section3.html", search=search)
@app.route("/savequery", methods=["POST"])
def savequery():
""" Helper route for new company save. After saving the values in psql the page is reloaded. """
id = request.form["id"]
cheAndEntry = request.form["cheAndEntry"]
cE = cheAndEntry.split("*,-&")
che = cE[0]
search_entry = cE[1]
query_no_fetch("INSERT INTO search_words VALUES (default, '{}', '{}', '{}')".format(current_user.user_handle, make_safe(search_entry), che))
return render_template("reload.html")
@app.route("/deletequery", methods=["POST"])
def deletequery():
""" Helper route to delete a company save with partial reloading (empty part from section2.html). """
id = request.form["id"]
query_no_fetch("DELETE FROM search_words WHERE id='{}'".format(id))
saved_del = [False]
return render_template("section2.html", contact=saved_del)
@app.route("/editcontact", methods=["POST"])
def editcontact():
""" Helper route to edit a contact with partial reloading (section2.html). """
id = request.form["id"]
phone = request.form["phone"]
email = request.form["email"]
role = request.form["role"]
query_no_fetch("UPDATE contacts SET phone='{}', email='{}', role='{}' WHERE id='{}'".format(make_safe(str(phone)), make_safe(email), make_safe(role), id))
contact = [id, current_user.user_handle, phone, email, role]
return render_template("section2.html", contact=contact)
@app.route("/deletecontact", methods=["POST"])
def deletecontact():
""" Helper route to delete a contact with partial reloading (empty part from section2.html). """
id = request.form["id"]
query_no_fetch("DELETE FROM contacts WHERE id='{}'".format(id))
contact = [False]
return render_template("section2.html", contact=contact)
###############################################################################
# Helper Functions for accessible Routes #
###############################################################################
def register_user(form_data):
def user_handle_already_taken(user_handle):
res = query("SELECT COUNT(*) FROM users WHERE username='{}'".format(make_safe(user_handle)))
if res[0][0] > 0:
return True
else:
return False
if user_handle_already_taken(form_data.user_handle.data):
flash("That user handle is already taken!")
return False
hashed_password = bcrypt.generate_password_hash(form_data.password.data).decode('utf-8')
query_no_fetch("INSERT INTO users VALUES ('{}', '{}')".format(make_safe(form_data.user_handle.data), make_safe(hashed_password)))
return True
def is_login_successful(form_data):
username = form_data.user_handle.data
password = form_data.password.data
res = query("SELECT * FROM users where username='{}'".format(make_safe(username)))
if len(res) == 0:
return "username_not_found"
if bcrypt.check_password_hash(res[0][1].encode('utf-8'), password.encode('utf-8')):
user = User(res[0])
login_user(user)
return "correct_credentials"
return "wrong_password"
def send_help_email(help_name, help_email, help_comment):
who = f"{help_name} <{help_email}>"
result = requests.post("https://api.eu.mailgun.net/v3/mail.schuldenrufe.ch/messages",
auth=("api", email_auth),
data={"from": who, "text": help_comment, "to": email_data, "subject": "Contact request Schuldenrufe.ch",})
return result
###############################################################################
# Helper Functions for PSQL #
###############################################################################
def make_safe(s):
if s == None:
return "null"
return s.replace("'", "''")
def get_psql_connection():
return psycopg2.connect(
user = psql_user,
password = psql_pw,
host = psql_host,
port = psql_port,
database = psql_database
)
def query(cmd):
return _query(cmd, True)
def query_no_fetch(cmd):
_query(cmd, False)
def _query(cmd, fetch):
connection = get_psql_connection()
cursor = connection.cursor()
cursor.execute(cmd)
connection.commit()
if(fetch):
res = cursor.fetchall()
cursor.close()
connection.close()
return res
cursor.close()
connection.close()
return
###############################################################################
# Run #
###############################################################################
if __name__ == "__main__":
app.run(host="0.0.0.0", port=5000)
|
#!/usr/bin/env python3
import sys
import re
# The allele frequency used in the map file comes from ExAC.
# All ExAC populations are handled here, but each run is currently limited to one
# population; if the cases come from mixed populations, the overall frequency is used.
def get_af(info, population_in):
if population_in == "EAS":
exac_eas_list = re.findall(';ExAC_EAS=(.+?);', info)
if exac_eas_list[0] != ".":
exac_eas = exac_eas_list[0]
else:
exac_eas = 0
return exac_eas
elif population_in == "EUR":
exac_af_list_1 = re.findall(';ExAC_NFE=(.+?);', info)
if exac_af_list_1[0] != ".":
exac_nfe = exac_af_list_1[0]
else:
exac_nfe = 0
exac_af_list_2 = re.findall(';ExAC_FIN=(.+?);', info)
if exac_af_list_2[0] != ".":
            exac_fin = exac_af_list_2[0]
else:
exac_fin = 0
exac_eur = float(exac_nfe) + float(exac_fin)
return exac_eur
elif population_in == "AMR":
exac_amr_list = re.findall(';ExAC_AMR=(.+?);', info)
if exac_amr_list[0] != ".":
exac_amr = exac_amr_list[0]
else:
exac_amr = 0
return exac_amr
elif population_in == "AFR":
exac_afr_list = re.findall(';ExAC_AFR=(.+?);', info)
if exac_afr_list[0] != ".":
exac_afr = exac_afr_list[0]
else:
exac_afr = 0
return exac_afr
elif population_in == "SAS":
exac_sas_list = re.findall(';ExAC_SAS=(.+?);', info)
if exac_sas_list[0] != ".":
exac_sas = exac_sas_list[0]
else:
exac_sas = 0
return exac_sas
elif population_in == "ALL":
exac_all_list = re.findall(';ExAC_ALL=(.+?);', info)
if exac_all_list[0] != ".":
exac_all = exac_all_list[0]
else:
exac_all = 0
return exac_all
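# Illustrative INFO fragment that the regexes above expect (values made up):
#   ...;ExAC_ALL=0.0123;ExAC_EAS=.;ExAC_NFE=0.0101;ExAC_FIN=0.0005;...
# A "." means the frequency is missing and is treated as 0.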
# Order of the input and output files
input = sys.argv[1]
population = sys.argv[2]
out_map = sys.argv[3]
out_tped = sys.argv[4]
if population not in ["ALL", "EAS", "EUR", "AMR", "AFR", "SAS"]:
print("\n***invalid population input***\n")
else:
map_file = open(out_map, 'w')
tped_file = open(out_tped, 'w')
for line in open(input):
genotype = []
if line[0] == "#" or line[0] == "X" or line[0] == "Y" or line[0] == "c":
continue
data = line.strip().split("\t")
if data[2] != ".":
var_id = data[2]
else:
var_id = ":".join([data[0], data[1]])
gene = re.findall(';Gene.refGene=(.+?);', data[7].strip())
af = get_af(data[7], population)
for i in range(9, len(data)):
if ":lowGQ:" not in data[i]:
gt = data[i].strip().split(":")
if gt[0] == "./.":
genotype.append("-9")
genotype.append("-9")
elif gt[0] == "0/1":
genotype.append("0")
genotype.append("1")
elif gt[0] == "1/1":
genotype.append("1")
genotype.append("1")
elif gt[0] == "0/0":
genotype.append("0")
genotype.append("0")
else:
haplo = gt[0].strip().split("|")
genotype.append(haplo[0])
genotype.append(haplo[1])
else:
genotype.append("-9")
genotype.append("-9")
if gene[0] != "." or gene[0] != "unknown":
map_file.write(" ".join([gene[0], var_id, str(af)]) + "\n")
tped_file.write(var_id + " " + " ".join(genotype) + "\n")
map_file.close()
tped_file.close()
|
import os
import json
import matplotlib.pyplot as plt
with open('policy.json', 'r') as f:
file = json.load(f)
x1 = file['pg']['x']
y1 = file['pg']['y']
x2 = file['pg_baseline']['x']
y2 = file['pg_baseline']['y']
title = 'policy gradient'
plt.plot(x1, y1, linewidth=3, label='w/o baseline')
plt.plot(x2, y2, linewidth=3, label='w/ baseline')
plt.title(title, fontsize=14)
plt.xlabel("Steps", fontsize=10)
plt.ylabel("Avg Reward", fontsize=10)
plt.legend()
plt.savefig('policy_gradient.png')
|
"""
Finding patterns in data: multi-domain graphs
"""
import argparse
from forensics.patterns import run_analysis
from forensics import seed
# step 1: seed random data
# step 2: seed POI's data
# step 3: cypher queries to seed
# step 4: cypher queries to run forensics analysis
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Forensics analysis using neo4j')
    parser.add_argument('--seed', action='store_true')  # a boolean flag; type=bool would treat any non-empty string as True
parser.add_argument('--pattern', type=str, default="*")
args = parser.parse_args()
seed_data = args.seed
pattern = args.pattern
if seed_data:
seed.seed()
run_analysis(pattern)
|
#!/usr/bin/env python3
# Copyright 2020 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
import argparse
import logging
from dataclasses import dataclass
import github
from packaging.version import Version
from pants_release.common import CONTRIBUTORS_PATH, VERSION_PATH, die, sorted_contributors
from pants_release.git import git, git_fetch, github_repo
from pants.util.strutil import softwrap
logger = logging.getLogger(__name__)
def create_parser() -> argparse.ArgumentParser:
parser = argparse.ArgumentParser(description="Prepare the changelog for a release.")
parser.add_argument(
"--new",
required=True,
type=Version,
help="The version for the new release, e.g. `2.0.0.dev1` or `2.0.0rc2`.",
)
parser.add_argument(
"--release-manager",
required=True,
help="The GitHub username of the person managing this release",
)
parser.add_argument(
"--log-level",
default="WARNING",
)
parser.add_argument(
"--publish",
action="store_true",
help=softwrap(
"""
Publish the changes: create a branch, commit, push, and create a pull request. Ensure
`gh` (https://cli.github.com) is installed and authenticated.
"""
),
)
return parser
@dataclass(frozen=True)
class ReleaseInfo:
version: Version
slug: str
branch: str
@staticmethod
def determine(new_version: Version) -> ReleaseInfo:
slug = f"{new_version.major}.{new_version.minor}.x"
# Use the main branch for all dev releases, and for the first alpha (which creates a stable branch).
use_main_branch = new_version.is_devrelease or (
new_version.pre
and "a0" == "".join(str(p) for p in new_version.pre)
and new_version.micro == 0
)
branch = "main" if use_main_branch else slug
return ReleaseInfo(version=new_version, slug=slug, branch=branch)
def update_contributors() -> None:
CONTRIBUTORS_PATH.write_text(
"Created as part of the release process.\n\n"
+ "".join(f"+ {c}\n" for c in sorted_contributors(git_range="HEAD"))
)
def update_version(release_info: ReleaseInfo) -> None:
VERSION_PATH.write_text(f"{release_info.version}\n")
def commit_and_pr(
repo: github.Repository.Repository,
release_info: ReleaseInfo,
release_manager: str,
) -> None:
title = f"Prepare {release_info.version}"
branch = f"automation/release/{release_info.version}"
# starting from HEAD, because we checked out the relevant branch
git("checkout", "-b", branch)
git("add", str(VERSION_PATH), str(CONTRIBUTORS_PATH))
git("commit", "-m", title)
git("push", "origin", "HEAD")
pr = repo.create_pull(
title=title,
body="",
base=release_info.branch,
head=branch,
)
pr.add_to_labels("automation:release-prep", "category:internal")
pr.add_to_assignees(release_manager)
def main() -> None:
args = create_parser().parse_args()
logging.basicConfig(level=args.log_level)
if args.new < Version("2.18.0.dev0"):
die(
softwrap(
"""
This script shouldn't be used for releases pre-2.18.x.
Follow the release docs for the relevant release.
E.g. https://www.pantsbuild.org/v2.17/docs/release-process
"""
)
)
# connect to github first, to fail faster if credentials are wrong, etc.
gh_repo = github_repo() if args.publish else None
release_info = ReleaseInfo.determine(args.new)
git("checkout", git_fetch(release_info.branch))
update_contributors()
update_version(release_info)
if args.publish:
assert gh_repo is not None
commit_and_pr(gh_repo, release_info, args.release_manager)
if __name__ == "__main__":
main()
|
import torch
import torch.nn
import os
import numpy as np
import matplotlib.pyplot as plt
import glob
from sklearn.svm import SVC
from sklearn.metrics import plot_confusion_matrix
from sklearn import preprocessing
from torchvision.transforms import ToTensor
from PIL import Image
from joblib import dump
# import pretrained model:
from facenet_pytorch import InceptionResnetV1
# generate classes
face_classes = ['face_' + str(i) for i in range(0,21)]
# load pretrained face recognition model
resnet = InceptionResnetV1(pretrained='vggface2').eval()
# load data
path = os.path.dirname(os.path.realpath(__file__)) + '/faces/'
X_train = []
y_train = []
X_test = []
y_test = []
for i in range(0,21):
f_c = 'face_' + str(i)
print(f_c)
training_path = path + 'training_data/' + f_c + '/*'
for file in glob.glob(training_path):
img = Image.open(file)
img = img.resize((160, 160))
img = ToTensor()(img)
# calculate embeddings
img_embedding = resnet(img.unsqueeze(0))
X_train.append(img_embedding.detach().numpy().ravel())
y_train.append(i)
test_path = path + 'test_data/' + f_c + '/*'
for file in glob.glob(test_path):
img = Image.open(file)
img = img.resize((160, 160))
img = ToTensor()(img)
# calculate embeddings
img_embedding = resnet(img.unsqueeze(0))
X_test.append(img_embedding.detach().numpy().ravel())
y_test.append(i)
X_train = np.array(X_train)
y_train = np.array(y_train)
X_test = np.array(X_test)
y_test = np.array(y_test)
# train SVC on embeddings
model = SVC(kernel='linear')
model.fit(X_train, y_train)
# test SVC + plot confusion matrix
plot_confusion_matrix(model, X_test, y_test)
plt.show()
# save model
if input('Save model? (y/n)') == 'y':
dump(model, 'face_model.joblib')
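# A minimal sketch of how the saved classifier could be used later for inference
# (illustrative only; the image path below is a made-up placeholder):
# from joblib import load
# clf = load('face_model.joblib')
# img = ToTensor()(Image.open('some_face.jpg').resize((160, 160)))
# emb = resnet(img.unsqueeze(0)).detach().numpy().ravel()
# print(face_classes[clf.predict([emb])[0]])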
|
# -*- coding: utf-8 -*-
"""Tests for Windows Restore Point rp.log files."""
import unittest
from dtformats import rp_log
from tests import test_lib
class RestorePointLogFileTest(test_lib.BaseTestCase):
"""Windows Restore Point rp.log file tests."""
# pylint: disable=protected-access
def testDebugPrintFileFooter(self):
"""Tests the _DebugPrintFileFooter function."""
output_writer = test_lib.TestOutputWriter()
test_file = rp_log.RestorePointLogFile(output_writer=output_writer)
data_type_map = test_file._GetDataTypeMap('rp_log_file_footer')
file_footer = data_type_map.CreateStructureValues(
creation_time=1)
test_file._DebugPrintFileFooter(file_footer)
def testDebugPrintFileHeader(self):
"""Tests the _DebugPrintFileHeader function."""
output_writer = test_lib.TestOutputWriter()
test_file = rp_log.RestorePointLogFile(output_writer=output_writer)
data_type_map = test_file._GetDataTypeMap('rp_log_file_header')
file_header = data_type_map.CreateStructureValues(
description='Description'.encode('utf-16-le'),
event_type=1,
restore_point_type=2,
sequence_number=3)
test_file._DebugPrintFileHeader(file_header)
def testReadFileFooter(self):
"""Tests the _ReadFileFooter function."""
output_writer = test_lib.TestOutputWriter()
test_file = rp_log.RestorePointLogFile(output_writer=output_writer)
test_file_path = self._GetTestFilePath(['rp.log'])
self._SkipIfPathNotExists(test_file_path)
with open(test_file_path, 'rb') as file_object:
test_file._file_size = 536
test_file._ReadFileFooter(file_object)
def testReadFileHeader(self):
"""Tests the _ReadFileHeader function."""
output_writer = test_lib.TestOutputWriter()
test_file = rp_log.RestorePointLogFile(output_writer=output_writer)
test_file_path = self._GetTestFilePath(['rp.log'])
self._SkipIfPathNotExists(test_file_path)
with open(test_file_path, 'rb') as file_object:
test_file._ReadFileHeader(file_object)
def testReadFileObject(self):
"""Tests the ReadFileObject function."""
output_writer = test_lib.TestOutputWriter()
test_file = rp_log.RestorePointLogFile(
debug=True, output_writer=output_writer)
test_file_path = self._GetTestFilePath(['rp.log'])
self._SkipIfPathNotExists(test_file_path)
test_file.Open(test_file_path)
if __name__ == '__main__':
unittest.main()
|
"""
Heber Cooke 10/24/2019
Chapter 5 Exercise 6
This program takes a number and converts it to base 10
"""
conversion = {"0":0, "1":1, "2":2, "3":3, "4":4, "5":5, "6":6, "7":7, \
"8":8, "9":9, "A":10, "B":11, "C":12, "D":13, "E":14, "F":15}
def decimalToRep(num,base):
s = 0
ex = len(num) -1
for digit in num:
digit = int(conversion.get(digit))
s = s + int(digit) * int(base) ** ex
ex = ex -1
return(s)
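# Quick illustrative checks of the conversion above (added for clarity):
# hexadecimal FF is 15*16 + 15 = 255, and binary 1011110 is 64+16+8+4+2 = 94.
assert decimalToRep("FF", 16) == 255
assert decimalToRep("1011110", 2) == 94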
def main():
while True:
number = input("Enter Q to quit, Enter A for automatic, Enter a number: ").upper()
# fancy print format for the output
if number == "A" or number =="a":
print("%-20s%-8s%-9s" % ("65F6 base 16 is:",decimalToRep("65F6",16), "Base 10"))
print("%-20s%-8s%-9s" % ("876 base 8 is:", decimalToRep("876",8), "Base 10"))
print("%-20s%-8s%-9s" % ("76543 base 7 is:", decimalToRep("76543",7), "Base 10"))
print("%-20s%-8s%-9s" % ("654 base 6 is:", decimalToRep("654",6), "Base 10"))
print("%-20s%-8s%-9s" % ("543 base 5 is:", decimalToRep("543",5), "Base 10"))
print("%-20s%-8s%-9s" % ("432 base 4 is:", decimalToRep("432",4), "Base 10"))
print("%-20s%-8s%-9s" % ("3212 base 3 is:", decimalToRep("3212",3), "Base 10"))
print("%-20s%-8s%-9s" % ("1011110 base 2 is:", decimalToRep("1011110",2), "Base 10"))
elif number =="Q" or number =="q":
break
else:
base = input("Enter a base: ")
print(decimalToRep(number,base))
main()
|
from os import error
from flask import Flask, flash
from flask import render_template, redirect, url_for, request, abort, jsonify
from models.Modelos import *
from flask_sqlalchemy import SQLAlchemy
from pathlib import Path
from werkzeug.utils import secure_filename
import os.path
import sys
import shutil
import smtplib, ssl
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
import random
import array
import re
# Settings for sending email
port = 465 # For SSL
correo = '4atech.am4zonas@gmail.com' # our email address
password = 'am4zonas123' # the password for our email account
context = ssl.create_default_context() # create an SSL context
message = MIMEMultipart("alternative") # the email message to send
db = SQLAlchemy() # our ORM
# Data used for validation checks
# Lowercase letters
LOCASE_CHARACTERS = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k','l', 'm', 'n', 'o', 'p', 'q','r', 's', 't', 'u', 'v', 'w', 'x', 'y','z']
# Uppercase letters
UPCASE_CHARACTERS = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L','M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z']
# Digits
DIGITS = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9']
# Symbols
SYMBOLS = ['@', '#', '$', '%', '=', ':', '?', '.', '/', '|', '~', '>', '*', '(', ')', '<']
# Regular expression for validating email addresses
regex = r'\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Z|a-z]{2,}\b'
"""
Funcion encargada del registro de los usuarios para el sistema
"""
def registrar_usuario():
# if we did not receive a POST request, show the registration form
if request.method != 'POST':
return render_template('usuario/registrar_usuario.html')
# otherwise, read the submitted information
nombre_usuario = request.form['nombre_usuario']
apellidoP = request.form['apellidoP']
apellidoM = request.form['apellidoM']
correo_usuario = request.form['correo_usuario']
contrasenia = crear_contrasenia()
telefono = request.form['telefono']
# Check that both the first name and the last names contain only letters
if(not check_no_symbols(nombre_usuario) or not check_no_symbols(apellidoM) or not check_no_symbols(apellidoP)):
return render_template('usuario/registrar_usuario.html', error='El nombre y/o apellidos no pueden llevar números o símbolos.')
# Check that the email address has a valid format
if(not check_mail(correo_usuario)):
return render_template('usuario/registrar_usuario.html', error='Correo electrónico con un formato invalido.')
# Check that the phone number has a valid length
if(len(telefono) != 10):
return render_template('usuario/registrar_usuario.html', error='El número telefónico debe tener una longitud de 10 números.')
# The type indicates whether a user is a buyer or a seller;
# by default the value is False, which means the user is a buyer
tipo = False
# If the seller option is selected, the value changes to True
if request.form.get('type-user') == 'vendedor': tipo = True
mensaje = ''
# Once we have the form data, we insert
# the information into our Usuario table
try:
# If the user is already registered, the query succeeds and the page is reloaded with an error
usuarioRegistrado = db.session.query(Usuario).filter(Usuario.correo == request.form['correo_usuario']).one()
return render_template('usuario/registrar_usuario.html', error='Este usuario ya se encuentra registrado, por favor introduzca un correo diferente.')
except Exception as e:
# Otherwise (the user does not exist yet) we can insert the new record
nuevo_usuario = Usuario(correo_usuario,nombre_usuario,apellidoP,apellidoM,contrasenia,telefono,tipo)
db.session.add(nuevo_usuario)
db.session.commit()
# Send the confirmation email for the successful registration;
# this email includes the user's password
with smtplib.SMTP_SSL("smtp.gmail.com", port, context=context) as server:
server.login("4atech.am4zonas@gmail.com", password)
try:
server.login(correo,password)
nuevo_mensaje = """\
<html>
<body>
<p>¡Hola, {}!<br>
Nos complace que haya decidido formar parte de esta comunidad.<br>
Ha sido registrado exitosamente.<br>
Puede entrar a su cuenta usando su correo con la contraseña: {}<br>
Atentamente, el equipo de 4AT-ech.
</p>
</body>
</html>
""".format(nuevo_usuario.nombre,contrasenia)
message['From'] = correo
message['To'] = nuevo_usuario.correo
message['Subject'] = 'Registro exitoso en Am4zonas'
message.attach(MIMEText(nuevo_mensaje, 'html'))
server.sendmail(correo,nuevo_usuario.correo,message.as_string())
except Exception as e:
flash('No hemos podido enviar su correo con su contraseña. Sin embargo, su contraseña es: ' + contrasenia)
# Once the email has been sent, go back to the home page
# so the user can log in with their password
return redirect(url_for('index'))
#---------------------------------- Helper functions -----------------------------------------------------
# Helper function that builds a secure password
def crear_contrasenia():
# Maximum password length; the size can be changed at any time
MAX_LEN = 12
# Character pools we want to use:
# combine all the character lists above into a single pool
COMBINED_LIST = DIGITS + UPCASE_CHARACTERS + LOCASE_CHARACTERS + SYMBOLS
# Select at least one character from each of the pools above
rand_digit = random.choice(DIGITS)
rand_upper = random.choice(UPCASE_CHARACTERS)
rand_lower = random.choice(LOCASE_CHARACTERS)
rand_symbol = random.choice(SYMBOLS)
# Combine the characters generated so far;
# we only have 4 of them, but we want
# the password to be 12 characters long
temp_pass = rand_digit + rand_upper + rand_lower + rand_symbol
# Now that we have a string with four characters,
# draw the remaining MAX_LEN - 4 characters,
# which in this case is 8
for x in range(MAX_LEN - 4):
temp_pass = temp_pass + random.choice(COMBINED_LIST)
# Temporarily convert the password into an array
# and shuffle it to avoid any predictable pattern
temp_pass_list = array.array('u', temp_pass)
random.shuffle(temp_pass_list)
# Once we have this array, all that is left is to
# concatenate the characters and return the password
contrasenia = ""
for x in temp_pass_list:
contrasenia = contrasenia + x
return contrasenia
# Function to check that a string contains only letters and no symbols
def check_no_symbols(campo):
campo = split(campo)
for char in campo:
if char not in LOCASE_CHARACTERS and char not in UPCASE_CHARACTERS:
return False
return True
# Helper function to convert a string into a list of characters
def split(word):
return [char for char in word]
# Helper function to check that an email address is well formed
def check_mail(email):
# Check that the string matches the regular expression
if(re.fullmatch(regex, email)):
return True
return False
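# Illustrative checks for the helpers above (not part of the original module):
# the generated password is always 12 characters long, and the email regex
# accepts a plain address while rejecting a malformed one.
if __name__ == '__main__':
    assert len(crear_contrasenia()) == 12
    assert check_mail('persona@example.com')
    assert not check_mail('no-es-un-correo')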
|
import sys
class Tree(object):
def __init__(self,a,b,c):
self.a = None
self.b = None
self.c = None
self.level = 0
self.parent = None
self.data = [a,b,c]
def get_stream():
a,b,c = sys.stdin.readline().split()
return[int(a),int(b),int(c)]
def generate_node(root):
if root.data[0] > root.data[1]:
A = Tree(root.data[0]-root.data[1],2*root.data[1],root.data[2])
else :
A = Tree(2*root.data[0],root.data[1]-root.data[0],root.data[2])
if root.data[0] > root.data[2]:
B = Tree(root.data[0]-root.data[2],root.data[1],2*root.data[2])
else:
B = Tree(2*root.data[0],root.data[1],root.data[2] - root.data[0])
if root.data[1] > root.data[2]:
C = Tree(root.data[0],root.data[1]-root.data[2],2*root.data[2])
else:
C = Tree(root.data[0],2*root.data[1],root.data[2] - root.data[1])
A.parent = root
B.parent = root
C.parent = root
A.level = A.parent.level + 1
B.level = A.level
C.level = A.level
root.a = A
root.b = B
root.c = C
l = get_stream()
root = Tree(l[0],l[1],l[2])
stack = []
stack.append(root)
level = 0
solutions = []
def generate_tree():
current_level = 0
i = 1
cmp = 0
found = False
A = stack.pop(0)
x = 1
while True:
x += 1
if x > 3**10 :
print('Ok')
exit(0)
if current_level != A.level and not found:
current_level = A.level
elif (current_level != A.level and found) or A.level > 11:
break
if equal_bet(A):
found = True
solutions.append(A)
generate_node(A)
stack.append(A.a)
stack.append(A.b)
stack.append(A.c)
A = stack.pop(0)
def equal_bet(node):
if (node.data[0] == 0 or node.data[1] == 0 or node.data[2] == 0):
return True
else:
return False
generate_tree()
def get_schema(A):
l = []
ptr = A
while ptr :
l.append(ptr.data)
ptr = ptr.parent
return l
l = []
if solutions:
l1 = get_schema(solutions[0])
while l1:
l.append(l1.pop())
for i in l:
print(i[0],i[1],i[2])
else:
print("Ok")
|
class configs:
HEADER = '\033[95m'
MD5HASH = 'd44cb8546e7e57c21caa064ee5007c8a'
PASSWD = 'your_pass_here'
PATH = 'your_path_here'
|
import subprocess
def validate_connect(address):
p = subprocess.Popen(['iwgetid', '-a'], stdout=subprocess.PIPE)
output, err = p.communicate()
rc = p.returncode
mac_address = str(output).split()[3]
mac_address = mac_address[0:17]
if address == mac_address:
print(mac_address)
return True
return False
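# Illustrative usage (the MAC address below is a made-up placeholder):
# if validate_connect('a0:b1:c2:d3:e4:f5'):
#     print('Connected to the expected access point')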
|
# quotient
# remainder
import sys
N = int(sys.stdin.readline())
Q5 = N // 5
R5 = N % 5
if R5 % 3 == 0: # if the remainder after dividing by 5 is divisible by 3, print a+b where N = 5a + 3b
print(Q5 + (R5 // 3))
else: # if it does not divide evenly, (N % 5) % 3 can only be 0, 1, or 2
if R5 % 3 == 1 and Q5 >= 1: # if that remainder is 1
print((Q5 - 1) + ((R5 // 3) + 2)) # give back one 5, add it to the remainder to make 6, and split it into 3s
elif R5 % 3 == 2 and Q5 >= 2: # if that remainder is 2
print((Q5 - 2) + ((R5 // 3) + 4)) # give back two 5s to make 12 with the remainder, and split it into 3s
else:
print('-1')
# Alternative solution
# import sys
# N = int(sys.stdin.readline())
# three = 0
# five = N//5
# N %= 5
# while five >= 0:
# if N % 3 == 0:
# three = N//3
# N %= 3
# break
# five -= 1 # give back one 5 at a time (adding 5 back to N) until N divides evenly by 3
# N += 5
#
# if(N == 0):
# print(three + five)
# else:
# print(-1)
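# An illustrative brute-force cross-check of the greedy reasoning above (added
# for clarity, not part of the original submission): try every possible count
# of 5s and return the first split whose remainder is divisible by 3.
# def min_bags(n):
#     for fives in range(n // 5, -1, -1):
#         rest = n - 5 * fives
#         if rest % 3 == 0:
#             return fives + rest // 3
#     return -1
# e.g. min_bags(18) == 4 (three 5s and one 3), min_bags(4) == -1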
|
import random
from math import log, sqrt, pow
from statistics import stdev
from python_charts import draw_histogram
def get_random():
x = random.uniform(-1, 1)
y = random.uniform(-1, 1)
while sqrt(get_exp((x, y))) >= 1 or x + y == 0.0:
x = random.uniform(-1, 1)
y = random.uniform(-1, 1)
return x, y
def get_exp(point):
return pow(point[0], 2) + pow(point[1], 2)
def get_new_r():
v = get_random()
return get_exp(v)
def get_points():
v = get_random()
r2 = get_exp(v)
return get_y(v[0], r2), get_y(v[1], r2)
def get_y(v, r2):
x = log(r2) * (-2.0)
y = sqrt(x / r2)
return v * y
def normalize(values):
average = sum(values) / len(values)
variance = sqrt(
sum(
[pow(val - average, 2) for val in values]
)
)
return [(val - average) / variance for val in values]
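# The functions above implement the Marsaglia polar method for drawing standard
# normal samples. A small sanity check (illustrative only, not part of the
# original script): the sample mean should be near 0 and the sample standard
# deviation near 1.
def sample_stats(n=10000):
    samples = [value for pair in (get_points() for _ in range(n)) for value in pair]
    return sum(samples) / len(samples), stdev(samples)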
def main():
total_repetitions = 1000000
data = [get_points() for point in range(total_repetitions)]
data_merged = [item for y_tuple in data for item in y_tuple]
draw_histogram(data_merged)
if __name__ == "__main__":
main()
|
"""
The flask application package.
"""
from flask import Flask, render_template
app = Flask(__name__)
app.secret_key = 'mytravelapp'
# Make the WSGI interface available at the top level so wfastcgi can get it.
wsgi_app = app.wsgi_app
# Configurations
app.config.from_object('settings')
# Define the database object which is imported
# by modules and controllers
from Travel.models.Repository import Repository
repo = Repository(app.config['DATABASE_URI'])
from Travel.models.User import User
app_user = User('','','','',0)
# Sample HTTP error handling
@app.errorhandler(404)
def not_found(error):
return 'Could not find what you were looking for', 404
from Travel.views import *
|
from flask import Blueprint, jsonify, render_template, Flask, url_for
import procesos.bancolombia_castigada as bancolombia_castigada
import os
# my_resourses = os.path.join('static','images')
sercice_uis_api = Blueprint('sercice_uis_api', __name__, static_folder='static',template_folder='templates')
myApp = Flask(__name__)
# myApp.config['my_resourses'] = my_resourses
@myApp.route("/")
@sercice_uis_api.route("/help_masivos_bancolombia_castigada")
def help_masivos_bancolombia_castigada():
# return "listo listo"
return render_template('index_help_masivos.html')
|
from __future__ import with_statement
import os
from fabric.api import *
from fabric.contrib.console import confirm
# PROJECT_PATH = '/var/www/freesprache'
env.passwords = {
'Yoomsoft@yoomsoft.oicp.net:22': 'pinux@911', # Yoomsoft_Office
}
env.hosts = [
'Yoomsoft@yoomsoft.oicp.net', # Yoomsoft_Office
]
def deploy():
# code_dir = '/Users/thomaspan/www/freesprache'
code_dir = '/home/Yoomsoft/Code/freesprache'
# with cd(PROJECT_PATH):
# with settings(warn_only=True):
# if run("test -d htdocs").failed:
# run("mkdir htdocs")
#
# with cd(RESOURCE_PATH):
# run("wget yoomsoft.oicp.net/static-smp.rar")
# run("rar x static-smp.rar")
# run("rm -rf static-smp.rar")
with settings(warn_only=True):
if local("test -d %s" % code_dir).failed:
# run("git clone user@vcshost:/path/to/repo/.git %s" % code_dir)
run("git clone https://github.com/thomaspan-ym/freesprache.git %s" % code_dir)
with lcd(code_dir):
run("git pull")
run("touch app.wsgi")
# with settings(warn_only=True):
# if run("test -d %s" % PROJECT_PATH).failed:
# run("svn checkout %s %s --username Thomas --password pinux@911" % (SVN_URL, SVN_PATH))
#
# with cd(PROJECT_PATH):
# # run("svn switch %s" % SVN_URL)
# run("svn update -r HEAD --force")
|
# -*- coding: utf-8 -*-
from collections import deque
class Solution:
def validateStackSequences(self, pushed, popped):
i, j, stack = 0, 0, deque()
while i < len(pushed) or j < len(popped):
if j < len(popped) and stack and stack[-1] == popped[j]:
stack.pop()
j += 1
elif i < len(pushed):
stack.append(pushed[i])
i += 1
else:
return False
return i == len(pushed) and j == len(popped)
if __name__ == "__main__":
solution = Solution()
assert solution.validateStackSequences([1, 2, 3, 4, 5], [4, 5, 3, 2, 1])
assert not solution.validateStackSequences([1, 2, 3, 4, 5], [4, 3, 5, 1, 2])
|
from django.contrib import admin
from django.urls import path,include
from .views import *
from django.views.generic.base import RedirectView
urlpatterns = [
#including path of questionbank
path('',include('questionbank.urls')),
path('',indexPage.as_view(),name="indexPage"),
#redirecting to admin using RedirectView
path('admin/', RedirectView.as_view(),name="admin"),
path('registration',userReg.as_view(),name="RegisterPage"),
path('login',loginPage.as_view(),name="Loginpage"),
path('logout',logout.as_view(),name="LogoutPage"),
path('back',back.as_view(),name='back'),
]
|
from django.db import models
# Create your models here.
class Med(models.Model):
Medicine_batch_no = models.IntegerField()
Medicine_name = models.CharField(null=True,max_length=50)
Medicine_Company = models.CharField(null=True,max_length=50)
Medical_quantity = models.IntegerField()
med_purchase_date = models.DateField()
Med_type = models.CharField(null=True,max_length=50)
Med_price = models.IntegerField()
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
def filter_list(list,minLengthSize):
"""Una funcion para filtrar las palabras de una lista que no tengan mas de 'n' caracteres"""
result=[]
for att in list[:]:
if len(att)>minLengthSize:
result.append(att)
return result
def parse_listOfObjects_to_listOfStrings(list):
"""Una funcion para convertir una lista de objectos en una lista de strings"""
result=[]
for item in list[:]:
result.append(str(item))
return result
def get_valid_integer(inputValue):
try:
return int(inputValue)
except ValueError as e:
print("El valor para el numero maximo de caracteres soportados para el filtrado debe ser un numero")
print(e)
return -1
messageList="Proporcione la lista de la cual se busca encontrar la cadena mas larga, debe estar en el formato: palabra1,palabra2,palabra3...palabraN; ejemplo: unapalabra,otrapalabra,aquellapalabra,maspalabras - "
messageLength="Proporciona la longitud que debe sobrepasar una palabra para evitar ser filtrada - "
listAsString=input(messageList)
minLengthSize=get_valid_integer(input(messageLength))
if len(listAsString)<=0:
print("No se ha proporcionado una lista")
elif minLengthSize<=-1:
print("No se ha proporcionado un numero valido para el filtrado de palabras")
else:
listOfStrings=listAsString.split(',')
list=parse_listOfObjects_to_listOfStrings(listOfStrings)
if len(list)>0:
print("Las palabras que cuentan con mas de: ", minLengthSize, "caracteres son: ",filter_list(list,minLengthSize))
|
import dump
import gevent
from itm import ITM, UCWrapper
from collections import defaultdict
from numpy.polynomial.polynomial import Polynomial
import logging
log = logging.getLogger(__name__)
class Async_FWrapper(UCWrapper):
def __init__(self, channels, pump, poly, importargs):
self.curr_round = 1
self.delay = 0
self.leaks = []
self.todo = []
self.pump = pump
# TODO keep the round here until something happens
# alternate theory: the round won't change unless something exists todo
# in future rounds
#self.adv_callme(self.curr_round)
self.total_queue_ever = 0
UCWrapper.__init__(self, 'wrap', 'me', channels, poly, importargs)
def party_clock_round(self, sender):
self.write( 'w2p', (sender, self.curr_round))
def print_todo(self):
p_dict = [(f.__name__,args) for f,args in self.todo]
print('\n\033[1m', str(p_dict), '\033[0m\n')
def fschedule(self, sender, f, args, imp):
log.debug('\033[1mFschedule\033[0m import: {}, sender: {}'.format(imp, sender))
# add to the runqueue
self.todo.append( (f,args) )
self.total_queue_ever += 1
log.debug('total_queue_ever: {}'.format(self.total_queue_ever))
# leaks the schedule
idx = len(self.todo)-1
self.leaks.append( (sender, ('schedule', idx, f.__name__), 0) )
self.print_todo()
# add to the delay and return control to sender
self.delay += 1
self.write('w2f', (sender, ('OK',)) )
def pschedule(self, sender, f, args):
log.debug('\033[1mPschedule\033[0m {}'.format(sender))
# add to runqueue
self.todo.append( (f,args) )
self.total_queue_ever += 1
log.debug('total_queue_ever: {}'.format(self.total_queue_ever))
# leak the schedule
idx = len(self.todo)-1
self.leaks.append( (sender, ('schedule', idx, f.__name__), 0) )
# add to delay and return control to sender
self.delay += 1
self.write('w2p', (sender, ('OK',)) )
def adv_delay(self, t, imp):
self.assertimp(imp, t)
self.delay += t
self.write('w2a', "OK" )
def adv_execute(self, i):
self.print_todo()
print('i', i)
f,args = self.todo.pop(i)
self.print_todo()
f(*args)
def leak(self, sender, msg, imp):
log.debug("Leaking information, sender={}, msg={}".format(sender, msg))
self.leaks.append( (sender, msg, imp) )
def poll(self, imp):
self.assertimp(imp, 1)
if self.delay > 0:
self.delay -= 1
self.write('w2a', ('poll',) )
else:
if len(self.todo): self.adv_execute(0)
else: self.pump.write("dump")
def env_msg(self, d):
msg = d.msg
imp = d.imp
if msg[0] == 'poll':
self.poll(imp)
else:
self.pump.write("dump")
def func_msg(self, d):
msg = d.msg
imp = d.imp
sender,msg = msg
if msg[0] == 'schedule':
self.fschedule(sender, msg[1], msg[2], imp)
elif msg[0] == 'leak':
self.leak(sender, msg[1], imp)
else:
self.pump.write("dump")
# TODO revisit this to see if adversary can delay callme actions
def party_callme(self):
self.todo.append( (lambda: self.write('w2a', ('shoutout',)), ()) )
self.write('w2p', ('OK',) )
def party_msg(self, d):
msg = d.msg
imp = d.imp
sender,msg = msg
if msg[0] == 'schedule':
self.pschedule(sender, msg[1], msg[2])
elif msg[0] == 'callme':
self.party_callme()
elif msg[0] == 'leak':
self.leak(sender, msg, imp)
else:
self.pump.write("dump")
def adv_callme(self):
self.todo.append( (lambda: self.write('w2a', ('shoutout',)), ()) )
self.write('w2a', ('OK',) )
def adv_get_leaks(self):
total_import = 0
output = []
for leak in self.leaks:
sender,msg,imp = leak
total_import += imp
output.append( (sender, msg, imp) )
self.write( 'w2a', output, total_import )
self.leaks = []
def adv_msg(self, d):
msg = d.msg
imp = d.imp
#print('msg', msg)
if msg[0] == 'delay':
self.adv_delay(msg[1], imp)
elif msg[0] == 'exec':
self.adv_execute(msg[1])
elif msg[0] == 'callme':
self.adv_callme()
elif msg[0] == 'get-leaks':
self.adv_get_leaks()
else:
self.pump.write("dump")
|
# PART ONE
number_steps = 0
def step_with_offset(i):
step = step_list[i]
next_i = i + step
step_list[i] = step + 1
return next_i
with open('input.txt') as input_file:
step_list = list(map(int, input_file.read().split()))
i = 0
while i < len(step_list):
i = step_with_offset(i)
number_steps += 1
print('Part One: {}'.format(number_steps))
# PART TWO
number_steps = 0
def step_with_offset(i):
step = step_list[i]
next_i = i + step
if step >= 3:
step_list[i] = step - 1
else:
step_list[i] = step + 1
return next_i
with open('input.txt') as input_file:
step_list = list(map(int, input_file.read().split()))
i = 0
while i < len(step_list):
i = step_with_offset(i)
number_steps += 1
print('Part Two: {}'.format(number_steps))
|
import sys
import click
from os.path import dirname, abspath, isdir
from utils.functions import (
create_readme,
create_output_directory,
get_files,
get_optional_props,
get_props_dict,
get_props_list,
get_props_match,
get_required_props,
read_tsx_file
)
def create_tsx_file_readme(path):
tsx_file_content = read_tsx_file(path)
# Retrieve regex match of all props variables in component content
tsx_variable_block = get_props_match(tsx_file_content)
if tsx_variable_block is None:
print(f'no props in file: {path}')
return
props_list = get_props_list(tsx_variable_block)
props_dict = get_props_dict(props_list)
optional_props = get_optional_props(props_dict)
required_props = get_required_props(props_dict)
create_readme(required_props, optional_props, path)
def create_readmes_from_directory(path):
input_file_type = 'tsx'
raw_tsx_files = get_files(path, input_file_type)
tsx_files = list(filter(lambda x: 'stories' not in x, raw_tsx_files))
for tsx_file in tsx_files:
create_tsx_file_readme(tsx_file)
def take_input_directory(input_directory):
print(input_directory)
if not isdir(input_directory):
print('Not a directory. Use -f flag for a file.')
return
create_readmes_from_directory(input_directory)
def take_input_file(input_file):
if isdir(input_file):
print('Not a file. Use -d flag for a directory.')
return
create_tsx_file_readme(input_file)
@click.command()
@click.option('-f', '--file', 'input_file', type=click.Path(exists=True), help='Individual file')
@click.option('-d', '--directory', 'input_directory', type=click.Path(exists=True), help='Directory path')
def user_input(input_file, input_directory):
create_output_directory()
if input_file:
take_input_file(input_file)
return
if input_directory:
take_input_directory(input_directory)
return
create_readmes_from_directory('./input')
if __name__ == "__main__":
# pylint: disable=no-value-for-parameter
user_input()
|