text stringlengths 8 6.05M |
|---|
# Copyright 2016 Husky Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import cPickle
from pyhusky.common.operation import Operation, OperationParam
from pyhusky.common.serializers import Serializer, PickleSerializer
from pyhusky.frontend import config
from pyhusky.frontend import session
from pyhusky.frontend.datareceiver import Receiver
from pyhusky.frontend.huskylist import PyHuskyList, HDFS, MongoDB
# three types of lists:
# HuskyList, PyHuskyList, HuskyObjList
def pyhusky_start(master_host=None, master_port=None, params=None):
    """Initialise the PyHusky frontend: configuration, receiver, serializer.

    Args:
        master_host: host name/address of the Husky master.
        master_port: port of the Husky master.
        params: optional dict of extra configuration parameters.
    """
    # BUG FIX: `params={}` was a shared mutable default; build a fresh
    # dict per call instead.
    if params is None:
        params = {}
    # Create the global configuration object.
    config.conf = config.Config(master_host, master_port, params)
    # Register receiver
    Receiver.register()
    # Set serializer {MarshalSerializer(), PickleSerializer(), AutoSerializer(), CompressedSerializer(PickleSerializer())}
    Serializer.serializer = PickleSerializer()
def pyhusky_stop():
    """Shut down the PyHusky frontend: drop the config, end the session."""
    config.conf = None
    session.end_session()
def load(path):
    """Build a PyHuskyList backed by the file(s) at `path`.

    The path prefix selects the storage protocol: ``nfs:`` or ``hdfs``.
    Any other prefix is rejected.
    """
    # In this case the list represents a list of std::string
    result = PyHuskyList()
    params = {
        "Type": "cpp",
        "Path": path,
        OperationParam.list_str: result.list_name
    }
    # Map the path prefix onto a protocol tag.
    for prefix, protocol in (("nfs:", "nfs"), ("hdfs", "hdfs")):
        if path.startswith(prefix):
            params["Protocol"] = protocol
            break
    else:
        raise Exception("ERROR: Cannot resolve the protocol of the load path")
    result.pending_op = Operation("Functional#load_py", params, [])
    return result
def hdfs(host=None, port=None):
    """Return an HDFS source bound to ``host``:``port``.

    Raises:
        ValueError: if host or port is omitted.  (The original used
        ``assert``, which is silently stripped under ``python -O``.)
    """
    if host is None or port is None:
        raise ValueError("hdfs() requires both host and port")
    return HDFS(host, port)
def mongodb(host=None, port=None):
    """Return a MongoDB source bound to ``host``:``port``.

    Raises:
        ValueError: if host or port is omitted.  (The original used
        ``assert``, which is silently stripped under ``python -O``.)
    """
    if host is None or port is None:
        raise ValueError("mongodb() requires both host and port")
    return MongoDB(host, port)
def parallelize(data):
    """Ship a local Python list to Husky as a new PyHuskyList.

    Anything other than a plain ``list`` yields ``NotImplemented``.
    """
    if not isinstance(data, list):
        return NotImplemented
    result = PyHuskyList()
    payload = cPickle.dumps(data)
    params = {
        OperationParam.data_str: payload,
        OperationParam.list_str: result.list_name,
    }
    result.pending_op = Operation("Functional#parallelize_py", params, [])
    return result
|
import requests

# Send a one-off SMS through the free textbelt gateway and print the
# JSON status object the service returns.
payload = {
    'phone': 'phoneno.',
    'message': 'Follow coderart__',
    'key': 'textbelt',
}
resp = requests.post('https://textbelt.com/text', payload)
print(resp.json())
|
def add(x, y):
    """This function adds 2 numbers."""
    # BUG FIX: the original read `def add(x,y):ins` — the stray `ins`
    # token made the def a broken one-liner and the body unreachable.
    return x + y
def subtract(x, y):
    """Return x minus y."""
    difference = x - y
    return difference
def multiply(x, y):
    """Return the product of x and y."""
    product = x * y
    return product
def divide(x, y):
    """Return x divided by y (true division; raises ZeroDivisionError for y == 0)."""
    quotient = x / y
    return quotient
def power(x, y):
    """Return x raised to the power y."""
    return x ** y
# --- interactive calculator driver ---
#take input from user
print("Select Operation")
print("1.Add")
print("2.Subtract")
print("3.Multiply")
print("4.Divide")
print("5.Power")
# `choice` stays a string; the branches below compare string literals.
choice=input("Enter Choice 1/2/3/4/5:")
num1= int(input("Enter the first number:"))
num2= int(input("Enter the second number:"))
#Printing Output Based On Input
if choice =='1':
    print (num1,"+", num2,"=", add(num1,num2))
elif choice == '2':
    print(num1,"-", num2,"=", subtract(num1,num2))
elif choice == '3':
    print(num1,"x", num2,"=", multiply(num1,num2))
elif choice == '4':
    print(num1,"/", num2,"=", divide(num1,num2))
elif choice == '5':
    print(num1,"^", num2,"=", power(num1,num2))
else:
    # Anything outside '1'..'5' falls through here.
    print ("Invalid Input")
|
def authenticate(username, password):
    """Check a username/password pair against the in-memory user table.

    NOTE(security): credentials are hard-coded in plaintext — acceptable
    only for a demo, never for production.
    """
    users = {"dillonzhang": "password"}
    if username not in users:
        return False
    return users[username] == password
|
from django.apps import AppConfig


class DeckshareConfig(AppConfig):
    """Django application configuration for the `deckShare` app."""
    name = 'deckShare'
|
#!/usr/bin/env python3
import os
# Print every word of the local 'loremipsum' file that is longer than
# four characters.
with open('loremipsum', encoding='utf-8') as source:
    tokens = source.read().split()
for token in tokens:
    if len(token) > 4:
        print(token)
|
"""
4. Написать программу, в которой реализовать две функции.
В первой должен создаваться простой текстовый файл.
Если файл с таким именем уже существует, выводим соответствующее сообщение.
Необходимо открыть файл и подготовить два списка: с текстовой и числовой информацией.
Для создания списков использовать генераторы. Применить к спискам функцию zip().
Результат выполнения этой функции должен быть обработан и записан в файл таким образом,
чтобы каждая строка файла содержала текстовое и числовое значение.
Вызвать вторую функцию. В нее должна передаваться ссылка на созданный файл.
Во второй функции необходимо реализовать открытие файла и простой построчный вывод содержимого.
Вся программа должна запускаться по вызову первой функции.
"""
import os
import random
import string
LENGTH = 10
def get_random_char():
    """Return one uniformly chosen lowercase ASCII letter."""
    alphabet = string.ascii_lowercase
    return alphabet[random.randrange(len(alphabet))]
def get_random_string(length):
    """Return a string of `length` uniformly random lowercase letters."""
    # The per-character helper is inlined here; the distribution is the
    # same uniform choice over ascii_lowercase.
    return ''.join(random.choice(string.ascii_lowercase) for _ in range(length))
def create_text_file(name):
    """Create `name` and fill it with LENGTH lines of "<number> <text>".

    Returns False if a file called `name` already exists, otherwise the
    file object.  NOTE: the handle is already closed when returned (the
    `with` block exits first); callers only use its ``.name`` attribute.
    """
    if os.path.isfile(name):
        # Message: "A file with this name already exists"
        print('Файл с таким именем уже существует')
        return False
    with open(name, 'w', encoding='utf-8') as file:
        # Generator-built lists, paired up with zip() per the exercise text.
        numbers = [random.randint(0, 100) for _ in range(LENGTH)]
        strings = [get_random_string(LENGTH) for _ in range(LENGTH)]
        file.writelines([f'{number} {text}\n' for number, text in zip(numbers, strings)])
    return file
def print_text_file(desc):
    """Print, line by line, the contents of the file `desc` refers to."""
    with open(desc.name, 'r', encoding='utf-8') as handle:
        for row in handle:
            print(row)
# Entry point: create the file, then echo it back if creation succeeded
# (create_text_file returns False when the file already exists).
descriptor = create_text_file('new_file_task4.txt')
if descriptor:
    print_text_file(descriptor)
|
# Generated by Django 2.2.11 on 2020-06-07 20:36
import ckeditor_uploader.fields
from django.db import migrations, models
import django.db.models.deletion
import polotenca.models
class Migration(migrations.Migration):
    """Initial auto-generated schema for the `polotenca` (towels) app.

    Creates the catalogue models (Brend, FillerWeight, Polotenca, Size,
    Tkan, Type, PolotencaImage) and wires Polotenca's lookup ForeignKeys.
    Auto-generated by Django — schema changes belong in new migrations,
    not edits here.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        # Manufacturer lookup table.
        migrations.CreateModel(
            name='Brend',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(blank=True, default=None, max_length=120, null=True, unique=True)),
                ('slug', models.SlugField(blank=True, default=None, null=True, verbose_name='Транслит')),
            ],
            options={
                'verbose_name': 'Производитель',
                'verbose_name_plural': 'Производитель',
            },
        ),
        # Fabric-density lookup table.
        migrations.CreateModel(
            name='FillerWeight',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(blank=True, default=None, max_length=120, null=True, unique=True)),
                ('slug', models.SlugField(blank=True, default=None, null=True, verbose_name='Транслит')),
            ],
            options={
                'verbose_name': 'Плотность',
                'verbose_name_plural': 'Плотность',
            },
        ),
        # Main product model.
        migrations.CreateModel(
            name='Polotenca',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(blank=True, max_length=120, null=True, unique=True, verbose_name='Название')),
                ('slug', models.SlugField(blank=True, default=None, null=True, verbose_name='Транслит(Не трогать)')),
                ('key_words', models.CharField(blank=True, max_length=120, null=True, verbose_name='Ключи')),
                ('image', models.ImageField(blank=True, default=None, null=True, upload_to=polotenca.models.image_folder, verbose_name='Фотка')),
                ('image_link', models.CharField(blank=True, max_length=120, null=True, verbose_name='Фотка ссылка')),
                ('price', models.DecimalField(decimal_places=2, default=0, max_digits=10, verbose_name='Цена ')),
                ('price_old', models.DecimalField(decimal_places=2, default=0, max_digits=10, verbose_name='Старая цена ')),
                ('description', ckeditor_uploader.fields.RichTextUploadingField(blank=True, default=None, null=True, verbose_name='Текст')),
                ('description_short', ckeditor_uploader.fields.RichTextUploadingField(blank=True, default=None, null=True, verbose_name='Текст(короткий)')),
                ('discount', models.IntegerField(default=0, verbose_name='Скидка')),
                ('is_active', models.BooleanField(default=True, verbose_name='В наличии')),
                ('new_product', models.BooleanField(default=False, verbose_name='Новинка')),
                ('top', models.BooleanField(default=False, verbose_name='В топе(на гл.странице)')),
                ('slider', models.BooleanField(default=False, verbose_name='Слайдер(на гл.странице)')),
                ('comments', models.TextField(blank=True, default=None, null=True)),
                ('created', models.DateTimeField(auto_now_add=True, verbose_name='Дата создания')),
                ('updated', models.DateTimeField(auto_now=True, verbose_name='Дата последнего обновления')),
                # NOTE: FK targets the unique `name` column, not the pk.
                ('brend', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='polotenca.Brend', to_field='name', verbose_name='Бренд')),
            ],
            options={
                'verbose_name': 'Полотенца',
                'verbose_name_plural': 'Полотенца',
            },
        ),
        # Size lookup table.
        migrations.CreateModel(
            name='Size',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(blank=True, default=None, max_length=120, null=True, unique=True)),
                ('slug', models.SlugField(blank=True, default=None, null=True, verbose_name='Транслит')),
            ],
            options={
                'verbose_name': 'Размер',
                'verbose_name_plural': 'Размер',
            },
        ),
        # Fabric/material lookup table.
        migrations.CreateModel(
            name='Tkan',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(blank=True, default=None, max_length=120, null=True, unique=True)),
                ('slug', models.SlugField(blank=True, default=None, null=True, verbose_name='Транслит')),
            ],
            options={
                'verbose_name': 'Матерриал',
                'verbose_name_plural': 'Матерриал',
            },
        ),
        # Purpose-type lookup table.
        migrations.CreateModel(
            name='Type',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(blank=True, default=None, max_length=120, null=True, unique=True)),
                ('slug', models.SlugField(blank=True, default=None, null=True, verbose_name='Транслит')),
            ],
            options={
                'verbose_name': 'Тип назначения',
                'verbose_name_plural': 'Тип назначения',
            },
        ),
        # Gallery images attached to a product.
        migrations.CreateModel(
            name='PolotencaImage',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('image', models.ImageField(blank=True, default=None, null=True, upload_to=polotenca.models.image_gallary_folder)),
                ('slug', models.SlugField(blank=True, default=None, null=True, verbose_name='Транслит')),
                ('is_main', models.BooleanField(default=False)),
                ('is_active', models.BooleanField(default=True)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('product', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='polotenca.Polotenca')),
            ],
        ),
        # Remaining lookup FKs on Polotenca; all reference the unique
        # `name` column via to_field.
        migrations.AddField(
            model_name='polotenca',
            name='consist',
            field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='polotenca.Tkan', to_field='name', verbose_name='Состав'),
        ),
        migrations.AddField(
            model_name='polotenca',
            name='filler_weight',
            field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='polotenca.FillerWeight', to_field='name', verbose_name='Плотность'),
        ),
        migrations.AddField(
            model_name='polotenca',
            name='size',
            field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='polotenca.Size', to_field='name', verbose_name='Размер'),
        ),
        migrations.AddField(
            model_name='polotenca',
            name='type',
            field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='polotenca.Type', to_field='name', verbose_name='Тип'),
        ),
    ]
|
# Generated by Django 3.1 on 2020-10-21 18:29
from django.db import migrations, models
import django.db.models.deletion
import phonenumber_field.modelfields
class Migration(migrations.Migration):
    """Initial auto-generated schema for the `rush` app.

    Creates Fraternity, Student (keyed by university PUID) and Event
    (many-to-many attendees, hosted by one Fraternity).
    """

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Fraternity',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=200)),
                ('email', models.EmailField(max_length=254, unique=True)),
                # NOTE(review): password stored as a plain CharField — no
                # hashing is visible at this layer; confirm it is hashed
                # before save elsewhere.
                ('password', models.CharField(max_length=100)),
            ],
        ),
        migrations.CreateModel(
            name='Student',
            fields=[
                # University PUID doubles as the primary key.
                ('puid', models.PositiveIntegerField(primary_key=True, serialize=False)),
                ('name', models.CharField(max_length=200)),
                ('email', models.EmailField(blank=True, max_length=254)),
                ('phone', phonenumber_field.modelfields.PhoneNumberField(blank=True, max_length=128, region=None)),
                ('major', models.CharField(blank=True, max_length=200)),
                ('gpa', models.DecimalField(decimal_places=2, max_digits=3, null=True)),
                ('hometown', models.CharField(blank=True, max_length=200)),
                ('grade', models.CharField(blank=True, choices=[('FR', 'Freshman'), ('SO', 'Sophomore'), ('JR', 'Junior'), ('SR', 'Senior & Above')], max_length=2)),
            ],
        ),
        migrations.CreateModel(
            name='Event',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=200)),
                ('date', models.DateField(db_index=True)),
                ('attendees', models.ManyToManyField(to='rush.Student')),
                ('host', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rush.fraternity')),
            ],
        ),
    ]
|
__author__ = 'hori'
# -*- coding: utf-8 -*-
# モジュールのimport
from bottle import route, get, post, run, template, request
# Static route
@route('/hello')
def hello():
    """Render a fixed greeting from an inline template."""
    return template('Hello {{string}}', string='World')
# Dynamic route: <name> is captured from the URL path
@route('/greeting/<name>')
def greeting(name):
    """Greet the caller by the name captured from the path."""
    return template('Hello {{name}}.', name=name)
@route('/show_header')
def show_header():
    """Render every incoming request header as an HTML paragraph."""
    headers_list = ["<p> %s = %s </p>" % (k, v) for k, v in request.headers.items()]
    return "".join(headers_list)
@route('/show_cookie')
def show_cookie():
    """Display the value of the 'count' cookie (None when absent)."""
    count = request.cookies.get('count')
    return template('count={{count}}', count=count)
@get('/show_query')
def show_query():
    """Echo the ?keyword= query parameter back to the client."""
    keyword = request.query.keyword
    return template('keyword={{keyword}}', keyword=keyword)
@post('/show_form')
def show_form():
    """Echo the 'name' field of a POSTed form back to the client."""
    name = request.forms.get('name')
    return template('name={{name}}', name=name)
# Start the built-in development server (auto-reload + debug enabled).
if __name__ == '__main__':
    run(host='0.0.0.0', port='8080', debug=True, reloader=True)
|
#!/usr/bin/python
#encoding:utf-8
import urllib
import os
def Schedule(a,b,c):
    '''urlretrieve progress hook (prints percent complete).

    a: number of blocks downloaded so far
    b: size of one block, in bytes
    c: total size of the remote file, in bytes
    '''
    per = 100.0 * a * b / c
    # The last block can overshoot the total; clamp to 100%.
    if per > 100 :
        per = 100
    print '%.2f%%' % per
# Download the Python 2.7.12 Windows installer to D:\ with the progress
# hook above.  (Python 2 only: urllib.urlretrieve.)
a='2.7.12'
url = 'https://www.python.org/ftp/python/'+str(a)+'/python-2.7.12.msi'
#local = url.split('/')[-1]
local = os.path.join('d:/','Python-2.7.12.msi')
urllib.urlretrieve(url,local,Schedule)
|
from idc import BADADDR, INF_BASEADDR, SEARCH_DOWN, FUNCATTR_START, FUNCATTR_END
import idc
import idaapi
import datetime
from OffsetGenerator import *
from AddressGenerator import *
import AddressGenerator
j_CReplInfo32__AddVar = 0x1F75E0
CReplInfo32_AddVar = 0x2056A0
IgnoreOffsetList = ["mReplicatedSpellCanCastBitsUpper1", "mMaxMP", "mMP", "mReplicatedSpellCanCastBitsLower1", "ReplicatedSecondWordSpellCanCastBitsLower1"]
def CreateAddresses():
    """Run the AddressGenerator pass over the open IDA database."""
    GenerateAddresses()
def CreateCharDataOffsets():
    """Print replicated character-data offsets as a C++ class skeleton.

    Walks both AddVar routines to populate the offset table, then emits
    one typed member per offset, inserting byte-array padding members
    where offsets are not contiguous.
    """
    GenerateList(j_CReplInfo32__AddVar)
    GenerateList(CReplInfo32_AddVar)
    # `Offsets` and `OrderedDict` come from the star imports above —
    # presumably OffsetGenerator; verify.
    temp = OrderedDict(sorted(Offsets.items(), key=lambda t: t[1]))
    IntegerOffsets = ["mEvolveFlag", "mEvolvePoints", "mNumNeutralMinionsKilled", "mInputLocks", "mHealthBarCharacterIDForIcon", "mSARState", "mPARState", "mNetworkId", "mTeam"]
    BoolOffsets = ["mIsDead", "isVisible", "mPAREnabled", "mSAREnabled", "mEmpoweredBitField", "mIsUntargetableToEnemies", "mIsUntargetableToAllies", "mIsTargetable", "mIsTargetableToTeamFlags", "mSkillUpLevelDeltaReplicate"]
    print("class object ")
    print("{")
    print("public:")
    lastValue = 0
    counter = 0
    for key,val in temp.items():
        if key not in IgnoreOffsetList:
            # Pad with an unsigned-char array when there is a gap between
            # the previous emitted offset and this one.
            padAmount = val - lastValue
            if padAmount > 4:
                print("\t" + "unsigned char pad_{}".format(counter) + "[0x" + "%0.2X" % padAmount + "]")
                counter = counter + 1
            # Member type chosen from the known int/bool lists, else float.
            if key in IntegerOffsets:
                print("\t" + "int " + key + " 0x" + "%0.2X " % val)
            elif key in BoolOffsets:
                print("\t" + "bool " + key + " 0x" + "%0.2X " % val)
            else:
                print("\t" + "float " + key + " 0x" + "%0.2X " % val)
            lastValue = val
def Initialize():
    """Script entry point: emit the generated C++ offset class."""
    #CreateAddresses()
    CreateCharDataOffsets()

# TODO: use a keypress trigger, combine with SigSearchMake, and split
# these into reusable modules (one main script, the rest as modules).
Initialize()
from flask import Flask, render_template, session, redirect, url_for, request
app = Flask(__name__)
@app.route("/about")
def about():
    """Render the static about page."""
    return render_template("about.html")
@app.route("/hello")
def hello():
    """Render the static hello page."""
    return render_template("hello.html")
# Both "/" and "/home" serve the same page.
@app.route("/home")
@app.route("/")
def home():
    """Render the home page."""
    return render_template("home.html")
@app.route("/name")
@app.route("/name/<lastname>/<firstname>")
def name(lastname = "", firstname = ""):
    """Render name.html with the last/first name captured from the URL
    (both default to empty strings on the bare /name route)."""
    return render_template(
        "name.html",
        dic={'lastname': lastname, 'firstname': firstname},
    )
@app.route("/reset")
def reset():
    """Zero the session counter 'n' and bounce back to the home page."""
    session['n'] = 0
    return redirect("/home")
    #return redirect(url_for("int))
@app.route("/login", methods = ["GET", "POST"])
def login():
    """Serve the login form on GET; placeholder response for POST."""
    if request.method != "GET":
        return "not GET"
    return render_template("login.html")
# Run the development server; the secret key enables session cookies.
if __name__ == "__main__":
    app.debug = True
    app.secret_key = "ping"
    app.run(port = 8000)
|
#!/usr/bin/python
import numpy as np
from math import ceil, floor, sqrt
import random
import matplotlib.pyplot as plt
import sys
from scipy.cluster.vq import kmeans2, ClusterError
from points import generate_random_points, generate_clustered_points, dataset_generator
from timeit import default_timer
import datetime
NUM_POINTS = 2000
NUM_CLUSTERS = 10
K_CONST = int(ceil(sqrt(NUM_POINTS/2)))
# K_CONST = 5
NUM_ITER = 100
num_iter_counter = 0
MINIT = 'points'
def cluster_points(X, mu):
    """Assign every point in X to its nearest center in mu.

    Returns a dict mapping center index -> list of member points.
    Ties go to the lowest center index (min keeps the first minimum).
    """
    clusters = {}
    for point in X:
        distances = [(idx, np.linalg.norm(point - center))
                     for idx, center in enumerate(mu)]
        nearest = min(distances, key=lambda pair: pair[1])[0]
        clusters.setdefault(nearest, []).append(point)
    return clusters
def reevaluate_centers(mu, clusters):
    """Return each cluster's centroid, in ascending key order.

    `mu` (the previous centers) is unused but kept for interface parity.
    """
    return [np.mean(clusters[key], axis=0) for key in sorted(clusters.keys())]
def has_converged(mu, oldmu):
    """True when both center lists contain the same points (compared as sets of tuples)."""
    return {tuple(center) for center in mu} == {tuple(center) for center in oldmu}
def find_centers(X, K):
    """Lloyd's k-means: iterate assign/re-center until convergence.

    Iteration count is capped by NUM_ITER via the module-global
    `num_iter_counter`, which is shared across calls and never reset here.
    Returns (centers, clusters).
    NOTE(review): if the two random initialisations happen to compare
    equal, the loop never runs and `clusters` is unbound at the return —
    confirm whether that case can occur in practice.
    """
    global num_iter_counter
    # Initialize to K random centers
    oldmu = random.sample(X, K)
    mu = random.sample(X, K)
    while not(has_converged(mu, oldmu)) and num_iter_counter < NUM_ITER:
        oldmu = mu
        # Assign all points in X to clusters
        clusters = cluster_points(X, mu)
        # Reevaluate centers
        mu = reevaluate_centers(oldmu, clusters)
        num_iter_counter = num_iter_counter + 1
    return(mu, clusters)
def init_board(N):
    """Return the working point set.

    NOTE(review): `N` is ignored — the generator call hard-codes
    NUM_POINTS; confirm whether N should be forwarded instead.
    """
    # X = np.array([(random.uniform(-1, 1), random.uniform(-1, 1)) for i in range(N)])
    X = generate_clustered_points(0, 420, 0, 280, NUM_POINTS, 30, 40, NUM_CLUSTERS)
    return X
def plot_points(centers, clusters):
    """Scatter-plot each cluster in a random colour, centers on top.

    NOTE: `clusters.iteritems()` is Python 2 only — consistent with the
    print statements used elsewhere in this module.
    """
    #generate random color sequence for clusters'
    # colors = np.random.rand(K_CONST)
    colors = ["#%06x" % random.randint(0, 0xFFFFFF) for _ in range(K_CONST)]
    #process points for plotting, add points to scatter, and show plot
    center_xs, center_ys = zip(*centers)
    for key, value in clusters.iteritems():
        color = colors[key]
        xs_to_plot = []
        ys_to_plot = []
        for (x, y) in value:
            xs_to_plot.append(x)
            ys_to_plot.append(y)
        plt.scatter(xs_to_plot, ys_to_plot, c=[color]*len(xs_to_plot), s=20)
    plt.scatter(center_xs, center_ys, c=colors, s=100)
    plt.show()
def plot_points2(points, centroid, label):
    """Plot centroids, then all points coloured by their cluster label
    in a second subplot."""
    colors = ["#%06x" % random.randint(0, 0xFFFFFF) for _ in range(K_CONST)]
    center_xs, center_ys = zip(*centroid)
    plt.title('Centroid/Cluster Overlay')
    plt.scatter(center_xs, center_ys, c=colors, s=100)
    plt.subplot(212)
    plt.title('Points')
    # One scatter call per point so each can take its label's colour.
    for counter, (x, y) in enumerate(points):
        plt.scatter(x, y, c=colors[label[counter]], s=20)
    plt.show()
def heatmap_data_converter(points, centroid, label):
    """Convert a clustering into heatmap entries.

    Each entry is {'value': cluster size, 'radius': distance to the
    farthest member, 'x'/'y': centroid coordinates}.
    """
    heatmap_data = []
    for idx, (cx, cy) in enumerate(centroid):
        members = [(px, py) for j, (px, py) in enumerate(points) if label[j] == idx]
        heatmap_data.append({
            'value': len(members),
            'radius': farthest_distance(members, (cx, cy)),
            'x': cx,
            'y': cy,
        })
    return heatmap_data
def get_heatmap_data():
    """Cluster a fresh random board and return heatmap-ready entries.

    Retries kmeans2 until no empty-cluster error is raised
    (missing='raise' aborts the attempt when a cluster comes up empty).
    """
    #initialize points
    points = init_board(NUM_POINTS)
    print("scipy kmeans2 implementation")
    #try kmeans2 until there are no cluster empty warnings
    centroid, label = None, None
    num_tries = 0
    while centroid is None or label is None:
        num_tries = num_tries + 1
        try:
            # BUG FIX: k was wrapped in float() — kmeans2 expects an int
            # cluster count (the -one branch below already passes K_CONST
            # unwrapped).
            centroid, label = kmeans2(points, K_CONST, iter=NUM_ITER,
                                      minit='random', missing='raise')
        except ClusterError:
            pass
    # print() with a single argument behaves identically on Python 2 and 3.
    print("Centroid: " + str(centroid))
    print("Label: " + str(label))
    print("Total # Tries: " + str(num_tries))
    print("K: " + str(K_CONST))
    # plot_points2(points, centroid, label)
    return heatmap_data_converter(points, centroid, label)
def farthest_distance(list_of_points, anchor):
    """Return (as int) the largest Euclidean distance from `anchor` to
    any point; 0 for an empty list."""
    origin = np.array(anchor)
    distances = [np.linalg.norm(origin - np.array(p)) for p in list_of_points]
    return int(max(distances, default=0))
def average_distance(list_of_points, anchor):
    """Return (as int) the mean Euclidean distance from `anchor` to the points.

    Returns 0 (with a diagnostic) for an empty list rather than dividing
    by zero.
    """
    if len(list_of_points) == 0:
        # Parenthesised print works identically on Python 2 and 3
        # (the original used a Py2-only print statement).
        print('0 array in average_distance function')
        return 0
    total_distance = 0
    a = np.array(anchor)
    for (x, y) in list_of_points:
        b = np.array((x, y))
        total_distance += np.linalg.norm(a-b)
    avg_distance = total_distance/len(list_of_points)
    return int(avg_distance)
def find_distance(x, y):
    """Euclidean distance between points x and y."""
    difference = np.array(x) - np.array(y)
    return np.linalg.norm(difference)
def sigma(points, label, anchor, cluster_num):
    """Average distance from `anchor` to the points labelled `cluster_num`."""
    members = [np.array((x, y))
               for i, (x, y) in enumerate(points)
               if label[i] == cluster_num]
    return average_distance(members, anchor)
# calculates the Davies-Bouldin Index for a clustering
# the Davies-Bouldin Index evaluates intra-cluster similarity and inter-cluster differences
def db_index(points, centroid, label):
    """Return the Davies-Bouldin index of the clustering (lower is better)."""
    num_clusters = len(centroid)
    running_sum = 0
    for i, ci in enumerate(centroid):
        max_val = 0
        sig_i = sigma(points, label, ci, i)
        for j, cj in enumerate(centroid):
            if (j == i):
                continue
            sig_j = sigma(points, label, cj, j)
            d = find_distance(ci, cj)
            # Pairwise similarity ratio R_ij = (s_i + s_j) / d_ij.
            kicker = (sig_i + sig_j) / d
            max_val = max(kicker, max_val)
        # DB index averages the worst (largest) ratio per cluster.
        running_sum += max_val
    return running_sum / num_clusters
if __name__ == '__main__':
if len(sys.argv) > 2:
if sys.argv[1] == '-one': #random generation of a single creative point cluster...the standard way to run
#initialize points
points = init_board(NUM_POINTS)
print points
#run k-means and find list of centers and corresponding clusters
# print "Old kmeans implementation"
# centers, clusters = find_centers(points, K_CONST)
# print "Number of Iterations: " + num_iter_counter
print "scipy kmeans2 implementation"
#try kmeans2 until there are no cluster empty warnings
centroid, label = None, None
num_tries = 0
while centroid is None or label is None:
num_tries = num_tries + 1
try:
centroid, label = kmeans2(points, K_CONST, iter=NUM_ITER, minit='points', missing='raise')
except ClusterError:
pass
print "Centroid: " + str(centroid)
print "Label: " + str(label)
print "Total # Tries: " + str(num_tries)
print "K: " + str(K_CONST)
plot_points2(points, centroid, label)
print heatmap_data_converter(points, centroid, label)
print "Davies-Bouldin Index: " + str(db_index(points, centroid, label) + " (lower values are better)")
# print centers, clusters, num_iter_counter
# plot_points(centers, clusters)
elif (sys.argv[1] == '-sim') and (len(sys.argv) == 4):
read_file_name = str(sys.argv[2])
write_file_name = str(sys.argv[3])
read_file = open(read_file_name, 'r')
write_file = open(write_file_name, 'w+')
num_lines_read_file = sum(1 for line in read_file)
read_file.seek(0)
tic = default_timer()
total_num_tries = 0
total_num_clusters = 0
total_num_points = 0
db_running_sum = 0
for idx, line in enumerate(read_file):
line_points_data = np.array(eval(line))
if (len(line_points_data) < 2): # don't run kmeans if no points in data
print 'kmeans skipped because not enough data in this creative'
continue
num_data_points = len(line_points_data)
k_to_use = int(ceil(sqrt(num_data_points/2)))
centroid, label = None, None
num_tries = 0
while centroid is None or label is None:
num_tries = num_tries + 1
try:
centroid, label = kmeans2(line_points_data, k_to_use, iter=NUM_ITER, minit='points', missing='raise') # minit could be 'random' or 'point'...point gets rid of magnitude of 2 error
except ClusterError:
pass
write_file.write('--------------Creative ' + str(idx) + '--------------\n')
write_file.write("Centroid: " + str(centroid) + '\n')
write_file.write("Label: " + str(label) + '\n')
total_num_tries += num_tries
total_num_clusters += k_to_use
total_num_points += num_data_points
# db_running_sum += db_index(line_points_data, centroid, label)
print ('At creative ' + str(idx) + '/' + str(num_lines_read_file))
print ('Elapsed Time: ' + str(default_timer() - tic))
print ('Num Data Points: ' + str(num_data_points))
print ('Num_tries: ' + str(num_tries))
read_file.close()
write_file.close()
toc = default_timer()
stats_file = open('stats_file.txt', 'a')
stats_file.write('--------------' + str(datetime.datetime.utcnow()) + '---------------\n')
stats_file.write('Testing statistics for <' + read_file_name + '>\n')
stats_file.write('Results file <' + write_file_name + '>\n')
stats_file.write('Total number of original points: ' + str(total_num_points) + '\n')
stats_file.write('Total number of original creatives: ' + str(idx) + '\n')
stats_file.write('Total Elapsed Time: ' + str(toc-tic) + ' seconds \n')
stats_file.write('Total number of kmeans intiailzation tries: ' + str(total_num_tries) + '\n')
stats_file.write('Total number of clusters generated: ' + str(total_num_clusters) + '\n')
stats_file.write('Maximum number of iterations: ' + str(NUM_ITER) + '\n')
stats_file.write('minit: ' + str(MINIT) + '\n')
stats_file.write('Average Davies-Bouldin Index: ' + str(db_running_sum/num_lines_read_file) + ' (lower is better)\n')
stats_file.write('\n')
stats_file.close()
elif sys.argv[1] == '-dump' and (len(sys.argv) == 4): # file, then number of points to be generated
dataset_generator(str(sys.argv[2]), int(sys.argv[3]))
else:
print '-one: standard single cluster generator. Will plot points.'
print '-sim <data_file_name> <write_file_name>: simulates reading data from <data_file_name>, dumping clusters into <write_file_name>, and appending statistics to stats_file.txt'
print '-dump <data_file_name> <num_points>: generates num_points data and writes into data_file_name'
|
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""Tests for the engine utils module
"""
"""
Session.py
Created by Tomas HJ Knapen on 2009-11-26.
Copyright (c) 2009 TK. All rights reserved.
"""
from psychopy import visual, core, event, misc, logging
import pygame
#from pygame.locals import *
from scipy.io import wavfile
import datetime
import os
import pickle as pkl
import pandas as pd
import pyaudio, wave
import numpy as np
#import pygaze
#from pygaze import libscreen
#from pygaze import eyetracker
from .. import config
class Session(object):
    """Session is a main class that creates screen and file properties"""

    def __init__(self, subject_initials, index_number, **kwargs):
        """Set up clock, output containers and the output filename.

        Args:
            subject_initials: identifier used in the output filename.
            index_number: run/session index, also used in the filename.
        """
        super(Session, self).__init__()
        self.subject_initials = subject_initials
        self.index_number = index_number
        self.clock = core.Clock()
        # Trial parameters and events accumulate here and are pickled
        # (and dumped as tsv) by close().
        self.outputDict = {'parameterArray': [], 'eventArray' : []}
        self.events = []
        self.stopped = False
        self.logging = logging
        self.create_output_filename()
        # engine = kwargs.pop('engine', 'pygaze')
        # self.create_screen(engine=engine, **kwargs)
        self.start_time = self.clock.getTime()

    def create_screen(self, engine='psychopy', **kwargs):
        """Create the experiment window via `pygaze` or `psychopy`.

        Screen parameters are pulled from kwargs, falling back to the
        [screen] section of the config file, then stored as attributes.
        """
        #Set arguments from config file or kwargs
        for argument in ['size', 'full_screen', 'background_color', 'gamma_scale',
                         'physical_screen_size', 'physical_screen_distance',
                         'max_lums', 'wait_blanking', 'screen_nr', 'mouse_visible',
                         'monitor_name']:
            value = kwargs.pop(argument, config.get('screen', argument))
            setattr(self, argument, value)
        if engine == 'pygaze':
            from pygaze import libscreen
            # NOTE(review): only `libscreen` is imported, yet `pygaze`
            # itself is referenced below (pygaze.settings, pygaze.expdisplay)
            # while the top-level `import pygaze` is commented out — this
            # branch looks like a NameError waiting to happen; confirm.
            setattr(pygaze.settings, 'FULLSCREEN', self.full_screen)
            self.display = libscreen.Display(disptype='psychopy',
                                             dispsize=self.size,
                                             fgc=(255,0,0),
                                             bgc=list((255*bgl for bgl in self.background_color)),
                                             screennr=int(self.screen_nr),
                                             mousevisible=self.mouse_visible,
                                             fullscr=self.full_screen,
                                             allowStencil=True)
            self.screen = pygaze.expdisplay
            self.screen.waitBlanking = self.wait_blanking
        elif engine == 'psychopy':
            self.screen = visual.Window(size=self.size,
                                        fullscr=self.full_screen,
                                        screen=int(self.screen_nr),
                                        allowGUI=True,
                                        units='pix',
                                        allowStencil=True,
                                        rgb=self.background_color,
                                        waitBlanking=self.wait_blanking,
                                        useFBO=True,
                                        winType='pyglet',
                                        monitor=self.monitor_name)
            self.screen.setMouseVisible(self.mouse_visible)
            event.Mouse(visible=self.mouse_visible, win=self.screen)
        self.screen.setColor(self.background_color)
        self.screen.background_color = self.background_color
        self.screen_pix_size = self.size
        # Visual-angle bookkeeping derived from the physical geometry:
        # screen height in degrees, then pixel/cm conversions per degree.
        self.screen_height_degrees = 2.0 * 180.0/np.pi * np.arctan((self.physical_screen_size[1]/2.0)/self.physical_screen_distance)
        self.pixels_per_degree = (self.size[1]) / self.screen_height_degrees
        self.centimeters_per_degree = self.physical_screen_size[1] / self.screen_height_degrees
        self.pixels_per_centimeter = self.pixels_per_degree / self.centimeters_per_degree
        self.screen.flip()

    def stop(self):
        """Mark the session as stopped (polled by run loops elsewhere)."""
        self.stopped = True

    def create_output_filename(self, data_directory = 'data'):
        """create output file"""
        # Timestamped name: <initials>_<index>_<YYYY-mm-dd_HH.MM.SS>.
        now = datetime.datetime.now()
        opfn = now.strftime("%Y-%m-%d_%H.%M.%S")
        if not os.path.isdir(data_directory):
            os.mkdir(data_directory)
        self.output_file = os.path.join(data_directory, self.subject_initials + '_' + str(self.index_number) + '_' + opfn )

    def open_input_file(self):
        """
        This method opens a pickle file that has input data in it.
        we assume the input data consists of two arrays - one for parameters and one for timings.
        the two arrays' rows will be trials.

        NOTE(review): `self.index_number + '.pkl'` requires index_number
        to be a str; elsewhere it is formatted with str() — confirm type.
        """
        self.input_file_name = self.index_number + '.pkl'
        ipf = open(self.input_file_name)
        self.input_data = pkl.load(ipf)
        ipf.close()

    def create_input_data(self, save = False):
        """
        This method should be provided by subclasses that create input data on the fly
        """
        pass

    def parse_input_data(self):
        """
        We assume that the pickle file used as input will be an array,
        the rows of which will be the requested trials.
        """
        self.nr_trials = len(self.input_data)

    def close(self):
        """close screen and save data"""
        pygame.mixer.quit()
        self.screen.close()
        # Append-mode pickle of the accumulated output dictionary.
        parsopf = open(self.output_file + '_outputDict.pkl', 'a')
        pkl.dump(self.outputDict, parsopf)
        parsopf.close()
        # also output parameters as tsv
        opd = pd.DataFrame.from_records(self.outputDict['parameterArray'])
        opd.to_csv(path_or_buf=self.output_file + '.tsv', sep='\t', encoding='utf-8')

    def play_sound(self, sound_index = '0'):
        """docstring for play_sound

        Streams self.sounds[sound_index] asynchronously via pyaudio.
        NOTE(review): `self.sounds` and `self.pyaudio` are not assigned
        anywhere in this class — presumably set by a subclass; confirm.
        """
        if type(sound_index) == int:
            sound_index = str(sound_index)
        # assuming 44100 Hz, mono channel np.int16 format for the sounds
        stream_data = self.sounds[sound_index]
        self.frame_counter = 0
        def callback(in_data, frame_count, time_info, status):
            # Feed pyaudio the next slice of samples on each callback.
            data = stream_data[self.frame_counter:self.frame_counter+frame_count]
            self.frame_counter += frame_count
            return (data, pyaudio.paContinue)
        # open stream using callback (3)
        stream = self.pyaudio.open(format=pyaudio.paInt16,
                                   channels=1,
                                   rate=44100,
                                   output=True,
                                   stream_callback=callback)
        stream.start_stream()
        # stream.write(stream_data)

    def play_np_sound(self, sound_array):
        """Stream a raw numpy sample array asynchronously via pyaudio."""
        # assuming 44100 Hz, mono channel np.int16 format for the sounds
        self.frame_counter = 0
        def callback(in_data, frame_count, time_info, status):
            data = sound_array[self.frame_counter:self.frame_counter+frame_count]
            self.frame_counter += frame_count
            return (data, pyaudio.paContinue)
        # open stream using callback (3)
        stream = self.pyaudio.open(format=pyaudio.paInt16,
                                   channels=1,
                                   rate=44100,
                                   output=True,
                                   stream_callback=callback)
        stream.start_stream()

    def deg2pix(self, deg):
        """Convert degrees of visual angle to pixels for this screen."""
        return deg * self.pixels_per_degree
class MRISession(Session):
    """Session variant that tracks scanner TR (repetition-time) triggers."""

    def __init__(self,
                 subject_initials,
                 index_number,
                 tr=2,
                 simulate_mri_trigger=True,
                 mri_trigger_key=None,
                 *args,
                 **kwargs):
        """In addition to Session setup, record TR length, trigger key
        and the time the next trigger is expected."""
        super(MRISession, self).__init__(subject_initials, index_number, *args, **kwargs)
        self.simulate_mri_trigger = simulate_mri_trigger
        # Fall back to the configured scanner trigger key when none given.
        if mri_trigger_key is None:
            self.mri_trigger_key = config.get('mri', 'mri_trigger_key')
        else:
            self.mri_trigger_key = mri_trigger_key
        self.time_of_last_tr = self.clock.getTime()
        self.tr = tr
        self.current_tr = 0
        self.target_trigger_time = self.start_time + self.tr

    def mri_trigger(self):
        """Register one MRI trigger: stamp its time, bump the TR counter
        and project when the next trigger is due."""
        self.time_of_last_tr = self.clock.getTime()
        self.current_tr += 1
        self.target_trigger_time = self.start_time + (self.current_tr + 1) * self.tr
        logging.critical('Registered MRI trigger')
#
# class EyelinkSession(Session):
# """docstring for EyelinkSession"""
# def __init__(self, subject_initials, index_number, tracker_on=0, *args, **kwargs):
#
# super(EyelinkSession, self).__init__(subject_initials, index_number, *args, **kwargs)
#
# for argument in ['n_calib_points', 'sample_rate', 'calib_size', 'x_offset']:
# value = kwargs.pop(argument, config.get('eyetracker', argument))
# setattr(self, argument, value)
#
# # set pygaze settings
# pygaze.settings.full_screen = self.full_screen
# pygaze.settings.BGC = list(((np.array(self.background_color) + 1.0)/2.0 * 255).astype(int))
# if hasattr(self, 'foreground_color'):
# pygaze.settings.FGC = self.foreground_color
# else:
# pygaze.settings.FGC = list(((-np.array(self.background_color) + 1.0)/2.0 * 255).astype(int))
# pygaze.settings.DISPSIZE = self.screen.size
# pygaze.settings.SCREENSIZE = self.physical_screen_size
# pygaze.settings.SCREENDIST = self.physical_screen_distance
#
# if tracker_on == 1:
# self.create_tracker(tracker_on=True,
# calibration_type='HV%d'%self.n_calib_points,
# sample_rate=self.sample_rate, *args, **kwargs)
# if self.tracker != None:
# self.tracker_setup(*args, **kwargs)
# elif tracker_on == 2:
# # self.create_tracker(auto_trigger_calibration = 1, calibration_type = 'HV9')
# # if self.tracker_on:
# # self.tracker_setup()
# # how many points do we want:
# n_points = self.n_calib_points
# # create tracker
# self.create_tracker(auto_trigger_calibration=0,
# calibration_type='HV%d'%self.n_calib_points,
# sample_rate=self.sample_rate, *args, **kwargs)
#
# calibration_targets, validation_targets, point_indices = self._setup_custom_calibration_points()
#
# # and send these targets to the custom calibration function:
# self.custom_calibration(calibration_targets=calibration_targets,
# validation_targets=validation_targets,point_indices=point_indices,
# n_points=self.n_calib_points,randomize_order=True,repeat_first_target=True)
# # reapply settings:
# self.tracker_setup()
# else:
# self.create_tracker(tracker_on=False)
#
# def create_tracker(self,
# tracker_on=True,
# sensitivity_class=0,
# split_screen=False,
# screen_half='L',
# auto_trigger_calibration=1,
# calibration_type='HV9',
# sample_rate=1000):
# """
# tracker sets up the connection and inputs the parameters.
# only start tracker after the screen is taken, its parameters are set,
# and output file names are created.
# """
#
# self.eyelink_temp_file = self.subject_initials[:2] + '_' + str(self.index_number) + '_' + str(np.random.randint(99)) + '.edf'
#
# if tracker_on:
# # create actual tracker
# # try:
# self.tracker = eyetracker.EyeTracker(self.display, trackertype='eyelink', resolution=self.display.dispsize, data_file=self.eyelink_temp_file, bgc=self.display.bgc)
# self.tracker_on = True
# # except:
# # print('\ncould not connect to tracker')
# # self.tracker = None
# # self.tracker_on = False
# # self.eye_measured, self.sample_rate, self.CR_mode, self.file_sample_filter, self.link_sample_filter = 'N', sample_rate, 1, 1, 1
#
# # return
# else:
# # not even create dummy tracker
# self.tracker = None
# self.tracker_on = False
# return
#
# self.apply_settings(sensitivity_class=sensitivity_class,
# split_screen=split_screen,
# screen_half=screen_half,
# auto_trigger_calibration=auto_trigger_calibration,
# calibration_type=calibration_type,
# sample_rate=sample_rate)
#
# def custom_calibration(self,
# calibration_targets,
# validation_targets,
# point_indices,
# n_points,
# randomize_order=0,
# repeat_first_target=1):
#
# # send the messages:
# self.tracker.send_command('calibration_type = HV%d'%n_points )
# self.tracker.send_command('generate_default_targets = NO')
# self.tracker.send_command('randomize_calibration_order %d'%randomize_order)
# self.tracker.send_command('randomize_validation_order %d'%randomize_order)
# self.tracker.send_command('cal_repeat_first_target %d'%repeat_first_target)
# self.tracker.send_command('val_repeat_first_target %d'%repeat_first_target)
#
# if repeat_first_target:
# n_points+=1
#
# self.tracker.send_command('calibration_samples=%d'%n_points)
# self.tracker.send_command('calibration_sequence=%s'%point_indices)
# self.tracker.send_command('calibration_targets = %s'%calibration_targets)
#
# self.tracker.send_command('validation_samples=%d'%n_points)
# self.tracker.send_command('validation_sequence=%s'%point_indices)
# self.tracker.send_command('validation_targets = %s'%validation_targets)
#
#
# def apply_settings(self, sensitivity_class = 0, split_screen = False, screen_half = 'L', auto_trigger_calibration = True, sample_rate = 1000, calibration_type = 'HV9', margin = 60):
#
# # set EDF file contents
# self.tracker.send_command("file_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,MESSAGE,BUTTON")
# # self.tracker.send_command("file_sample_filter = LEFT,RIGHT,GAZE,SACCADE,BLINK,MESSAGE,AREA")#,GAZERES,STATUS,HTARGET")
# self.tracker.send_command("file_sample_data = LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS,HTARGET")
# # set link da (used for gaze cursor)
# self.tracker.send_command("link_event_filter = LEFT,RIGHT,FIXATION,FIXUPDATE,SACCADE,BLINK")
# self.tracker.send_command("link_sample_data = GAZE,GAZERES,AREA,HREF,PUPIL,STATUS")
# self.tracker.send_command("link_event_data = GAZE,GAZERES,AREA,HREF,VELOCITY,FIXAVG,STATUS")
# # set furtheinfo
# self.tracker.send_command("screen_pixel_coords = 0 0 %d %d" %(self.screen_pix_size[0], self.screen_pix_size[1]))
# self.tracker.send_command("pupil_size_diameter = %s"%('YES'));
# self.tracker.send_command("heuristic_filter %d %d"%([1, 0][sensitivity_class], 1))
# self.tracker.send_command("sample_rate = %d" % sample_rate)
#
# # settings tt address saccade sensitivity - to be set with sensitivity_class parameter. 0 is cognitive style, 1 is pursuit/neurological style
# self.tracker.send_command("saccade_velocity_threshold = %d" %[30, 22][sensitivity_class])
# self.tracker.send_command("saccade_acceleration_threshold = %d" %[9500, 5000][sensitivity_class])
# self.tracker.send_command("saccade_motion_threshold = %d" %[0.15, 0][sensitivity_class])
#
# # self.tracker.send_command("file_sample_control = 1,0,0")
# self.tracker.send_command("screen_phys_coords = %d %d %d %d" %(-self.physical_screen_size[0] / 2.0, self.physical_screen_size[1] / 2.0, self.physical_screen_size[0] / 2.0, -self.physical_screen_size[1] / 2.0))
# self.tracker.send_command("simulation_screen_distance = " + str(self.physical_screen_distance))
#
# if auto_trigger_calibration:
# self.tracker.send_command("enable_automatic_calibration = YES")
# else:
# self.tracker.send_command("enable_automatic_calibration = NO")
#
# # for binocular stereo-setup need to adjust the calibration procedure to sample only points on the left/right side of the screen. This allows only HV9 calibration for now.
# # standard would be:
# # self.tracker.).send_command("calibration_targets = 320,240 320,40 320,440 40,240 600,240 40,40 600,40, 40,440 600,440")
# # ordering of points:
# # ;; FOR 9-SAMPLE ALGORITHM:
# # ;; POINTS MUST BE ORDERED ON SCREEN:
# # ;; 5 1 6
# # ;; 3 0 4
# # ;; 7 2 8
#
# # ;; ordering for points in bicubic ("HV13", 13 pt) cal
# # ;; Point order: 6 2 7
# # ;; 10 11
# # ;; 4 1 5
# # ;; 12 13
# # ;; 8 3 9
# if split_screen:
# self.tracker.send_command("calibration_type = HV9")
# self.tracker.send_command("generate_default_targets = NO")
#
# sh, nsw = self.screen.size[1], self.screen.size[0]/2
# points = np.array([[nsw/2, sh/2], [nsw/2, sh-margin], [nsw/2, margin], [margin, sh/2], [nsw-margin, sh/2], [margin, sh - margin], [nsw - margin, sh - margin], [margin, margin], [nsw - margin, margin]])
# if screen_half == 'R':
# points[:,0] += nsw
# points_string = ''
# for p in points:
# points_string += "%s,%s " % tuple(p)
# points_string = points_string[:-1] # delete last space
# self.tracker.send_command("calibration_targets = " % points_string)
# self.tracker.send_command("validation_targets = " % points_string)
# else:
# self.tracker.send_command("calibration_type = " + calibration_type)
#
# def tracker_setup(self, sensitivity_class = 0, split_screen = False, screen_half = 'L', auto_trigger_calibration = True, calibration_type = 'HV9', sample_rate = 1000):
# if self.tracker.connected():
#
# self.tracker.calibrate()
#
# # re-set all the settings to be sure of sample rate and filter and such that may have been changed during the calibration procedure and the subject pressing all sorts of buttons
# self.apply_settings(sensitivity_class = sensitivity_class, split_screen = split_screen, screen_half = screen_half, auto_trigger_calibration = auto_trigger_calibration, calibration_type = calibration_type, sample_rate = sample_rate )
#
# # we'll record the whole session continuously and parse the data afterward using the messages sent to the eyelink.
# self.tracker.start_recording()
# # for that, we'll need the pixel size and the like.
# self.tracker.log('degrees per pixel ' + str(self.pixels_per_degree) )
# # now, we want to know how fast we're sampling, really
# # self.eye_measured, self.sample_rate, self.CR_mode, self.file_sample_filter, self.link_sample_filter = self.tracker.getModeData()
# self.sample_rate = sample_rate
#
# def drift_correct(self, position = None):
# """docstring for drift_correct"""
# if self.tracker.connected():
# if position == None: # standard is of course centered on the screen.
# position = [self.screen.size[0]/2,self.screen.size[1]/2]
# while 1:
# # Does drift correction and handles the re-do camera setup situations
# error = self.tracker.doDriftCorrect(position[0],position[1],1,1)
# if error != 27:
# break;
# else:
# self.tracker_setup()
#
# def eye_pos(self):
# if self.tracker:
# return self.tracker.sample() # check for new sample update
# # if(dt != None):
# # # Gets the gaze position of the latest sample,
# # if dt.isRightSample():
# # gaze_position = dt.getRightEye().getGaze()
# # return gaze_position[0],gaze_position[1] # self.screen.size[1]-
# # elif dt.isLeftSample():
# # gaze_position = dt.getLeftEye().getGaze()
# # return gaze_position[0],gaze_position[1] # self.screen.size[1]-
# # return 0,self.screen.size[1]-0
# else:
# pygame.event.pump()
# (x,y) = pygame.mouse.get_pos()
# y = self.screen.size[1]-y
# return x,y
#
# def detect_saccade(self, algorithm_type = 'velocity', threshold = 0.25, direction = None, fixation_position = None, max_time = 1.0 ):
# """
# detect_saccade tries to detect a saccade based on position (needs fixation_position argument) or velocity (perhaps a direction argument?) information.
# It can be 'primed' with a vector giving the predicted direction of the impending saccade.
# detect_saccade looks for a saccade between call_time (= now) and max_time+call_time
# """
# no_saccade = True
# start_time = core.getTime()
# if algorithm_type == 'velocity':
# sample_array = np.zeros((max_time * self.sample_rate, 2), dtype = np.float32)
# velocity_array = np.zeros((max_time * self.sample_rate, 2), dtype = np.float32)
# f = np.array([1,1,2,3], dtype = np.float32)/7.0
# nr_samples = 1
# sample_array[0,:] = self.eye_pos()
# velocity_array[0,:] = 0.001, 0.001
# if direction != None: # make direction a unit vector if it is an argument to this function
# direction = direction / np.linalg.norm(direction)
#
# while no_saccade:
# saccade_polling_time = core.getTime()
# sample_array[nr_samples][:] = self.eye_pos()
# if (sample_array[nr_samples-1][0] != sample_array[nr_samples][0]) or (sample_array[nr_samples-1][1] != sample_array[nr_samples][1]):
# velocity_array[nr_samples][:] = sample_array[nr_samples][:] - sample_array[nr_samples-1][:]
# if nr_samples > 3:
# # scale velocities according to x and y median-based standard deviations, as in engbert & mergenthaler, 2006
# med_scaled_velocity = velocity_array[:nr_samples]/np.mean(np.sqrt(((velocity_array[:nr_samples] - np.median(velocity_array[:nr_samples], axis = 0))**2)), axis = 0)
# if direction != None:
# # scale the velocity array according to the direction in the direction argument before thresholding
# # assuming direction is a x,y unit vector specifying the expected direction of the impending saccade
# if np.inner(med_scaled_velocity[nr_samples], direction) > threshold:
# no_saccade = False
# if np.linalg.norm(med_scaled_velocity[-1]) > threshold:
# no_saccade = False
# nr_samples += 1
# if ( saccade_polling_time - start_time ) > max_time:
# no_saccade = False
#
# if algorithm_type == 'position' or not self.tracker:
# if fixation_position == None:
# fixation_position = np.array(self.eye_pos())
# while no_saccade:
# saccade_polling_time = core.getTime()
# ep = np.array(self.eye_pos())
# # print ep, fixation_position, threshold, np.linalg.norm(ep - fixation_position) / self.pixels_per_degree
# if (np.linalg.norm(ep - fixation_position) / self.pixels_per_degree) > threshold:
# # eye position is outside the safe zone surrounding fixation - swap the buffers to change saccade target position
# no_saccade = False
# # print '\n'
# if ( saccade_polling_time - start_time ) > max_time:
# no_saccade = False
#
# if algorithm_type == 'eyelink':
# while no_saccade:
# self.tracker.wait_for_saccade_start()
# saccade_polling_time = core.getTime()
# # ev =
# # if ev == 5: # start of a saccade
# # no_saccade = False
# # if ( saccade_polling_time - start_time ) > max_time:
# # no_saccade = False
#
# return saccade_polling_time
#
#
# def close(self):
# if self.tracker:
# if self.tracker.connected():
# self.tracker.stop_recording()
# # inject local file name into pygaze tracker and then close.
# self.tracker.local_data_file = self.output_file + '.edf'
# self.tracker.close()
# super(EyelinkSession, self).close()
#
# def play_sound(self, sound_index = '1'):
# """docstring for play_sound"""
# super(EyelinkSession, self).play_sound(sound_index = sound_index)
# if self.tracker != None:
# self.tracker.log('sound ' + str(sound_index) + ' at ' + str(core.getTime()) )
#
# def _setup_custom_calibration_points(self):
# # order should be with 5 points: center-up-down-left-right
# # order should be with 9 points: center-up-down-left-right-leftup-rightup-leftdown-rightdown
# # order should be with 13: center-up-down-left-right-leftup-rightup-leftdown-rightdown-midleftmidup-midrightmidup-midleftmiddown-midrightmiddown
# # so always: up->down or left->right
# # it is setup to do a 9 or 5 point circular calibration, at reduced ecc
#
# # create 4 x levels:
# width = self.calib_size * self.size[1]
# x_start = (self.size[0]-width)/2
# x_end = self.size[0]-(self.size[0]-width)/2
# x_range = np.linspace(x_start,x_end,5) + self.x_offset
# y_start = (self.size[1]-width)/2
# y_end = self.size[1]-(self.size[1]-width)/2
# y_range = np.linspace(y_start,y_end,5)
#
# # set calibration targets
# cal_center = [x_range[2],y_range[2]]
# cal_left = [x_range[0],y_range[2]]
# cal_right = [x_range[4],y_range[2]]
# cal_up = [x_range[2],y_range[0]]
# cal_down = [x_range[2],y_range[4]]
# cal_leftup = [x_range[1],y_range[1]]
# cal_rightup = [x_range[3],y_range[1]]
# cal_leftdown = [x_range[1],y_range[3]]
# cal_rightdown = [x_range[3],y_range[3]]
#
# # create 4 x levels:
# width = self.eyelink_calib_size*0.75 * self.size[1]
# x_start = (self.size[0]-width)/2
# x_end = self.size[0]-(self.size[0]-width)/2
# x_range = np.linspace(x_start,x_end,5) + self.x_offset
# y_start = (self.size[1]-width)/2
# y_end = self.size[1]-(self.size[1]-width)/2
# y_range = np.linspace(y_start,y_end,5)
#
# # set calibration targets
# val_center = [x_range[2],y_range[2]]
# val_left = [x_range[0],y_range[2]]
# val_right = [x_range[4],y_range[2]]
# val_up = [x_range[2],y_range[0]]
# val_down = [x_range[2],y_range[4]]
# val_leftup = [x_range[1],y_range[1]]
# val_rightup = [x_range[3],y_range[1]]
# val_leftdown = [x_range[1],y_range[3]]
# val_rightdown = [x_range[3],y_range[3]]
#
# # get them in the right order
# if self.n_calib_points == 5:
# cal_xs = np.round([cal_center[0],cal_up[0],cal_down[0],cal_left[0],cal_right[0]])
# cal_ys = np.round([cal_center[1],cal_up[1],cal_down[1],cal_left[1],cal_right[1]])
# val_xs = np.round([val_center[0],val_up[0],val_down[0],val_left[0],val_right[0]])
# val_ys = np.round([val_center[1],val_up[1],val_down[1],val_left[1],val_right[1]])
# elif self.n_calib_points == 9:
# cal_xs = np.round([cal_center[0],cal_up[0],cal_down[0],cal_left[0],cal_right[0],cal_leftup[0],cal_rightup[0],cal_leftdown[0],cal_rightdown[0]])
# cal_ys = np.round([cal_center[1],cal_up[1],cal_down[1],cal_left[1],cal_right[1],cal_leftup[1],cal_rightup[1],cal_leftdown[1],cal_rightdown[1]])
# val_xs = np.round([val_center[0],val_up[0],val_down[0],val_left[0],val_right[0],val_leftup[0],val_rightup[0],val_leftdown[0],val_rightdown[0]])
# val_ys = np.round([val_center[1],val_up[1],val_down[1],val_left[1],val_right[1],val_leftup[1],val_rightup[1],val_leftdown[1],val_rightdown[1]])
# #xs = np.round(np.linspace(x_edge,self.size[0]-x_edge,self.n_calib_points))
# #ys = np.round([self.ywidth/3*[1,2][pi%2] for pi in range(self.n_calib_points)])
#
# # put the points in format that eyelink wants them, which is
# # calibration_targets / validation_targets: 'x1,y1 x2,y2 ... xz,yz'
# calibration_targets = ' '.join(['%d,%d'%(cal_xs[pi],cal_ys[pi]) for pi in range(self.n_calib_points)])
# # just copy calibration targets as validation for now:
# #validation_targets = calibration_targets
# validation_targets = ' '.join(['%d,%d'%(val_xs[pi],val_ys[pi]) for pi in range(self.n_calib_points)])
#
# # point_indices: '0, 1, ... n'
# point_indices = ', '.join(['%d'%pi for pi in range(self.n_calib_points)])
#
# return calibration_targets, validation_targets, point_indices
#
#
# class StarStimSession(EyelinkSession):
# """StarStimSession adds starstim EEG trigger functionality to the EyelinkSession.
# It assumes an active recording, using NIC already connected over bluetooth.
# Triggers land in the file that's already set up and recording.
# """
# def __init__(self, subject_initials, index_number, connect_to_starstim = False, TCP_IP = '10.0.1.201', TCP_PORT = 1234):
# super(StarStimSession, self).__init__(subject_initials, index_number)
# self.setup_starstim_connection(TCP_IP = TCP_IP, TCP_PORT = TCP_PORT, connect_to_starstim = connect_to_starstim)
#
# def setup_starstim_connection(self, TCP_IP = '10.0.1.201', TCP_PORT = 1234, connect_to_starstim = True):
# """setup_starstim_connection opens a connection to the starstim to its standard ip address
# and standard (trigger) port. For controlling the recordings etc, we need tcp port 1235, it seems.
# more on that later.
# """
# if connect_to_starstim:
# self.star_stim_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# self.star_stim_socket.connect((TCP_IP, TCP_PORT))
# self.star_stim_connected = True
# else:
# self.star_stim_connected = False
#
# def close_starstim_connection(self):
# if self.star_stim_connected:
# self.star_stim_socket.close()
#
# def send_starstim_trigger(self, trigger = 1):
# if self.star_stim_connected:
# self.star_stim_socket.sendall('<TRIGGER>%i</TRIGGER>'%trigger)
#
# def close(self):
# super(StarStimSession, self).close()
# if self.star_stim_connected:
# self.close_starstim_connection()
#
#
# class SoundSession(Session):
#
#
# def __init__(self, *args, **kwargs):
# self.setup_sound_system()
# super(SoundSession, self).__init__(*args, **kwargs)
#
# def setup_sound_system(self):
# """initialize pyaudio backend, and create dictionary of sounds."""
# self.pyaudio = pyaudio.PyAudio()
# self.sound_files = subprocess.Popen('ls ' + os.path.join(os.environ['EXPERIMENT_HOME'], 'sounds', '*.wav'), shell=True, stdout=subprocess.PIPE).communicate()[0].split('\n')[0:-1]
# self.sounds = {}
# for sf in self.sound_files:
# self.read_sound_file(file_name = sf)
#
# def read_sound_file(self, file_name, sound_name = None):
# """Read sound file from file_name, and append to self.sounds with name as key"""
# if sound_name == None:
# sound_name = os.path.splitext(os.path.split(file_name)[-1])[0]
#
# rate, data = wavfile.read(file_name)
# # create stream data assuming 2 channels, i.e. stereo data, and use np.float32 data format
# stream_data = data.astype(np.int16)
#
# # check data formats - is this stereo sound? If so, we need to fix it.
# wf = wave.open(file_name, 'rb')
# if wf.getnchannels() == 2:
# stream_data = stream_data[::2]
#
# self.sounds.update({sound_name: stream_data})
#
# def test_MRISession_simulation():
# from .trial import Trial
#
# session = MRISession('GdH', 1)
# session.create_screen()
#
# trial = Trial(session=session)
# trial.draw()
#
# core.wait(2)
# trial.draw()
#
# logging.console.setLevel(logging.DEBUG)
# logging.info('Current TR: %s\n\rTime last TR: %s' % (session.current_tr, session.time_of_last_tr, ))
# assert(session.current_tr > 0)
#
|
# -*- coding: utf-8 -*-
import base64
from blueapps.utils.logger import logger
from config import APP_CODE, SECRET_KEY
def get_execute_script(client, username, bk_biz_id, script_id, ip_list, script_param, script_content):
    """Launch a fast-execute-script job through the BlueKing JOB API.

    Optional script content and parameters are base64-encoded as the API
    requires.  Returns the new job instance id, or None when the API call
    reports failure.
    """
    request_payload = {
        "bk_app_code": APP_CODE,
        "bk_app_secret": SECRET_KEY,
        "bk_username": username,
        "account": "root",
        "bk_biz_id": bk_biz_id,
        "ip_list": ip_list,
    }
    if script_content:
        encoded = base64.b64encode(script_content.encode("utf-8"))
        request_payload['script_content'] = str(encoded, "utf-8")
    if script_param:
        encoded = base64.b64encode(script_param.encode("utf-8"))
        request_payload['script_param'] = str(encoded, "utf-8")
    if script_id:
        request_payload['script_id'] = script_id
    logger.info('获取蓝鲸API中的fast_execute_script方法的作业实例')
    # Fire the job via the API client.
    response = client.job.fast_execute_script(request_payload)
    # Extract the job instance id on success; stays None on failure.
    job_instance_id = None
    if response.get("result"):
        job_instance_id = response.get("data").get("job_instance_id")
    return job_instance_id
|
try:
    # Legacy unused import kept for compatibility; numpy >= 2 removed
    # the public numpy.lib.shape_base module.
    from numpy.lib.shape_base import split
except ImportError:
    pass

# Raw flight durations in "<hours>h <minutes>m" form.
duration = ['2h 50m', '7h 25m', '19h 0m', '5h 25m', '4h 45m', '2h 25m', '15h 30m', '21h 5m', '25h 30m', '7h 50m', '13h 15m', '2h 35m', '2h 15m', '12h 10m', '2h 35m', '26h 35m', '4h 30m', '22h 35m', '23h 0m', '20h 35m']
# Split each entry into its numeric hour and minute components.
duration_hours = []
duration_mins = []
for entry in duration:
    hours_part, _, minutes_part = entry.partition('h')
    duration_hours.append(int(hours_part))
    duration_mins.append(int(minutes_part.strip().rstrip('m')))
print(duration_hours)
class Perceptron():
    """Linear perceptron trained by gradient descent via Theano.

    Labels are expected to be -1 / +1; the decision function is
    sign(X.W + b).
    """
    def __init__(self,X_array,Y_array):
        # Training inputs (list of feature rows) and their -1/+1 labels.
        self.X_array=X_array
        self.Y_array=Y_array
        # Number of weights = feature dimensionality of one row.
        self.W_len=len(X_array[0])
    def learn(self,learning_rate=0.1,learning_times=1000):
        """Fit weights by repeatedly descending the perceptron loss
        evaluated on the currently misclassified samples.

        :param learning_rate: gradient step size.
        :param learning_times: number of update iterations.
        :returns: numpy array [b, w0, w1, ...] of the learnt parameters.

        Side effect: builds ``self.predicting_function``, which
        :meth:`predict` requires.
        """
        # Imports are local so the class can be defined without Theano.
        import theano
        import theano.tensor as T
        import numpy as np
        X=T.dmatrix('X')
        Y=T.dvector('Y')
        # Random initial weights, small scalar bias.
        W=theano.shared(np.random.randn(self.W_len),name="W")
        b=theano.shared(0.1,name="b")
        hx=T.dot(X,W)+b
        # Perceptron criterion: sum of -y*h(x) over the supplied samples.
        cost_function=T.sum(-Y*hx)
        gW,gb=T.grad(cost_function,[W,b])
        self.predicting_function=theano.function(inputs=[X],outputs=hx)
        learning_function=theano.function(inputs=[X,Y],outputs=cost_function,updates=[(W,W-learning_rate*gW),(b,b-learning_rate*gb)])
        for i in range(learning_times):
            # Current predictions thresholded to -1/+1.
            p=np.array([-1 if x<=0 else 1 for x in self.predicting_function(self.X_array)])
            # Keep only the misclassified samples for this update step.
            input_x=np.array(self.X_array)[self.Y_array != p]
            input_y=np.array(self.Y_array)[self.Y_array != p]
            print(str(i+1)+'/'+str(learning_times),'error:',learning_function(input_x,input_y))
        W_learnt=np.hstack((b.get_value(),W.get_value()))
        for i in range(self.W_len):
            print('w'+str(i),'=',str(W.get_value()[i]))
        print('b','=',b.get_value())
        return W_learnt
    def predict(self,p_array):
        """Return -1/+1 class labels for the rows of *p_array*.

        :meth:`learn` must have been called first.
        """
        return [-1 if x<=0 else 1 for x in self.predicting_function(p_array)]
if __name__=='__main__':
    # Six 2-D training points: the first three belong to class -1,
    # the last three to class +1.
    samples = [[0,1],[1,1],[1,2],[4,6],[5,5],[5,6]]
    labels = [-1,-1,-1,1,1,1]
    model = Perceptron(samples, labels)
    model.learn(0.1, 15)
    # Show expected labels next to the model's predictions.
    print(labels)
    print(model.predict(samples))
# -*- coding: UTF-8 -*-
"""Lisää skriptin alkuun ja loppuun tarvittavat rivit kuvan luomiseen. Ottaa
parametreina syöte- ja tulostetiedostojen nimet."""
import sys
if __name__ == '__main__':
infile = open(sys.argv[1])
outfile = open(sys.argv[2], "w")
outfile.write("from math import *\n")
outfile.write("import sys\n")
outfile.write("sys.path.append(\"../commons/kuva/\")\n")
outfile.write("from kuva import *\n")
outfile.write("import kuvaaja\n")
outfile.write("import lukusuora\n")
outfile.write("import geom\n")
outfile.write("aloitaKuva()\n")
outfile.write("if True:\n")
for line in infile:
outfile.write(" " + line)
outfile.write("\nlopetaKuva()\n")
outfile.close()
|
import math
class Haversine:
    """
    Great-circle distance between two (lon, lat) coordinate pairs,
    computed with the haversine formula.

    The distance is exposed in meters, kilometers, miles and feet::

        Haversine([lon1, lat1], [lon2, lat2]).feet

    Used to find the distance between the user's location and weather
    stations when calling the weather API.  Formula reference:
    `link <https://nathanrooy.github.io/posts/2016-09-07/haversine-with-python/>`_
    (author: Nathan A. Rooy).
    """
    EARTH_RADIUS_M = 6371000  # mean Earth radius, meters

    def __init__(self, coord1, coord2):
        lon_a, lat_a = coord1
        lon_b, lat_b = coord2
        # Latitudes and deltas in radians.
        phi_a, phi_b = math.radians(lat_a), math.radians(lat_b)
        d_phi = math.radians(lat_b - lat_a)
        d_lambda = math.radians(lon_b - lon_a)
        # Haversine of the central angle.
        half_chord = (
            math.sin(d_phi / 2.0) ** 2
            + math.cos(phi_a) * math.cos(phi_b) * math.sin(d_lambda / 2.0) ** 2
        )
        central_angle = 2 * math.atan2(math.sqrt(half_chord), math.sqrt(1 - half_chord))
        self.meters = self.EARTH_RADIUS_M * central_angle  # distance in meters
        self.km = self.meters / 1000.0                     # in kilometers
        self.miles = self.meters * 0.000621371             # in miles
        self.feet = self.miles * 5280                      # in feet
|
# API status codes returned to callers (string-typed by convention).
SUCCESS = '0'
ERROR = '-1'
PARAMETER_ERROR = '-2'
# UN_AUTH_ERROR = '-4'
# Human-readable result messages (Chinese: "request succeeded" / "request error").
RESULT_SUCCESS = '请求成功'
RESULT_ERROR = '请求错误'
|
#!/usr/bin/env python
#----------------------------------------------------------------------
# Description:
# Author: Carsten Richter <carsten.richter@desy.de>
# Created at: Do 2. Jul 14:27:28 CEST 2015
# Computer: haso227r
# System: Linux 3.13.0-55-generic on x86_64
#
# Copyright (c) 2015 Carsten Richter All rights reserved.
#----------------------------------------------------------------------
if __name__ == "__main__":
    import os
    import atlasccd
    from pyqtgraph.Qt import QtCore, QtGui
    import argparse
    parser = argparse.ArgumentParser(description=
            "Image visualization for agilent atlas ccd `.img` files using "
            "pyqtgraph and fabio packages")
    parser.add_argument("-s", "--sort", action="store_true",
                        help="sort chosen files")
    parser.add_argument("imgfile", nargs="*", default=".",
                        help="list of files to show")
    args = parser.parse_args()
    if len(args.imgfile)==1 and os.path.isdir(args.imgfile[0]):
        # Watch a directory: rescan it every 5 seconds.
        timer = QtCore.QTimer()
        # BUG FIX: the original passed the *result* of scan_folder(...)
        # to connect() (calling it once, immediately) instead of a
        # callable; wrap it in a lambda so each timeout rescans.
        timer.timeout.connect(lambda: atlasccd.scan_folder(args.imgfile[0]))
        timer.start(5000)
    else:
        # Show the chosen image files as a stack.
        data = atlasccd.open_img_stack(args.imgfile)
        iw = atlasccd.show_img(data)
        iw.labels = args.imgfile
        QtGui.QApplication.instance().exec_()
|
import os
import numpy as np
from machine_learning.aux import constants, helpers
from machine_learning.aux.constants import get_processed_data_file_header_segregation
from machine_learning.aux.helpers import read_dataset_csv_file_as_np_arrays
from machine_learning.aux.persist import load_model
from machine_learning.preprocessing.classifier_stage_1.prepare_dataset import get_training_labels as get_stage_1_training_labels, \
merge_and_label_processed_csv_files
def get_training_labels():
    """
    Mapping from as-causes to training labels for stage 2 classifier
    Labels -> 0, APSP
              1, BL
              2, CE
              3, DFL
              4, LRSSI
              5, PWR_STATE
    """
    # Label value is the cause's position in this fixed ordering.
    ordered_causes = (
        constants.ASCause.apsp,
        constants.ASCause.bl,
        constants.ASCause.ce,
        constants.ASCause.dfl,
        constants.ASCause.lrssi,
        constants.ASCause.pwr_state,
    )
    return {cause: label for label, cause in enumerate(ordered_causes)}
def get_training_label_proportions():
    """
    Mapping from as-causes to training label proportions for stage 2 classifier
    """
    label_mapping = get_training_labels()
    # Sample counts per cause, keyed here by cause and re-keyed by label below.
    counts_by_cause = {
        constants.ASCause.apsp: 575,
        constants.ASCause.bl: 500,
        constants.ASCause.ce: 449,
        constants.ASCause.dfl: 114,
        constants.ASCause.lrssi: 600,
        constants.ASCause.pwr_state: 449,
    }
    return {label_mapping[cause]: count for cause, count in counts_by_cause.items()}
def get_processed_csv_file_names(directory_path):
    """
    Return the csv file names found in *directory_path*, ordered so that
    reads are deterministic (smallest file first).
    """
    supported_extensions = ('.csv',)
    csv_names = [
        entry for entry in os.listdir(directory_path)
        if os.path.splitext(entry)[1] in supported_extensions
    ]
    # sort so that we always read in a predefined order: smallest first
    csv_names.sort(key=lambda name: os.path.getsize(os.path.join(directory_path, name)))
    return csv_names
def identify_pscans_using_stage_1_classifier(infile, outfile, classifier_filepath, for_training):
    """
    Drop periodic-scan instances from a dataset using the stage 1
    classifier and write the reduced dataset to *outfile*.

    :param infile: processed csv file to filter.
    :param outfile: destination csv file for the reduced data.
    :param classifier_filepath: path to the pickled stage 1 classifier.
    :param for_training: include training-label columns in the output header.
    """
    def remove_pscans(_dataframe, pred):
        # Drop every row predicted as a periodic scan (associated or
        # unassociated). Keeping a list (not a set) of surviving indexes
        # preserves row order, which the original set-difference did not
        # guarantee. The previous unused helper and duplicated loop pair
        # were removed.
        stage_1_training_labels = get_stage_1_training_labels()
        pscan_labels = {
            stage_1_training_labels[constants.ASCause.pscan_unassoc],
            stage_1_training_labels[constants.ASCause.pscan_assoc],
        }
        indexes_to_keep = [idx for idx, label in enumerate(pred) if label not in pscan_labels]
        return _dataframe.take(indexes_to_keep)

    infile = os.path.abspath(infile)
    outfile = os.path.abspath(outfile)
    classifier_filepath = os.path.abspath(classifier_filepath)
    classifier = load_model(classifier_filepath)
    # read the data to predict labels for
    features_x, _ = read_dataset_csv_file_as_np_arrays(filepath = infile, for_training = False)
    y_pred = classifier.predict(features_x)
    # some insight into the predictions
    n_pred = y_pred.shape[0]
    print('• Periodic Scans prediction count: {}'.format(np.bincount(y_pred.astype(int))))
    print('• Periodic Scans prediction proportion: {}'.format(np.divide(np.bincount(y_pred.astype(int)), n_pred)))
    # read in the datafile
    dataframe = helpers.read_csv_file(infile)
    print('• Dataframe shape before dropping identified pscans:', dataframe.shape)
    # remove periodic scans
    dataframe = remove_pscans(dataframe, y_pred)
    print('• Dataframe shape after dropping identified pscans:', dataframe.shape)
    # write the file back with the appropriate header layout
    if for_training:
        head_features, head_training, head_properties = get_processed_data_file_header_segregation(for_training = True)
        header = head_features + head_properties + head_training
    else:
        head_features, head_properties = get_processed_data_file_header_segregation(for_training = False)
        header = head_features + head_properties
    dataframe.to_csv(outfile, columns = header, header = True, index = False, mode = 'w')
if __name__ == '__main__':
    # Ad-hoc driver: label a processed capture, then strip periodic-scan
    # rows from it using the saved stage 1 random-forest classifier.
    merge_and_label_processed_csv_files(
        '/Users/gursimran/Workspace/active-scanning-cause-analysis/codebase__python/machine_learning/data/test_4_andr.csv',
        get_training_labels(),
        for_training = False
    )
    identify_pscans_using_stage_1_classifier(
        '/Users/gursimran/Workspace/active-scanning-cause-analysis/codebase__python/machine_learning/data/test_4_andr.csv',
        '/Users/gursimran/Workspace/active-scanning-cause-analysis/codebase__python/machine_learning/data/test_4_andr_reduced.csv',
        '/Users/gursimran/Workspace/active-scanning-cause-analysis/codebase__python/machine_learning/saved_models/classifier_stage_1/random_forest.pkl',
        for_training = False
    )
    # create_training_dataset(
    # '/Users/gursimran/Workspace/active-scanning-cause-analysis/codebase/machine_learning/data/classifier_stage_2/reduced_dataset.csv',
    # '/Users/gursimran/Workspace/active-scanning-cause-analysis/codebase/machine_learning/data/classifier_stage_2/training_dataset.csv',
    # get_training_label_proportions()
    # )
    pass
|
import os
from selenium import webdriver
from selenium.webdriver.ie.options import Options
class TestSuite(object):
    # Smoke test: opens Firefox, Chrome and IE and loads google.com in each.
    arg = None
    """docstring for TestSuite"""
    def __init__(self, arg = None):
        super(TestSuite, self).__init__()
        self.arg = arg
    def testSuite(self):
        # sts acts as a success flag: 0 = not yet run, 1 = all browsers OK.
        sts = 0
        optsIe = Options()
        # IE needs these relaxations to be driven reliably by WebDriver.
        optsIe.ignore_protected_mode_settings = True
        optsIe.ignore_zoom_level = True
        optsIe.require_window_focus = True
        print("=============== SUITE v0.0.4 =======================")
        print("Browsers firefox, chrome, ie called....")
        firefox = webdriver.Firefox()
        chrome = webdriver.Chrome()
        # NOTE(review): `ie_options=` is a deprecated keyword in newer
        # Selenium releases (use `options=`) -- confirm pinned version.
        ie = webdriver.Ie(ie_options=optsIe)
        print("")
        print("EXECUTING")
        if (sts == 0):
            try:
                firefox.get('http://www.google.com')
                print('firefox...')
                firefox.close()
                chrome.get('http://www.google.com')
                print('chrome...')
                chrome.close()
                print('ie...')
                ie.get('http://www.google.com')
                ie.close()
            except Exception as e:
                # NOTE(review): re-raising here means the "FAIL" branch
                # below is never reached on error -- the exception
                # propagates out of testSuite instead.
                raise e
            sts = 1
        if(sts == 1):
            print("DONE")
        else:
            print("FAIL")
# Exception-handling demo: trap a runtime error with try/except.
# Example: division by zero.
a = 2
b = 0
try:
    print(a/b)
except ZeroDivisionError:
    # Catch only the expected error. The original used a bare `except:`,
    # which would also silently swallow unrelated bugs (NameError, etc.).
    print("não é permitido divisão por 0")
from webapp2_extras.appengine.auth.models import User
from google.appengine.ext import ndb
class User(User):
    """
    Universal user model. Can be used with App Engine's default users API,
    own auth or third party authentication methods (OpenId, OAuth etc).
    based on https://gist.github.com/kylefinley

    NOTE(review): this subclass deliberately shadows the imported
    webapp2_extras auth User base class -- callers importing `User` from
    this module get the extended model.
    """
    #: Creation date.
    created = ndb.model.DateTimeProperty(auto_now_add=True)
    #: Modification date.
    updated = ndb.model.DateTimeProperty(auto_now=True)
    #: User defined unique name, also used as key_name.
    username = ndb.model.StringProperty(required=True)
    #: User Name
    name = ndb.model.StringProperty()
    #: User Last Name
    last_name = ndb.model.StringProperty()
    #: User email
    email = ndb.model.StringProperty(required=True)
    #: Password, only set for own authentication.
    password = ndb.model.StringProperty(required=True)
    #: User Country
    country = ndb.model.StringProperty()
    #: Authentication identifier according to the auth method in use. Examples:
    #: * own|username
    #: * gae|user_id
    #: * openid|identifier
    #: * twitter|username
    #: * facebook|username
    auth_id = ndb.model.StringProperty(repeated=True)
    # auth_id = model.StringProperty()
    # Flag to persist the auth across sessions for third party auth.
    auth_remember = ndb.model.BooleanProperty(default=False)
    # TODO: use these methods for authentication and reset password
    # @classmethod
    # def get_by_username(cls, username):
    #     return cls.query(cls.username == username).get()
    #
    # @classmethod
    # def get_by_auth_id(cls, auth_id):
    #     return cls.query(cls.auth_id == auth_id).get()
    #
class Order(ndb.Model):
    """A group lunch order; lifecycle is tracked by the `status` field."""
    # Choice tuples are informational (for forms/UI); the datastore does
    # not enforce them.
    STATUS = [
        ('open', 'Open'),
        ('closed', 'Closed')]
    TYPES = [
        ('thai', 'Thai'),
        ('other', 'Other'),
    ]
    status = ndb.model.StringProperty(default='open')
    place_type = ndb.model.StringProperty(default='thai')
    place_name = ndb.model.StringProperty()
    time = ndb.model.StringProperty()
    placed_by = ndb.model.StringProperty()
    added_by = ndb.model.UserProperty()
    added_at = ndb.model.DateTimeProperty(auto_now_add=True)

    def is_open(self):
        """True while the order still accepts items."""
        return 'open' == self.status

    def close(self):
        """Stop accepting items for this order."""
        self.status = 'closed'

    def is_thai(self):
        """True when the order is placed at a thai place."""
        return 'thai' == self.place_type

    def __str__(self):
        return "{} at {} by {}".format(self.place_name, self.time, self.placed_by)
class Item(ndb.Model):
    """A single dish inside an Order."""
    # Informational choice tuples for forms/UI.
    SPICINESS = [
        ('mild', 'Mild'),
        ('mild+', 'Mild +'),
        ('medium', 'Medium'),
        ('medium+', 'Medium +'),
        ('hot', 'Hot'),
        ('hot+', 'Hot +')]
    MEATS = [
        ('chicken', 'Chicken'),
        ('beef', 'Beef'),
        ('pork', 'Pork'),
        ('shrimp', 'Shrimp'),
        ('veggie', 'Veggie')]
    order_key = ndb.model.KeyProperty(kind=Order)
    item_name = ndb.model.StringProperty()
    your_name = ndb.model.StringProperty()
    spiciness = ndb.model.StringProperty()
    meat = ndb.model.StringProperty()
    added_by = ndb.model.UserProperty()
    added_at = ndb.model.DateTimeProperty(auto_now_add=True)

    def __str__(self):
        # Items without a spiciness render as just the dish name.
        if not self.spiciness:
            return self.item_name
        return "{} {} {}".format(self.item_name, self.meat, self.spiciness)
|
# The actual implementation is in Rust code. Here is a thin wrapper layer
# to provide type annotations and allow broader input data type.
from typing import Union, IO, AnyStr, Tuple, Any
import io
from pathlib import Path
from .defity import __version__ # noqa: F401
from . import defity as _mod
# Number of leading bytes handed to the Rust detector; magic-number
# sniffing only needs the head of the file.
CHUNK_SIZE = 2048
def from_file(file: Union[Path, str, IO]) -> str:
    """Return MIME type of a file, from its path, or from file-like object.
    The file must be opened in binary mode.
    Example:
    >>> import defity
    >>> defity.from_file('path/to/landscape.png')
    'image/png'
    >>> with open('path/to/landscape.png', 'rb') as f:
    ...     defity.from_file(f)
    ...
    'image/png'
    """
    _guard_file_arg(file)
    # Path-like input: the Rust side takes a PathBuf; PyO3 converts the str
    # for us (see pyo3 0.14 conversions/path.rs).
    if isinstance(file, (Path, str)):
        return _mod.from_file(str(file))
    # File-like input: rewind, sniff the head, then rewind again so the
    # caller's read position is undisturbed.
    if file.seekable():
        file.seek(0)
    head: AnyStr = file.read(CHUNK_SIZE)
    if file.seekable():
        file.seek(0)
    payload = head.encode() if isinstance(head, str) else head
    return _mod.from_bytes(payload)
def from_bytes(buf: bytes) -> str:
    """Return MIME type from content in form of bytes-like type.
    Example:
    >>> import defity
    >>> defity.from_bytes(b'some-binary-content')
    'image/png'
    """
    _guard_buf_arg(buf)
    # Normalize bytearray/memoryview input to immutable bytes before
    # crossing into the Rust extension.
    data = bytes(buf)
    return _mod.from_bytes(data)
def is_file_of_type(file: Union[Path, str, IO], mimetype: Union[str, Tuple[str, ...]]):
    """Test if given file is of one of given MIME types.
    The file must be opened in binary mode.
    Example:
    >>> import defity
    >>> defity.is_file_of_type('path/to/landscape.png', 'image/png')
    True
    >>> with open('path/to/landscape.png', 'rb') as f:
    ...     defity.is_file_of_type(f, ('image/png', 'image/jpeg', 'application/pdf'))
    ...
    True
    """
    # Doc fix: the example above previously called `from_file(f, (...))`,
    # which is a different function with a different arity.
    _guard_file_arg(file)
    # Normalize `mimetype` to a tuple of strings.
    if isinstance(mimetype, str):
        types = (mimetype,)
    elif isinstance(mimetype, tuple) and all(isinstance(t, str) for t in mimetype):
        types = mimetype
    else:
        raise TypeError('mimetype argument must be a string or tuple of strings.')
    if isinstance(file, (Path, str)):
        return _mod.is_file_of_type(str(file), types)
    # File-like object: read a sniffing chunk from the start and restore
    # the caller's file position afterwards.
    if file.seekable():
        file.seek(0)
    chunk: AnyStr = file.read(CHUNK_SIZE)
    if file.seekable():
        file.seek(0)
    return _mod.is_bytes_of_type(chunk.encode() if isinstance(chunk, str) else chunk, types)
def is_bytes_of_type(buf: bytes, mimetype: Union[str, Tuple[str, ...]]):
    """Test if given file content is of one of given MIME types."""
    _guard_buf_arg(buf)
    # Normalize `mimetype` to a tuple of strings.
    if isinstance(mimetype, str):
        accepted = (mimetype,)
    else:
        if not (isinstance(mimetype, tuple) and all(isinstance(t, str) for t in mimetype)):
            raise TypeError('mimetype argument must be a string or tuple of strings.')
        accepted = mimetype
    return _mod.is_bytes_of_type(buf, accepted)
def _guard_file_arg(file: Any):
if not isinstance(file, (Path, str, io.RawIOBase, io.BufferedIOBase, io.TextIOBase)):
raise TypeError('Input object must be a file path or file-like object')
def _guard_buf_arg(buf: Any):
if not isinstance(buf, (bytes, bytearray, memoryview)):
raise TypeError('Data must be of bytes, bytearray or memoryview type')
|
# flask app
from app import app
# flask login dependency
from flask_login import LoginManager
# flask-login config
# flask-login config
login_manager = LoginManager()
# Unauthenticated users are redirected to this endpoint.
login_manager.login_view = 'auth.login'
login_manager.init_app(app)
# User object loader func
@login_manager.user_loader
def user_loader(id):
    # Imported lazily, presumably to avoid a circular import at module
    # load time -- TODO confirm against the app package layout.
    from app.auth.models import User
    # flask-login hands us the stored user id; look up by primary key.
    return User.query.get(id)
|
def solution(N, P, Q):
    """Count semiprimes in each query range [P[k], Q[k]] for values up to N."""
    if N == 1:
        # No semiprime exists below 4, so every query answers 0.
        return [0] * len(P)
    primes = sieves(N)
    semi_flags = semiPrimes(primes, N)
    prefix = prefixSum(semi_flags)
    # Semiprimes in [p, q] == prefix[q] - prefix[p - 1].
    return [prefix[Q[k]] - prefix[P[k] - 1] for k in range(len(P))]
#construct array of primes up to N/2
def sieves(N):
    """Sieve of Eratosthenes; returns flags where sieve[i] is True iff i
    is prime, for 0 <= i <= N//2 (enough for the semiprime search).
    """
    size = (N + 2) // 2          # covers indices 0 .. N//2
    sieve = [True] * size
    sieve[0] = sieve[1] = False
    i = 2
    while i * i <= (N + 1) // 2:
        if sieve[i]:
            # Cross off multiples starting at i*i; smaller multiples were
            # already handled by smaller prime factors.
            k = i * i
            # Bug fix: bound by the list length. The original marked up to
            # (N+1)//2 inclusive, which is one past the end for odd N
            # (e.g. sieves(17) raised IndexError on sieve[9]).
            while k < size:
                sieve[k] = False
                k += i
        i += 1
    return sieve
#construct array of semiprimes up to N
def semiPrimes(sieve, N):
    """Return flags of length N+1: entry m is truthy iff m = p*q for
    primes p <= q (a semiprime); other entries stay 0."""
    flags = [0] * (N + 1)
    p = 2
    while p * p <= N:
        if sieve[p]:
            # Walk candidate second factors q >= p while p*q stays <= N.
            q = p
            while q <= N // 2 and p * q <= N:
                if sieve[q]:
                    flags[p * q] = True
                q += 1
        p += 1
    return flags
#create a prefixsum array of semiprimes up to N
def prefixSum(semiPrimesList):
    """Running count of truthy entries: out[i] == number of semiprimes <= i."""
    out = []
    running = 0
    for flag in semiPrimesList:
        if flag:
            running += 1
        out.append(running)
    return out
import torch
from torch import Tensor
from torch.nn import Module, functional
from torch.nn.modules.utils import _ntuple
class PeriodicPadNd(Module):
    """Wrap-around (circular) padding; subclasses supply `self.padding`."""

    def forward(self, x: Tensor) -> Tensor:
        # 'circular' mode tiles the tensor periodically at the borders.
        padded = functional.pad(x, self.padding, 'circular')
        return padded

    def extra_repr(self) -> str:
        # Shown in the module repr, e.g. "PeriodicPad2d((2, 2, 2, 2))".
        return str(self.padding)
class PeriodicPad2d(PeriodicPadNd):
    """Circular padding over the last two (H, W) dimensions."""

    def __init__(self, padding) -> None:
        super().__init__()
        # Expand a scalar to (left, right, top, bottom).
        self.padding = _ntuple(4)(padding)
class PeriodicPad3d(PeriodicPadNd):
    """Circular padding over the last three (D, H, W) dimensions."""

    def __init__(self, padding) -> None:
        super().__init__()
        # Expand a scalar to a 6-tuple (two entries per padded dimension).
        self.padding = _ntuple(6)(padding)
def main():
    """Demo: circularly pad a 3x3 grid by 2 and print before/after."""
    pad = PeriodicPad2d(2)
    sample = torch.arange(9).float().reshape(1, 1, 3, 3)
    print(sample)
    padded = pad(sample)
    print(padded)
# Run the demo only when executed as a script.
if __name__ == '__main__':
    main()
|
# Copyright (c) 2012 Stuart Pernsteiner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from .binaryreader import BinaryReader
class PSDPrimitiveReaderMixin(object):
    """Decoders for the primitive value types used by the PSD file format.

    Mixes into a reader that provides read_int/read_uint/read_raw/skip/
    skip_padding and a `pos` attribute. Python-2 era code (unichr/xrange).
    """

    def read_psd_unicode(self, alignment=4):
        """Read a PSD-format Unicode string."""
        count = self.read_int(4)
        # The string is stored as `count` 2-byte code units.
        chars = [unichr(self.read_int(2)) for _ in xrange(count)]
        if alignment is not None:
            self.skip_padding(count * 2, alignment)
        return u''.join(chars)

    def read_psd_string(self, alignment=4):
        """Read a PSD-format 8-bit string (called a "Pascal string" in the
        documentation).
        """
        count = self.read_int(1)
        raw = self.read_raw(count)
        if alignment is not None:
            # Padding covers the length byte plus the payload.
            self.skip_padding(1 + count, alignment)
        return raw

    def read_fixed(self):
        """Read a 32-bit fixed-point number, with 24 fractional bits."""
        raw = self.read_int(4)
        return raw / float(1 << 24)

    def read_point(self):
        """Read a 2D point in fixed-point format (stored y-first)."""
        y = self.read_fixed()
        x = self.read_fixed()
        return {'x': x, 'y': y}

    def skip_section(self, alignment=None):
        """Skip an entire section, by reading the 4-byte section length and
        skipping that many bytes.
        """
        size = self.read_uint(4)
        self.skip(size)
        if alignment is not None:
            self.skip_padding(size, alignment)

    def read_section_end(self):
        """Read a section length and compute the position of the end of the
        section.
        """
        # Read first: `pos` must be sampled after the 4-byte length field.
        size = self.read_uint(4)
        return self.pos + size
|
def factorial(n):
    """Iterative factorial; returns 1 for n <= 1 (including negatives,
    matching the original while-loop behaviour)."""
    result = 1
    for k in range(2, n + 1):
        result *= k
    return result
x="s"
while (x=="s"):
n= int(input("Pon un entero"))
h= factorial(n)
if (n<0):
print("Porfavor poner un numero positivo")
else:
print("el factorial de ",n," es ",h)
x=input("Quieres intentar con un numero diferente? s/n ")
|
# One-off demo: print a fixed message to stdout.
print('this world,mad world,fuck the world lol')
|
# lstm generate txt
# Character-level LM prep (Keras): download the Nietzsche corpus and cut
# it into overlapping windows paired with the character that follows each.
import keras
import numpy as np
path = keras.utils.get_file('nietzsche.txt', origin='https://s3.amazonaws.com/text-datasets/nietzsche.txt')
text = open(path).read().lower()
print('corpus length:',len(text))
# Window length fed to the model and stride between consecutive windows.
maxlen = 60
step = 3
sentences = []
next_char = []
for i in range(0,len(text)-maxlen,step):
    sentences.append(text[i:i+maxlen])
    next_char.append(text[i+maxlen])
# Sorted vocabulary of distinct characters.
chars = sorted(list(set(text)))
|
# from django.contrib.auth.tokens import default_token_generator
#
# token = default_token_generator.make_token(user)
|
def backward(s,i=0):
    """Print every prefix of *s*, shortest first, one per line."""
    if i >= len(s):
        return
    print(s[:i + 1])
    backward(s, i + 1)
# backward('abcdef')
def forward(s,j=0):
    """Print every suffix of *s*, shortest first, one per line."""
    if j <= -len(s):
        return
    print(s[j - 1:])
    forward(s, j - 1)
forward('abcdef')
from django.db import models
from datetime import datetime, timedelta
from django.contrib.auth.models import User
def week_later():
    """Default end time for a WeeklyTask: exactly seven days from now
    (naive local time, matching the model's start_datetime default)."""
    one_week = timedelta(days=7)
    return one_week + datetime.now()
class WeeklyTask(models.Model):
    """A per-user task that spans one week and tracks completion progress."""
    # Owning user; tasks are deleted with the account.
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    task = models.CharField(max_length=500)
    # Reward granted for completing the task.
    points = models.IntegerField(default=0)
    # NOTE(review): naive datetime.now is used instead of
    # django.utils.timezone.now; with USE_TZ enabled these defaults would
    # be naive -- confirm project settings.
    start_datetime = models.DateTimeField(default=datetime.now, blank=True)
    end_datetime = models.DateTimeField(default=week_later, blank=True)
    # user_progress counts completed units out of total_progress.
    total_progress = models.IntegerField(default=1)
    user_progress = models.IntegerField(default=0)
    is_finished = models.BooleanField(default=False)
    def __str__(self):
        return self.task
|
from bs4 import BeautifulSoup
import requests
import urllib.parse
import sys
def download(src_file, file_name):
    """Download *src_file* (a URL) and write the raw bytes to *file_name*."""
    payload = requests.get(src_file).content
    # The `with` block closes the file; the original additionally called
    # file.close() inside it, which was redundant.
    with open(file_name, "wb") as out:
        out.write(payload)
def fetch_javascript(url):
    """Download every external <script src=...> referenced by *url*,
    saving each under its basename in the current directory."""
    response = requests.get(url).content
    soup = BeautifulSoup(response, "lxml")
    # fetch javascript links
    for link in soup.find_all('script', src=True):
        src_file = link['src']
        # Bug fix: the original appended domain + src[1:] for *every* link,
        # which chopped the first character off relative paths such as
        # "js/app.js". urljoin resolves absolute, root-relative and
        # relative URLs correctly against the page URL.
        src_file = urllib.parse.urljoin(url, src_file)
        print(src_file)
        # download file
        download(src_file, src_file.split("/")[-1])
def fetch_raw(url):
    """Save the page at *url* as '<basename>.html' in the current directory."""
    target = "{}.html".format(url.split("/")[-1])
    download(url, target)
# User Input
# Usage: script.py <url> (--raw | --javascript)
if len(sys.argv) < 3:
    # Bug fix: the original indexed sys.argv[2] unconditionally and
    # crashed with IndexError when arguments were missing.
    print("usage: %s <url> (--raw | --javascript)" % sys.argv[0])
elif sys.argv[2] == "--raw":
    fetch_raw(sys.argv[1])
elif sys.argv[2] == "--javascript":
    fetch_javascript(sys.argv[1])
|
# For each test case read n, l and x, then print min(l, n-l) * x:
# only min(l, r) pairs can be formed between the two groups.
for _ in range(int(input())):
    n,l,x = map(int,input().split())
    r = n-l
    pairs = min(l,r)
    total_cost = pairs*x
    print(total_cost)
|
from django.contrib import admin
from api.models import DRFPost
# DataFlair
# admin.site.register(DRFPost) |
# Tuple basics: a fixed-size immutable sequence, indexed from zero.
point = (100, 200)
print(point[0])
print(point[1])
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from __future__ import print_function
import os.path as op
import sys
sys.path.insert(0, op.join(op.dirname(__file__), ".."))
from goatools.obo_parser import GODag
from goatools.mapslim import mapslim
if __name__ == '__main__':
    # read the two mini GO trees (full and slim)
    go_dag = GODag(op.join(op.dirname(__file__), "data/mini_obo.obo"))
    goslim_dag = GODag(op.join(op.dirname(__file__), "data/mini_slim_obo.obo"))
    #
    # This tests the map2slim algorithm with a very small example GO DAG
    # and an even smaller GOslim DAG.
    # The tree and the expected results can be seen at the original
    # map2slim.pl documentation here:
    # http://search.cpan.org/~cmungall/go-perl/scripts/map2slim
    # an image of the graph:
    # http://geneontology.cvs.sourceforge.net/viewvc/geneontology/go-dev/go-perl/doc/map2slim.gif
    #
    # Expected results
    #
    # GO ID    MAPS TO SLIM ID     ALL SLIM ANCESTORS
    # =====    ===============     ==================
    # 5        2+3                 2,3,1
    # 6        3 only              3,1
    # 7        4 only              4,3,1
    # 8        3 only              3,1
    # 9        4 only              4,3,1
    # 10       2+3                 2,3,1
    # Each term maps to (direct slim ancestors, all slim ancestors).
    expected_results = {
        'GO:0000005': (set(['GO:0000002', 'GO:0000003']),
                       set(['GO:0000001', 'GO:0000002', 'GO:0000003'])),
        'GO:0000006': (set(['GO:0000003']),
                       set(['GO:0000001', 'GO:0000003'])),
        'GO:0000007': (set(['GO:0000004']),
                       set(['GO:0000001', 'GO:0000003', 'GO:0000004'])),
        'GO:0000008': (set(['GO:0000003']),
                       set(['GO:0000001', 'GO:0000003'])),
        'GO:0000009': (set(['GO:0000004']),
                       set(['GO:0000001', 'GO:0000003', 'GO:0000004'])),
        'GO:0000010': (set(['GO:0000002', 'GO:0000003']),
                       set(['GO:0000001', 'GO:0000002', 'GO:0000003']))
    }
    # Compare mapslim() output against the table above; progress goes to
    # stderr, the final verdict decides the process exit code.
    tests_succeed = True
    for go_term, (exp_direct, exp_all) in expected_results.items():
        print(("Testing for term '" + go_term + "' ..."), end=' ', file=sys.stderr)
        direct_anc, all_anc = mapslim(go_term, go_dag, goslim_dag)
        if direct_anc != exp_direct or all_anc != exp_all:
            tests_succeed = False
            print(("failed."), file=sys.stderr)
        else:
            print(("success!"), file=sys.stderr)
    if tests_succeed:
        print("All test passed successfully!")
        sys.exit(0)
    else:
        print(("[ERROR] At least one test failed."), file=sys.stderr)
        sys.exit(1)
|
# Read an integer and print its factorial, computed with a while loop.
a = int(input("Digite o numero "))
contador = 1
fat = 1
while contador <= a:
    fat = fat * contador
    contador = contador + 1
# NOTE(review): for input <= 0 the loop never runs and this prints 1.
print (a,"!", fat)
# -*- coding: utf-8 -*-
"""
Created on Sun Aug 8 10:06:50 2021
@author: arthur
"""
## Check whether each element has two or more adjacent "corners" (same-letter neighbours)
## FAIL: this approach cannot determine whether a region is actually closed
import numpy as np
def search_set(elm, elm_list):
    """Return 1 if cell *elm* has fewer than two 4-neighbours present in
    *elm_list* (an ndarray of coordinates), else 0."""
    coords = elm_list.tolist()
    found = 0
    for step in ([1, 0], [-1, 0], [0, 1], [0, -1]):
        neighbour = (elm + step).tolist()
        if neighbour in coords:
            found += 1
            if found >= 2:
                return 0
    return 1
def containsCycle(grid):
    # Heuristic cycle detector: for each distinct letter, collect its cell
    # coordinates and prune cells with fewer than two same-letter
    # neighbours; if at least 4 cells survive the pass, report a cycle.
    a = np.array(grid)
    c = np.array(grid)
    c = np.reshape(c, (-1))
    unique = []
    for i in c:
        if i not in unique:
            unique.append(i)
            elm_list = np.argwhere(a == i)
            if len(elm_list) >= 4:
                j = 0
                # Single pruning pass. The for-loop iterates the *initial*
                # array snapshot while deletions apply to the reassigned
                # copy; j tracks the index in the shrinking array
                # (np.delete returns a copy, hence the j adjustment).
                for elm in elm_list:
                    if search_set(elm, elm_list):
                        elm_list = np.delete(elm_list, j, axis=0)
                        j = j-1
                        if len(elm_list) < 4:
                            break
                    j = j+1
                else:
                    # for-else: loop finished without break, i.e. 4+ cells
                    # with two or more neighbours remain.
                    return 1
    return 0
if __name__ == '__main__':
    # Sample 14x9 letter grid used to exercise containsCycle.
    grid = [["d","f","d","b","b","d","f","c","e"],
            ["f","c","e","d","c","f","b","d","c"],
            ["f","f","e","e","d","a","f","b","c"],
            ["b","b","a","a","c","e","c","c","c"],
            ["e","d","d","b","f","c","c","f","a"],
            ["c","a","e","a","b","a","b","f","f"],
            ["c","d","a","d","d","b","e","b","c"],
            ["b","b","e","a","c","e","a","c","e"],
            ["f","e","d","e","b","c","b","f","c"],
            ["f","c","c","c","e","a","c","f","d"],
            ["a","c","a","c","a","f","d","f","d"],
            ["b","f","e","c","c","a","a","e","b"],
            ["f","a","c","e","f","d","f","f","e"],
            ["d","e","c","a","d","f","c","c","b"]]
    # Scratch experiments kept for reference; disabled.
    # a = np.array(grid)
    # elm_list = np.argwhere(a == 'a')
    # for i ,elm in enumerate(elm_list):
    #     print(i, elm)
    # t = np.delete(elm_list, 0, axis=0)
    # a = np.array(grid)
    # elm_list = np.argwhere(a == 'c')
    # print(search_set([0,0], elm_list))
    # elm = [1, 1]
    ##
    print(containsCycle(grid))
    ## containsCycle(grid)
import time
import io
import picamera
import picamera.array
import numpy as np
import cv2
import socket
import sys
# Capture geometry (camera frame) and target LED-matrix resolution.
width=640
height=480
w_led=32
h_led=32
# Persistent TCP connection to the LED-matrix server.
sock=socket.socket(socket.AF_INET,socket.SOCK_STREAM)
server_address=('10.0.186.75',8000)
sock.connect(server_address)
def outputs():
    # Generator handed to capture_sequence(): yields a fresh BytesIO for
    # the camera to fill, then (once the capture resumes us) thresholds
    # the JPEG to a w_led x h_led black/white bitmap and ships it over
    # the socket as an "img"-prefixed string of '1'/'0' characters.
    while True:
        stream=io.BytesIO()
        yield stream
        # NOTE(review): np.fromstring is deprecated (use np.frombuffer),
        # and sock.sendall(st) sends a str -- Python-2-only as written.
        data=np.fromstring(stream.getvalue(),dtype=np.uint8)
        img=cv2.imdecode(data,0)
        blur=cv2.GaussianBlur(img,(5,5),0)
        # Otsu picks the binarization threshold automatically.
        retotsu,threshot=cv2.threshold(blur,0,255,cv2.THRESH_BINARY+cv2.THRESH_OTSU)
        resized=cv2.resize(threshot,(w_led,h_led))
        st="img"
        for i in range(0,w_led):
            for j in range(0,h_led):
                px=resized[i,j]
                # Dark pixel -> '1', bright pixel -> '0'.
                if px<128:
                    st+='1'
                if px>127:
                    st+='0'
        sock.sendall(st)
        print('Sent')
        stream.truncate()
# Continuously stream JPEG frames through the outputs() generator.
with picamera.PiCamera() as camera:
    camera.resolution=(width,height)
    camera.framerate=80
    camera.capture_sequence(outputs(),'jpeg',use_video_port=True)
|
# This code is a partial mod of of the Adafruit PyPotal lib
# https://github.com/adafruit/Adafruit_CircuitPython_PyPortal/blob/master/adafruit_pyportal.py
def wrap_nicely(string, max_chars):
    """Wrap nicely function
    A helper that will return a list of lines with word-break wrapping
    Parameters
    ----------
    string : str
        The text to be wrapped
    max_chars: int
        The maximum number of characters on a line before wrapping
    Returns
    -------
    list
        Returns a list of lines where each line is separated based
        on the amount of max_chars provided
    """
    string = string.replace('\n', '').replace('\r', '')  # Strip confusing newlines
    words = string.split(' ')
    the_lines = []
    the_line = ''
    for w in words:
        if len(the_line + ' ' + w) <= max_chars:
            the_line += ' ' + w
        else:
            # Bug fix: when the very first word already exceeds max_chars,
            # the_line is still '' and the original emitted a spurious
            # empty first line here.
            if the_line:
                the_lines.append(the_line)
            the_line = '' + w
    if the_line:  # Last line remaining
        the_lines.append(the_line)
    # Remove the leading space added to the first line during accumulation
    # (only if one is actually present -- see the overflow case above).
    if the_lines and the_lines[0].startswith(' '):
        the_lines[0] = the_lines[0][1:]
    return the_lines
class GameStats():
    """Track stats for alien invader."""

    def __init__(self, ai_settings):
        """Initialize stats; the game starts inactive and the persisted
        high score is loaded from disk."""
        self.ai_settings = ai_settings
        self.game_active = False
        self.reset_stats()
        self.update_highscore()

    def update_highscore(self):
        """Load the saved high score from the 'highscore' file.

        The file stores a number such as '123.45'; only the integer part
        before the first '.' is kept. Robustness fix: a missing or
        malformed file now yields 0 instead of raising (the original
        crashed on a fresh install with no 'highscore' file).
        """
        filename = 'highscore'
        try:
            with open(filename) as file_object:
                unsplit_contents = file_object.read()
            self.high_score = int(unsplit_contents.split(".", 1)[0])
        except (IOError, ValueError):
            self.high_score = 0

    def reset_stats(self):
        """Initialize stats that can be changed during the game."""
        self.ships_left = self.ai_settings.ship_limit
        self.mines_left = self.ai_settings.mine_limit
        self.score = 0
|
from bs4 import BeautifulSoup
import requests
from datetime import datetime
import smtplib
# Fake browser header so the website does not reject the request as a bot.
header = {
    "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.125 Safari/537.36",
    "Accept-Language": "en-GB,en-US;q=0.9,en;q=0.8"
}
# Request the target URL with the fake header attached.
response = requests.get("https://money.cnn.com/data/fear-and-greed/", headers=header)
# Parse the response body into a BeautifulSoup tree.
data = response.text
soup = BeautifulSoup(data, "html.parser")
#print(soup.prettify())
# Select the <li> entries inside the "feargreed" element.
all_index_elements = soup.select(".feargreed li")
current_index = ""
# Split each entry's text on spaces; the words from position 3 onward of
# the first entry form the current index string -- TODO confirm the page
# layout still matches this offset.
all_index = [index.get_text().split(" ") for index in all_index_elements]
for i in range(3, len(all_index[0]), 1):
    current_index += all_index[0][i]
    current_index += " "
#print(current_index)
# Replace with real credentials; for Gmail make sure the security
# settings allow SMTP login.
MY_EMAIL = "xxxx"
MY_PASSWORD = "xxxx"
today = datetime.now()
# Multiple recipients go in a list.
recipients = ["xxxxx@gmail.com", "xxxxx@gmail.com"]
#smtplib.SMTP_SSL("smtp.gmail.com", 465)
with smtplib.SMTP("smtp.gmail.com", 587, timeout=120) as connection:
    connection.starttls()
    connection.login(MY_EMAIL, MY_PASSWORD)
    connection.sendmail(
        from_addr= MY_EMAIL,
        to_addrs= recipients,
        msg=f"Subject:Today's Fear & Greed Index is {current_index}\n\n Today {today.now()} Based on the website https://money.cnn.com/data/fear-and-greed/ the Fear&Greed index is now {current_index}"
    )
import os,sys,argparse
import re
import logging
import json
from pycommons import pycommons
from pycommons import generic_logging
logger = logging.getLogger('handover')
import common
from common import Experiment, open_file
from common import *
KEY = 'handover'
class HandoverData(object):
    """One parsed diag-log record; concrete subclasses define the TYPE
    constant plus the START_PATTERN/END_PATTERN delimiters."""

    # Record type tags.
    RRC = 1
    SERVING_CELL = 2
    NEIGHBOR_CELL = 3
    RRC_STATE_CHANGE = 4

    def __init__(self, type, os_timestamp, diag_timestamp, **kwargs):
        """Store the timestamps and attach any extra fields (e.g. the raw
        payload lines) as attributes."""
        self.os_timestamp = os_timestamp
        self.diag_timestamp = diag_timestamp
        self.type = type
        # Compatibility fix: dict.iteritems() is Python-2-only; items()
        # behaves identically on both Python 2 and 3.
        for key, value in kwargs.items():
            setattr(self, key, value)

    @classmethod
    def parse(cls, data):
        """Build a record from the delimited lines, or return None when
        the delimiters or the timestamp line do not match."""
        start_line = data[0]
        end_line = data[-1]
        if cls.START_PATTERN.match(start_line) and \
                cls.END_PATTERN.match(end_line):
            # Second line carries "<os_timestamp> <diag_timestamp>".
            timestamps_line = data[1]
            try:
                os_timestamp, diag_timestamp = [float(x) for x in timestamps_line.split(' ')]
                data = data[2:-1]
                d = {'data' : data}
                return HandoverData(cls.TYPE, os_timestamp, diag_timestamp, **d)
            except ValueError:
                # Narrowed from a bare `except:`; only a malformed
                # timestamp line is an expected failure here.
                return None
class HandoverRRC(HandoverData):
    # LTE RRC message record, delimited by the markers below in the log.
    TYPE = HandoverData.RRC
    START_PATTERN = re.compile(r'\[start LTE RRC\]')
    END_PATTERN = re.compile(r'\[end LTE RRC\]')
class HandoverServingCell(HandoverData):
    # Serving-cell measurement record.
    TYPE = HandoverData.SERVING_CELL
    START_PATTERN = re.compile(r'\[start Serving cell meas\]')
    END_PATTERN = re.compile(r'\[end Serving cell meas\]')
class HandoverNeighborCell(HandoverData):
    # Neighbor-cell measurement record.
    TYPE = HandoverData.NEIGHBOR_CELL
    START_PATTERN = re.compile(r'\[start Neighbor cell meas\]')
    END_PATTERN = re.compile(r'\[end Neighbor cell meas\]')
class HandoverRRCStateChange(HandoverData):
    # RRC state-change record.
    TYPE = HandoverData.RRC_STATE_CHANGE
    START_PATTERN = re.compile(r'\[start LTE RRC state change\]')
    END_PATTERN = re.compile(r'\[end LTE RRC state change\]')
class Handover(object):
    # Generic record delimiters; the concrete type is decided in parse().
    START_PATTERN = re.compile(r'\[start.*\]')
    END_PATTERN = re.compile(r'\[end.*\]')
    def __init__(self, path):
        self.path = path
        self.data = []
        self.process()
    def process(self, start=0, stop=None):
        # Parse every [start ...]/[end ...] delimited record in the file,
        # keeping those whose os_timestamp falls inside (start, stop).
        # NOTE(review): the assert message interpolates the name `file`
        # (the Python 2 builtin), not self.path -- confirm intent.
        assert os.path.exists(self.path), "'%s' does not exist!" % (file)
        data = []
        # cmdline = 'cat %s | diag_parser' % (file)
        # ret, stdout, stderr = pycommons.run(cmdline, log=False)
        # lines = stdout.split('\n')
        lines = []
        with open_file(self.path, 'rb') as f:
            for line in f:
                lines.append(line.strip())
        for idx in range(len(lines)):
            line = lines[idx]
            m = self.START_PATTERN.match(line)
            if m:
                obj_data = []
                # NOTE(review): the inner `idx` is local to this body; the
                # for-loop resumes from its own counter, so record-interior
                # lines are revisited. Harmless (only '[start...' lines
                # trigger parsing) but quadratic-ish on large records.
                while idx < len(lines):
                    line = lines[idx]
                    obj_data.append(lines[idx])
                    m = self.END_PATTERN.match(line)
                    if m:
                        break
                    idx += 1
                obj = self.parse(obj_data)
                if not obj:
                    #logger.debug("Could not convert: \n%s\n" % ('\n'.join(obj_data)))
                    pass
                else:
                    if obj.os_timestamp > start:
                        if not stop or (stop and obj.os_timestamp < stop):
                            data.append(obj)
                idx += 1
        self.data = data
    def get_data(self, start_idx, start=0, end=None):
        # Return records with diag_timestamp in (start, end) plus the last
        # matching index offset by start_idx.
        # NOTE(review): start_idx only offsets the returned index; the
        # scan always walks self.data from the beginning -- confirm.
        data = []
        last_idx = start_idx
        for idx, obj in enumerate(self.data):
            if obj.diag_timestamp > start:
                if not end or (end and obj.diag_timestamp < end):
                    data.append(obj)
                last_idx = start_idx + idx
        return data, last_idx
    def parse(self, data):
        # Try each concrete record type in turn; first successful parse wins.
        classes = [HandoverRRC, HandoverServingCell, HandoverNeighborCell, HandoverRRCStateChange]
        for c in classes:
            f = getattr(c, 'parse')
            obj = f(data)
            if obj:
                return obj
        return None
def process(file_path, experiments):
    # Attach handover records to every cellular measurement of each
    # experiment, matching on the measurement's begin/end timestamps.
    logger.info("Processing ...")
    handover = Handover(file_path)
    expt_with_no_handover = 0
    num_cellular_expts = 0
    last_idx = 0
    for expt in experiments:
        for measurement in expt.get_measurements():
            # WiFi measurements cannot have cellular handovers; skip them.
            if measurement.interface == Experiment.WIFI_IFACE:
                continue
            op = measurement.operation
            start_time = measurement.results.get(op + 'Begin', 0)
            end_time = measurement.results.get(op + 'End', 0)
            handover_data, last_idx = handover.get_data(last_idx, start_time, end_time)
            # Records are stored on the measurement under the module KEY.
            setattr(measurement, KEY, handover_data)
            if len(handover_data) == 0:
                expt_with_no_handover += 1
            num_cellular_expts += 1
    logger.info("%d/%d cellular experiments with no handover data" % (expt_with_no_handover, num_cellular_expts))
def main(argv):
    generic_logging.init(level=logging.WARN)
    global logger
    logger = logging.getLogger()
    # Pipeline parameters come from a JSON 'config' file in the cwd.
    config = json.loads(open('config').read())
    battor_logfile_path = config['parse']['file_path']
    sample_rate = get_sample_rate(battor_logfile_path)
    down_sample = config['parse']['down_sample']
    loglines = parse(**config)
    start_edge = find_start_edge(loglines, sample_rate, down_sample, **config['edge'])
    logcat_lines = logcat_generator(**config['logcat'])
    logcat_time_offset, battor_first_edge_timestamp = get_edge_timestamps(loglines, start_edge, logcat_lines)
    # NOTE(review): Handover.__init__ takes a single path argument; this
    # two-argument call would raise TypeError -- confirm intended signature.
    h = Handover(config['handover']['dir'], logcat_time_offset)
# Script entry point.
if __name__ == '__main__':
    main(sys.argv)
|
from django.shortcuts import render,redirect, HttpResponse
from .models import Zakgeld
from django.views.decorators.csrf import csrf_exempt
from django.db.models import Avg, Sum, Count
# Create your views here.
def index(request):
    """Render the landing page."""
    return render(request,"index.html")
def minizakgeld_add (request):
    """On POST create a Zakgeld entry from the form and redirect home;
    on GET render the entry form."""
    if request.method == "POST":
        # NOTE(review): direct POST[...] access raises KeyError when a
        # field is missing -- confirm the form always submits all three.
        child = request.POST['child']
        task = request.POST['task']
        amount = request.POST['amount']
        minizakgeld=Zakgeld(child=child, task=task, amount=amount)
        minizakgeld.save()
        return redirect('/')
    return render(request, "minizakgeld_add.html")
# Detail and per-child listing views.
def detail (request,id):
    """Plain-text placeholder detail endpoint."""
    return HttpResponse("Detail:" + str(id))
def minizakgeld_details (request):
    """List every Zakgeld entry."""
    da=Zakgeld.objects.all()
    context = {
        "da": da
    }
    return render(request,"minizakgeld_details.html", context)
def child_1(request):
    """Entries for 'feridihsan', newest first, with their running total."""
    Zakgelds=Zakgeld.objects.filter(child="feridihsan").order_by('-date_created')
    toplam_amount = toplam("feridihsan")
    context = {
        "Zakgelds": Zakgelds,
        "toplam":toplam_amount
    }
    return render(request, "child_1.html",context )
def child_2(request):
    """Entries for 'ahmedvedat', smallest amount first, with the total."""
    # NOTE(review): each child view uses a different ordering (date desc /
    # amount asc / amount desc) -- confirm that is intentional.
    Zakgelds=Zakgeld.objects.filter(child="ahmedvedat").order_by('amount')
    toplam_amount = toplam("ahmedvedat")
    context = {
        "Zakgelds": Zakgelds,
        "toplam":toplam_amount }
    return render(request, "child_2.html",context )
def child_3 (request):
    """Entries for 'alisami', largest amount first, with the total."""
    Zakgelds=Zakgeld.objects.filter(child="alisami").order_by('-amount')
    toplam_amount = toplam("alisami")
    context = {
        "Zakgelds": Zakgelds,
        "toplam":toplam_amount
    }
    return render(request, "child_3.html",context )
def toplam (name):
    """Sum of `amount` over every Zakgeld row belonging to *name*."""
    total = 0
    for entry in Zakgeld.objects.filter(child=name):
        total += entry.amount
    return total
|
import pygame
class Bullet(pygame.sprite.Sprite):
    """A small round projectile travelling in one of eight directions.

    Cardinal shots move at self.speed; diagonal shots move 4px per axis.
    Bullets kill themselves once they leave the 480x320 playfield.
    """

    # Spawn anchor per direction: (x attribute, y attribute) of the
    # shooter's rect.
    _SPAWN = {
        "right": ("right", "centery"),
        "left": ("left", "centery"),
        "up": ("centerx", "top"),
        "down": ("centerx", "bottom"),
        "topLeft": ("left", "top"),
        "bottomLeft": ("left", "bottom"),
        "topRight": ("right", "top"),
        "bottomRight": ("right", "bottom"),
    }

    def __init__(self, direction, character):
        pygame.sprite.Sprite.__init__(self)
        # White disc with a red core, 6x6 pixels.
        self.image = pygame.Surface((6, 6))
        pygame.draw.circle(self.image, (255, 255, 255), (3, 3), 3)
        pygame.draw.circle(self.image, (255, 0, 0), (3, 3), 1)
        self.image.convert()
        self.rect = self.image.get_rect()
        self.direction = direction
        self.speed = 7
        anchor = self._SPAWN.get(direction)
        if anchor is not None:
            x_attr, y_attr = anchor
            self.rect.center = (getattr(character.rect, x_attr),
                                getattr(character.rect, y_attr))

    ### movement for bullet
    def update(self):
        v = self.speed
        # Per-direction (dx, dy) step and in-bounds predicate; diagonals
        # use the fixed 4px step of the original implementation.
        moves = {
            "right": ((v, 0), self.rect.centerx < 480),
            "left": ((-v, 0), self.rect.centerx > 0),
            "up": ((0, -v), self.rect.centery > 0),
            "down": ((0, v), self.rect.centery < 320),
            "topLeft": ((-4, -4), self.rect.centerx > 0 and self.rect.centery > 0),
            "bottomLeft": ((-4, 4), self.rect.centerx > 0 and self.rect.centery < 320),
            "topRight": ((4, -4), self.rect.centerx < 480 and self.rect.centery > 0),
            "bottomRight": ((4, 4), self.rect.centerx < 480 and self.rect.centery < 320),
        }
        step = moves.get(self.direction)
        if step is None:
            return
        (dx, dy), in_bounds = step
        if in_bounds:
            self.rect.centerx += dx
            self.rect.centery += dy
        else:
            # Off the playfield: remove the sprite from all groups.
            self.kill()
# SPDX-FileCopyrightText: 2020 EACG GmbH
#
# SPDX-License-Identifier: Apache-2.0 |
from Entity.Plantings import Plantings
from State import State
from random import randint
from Vector2 import Vector2
class ChefStateFree(State):
    """Idle state: the chef wanders randomly and watches for farmland demand."""

    def __init__(self, chef):
        State.__init__(self, "free")
        self.chef = chef

    def random_walk(self):
        """Pick a random destination near the chef, staying inside the world."""
        candidate = self.chef.location + Vector2(randint(-50, 50), randint(-50, 50))
        world_w, world_h = self.chef.world.WHOLE_MAP_SIZE
        # don't walk out of world
        # NOTE(review): the x bound (0 < x - 10 < w) is asymmetric with the
        # y bound (0 < y < h - 10) — confirm which margin is intended.
        if 0 < candidate.x - 10 < world_w and 0 < candidate.y < world_h - 10:
            self.chef.destination = candidate

    def do_action(self):
        chef = self.chef
        # Occasionally pick a new stroll target once the current one is reached.
        if abs(chef.location - chef.destination) < 5:
            if randint(1, 50) == 1:
                self.random_walk()
        # Choose the walking sprite matching the travel direction.
        loc_x, loc_y = chef.location.get_xy()
        dest_x, dest_y = chef.destination.get_xy()
        images = chef.world.image_class
        if loc_x < dest_x:
            chef.image = images.chef_rb_img if loc_y < dest_y else images.chef_ru_img
        else:
            chef.image = images.chef_lb_img if loc_y <= dest_y else images.chef_lu_img

    def check_condition(self):
        """Start a new farmland when there are fewer than one per 10 people."""
        chef = self.chef
        tower = chef.main_tower
        # 10 people need one farmland
        if chef.world.get_entity_number("planting") * 10 >= len(tower.people_list):
            return None
        site = Vector2(
            randint(-tower.territory_left, tower.territory_right),
            randint(-tower.territory_bottom, tower.territory_up),
        ) + tower.location
        world_w, world_h = chef.world.WHOLE_MAP_SIZE
        # don't build house out of map
        if site.x < 75 or site.x > world_w - 75 or \
                site.y < 60 or site.y > world_h - 60:
            return None
        # don't build house over other house
        for building in tower.building_list:
            if building.is_over(site):
                return None
        # 70% wheat (type 1), 30% carrots (type 2).
        images = chef.world.image_class
        if randint(1, 10) <= 7:
            farmland = Plantings(chef.world, images.wheats_unfinished_img, site, 1)
        else:
            farmland = Plantings(chef.world, images.carrots_unfinished_img, site, 2)
        chef.world.add(farmland)
        chef.new_farmland = farmland
        return "goPlanting"

    def entry_action(self):
        self.chef.speed = 30
        self.chef.destination = self.chef.location
class ChefStateGoPlanting(State):
    """Transit state: the chef walks to the freshly created farmland."""

    def __init__(self, chef):
        State.__init__(self, "goPlanting")
        self.chef = chef

    def do_action(self):
        # Choose the walking sprite matching the travel direction.
        chef = self.chef
        loc_x, loc_y = chef.location.get_xy()
        dest_x, dest_y = chef.destination.get_xy()
        images = chef.world.image_class
        if loc_x < dest_x:
            chef.image = images.chef_rb_img if loc_y < dest_y else images.chef_ru_img
        else:
            chef.image = images.chef_lb_img if loc_y <= dest_y else images.chef_lu_img

    def check_condition(self):
        # Within 10 units of the farmland counts as arrived.
        if abs(self.chef.location - self.chef.destination) < 10:
            return "planting"
        return None

    def entry_action(self):
        self.chef.destination = self.chef.new_farmland.location
        self.chef.speed = 50
class ChefStatePlanting(State):
    """Work state: the chef stands at the farmland until the work is done."""

    def __init__(self, chef):
        State.__init__(self, "planting")
        self.chef = chef

    def do_action(self):
        # Accumulate work time; time_passed / 1000 suggests milliseconds ->
        # seconds, but confirm against the game loop.
        chef = self.chef
        chef.build_process += chef.time_passed / 1000

    def check_condition(self):
        # Completion threshold for the planting work.
        return "free" if self.chef.build_process >= 10 else None

    def entry_action(self):
        self.chef.image = self.chef.world.image_class.chef_work_img
        self.chef.destination = self.chef.location

    def exit_action(self):
        """Finish the farmland: swap in the crop sprite and register it."""
        chef = self.chef
        farmland = chef.new_farmland
        images = chef.world.image_class
        farmland.image = images.wheats_img if farmland.plant_type == 1 else images.carrots_img
        chef.main_tower.add_building(farmland)
        chef.main_tower.food += farmland.food
        chef.new_farmland = None
        chef.build_process = 0
|
# coding: utf-8
# Copyright 2013 The Font Bakery Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# See AUTHORS.txt for the list of Authors and LICENSE.txt for the License.
import glob
import os
# Pre-flight dependency checks: this setup script refuses to install unless
# fontforge, ttfautohint and libmagic are available on the machine.
# require python fontforge module
msg = ('Python module `fontforge` is required. Install it with'
       ' `apt-get install python-fontforge`'
       ' or `brew install python; brew install fontforge --HEAD`')
try:
    import fontforge
except ImportError:
    raise Exception(msg)
# require ttfautohint
msg = ('Command line tool `ttfautohint` is required. Install it with'
       ' `apt-get install ttfautohint` or `brew install ttfautohint`')
# NOTE(review): `assert` is stripped under `python -O`; also
# os.environ.get('PATH') may be None (AttributeError), and ':' assumes a
# POSIX-style PATH separator — consider an explicit check via os.pathsep.
assert [os.path.join(p, 'ttfautohint')
        for p in os.environ.get('PATH').split(':')
        if os.path.exists(os.path.join(p, 'ttfautohint'))], msg
# require libmagic
import ctypes
import ctypes.util
libmagic = None
# Let's try to find magic or magic1
dll = ctypes.util.find_library('magic') or ctypes.util.find_library('magic1')
# This is necessary because find_library returns None if it doesn't find the library
if dll:
    libmagic = ctypes.CDLL(dll)
if not libmagic or not libmagic._name:
    import sys
    # Fall back to well-known install locations per platform.
    platform_to_lib = {'darwin': ['/opt/local/lib/libmagic.dylib',
                                  '/usr/local/lib/libmagic.dylib'] +
                       # Assumes there will only be one version installed
                       glob.glob('/usr/local/Cellar/libmagic/*/lib/libmagic.dylib'),
                       'win32': ['magic1.dll']}
    for dll in platform_to_lib.get(sys.platform, []):
        try:
            libmagic = ctypes.CDLL(dll)
            break
        except OSError:
            pass
if not libmagic or not libmagic._name:
    # It is better to raise an ImportError since we are importing magic module
    raise ImportError('failed to find libmagic. Check your installation')
# now installation can begin!
from setuptools import setup
setup(
    name="fontbakery",
    version='0.0.5',
    url='https://github.com/googlefonts/fontbakery/',
    description='Font Bakery is a set of command-line tools for building'
                ' and testing font projects',
    author='Vitaly Volkov',
    author_email='hash3g@gmail.com',
    packages=["bakery_cli",
              "bakery_cli.pipe",
              "bakery_cli.scripts",
              "bakery_lint",
              "bakery_lint.tests",
              "bakery_lint.tests.downstream",
              "bakery_lint.tests.upstream",
              "bakery_cli.report",
              "bakery_cli.scrapes",
              "bakery_cli.scrapes.familynames",
              "bakery_cli.scrapes.familynames.familynames",
              "bakery_cli.scrapes.familynames.familynames.spiders",
              "bakery_cli.scrapes.familynames.familynames.commands"],
    scripts=['bakery_cli/scrapes/familynames/scrapy.cfg',
             'tools/fontbakery-build.py',
             'tools/fontbakery-build-font2ttf.py',
             'tools/fontbakery-build-metadata.py',
             'tools/fontbakery-fix-ascii-fontmetadata.py',
             'tools/fontbakery-fix-fstype.py',
             'tools/fontbakery-fix-nbsp.py',
             'tools/fontbakery-fix-style-names.py',
             'tools/fontbakery-fix-opentype-names.py',
             'tools/fontbakery-fix-vertical-metrics.py',
             'tools/fontbakery-check.py',
             'tools/fontbakery-travis-deploy.py',
             'tools/fontbakery-report.py',
             'tools/fontbakery-fix-gasp.py',
             'tools/fontbakery-fix-dsig.py',
             'tools/fontbakery-fix-glyph-private-encoding.py',
             'tools/fontbakery-crawl.py',
             'tools/fontbakery-setup.py',
             'tools/collection-management/fontbakery-travis-secure.sh'],
    zip_safe=False,
    classifiers=[
        'Environment :: Console',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
    ],
    include_package_data=True,
    install_requires=[
        'lxml',
        'requests',
        'pyyaml',
        'robofab',
        'fontaine',
        'html5lib',
        'python-magic',
        'markdown',
        'scrapy',
        'urwid',
        'GitPython==0.3.2.RC1',
        'defusedxml'
    ],
    setup_requires=['nose', 'mock', 'coverage'],
    test_suite='nose.collector'
)
|
from sqlalchemy import Integer, ForeignKey, String, Column
from flask import url_for
from .recipeAuth import RecipeApp
from app import db
class Category(db.Model):
    """Model for the `category` table: a recipe category owned by a user."""
    __tablename__ = 'category'
    category_id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    category_name = db.Column(db.String(255))
    date_created = db.Column(db.DateTime, default=db.func.current_timestamp())
    # Refreshed automatically by the database on every UPDATE.
    date_modified = db.Column(
        db.DateTime, default=db.func.current_timestamp(),
        onupdate=db.func.current_timestamp())
    user_id = db.Column(db.Integer, db.ForeignKey(RecipeApp.user_id))
    # Deleting a category also deletes its recipes (delete-orphan cascade).
    recipes = db.relationship(
        'Recipe', order_by='Recipe.recipe_id', cascade="all, delete-orphan")
    def __init__(self, category_name, user_id, category_id=None, recipe_name=None):
        """Initialize a category.

        NOTE(review): `recipe_name` is stored on the instance but is not a
        mapped column — confirm whether it is still needed.
        """
        self.category_name = category_name
        self.category_id = category_id
        self.recipe_name = recipe_name
        self.user_id = user_id
    def category_json(self):
        """Return a JSON-serializable dict representation of this category."""
        return {'category_id': self.category_id,
                'category_name': self.category_name,
                'date_created': self.date_created,
                'date_modified': self.date_modified,
                'recipes': url_for('recipe_api.create_recipes', category_id=self.category_id, _external=True),
                'created_by': self.user_id
                }
    def save(self):
        """Persist this category to the database."""
        db.session.add(self)
        db.session.commit()
    @staticmethod
    def get_all():
        """Return every stored category."""
        return Category.query.all()
    def delete(self):
        """Remove this category (and, via cascade, its recipes)."""
        db.session.delete(self)
        db.session.commit()
    def __repr__(self):
        return "<Category: {}>".format(self.category_name)
|
import requests
import pandas as pd
from bs4 import BeautifulSoup
import json
from boto.s3.connection import S3Connection
from boto.s3.key import Key
# Scrape the PGA Tour field list for one tournament and publish it to S3
# (Python 2 script).
tournament = 'RBC Heritage'
tournament_link = 'rbc-heritage'
year = 2015
# create connection to bucket
# SECURITY(review): hard-coded AWS credentials in source control — these
# should be revoked and loaded from the environment or an IAM role instead.
c = S3Connection('AKIAIQQ36BOSTXH3YEBA','cXNBbLttQnB9NB3wiEzOWLF13Xw8jKujvoFxmv3L')
# create connection to bucket
b = c.get_bucket('public.tenthtee')
link = 'http://www.pgatour.com/tournaments/' + tournament_link + '/field.html'
field = []
r = requests.get(link)
# NOTE(review): no explicit parser argument — BeautifulSoup picks a default
# and warns on newer versions.
soup = BeautifulSoup(r.text)
player_table = soup.find(class_='field-table-content')
players = player_table.find_all("p")
# Names arrive as "Last, First"; convert to "First Last".
for player in players:
    raw_name = player.text
    clean_name = raw_name.split(',')
    clean_name = clean_name[1][1:] + ' ' + clean_name[0]
    field.append(clean_name)
print field
print len(field)
field = json.dumps(field)
# Publish twice: a "latest" key and a year/tournament archive key.
k = Key(b)
k.key = 'field'
k.set_contents_from_string(field)
k.make_public()
k1 = Key(b)
k1.key = 'field/' + str(year) + '/' + tournament + '/field'
k1.set_contents_from_string(field)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2020/7/4 9:18
# @Author : TheTAO
# @Site :
# @File : data_utils.py
# @Software: PyCharm
import os
import random
import math
import pickle
import logging
import re
from tqdm import tqdm
import pandas as pd
import numpy as np
from conlleval import return_report
import codecs
# Maps NER entity tag names to their bilingual (Chinese-English) display
# labels used when rendering extracted entities (see result_to_json).
entities_dict_chinese = {
    'Level': '等级-Level',
    'Test_Value': '检测值-Test_Value',
    'Test': '测试类-Test',
    'Anatomy': '解剖类-Anatomy',
    'Amount': '程度-Amount',
    'Disease': '疾病类-Disease',
    'Drug': '药物类-Drug',
    'Treatment': '治疗方法-Treatment',
    'Reason': '原因-Reason',
    'Method': '方法类-Method',
    'Duration': '持续时间-Duration',
    'Operation': '手术类-Operation',
    'Frequency': '频率-Frequency',
    'Symptom': '症状类-Symptom',
    'SideEff': '副作用-SideEff'
}
def get_dict(path):
    """Load and return the pickled character dictionary stored at *path*."""
    with open(path, 'rb') as handle:
        return pickle.load(handle)
def get_sent_tag(path):
    """Load and return the pickled sentence/tag data stored at *path*."""
    with open(path, 'rb') as handle:
        return pickle.load(handle)
def get_data_with_windows(name='train'):
    """Convert the prepared CSVs under datas/prepare_data/<name> to ID
    sequences, augment them by concatenating adjacent sentences, and pickle
    the result (the 'train' split is further divided into train/dev)."""
    with open(f'datas/prepare_data/dict.pkl', 'rb') as f:
        map_dict = pickle.load(f)
    def item2id(data, w2i):
        # Map items to IDs, falling back to the 'UNK' id for unknown items.
        return [w2i[x] if x in w2i else w2i['UNK'] for x in data]
    results = []
    root = os.path.join('datas/prepare_data', name)
    files = list(os.listdir(root))
    for file in tqdm(files):
        result = []
        path = os.path.join(root, file)
        samples = pd.read_csv(path, sep=',')
        num_samples = len(samples)
        # Sentence boundaries: rows whose word column is the 'sep' marker.
        sep_idx = [-1] + samples[samples['word'] == 'sep'].index.tolist() + [num_samples]
        # Convert every sentence to IDs, one feature column at a time.
        for i in range(len(sep_idx) - 1):
            start = sep_idx[i] + 1
            end = sep_idx[i + 1]
            id_data = []
            for feature in samples.columns:
                id_data.append(item2id(list(samples[feature])[start:end], map_dict[feature][2]))
            result.append(id_data)
        # Drop a trailing one-character sentence (the newline marker).
        if len(result[-1][0]) == 1:
            result = result[:-1]
        # Data augmentation: also keep every pair of adjacent sentences...
        two = []
        for i in range(len(result) - 1):
            first = result[i]
            second = result[i + 1]
            two.append([first[k] + second[k] for k in range(len(first))])
        # ...and every triple of adjacent sentences.
        three = []
        for i in range(len(result) - 2):
            first = result[i]
            second = result[i + 1]
            third = result[i + 2]
            three.append([first[k] + second[k] + third[k] for k in range(len(first))])
        results.extend(result + two + three)
    # Persist: the training data is split 80/20 into train/dev.
    if name == 'train':
        split_ratio = [0.8, 0.2]
        total = len(results)
        p1 = int(total * split_ratio[0])
        p2 = int(total * (split_ratio[0] + split_ratio[1]))
        with open(f'datas/prepare_data/train.pkl', 'wb') as f:
            pickle.dump(results[:p1], f)
        with open(f'datas/prepare_data/dev.pkl', 'wb') as f:
            pickle.dump(results[p1:p2], f)
    else:
        with open(f'datas/prepare_data/test.pkl', 'wb') as f:
            pickle.dump(results, f)
# Batch manager: serves a prepared pickle split as padded mini-batches.
class BatchManager(object):
    """Loads one prepared data split and yields zero-padded batches."""

    def __init__(self, batch_size, name='train'):
        # Read the prepared split directly from disk.
        with open(f'datas/prepare_data/' + name + '.pkl', 'rb') as f:
            data = pickle.load(f)
        # Sort by sentence length, then pad each batch.
        self.batch_data = self.sort_pad(data, batch_size)
        # Total number of batches.
        self.len_data = len(self.batch_data)

    def sort_pad(self, data, batch_size):
        """Sort samples by sentence length and cut them into padded batches."""
        num_batch = int(math.ceil(len(data) / batch_size))
        ordered = sorted(data, key=lambda sample: len(sample[0]))
        step = int(batch_size)
        return [self.pad_data(ordered[i * step: (i + 1) * step])
                for i in range(num_batch)]

    @staticmethod
    def pad_data(data):
        """Zero-pad every feature sequence to the longest sentence in *data*.

        NOTE(review): the per-line unpack order (..., target, radical,
        pinyin) differs from the returned order (..., radicals, pinyins,
        targets) — confirm it matches the column order of the prepared CSVs.
        """
        chars, bounds, flags, radicals, pinyins, targets = [], [], [], [], [], []
        max_length = max(len(sentence[0]) for sentence in data)
        for char, bound, flag, target, radical, pinyin in data:
            pad = [0] * (max_length - len(char))
            chars.append(char + pad)
            bounds.append(bound + pad)
            flags.append(flag + pad)
            targets.append(target + pad)
            radicals.append(radical + pad)
            pinyins.append(pinyin + pad)
        return [chars, bounds, flags, radicals, pinyins, targets]

    def iter_batch(self, shuffle=False):
        """Yield batches, optionally shuffling batch order in place first."""
        if shuffle:
            random.shuffle(self.batch_data)
        for batch in self.batch_data:
            yield batch
# Build the run logger (DEBUG to file, INFO to console).
def get_logger(log_file):
    """Return a logger that writes DEBUG records to *log_file* and INFO
    records to the console.

    BUG FIX: logging.getLogger caches loggers by name, so the original
    attached two fresh handlers on every call, duplicating each log line.
    Handlers are now attached only once per logger.
    """
    logger = logging.getLogger(log_file)
    logger.setLevel(logging.DEBUG)
    if not logger.handlers:
        fh = logging.FileHandler(log_file)
        fh.setLevel(logging.DEBUG)
        ch = logging.StreamHandler()
        ch.setLevel(logging.INFO)
        formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
        ch.setFormatter(formatter)
        fh.setFormatter(formatter)
        logger.addHandler(ch)
        logger.addHandler(fh)
    return logger
# Create the working directories used by a run.
def make_path(param):
    """Create the result, checkpoint and log directories if missing.

    param: any object exposing ``result_path``, ``ckpt_path`` and
    ``log_dir`` attributes (the training config object).
    """
    # exist_ok avoids the check-then-create race of the original
    # isdir()/makedirs() pairs and is idempotent.
    for directory in (param.result_path, param.ckpt_path, param.log_dir):
        os.makedirs(directory, exist_ok=True)
# Write prediction results to disk and score them.
def test_ner(results, path):
    """Write predictions to ``ner_predict.utf8`` under *path* and return the
    conlleval evaluation report lines."""
    output_file = os.path.join(path, "ner_predict.utf8")
    lines = []
    for block in results:
        lines.extend(line + "\n" for line in block)
        lines.append("\n")
    with open(output_file, "w", encoding='utf8') as f:
        f.writelines(lines)
    # Delegate scoring to the conlleval reference implementation.
    return return_report(output_file)
# Mapping builder.
def create_mapping(dico):
    """Create item<->ID mappings from a frequency dictionary.

    Items get IDs in order of decreasing frequency, ties broken by item.
    Returns (item_to_id, id_to_item).
    """
    ranked = sorted(dico.items(), key=lambda pair: (-pair[1], pair[0]))
    id_to_item = dict(enumerate(item for item, _ in ranked))
    item_to_id = {item: idx for idx, item in id_to_item.items()}
    return item_to_id, id_to_item
# Extend the frequency dictionary with the pretrained embedding vocabulary.
def augment_with_pretrained(dictionary, ext_emb_path, chars):
    """Extend a frequency dictionary with words from a pretrained embedding file.

    :param dictionary: word -> frequency dict to augment (mutated in place)
    :param ext_emb_path: path to a pretrained embedding text file
        (one token followed by its vector per line)
    :param chars: if None, every pretrained word is added; otherwise only the
        given chars (or their lowercase / digit-normalized variants) that
        appear in the pretrained file are added
    :return: (dictionary, word_to_id, id_to_word)
    """
    assert os.path.isfile(ext_emb_path)
    # Collect the vocabulary of the pretrained embedding file.
    # BUG FIX: the original filtered with `len(ext_emb_path) > 0` — the length
    # of the *path*, which is always true — so blank lines in the file raised
    # IndexError on split()[0]. Filter on the line content instead.
    pretrained = set(line.rstrip().split()[0].strip()
                     for line in codecs.open(ext_emb_path, 'r', 'utf-8') if line.strip())
    # We either add every word in the pretrained file,
    # or only words given in the `chars` list to which
    # we can assign a pretrained embedding.
    # Unknown-but-pretrained words enter the dictionary with frequency 0.
    if chars is None:
        for char in pretrained:
            if char not in dictionary:
                dictionary[char] = 0
    else:
        for char in chars:
            if any(x in pretrained for x in [char, char.lower(), re.sub(r'\d', '0', char.lower())]) \
                    and char not in dictionary:
                dictionary[char] = 0
    # Rebuild the ID mappings so they include the new entries.
    word_to_id, id_to_word = create_mapping(dictionary)
    return dictionary, word_to_id, id_to_word
# Substitute pretrained vectors into an initialized embedding matrix.
def load_word2vec(emb_path, id_to_word, word_dim, old_weights):
    """Overwrite rows of *old_weights* with pretrained vectors where available.

    :param emb_path: whitespace-separated embedding text file
    :param id_to_word: mapping row index -> word
    :param word_dim: expected vector dimensionality
    :param old_weights: initial weight matrix; NOTE: mutated in place and
        also returned
    :return: the (same) weight matrix with pretrained rows substituted
    """
    new_weights = old_weights
    print('Loading pretrained embeddings from {}...'.format(emb_path))
    pre_trained = {}
    emb_invalid = 0
    # Parse the file; a valid line is the token plus word_dim floats.
    for raw in codecs.open(emb_path, 'r', 'utf-8'):
        fields = raw.rstrip().split()
        if len(fields) == word_dim + 1:
            pre_trained[fields[0]] = np.array([float(v) for v in fields[1:]]).astype(np.float32)
        else:
            emb_invalid += 1
    if emb_invalid > 0:
        print('WARNING: %i invalid lines' % emb_invalid)
    c_found = 0
    c_lower = 0
    c_zeros = 0
    n_words = len(id_to_word)
    # Lookup order: exact word, lowercase, lowercase with digits mapped to 0.
    for row in range(n_words):
        word = id_to_word[row]
        lowered = word.lower()
        zeroed = re.sub(r'\d', '0', lowered)
        if word in pre_trained:
            new_weights[row] = pre_trained[word]
            c_found += 1
        elif lowered in pre_trained:
            new_weights[row] = pre_trained[lowered]
            c_lower += 1
        elif zeroed in pre_trained:
            new_weights[row] = pre_trained[zeroed]
            c_zeros += 1
    print('Loaded %i pretrained embeddings.' % len(pre_trained))
    print('%i / %i (%.4f%%) words have been initialized with pretrained embeddings.' % (
        c_found + c_lower + c_zeros, n_words, 100. * (c_found + c_lower + c_zeros) / n_words))
    print('%i found directly, %i after lowercasing, %i after lowercasing + zero.' % (c_found, c_lower, c_zeros))
    return new_weights
# Convert tagged characters into a JSON-style entity structure.
def result_to_json(string, tags):
    """Convert a character sequence and its BIOS tags into an entity dict.

    Returns ``{"string": ..., "entities": [{word, start, end, type}, ...]}``.
    Tag scheme: ``S-x`` single-char entity, ``B-x`` begin, ``I-x`` inside,
    ``O`` outside.
    """
    item = {"string": string, "entities": []}
    entity_name = ""
    entype = ""
    entity_start = 0
    idx = 0
    for char, tag in zip(string, tags):
        if tag[0] == "S":
            # Single-character entity: emitted immediately with span [idx, idx+1).
            item["entities"].append(
                {"word": char, "start": idx, "end": idx + 1, "type": entities_dict_chinese[tag[2:]]})
        elif tag[0] == "B":
            # Start accumulating a multi-character entity.
            entype = entities_dict_chinese[tag[2:]]
            entity_name += char
            entity_start = idx
        elif tag[0] == "I":
            entity_name += char
        elif tag[0] == "O" or tag[0] == "S" or tag[0] == "B":
            # NOTE(review): only "O" can reach this branch ("S" and "B" are
            # consumed above), so an entity followed directly by another
            # entity, or one ending at the last character, is never flushed.
            # The "end" of idx - 1 is also inconsistent with the idx + 1 used
            # for "S" tags — confirm the intended span convention.
            if entity_name != "":
                item["entities"].append({"word": entity_name, "start": entity_start, "end": idx - 1,
                                         "type": entype})
                entity_name = ""
        idx += 1
    return item
if __name__ == '__main__':
    # Build the windowed ID datasets for both splits.
    get_data_with_windows('train')
    get_data_with_windows('test')
    # lines = '我是中国人'
    # with open(f'datas/prepare_data/dict.pkl', 'rb') as f:
    #     map_dict = pickle.load(f)
    # # lines = input_from_line_with_feature(lines)
    # print(map_dict['bound'][2])
|
def swap(buf, a, b):
    """Exchange the elements of *buf* at indices *a* and *b* in place."""
    tmp = buf[a]
    buf[a] = buf[b]
    buf[b] = tmp
def permute_string(string):
    """Print a banner followed by every permutation of *string*."""
    print("### PERMUTING:", string)
    _permute_string(list(string), left_i=0)
def _permute_string(str_buf, left_i):
    """Recursively print all permutations of str_buf[left_i:].

    Each candidate character is swapped into position left_i, the tail is
    permuted, and the swap is undone (backtracking).
    """
    if left_i >= len(str_buf) - 1:
        print_str_buf(str_buf)
        return
    for pick in range(left_i, len(str_buf)):
        swap(str_buf, pick, left_i)
        _permute_string(str_buf, left_i + 1)
        swap(str_buf, pick, left_i)
def print_str_buf(str_buf):
    """Print the character buffer joined into a single string."""
    joined = ''.join(str_buf)
    print(joined)
def fact(n):
    """Return n! (factorial); any n <= 1 yields 1.

    BUG FIX: the original loop ran ``range(1, n)`` and therefore computed
    (n-1)! — e.g. fact(5) returned 24 instead of 120. The range must
    include n itself.
    """
    if n <= 1:
        return 1
    result = 1
    for i in range(2, n + 1):
        result *= i
    return result
# Demo driver: print the permutations of strings of length 1 through 5.
permute_string("A")
permute_string("AB")
permute_string("ABC")
permute_string("ABCD")
permute_string("ABCDE")
|
from django import forms
from django.db.models import Q
from sale.models import Transfer, Position, Commdity, Stock, TemplateCategory, Patch
class TransferForm(forms.ModelForm):
    """Form for Transfer records; the visible fields vary with the initial
    transfer status."""

    def __init__(self, *args, **kwargs):
        super(TransferForm, self).__init__(*args, **kwargs)
        # Robustness: don't KeyError when no `initial` dict is supplied.
        status = kwargs.get('initial', {}).get('transfer_status', None)
        # Status choices are the children of the current status code.
        self.fields['transfer_status'].widget.choices = TemplateCategory.objects.filter(parent__code=status).values_list('code', 'title')
        self.fields['transfer_patch'] = forms.ChoiceField(label='批号')
        if status == '1000':
            self.fields['transfer_off'] = forms.CharField(label='折扣', widget=forms.TextInput())
        if status == '0110':
            self.fields['transfer_position'] = forms.CharField(label='门店', widget=forms.TextInput())

    def clean_transfer_code(self):
        """Reject any user-supplied transfer code (it must stay empty)."""
        code = self.cleaned_data.get('transfer_code', None)
        if code:
            raise forms.ValidationError('参数错误!')
        return code

    def clean_transfer_status(self):
        """Accept only whitelisted status codes.

        BUG FIX: the original tested ``data or data in [...]``, which passed
        any non-empty value and made the whitelist dead code; it must be
        ``and`` so the membership check is actually enforced.
        """
        data = self.cleaned_data.get('transfer_status', None)
        if data and data in ['100001', '000101', '011001', '011002']:
            return data
        raise forms.ValidationError('参数错误!')

    def clean(self):
        return self.cleaned_data

    class Meta:
        model = Transfer
        fields = '__all__'
        exclude = 'transfer_position', 'transfer_off'
        widgets = {
            'transfer_code': forms.TextInput(attrs={'onchange': 'getCode(this)'}),
            'transfer_patch': forms.Select(),
            'transfer_product': forms.Select(),
            'transfer_validity': forms.Select()
        }
class PatchForm(forms.ModelForm):
    """Form for Patch records; a commodity must be selected."""

    def clean_patch_commdity(self):
        """Ensure a commodity value was provided."""
        value = self.cleaned_data.get('patch_commdity', None)
        if value:
            return value
        raise forms.ValidationError('错误!')

    class Meta:
        model = Patch
        fields = '__all__'
        widgets = {
            'patch_commdity': forms.TextInput(attrs={'readonly': ''}),
        }
from django.test import TestCase
from ddt import ddt, file_data
from scheduler.organize_data import organize_courses, organize_rooms, organize_course_time, organize_room_time, organize_timeslots
from scheduler.models import Course, Room, Time, Day
import os
ROOT_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
@ddt
class OrganizeDataTest(TestCase):
    """Data-driven tests for scheduler.organize_data; fixtures come from the
    JSON files under test_data/ via ddt's @file_data."""
    def setup_courses(self, courses):
        # Reset the Course table and recreate it from a {name: capacity} map.
        Course.objects.all().delete()
        for key in courses:
            Course.objects.create(cname=key, capacity=courses[key])
    @file_data(os.path.join(ROOT_DIR, "test_data/test_org_course_data.json"))
    def test_organize_courses(self, courses_from, all_courses, strategy, expected):
        self.setup_courses(all_courses)
        organized_courses = organize_courses(courses_from, Course.objects.all(), strategy)
        self.assertEqual(organized_courses, expected)
    def setup_rooms(self, rooms):
        # Reset the Room table and recreate it from a {name: capacity} map.
        Room.objects.all().delete()
        for key in rooms:
            Room.objects.create(rname=key, capacity=rooms[key])
    @file_data(os.path.join(ROOT_DIR, "test_data/test_org_room_data.json"))
    def test_organize_rooms(self, all_rooms, expected):
        self.setup_rooms(all_rooms)
        organized_rooms = organize_rooms(Room.objects.all())
        self.assertEqual(organized_rooms, expected)
    def setup_time_and_day(self, times, days):
        # Reset and repopulate the Time and Day tables.
        Time.objects.all().delete()
        Day.objects.all().delete()
        for time in times:
            Time.objects.create(times=time)
        for day in days:
            Day.objects.create(days=day)
    @file_data(os.path.join(ROOT_DIR, "test_data/test_org_ctime_data.json"))
    def test_organize_course_time(self, times_list, days_list, duration, expected):
        self.setup_time_and_day(times_list, days_list)
        day_times_list = organize_course_time(Time.objects.all(), Day.objects.all(), duration)
        self.assertEqual(day_times_list, expected)
    @file_data(os.path.join(ROOT_DIR, "test_data/test_org_rtime_data.json"))
    def test_organize_room_time(self, times_list, days_list, expected):
        self.setup_time_and_day(times_list, days_list)
        day_time_dict = organize_room_time(Time.objects.all(), Day.objects.all())
        self.assertEqual(day_time_dict, expected)
    @file_data(os.path.join(ROOT_DIR, "test_data/test_org_timeslots_data.json"))
    def test_organize_timeslots(self, times, expected):
        timeslots = organize_timeslots(times)
        self.assertEqual(timeslots, expected)
|
from _datetime import datetime
from flask import render_template, redirect, url_for, flash, request
from werkzeug.urls import url_parse
from flask_login import login_user, logout_user, current_user
from flask_babel import _
from app import db
from app.controller import i2c
from app.controller import bp
from app.controller.forms import *
from flask_login import login_required
from app.models import Param
@bp.route("/close_door", methods=["GET", "POST"])
@login_required
def close_door():
    """Render the close-door form; on a valid submit, close the chosen door."""
    form = CloseDoorForm()
    if not form.validate_on_submit():
        return render_template("controller/{}.html".format(form.html_name), form=form)
    i2c.close_door(form.door_num.data)
    return redirect(url_for(form.form_name))
@bp.route("/open_door", methods=["GET", "POST"])
@login_required
def open_door():
    """Render the open-door form; on a valid submit, open the chosen door."""
    form = OpenDoorForm()
    if not form.validate_on_submit():
        return render_template("controller/{}.html".format(form.html_name), form=form)
    i2c.open_door(form.door_num.data)
    return redirect(url_for(form.form_name))
@bp.route("/open_door_reader", methods=["GET", "POST"])
@login_required
def open_door_reader():
    """Render the card-reader form; on submit, open the door for the card
    using the per-door unlock duration from the Param table."""
    form = OpenDoorReaderForm()
    if not form.validate_on_submit():
        return render_template("controller/{}.html".format(form.html_name), form=form)
    door = form.door_num.data
    duration = Param.query.filter_by(door_num=door).first().open_lock_time
    i2c.open_door_reader(form.card_num.data, door, duration)
    return redirect(url_for(form.form_name))
@bp.route("/pin_status", methods=["GET", "POST"])
@login_required
def pin_status():
    """Show whether a GPIO pin is high or low (GPIO access is stubbed out)."""
    form = PinStatusForm()
    if form.validate_on_submit():
        # Pre-assign so the error message below can never hit an unbound local
        # (the original left `pin` undefined if int() raised).
        pin = form.pin_num.data
        try:
            pin = int(form.pin_num.data)
            #GPIO.setup(pin, GPIO.IN)
            if True:#GPIO.input(pin):
                status = "Pin number {} is high!".format(str(pin))
            else:
                status = "Pin number {} is low!".format(str(pin))
        except (TypeError, ValueError):
            # Narrowed from a bare `except:` that swallowed everything.
            status = "There was an error reading pin {}.".format(str(pin))
        response_data = {
            "title": "Status of Pin({})".format(str(pin)),
            "status": status,
            # BUG FIX: `datetime` is the class here (`from _datetime import
            # datetime`), so `datetime.datetime.now()` raised AttributeError.
            "time": datetime.now().strftime("%Y-%m-%d %H:%M")
        }
        return render_template("{}.html".format(form.html_name), form=form, **response_data)
    return render_template("{}.html".format(form.html_name), form=form)
@bp.route("/change/<change_pin>/<action>")
@login_required
def change_pin(change_pin, action):
    """Drive a GPIO pin high, low, or toggled, then show the pin status page."""
    change_pin = int(change_pin)
    if action == "on":
        GPIO.output(change_pin, GPIO.HIGH)
    if action == "off":
        # BUG FIX: "off" drove the pin HIGH — identical to "on"; it must be LOW.
        GPIO.output(change_pin, GPIO.LOW)
    if action == "toggle":
        GPIO.output(change_pin, not GPIO.input(change_pin))
    # NOTE(review): redirect()'s second positional argument is the HTTP status
    # code, so passing change_pin here is almost certainly wrong — url_for of
    # the pin_status endpoint was likely intended. GPIO itself is never
    # imported in this module — confirm before enabling this route.
    return redirect("pin_status", change_pin)
|
import sys, os

# Expand a template file in place: each line's 'XXX' placeholder is replaced
# once per cluster code.  Usage: script.py <file> [a|p]
f = sys.argv[1]
# BUG FIX: the original unconditionally read sys.argv[2], so the 'a' default
# never applied and the script crashed without a second argument.
use = sys.argv[2] if len(sys.argv) > 2 else 'a'
if use == 'a':
    clusters = ['UNN','UNU','NUN','NUU','NNU','DNN','DND','NDN','NDD','NND']
elif use == 'p':
    clusters = ['UNN','UNU','NUN','NNU','DNN','DND','NND']
# NOTE(review): any other value for `use` leaves `clusters` undefined and
# raises NameError below — confirm whether that is acceptable.
with open(f, 'r') as file:
    lines = [item.strip() for item in file.readlines()]
# Rewrite the same file with one expanded copy per cluster code; the `with`
# block guarantees the output is flushed and closed (the original leaked it).
with open(f, 'w') as out:
    for c in clusters:
        for l in lines:
            out.write(l.replace('XXX', c))
            out.write('\n')
|
#adapted from www.pygame.org/project-Ink+Spil-1660-.html
import pygame, sys
from pygame.locals import *
#from pygame import gfxdraw
import const
import mapping
import math
# Named colour palette (RGB tuples) shared by the emulator drawing code.
const.red = (255,0,0)
const.green = (0,255,0)
const.blue = (0,0,255)
const.white = (255,255,255)
const.black = (0,0,0)
const.pink = (255,100,255)
const.orange = (255,133,10)
const.fuchsia = (255,10,133)
const.purple = (100, 0, 100)
const.yellow = (245, 250, 10)
const.turquoise = (0, 255, 240)
const.lime = (150, 255, 0)
const.teal = (0,128,128)
const.olive = (128,128,0)
const.tan = (139, 90, 43)
const.slateblue = (113, 113, 198)
const.salmon = (198,113,113)
const.maroon = (128,0,0)
const.melon = (227,168,105)
# Outer "table" board geometry (pixels).
const.tableX = 100
const.tableY = 100
const.tableWidth = 520
const.tableHeight = 520
# 16x16 play grid of 20 px cells, offset inside the table.
const.gridX = const.tableX + 100
const.gridY = const.tableY + 100
const.gridWidth = 20 *16
const.gridHeight = 20 * 16
# Mode-selection button strip on the left edge of the table.
const.modeButtonsX = const.tableX + 25
const.modeButtonsY = const.tableY + 145
const.modeButtonsWidth = 50
const.modeButtonsHeight = 230
# Pairs of player colours, one pair per selectable game.
const.gameSelectionColours = ((const.red,const.turquoise),(const.fuchsia,const.olive),(const.pink,const.yellow),(const.green,const.teal),
                              (const.orange,const.blue),(const.maroon,const.tan),(const.slateblue,const.salmon),(const.purple,const.melon))
# Two LCD panels (top and bottom) with outer frame and inner text area.
const.LCDX = const.tableX + 190
const.LCDY = [const.tableY + 25, const.tableY + 445]
const.LCDXtext = const.LCDX + 9
const.LCDYtext = [[const.LCDY[0] + 11,const.LCDY[0] + 26],[const.LCDY[1] + 11, const.LCDY[1] + 25]]
const.LCDOuterWidth = 140
const.LCDOuterHeight = 50
const.LCDInnerWidth = 130
const.LCDInnerHeight = 32
# Keyboard keys standing in for the hardware buttons.
const.keyMap = ['e','d','r','f',chr(92),'2','7','h']
class em:
    def __init__(self,qOut,qIn):
        """Set up the pygame window, draw the static table UI, and allocate
        the 8x8 and 4x4 sprite stores for each grid.

        qOut/qIn are accepted for queue-based IPC but currently unused
        (their assignments are commented out).
        """
        #self.qOut = qOut
        #self.qIn = qIn
        self.pygame = pygame
        self.pygame.init()
        pygame.display.set_caption('LETT Emulator')
        self.screen = pygame.display.set_mode((1000,700),0,32)
        self.font = pygame.font.Font(None, 18)
        self.setBGColour(200,200,0)
        # Per-grid draw colour (index 0-3 -> [r, g, b]); see setInk().
        self.ink = [[0,0,0],[0,0,0],[0,0,0],[0,0,0]]
        self.LCDTexts= [["LCD1 Line 1","LCD1 Line 2"],["LCD2 Line 1","1234567890123456"]]
        self.drawTable()
        #self.pygame.display.flip()
        self.drawButtons()
        self.drawBlankScreen()
        self.drawLCDOutline()
        self.arrowButtons()
        self.refreshScreen()
        self.spriteArrays = []
        '''
        sprite8 = [[0,0,0,0,0,0,0,0],
        [0,0,0,0,0,0,0,0],
        [0,0,0,0,0,0,0,0],
        [0,0,0,0,0,0,0,0],
        [0,0,0,0,0,0,0,0],
        [0,0,0,0,0,0,0,0],
        [0,0,0,0,0,0,0,0],
        [0,0,0,0,0,0,0,0]]
        spriteArray1 = []
        for j in range(5): # grids
        spriteGridArray =[]
        for i in range(5): # sprites
        spriteGridArray.append(sprite8)
        spriteArray1.append(spriteGridArray)
        '''
        # spriteArray1: 5 grids x 5 sprites of 8x8 pixel bitmaps.
        spriteArray1 = [[[[0 for x in xrange(8)] for x in xrange(8)]for x in xrange(5)]for x in xrange(5)]
        '''
        sprite4 = [[0,0,0,0],
        [0,0,0,0],
        [0,0,0,0],
        [0,0,0,0]]
        spriteArray2 = []
        for j in range(5): # grids
        spriteGridArray2 =[]
        for i in range(7): # sprites
        spriteGridArray2.append(sprite4)
        spriteArray2.append(spriteGridArray2)
        '''
        # spriteArray2: 5 grids x 7 sprites of 4x4 pixel bitmaps.
        spriteArray2 = [[[[0 for x in xrange(4)] for x in xrange(4)]for x in xrange(7)]for x in xrange(5)]
        # Index 0 holds the 4x4 store, index 1 the 8x8 store — the users
        # compute the index as (size/4)-1.
        self.spriteArrays.append(spriteArray2)
        self.spriteArrays.append(spriteArray1)
    def screenPrint(self,display):
        """Dump the first four rows of a display buffer to stdout (debug aid)."""
        print display[0]
        print display[1]
        print display[2]
        print display[3]
    def input(self):
        """Blocking console prompt; pushes the entered line onto qIn.

        NOTE(review): self.qIn is never assigned in __init__ (the assignment
        is commented out), so calling this raises AttributeError — confirm
        before use.
        """
        self.qIn.put(raw_input('-->'))
        #print "here"
    def refreshScreen(self):
        """Flip the pygame display buffer so pending draws become visible."""
        #if change:
        #change = False
        #self.pygame.display.update()
        self.pygame.display.flip()
    def drawBlankScreen(self):
        """Reset the 16x16 grid to black cells with white borders."""
        # Rebuild the colour cache: every cell starts black.
        self.gridColours = []
        for i in range(16):
            row = []
            for j in range(16):
                row.append((0,0,0))
            self.gridColours.append(row)
        for i in range(16):
            for j in range(16):
                # 20x20 px cell fill, then a 2 px white outline on top.
                self.pygame.draw.rect(self.screen, self.gridColours[i][j], (const.gridX+i*20,const.gridY+j*20,20,20),0)
                self.pygame.draw.rect(self.screen, (255,255,255), (const.gridX+i*20,const.gridY+j*20,20,20),2)
    def setBGColour(self, r, g, b): #call at startup to set the 'table' colour
        """Fill the whole window with the given RGB background colour."""
        self.backgroundColour = r, g, b
        self.screen.fill(self.backgroundColour)
    def arrowButtons(self):
        """Draw dark-red left/right arrow triangles beside both LCD panels."""
        self.pygame.draw.polygon(self.screen,(125,0,0), [(const.tableX +150, const.LCDY[0]), (const.tableX +100,const.LCDY[0] + const.LCDOuterHeight/2), (const.tableX +150, const.LCDY[0]+const.LCDOuterHeight)],0)
        self.pygame.draw.polygon(self.screen,(125,0,0), [(const.tableX +370, const.LCDY[0]), (const.tableX +420,const.LCDY[0] + const.LCDOuterHeight/2), (const.tableX +370, const.LCDY[0]+const.LCDOuterHeight)],0)
        self.pygame.draw.polygon(self.screen,(125,0,0), [(const.tableX +150, const.LCDY[1]), (const.tableX +100,const.LCDY[1] + const.LCDOuterHeight/2), (const.tableX +150, const.LCDY[1]+const.LCDOuterHeight)],0)
        self.pygame.draw.polygon(self.screen,(125,0,0), [(const.tableX +370, const.LCDY[1]), (const.tableX +420,const.LCDY[1] + const.LCDOuterHeight/2), (const.tableX +370, const.LCDY[1]+const.LCDOuterHeight)],0)
    def setBG(self, r1,g1,b1, r2,g2, b2, res):
        """Paint the 16x16 grid as a checkerboard of the two given colours.

        `res` is accepted but unused here. Calls self.setPixelColour, which
        is not defined in this class as shown — TODO confirm it exists
        elsewhere.
        """
        colours = [[r1,g1,b1],[r2,g2,b2]]
        for i in range (16):
            for j in range(16):
                self.setPixelColour(i,j,colours[(i+j)%2])
        self.refreshScreen()
    def clearSpace(self, x, y):
        """Blank one grid cell (black fill + white border); ignores
        out-of-range coordinates."""
        if x <16 and y<16:
            self.pygame.draw.rect(self.screen, (0,0,0), (const.gridX+x*20,const.gridY+y*20,20,20),0)
            self.pygame.draw.rect(self.screen, (255,255,255), (const.gridX+x*20,const.gridY+y*20,20,20),2)
    def clearScreen(self, grid):
        """Blank one 8x8 quadrant (grid 0-3) or, for grid >= 4, the whole
        16x16 screen.

        Quadrant offset here is ((grid/2)*8, (grid%2)*8) — note this is the
        transpose of displaySprite/clearSprite's ((grid%2)*8, (grid/2)*8);
        NOTE(review): confirm which mapping is intended.
        """
        #print "emulator - clearScreen"
        if grid < 4 :
            for i in range(0,8):
                for j in range(0,8):
                    self.clearSpace(i+((grid/2)*8),j+((grid%2)*8))
        else:
            #print "full clear"
            for i in range(0,16):
                for j in range(0,16):
                    self.clearSpace(i,j)
        self.refreshScreen()
def setSprite(self, grid, spriteAddress, size, list):
    """Load sprite pixel data from a list of byte values into the sprite store.

    `size` is the sprite edge length (4 or 8); each byte supplies 8 pixels,
    unpacked most-significant-bit first via getBit().
    """
    # NOTE(review): `bytes` is computed here but never used below.
    bytes = 0
    if size ==4:
        bytes = 2
    elif size == 8:
        bytes = 8
    count =0
    # Sprite-store index: 0 for 4x4 sprites, 1 for 8x8 (Python 2 integer division).
    array = (size/4)-1
    for i in range(len(list)):
        # Consume bits high-to-low; count%size / count/size map the running
        # bit index to the (column, row) of the sprite.
        for h in reversed(range(8)):
            self.spriteArrays[array][grid][spriteAddress][count%size][count/size]= getBit(list[i],h)
            count+=1
def addToSprite(self, grid, spriteAddress, size, x,y, list):
    """Set a single stored-sprite pixel (x, y) from the first byte of `list`.

    NOTE(review): reads bit 4 of list[0] - presumably where the protocol
    packs the pixel flag; confirm against the sender.
    """
    # Sprite-store index: 0 for 4x4 sprites, 1 for 8x8 (Python 2 integer division).
    array = (size/4)-1
    self.spriteArrays[array][grid][spriteAddress][x][y]= getBit(list[0],4)
def displaySprite(self, grid, spriteAddress, size, x, y):
    """Draw a stored sprite's set pixels at (x, y) within the grid's quadrant, then refresh.

    Only set pixels are painted; cleared pixels are left untouched.
    """
    # Sprite-store index: 0 for 4x4 sprites, 1 for 8x8 (Python 2 integer division).
    array = (size/4)-1
    for i in range(size):
        for j in range(size):
            if self.spriteArrays[array][grid][spriteAddress][i][j]:
                # Quadrant offset: grid%2 -> x, grid/2 -> y.
                # NOTE(review): transposed relative to clearScreen's mapping - confirm.
                self.setPixel(x+i+((grid%2)*8),y+j+((grid/2)*8))
    self.refreshScreen()
def clearSprite(self, grid, spriteAddress, size, x, y):
    """Erase a stored sprite's set pixels from the grid at (x, y), then refresh."""
    # Leftover debug print removed; // keeps Python 2 semantics and also
    # works under Python 3.
    array = (size // 4) - 1  # 0 for 4x4 sprites, 1 for 8x8
    for i in range(size):
        for j in range(size):
            if self.spriteArrays[array][grid][spriteAddress][i][j]:
                self.clearSpace(x + i + ((grid % 2) * 8), y + j + ((grid // 2) * 8))
    self.refreshScreen()
def moveSprite(self, grid, spriteAddress, size, x, y, newX, newY):
    """Erase the sprite at (x, y) and redraw it at (newX, newY)."""
    self.clearSprite(grid, spriteAddress, size,x, y)
    self.displaySprite(grid, spriteAddress, size, newX, newY)
def movePiece(self, x1, y1, x2, y2):
    """Not implemented in the emulator (stub for interface parity)."""
    pass
def setInk(self, r, g, b, grid):
    """Set the drawing ink colour for one grid (0-3), or for all grids when grid >= 4."""
    targets = range(4) if grid >= 4 else (grid,)
    for g_idx in targets:
        self.ink[g_idx][0] = r
        self.ink[g_idx][1] = g
        self.ink[g_idx][2] = b
def flashPixel(self, x, y):
    """Not implemented in the emulator (stub for interface parity)."""
    pass
def drawTable(self):
    """Draw the table area: a white border then a black interior fill."""
    area = (const.tableX, const.tableY, const.tableWidth, const.tableHeight)
    self.pygame.draw.rect(self.screen, (255, 255, 255), area, 4)
    self.pygame.draw.rect(self.screen, (0, 0, 0), area, 0)
def waitForScreenPixelPress(self):
    """Poll one event; return [cellX, cellY] for a mouse-up inside the pixel
    grid, 0 when the queue is empty, or None for any other event.
    """
    # BUG FIX: event.poll() returns a single Event, not an iterable - the
    # original `for event in self.pygame.event.poll()` raised TypeError.
    event = self.pygame.event.poll()
    if event.type == MOUSEBUTTONUP:
        mousex, mousey = event.pos
        if ((mousex > const.gridX) and (mousex < (const.gridX + const.gridWidth))) and ((mousey > const.gridY) and (mousey < (const.gridY + const.gridHeight))):
            mousex -= const.gridX
            mousey -= const.gridY
            # 20px cells; // keeps Python 2 semantics and works on Python 3.
            return [mousex // 20, mousey // 20]
    elif event.type == self.pygame.NOEVENT:
        # BUG FIX: was the bare name `pygame`, which is not in scope here.
        return 0
def waitForKeyPress(self):
    """Scan pending events; return the keyMap index of the first mapped key
    pressed, or None when no mapped key is in the queue.
    """
    # Leftover debug prints removed.
    for event in self.pygame.event.get():
        if event.type == KEYDOWN:
            key = chr(event.key)
            if key in const.keyMap:  # membership test instead of count() > 0
                return const.keyMap.index(key)
def waitForButtonPress(self, emulated, keyboardHacked):
    """Scan pending events for a mode-button click (when `emulated`) or a
    mapped key press (when `keyboardHacked`); return the button/key index,
    else None.
    """
    for event in self.pygame.event.get():
        if emulated:
            if event.type == MOUSEBUTTONUP:
                mousex, mousey = event.pos
                # inside the mode button area?
                if mousex > const.modeButtonsX and mousey > const.modeButtonsY and mousex < (const.modeButtonsX + const.modeButtonsWidth) and mousey < (const.modeButtonsY + const.modeButtonsHeight):
                    # Buttons sit on a 30px pitch and are 20px wide; this x test
                    # keeps only clicks inside the two button columns.
                    if mousex < const.modeButtonsX+20 or mousex > (const.modeButtonsX + const.modeButtonsWidth - 20):
                        x = (mousex-const.modeButtonsX)/30
                        y = mousey-const.modeButtonsY
                        ycheck = y%30
                        y = y/30
                        # Reject clicks in the 10px vertical gap between rows.
                        if ycheck < 20:
                            # Buttons are numbered row-major across 2 columns.
                            return x +(y*2)
        if keyboardHacked:
            if event.type == KEYDOWN:
                print "here3"
                key = event.key
                print key
                print chr(key)
                if const.keyMap.count(chr(key))>0:
                    print "found"
                    return const.keyMap.index(chr(key))
def drawButtons(self):
    """Draw the 8x2 bank of mode buttons: black squares with white borders."""
    for row in range(8):
        for col in range(2):
            cell = (const.modeButtonsX + col * 30, const.modeButtonsY + row * 30, 20, 20)
            self.pygame.draw.rect(self.screen, (0, 0, 0), cell, 0)
            self.pygame.draw.rect(self.screen, (255, 255, 255), cell, 1)
def colourButton(self, r, g, b, x, y):
    """Fill mode button (x, y) with the given colour, redraw its border, refresh."""
    cell = (const.modeButtonsX + x * 30, const.modeButtonsY + y * 30, 20, 20)
    self.pygame.draw.rect(self.screen, (r, g, b), cell, 0)
    self.pygame.draw.rect(self.screen, (255, 255, 255), cell, 1)
    self.refreshScreen()
def printChar(self):
    """Not implemented in the emulator (stub for interface parity)."""
    pass
def drawLine(self, sx, sy, ex, ey):
    """Rasterise a straight line from (sx, sy) to (ex, ey) onto the grid.

    Steps along the dominant axis (so no gaps appear) and maps each step
    onto the other axis, then refreshes the screen once.
    """
    # Leftover debug prints removed; the two near-identical branches are
    # unified by stepping along whichever axis spans more cells.
    lx = math.trunc(math.fabs(ex - sx))
    ly = math.trunc(math.fabs(ey - sy))
    steps = lx if lx > ly else ly
    for i in range(steps + 1):
        x = math.trunc(mapping.mapRange(i, 0, steps, sx, ex))
        y = math.trunc(mapping.mapRange(i, 0, steps, sy, ey))
        self.setPixel(x, y)
    self.refreshScreen()
def drawPixel(self, x, y, grid):
    """Paint (x, y) in the given grid's ink colour and refresh the screen."""
    self.pixelDraw(x, y, grid)
    self.refreshScreen()
def pixelDraw(self, x, y, grid):
    """Record the grid's current ink colour at (x, y) and paint that cell (no refresh)."""
    # BUG FIX: copy the ink values into a tuple (matching the (0,0,0) tuples
    # the grid is initialised with) - storing the ink list itself aliased it,
    # so later setInk() calls silently recoloured already-drawn pixels.
    self.gridColours[x][y] = tuple(self.ink[grid])
    self.setPixelColour(x, y, self.gridColours[x][y])
def setPixelColour(self, x, y, colour):
    """Paint grid cell (x, y) with `colour`, draw its white border, and refresh."""
    if x < 16 and y < 16:
        cell = (const.gridX + x * 20, const.gridY + y * 20, 20, 20)
        self.pygame.draw.rect(self.screen, colour, cell, 0)
        self.pygame.draw.rect(self.screen, (255, 255, 255), cell, 2)
        self.refreshScreen()
def setPixel(self, x, y): # uses ink colour of corresponding grid
    """Paint grid cell (x, y) using the ink colour of the 8x8 quadrant it falls in."""
    if x <16 and y<16:
        # Quadrant index = x/8 + (y/8)*2 (Python 2 integer division).
        self.pygame.draw.rect(self.screen, self.ink[x/8 + (y/8)*2], (const.gridX+x*20,const.gridY+y*20,20,20),0)
        self.pygame.draw.rect(self.screen, (255,255,255), (const.gridX+x*20,const.gridY+y*20,20,20),2)
def drawLCDOutline(self):
    """Draw the white outer border of both LCD panels."""
    for panel in range(2):
        self.pygame.draw.rect(self.screen, (255, 255, 255),
            (const.LCDX, const.LCDY[panel], const.LCDOuterWidth, const.LCDOuterHeight), 2)
def writeLCD(self, LCD, line, message):
if len(message) > 16: # only display first 16 characters
print message[:15]
messsage = message[:15]
print "EMULATOR: LCD: Message too long, printed first 16 characters"
if LCD <2:
self.pygame.draw.rect(self.screen, (0,0,0),(const.LCDX+5, const.LCDY[LCD]+9+(line*(const.LCDInnerHeight/2)),const.LCDInnerWidth, const.LCDInnerHeight/2),0) # draw over previous text
displayText = self.font.render(message ,1, (255,255,255)) #white
self.screen.blit(displayText, (const.LCDXtext, const.LCDYtext[LCD][line]))
elif LCD == 2:
self.pygame.draw.rect(self.screen, (0,0,0),(const.LCDX+5, const.LCDY[0]+9+(line*(const.LCDInnerHeight/2)),const.LCDInnerWidth, const.LCDInnerHeight/2),0) # draw over previous text
displayText = self.font.render(message ,1, (255,255,255)) #white
self.screen.blit(displayText, (const.LCDXtext, const.LCDYtext[0][line]))
self.pygame.draw.rect(self.screen, (0,0,0),(const.LCDX+5, const.LCDY[1]+9+(line*(const.LCDInnerHeight/2)),const.LCDInnerWidth, const.LCDInnerHeight/2),0) # draw over previous text
displayText = self.font.render(message ,1, (255,255,255)) #white
self.screen.blit(displayText, (const.LCDXtext, const.LCDYtext[1][line]))
self.refreshScreen()
return 1
'''
elif LCD == 5:
displayLCD1Line1 = self.font.render(self.LCD1Line1, 1, (0,0,0))
displayLCD1Line2 = self.font.render(self.LCD1Line2, 1, (0,0,0))
displayLCD2Line1 = self.font.render(self.LCD2Line1, 1, (0,0,0))
displayLCD2Line2 = self.font.render(self.LCD2Line2, 1, (0,0,0))
self.screen.blit(displayLCD1Line1, (const.LCD1Xtext, const.LCD1YtextLine1))
self.screen.blit(displayLCD1Line2, (const.LCD1Xtext, const.LCD1YtextLine2))
self.screen.blit(displayLCD2Line1, (const.LCD2Xtext, const.LCD2YtextLine1))
self.screen.blit(displayLCD2Line2, (const.LCD2Xtext, const.LCD2YtextLine2))
'''
'''
def waitForControlPress(self): #physical buttons, must move this as requires optional emulator to be running
while True:
for event in self.pygame.event.get():
if event.type==QUIT:
self.pygame.quit()
self.sys.exit()
if event.type==KEYDOWN:
if event.key==K_LEFT:
pass
elif event.key==K_RIGHT:
pass
elif event.key==K_UP:
pass
elif event.key==K_DOWN:
pass
if event.type==KEYUP:
if event.key==K_LEFT:
pass
elif event.key==K_RIGHT:
pass
elif event.key==K_UP:
pass
elif event.key==K_DOWN:
pass
def waitForScreenPress():
if event.type == MOUSEBUTTONUP:
mouse_click = True
mousex, mousey = event.pos
change = True
def waitForButtonScreenPress(self):
if (mousex > modeButtonsX and mousex < modeButtonsX + modeButtonsWidth) and (mousey > modeButtonsY and mousey < modeButtonsY + modeButtonsHeight):
for i in range(2):
for j in range(8):
if (mousex > modeButtonsX + j*30 and mousex < modeButtonsX + j*30 + 20) and (mousey > modeButtonsY + i*30 and mousey < modeButtonsY + i*30 + 20):
print i;
print j;
#if mousex == previousMouseX and mousey == previouseMouseY:
#previousMouseX = mousex
#previousMouseY = mousey
#x,y = pygame.mouse.get_pos()
#x -= mouse_c.get_width()/2
#y -= mouse_c.get_height()/2
#for i in range(16):
# for j in range(16):
# pygame.draw.rect(screen, gridColours[i][j], (gridX+i*20,gridY+j*20,20,20),0)
# pygame.draw.rect(screen, (0,0,0), (gridX+i*20,gridY+j*20,20,20),2)
#screen.blit(mouse_c,(x,y))
'''
def getBit(byteval, idx):
    """Return True when bit `idx` of `byteval` is set (LSB is bit 0)."""
    mask = 1 << idx
    return bool(byteval & mask)
|
from flask import Flask, render_template
from flask_wtf import FlaskForm
from wtforms import StringField, TextField, SubmitField
app = Flask(__name__)
# Required by Flask-WTF for CSRF protection.
# NOTE(review): hard-coded secret key - load from config/env in production.
app.config["SECRET_KEY"] = "mysecretkey"
class Form(FlaskForm):
    """Demo form: first/last name, a free-text 'about' field, and submit."""
    firstname = StringField("Enter Your Firstname")
    lastname = StringField("Enter Your lastname")
    # NOTE(review): TextField is deprecated (removed in WTForms 3);
    # StringField / TextAreaField is the modern equivalent.
    about = TextField("Enter About You")
    submit = SubmitField("Submit")
@app.route("/", methods=['GET', 'POST'])
def home():
    """Render the form; on a valid POST, echo the submitted values and clear the form."""
    # False doubles as the "nothing submitted yet" sentinel for the template.
    firstname = lastname = about = False
    form = Form()
    if form.validate_on_submit():
        # Capture each value, then blank the field for redisplay.
        firstname, form.firstname.data = form.firstname.data, ''
        lastname, form.lastname.data = form.lastname.data, ''
        about, form.about.data = form.about.data, ''
    return render_template("home.html", form=form, firstname=firstname,
                           lastname=lastname, about=about)
app.run(debug=True)
|
# Read A, B, T from one line of stdin and print B produced per full A-interval
# that fits into T (integer arithmetic).
A, B, T = map(int, input().split())
result = (T // A) * B
print(result)
|
import random
import copy as cp
class Connect4Game:
    """A 6x7 Connect Four game with random, minimax, and alpha-beta AIs.

    Board cells hold 0 (empty), 1 (player 1) or 2 (player 2); row 0 is the
    top of the board.  Scoring counts every four-in-a-row for each player;
    the search AIs maximise (player 2) / minimise (player 1) the score
    difference p2 - p1.
    """

    def __init__(self):
        self.board = [[0 for col in range(7)] for row in range(6)]
        self.turn = 1            # whose move it is (1 or 2)
        self.p1Score = 0
        self.p2Score = 0
        self.piecesPlayed = 0
        self.gameFile = None
        self.searchDepth = 42    # piece-count cutoff for the search (42 = full board)
        random.seed()

    def updatePieceCount(self):
        """Recount the pieces on the live board into self.piecesPlayed."""
        self.piecesPlayed = self.getPieceCount(self.board)

    def printBoard(self):
        """Pretty-print the live board to stdout."""
        print('---------')
        for row in range(6):
            print('|' + ''.join([str(c) for c in self.board[row]]) + '|')
        print('---------')

    def writeBoardToFile(self, fout):
        """Write the six board rows plus the current turn to the file named `fout`."""
        # BUG FIX: the handle was never closed; `with` guarantees it.
        with open(fout, 'w') as handle:
            for row in range(6):
                handle.write(''.join([str(c) for c in self.board[row]]) + '\n')
            handle.write(str(self.turn))

    def playPiece(self, col, board=None, player=None, searching=False):
        """Drop `player`'s piece into column `col`.

        On the live board (searching=False): returns True/False, updates
        piecesPlayed and advances the turn.  While searching: returns a
        (success, board) tuple and leaves game state untouched.
        """
        if not board:
            board = self.board
        if not player:
            player = self.turn
        # Board already full.
        if self.getPieceCount(board) == 42:
            return False if not searching else (False, board)
        # Column out of range.
        if col < 0 or col > 6:
            return False if not searching else (False, board)
        if board[0][col] == 0:
            # Find the lowest empty row in this column.
            row = 5
            while not board[row][col] == 0:
                row -= 1
            board[row][col] = player
            if not searching:
                self.piecesPlayed += 1
                self.turn = 1 if player == 2 else 2
                return True
            return (True, board)
        # Column full: invalid play.
        return False if not searching else (False, board)

    def playPieceAI(self, method='random', depth=None):
        """Make a move for the current player using the given AI method.

        `depth` is accepted for interface compatibility but unused.
        """
        if method == 'random':
            # BUG FIX: the original removed the failed column and then called
            # random.choice() BEFORE checking for emptiness, raising
            # IndexError on a full board.
            possible = list(range(7))
            while possible:
                col = random.choice(possible)
                if self.playPiece(col):
                    return True
                possible.remove(col)
            return False
        if method == 'minimax':
            search_board = cp.deepcopy(self.board)
            # Player 2 maximises p2 - p1; player 1 minimises it.
            if self.turn == 2:
                best_val = self.find_max(search_board, self.turn, final=True)
            else:
                best_val = self.find_min(search_board, self.turn, final=True)
            self.playPiece(best_val)
        if method == 'alpha-beta':
            search_board = cp.deepcopy(self.board)
            alpha = -float('inf')
            beta = float('inf')
            if self.turn == 2:
                best_val = self.find_max_AB(search_board, self.turn, alpha, beta, final=True)
            else:
                best_val = self.find_min_AB(search_board, self.turn, alpha, beta, final=True)
            self.playPiece(best_val)

    def find_max_AB(self, board, player, alpha, beta, final=False):
        """Alpha-beta maximiser; returns the best column when `final`, else its value."""
        if self.getPieceCount(board) == self.searchDepth:
            scores = self.calcScore(board, searching=True)
            return scores[1] - scores[0]
        best_val = (-1, -float('inf'))
        choices = [0, 1, 2, 3, 4, 5, 6]
        random.shuffle(choices)  # break ties randomly between equal moves
        for i in choices:
            search_board = cp.deepcopy(board)
            new_state = self.playPiece(i, search_board, player, searching=True)
            if new_state[0]:
                best_val = max(best_val,
                               (i, self.find_min_AB(new_state[1],
                                                    1 if player == 2 else 2,
                                                    alpha, beta)),
                               key=lambda x: x[1])
                if best_val[1] >= beta:  # beta cutoff
                    return best_val[0] if final else best_val[1]
                alpha = max(alpha, best_val[1])
        return best_val[0] if final else best_val[1]

    def find_min_AB(self, board, player, alpha, beta, final=False):
        """Alpha-beta minimiser; returns the best column when `final`, else its value."""
        if self.getPieceCount(board) == self.searchDepth:
            scores = self.calcScore(board, searching=True)
            return scores[1] - scores[0]
        best_val = (-1, float('inf'))
        choices = [0, 1, 2, 3, 4, 5, 6]
        random.shuffle(choices)
        for i in choices:
            search_board = cp.deepcopy(board)
            new_state = self.playPiece(i, search_board, player, searching=True)
            if new_state[0]:
                best_val = min(best_val,
                               (i, self.find_max_AB(new_state[1],
                                                    1 if player == 2 else 2,
                                                    alpha, beta)),
                               key=lambda x: x[1])
                if best_val[1] <= alpha:  # alpha cutoff
                    return best_val[0] if final else best_val[1]
                beta = min(beta, best_val[1])
        return best_val[0] if final else best_val[1]

    def find_max(self, board, player, final=False):
        """Plain minimax maximiser; returns the best column when `final`, else its value."""
        if self.getPieceCount(board) == self.searchDepth:
            scores = self.calcScore(board, searching=True)
            return scores[1] - scores[0]
        successors = []
        choices = [0, 1, 2, 3, 4, 5, 6]
        random.shuffle(choices)
        for i in choices:
            search_board = cp.deepcopy(board)
            new_state = self.playPiece(i, search_board, player, searching=True)
            if new_state[0]:
                successors.append((i, self.find_min(new_state[1],
                                                    1 if player == 2 else 2)))
        best_val = max(successors, key=lambda x: x[1])
        return best_val[0] if final else best_val[1]

    def find_min(self, board, player, final=False):
        """Plain minimax minimiser; returns the best column when `final`, else its value."""
        if self.getPieceCount(board) == self.searchDepth:
            scores = self.calcScore(board, searching=True)
            return scores[1] - scores[0]
        successors = []
        choices = [0, 1, 2, 3, 4, 5, 6]
        random.shuffle(choices)
        for i in choices:
            search_board = cp.deepcopy(board)
            new_state = self.playPiece(i, search_board, player, searching=True)
            if new_state[0]:
                successors.append((i, self.find_max(new_state[1],
                                                    1 if player == 2 else 2)))
        best_val = min(successors, key=lambda x: x[1])
        return best_val[0] if final else best_val[1]

    def getPieceCount(self, board):
        """Return the number of non-empty cells on `board`."""
        p = 0
        for row in range(6):
            for col in range(7):
                if not board[row][col] == 0:
                    p += 1
        return p

    def calcScore(self, board=None, searching=False):
        """Count four-in-a-rows for both players.

        With searching=True returns (score1, score2); otherwise stores the
        scores on self.p1Score / self.p2Score.
        """
        if not board:
            board = self.board
        score1 = 0
        score2 = 0
        four1 = [1] * 4
        four2 = [2] * 4
        # horizontal runs
        for row in board:
            for i in range(4):
                if row[i:i + 4] == four1:
                    score1 += 1
                elif row[i:i + 4] == four2:
                    score2 += 1
        # vertical runs
        for j in range(7):
            col = [row[j] for row in board]
            for i in range(3):
                if col[i:i + 4] == four1:
                    score1 += 1
                elif col[i:i + 4] == four2:
                    score2 += 1
        # both diagonal directions
        for c in range(4):
            for r in range(3):
                bl_tr = [board[r + 3 - i][c + i] for i in range(4)]
                tl_br = [board[r + i][c + i] for i in range(4)]
                if bl_tr == four1:
                    score1 += 1
                elif bl_tr == four2:
                    score2 += 1
                if tl_br == four1:
                    score1 += 1
                elif tl_br == four2:
                    score2 += 1
        if not searching:
            self.p1Score = score1
            self.p2Score = score2
        else:
            return (score1, score2)
|
from random import choice
# Read four student names from the keyboard and pick one at random.
aluno = []
for i in range(4):
    aluno.append(str(input('Nome do {}º aluno: '.format(i + 1))))
escolhido = choice(aluno)
print('Aluno escolhido foi: {}'.format(escolhido))
import ast
import textwrap
from twitter.checkstyle.common import (
CheckstylePlugin,
Nit,
OffByOneList,
PythonFile
)
import pytest
def make_statement(statement):
    """Dedent a triple-quoted fixture and drop its leading blank line."""
    dedented = textwrap.dedent(statement)
    return '\n'.join(dedented.splitlines()[1:])
PYTHON_STATEMENT = make_statement("""
import ast
from os.path import (
join,
split,
)
import zookeeper
class Keeper(object):
def __init__(self):
self._session = None
def session(self):
return self._session
""")
def test_python_file():
    """PythonFile exposes filename, the logical-line map, and 1-based indexing."""
    pf = PythonFile(PYTHON_STATEMENT, 'keeper.py')
    assert pf.filename == 'keeper.py'
    # logical line -> (start, stop, indent) over the PYTHON_STATEMENT fixture
    assert pf.logical_lines == {
        1: (1, 2, 0),
        2: (2, 6, 0),
        7: (7, 8, 0),
        10: (10, 11, 0),
        11: (11, 12, 2),
        12: (12, 13, 4),
        14: (14, 15, 2),
        15: (15, 16, 4)
    }
    # Indexing is 1-based: index 0 and one past the end both raise.
    with pytest.raises(IndexError):
        pf[0]
    with pytest.raises(IndexError):
        pf[len(PYTHON_STATEMENT.splitlines()) + 1]
    assert pf[1] == ["import ast"]
    # A multi-line logical line is returned as all of its physical lines.
    assert pf[2] == ["from os.path import (", " join,", " split,", ")"]
    assert pf[3] == [" join,"]
    assert '\n'.join(pf) == PYTHON_STATEMENT
    assert list(pf.enumerate()) == list(enumerate(PYTHON_STATEMENT.splitlines(), 1))
def test_style_error():
    """CheckstylePlugin.error/.warning build Nits with codes, severities and line spans."""
    pf = PythonFile(PYTHON_STATEMENT, 'keeper.py')

    class ActualCheckstylePlugin(CheckstylePlugin):
        # Minimal concrete subclass: reports no nits of its own.
        def nits(self):
            return []

    cp = ActualCheckstylePlugin(pf)
    # No line number: line_number stays None.
    se = cp.error('A123', 'You have a terrible taste in libraries.')
    assert se.line_number is None
    assert se.code == 'A123'
    str(se)  # exercise __str__
    se = cp.error('A123', 'You have a terrible taste in libraries.', 7)
    assert se.line_number == '007'
    str(se)
    # The logical line starting at 2 spans physical lines 2-5.
    se = cp.error('A123', 'You have a terrible taste in libraries.', 2)
    assert se.line_number == '002-005'
    assert se.severity == Nit.ERROR
    str(se)
    sw = cp.warning('A321', 'You have a terrible taste in libraries.', 2)
    assert sw.severity == Nit.WARNING
    assert sw.code == 'A321'
    str(sw)
    # An AST node may be passed in place of a line number.
    import_from = None
    for node in ast.walk(pf.tree):
        if isinstance(node, ast.ImportFrom):
            import_from = node
    assert import_from is not None
    ase = cp.error('B380', "I don't like your from import!", import_from)
    assert ase.severity == Nit.ERROR
    se = cp.error('B380', "I don't like your from import!", 2)
    assert str(se) == str(ase)
def test_off_by_one():
    """OffByOneList behaves as a 1-based sequence with strict bounds/type checks."""
    obl = OffByOneList([])
    # An empty list has no valid index at all.
    for index in (-1, 0, 1):
        with pytest.raises(IndexError):
            obl[index]
    # Empty slices are allowed...
    for s in (slice(1, 1), slice(1, 2), slice(-2, -1)):
        assert obl[s] == []
    # ...but slices touching index 0 are not.
    for s in (slice(-1, 0), slice(0, 1)):
        with pytest.raises(IndexError):
            obl[s]
    obl = OffByOneList([1, 2, 3])
    # Elements equal their 1-based position, so lookups line up exactly.
    for k in (1, 2, 3):
        assert obl[k] == k
        assert obl[k:k + 1] == [k]
        assert obl.index(k) == k
        assert obl.count(k) == 1
    assert list(reversed(obl)) == [3, 2, 1]
    # 0 and past-the-end are out of range.
    for k in (0, 4):
        with pytest.raises(IndexError):
            obl[k]
    # Non-integer keys raise TypeError.
    for value in (None, 2.0, type):
        with pytest.raises(TypeError):
            obl[value]
|
from itertools import combinations
def all_against_all_design(conditions):
    """Return every unordered pair of distinct conditions (round-robin design).

    Pairs are 2-tuples in the order combinations() yields them.
    """
    # combinations() already yields the 2-tuples; the original comprehension
    # unpacked and rebuilt each one for nothing.
    return list(combinations(conditions, 2))
|
import random
from discord.ext import commands
@commands.command(
    name="random",
    brief="1부터 주어진 수 사이의 임의의 자연수를 출력함.",
    help=(
        "num에 1 이상의 자연수를 넣으면, 1부터 num 사이의 임의의 값을 출력함.\n"
        "그러니까 0 이하의 수나 자연수가 아닌 걸 넣으면 로그 따서 널 XXXXX....)"
    ),
)
async def _random(ctx: commands.Context, num: int):
    # Send a random integer in [1, num]; randrange raises for num <= 0.
    # (Comments, not docstrings: discord.py would use a docstring as help text.)
    random_number = random.randrange(num) + 1
    await ctx.send(f"Random number : {random_number}")
@commands.command(
    brief="together-bot의 repo url을 출력함.",
    help="아니 이 명령어를 help까지 쳐본다고? 그런 당신에게는 role get bot developer",
)
async def repo(ctx: commands.Context):
    # Reply with the bot's GitHub repository URL.
    await ctx.send(
        "Bot repository URL : https://github.com/team-play-together/together-bot"
    )
@commands.command(brief="구글 검색 결과의 url을 출력함.", help="args에 검색할 내용을 넣으면 됨.")
async def google(ctx: commands.Context, *args):
    # Join the words with '+' to form a Google search query URL.
    query = "+".join(args)
    await ctx.send(f"https://www.google.com/search?q={query}")
@commands.command(brief="나무위키 검색 결과의 url을 출력함.", help="args에 검색할 내용을 넣으면 됨.")
async def namu(ctx: commands.Context, *, query):
    # URL escape characters that namu.wiki rewrites on its side.
    namu_wiki_escape_code_dict = {
        "%": "%25",
        "\\": "%5C",
        " ": "%20",
        "#": "%23",
        "?": "%3F",
    }
    # '%' must be replaced first (it is first in the dict) so later escapes
    # are not double-encoded.
    for character, escape_code in namu_wiki_escape_code_dict.items():
        query = query.replace(character, escape_code)
    await ctx.send(f"https://namu.wiki/w/{query}")
@commands.command(
    brief="봇이 탁구를 쳐줍니다. 실행 중이라면 무조건 받아칩니다.",
    help="봇이 실행 중이라면 pong을 출력함.\n(근데 봇이 죽어있으면 help도 안 나오는데 핑을 설명해 줘야해?)",
)
async def ping(ctx: commands.Context):
    # Liveness check: always answers "pong!" while the bot is running.
    await ctx.send("pong!")
@commands.command(brief="together-bot에 기여하는 방법")
async def contribute(ctx: commands.Context):
    # Point users at the issue tracker and pull-request page.
    # (Comment, not a docstring: a docstring would become the command's help text.)
    await ctx.send(
        "아이디어 제안, 버그 : "
        "`https://github.com/team-play-together/together-bot/issues/new`\n"
        "코드 기여(PR) : `https://github.com/team-play-together/together-bot/pulls`"
    )
def setup(bot: commands.Bot):
    """Extension entry point: register every command in this module on the bot."""
    bot.add_command(ping)
    bot.add_command(repo)
    bot.add_command(_random)
    bot.add_command(google)
    bot.add_command(namu)
    bot.add_command(contribute)
|
import sys
import re
class Project(object):
    """Run-length decompressor for markers of the form ``(NxM)``.

    A marker means: take the next N characters and repeat them M times.
    ``run1`` expands one level of markers (part 1); ``run2`` computes the
    fully recursive expanded length (part 2) without building the string.
    """

    def __init__(self, data):
        # Raw puzzle text (may end with a trailing newline).
        self.input = data
        # group(1) = whole "(NxM)" marker, group(2) = N, group(3) = M.
        # BUG FIX: raw string - the plain '\(' / '\d' escapes trigger
        # invalid-escape warnings on modern Pythons.
        self.compression_pattern = re.compile(r'(\((\d+)x(\d+)\))')

    def run1(self):
        """Expand one level of markers; return the non-whitespace output length."""
        self.output = ""
        self.pos = 0
        while True:
            try:
                self.find_next_decompression()
            except StopIteration:
                break
        # Whitespace (e.g. the trailing newline) does not count.
        return len(''.join(self.output.split()))

    def find_next_decompression(self):
        """Expand the next marker into self.output.

        Raises StopIteration when no marker remains (after copying the tail).
        """
        match = self.compression_pattern.search(self.input, self.pos)
        if match:
            # Copy any literal text that precedes the marker.
            self.output += self.input[self.pos:match.start()]
            self.pos = match.start() + len(match.group(0))
            repeating = self.input[self.pos:self.pos + int(match.group(2))]
            self.output += repeating * int(match.group(3))
            self.pos += len(repeating)
        else:
            # No more markers: copy the remaining text and signal completion.
            self.output += self.input[self.pos:]
            raise StopIteration()

    def run2(self):
        """Return the fully recursive decompressed length of the input."""
        # -1 compensates for the trailing newline of the puzzle file.
        return self.calculate_expansion(self.input) - 1

    def calculate_expansion(self, line):
        """Recursively compute the expanded length of `line` without building it."""
        total = 0
        cur_pos = 0
        while cur_pos < len(line):
            match = self.compression_pattern.search(line, cur_pos)
            if match:
                # Literal characters before the marker count once each.
                total += match.start() - cur_pos
                expand_pos = match.start() + len(match.group(1))
                expand_line = line[expand_pos:expand_pos + int(match.group(2))]
                # Markers inside the repeated section expand recursively.
                expanded_size = self.calculate_expansion(expand_line)
                total += expanded_size * int(match.group(3))
                cur_pos = expand_pos + int(match.group(2))
            else:
                total += len(line[cur_pos:])
                break
        return total
if __name__ == '__main__':
    # Run both decompression passes on the puzzle input (Python 2 script).
    with open('input.txt', 'r') as f:
        p = Project(f.read())
    print "Part 1:", p.run1()
    print "Part 2:", p.run2()
|
#!/usr/bin/python
# Change the ID of a Dynamixel servo from the command line.
import argparse

# Build the command-line interface.
parser= argparse.ArgumentParser(description='Change the ID of a Dynamixel.')
# Shorthand so each option below is a single call.
addarg= lambda *args,**kwargs: parser.add_argument(*args,**kwargs)
addarg('-dev', '--dev', metavar='DEV', dest='dev', default='/dev/ttyUSB0',
    help='Device location. default: /dev/ttyUSB0')
addarg('-type', '--type', metavar='DXL_TYPE', dest='dxl_type', default='XM430-W350',
    help='Dynamixel type. default: XM430-W350')
addarg('-cid', '--curr_id', metavar='CURR_ID', dest='curr_id', default=1, type=int,
    help='Current Dynamixel ID. default: 1')
addarg('-nid', '--new_id', metavar='NEW_ID', dest='new_id', type=int, required=True,
    help='New Dynamixel ID.')
addarg('-br', metavar='BAUDRATE', dest='baudrate', default=2e6, type=int,
    help='Baud rate. default: 2e6')
args= parser.parse_args()

from dxl_util import *
# Set up the device on the requested port/baud rate with the current ID.
dxl= TDynamixel1(args.dxl_type, dev=args.dev)
dxl.Id= args.curr_id
dxl.Baudrate= args.baudrate
dxl.Setup()
# Write the new ID to the servo's 'ID' register and close the connection.
dxl.Write('ID',args.new_id)
dxl.Quit()
|
from django.shortcuts import render
from django.http import HttpResponse
import requests
from .models import Currency
# Create your views here.
def index(request):
    """Fetch the current PrivatBank exchange rates and render them.

    Builds a list of (unsaved) Currency objects from the public JSON API and
    passes them to the 'rates.html' template.
    """
    # BUG FIX: the request had no timeout, so a slow/hung upstream would
    # block the Django worker indefinitely.
    response = requests.get(
        "https://api.privatbank.ua/p24api/pubinfo?json&exchange&coursid=5",
        timeout=10,
    )
    currencies = []
    for item in response.json():
        c = Currency()
        c.name = item['ccy']
        c.buy = item['buy']
        c.sale = item['sale']
        currencies.append(c)
    return render(request, 'rates.html', {'currencies': currencies})
#!/usr/bin/env python3
import requests
from bs4 import BeautifulSoup
from pathlib import Path
import io
import sys
import re
import xlwt
import xlrd
import os
import time
import datetime
if __name__ == "__main__":
    # Scrape DOL OLMS LM-3 union reports for each "Question 14" answer and
    # write the results to an Excel file, with resume support via a marker file.
    # NOTE(review): indentation reconstructed from control flow - confirm
    # nesting (especially the per-row cleanup and the final file removal)
    # against the original file.
    # Re-wrap stdout as UTF-8 so the Chinese log text prints correctly.
    sys.stdout = io.TextIOWrapper(sys.stdout.buffer,encoding='utf-8')
    print("开始", flush = True)
    add_flag = False # concatenation flag: True while inside a Question 14 answer
    question_text = "" # accumulated answer text for the current report
    last_num = 0 # union number saved by the previous (interrupted) run
    last_year = datetime.datetime.now().year # year from the previous run; defaults to this year
    year = ""
    file_year = ""
    # Output file name: result_<timestamp>.xls in the working directory.
    excel_file_name = os.getcwd() + "\\result_" + \
        datetime.datetime.now().strftime("%Y%m%d%H%M%S") + ".xls"
    # Resume support: load the union number and year from the previous run, if any.
    my_file = Path("./the_last_dance.txt")
    if my_file.is_file():
        with open('./the_last_dance.txt', 'r') as f:
            last_data = (f.readline()).split(",")
            last_num = int(last_data[0])
            last_year = int(last_data[1])
    # Create the output Excel workbook.
    count=0
    workbook = xlwt.Workbook()
    sheet = workbook.add_sheet("Sheet Name1")
    # Read the union codes from the input spreadsheet.
    excel_data = xlrd.open_workbook("lm3_auditor_input.xls")
    table = excel_data.sheet_by_index(0)
    del excel_data
    try:
        for rowNum in range(table.nrows):
            rowVale = table.row_values(rowNum)
            file_num = int(rowVale[0]) # first spreadsheet column: the union code
            # Skip codes already processed by the interrupted run.
            if file_num < last_num:
                continue
            # Obtain a session cookie.
            url_cok = "https://olms.dol-esa.gov/query/getOrgQry.do"
            r_cok = requests.get(url_cok)
            cookie_jar = r_cok.cookies
            # Fetch the list of annual reports for this union.
            url_union = "https://olmsapps.dol.gov/query/orgReport.do"
            param_union = {"reportType":"detailResults","detailID":file_num,"detailReport":"unionDetail",
                "rptView":"undefined","historyCount":"0","screenName":"orgQueryResultsPage",
                "searchPage":"/getOrgQry.do","pageAction":"-1","startRow":"1",
                "endRow":"1","rowCount":"1","sortColumn":"","sortAscending":"false",
                "reportTypeSave":"orgResults"}
            r_union = requests.post(url_union, param_union, cookies=cookie_jar)
            # Parse the response and collect the report links (Fiscal Year rows).
            result_union = r_union.text
            bs_union = BeautifulSoup(result_union,"html.parser")
            data_union = bs_union.select("a[class='getFormReportLink']")
            # Walk every report link.
            for j in data_union:
                year = (j.text)[0:4]
                file_year = rowVale[1]
                # Open the link only when its year matches the spreadsheet year
                # and the link text marks it as a report.
                if year.isdigit() and int(year) == file_year and (j.text).find("Report") >= 0:
                    if int(year) > last_year:
                        continue
                    strlist = j["href"].split(",") # extract the report's ID
                    # Fetch this report's LM-3 form to look for Question 14.
                    url_detail = "https://olmsapps.dol.gov/query/orgReport.do"
                    param_detail = {"reportType":"formReport","detailID":strlist[1],"detailReport":"LM3Form",
                        "rptView":"undefined","historyCount":"1","screenName":"orgDetailPage",
                        "searchPage":"/getOrgQry.do","pageAction":"-1","startRow":"1",
                        "endRow":"25","rowCount":"25","sortColumn":"","sortAscending":"false",
                        "reportTypeSave":"detailResults"}
                    r_detail = requests.post(url_detail, param_detail, cookies=cookie_jar)
                    # Parse the form body's text nodes.
                    result_detail = r_detail.text
                    bs_detail = BeautifulSoup(result_detail,"html.parser")
                    data_detail = bs_detail.select("div[class='ERDS-form-text'] br")
                    # Scan the nodes, accumulating the Question 14 answer.
                    data_detail_count = 0
                    for k in data_detail:
                        data_detail_count = data_detail_count + 1
                        # Only string successors carry form text.
                        if isinstance(k.next, str):
                            if re.match("Question\s14", k.next):
                                question_text = question_text + k.next + " "
                                add_flag = True
                                # Question 14 is the very last node: flush now.
                                if data_detail_count == len(data_detail):
                                    print("工会编号:" + str(file_num), flush = True)
                                    print("年份:" + year, flush = True)
                                    print("内容:" + question_text, flush = True)
                                    print("--------------------------")
                                    sheet.write(count,0, file_num) # row, column, value
                                    sheet.write(count,1, year)
                                    sheet.write(count,2, question_text)
                                    count = count + 1;
                                    question_text = ""
                                    add_flag = False
                                continue
                            if add_flag:
                                # A new section header ends the Question 14 answer.
                                if re.match("Question", k.next) or re.match("Schedule", k.next) \
                                    or re.match("Statement", k.next):
                                    print("工会编号:" + str(file_num), flush = True)
                                    print("年份:" + year, flush = True)
                                    print("内容:" + question_text, flush = True)
                                    print("--------------------------")
                                    sheet.write(count,0, file_num) # row, column, value
                                    sheet.write(count,1, year)
                                    sheet.write(count,2, question_text)
                                    count = count + 1;
                                    question_text = ""
                                    add_flag = False
                                else:
                                    # The answer continues across lines: append it.
                                    question_text = question_text + k.next
                                    if data_detail_count == len(data_detail):
                                        print("工会编号:" + str(file_num), flush = True)
                                        print("年份:" + year, flush = True)
                                        print("内容:" + question_text, flush = True)
                                        print("--------------------------")
                                        sheet.write(count,0, file_num) # row, column, value
                                        sheet.write(count,1, year)
                                        sheet.write(count,2, question_text)
                                        count = count + 1;
                                        question_text = ""
                                        add_flag = False
                    # Free per-report objects.
                    del k
                    del r_detail
                    del url_detail
                    del param_detail
                    del result_detail
                    del bs_detail
                    del data_detail
                    # Throttle: wait 2 seconds to avoid hammering the site.
                    time.sleep(2)
            del j
            # Save results to Excel (each row, so progress survives a crash).
            workbook.save(excel_file_name)
            # Free per-union objects.
            del r_cok
            del url_cok
            del r_union
            del url_union
            del param_union
            del result_union
            del bs_union
            del data_union
    finally:
        # On interruption or error, record the last union code and year so
        # the next run can resume from here.
        with open('./the_last_dance.txt', 'w') as obj_f:
            obj_f.write(str(file_num) + "," + year)
    # Normal completion: the resume marker is no longer needed.
    if(os.path.exists('./the_last_dance.txt')):
        os.remove('./the_last_dance.txt')
    print("完成",flush = True)
|
#-- GAUDI jobOptions generated on Mon Jul 6 15:32:07 2015
#-- Contains event types :
#-- 11104125 - 43 files - 501996 events - 151.82 GBytes
#-- Extra information about the data processing phases:
#-- Processing Pass Step-123901
#-- StepId : 123901
#-- StepName : Stripping20-NoPrescalingFlagged for MC MagUp
#-- ApplicationName : DaVinci
#-- ApplicationVersion : v32r2p1
#-- OptionFiles : $APPCONFIGOPTS/DaVinci/DV-Stripping20-Stripping-MC-NoPrescaling.py;$APPCONFIGOPTS/DaVinci/DataType-2012.py;$APPCONFIGOPTS/DaVinci/InputType-DST.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py
#-- DDDB : dddb-20120831
#-- CONDDB : sim-20121025-vc-mu100
#-- ExtraPackages : AppConfig.v3r155
#-- Visible : Y
#-- Processing Pass Step-124440
#-- StepId : 124440
#-- StepName : Sim06b with Nu=2.5 - MU - MayJune 2012
#-- ApplicationName : Gauss
#-- ApplicationVersion : v42r4
#-- OptionFiles : $APPCONFIGOPTS/Gauss/Beam4000GeV-mu100-MayJun2012-nu2.5.py;$DECFILESROOT/options/@{eventType}.py;$LBPYTHIAROOT/options/Pythia.py;$APPCONFIGOPTS/Gauss/G4PL_LHEP_EmNoCuts.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py
#-- DDDB : dddb-20120831
#-- CONDDB : sim-20121025-vc-mu100
#-- ExtraPackages : AppConfig.v3r160;DecFiles.v26r24
#-- Visible : Y
#-- Processing Pass Step-124055
#-- StepId : 124055
#-- StepName : Digi12 w/o spillover - MU - L0TCK 0x003D
#-- ApplicationName : Boole
#-- ApplicationVersion : v24r0
#-- OptionFiles : $APPCONFIGOPTS/Boole/Default.py;$APPCONFIGOPTS/Boole/DataType-2012.py;$APPCONFIGOPTS/L0/L0TCK-0x003D.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py
#-- DDDB : dddb-20120831
#-- CONDDB : sim-20121025-vc-mu100
#-- ExtraPackages : AppConfig.v3r155
#-- Visible : N
#-- Processing Pass Step-123755
#-- StepId : 123755
#-- StepName : Reco14 for MC - MagUp
#-- ApplicationName : Brunel
#-- ApplicationVersion : v43r2p2
#-- OptionFiles : $APPCONFIGOPTS/Brunel/DataType-2012.py;$APPCONFIGOPTS/Brunel/MC-WithTruth.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py
#-- DDDB : dddb-20120831
#-- CONDDB : sim-20121025-vc-mu100
#-- ExtraPackages : AppConfig.v3r151
#-- Visible : Y
#-- Processing Pass Step-124019
#-- StepId : 124019
#-- StepName : Trigger - TCK 0x4097003d Flagged - MU - MayJune 2012
#-- ApplicationName : Moore
#-- ApplicationVersion : v14r2p1
#-- OptionFiles : $APPCONFIGOPTS/Moore/MooreSimProduction.py;$APPCONFIGOPTS/Conditions/TCK-0x4097003d.py;$APPCONFIGOPTS/Moore/DataType-2012.py;$APPCONFIGOPTS/L0/L0TCK-0x003D.py
#-- DDDB : dddb-20120831
#-- CONDDB : sim-20121025-vc-mu100
#-- ExtraPackages : AppConfig.v3r155
#-- Visible : Y
from Gaudi.Configuration import *
from GaudiConf import IOHelper
# Input dataset: production 00023160, 2012 ALLSTREAMS.DST.
# File number 00000043 is absent from the production output, hence the
# explicit number list instead of a plain range.
_LFN_PATTERN = ('LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00023160/0000/'
                '00023160_%08d_1.allstreams.dst')
_FILE_NUMBERS = list(range(1, 43)) + [44]
IOHelper('ROOT').inputFiles([_LFN_PATTERN % n for n in _FILE_NUMBERS],
                            clear=True)
|
"""
Definition of urls for DjangoWebProject1.
"""
from datetime import datetime
from django.urls import path
from django.contrib import admin
from django.contrib.auth.views import LoginView, LogoutView
from app import forms, views
from django.conf.urls import include, url
import ERPApp.views
# Route table: the site index is reachable at the root and at /home;
# /about serves the about page.
_index_view = ERPApp.views.index
urlpatterns = [
    url(r'^$', _index_view, name='index'),
    url(r'^home$', _index_view, name='home'),
    url(r'^about$', ERPApp.views.about, name='about'),
]
|
# Generated by Django 2.1.5 on 2019-01-19 06:35
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add `main_category` and `sub_category` CharFields to the Article model.

    `preserve_default=False` means the given defaults are one-off values used
    only to back-fill existing rows; they are not kept on the model.
    """
    dependencies = [
        ('posts', '0003_article_content_preview'),
    ]
    operations = [
        migrations.AddField(
            model_name='article',
            name='main_category',
            field=models.CharField(default='test', max_length=120),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='article',
            name='sub_category',
            field=models.CharField(default='test_sub_category', max_length=120),
            preserve_default=False,
        ),
    ]
|
from flask import Blueprint,render_template,Flask,request,redirect,session,json,current_app as app
import hashlib,time
from jsjy.models import db, Student,Class
from jsjy.public import r
# Blueprint grouping all /student* endpoints of this module.
student=Blueprint('student',__name__)
@student.route('/student')
def index():
    """Render the student management page."""
    return render_template('/student.html')
@student.route('/student',methods=['POST'])
def add_class():
    """Create a student from the JSON request body.

    Rejects a duplicate ID-card number (`cid`); optional fields default to ''.
    Afterwards the enclosing class's head-count is refreshed.
    """
    data = request.get_data()
    j_data = json.loads(data)
    # The ID-card number must be unique across students.
    user=db.session.query(Student).filter_by(cid=j_data['cid']).first()
    if user is not None:
        return r({},0,'',{'cid':'身份证已经存在'})
    # Optional fields: second guardian, address, student code, notes.
    j_data.setdefault('l2_name','')
    j_data.setdefault('l2_phone','')
    j_data.setdefault('add','')
    j_data.setdefault('code','')
    j_data.setdefault('info','')
    # NOTE(review): positional args must match Student.__init__ exactly —
    # presumably (class_id, name, code, cid, in_time, out_time=0, info,
    # l_name, l_phone, l2_name, l2_phone, add); confirm against the model.
    cl = Student(j_data['class_id'],j_data['name'],j_data['code'],j_data['cid'],j_data['in_time'],0,j_data['info'],j_data['l_name'],j_data['l_phone'],j_data['l2_name'],j_data['l2_phone'],j_data['add'])
    db.session.add(cl)
    db.session.commit()
    set_class_count(j_data['class_id']);
    return r({},0,'添加成功')
# Fetch the student list (paginated, filterable, sortable).
@student.route('/studentlist',methods=['GET'])
def get_student():
    """Return one page of students as JSON.

    Query params: perPage/page (pagination), in_time ("start,end" range),
    orderBy/orderDir (column + 'asc'/'desc'), plus per-column fuzzy filters.
    """
    perPage = int(request.values.get('perPage'))
    page = int(request.values.get('page'))
    in_time = request.values.get('in_time')
    # One optional LIKE filter per column.
    search = {}
    for field in ('name', 't_id', 'code', 'cid', 'l_name', 'l_phone',
                  'l2_name', 'l2_phone', 'add'):
        search[field] = request.values.get(field)
    orderBy = request.values.get('orderBy')
    orderDir = request.values.get('orderDir')
    # Sort order: caller-specified column/direction, else newest first.
    order = Student.id.desc()
    if orderBy and orderDir:
        order = getattr(getattr(Student, orderBy), orderDir)()
    # Build the WHERE clause.
    where = [Student.id > 0]
    for k, v in search.items():
        if v:
            where.append(getattr(Student, k).like("%" + v + "%"))
    if in_time:
        bounds = in_time.split(',')
        where.append(Student.in_time >= bounds[0])
        where.append(Student.in_time <= bounds[1])
    # BUG FIX: the total used to be an unfiltered table count, so pagination
    # disagreed with the filtered rows; count the same filtered query instead.
    count = db.session.query(Student).filter(*where).count()
    offset = (page - 1) * perPage
    tc = db.session.query(Student).order_by(order).filter(*where) \
        .limit(perPage).offset(offset)
    rows = []
    for t in tc:
        rows.append({
            'id': t.id,
            'class_id': t.class_id,
            'name': t.name,
            'code': t.code,
            'cid': t.cid,
            'in_time': t.in_time,
            'out_time': t.out_time,
            'info': t.info,
            'l_name': t.l_name,
            'l_phone': t.l_phone,
            'l2_name': t.l2_name,
            'l2_phone': t.l2_phone,
            'add': t.add
        })
    return r({'count': count, 'rows': rows})
# Delete a student
@student.route('/studentlist/<int:cid>',methods=['DELETE'])
def delete_user(cid):
    """Remove the student with primary key *cid* and refresh the class head-count."""
    tc = db.session.query(Student).filter_by(id=cid).first()
    if tc is None:
        # Nothing to delete; report success so the operation is idempotent
        # (the original crashed with AttributeError on a missing row).
        return r({},0,'删除成功')
    class_id = tc.class_id
    # Delete the row we already fetched instead of issuing a second query.
    db.session.delete(tc)
    db.session.commit()
    set_class_count(class_id)
    return r({},0,'删除成功')
# Update a student
@student.route('/studentlist/<int:cid>',methods=['PUT'])
def edit_user(cid):
    """Update student *cid* from the JSON body, keeping class head-counts in sync."""
    data = request.get_data()
    j_data = json.loads(data)
    # Reject if another student already uses this ID-card number.
    user = db.session.query(Student).filter_by(cid=j_data['cid']).filter(Student.id != cid).first()
    if user is not None:
        return r({},0,'',{'cid':'身份证已经存在'})
    tc = db.session.query(Student).filter_by(id=cid).first()
    old_class_id = 0
    moved = False
    if tc.class_id != j_data['class_id']:
        old_class_id = tc.class_id
        moved = True
    # BUG FIX: every assignment below originally ended with a trailing comma,
    # which stored a 1-element *tuple* on each attribute instead of the value.
    tc.name = j_data['name']
    tc.class_id = j_data['class_id']
    tc.code = j_data['code']
    tc.cid = j_data['cid']
    tc.in_time = j_data['in_time']
    tc.out_time = j_data['out_time']
    tc.l_name = j_data['l_name']
    tc.l_phone = j_data['l_phone']
    tc.l2_name = j_data['l2_name']
    tc.l2_phone = j_data['l2_phone']
    tc.add = j_data['add']
    tc.info = j_data['info']
    db.session.commit()
    if moved:
        # Refresh head-counts of both the new and the old class.
        set_class_count(j_data['class_id'])
        if old_class_id > 0:
            set_class_count(old_class_id)
    return r({},0,'修改成功')
def set_class_count(class_id):
    """Recount the students enrolled in *class_id* and store it on the Class row."""
    if class_id > 0:
        enrolled = db.session.query(Student).filter_by(class_id=class_id).count()
        class_row = db.session.query(Class).filter_by(id=class_id).first()
        class_row.user_count = enrolled
        db.session.commit()
    return True
import sys
if len(sys.argv) < 2:
print "Mising arguments"
else:
for i in range(int(sys.argv[1])+1):
print "Testing {} times ...".format(i)
|
# Generated by Django 3.0.7 on 2020-10-09 09:23
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Extend the `employee` model.

    Adds HR/auth-related columns (foreign keys to level/securities/site
    tables, login + OTP fields, leave counters, created/updated timestamps)
    and relaxes nullability on several existing columns.
    """
    dependencies = [
        ('cl_app', '0002_auto_20201009_0613'),
        ('custom', '0002_auto_20201009_0626'),
        ('cl_table', '0022_employee'),
    ]
    operations = [
        migrations.AddField(
            model_name='employee',
            name='EMP_TYPEid',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, to='custom.EmpLevel'),
        ),
        migrations.AddField(
            model_name='employee',
            name='LEVEL_ItmIDid',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, to='cl_table.Securities'),
        ),
        migrations.AddField(
            model_name='employee',
            name='Site_Codeid',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='staff_emp', to='cl_app.ItemSitelist'),
        ),
        migrations.AddField(
            model_name='employee',
            name='created_at',
            field=models.DateTimeField(auto_now_add=True, null=True),
        ),
        migrations.AddField(
            model_name='employee',
            name='defaultSiteCodeid',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='staff', to='cl_app.ItemSitelist'),
        ),
        migrations.AddField(
            model_name='employee',
            name='fcmtoken',
            field=models.TextField(blank=True, db_column='FCMToken', null=True),
        ),
        migrations.AddField(
            model_name='employee',
            name='fullname',
            field=models.CharField(blank=True, max_length=100, null=True),
        ),
        migrations.AddField(
            model_name='employee',
            name='is_login',
            field=models.CharField(choices=[('YES', 'YES'), ('NO', 'NO')], db_column='Login', max_length=50, null=True),
        ),
        migrations.AddField(
            model_name='employee',
            name='leave_bal',
            field=models.IntegerField(blank=True, db_column='Leave_bal', null=True),
        ),
        migrations.AddField(
            model_name='employee',
            name='leave_taken',
            field=models.IntegerField(blank=True, db_column='Leave_taken', null=True),
        ),
        migrations.AddField(
            model_name='employee',
            name='notificationsetting',
            field=models.BooleanField(blank=True, db_column='notificationSetting', null=True),
        ),
        migrations.AddField(
            model_name='employee',
            name='otp',
            field=models.CharField(blank=True, max_length=20, null=True),
        ),
        migrations.AddField(
            model_name='employee',
            name='pw_password',
            field=models.CharField(db_column='PW_Password', max_length=15, null=True),
        ),
        migrations.AddField(
            model_name='employee',
            name='skills_list',
            field=models.CharField(max_length=1000, null=True),
        ),
        migrations.AddField(
            model_name='employee',
            name='skillset',
            field=models.CharField(blank=True, max_length=40, null=True),
        ),
        migrations.AddField(
            model_name='employee',
            name='updated_at',
            field=models.DateTimeField(auto_now=True, null=True),
        ),
        migrations.AlterField(
            model_name='employee',
            name='age_range0',
            field=models.BooleanField(db_column='Age_Range0', null=True),
        ),
        migrations.AlterField(
            model_name='employee',
            name='age_range1',
            field=models.BooleanField(db_column='Age_Range1', null=True),
        ),
        migrations.AlterField(
            model_name='employee',
            name='age_range2',
            field=models.BooleanField(db_column='Age_Range2', null=True),
        ),
        migrations.AlterField(
            model_name='employee',
            name='age_range3',
            field=models.BooleanField(db_column='Age_Range3', null=True),
        ),
        migrations.AlterField(
            model_name='employee',
            name='age_range4',
            field=models.BooleanField(db_column='Age_Range4', null=True),
        ),
        migrations.AlterField(
            model_name='employee',
            name='emp_dob',
            field=models.DateField(blank=True, db_column='Emp_DOB', null=True),
        ),
        migrations.AlterField(
            model_name='employee',
            name='emp_email',
            field=models.EmailField(blank=True, db_column='Emp_email', max_length=40, null=True),
        ),
        migrations.AlterField(
            model_name='employee',
            name='emp_isactive',
            field=models.BooleanField(db_column='Emp_isactive', default=True),
        ),
        migrations.AlterField(
            model_name='employee',
            name='emp_joindate',
            field=models.DateField(blank=True, db_column='Emp_JoinDate', null=True),
        ),
        migrations.AlterField(
            model_name='employee',
            name='emp_pic',
            field=models.ImageField(blank=True, db_column='Emp_PIC', null=True, upload_to='img'),
        ),
        migrations.AlterField(
            model_name='employee',
            name='getsms',
            field=models.BooleanField(db_column='GetSMS', null=True),
        ),
        migrations.AlterField(
            model_name='employee',
            name='show_in_appt',
            field=models.BooleanField(db_column='Show_In_Appt', default=False, null=True),
        ),
        migrations.AlterField(
            model_name='employee',
            name='show_in_sales',
            field=models.BooleanField(db_column='Show_In_Sales', default=False, null=True),
        ),
        migrations.AlterField(
            model_name='employee',
            name='show_in_trmt',
            field=models.BooleanField(db_column='Show_In_Trmt', default=False, null=True),
        ),
    ]
|
import MyModule
# Invoke the custom module's message() helper for the name "arun".
MyModule.message("arun")
import matplotlib.pyplot as plt
import sys
import string
import numpy
from matplotlib.backends.backend_pdf import PdfPages
from itertools import chain
from matplotlib.patches import Polygon
#maxi = lambda v: max(enumerate(v), operator.itemgetter(1))
SSTART = 1  # first residue index used on the x-axis
NSSTART = 15  # numbering offset for the residue labels
SEQ = "QKLVFFAEDVGSNKGAIIGLMVGGVVIATVIVITLVMLKKK"
NSEQ = [e[1]+str(e[0]) for e in enumerate(SEQ, NSSTART)]  # labels like 'Q15', 'K16', ...
SINON = 25.0 # signal-to-noise divisor for backbone (N) rows - i.e. not broadened
SINOC = 75.0 # signal-to-noise divisor for side-chain rows
PERPAGE = 5  # subplots per PDF page in the (unreachable) per-row plots below
NCcorr = [1,2]  # optional per-column correction factors (currently unused)
def kk(ss):
    """Sort key: residue index parsed from a label like 'A12-HB', minus 2."""
    prefix = ss[0].split('-')[0]
    # Drop a leading chain/residue letter if present (e.g. 'A12' -> '12').
    if prefix[0] not in string.digits:
        prefix = prefix[1:]
    return int(prefix) - 2
def aa(ss):
    """Atom-type letters from a label: 'A12-HB2/1' -> 'HB'."""
    token = ss[0].split('-')[1]
    token = token.split('/')[0]
    # Strip any digit suffix (e.g. 'HB2' -> 'HB').
    return ''.join(ch for ch in token if not ch.isdigit())
# Require exactly one argument: the data file to plot.
if len(sys.argv) != 2:
    print("Invalid cmd")
    sys.exit(1)  # FIX: exit non-zero on a usage error (was exit(0))
# Parse the input file: each line is "<label> <val> <val> ...".
# NOTE(review): this script relies on Python-2 semantics — map() must return
# a list for the d[1] indexing below; it will not run unchanged on Python 3.
data = []
with open(sys.argv[1]) as f:
    for l in f.readlines():
        t = l.split()
        d = [t[0]]
        nss = map(float, t[1:])
        ns = nss
        #ns = map(lambda x: x[0]*x[1], zip(nss, NCcorr))
        d+= map(lambda x: x*1.0, ns)
        # Non-'N' labelled rows: double the first value, capped at 0.920.
        if d[0][-1] != 'N':
            d[1] = min(d[1]*2,0.920)
        data.append(d)
# Per-atom-type tables: legend ordering, marker colour, x-offset, label text.
ORDER = {'H':0, 'HA':1, 'HB':2, 'HG':3, 'HD':4, 'HE':5}
AC = {'H':'black', 'HA':'r', 'HB':'b', 'HG':'g', 'HD':'y', 'HE':'y'}
AD = {'H': 0, 'HA':0.1, 'HB':0.2, 'HG':0.3, 'HD':0.4, 'HE':0.45}
GREEK = {'H':r'$H_N$', 'HA':r'$H_\alpha$', 'HB':r'$H_\beta$', 'HG':r'$H_\gamma$',
         'HD':r'$H_\delta$', 'HE':r'$H_\epsilon$'}
def aac(ss):
    """Plot colour for a data row, looked up from its atom type."""
    return AC[aa(ss)]
# Sort rows by residue index; drop residue 2 (index key == 2).
data.sort(key=kk)
data = [d for d in data if kk(d) != 2]
#data.sort(key = lambda x: x[0][1:])
#data = data[:5]
pp = PdfPages(sys.argv[1]+".pdf")
# Global y maximum across all value columns (used by the per-row plots below).
ylim = max(chain(*[d[1:] for d in data]))
fig = plt.figure(figsize=(8.3, 1./3*11.7))  # a third of an A4 page
ax = fig.add_subplot(111)
#fig, ax = plt.subplots()
def kkz(ss):
    """Residue x-position plus a small per-atom-type horizontal offset."""
    return kk(ss) + AD[aa(ss)]
# Pair each row's residue index with its first value column.
# NOTE(review): Python-2 semantics assumed throughout — zip()/map() must
# return lists for the dd[0]/dd[1] indexing below.
dd = zip(map(kk, data), map(lambda d: d[1], data))
#dd = sorted(dd,key = lambda x: x[1], reverse=True)
dd = zip(*dd)  # transpose to (indices, values)
#ax.bar(dd[0], dd[1])
#POLYGON
# Grey envelope polygon spanning the min..max value at every residue.
xx = zip(map(aa, data), map(kk, data), map(lambda d: d[1], data))
mx = []
mn = []
for i in range(SSTART, SSTART+len(SEQ)):
    l = [x[2] for x in xx if x[1] == i]
    if l:
        mx.append((i, max(l)))
    l = [x[2] for x in xx if x[1] == i]
    if l:
        mn.append((i, min(l)))
mn.reverse()  # walk the lower edge backwards so the outline closes in order
mm = mx+mn
pol = plt.Polygon(mm, closed = True, fill = True, color='grey', alpha=0.3)
ax.add_patch(pol)
#DOTS
# Error bars: single-letter atom types (backbone H) use SINON, others SINOC.
err = [(2-s)/SINON if len(a) == 1 else (2-s)/SINOC for a,s in zip(map(aa, data), dd[1])]
ax.scatter(dd[0], dd[1], s=25, c=map(aac,data), lw=0)
ax.errorbar(dd[0], dd[1], yerr = err, marker=None, fmt=None, ecolor='black')
#LEGENG
# Build legend proxies, one per atom type, in ORDER order.
arts = []
labs = []
for n, c in sorted(AC.items(), key=lambda x: ORDER[x[0]]):
    arts.append(plt.Line2D((0,0), (0,0), color = c, marker='o', linestyle = ''))
    labs.append(GREEK[n])
ax.legend(arts, labs, loc=4, prop={'size':10}, numpoints=1)
#for a, o in zip(map(aa, data), sl):
#    plt.setp(o, color=AC[a])
#ax.bar(map(kk, data), map(lambda d: d[1], data))
plt.ylim(0.0, 1.25)
plt.xlim(SSTART-0.5, SSTART+len(SEQ)+3)
ax.grid()
plt.xticks(0.0+ numpy.array(range(SSTART, SSTART + len(SEQ))))
ax.set_xticklabels(NSEQ, rotation=45, fontsize = 6)
plt.savefig("1.png", dpi=600)
pp.savefig(fig)
pp.close()
sys.exit(0)
# NOTE: everything below is unreachable (sys.exit above); kept as-is.
#fig = plt.figure(figsize=(8.3, len(data)*11.7/7))
for i, d in enumerate(data):
    if i%PERPAGE == 0 :
        fig = plt.figure(figsize=(8.3, 1*11.7))
    plt.subplot(PERPAGE, 1, i%PERPAGE+1)
    plt.text(2.5, 0.9, d[0])
    plt.grid(True)
    plt.plot(d[1:], "o-")
    plt.ylim(0,ylim)
    if i%PERPAGE == PERPAGE-1:
        pp.savefig(fig)
#fig.savefig(sys.argv[1]+".pdf")
pp.close()
#plt.show()
|
# Example B: string slicing and iteration practice.
texti = "at santa at nasa"
# FIX: was `reverse: texti[::-1]` — a bare annotation statement that
# evaluated nothing and never stored the reversed string.
reverse = texti[::-1]
for letter in texti:
    print(letter)
texti = input("sláðu innn texta")
val = int(input("með hvaða millibili viltu sjá stafina? "))
nýr_texti = texti[::val]  # every val-th character of the entered text
import cv2
import numpy as np
import logging
# FIX: logging.basicConfig() returns None, so the original bound `log = None`;
# configure logging first, then grab a real logger.
logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger(__name__)
vcap = cv2.VideoCapture("rtsp://user:a1234567890@192.168.0.123:554/Streaming/Channels/201/?transportmode=unicast")
while True:
    ret, frame = vcap.read()
    if not ret or frame is None:
        # Stream dropped or ended: stop instead of handing None to imshow().
        log.error("frame grab failed; exiting capture loop")
        break
    ## frame = cv2.resize(frame,(900, 600))
    cv2.imshow('VIDEO', frame)
    if cv2.waitKey(1) & 0xFF == ord('q'):  # 'q' quits
        break
vcap.release()
cv2.destroyAllWindows()
|
import numpy as np
import matplotlib.pyplot as plt
import tensorflow.keras as keras
from tensorflow.keras import backend as K
from tensorflow.keras.callbacks import (
Callback,
LearningRateScheduler,
TensorBoard
)
# cycle Lr
# https://github.com/bckenstler/CLR
class LRFinder(Callback):
    """Learning-rate range test.

    Sweeps the learning rate geometrically from `min_lr` to `max_lr` while
    training, recording a momentum-smoothed loss per batch, and plots loss
    vs. learning rate when training ends.  Based on
    https://github.com/bckenstler/CLR.
    """
    def __init__(self, min_lr, max_lr, mom=0.9, stop_multiplier=None,
                 reload_weights=True, batches_lr_update=5):
        self.min_lr = min_lr
        self.max_lr = max_lr
        self.mom = mom  # smoothing factor for the running loss
        self.reload_weights = reload_weights  # restore initial weights between LR probes
        self.batches_lr_update = batches_lr_update  # batches trained per LR value
        if stop_multiplier is None:
            # Linear interpolation: 10 when mom=0, 4 when mom=0.9.
            self.stop_multiplier = -20*self.mom/3 + 10
        else:
            self.stop_multiplier = stop_multiplier
    def on_train_begin(self, logs=None):
        """Pre-compute the LR schedule and snapshot the initial weights."""
        # FIX: default was a mutable `logs={}`; use None (Keras passes logs anyway).
        p = self.params
        try:
            n_iterations = p['epochs']*p['samples']//p['batch_size']
        except KeyError:  # FIX: was a bare `except:`; newer Keras reports 'steps'
            n_iterations = p['steps']*p['epochs']
        self.learning_rates = np.geomspace(self.min_lr, self.max_lr,
                                           num=n_iterations//self.batches_lr_update+1)
        self.losses = []
        self.iteration = 0
        self.best_loss = 0
        if self.reload_weights:
            self.model.save_weights('tmp.hdf5')
    def on_batch_end(self, batch, logs=None):
        """Record the smoothed loss and step to the next probe LR when due."""
        logs = logs or {}
        loss = logs.get('loss')
        if self.iteration != 0:  # momentum-smooth the loss curve
            loss = self.losses[-1]*self.mom + loss*(1-self.mom)
        if self.iteration == 0 or loss < self.best_loss:
            self.best_loss = loss
        if self.iteration % self.batches_lr_update == 0:
            # Move to the next LR probe, restarting from the saved weights.
            if self.reload_weights:
                self.model.load_weights('tmp.hdf5')
            lr = self.learning_rates[self.iteration//self.batches_lr_update]
            K.set_value(self.model.optimizer.lr, lr)
        self.losses.append(loss)
        if loss > self.best_loss*self.stop_multiplier:  # diverged: stop early
            self.model.stop_training = True
        self.iteration += 1
    def on_train_end(self, logs=None):
        """Restore the initial weights and plot loss vs. learning rate."""
        if self.reload_weights:
            self.model.load_weights('tmp.hdf5')
        plt.figure(figsize=(12, 6))
        plt.plot(self.learning_rates[:len(self.losses)], self.losses)
        plt.xlabel("Learning Rate")
        plt.ylabel("Loss")
        plt.xscale('log')
        plt.show()
class WarmUpCosineDecayScheduler(keras.callbacks.Callback):
    """Cosine decay with warmup learning rate scheduler
    """
    def __init__(self,
                 learning_rate_base,
                 global_step_init=0,
                 warmup_learning_rate=0.0,
                 warmup_epoch=0,
                 hold_base_rate_steps=0,
                 learning_rate_final=None,
                 stop_epoch=None,
                 verbose=0):
        """Constructor for cosine decay with warmup learning rate scheduler.
    Arguments:
        learning_rate_base {float} -- base learning rate.
        total_steps {int} -- total number of training steps.
    Keyword Arguments:
        global_step_init {int} -- initial global step, e.g. from previous checkpoint.
        warmup_learning_rate {float} -- initial learning rate for warm up. (default: {0.0})
        warmup_steps {int} -- number of warmup steps. (default: {0})
        hold_base_rate_steps {int} -- Optional number of steps to hold base learning rate
                                    before decaying. (default: {0})
        verbose {int} -- 0: quiet, 1: update messages. (default: {0})
        """
        super(WarmUpCosineDecayScheduler, self).__init__()
        self.learning_rate_base = learning_rate_base
        self.global_step = global_step_init
        self.warmup_learning_rate = warmup_learning_rate
        self.warmup_epoch = warmup_epoch
        self.hold_base_rate_steps = hold_base_rate_steps
        # History of every LR actually applied, one entry per batch.
        self.learning_rates = []
        self.verbose = verbose
        # After `stop_epoch`, the LR is pinned to `learning_rate_final`.
        self.stop_epoch = stop_epoch
        self.learning_rate_final = learning_rate_final
        self.epoch = 0
    def on_epoch_begin(self, epoch, logs=None):
        # Track the current epoch for the stop_epoch check below.
        self.epoch = epoch
    def on_batch_end(self, batch, logs=None):
        self.global_step = self.global_step + 1
        lr = K.get_value(self.model.optimizer.lr)
        self.learning_rates.append(lr)
    def on_batch_begin(self, batch, logs=None):
        # Recompute the schedule from self.params each batch; `samples` and
        # `batch_size` come from the Keras training loop.
        total_steps = int(
            self.params['epochs'] * self.params['samples'] / self.params['batch_size'])
        warmup_steps = int(
            self.warmup_epoch * self.params['samples'] / self.params['batch_size'])
        lr = self.cosine_decay_with_warmup(
            global_step=self.global_step,
            learning_rate_base=self.learning_rate_base,
            total_steps=total_steps,
            warmup_learning_rate=self.warmup_learning_rate,
            warmup_steps=warmup_steps,
            hold_base_rate_steps=self.hold_base_rate_steps)
        if self.stop_epoch is not None and self.stop_epoch > 0 and self.epoch >= self.stop_epoch:
            # Past stop_epoch: freeze the LR at learning_rate_final
            # (captured from the schedule on first entry if not provided).
            if self.learning_rate_final is not None:
                K.set_value(self.model.optimizer.lr, self.learning_rate_final)
            else:
                self.learning_rate_final = lr
                K.set_value(self.model.optimizer.lr, self.learning_rate_final)
        else:
            K.set_value(self.model.optimizer.lr, lr)
        if self.verbose > 0:
            print('\nBatch %05d: setting learning '
                  'rate to %s.' % (self.global_step + 1, lr))
    def cosine_decay_with_warmup(self, global_step,
                                 learning_rate_base,
                                 total_steps,
                                 warmup_learning_rate=0.0,
                                 warmup_steps=0,
                                 hold_base_rate_steps=0):
        """Cosine decay schedule with warm up period.
    Cosine annealing learning rate as described in
        Loshchilov and Hutter, SGDR: Stochastic Gradient Descent with Warm Restarts.
        ICLR 2017. https://arxiv.org/abs/1608.03983
    In this schedule, the learning rate grows linearly from warmup_learning_rate
    to learning_rate_base for warmup_steps, then transitions to a cosine decay
    schedule.
    Arguments:
        global_step {int} -- global step.
        learning_rate_base {float} -- base learning rate.
        total_steps {int} -- total number of training steps.
    Keyword Arguments:
        warmup_learning_rate {float} -- initial learning rate for warm up. (default: {0.0})
        warmup_steps {int} -- number of warmup steps. (default: {0})
        hold_base_rate_steps {int} -- Optional number of steps to hold base learning rate
                                    before decaying. (default: {0})
    Returns:
        a float representing learning rate.
    Raises:
        ValueError: if warmup_learning_rate is larger than learning_rate_base,
        or if warmup_steps is larger than total_steps.
        """
        if total_steps < warmup_steps:
            raise ValueError('total_steps must be larger or equal to '
                             'warmup_steps.')
        learning_rate = 0.5 * learning_rate_base * (
            1 + np.cos(
                np.pi * (global_step - warmup_steps - hold_base_rate_steps) /
                float(total_steps - warmup_steps - hold_base_rate_steps)
            )
        )
        # Hold the base rate flat for hold_base_rate_steps after warmup.
        if hold_base_rate_steps > 0:
            learning_rate = np.where(global_step > warmup_steps + hold_base_rate_steps,
                                     learning_rate, learning_rate_base)
        if warmup_steps > 0:
            if learning_rate_base < warmup_learning_rate:
                raise ValueError('learning_rate_base must be larger or equal to '
                                 'warmup_learning_rate.')
            # Linear ramp from warmup_learning_rate up to learning_rate_base.
            slope = (learning_rate_base - warmup_learning_rate) / warmup_steps
            warmup_rate = slope * global_step + warmup_learning_rate
            learning_rate = np.where(global_step < warmup_steps, warmup_rate,
                                     learning_rate)
        return np.where(global_step > total_steps, 0.0, learning_rate)
|
from django.conf.urls import url, include
from django.contrib.auth.decorators import login_required
from ..pet.views import index, pet_view, pet_list, pet_edit, pet_delete, PetList, PetCreate
# Pet CRUD routes; everything except the index requires an authenticated user.
# NOTE(review): the delete pattern has no trailing '$', so it prefix-matches —
# confirm that is intended.
urlpatterns = [
    url(r'^$', index, name="index"),
    url(r'^new$', login_required(PetCreate.as_view()), name="new"),
    url(r'^list$', login_required(PetList.as_view()), name="list"),
    url(r'^edit/(?P<id>\d+)$', login_required(pet_edit), name="edit"),
    url(r'^delete/(?P<id>\d+)', login_required(pet_delete), name="delete" )
]
from mlpnn.Factories.LayerFactory import LayerFactory
from mlpnn.Functions.Sigmoid import Sigmoid
from mlpnn.Structure.MLPNN import MLPNN
class MLPNNFactory(object):
    """Builds fully-connected MLPNN networks from a per-layer neuron-count list."""
    @staticmethod
    def create(neurons_list, use_bias_node=False, training_strategy=MLPNN.ONLINE_TRAINING):
        """Create an MLPNN whose i-th layer has neurons_list[i] neurons.

        A bias node, when requested, is added to the input layer.
        """
        # FIX: work on a copy — the original mutated the caller's list
        # (`neurons_list[0] += 1`) when use_bias_node was set.
        neurons = list(neurons_list)
        if use_bias_node:
            neurons[0] += 1
        layers = [LayerFactory.create(n) for n in neurons]
        # Wire each layer to its neighbours; the first layer has no input
        # connection and the last has no output connection.  (Also fixes an
        # IndexError the original raised for single-layer lists.)
        last = len(layers) - 1
        for index, layer in enumerate(layers):
            if index > 0:
                layer.connect_input(layers[index - 1])
            if index < last:
                layer.connect_output(layers[index + 1])
        return MLPNN(layers, Sigmoid(), bias_node=use_bias_node, training=training_strategy)
|
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
import time
import unittest
class NewVisitorTest(unittest.TestCase):
    """Selenium functional test: a visitor looks up the UV index for a city."""
    def setUp(self):
        # Fresh browser instance per test.
        self.browser = webdriver.Firefox()
    def tearDown(self):
        self.browser.quit()
    def test_can_start_a_list_and_retrieve_it_later(self):
        # check website
        self.browser.get('http://localhost:8000')
        # notice webiste 'SkinMate'
        self.assertIn('SkinMate', self.browser.title)
        header_text = self.browser.find_element_by_tag_name('h1').text
        self.assertIn('Check UV', header_text)
        # notice field to enter city name
        inputbox = self.browser.find_element_by_id('id_new_item')
        self.assertEqual(inputbox.get_attribute('placeholder'), 'Enter City Name')
        # type "Schwerin" into text box
        inputbox.send_keys('Schwerin')
        # hit enter, the page updates and shows the current uv index
        inputbox.send_keys(Keys.ENTER)
        time.sleep(1)  # crude wait for the page refresh
        table = self.browser.find_element_by_id('id_list_table')
        rows = table.find_elements_by_tag_name('tr')
        self.assertTrue(any(row.text == "Schwerin" for row in rows),
                        f'New City Not In Table. Content is {table.text}')
        # generate unique url to rember city
        # re-vist site and uv index is still there for "Schwerin"
        # end — deliberate marker that the test story is unfinished
        self.fail('Finish the test')
|
#!/usr/bin/python3
def roman_to_int(roman_string):
    """Convert a roman numeral string to its integer value.

    Returns 0 for non-string input (including None).
    Fixes the original, which kept subtracting a stale carry `u` on every
    iteration after a subtractive pair (e.g. "XLV" evaluated to 35, not 45).
    """
    if not isinstance(roman_string, str):
        return (0)
    values = {"I": 1, "V": 5, "X": 10, "L": 50, "C": 100, "D": 500, "M": 1000}
    total = 0
    for i in range(len(roman_string)):
        value = values[roman_string[i]]
        # A symbol smaller than its successor is subtractive (IV, IX, XL, ...).
        if i + 1 < len(roman_string) and values[roman_string[i + 1]] > value:
            total -= value
        else:
            total += value
    return (total)
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# Odoo module manifest for the Account Invoice Extract (OCR) feature.
{
    'name': 'Account Invoice Extract',
    'version': '1.0',
    'category': 'Accounting/Accounting',
    'summary': 'Extract data from invoice scans to fill them automatically',
    # Requires accounting, in-app purchases (IAP) and mail.
    'depends': ['account', 'iap', 'mail'],
    'data': [
        'security/ir.model.access.csv',
        'data/account_invoice_extract_data.xml',
        'data/config_parameter_endpoint.xml',
        'data/extraction_status.xml',
        'data/res_config_settings_views.xml',
        'data/update_status_cron.xml',
    ],
    # Installed automatically once all `depends` modules are present.
    'auto_install': True,
    'qweb': [
        'static/src/xml/invoice_extract_box.xml',
        'static/src/xml/invoice_extract_button.xml',
    ],
    'license': 'OEEL-1',
}
|
# Wizard duel: the first sorcerer to win 3 clashes *in a row* wins the match.
spells = 10
gandalf = [50, 40, 40, 10, 50, 10, 40, 50, 50, 50]
saruman = [45, 45, 25, 50, 25, 40, 10, 45, 10, 10]
gandalf_wins = 0  # current consecutive wins for Gandalf
saruman_wins = 0  # current consecutive wins for Saruman
for i in range(spells):
    if gandalf[i] > saruman[i]:
        gandalf_wins += 1
        if gandalf_wins == 3:
            print(('Gandalf score: ' + str(gandalf_wins), 'Saruman score: ' + str(saruman_wins)))
            print('Gandalf wins')
            break
        else:
            # A Gandalf win breaks Saruman's streak.
            saruman_wins = 0
            print(('Gandalf score: ' + str(gandalf_wins), 'Saruman score: ' + str(saruman_wins)))
    elif gandalf[i] < saruman[i]:
        saruman_wins += 1
        if saruman_wins == 3:
            print(('Gandalf score: ' + str(gandalf_wins), 'Saruman score: ' + str(saruman_wins)))
            print('Saruman wins')
            break
        else:
            # A Saruman win breaks Gandalf's streak.
            gandalf_wins = 0
            print(('Gandalf score: ' + str(gandalf_wins), 'Saruman score: ' + str(saruman_wins)))
# NOTE(review): a tie leaves both streaks untouched and prints nothing —
# confirm that is the intended rule.
|
from django.urls import path
from . import views
# App routes: index at the root plus three named endpoints served by views.
urlpatterns = [
    path('', views.index, name='index'),
    path('gogetthegood', views.goods, name='thegoods'),
    path('happyhappyjoyjoy', views.joy, name='emotions'),
    path('hey', views.response, name='callback'),
]
from django.apps import AppConfig
class RisksConfig(AppConfig):
    """Django application configuration for the `risks` app."""
    name = 'risks'
|
# -*- coding: utf-8 -*-
import fido.exceptions
import twisted.internet.error
import twisted.web.client
from bravado.fido_client import FidoClient
from bravado.fido_client import FidoFutureAdapter
from bravado.testing.integration_test import IntegrationTestsBaseClass
class TestServerFidoClient(IntegrationTestsBaseClass):
    """Runs bravado's shared integration suite against the Fido HTTP client."""
    http_client_type = FidoClient
    http_future_adapter_type = FidoFutureAdapter
    # Exception instances the suite should treat as connection errors.
    connection_errors_exceptions = {
        fido.exceptions.TCPConnectionError(),
        twisted.internet.error.ConnectingCancelledError('address'),
        twisted.internet.error.DNSLookupError(),
        twisted.web.client.RequestNotSent(),
    }
|
from __future__ import division
#!/usr/bin/python
__author__ = "Stephen Li"
__email__ = "stephen.liziyun at gmail dot com"
import sys
import random
import math
# Global counter of element comparisons accumulated across all QuickSort calls.
count = 0
def QuickSort(array):
    """Return a sorted copy of *array*, adding this call's element-vs-pivot
    comparisons to the module-level `count` (used to compare pivot rules)."""
    length = len(array)
    if length == 0:
        return array
    elif length == 1:
        return array # no need to sort single-element array
    else:
        global count
        random.seed() # reset random seed, may be redundant
        # Alternative pivot strategies, kept for experimentation:
        # pivot_idx = random.randint(0, (length-1)) # totally randomized
        pivot_idx = 0 # always use the first element
        # pivot_idx = length-1 # always use the last element
        # Median of 3
        # middle_idx = (int)(math.ceil(length / 2)) - 1
        # pivot_idx = array.index(Median(array[0], array[length-1], array[middle_idx]))
        pivot = array[pivot_idx]
        array, new_pivot_idx = Partition(array, pivot_idx, length)
        # Each partition compares every other element of the slice to the pivot.
        left = QuickSort(array[:new_pivot_idx])
        count = count + new_pivot_idx - 1
        right = QuickSort(array[new_pivot_idx+1:])
        count = count + length - new_pivot_idx
        return left + [array[new_pivot_idx]] + right
def Partition(array, pivot_idx, length):
    """Lomuto-style partition of array[0:length] around array[pivot_idx].

    Returns (array, i): array[i] is the pivot in its final place, everything
    left of i is smaller, everything right of i is >= the pivot.
    """
    # Move the chosen pivot to the front (a no-op when pivot_idx == 0).
    array[pivot_idx], array[0] = array[0], array[pivot_idx]
    pivot = array[0]
    i = 1  # invariant: array[1:i] holds the elements < pivot seen so far
    # FIX: `range` instead of the Python-2-only `xrange` — identical behaviour
    # and the function now also runs under Python 3.
    for j in range(i, length):
        if array[j] < pivot:
            array[i], array[j] = array[j], array[i]
            i = i + 1
    # Drop the pivot between the two regions.
    array[0], array[i-1] = array[i-1], array[0]
    return (array, i-1)
def Median(a, b, c):
    """Return the median (middle value) of the three arguments."""
    if a > b:
        # b is the smaller of {a, b}: the median is b unless c exceeds it,
        # in which case it is the smaller of a and c.
        return min(a, c) if c > b else b
    # a <= b: mirror of the case above.
    return min(b, c) if c > a else a
def main():
    """Driver: read one integer per line from argv[1], quicksort, print stats."""
    args = sys.argv[1:]
    if not args:
        print('usage: inputfile')
        sys.exit(1)
    input_array = []
    # FIX: the original called `f.close` without parentheses, so the file was
    # never closed; `with` guarantees it.  (print-call form behaves the same
    # for single arguments on both Python 2 and 3.)
    with open(args[0]) as f:
        for line in f:
            input_array.append(int(line))
    # Sanity checks for the Median helper (debug output kept from original).
    print(Median(1, 2, 3))
    print(Median(1, 3, 2))
    print(Median(2, 1, 3))
    print(Median(2, 3, 1))
    print(Median(3, 2, 1))
    print(Median(3, 1, 2))
    sorted_array = QuickSort(input_array)
    print(sorted_array)
    print(count)  # total comparisons performed
if __name__ == '__main__':
    main()
import argparse
import threading
import socket
import sys
import Pyro4
import Pyro4.naming
Pyro4.config.SERIALIZER = 'json'  # use the JSON serializer for all Pyro4 RPC traffic
class TestServer(object):
    """Pyro4-exposed demo object: a read/write `name` property plus simple
    echo/arithmetic methods."""
    def __init__(self):
        self._name = "TestServer"
    # Note: @Pyro4.expose must wrap the property/setter decorators.
    @Pyro4.expose
    @property
    def name(self):
        # print("name.getter")
        return self._name
    @Pyro4.expose
    @name.setter
    def name(self, value):
        # print("name.setter: {}".format(value))
        self._name = value
    @Pyro4.expose
    @Pyro4.oneway
    def oneway_method(self, x):
        # One-way call: the remote caller does not wait for this return value.
        return x
    @Pyro4.expose
    def cube(self, x):
        """
        Cube argument
        """
        return x**3
    @Pyro4.expose
    def echo(self, arg):
        """
        Echo back argument
        """
        return arg
    @Pyro4.expose
    def square(self, x):
        """
        Square argument
        args:
            x (int/float): The argument to be square.
        returns:
            float: The result of the arguemnt squared
        """
        return x**2
def parse_args(init_description, argv=None):
    """Parse the command-line options for the server.

    Args:
        init_description (str): text shown in ``--help`` output.
        argv (list of str, optional): argument vector to parse.  Defaults
            to None, which makes argparse fall back to ``sys.argv[1:]``,
            so existing call sites behave exactly as before; tests can
            inject an explicit vector.

    Returns:
        argparse.Namespace with ``ns_host`` and ``ns_port`` attributes.
    """
    parser = argparse.ArgumentParser(description=init_description)
    parser.add_argument(
        "--ns_host", "-nsn", dest='ns_host',
        action='store', default='localhost',
        help="Specify a host name for the Pyro name server. Default is localhost")
    parser.add_argument(
        "--ns_port", "-nsp", dest='ns_port',
        action='store', default=9090, type=int,
        help="Specify a port number for the Pyro name server. Default is 9090.")
    return parser.parse_args(argv)
def startNSloop(*args):
    """Run the Pyro4 name-server loop, tolerating an already-running one.

    A socket.error (e.g. "address already in use" when another name
    server holds the port) is reported instead of silently swallowed --
    the original bound the exception to an unused ``err`` and passed.
    The function then returns None so the daemon thread simply exits.
    """
    try:
        return Pyro4.naming.startNSloop(*args)
    except socket.error as err:
        # Deliberately non-fatal: a name server is probably already up.
        print("Name server not started: {}".format(err))
        sys.stdout.flush()
if __name__ == '__main__':
    parsed = parse_args("Start a basic server")
    bs = TestServer()
    # Run the name server in a daemon thread so it dies with the process.
    ns_thread = threading.Thread(
        target=startNSloop, args=(parsed.ns_host, parsed.ns_port))
    ns_thread.daemon = True
    ns_thread.start()
    # NOTE(review): the daemon port 50001 is hard-coded -- confirm nothing
    # else on the host claims it.
    with Pyro4.Daemon(host='localhost', port=50001) as daemon:
        server_uri = daemon.register(bs, objectId='TestServer')
        # Advertise the object in the name server under a well-known name.
        with Pyro4.locateNS(port=parsed.ns_port, host=parsed.ns_host) as ns:
            ns.register('TestServer', server_uri)
        print("Firing up daemon")
        sys.stdout.flush()
        daemon.requestLoop()
|
#!/usr/bin/python
import sys
# Time Complexity: O(n)
class Node:
    """A binary-tree node: an integer payload plus left/right children."""
    def __init__(self, data):
        self.left = None
        self.right = None
        self.data = data
class Value:
    """Mutable holder threaded through largestBSTUtil as an in/out
    reference cell (the Python stand-in for C/Java pointer params)."""
    max_size = 0    # size of the largest BST subtree found so far
    is_bst = False  # whether the subtree just examined is itself a BST
    # sys.maxsize replaces the Python-2-only sys.maxint (removed in
    # Python 3); on CPython 2 the two are equal, so behavior is unchanged.
    minVal = -sys.maxsize - 1  # minimum value seen in the right subtree
    maxVal = sys.maxsize       # maximum value seen in the left subtree
# Returns size of the largest BST subtree in a Binary Tree
def largestBST(node):
    """Return the node count of the largest BST subtree of the binary
    tree rooted at ``node``."""
    holder = Value()
    # One Value instance serves as all four in/out reference slots.
    largestBSTUtil(node, holder, holder, holder, holder)
    return holder.max_size
''' largestBSTUtil() updates *max_size_ref for the size of the largest BST
subtree. Also, if the tree rooted with node is non-empty and a BST,
then returns size of the tree. Otherwise returns 0.'''
def largestBSTUtil(node, min_ref, max_ref, max_size_ref, is_bst_ref):
    """Recursive worker for largestBST (translated from C/Java).

    The Value arguments act as in/out reference cells:
      min_ref.minVal        -- out: minimum value in this subtree
      max_ref.maxVal        -- out: maximum value in this subtree
      max_size_ref.max_size -- in/out: largest BST size found so far
      is_bst_ref.is_bst     -- out: True iff this subtree is a BST
    Returns the size of the subtree rooted at ``node`` when it is a BST,
    otherwise 0.  NOTE(review): largestBST passes one shared Value object
    in all four slots, so the fields alias one another -- verify before
    refactoring or reordering any of the writes below.
    """
    if node is None:
        is_bst_ref.is_bst = True # An empty tree is BST
        return 0 # Size of the BST is 0
    minVal = sys.maxint #2147483647 #Integer.MAX_VALUE;
    ''' A flag variable for left subtree property
    i.e., max(root->left) < root->data'''
    left_flag = False
    ''' A flag variable for right subtree property
    i.e., min(root->right) > root->data'''
    right_flag = False
    ls = rs = 0 # To store sizes of left and right subtrees
    ''' Following tasks are done by recursive call for left subtree
    a) Get the maximum value in left subtree (Stored in *max_ref)
    b) Check whether Left Subtree is BST or not (Stored in *is_bst_ref)
    c) Get the size of maximum size BST in left subtree (updates *max_size)'''
    max_ref.maxVal = -sys.maxint - 1 # Integer.MIN_VALUE;
    ls = largestBSTUtil(node.left, min_ref, max_ref, max_size_ref, is_bst_ref)
    if (is_bst_ref.is_bst is True and node.data > max_ref.maxVal):
        left_flag = True
    ''' Before updating *min_ref, store the min value in left subtree. So that we
    have the correct minimum value for this subtree '''
    minVal = min_ref.minVal
    ''' The following recursive call does similar (similar to left subtree)
    task for right subtree '''
    min_ref.minVal = sys.maxint #Integer.MAX_VALUE;
    rs = largestBSTUtil(node.right, min_ref, max_ref, max_size_ref, is_bst_ref)
    if (is_bst_ref.is_bst is True and node.data < min_ref.minVal):
        right_flag = True
    # Update min and max values for the parent recursive calls
    if (minVal < min_ref.minVal):
        min_ref.minVal = minVal
    if (node.data < min_ref.minVal):# // For leaf nodes
        min_ref.minVal = node.data
    if (node.data > max_ref.maxVal):
        max_ref.maxVal = node.data
    # If both left and right subtrees are BST. And left and right
    # subtree properties hold for this node, then this tree is BST.
    # So return the size of this tree
    if (left_flag and right_flag):
        if (ls + rs + 1 > max_size_ref.max_size):
            max_size_ref.max_size = ls + rs + 1
        return ls + rs + 1;
    else:
        # Since this subtree is not BST, set is_bst flag for parent calls
        is_bst_ref.is_bst = False
        return 0
''' public static void main(String[] args) {
/* Let us construct the following Tree
        50
     /      \
  10        60
 /  \       /  \
5   20    55    70
         /     /  \
       45     65    80
'''
# Demo code.  NOTE(review): this runs at import time (no __main__ guard)
# and uses Python-2 print statements, so the module is Python-2 only.
root = Node(50)
root.left = Node(10);
root.right = Node(60);
root.left.left = Node(5);
root.left.right = Node(20);
root.right.left = Node(55);
root.right.left.left = Node(45);
root.right.right = Node(70);
root.right.right.left = Node(65);
root.right.right.right = Node(80);
''' /* The complete tree is not BST as 45 is in right subtree of 50.
The following subtree is the largest BST
    60
  /  \
55    70
/     /  \
45    65    80
'''
print "Size of largest BST is ", largestBST(root)
# Second, smaller example: 65 in the left subtree of 60 breaks the BST
# property at the root, so only a subtree qualifies.
root = Node(60);
root.left = Node(65);
root.right = Node(70);
root.left.left = Node(50);
print " Size of the largest BST is ", largestBST(root)
|
from django.core.management.base import BaseCommand, CommandError
from yoolotto.coin.models import CoinTicketTransaction, CoinSubmissionTransaction
from yoolotto.lottery.models import LotteryTicket
from yoolotto.user.models import YooLottoUser
class Command(BaseCommand):
    """Purge every coin ticket/submission transaction and then recompute
    the coin totals on every lottery ticket."""

    def handle(self, *args, **options):
        # Remove each coin record together with its underlying transaction.
        for record in CoinTicketTransaction.objects.all():
            underlying = record.transaction
            record.delete()
            underlying.delete()
        for record in CoinSubmissionTransaction.objects.all():
            underlying = record.transaction
            record.delete()
            underlying.delete()
        # Rebuild coin counts ticket by ticket, grouped by owner.
        for ticket in LotteryTicket.objects.all().order_by("user__id"):
            ticket.update_coins(save=True)
import discord
from discord.ext import commands
from discord.utils import get
import asyncio
import enchant
import json
import string
import requests
class AviKiller(commands.Cog):
    """Cog that scans recent channel history for gibberish messages and
    temporarily applies the VEGETABLE role to repeat offenders."""

    def __init__(self, bot):
        self.bot = bot

    async def vegetable(self, user, channel):
        """Give `user` the VEGETABLE role for 5 minutes, then remove it.

        Bug fix: the original announced the punishment and later removed
        the role, but never actually added it (add_roles was missing).
        """
        vegetable_role = get(user.guild.roles, name='VEGETABLE')
        await user.add_roles(vegetable_role)  # was missing: role never applied
        await channel.send(f"{user.mention} has been vegetableised for 5 mins!")
        await asyncio.sleep(300)  # 5 minutes
        await user.remove_roles(vegetable_role)
        await channel.send(f"{user.mention} has been unvegetableised!")

    def check_URL(self, string_to_check):
        """Return True if `string_to_check` is a fetchable URL.

        NOTE(review): this performs a blocking HTTP GET on the event-loop
        thread and treats any failure (bad scheme, DNS, timeout) as
        "not a URL" -- a cheap scheme check may be preferable.
        """
        try:
            requests.get(string_to_check)
            return True
        except Exception:  # narrowed from a bare except; still best-effort
            return False

    def check_sentence(self, sentence):
        """Heuristically decide whether `sentence` is gibberish.

        Returns True when 50% or fewer of its words are found in the
        en_GB dictionary (augmented by slang.txt); URLs never count as
        gibberish.
        """
        d = enchant.DictWithPWL("en_GB", "slang.txt")
        if self.check_URL(sentence):
            return False
        # Strip punctuation so "word!" still matches the dictionary.
        sentence = sentence.translate(str.maketrans('', '', string.punctuation))
        words = sentence.split(" ")
        good_words = []
        for word in words:
            try:
                good_words.append(bool(d.check(word)))
            except Exception:
                # enchant raises on unscorable tokens (e.g. empty strings);
                # skip them rather than abort the whole check.
                pass
        valid_words = sum(good_words)
        percentage = (valid_words / len(words)) * 100
        return percentage <= 50

    async def spam(self, message):
        """on_message listener: vegetable-ise an author whose last 10
        channel messages contain 5+ gibberish offences."""
        channel = message.channel
        messages = await channel.history(limit=10).flatten()
        occurences = []
        for past in messages:
            # Guard `past.content` before indexing: messages with empty
            # content crashed the original with an IndexError.
            if (past.author == message.author
                    and message.author != self.bot.user
                    and len(past.attachments) < 1
                    and past.content
                    and past.content[0].isalpha()):
                occurences.append(past.content)
        offences = []
        for text in occurences:
            if self.check_sentence(text):
                offences.append(text)
            if len(text) == 1:
                # Single-character messages also count (and may double-count
                # with the gibberish check, matching the original behavior).
                offences.append(text)
        if len(offences) >= 5:
            await self.vegetable(message.author, channel)
def setup(bot):
    """Extension entry point used by discord.py's load_extension."""
    cog = AviKiller(bot)
    # Register the spam scanner as an on_message listener and add the cog.
    bot.add_listener(cog.spam, "on_message")
    bot.add_cog(cog)
#!/usr/bin/env python
"""
_GetAvailableFilesByRun_
Oracle implementation of Subscription.GetAvailableFilesByRun
"""
from WMCore.WMBS.MySQL.Subscriptions.GetAvailableFilesByRun import \
GetAvailableFilesByRun as GetAvailableFilesByRunMySQL
class GetAvailableFilesByRun(GetAvailableFilesByRunMySQL):
    """Oracle DAO variant: inherits the MySQL implementation unchanged
    (presumably the underlying SQL is dialect-compatible -- this class
    exists so the Oracle DAO factory can resolve the name)."""
    pass
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.