text stringlengths 8 6.05M |
|---|
# Plural And Single
# By Efrain
#
# Bug fix: the singular branch was commented out, so the script always printed
# the plural (e.g. "Cactus" -> "Cactuses").  inflect.singular_noun() returns the
# singular form for a plural word and False otherwise, so we can detect which
# direction to convert.
import inflect

p = inflect.engine()
while True:
    word = input("Enter A Plural Or Singular Word: ")
    singular = p.singular_noun(word)
    if singular:
        # Word was plural: show its singular form.
        print("The Singular Of ", word, " Is ", singular)
    else:
        # Word was singular: show its plural form.
        print("The Plural Of ", word, " Is ", p.plural(word))
import time
import os.path as osp
import numpy as np
from tqdm import tqdm
import torch
from torch import nn
import torch.nn.functional as F
import torch.nn.init as init
from torch.optim import SGD, Adam
from torch.optim.lr_scheduler import CosineAnnealingWarmRestarts, CosineAnnealingLR, ReduceLROnPlateau
def run_training(model, trainloader, validloader, epochs, optimizer, optimizer_params, scheduler, scheduler_params, loss_fn, early_stopping_steps, verbose, device, seed, weight_path):
    """Train ``model`` with early stopping and checkpointing.

    ``optimizer`` and ``scheduler`` are class names as strings (e.g. "Adam",
    "ReduceLROnPlateau") resolved against the classes imported in this module;
    their ``*_params`` dicts are forwarded to the constructors.  The best
    (lowest validation loss) weights are saved to ``weight_path/seed_<seed>.pt``.

    Returns the best validation loss seen.
    """
    # NOTE(review): eval() on config strings is fine for trusted configs, but an
    # explicit {"Adam": Adam, ...} mapping would be safer against bad input.
    optimizer = eval(optimizer)(model.parameters(), **optimizer_params)
    scheduler = eval(scheduler)(optimizer, **scheduler_params)
    early_step = 0
    best_loss = np.inf
    best_epoch = 0
    start = time.time()
    for epoch in range(epochs):
        train_loss = train_fn(model, optimizer, scheduler, loss_fn, trainloader, device)
        valid_loss = valid_fn(model, loss_fn, validloader, device)
        # Scheduler step: ReduceLROnPlateau needs the metric, the cosine ones do not.
        if isinstance(scheduler, ReduceLROnPlateau):
            scheduler.step(valid_loss)
        elif isinstance(scheduler, (CosineAnnealingLR, CosineAnnealingWarmRestarts)):
            scheduler.step()
        # Bug fix: the original compared against the undefined name ``epoch_``,
        # which raised NameError; log every ``verbose`` epochs and on the last one.
        if epoch % verbose == 0 or epoch == epochs - 1:
            t = time.time() - start
            print(f"EPOCH: {epoch}, train_loss: {train_loss}, valid_loss: {valid_loss}, time: {t}")
        if valid_loss < best_loss:
            # New best model: checkpoint it and reset the early-stopping counter.
            best_loss = valid_loss
            torch.save(model.state_dict(), osp.join(weight_path, f"seed_{seed}.pt"))
            early_step = 0
            best_epoch = epoch
        elif early_stopping_steps != 0:
            early_step += 1
            if early_step >= early_stopping_steps:
                t = time.time() - start
                print(f"early stopping in iteration {epoch}, : best itaration is {best_epoch}, valid loss is {best_loss}, time: {t}")
                return best_loss
    t = time.time() - start
    print(f"training until max epoch {epochs}, : best itaration is {best_epoch}, valid loss is {best_loss}, time: {t}")
    return best_loss
def train_fn(model, optimizer, scheduler, loss_fn, dataloader, device):
    """Run one training epoch and return the mean total loss over the loader."""
    model.train()
    final_loss = 0
    s = time.time()
    pbar = tqdm(enumerate(dataloader), total=len(dataloader))
    for i, (images, targets) in pbar:
        optimizer.zero_grad()
        images = images.to(device)
        # Each annotation in the list is moved onto the target device.
        targets = [t.to(device) for t in targets]
        # Loss is the sum of the localisation and classification terms.
        loss_l, loss_c = loss_fn(model(images), targets)
        loss = loss_l + loss_c
        loss.backward()
        # Element-wise gradient clamping before the optimiser step.
        nn.utils.clip_grad_value_(model.parameters(), clip_value=2.0)
        optimizer.step()
        final_loss += loss.item()
        # Refresh the progress-bar text every 10 batches.
        if i % 10 == 0:
            pbar.set_description(f"train | iteration {i} | time {time.time() - s:.4f} | avg loss {final_loss / (i+1):.16f}")
    return final_loss / len(dataloader)
def valid_fn(model, loss_fn, dataloader, device):
    """Evaluate for one epoch (no gradients) and return the mean total loss."""
    model.eval()
    final_loss = 0
    s = time.time()
    pbar = tqdm(enumerate(dataloader), total=len(dataloader))
    with torch.no_grad():
        for i, (images, targets) in pbar:
            images = images.to(device)
            targets = [t.to(device) for t in targets]
            # Same combined loss as in training: localisation + classification.
            loss_l, loss_c = loss_fn(model(images), targets)
            final_loss += (loss_l + loss_c).item()
            # Refresh the progress-bar text every 10 batches.
            if i % 10 == 0:
                pbar.set_description(f"valid | iteration {i} | time {time.time() - s:.4f} | avg loss {final_loss / (i+1):.16f}")
    return final_loss / len(dataloader)
def inference_fn(model, dataloader, device):
    """Run ``model`` over ``dataloader`` and return predictions as one NumPy array."""
    model.eval()
    preds = []
    s = time.time()
    pbar = tqdm(enumerate(dataloader), total=len(dataloader))
    with torch.no_grad():
        for i, images in pbar:
            images = images.to(device)
            outputs = model(images)
            # Bug fix (was flagged "# need debug"): np.concatenate cannot handle
            # torch tensors that live on the GPU — move them to CPU NumPy first.
            preds.append(outputs.detach().cpu().numpy())
            if i % 10 == 0:
                pbar.set_description(f"iteration {i} | time {time.time() - s:.4f}")
    preds = np.concatenate(preds)
    return preds
from django.shortcuts import render,get_list_or_404,get_object_or_404
from django.contrib.auth import get_user_model
from residents.models import Lot,Community,Area,Street,Resident,ResidentLotThroughModel
from django.utils import timezone
from rest_framework import generics,status,viewsets
from rest_framework.views import APIView
from rest_framework.response import Response
from datetime import datetime
from rest_framework_jwt.settings import api_settings
from drf_yasg.utils import swagger_auto_schema
from api.serializer import resident
from rest_framework.permissions import AllowAny
from drf_yasg.openapi import Schema, TYPE_OBJECT, TYPE_STRING, TYPE_ARRAY
from rest_framework_jwt.serializers import (
JSONWebTokenSerializer, RefreshJSONWebTokenSerializer,
VerifyJSONWebTokenSerializer
)
from rest_framework_jwt.views import JSONWebTokenAPIView
jwt_payload_handler = api_settings.JWT_PAYLOAD_HANDLER
jwt_encode_handler = api_settings.JWT_ENCODE_HANDLER
from django.contrib.sites.shortcuts import get_current_site
from django.contrib.auth.tokens import PasswordResetTokenGenerator
from django.contrib.auth.tokens import PasswordResetTokenGenerator
from django.utils.http import urlsafe_base64_encode, urlsafe_base64_decode
from django.utils.encoding import force_bytes, force_text
from django.core.mail import EmailMessage
from django.template import loader,Context
from rest_framework.decorators import action
from rest_framework_jwt.views import JSONWebTokenAPIView
# Create your views here.
class ObtainJSONWebToken(JSONWebTokenAPIView):
    """
    API View that receives a POST with a user's username and password.
    Returns a JSON Web Token that can be used for authenticated requests.
    """
    # Uses the project's custom serializer (api.serializer.resident) instead of
    # the library default JSONWebTokenSerializer.
    serializer_class = resident.CustomJSONWebTokenSerializer
class ResidentViewSet(viewsets.GenericViewSet):
    """
    Get Resident Model.

    Endpoints for the Resident linked to the authenticated user, plus actions
    for default lot, lot removal, family ordering and notification toggles.
    """
    @swagger_auto_schema(responses={200: resident.ResidentSerializer()})
    def list(self, request):
        """Return the Resident record belonging to the requesting user."""
        queryset = Resident.objects.all()
        r = get_object_or_404(queryset, user_id=request.user.id)
        serializer = resident.ResidentSerializer(r, context={'request': self.request})
        return Response(serializer.data)

    def update(self, request, pk=None):
        """Partially update the Resident whose user id is ``pk``."""
        queryset = Resident.objects.all()
        r = get_object_or_404(queryset, user_id=pk)
        serializer = resident.ResidentSerializer(r, data=request.data, context={'request': self.request}, partial=True)
        if serializer.is_valid():
            serializer.save()
            return Response({'status': 'success'}, status=status.HTTP_200_OK)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    @action(detail=False, methods=['post'], serializer_class=resident.defaultlotSerializer)
    def defaultProperty(self, request):
        """Set the requesting resident's default lot; '0' clears it."""
        queryset = Resident.objects.all()
        r = get_object_or_404(queryset, user_id=request.user.id)
        qs = Lot.objects.all()
        if request.data['default_lot'] != '0':
            r.default_lot = get_object_or_404(qs, pk=request.data['default_lot'])
        else:
            r.default_lot = None
        r.save()
        return Response({'status': 'success'}, status=status.HTTP_200_OK)

    @action(detail=False, methods=['post'], serializer_class=resident.RemoveLot)
    def removeLot(self, request):
        """Detach a lot from a resident; purge inactive users left with no lots."""
        queryset = ResidentLotThroughModel.objects.all()
        if request.data['user_id'] != '0' and request.data['lot_id'] != '0':
            queryset.filter(resident_id=request.data['user_id'], lot_id=request.data['lot_id']).delete()
            rs = Resident.objects.get(pk=request.data['user_id'])
            # An inactive account with no remaining lots is removed entirely.
            if rs.user.is_active == False and rs.lot.count() == 0:
                rs.user.delete()
            return Response({'status': 'success'}, status=status.HTTP_200_OK)
        # Bug fix: the original returned ``serializer.errors`` here, but no
        # ``serializer`` was ever defined in this action (NameError -> HTTP 500).
        return Response({'error': 'user_id and lot_id are required'}, status=status.HTTP_400_BAD_REQUEST)

    @action(detail=False, methods=['post'], serializer_class=resident.FamilyOrderSerializer)
    def orderFamily(self, request):
        """Swap the display order of two family members on the same lot."""
        qs = ResidentLotThroughModel.objects.all()
        froms = get_object_or_404(qs, order=request.data['froms'], lot_id=request.data['lot'], resident_id=request.data['resident'])
        to = get_object_or_404(qs, order=request.data['to'], lot_id=request.data['lot'], resident_id=request.data['resident_to'])
        froms.order = request.data['to']
        to.order = request.data['froms']
        froms.save()
        to.save()
        return Response({'status': 'success'}, status=status.HTTP_200_OK)

    @action(detail=False, methods=['post'], serializer_class=resident.disablenotificationSerializer)
    def disableNotification(self, request):
        """Enable/disable notifications for the requesting resident on a lot."""
        qs = ResidentLotThroughModel.objects.all()
        r = get_object_or_404(qs, resident_id=request.user.resident.id, lot_id=request.data['lot'])
        # The flag arrives as the string 'true'/'false', not a JSON boolean.
        if request.data['disable_notification'] == 'true':
            r.disable_notification = True
        else:
            r.disable_notification = False
        r.save()
        return Response({'status': 'success'}, status=status.HTTP_200_OK)
class RequestViewSet(viewsets.ViewSet):
    """Create access requests; open to anonymous callers."""
    serializer_class = resident.RequestSerializer
    permission_classes = [AllowAny]

    @swagger_auto_schema(request_body=resident.RequestSerializer,responses={201: Schema(type=TYPE_OBJECT,request={'status':'success'})})
    def create(self, request, *args, **kwargs):
        """Validate and persist a new request; 201 on success, 400 otherwise."""
        serializer = self.serializer_class(data=request.data, context={'request': self.request})
        if not serializer.is_valid():
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        serializer.save()
        return Response({'status': 'success'}, status=status.HTTP_201_CREATED)
class RequestFamilyViewSet(viewsets.ViewSet):
    """Create a family-member request, but only for emails not already attached to the lot."""
    serializer_class = resident.RequestFamilySerializer
    @swagger_auto_schema(request_body=resident.RequestFamilySerializer,responses={201: Schema(type=TYPE_OBJECT,request={'status':'success'})})
    def create(self,request,*args, **kwargs):
        serializer = self.serializer_class(data=request.data,context = {'request': self.request} )
        if serializer.is_valid():
            # EAFP: the request is only saved when at least one of the three
            # lookups below fails, i.e. the email is NOT already a resident on
            # this lot.  If all three succeed, the user already exists -> 400.
            try:
                user = get_user_model().objects.get(email=request.data['email'])
                # NOTE: this local ``resident`` shadows the imported serializer
                # module ``resident`` for the rest of this method.
                resident = Resident.objects.get(user_id=user.id)
                resident_lot = ResidentLotThroughModel.objects.get(resident_id = resident.id, lot_id = request.data['lot'])
            except (get_user_model().DoesNotExist, Resident.DoesNotExist ,ResidentLotThroughModel.DoesNotExist) as e:
                track = serializer.save()
                response_data = {'status':'success'}
                return Response(response_data, status=status.HTTP_201_CREATED)
            return Response({'email':['user already exist']}, status=status.HTTP_400_BAD_REQUEST)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class PasswordRecoveryViewSet(viewsets.ViewSet):
    """
    Reset password endpoint.

    POST an email address; for an active account an HTML recovery email with a
    tokenised link is sent and a JWT is returned in the response body.
    """
    serializer_class = resident.PasswordRecoverySerializer
    permission_classes = [AllowAny]

    @swagger_auto_schema(request_body=resident.PasswordRecoverySerializer,responses={200: RefreshJSONWebTokenSerializer()})
    def create(self, request):
        serializer = self.serializer_class(data=request.data)
        if serializer.is_valid():
            # Bug fix: an unknown email used to raise an uncaught DoesNotExist,
            # producing an HTTP 500 instead of a validation error.
            try:
                user = get_user_model().objects.get(email=request.data['email'])
            except get_user_model().DoesNotExist:
                return Response({'email': 'Invalid email address.'}, status=status.HTTP_400_BAD_REQUEST)
            if user.is_active == True:
                payload = jwt_payload_handler(user)
                token = jwt_encode_handler(payload)
                current_site = get_current_site(request)
                mail_subject = 'Account Password Recovery'
                # Render the recovery email with a base64 uid + one-time token link.
                message = loader.get_template(
                    'emails/forgotPassword.html').render(
                    {
                        'name': user.first_name+' '+user.last_name,
                        'domain': current_site.domain,
                        'uid': urlsafe_base64_encode(force_bytes(user.pk)),
                        'token': PasswordResetTokenGenerator().make_token(user),
                    }
                )
                email = EmailMessage(mail_subject, message, to=[user.email])
                email.content_subtype = 'html'  # send as HTML, not plain text
                email.send()
                return Response({"token": token}, status=status.HTTP_200_OK)
            else:
                # Same message as the unknown-email case.
                error = dict()
                error['email'] = 'Invalid email address.'
                return Response(error, status=status.HTTP_400_BAD_REQUEST)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class ChangePasswordViewSet(viewsets.ViewSet):
    """Allow the authenticated user to change their own password."""
    serializer_class = resident.ChangePasswordSerializer

    def create(self, request):
        """Validate the payload, then set and persist the new password."""
        serializer = self.serializer_class(data=request.data, context={'request': self.request})
        if not serializer.is_valid():
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        user = request.user
        user.set_password(request.data['new_password'])
        user.save()
        return Response({'status': 'success'}, status=status.HTTP_200_OK)
class PropertyViewSet(viewsets.GenericViewSet):
    """Anonymous read-only lookups down the community/area/street/lot hierarchy."""
    permission_classes = [AllowAny]

    def get_queryset(self):
        """Base queryset required by GenericViewSet."""
        return Community.objects.all()

    @action(detail=False, methods=['get'],permission_classes=[AllowAny],serializer_class=resident.CommunitySerializer)
    def getcommunity(self,request):
        """List every community."""
        rows = Community.objects.all()
        payload = resident.CommunitySerializer(rows, context={'request': request}, many=True).data
        return Response(payload, status=status.HTTP_200_OK)

    @action(detail=False, methods=['get'],permission_classes=[AllowAny],serializer_class=resident.AreaSerializer)
    def getarea(self,request):
        """List the areas of the community given by the ?id= query parameter."""
        parent = self.request.query_params.get('id', None)
        rows = get_list_or_404(Area.objects.all(), community_id=parent)
        payload = resident.AreaSerializer(rows, context={'request': request}, many=True).data
        return Response(payload, status=status.HTTP_200_OK)

    @action(detail=False, methods=['get'],permission_classes=[AllowAny],serializer_class=resident.StreetSerializer)
    def getstreet(self,request):
        """List the streets of the area given by the ?id= query parameter."""
        parent = self.request.query_params.get('id', None)
        rows = get_list_or_404(Street.objects.all(), area_id=parent)
        payload = resident.StreetSerializer(rows, context={'request': request}, many=True).data
        return Response(payload, status=status.HTTP_200_OK)

    @action(detail=False, methods=['get'],permission_classes=[AllowAny],serializer_class=resident.LotzSerializer)
    def getlot(self,request):
        """List the lots of the street given by the ?id= query parameter."""
        parent = self.request.query_params.get('id', None)
        rows = get_list_or_404(Lot.objects.all(), street_id=parent)
        payload = resident.LotzSerializer(rows, context={'request': request}, many=True).data
        return Response(payload, status=status.HTTP_200_OK)
# Module-level view callable exposed to the URLconf for JWT login.
custom_obtain_jwt_token = ObtainJSONWebToken.as_view()
#create species or character class
import json
def create_log(type):
    """Interactively create (or extend) a species/class entry stored in <type>.json.

    ``type`` names both the JSON file and the kind of entry ("Species" or
    "Classes").  NOTE: the parameter name shadows the ``type`` builtin; kept
    unchanged for backward compatibility with existing callers.
    """
    fname = type + ".json"
    try:
        with open(fname) as f:
            log = json.load(f)
    # Bug fix: was a bare ``except:`` — only a missing or corrupt file should
    # mean "start with an empty log"; other errors now propagate.
    except (FileNotFoundError, json.JSONDecodeError):
        log = {}
    c = input("Enter " + type + " : ")
    # Only create the entry when it does not already exist (case-insensitive).
    if c.upper() not in log.keys():
        log[c.upper()] = {}
        atts = log[c.upper()]
        # Attribute template: name -> expected input format for testtype().
        if type.upper() == "SPECIES":
            att_list = {"Description": "string",
                        "Max Age": "int", "Max Health": "int",
                        "Max Damage": "int", "Max Dexterity": "int",
                        "Hit %": "int", "Block %": "int", "Natural Fighter": "yn"}
        elif type.upper() == "CLASSES":
            att_list = {"Description": "string",
                        "Health Bonus (+/-)": "int",
                        "Damage Bonus (+/-)": "int", "Dexterity Bonus (+/-)": "int",
                        "Warrior": "yn", "Magic User": "yn", "Monster": "yn"}
        else:
            # Bug fix: an unknown type previously crashed with NameError below.
            att_list = {}
        # Collect a value for every attribute in the template.
        for key in att_list:
            print(att_list, att_list[key])
            atts = addval(key, atts, att_list[key])
    else:
        print(type + " already exists")
    savelog(fname, log)
    return log
#collects and adds attribute and its
#value to a class or species dictionary
def addval(x,log,fmt):
    """Prompt for attribute ``x`` until the input matches format ``fmt``, then store it in ``log``."""
    status=0
    # Re-prompt until testtype() accepts the value for the requested format.
    # (If fmt is unknown, testtype returns None, which also ends the loop.)
    while status==0:
        val=input("Enter {}: ".format(x))
        status=testtype(val,fmt)
    # First value wins: duplicate attribute names are reported, not overwritten.
    if x not in log.keys():
        log[x]=val
    else:
        print("Error already exists")
    return log
#test if the user input is of valid type
def testtype(v,f):
if f =="int":
try:
v=int(v)
return 1
except:
print("\nNumber Must Be an Integer\n")
return 0
if f=="yn":
if v.upper()==("Y"or"N"or"YES"or"NO"):
return 1
else:
print("\nEnter Y or N ONLY\n")
return 0
if f=="string":
return 1
#saves new class or species to a json file
def savelog(file,data):
    """Serialise ``data`` to ``file`` as JSON, replacing any previous contents."""
    with open(file, "w") as out:
        out.write(json.dumps(data))
# Demo driver: interactively build the "Species" log and show the result.
log=create_log("Species")
print(log)
|
from django.urls import path
from .views import *
from . import views
# URL routes for the book shop app: class-based views for the main pages,
# function views for edit/delete and the post-action confirmation pages.
urlpatterns = [
    path('', IndexView.as_view(), name='home'),
    path('shop/', ShopView.as_view(), name='shop'),
    path('book_single/', BookSingleView.as_view(), name='book_single'),
    path('create/', BookCreate.as_view(), name='book_create'),
    path('update/edit/<int:id>/', views.edit, name='book_edit'),
    path("create_done/", views.create_done, name='create_done'),
    path("edit_done/", views.edit_done, name='edit_done'),
    path("update/", BookUpdate.as_view(), name='book_update'),
    path('delete/<int:id>/', views.delete, name='book_delete')
]
|
# dungeon crawler game for me and my friends
from tkinter import *
from weapon import *
from player import *
from goblin import *
from Boss import *
import time as time
class Game():
    """Empty base/marker class for game screens (see Start below)."""
    pass
# --- module-level UI setup --------------------------------------------------
root = Tk()
root.title('Mictlan')
# The player; arguments are presumably starting HP and weapon name — see Player class.
p = Player(10, "None")
root.geometry('1690x1120')
frame = Frame(root)
frame.pack(side=BOTTOM)
labelfont = ('times', 10, 'bold')
labelfont2 = ('times', 12)
# Text widgets used as status displays (weapon, health, narration).
wep = Text(root, height=2, width=18, font=labelfont)
health = Text(root, height=2, width=15, font=labelfont)
dialog = Text(frame, height=2, width=82, font=labelfont2)
wep.pack(side=TOP, anchor=W)
health.pack(side=TOP, anchor=W)
dialog.pack(side=TOP)
canvas = Canvas(root, width=1152, height=648)
canvas.pack()
# Scene artwork; module-level names also keep the PhotoImage references alive,
# which Tk requires for the images to stay displayed.
img = PhotoImage(file="swords.png")
img2 = PhotoImage(file="cave.png")
img3= PhotoImage(file="goblin.png")
img4 = PhotoImage(file="goblin_dead.png")
img5 = PhotoImage(file="fight.png")
img6 = PhotoImage(file="dead.png")
img7 = PhotoImage(file="complete.png")
img8 = PhotoImage(file="died.png")
icon = PhotoImage(file="icon.png")
root.iconphoto(False, icon)
picture = canvas.create_image(20, 20, anchor=NW, image=img)
class Start(Game):
    """Every screen of the dungeon crawl, driven by Tk button callbacks."""
    def begin(self):
        """Title screen: wire up the Start button and enter the Tk main loop."""
        start = Button(frame, text="Start", font=labelfont, bg="grey", fg="black", height=10, width=30,
                       command= lambda: self.choose_weapon(start))
        dialog.insert(INSERT, "Welcome to my game. Get started by pressing the start button then choose your weapon.")
        start.pack()
        wep.insert(INSERT, 'Weapon: ' + p.get_weapon())
        health.insert(INSERT, p.get_HP())
        root.mainloop()
    def choose_weapon(self, start):
        """Swap the Start button for the three weapon-choice buttons."""
        dagger = Button(frame, text="Dagger", font=labelfont, bg="grey", fg="red", height=10, width=30,
                        command=lambda: [p.set_weapon("Dagger"), p.set_damage("Dagger"),
                                         self.entrance(), dagger.pack_forget(), longsword.pack_forget(),
                                         scimitar.pack_forget()])
        longsword = Button(frame, text="Longsword", font=labelfont, bg="grey", fg="green", height=10, width=30,
                           command=lambda:[p.set_weapon("Longsword"), p.set_damage("Longsword"),
                                           self.entrance(), dagger.pack_forget(), longsword.pack_forget(), scimitar.pack_forget()])
        scimitar = Button(frame, text="Scimitar", font=labelfont, bg="grey", fg="blue", height=10, width=30,
                          command=lambda: [p.set_weapon("Scimitar"), p.set_damage("Scimitar"),
                                           self.entrance(), dagger.pack_forget(), longsword.pack_forget(), scimitar.pack_forget()])
        dialog.delete(1.0, END)
        dialog.insert(INSERT, "Choose your weapon. Choose between a dagger, longsword, or scimitar.")
        start.pack_forget()
        dagger.pack(side=LEFT)
        longsword.pack(side=LEFT)
        scimitar.pack(side=LEFT)
    def entrance(self):
        """Cave entrance screen; updates the weapon display and offers ENTER."""
        picture = canvas.create_image(20, 20, anchor=NW, image=img2)
        labelfont = ('times', 10, 'bold')
        wep.delete(1.0, END)
        wep.insert(INSERT, "Weapon: " + p.get_weapon())
        enter = Button(frame, text="ENTER", font=labelfont, bg="grey", fg="yellow", height=10, width=30,
                       command=lambda: self.goblin_fight(enter))
        dialog.delete(1.0, END)
        dialog.insert(INSERT, "Dangerous beings dwell in this cave. DO you dare to find out who they are?")
        enter.pack()
    def goblin_fight(self, enter):
        """First encounter: trade blows with a goblin, or run back outside."""
        picture = canvas.create_image(20, 20, anchor=NW, image=img3)
        g1 = Goblin()
        g2 = Goblin()  # NOTE(review): unused — confirm whether a second goblin was planned
        attack = Button(frame, text="ATTACK", font=labelfont, bg="grey", fg="red", height=10, width=30,
                        command=lambda: [p.set_HP(p.get_HP() - 1), health.delete(1.0, END),
                                         health.insert(INSERT, p.get_HP()),
                                         g1.set_HP(g1.get_HP() - p.get_damage()), goblin_health.delete(1.0, END),
                                         goblin_health.insert(INSERT, "Goblin HP: " + str(g1.get_HP())),
                                         # Bug fix: pass a callable so goblin_dead runs after the
                                         # 1s delay; the original called it immediately and
                                         # handed its return value (None) to root.after.
                                         root.after(1000, lambda: self.goblin_dead(g1, attack, run, goblin_health))])
        run = Button(frame, text="RUN", font=labelfont, bg="grey", fg="blue", height=10, width=30,
                     command=lambda: [self.entrance(), attack.pack_forget(), run.pack_forget(), goblin_health.pack_forget()])
        goblin_health = Text(root, height=2, width=20, font=labelfont)
        goblin_health.pack(side=TOP, anchor=N)
        goblin_health.insert(INSERT, "Goblin HP: " + str(g1.get_HP()))
        enter.pack_forget()
        attack.pack(side=LEFT)
        run.pack(side=LEFT)
        dialog.delete(1.0, END)
        dialog.insert(INSERT, "You enter the cave and encounter a Goblin! ")
    def goblin_dead(self, goblin, button, button2, entry):
        """After an attack: if the goblin is dead, offer WALK (boss) or RUN (entrance)."""
        picture = canvas.create_image(20, 20, anchor=NW, image=img4)
        gob_HP = goblin.get_HP()
        if(gob_HP < 1):
            dialog.delete(1.0, END)
            dialog.insert(INSERT, "You managed to kill the goblin! Continue down the cave... or turn back.")
            # NOTE(review): time.sleep blocks the Tk event loop; root.after would be smoother.
            time.sleep(1)
            button.pack_forget()
            button2.pack_forget()
            walk = Button(frame, text="WALK", font=labelfont, bg="grey", fg="blue", height=10, width=30,
                          command=lambda: [self.boss(), walk.pack_forget(), runs.pack_forget(), entry.pack_forget()])
            runs = Button(frame, text="RUN", font=labelfont, bg="grey", fg="blue", height=10, width=30,
                          command=lambda: [self.entrance(), walk.pack_forget(), runs.pack_forget(), entry.pack_forget()])
            walk.pack(side=LEFT)
            runs.pack(side=LEFT)
    def boss(self):
        """Boss fight screen: attack, run, or drink a healing potion."""
        picture = canvas.create_image(20, 20, anchor=NW, image=img5)
        boss = Boss()
        attack = Button(frame, text="ATTACK", font=labelfont, bg="grey", fg="red", height=10, width=30,
                        command=lambda: [p.set_HP(p.get_HP()-boss.get_damage()), health.delete(1.0, END), health.insert(INSERT, p.get_HP()),
                                         boss_health.delete(1.0, END), boss.set_HP(boss.get_HP()-p.get_damage()),
                                         boss_health.insert(INSERT, "Toby's HP: " + str(boss.get_HP())), self.end(boss, attack, run, potion, boss_health)])
        run = Button(frame, text="RUN", font=labelfont, bg="grey", fg="blue", height=10, width=30,
                     command=lambda: [self.entrance(), attack.pack_forget(), potion.pack_forget(), boss_health.pack_forget(), run.pack_forget()])
        potion = Button(frame, text="DRINK POTION", font=labelfont, bg="grey", fg="yellow", height=10, width=30,
                        command=lambda: [self.potion(potion), health.delete(1.0, END), health.insert(INSERT, p.get_HP())])
        boss_health = Text(root, height=2, width=20, font=labelfont)
        boss_health.pack(side=TOP, anchor=N)
        boss_health.insert(INSERT, "Toby's HP: " + str(boss.get_HP()))
        dialog.delete(1.0, END)
        dialog.insert(INSERT, "You encounter the boss Toby! FIGHT!")
        attack.pack(side=LEFT)
        run.pack(side=LEFT)
        potion.pack(side=LEFT)
    def potion(self, potion):
        """One-shot heal: +5 HP, then the potion button disappears."""
        p.set_HP(p.get_HP()+5)
        dialog.delete(1.0, END)
        dialog.insert(INSERT, "You've healed yourself 5 hitpoints!")
        potion.pack_forget()
    def end(self, boss, button, button2, button3, entry):
        """After each boss exchange: victory screen, death screen, or keep fighting."""
        if(boss.get_HP() < 1 and p.get_HP() > 0):
            time.sleep(1)
            dialog.delete(1.0, END)
            picture = canvas.create_image(20, 20, anchor=NW, image=img6)
            dialog.insert(INSERT, "The boss has been defeated! Please exit the cave.")
            complete = Button(frame, text="PASS", bg="grey", fg="blue", height=10, width=30,
                              command=lambda: [self.leaving(), complete.pack_forget()])
            complete.pack()
            entry.pack_forget()
            button.pack_forget()
            button2.pack_forget()
            button3.pack_forget()
        elif(p.get_HP() < 1):
            time.sleep(1)
            picture = canvas.create_image(20, 20, anchor=NW, image=img8)
            dialog.delete(1.0, END)
            dialog.insert(INSERT, "You have died!")
            died = Button(frame, text="END", bg="grey", fg="blue", height=10, width=30,
                          command=lambda: root.destroy())
            died.pack()
            entry.pack_forget()
            button.pack_forget()
            button2.pack_forget()
            button3.pack_forget()
    def leaving(self):
        """Final screen: show the completion image and a LEAVE (quit) button."""
        time.sleep(1)
        picture = canvas.create_image(20, 20, anchor=NW, image=img7)
        leave = Button(frame, text="LEAVE", bg="grey", fg="green", height=10, width=30, command=lambda: root.destroy())
        leave.pack()
# Script entry point: build the start screen and hand control to Tk's main loop.
if __name__ == '__main__':
    session = Start()
    session.begin()
#Wil Collins
#Part 3
import random
number = int(input("Please enter a number between 10 and 10,000: "))
while number<10 or number > 10000 :
number = int(input("That was an incorrect number please try again. "))
count = 1
wins = 0
percent = wins/count
for number in range(0,number+1,):
person = random.randint(1,3)
car = random.randint(1,3)
#all win on stay
if car== 1 and person ==1 :
snap = input("Do you want to always switch or stay always: ")
while snap.lower() != "stay" and snap.lower() != "switch":
snap = input("Please try again ")
if snap.lower() == "switch":
count+=1
#loss
elif snap.lower() == "stay" :
count+=1
wins+=1
if car== 2 and person ==2 :
snap = input("Do you want to always switch or stay always: ")
while snap.lower() != "stay" and snap.lower() != "switch":
snap = input("Please try again ")
if snap.lower() == "switch":
count+=1
#loss
elif snap.lower() == "stay" :
count+=1
wins+=1
if car== 1 and person ==1 :
snap = input("Do you want to always switch or stay always: ")
while snap.lower() != "stay" and snap.lower() != "switch":
snap = input("Please try again ")
if snap.lower() == "switch":
count+=1
#loss
elif snap.lower() == "stay" :
count+=1
wins+=1
#all wins on switch
elif car== 3 and person ==1:
snap = input("Do you want to always switch or stay always: ")
while snap.lower() != "stay" and snap.lower() != "switch":
snap = input("Please try again ")
if snap.lower() == "switch":
person = random.randint(2,3)
if person == car:
win +=1
count+=1
else:
count+=1
elif snap.lower() == "stay" :
count+=1
elif car== 2 and person ==1:
snap = input("Do you want to always switch or stay always: ")
while snap.lower() != "stay" and snap.lower() != "switch":
snap = input("Please try again ")
if snap.lower() == "switch":
person = random.randint(2,3)
if person == car:
win +=1
count+=1
else:
count+=1
elif snap.lower() == "stay" :
count+=1
elif car== 1 and person ==3:
snap = input("Do you want to always switch or stay always: ")
while snap.lower() != "stay" and snap.lower() != "switch":
snap = input("Please try again ")
if snap.lower() == "switch":
person = random.randint(1,2)
if person == car:
win +=1
count+=1
else:
count+=1
elif snap.lower() == "stay" :
count+=1
elif car== 2 and person ==3:
snap = input("Do you want to always switch or stay always: ")
while snap.lower() != "stay" and snap.lower() != "switch":
snap = input("Please try again ")
if snap.lower() == "switch":
person = random.randint(1,2)
if person == car:
win +=1
count+=1
else:
count+=1
elif snap.lower() == "stay" :
count+=1
elif car== 1 and person ==2:
snap = input("Do you want to always switch or stay always: ")
while snap.lower() != "stay" and snap.lower() != "switch":
snap = input("Please try again ")
if snap.lower() == "switch":
num = [1,3]
person = random.choice(num)
if person == car:
win +=1
count+=1
else:
count+=1
elif snap.lower() == "stay" :
count+=1
elif car== 3 and person ==2:
snap = input("Do you want to always switch or stay always: ")
while snap.lower() != "stay" and snap.lower() != "switch":
snap = input("Please try again ")
if snap.lower() == "switch":
num = [1,3]
person = random.choice(num)
if person == car:
win +=1
count+=1
else:
count+=1
elif snap.lower() == "stay" :
count+=1
print("Player won ",wins,"/",count," games", format(percent, '0.2f'),"%")
|
import enum
import pathlib
from typing import Any, BinaryIO, Dict, List, Optional, Tuple, Union
from torchdata.datapipes.iter import CSVDictParser, Demultiplexer, Filter, IterDataPipe, IterKeyZipper, Mapper
from torchvision.prototype.datasets.utils import Dataset, EncodedImage, HttpResource, OnlineResource
from torchvision.prototype.datasets.utils._internal import (
getitem,
hint_sharding,
hint_shuffling,
INFINITE_BUFFER_SIZE,
path_accessor,
path_comparator,
read_categories_file,
)
from torchvision.prototype.tv_tensors import Label
from .._api import register_dataset, register_info
NAME = "oxford-iiit-pet"
class OxfordIIITPetDemux(enum.IntEnum):
    """Branch indices used to demultiplex the annotations archive."""
    SPLIT_AND_CLASSIFICATION = 0
    SEGMENTATIONS = 1
@register_info(NAME)
def _info() -> Dict[str, Any]:
    """Static dataset info: the category names read from the categories file."""
    return {"categories": read_categories_file(NAME)}
@register_dataset(NAME)
class OxfordIIITPet(Dataset):
    """Oxford IIIT Pet Dataset
    homepage="https://www.robots.ox.ac.uk/~vgg/data/pets/",
    """
    def __init__(
        self, root: Union[str, pathlib.Path], *, split: str = "trainval", skip_integrity_check: bool = False
    ) -> None:
        # Only the two official splits are accepted.
        self._split = self._verify_str_arg(split, "split", {"trainval", "test"})
        self._categories = _info()["categories"]
        super().__init__(root, skip_integrity_check=skip_integrity_check)
    def _resources(self) -> List[OnlineResource]:
        """Return the two archives (images, annotations) this dataset downloads."""
        images = HttpResource(
            "https://www.robots.ox.ac.uk/~vgg/data/pets/data/images.tar.gz",
            sha256="67195c5e1c01f1ab5f9b6a5d22b8c27a580d896ece458917e61d459337fa318d",
            preprocess="decompress",
        )
        anns = HttpResource(
            "https://www.robots.ox.ac.uk/~vgg/data/pets/data/annotations.tar.gz",
            sha256="52425fb6de5c424942b7626b428656fcbd798db970a937df61750c0f1d358e91",
            preprocess="decompress",
        )
        return [images, anns]
    def _classify_anns(self, data: Tuple[str, Any]) -> Optional[int]:
        """Route an annotation file to a demux branch by its parent directory name."""
        return {
            "annotations": OxfordIIITPetDemux.SPLIT_AND_CLASSIFICATION,
            "trimaps": OxfordIIITPetDemux.SEGMENTATIONS,
        }.get(pathlib.Path(data[0]).parent.name)
    def _filter_images(self, data: Tuple[str, Any]) -> bool:
        """Keep only .jpg files from the images archive."""
        return pathlib.Path(data[0]).suffix == ".jpg"
    def _filter_segmentations(self, data: Tuple[str, Any]) -> bool:
        """Drop hidden dot-files shipped inside the trimaps folder."""
        return not pathlib.Path(data[0]).name.startswith(".")
    def _prepare_sample(
        self, data: Tuple[Tuple[Dict[str, str], Tuple[str, BinaryIO]], Tuple[str, BinaryIO]]
    ) -> Dict[str, Any]:
        """Assemble one sample dict from the zipped (annotation, image) entries."""
        ann_data, image_data = data
        classification_data, segmentation_data = ann_data
        segmentation_path, segmentation_buffer = segmentation_data
        image_path, image_buffer = image_data
        return dict(
            # The "- 1" shifts the file's labels to 0-based category indices.
            label=Label(int(classification_data["label"]) - 1, categories=self._categories),
            species="cat" if classification_data["species"] == "1" else "dog",
            segmentation_path=segmentation_path,
            segmentation=EncodedImage.from_file(segmentation_buffer),
            image_path=image_path,
            image=EncodedImage.from_file(image_buffer),
        )
    def _datapipe(self, resource_dps: List[IterDataPipe]) -> IterDataPipe[Dict[str, Any]]:
        """Build the pipeline: split/classification CSV joined with trimaps, then with images."""
        images_dp, anns_dp = resource_dps
        images_dp = Filter(images_dp, self._filter_images)
        # Split the annotations archive into split/classification lists and trimaps.
        split_and_classification_dp, segmentations_dp = Demultiplexer(
            anns_dp,
            2,
            self._classify_anns,
            drop_none=True,
            buffer_size=INFINITE_BUFFER_SIZE,
        )
        split_and_classification_dp = Filter(split_and_classification_dp, path_comparator("name", f"{self._split}.txt"))
        split_and_classification_dp = CSVDictParser(
            split_and_classification_dp, fieldnames=("image_id", "label", "species"), delimiter=" "
        )
        split_and_classification_dp = hint_shuffling(split_and_classification_dp)
        split_and_classification_dp = hint_sharding(split_and_classification_dp)
        segmentations_dp = Filter(segmentations_dp, self._filter_segmentations)
        # Join annotations with their trimap by image id == file stem.
        anns_dp = IterKeyZipper(
            split_and_classification_dp,
            segmentations_dp,
            key_fn=getitem("image_id"),
            ref_key_fn=path_accessor("stem"),
            buffer_size=INFINITE_BUFFER_SIZE,
        )
        # Finally join with the image files, again keyed on the image id.
        dp = IterKeyZipper(
            anns_dp,
            images_dp,
            key_fn=getitem(0, "image_id"),
            ref_key_fn=path_accessor("stem"),
            buffer_size=INFINITE_BUFFER_SIZE,
        )
        return Mapper(dp, self._prepare_sample)
    def _filter_split_and_classification_anns(self, data: Tuple[str, Any]) -> bool:
        """True for files on the split/classification branch of the demux."""
        return self._classify_anns(data) == OxfordIIITPetDemux.SPLIT_AND_CLASSIFICATION
    def _generate_categories(self) -> List[str]:
        """Derive the ordered category names from the trainval annotation list."""
        resources = self._resources()
        dp = resources[1].load(self._root)
        dp = Filter(dp, self._filter_split_and_classification_anns)
        dp = Filter(dp, path_comparator("name", "trainval.txt"))
        dp = CSVDictParser(dp, fieldnames=("image_id", "label"), delimiter=" ")
        # image ids look like "<category>_<index>": strip the index, keep the label.
        raw_categories_and_labels = {(data["image_id"].rsplit("_", 1)[0], data["label"]) for data in dp}
        raw_categories, _ = zip(
            *sorted(raw_categories_and_labels, key=lambda raw_category_and_label: int(raw_category_and_label[1]))
        )
        return [" ".join(part.title() for part in raw_category.split("_")) for raw_category in raw_categories]
    def __len__(self) -> int:
        # Fixed, known sizes of the two official splits.
        return 3_680 if self._split == "trainval" else 3_669
|
# Read a user-specified number of integers into a typed array, then show the
# array and the index of the last value entered.
from array import array

arr = array('i', [])
n = int(input("enter the length"))
# Bug fix: the original looped range(5) regardless of the requested length n.
for i in range(n):
    x = int(input("enter the next value"))
    arr.append(x)
print(arr)
# Bug fix: guard the lookup — with n == 0 there is no last value to index.
if arr:
    print(arr.index(x))
|
"""CLI functions for the db module."""
from flask import Flask, Blueprint, current_app
from flask.cli import with_appcontext
import click
from ..util.logging import get_logger
from .db import DB
# make sure all models are imported for CLI to work properly
from . import models # noqa
DB_CLI_BLP = Blueprint("db_cli", __name__, cli_group=None)
DB_CLI = DB_CLI_BLP.cli # expose as attribute for autodoc generation
DB_COMMAND_LOGGER = "db"
@DB_CLI.command("create-db")
@with_appcontext
def create_db():
    """Create all db tables."""
    # CLI entry point; the work is delegated so it can also be called from code.
    create_db_function(current_app)
    click.echo("Database created.")
def create_db_function(app: Flask):
    """Create all tables for the models imported above and log the result."""
    DB.create_all()
    get_logger(app, DB_COMMAND_LOGGER).info("Database created.")
@DB_CLI.command("drop-db")
@with_appcontext
def drop_db():
    """Drop all db tables."""
    # Destructive: removes every table known to the imported models.
    drop_db_function(current_app)
    click.echo("Database dropped.")
def drop_db_function(app: Flask):
    """Drop all tables and log the action."""
    DB.drop_all()
    get_logger(app, DB_COMMAND_LOGGER).info("Dropped Database.")
def register_cli_blueprint(app: Flask):
    """Method to register the DB CLI blueprint."""
    app.register_blueprint(DB_CLI_BLP)
    app.logger.info("Registered blueprint.")
|
'''
最长回文子序列
dp数组的运用:
1) 涉及两个字符串/数组时(比如最长公共子序列),dp 数组的含义如下:
在子数组 arr1[0..i] 和子数组 arr2[0..j] 中,我们要求的子序列(最长公共子序列)长度为 dp[i][j]
2) 只涉及一个字符串/数组时(比如本文要讲的最长回文子序列),dp 数组的含义如下:
在子数组 array[i..j] 中,我们要求的子序列(最长回文子序列)的长度为 dp[i][j]
'''
from collections import defaultdict
import numpy as np
def longestPalindromeSubseq(s):
    """Return the length of the longest palindromic subsequence of s.

    Classic interval DP: dp[i][j] holds the answer for the slice s[i..j].
    Runs in O(n^2) time and space. Returns 0 for the empty string
    (the original crashed on it) and a plain int instead of a numpy float.
    The debug print of the full table was removed.
    """
    n = len(s)
    if n == 0:
        return 0
    dp = [[0] * n for _ in range(n)]
    for i in range(n):
        dp[i][i] = 1  # every single character is a palindrome of length 1
    # Iterate i from high to low so dp[i+1][...] is ready when dp[i][...] needs it.
    for i in range(n - 2, -1, -1):
        for j in range(i + 1, n):
            if s[i] == s[j]:
                # Matching ends extend the best inner palindrome by 2.
                dp[i][j] = dp[i + 1][j - 1] + 2
            else:
                dp[i][j] = max(dp[i][j - 1], dp[i + 1][j])
    # The answer for the whole string sits in the top-right corner.
    return dp[0][n - 1]
# Demo call; the result (6 for 'abccba') is computed but not printed here.
s = 'abccba'
longestPalindromeSubseq(s)
import ConfigParser
from datetime import datetime
import time
import os
from flask import json
import requests
from weasyprint import HTML
# Directory of this script, used to anchor template and output file paths.
file_path = os.path.dirname(os.path.realpath(__file__)) + "/"
REPORT_DURATION = 1.8e+6 # 30 minutes
config = ConfigParser.ConfigParser()
# server.cfg lives two directories above this script.
config.read(os.path.dirname(os.path.realpath(__file__)) + '/../../server.cfg')
cps_address = config.get('CPS', 'address')
def get_incidents():
    """Query the CPS service for incidents completed within REPORT_DURATION ms."""
    query_object = {
        "completeTime": {
            # timestamps are in milliseconds; look back REPORT_DURATION ms
            "after": int(time.time() * 1000 - REPORT_DURATION),
            "allowIncomplete": True
        },
        "isLatest": True
    }
    post_package = {
        "query": query_object,
        # NOTE(review): credentials are hard-coded; consider moving to server.cfg.
        "operator": {
            "username": "operator5",
            "password": "1234"
        }
    }
    # NOTE(review): no request timeout is set — a stalled server blocks the report.
    r = requests.post(cps_address + "request", data=json.dumps(post_package))
    incidents = json.loads(r.text)
    # print "receive incidents:", incidents
    return incidents
def get_content():
    """Render the recent incidents, grouped by type, as an HTML fragment.

    Returns the same markup as before; the fragment is now assembled with
    a list + join instead of repeated string concatenation, and grouping
    uses dict.setdefault instead of a membership test on .keys().
    """
    incidents = get_incidents()['data']
    # Group incidents by their type name, preserving encounter order.
    type_map = {}
    for incident in incidents:
        type_map.setdefault(incident['type'], []).append(incident)
    parts = ["<h1>Incidents Statistics</h1>"]
    for type_name, incident_list in type_map.items():
        parts.append("<div><h3>")
        parts.append(type_name)
        parts.append("</h3><br/> Total: " + str(len(incident_list)))
        parts.append("<br/>")
        for i, incident in enumerate(incident_list, 1):
            parts.append("> " + str(i))
            # Use the last string-typed entry as the display location.
            location = ""
            for item in incident['location']:
                if item["type"] == "string":
                    location = item["location"]
            parts.append("<br />")
            parts.append(" >>> Location: " + location)
            parts.append("<br />")
            parts.append(" >>> Remark: " + incident['remark'])
            parts.append("<br />")
        parts.append("</div>")
    # NOTE(review): field values are not HTML-escaped; acceptable only for
    # trusted data — escape if incident fields may contain markup.
    return "".join(parts)
def generate_file():
    """Assemble report.html from header.html + generated content + footer.html.

    BUG FIX: the original never closed either template handle and rebound
    'h' to footer.html while header.html was still open; with-blocks now
    guarantee every handle is closed even if a write fails.
    """
    with open('report.html', 'w') as out:
        with open('header.html', 'r') as header:
            for line in header:
                out.write(line)
        out.write(get_content())
        with open('footer.html', 'r') as footer:
            for line in footer:
                out.write(line)
def generate_report():
    """Render report.html to a timestamped PDF and return the PDF file name."""
    generate_file()
    # NOTE(review): str(datetime.now()) puts spaces and colons in the file
    # name; some filesystems/tools handle that badly — confirm acceptable.
    file_name = "report_" + str(datetime.now()) + ".pdf"
    HTML("file://" + file_path + "report.html").write_pdf(file_path + file_name)
    return file_name
if __name__ == "__main__":
    # Python 2 entry point: build the PDF and report where it was written.
    report = generate_report()
    print "Report generated at", report
# Demonstrates text vs. binary file handles. Using 'with' guarantees both
# handles are closed even on error; the original leaked the first handle if
# opening the second file raised.
with open('text.txt', 'r') as text_file, \
        open('d:/myimages/mypicture1.jpg', 'rb') as image_file:
    pass  # opened and immediately closed, exactly like the original
|
import numpy as np
import random
def sorted_split(x, y, n_shards):
sorted_index = np.argsort(y)
x = x[sorted_index]
y = y[sorted_index]
shard_size = len(x) // n_shards
init_indice = np.arange(0, len(x), shard_size)
print(f"the number of shard : {len(init_indice)}")
x = np.array([x[s:s+shard_size] for s in init_indice])
y = np.array([y[s:s+shard_size] for s in init_indice])
return x, y
def random_split(x, y, n_shards, shuffle=True):
    """Split index-aligned arrays (x, y) into shards of equal size.

    When shuffle is True the sample order is randomised first (in place,
    via random.shuffle on the index array); otherwise the original order
    is kept. Returns the stacked x-shards and y-shards.
    """
    shard_size = len(x) // n_shards
    order = np.arange(len(y))
    if shuffle:
        random.shuffle(order)
    x_perm, y_perm = x[order], y[order]
    starts = np.arange(0, len(x_perm), shard_size)
    print(f"the number of shard : {len(starts)}")
    x_shards = np.array([x_perm[start:start + shard_size] for start in starts])
    y_shards = np.array([y_perm[start:start + shard_size] for start in starts])
    return x_shards, y_shards
from taiga.requestmaker import RequestMaker
from taiga.models import Role, Roles
import unittest
from mock import patch
class TestRoles(unittest.TestCase):
    """Unit tests for the Roles list resource."""

    @patch('taiga.models.base.ListResource._new_resource')
    def test_create_role(self, mock_new_resource):
        """Roles.create should delegate to _new_resource with project id and name."""
        rm = RequestMaker('/api/v1', 'fakehost', 'faketoken')
        mock_new_resource.return_value = Role(rm)
        sv = Roles(rm).create(1, 'RL 1')
        mock_new_resource.assert_called_with(
            payload={'project': 1, 'name': 'RL 1'}
        )
|
from spack import *
from spack.util.environment import is_system_path
import os,re
class Stitched(CMakePackage):
    """Spack package for the CMS 'Stitched' framework, built with CMake."""

    homepage = "https://github.com/cms-sw/stitched.git"
    url = "https://github.com/cms-sw/stitched.git"
    version('master', git='https://github.com/cms-sw/Stitched.git',
            branch="master")
    # cmaketools is staged alongside the sources and pointed to via CMakeTools_DIR.
    resource(name='cmaketools', git='https://github.com/gartung/cmaketools.git',
             placement="cmaketools")
    depends_on('md5-cms')
    depends_on('boost')
    depends_on('python')
    depends_on('py-pybind11', type=('link', 'run', 'test'))
    depends_on('tinyxml2@6.2.0')
    depends_on('root ~opengl cxxstd=17')
    depends_on('fmt+pic cxxstd=17')
    depends_on('xrootd')
    depends_on('clhep@2.4.1.3')
    # BUG FIX: cmake_args() looks up self.spec['intel-oneapi-tbb'], but the
    # dependency was declared as 'intel-tbb-oneapi'; the mismatched name made
    # the spec lookup fail at build time. Align on the real package name.
    depends_on('intel-oneapi-tbb')
    depends_on('cppunit', type=('link', 'test'))
    depends_on('xerces-c')
    depends_on('catch2', type=('link', 'test'))
    depends_on('googletest', type=('link', 'test'))
    depends_on('benchmark@1.4.1', type=('link', 'test'))

    def cmake_args(self):
        """Assemble the CMake cache arguments from the concretized spec."""
        cxxstd = self.spec['root'].variants['cxxstd'].value
        args = ['-DCMakeTools_DIR=%s/cmaketools' % self.stage.source_path]
        args.append('-DCLHEP_ROOT_DIR=%s' % self.spec['clhep'].prefix)
        args.append('-DBOOST_ROOT=%s' % self.spec['boost'].prefix)
        args.append('-DTBB_ROOT_DIR=%s' % self.spec['intel-oneapi-tbb'].prefix.tbb.latest)
        args.append('-D_TBB_COMPILER=gcc4.8')
        args.append('-DTINYXML2_ROOT_DIR=%s' % self.spec['tinyxml2'].prefix)
        args.append('-DCMSMD5ROOT=%s' % self.spec['md5-cms'].prefix)
        # Keep the C++ standard in sync with the one ROOT was built with.
        args.append('-DCMAKE_CXX_STANDARD=%s' % cxxstd)
        args.append('-DXROOTD_INCLUDE_DIR=%s/xrootd' % self.spec['xrootd'].prefix.include)
        args.append('-DCATCH2_INCLUDE_DIRS=%s/catch2' % self.spec['catch2'].prefix.include)
        args.append('-DBUILDTEST=BOOL:True')
        return args

    def setup_build_environment(self, env):
        # This hack is made necessary by a header name collision between
        # md5-cms and libmd md5.h
        # But dependencies without CMake defined includes need to be added back
        def prepend_include_path(dep_name):
            # Only add the path if it is not already provided by the system.
            include_path = self.spec[dep_name].prefix.include
            if not is_system_path(include_path):
                env.prepend_path('SPACK_INCLUDE_DIRS', include_path)
        prepend_include_path('md5-cms')
|
# Small icebreaker script: compares coding experience with a neighbor.
my_first_name = input("What is your name? ")
neigh_first_name = input("What is your neighbors name? ")
months_coding = input("How many months have you been coding? ")
neigh_months_coding = input("How many months has your neighbor been coding? ")
total_months_coded = int(months_coding) + int(neigh_months_coding)
# BUG FIX: the original printed the neighbor's month count instead of their
# name (and misspelled "neighbor" in the output).
print("I am " + my_first_name + " and my neighbor is " + neigh_first_name)
print("Together we have been coding for " + str(total_months_coded) + " months!")
#! /usr/bin/env python
"""
A node in the NLNOG ring.
"""
# ABOUT
# =====
# This file is part of:
#
# ringtools - A generic module for running commands on nodes of the NLNOG
# ring. More information about the ring: U{https://ring.nlnog.net}
#
# source code: U{https://github.com/NLNOG/py-ring}
#
# AUTHOR
# ======
# Teun Vink - teun@teun.tv
import threading, sys, Queue, os, time, socket
from paramiko import *
from exception import RingException
from result import NodeResult, NodeResultSet
DFLT_SSH_TIMEOUT = 20 # seconds
DFLT_FQDN = "ring.nlnog.net" # fqdn for nodes
DFLT_MAX_THREADS = 25 # number of concurrent threads
# ===========================================================================
class RingNode:
"""
A node of the NLNOG ring.
"""
STATE_DISCONNECTED = 0
STATE_CONNECTED = 1
STATE_AUTHENTICATED = 2
def __init__(self, hostname=None, username=None, ssh_client=None, ssh_agent=None, ssh_config=None, timeout=DFLT_SSH_TIMEOUT):
""" Create a new RingNode object.
@param hostname: the host in the ring to connect to
@type hostname: string
@param username: the username used when authenticating using SSH.
If no I{username} is specified the SSH configuration is checked.
@type username: string
@param ssh_client: a I{paramiko.SSHClient} object. If none is
specified a new one is created. Passing this as an argument is
useful when a lot of L{RingNode} objects are created: only one
SSHClient object is used then.
I{Not providing this parameter is no problem.}
@type ssh_client: paramiko.SSHClient
@param ssh_agent: a I{paramiko.Agent} object. If none is specified
a new agent object is created. Passing this as asn argument is useful
when a lot of L{RingNode} objects are created: only one SSHAgent object
is used.
I{Not providing this parameter is no problem.}
@type ssh_agent: paramiko.Agent
@param ssh_config: a I{paramiko.SSHConfig} object. If none is specified
a new object is created. Passing this as asn argument is useful
when a lot of L{RingNode} objects are created: only one SSHConfig object
is used.
I{Not providing this parameter is no problem.}
@type ssh_config: paramiko.SSHConfig
@param timeout: SSH timeout in seconds
@type timeout: integer
"""
self.hostname = hostname
self.username = username
self.ssh_client = ssh_client
self.ssh_config = ssh_config
self.timeout = timeout
self.state = RingNode.STATE_DISCONNECTED
self.stdin = None
self.stdout = None
self.stderr = None
if ssh_agent != None:
self.ssh_agent = ssh_agent
else:
self.ssh_agent = Agent()
def close(self):
""" Close the SSH connection to the node.
"""
if self.ssh_client != None:
self.ssh_client.close()
def connect(self, hostname=None, timeout=DFLT_SSH_TIMEOUT):
""" Open a SSH connection to the host. If ssh_client and ssh_config were not
specified when constructing the object they are created.
@param hostname: the name of the host to connect to (needed if not specified when making the object)
@type hostname: string
@param timeout: SSH timeout in seconds
@type timeout: integer
@raise RingException: when the connection failed
"""
if hostname == None and self.hostname == None:
# no idea what host to connect to
self.state = RingNode.STATE_DISCONNECTED
raise RingException('No host specified.')
elif hostname != None:
self.hostname = hostname
if self.ssh_client == None:
self.ssh_client = SSHClient()
if self.ssh_config == None:
self.ssh_config = SSHConfig()
self.ssh_config.parse(open(os.path.join(os.environ['HOME'], '.ssh', 'config'), 'r'))
if self.username == None:
self.username = self.ssh_config.lookup('%s.%s' % (hostname, DFLT_FQDN)).get('user', '')
self.ssh_client.set_missing_host_key_policy(AutoAddPolicy())
self.ssh_client.load_system_host_keys()
try:
self.ssh_client.connect(
hostname='%s.%s' % (self.hostname, DFLT_FQDN),
username=self.username,
allow_agent=True,
look_for_keys=True,
timeout=self.timeout)
self.state = RingNode.STATE_CONNECTED
return RingNode.STATE_CONNECTED
except BadHostKeyException, e:
self.state = RingNode.STATE_DISCONNECTED
raise RingException('Bad host key for %s.%s' % (self.hostname, DFLT_FQDN))
except SSHException, e:
self.state = RingNode.STATE_DISCONNECTED
raise RingException(e)
except socket.error, e:
self.state = RingNode.STATE_DISCONNECTED
raise RingException(e.__str__())
except socket.timeout, e:
self.state = RingNode.STATE_DISCONNECTED
raise RingException('Socket timeout.')
def authenticate(self):
""" Authenticate on the SSH session.
If the SSH agent provides more than on SSH-key all of the
keys are tried.
@raise RingException: if the authentication failed
"""
if self.state == RingNode.STATE_DISCONNECTED or self.ssh_client == None or self.ssh_agent == None:
connect()
transport = self.ssh_client.get_transport()
channel = transport.open_session()
for key in self.ssh_agent.get_keys():
try:
channel.auth_publickey(key)
break
except:
# wrong key, nothing to worry about since people can have
# multiple keys available in their agent
continue
try:
if transport.is_authenticated:
self.state = RingNode.STATE_AUTHENTICATED
return RingNode.STATE_AUTHENTICATED
else:
self.state = RingNode.STATE_CONNECTED
raise RingException('Failed to authenticate.')
except Exception, e:
self.state = RingNode.STATE_DISCONNECTED
raise RingException(e)
def run_command(self, command):
""" Execute a command using the SSH connection.
Create a connection and authenticate if not done yet.
@param command: the command to be executed
@type command: string
@return: object containing the exitcode,
output of stdout and stderr and additional data
@rtype NodeResult
"""
if self.state == RingNode.STATE_DISCONNECTED:
self.connect()
if self.state == RingNode.STATE_CONNECTED:
self.authenticate()
transport = self.ssh_client.get_transport()
channel = transport.open_session()
if transport.is_authenticated:
self.stdin = channel.makefile('wb')
self.stdout = channel.makefile('rb')
self.stderr = channel.makefile_stderr('rb')
self.state = RingNode.STATE_AUTHENTICATED
channel.exec_command(command)
result = NodeResult(
hostname = self.hostname,
ssh_result= NodeResult.SSH_OK,
exitcode = channel.recv_exit_status(),
stdout = [line.strip() for line in self.stdout],
stderr = [line.strip() for line in self.stderr])
return result
def get_state(self):
""" Return the state of the SSH connection.
return: state (STATE_DISCONNECTED = 0,
STATE_CONNECTED = 1, STATE_AUTHENTICATED = 2)
rtype integer
"""
return self.state
# ===========================================================================
class NodeCommandThread(threading.Thread):
    ''' a thread for processing commands to a node via SSH
    '''

    def __init__(self, queue, command, agent, timeout=DFLT_SSH_TIMEOUT, analyse=None):
        """ Create a new NodeCommandThread object.
        @param queue: queue of node names on which the command is to be executed
            (a Queue.Queue shared between worker threads)
        @param command: the command to be executed
        @type command: string
        @param agent: a I{paramiko.Agent} SSH-agent object.
        @param timeout: the SSH timeout in seconds
        @type timeout: integer
        @param analyse: callback analyse function, called after the command has
            been executed with the L{NodeResult} as its argument.
        @type analyse: function
        """
        self.queue = queue
        self.command = command
        self.agent = agent
        self.timeout = timeout
        self.result = NodeResultSet()
        self.analyse = analyse
        threading.Thread.__init__(self)

    def run(self):
        """ Execution of the thread.
        """
        # read default SSH config
        ssh = SSHClient()
        conf = SSHConfig()
        # use the local SSH configuration for keys, known hosts, etc
        conf.parse(open(os.path.join(os.environ['HOME'], '.ssh', 'config'), 'r'))
        ssh.set_missing_host_key_policy(AutoAddPolicy())
        ssh.load_system_host_keys()
        # continue to process hosts until the queue is empty
        # NOTE(review): queue.get() is a blocking call here, so Queue.Empty is
        # only raised if the queue is used non-blockingly elsewhere; as written
        # the thread appears to wait forever once the queue drains — confirm.
        while True:
            try:
                starttime = time.time()
                # pick the next available host
                host = self.queue.get()
                result = NodeResult(host)
                node = RingNode(host)
                try:
                    # some template replacements
                    cmd = self.command.replace("%%HOST%%", host)
                    result = node.run_command(cmd)
                except RingException, e:
                    # record the SSH failure instead of aborting the thread
                    result.set_ssh_result(NodeResult.SSH_ERROR)
                    result.set_ssh_errormsg(e.__str__())
                finally:
                    node.close()
                if self.analyse:
                    self.analyse(result)
                # wall-clock duration of this host, including connect time
                result.add_value('runtime', time.time() - starttime)
                self.result.append(result)
            except Queue.Empty:
                # we're done!
                pass
            finally:
                self.queue.task_done()

    def get_result(self):
        """ Get the result of the execution of the command.
        @return: L{NodeResult} object with all information
        @rtype: NodeResult
        """
        return self.result
|
# Convert an amount in British pounds (read from stdin) to US dollars.
EXCHANGE_RATE_POUND_TO_DOLLARS = 1.31
# Generalized: accept fractional amounts too; integer inputs parse the same.
pounds = float(input())
pounds_to_dollars = pounds * EXCHANGE_RATE_POUND_TO_DOLLARS
print("{:.3f}".format(pounds_to_dollars))
|
# Date: 09/09/2020
# Author: rohith mulumudy
# Description: stores san domain data.
import json
class San:
    """Extracts SAN (Subject Alternative Name) domains from a certificate
    dump and persists the deduplicated set to a text file."""

    def __init__(self, in_file="certs.json", san_file="sans.txt"):
        self.in_file = in_file    # JSON file holding the certificate records
        self.san_file = san_file  # text file the extracted domains go to

    def get_san_lst(self, san):
        """Parse a ';'-separated SAN string into a list of lowercase domains.

        Entries look like "DNS:example.com"; wildcard entries such as
        "DNS:*.example.com" have the leading "*." stripped. Entries without
        a ':' separator are ignored.
        """
        domains = []
        for entry in san.split(';'):
            if entry.find(":") < 0:
                continue  # no "type:value" separator -> not a SAN entry
            value = entry.split(':')[1]
            if '*' in entry:
                # wildcard: drop the "*." prefix before lowercasing
                domains.append(value[2:].lower())
            else:
                domains.append(value.lower())
        return domains

    def store_sans(self):
        """Write every SAN domain found in the input JSON to the SAN file."""
        with open(self.in_file) as src:
            certs = json.load(src)
        with open(self.san_file, 'w') as dst:
            for record in certs:
                for domain in self.get_san_lst(record['certificate']['san']):
                    dst.write(domain + '\n')

    def edit_san_file(self):
        """Deduplicate the SAN file in place and return the unique domains."""
        unique_domains = set()
        with open(self.san_file) as src:
            for line in src:
                unique_domains.add(line.strip())
        with open(self.san_file, 'w') as dst:
            for domain in unique_domains:
                dst.write(domain + '\n')
        return unique_domains

    def get_current_round_sans(self):
        """Extract, deduplicate and return the SANs for the current round."""
        self.store_sans()
        return self.edit_san_file()
from enum import Enum, unique
import random
from statemachine import Machine
from . import messages as m
@unique
class Card(Enum):
    """Bonus Card."""
    # The three card types that can be collected and traded in (see Bonus).
    INFANTRY = 1
    CAVALRY = 2
    ARTILLERY = 3
class Bonus(Enum):
    """Trade in three cards for bonus troops.

    Each member's value is a tuple (required cards, troops awarded).
    """
    INFANTRY = ([Card.INFANTRY] * 3, 4)  # three infantry -> 4 troops
    CAVALRY = ([Card.CAVALRY] * 3, 6)  # three cavalry -> 6 troops
    ARTILLERY = ([Card.ARTILLERY] * 3, 8)  # three artillery -> 8 troops
    MIXED = ([Card.INFANTRY, Card.CAVALRY, Card.ARTILLERY], 10)  # one of each -> 10
class Player(object):
    """Per-game state for one participant, keyed by an opaque identifier.

    Equality and hashing delegate to the identifier, so Player objects can
    be compared against each other and used in sets/dicts.
    """

    def __init__(self, ident):
        # The player identifier (opaque; drives __eq__/__hash__).
        self.ident = ident
        # Number of countries this player owns.
        self.owned_countries = 0
        # Set when the player conquers a country; entitles them to draw a card.
        self.conquered_country_in_turn = False
        # Troops the player may still deploy in this turn.
        self.available_troops = 0
        # The player's bonus cards.
        self.cards = []

    def __eq__(self, other):
        return self.ident == other.ident

    def __ne__(self, other):
        return self.ident != other.ident

    def __hash__(self):
        return hash(self.ident)
class Action(object):
    """
    Base class for actions that can be performed during a game.
    This class models actions for a game and is supposed to be extended.
    Each action must fulfill certain preconditions before being executed.
    Use is_permitted() to check the preconditions for the action and
    execute() to apply the action to the current game state.
    An action instance is used during the whole game for every turn.
    At the start of every turn, next_turn() is called with the
    new current player. Before checks and execution, prepare() is called.
    To simplify communicating the result or effects of an action,
    answer() can be used to send messages to the players that are affected.
    Attributes:
        board, current_player, current_message, success
    Methods:
        prepare, is_permitted, execute, next_turn, answer
    """

    def __init__(self, board):
        self.board = board
        # Player whose turn it is; set via next_turn().
        self.current_player = None
        # Message being processed; set via prepare() before check/execute.
        self.current_message = None

    def prepare(self, message):
        """Extend this method to store properties of the message."""
        self.current_message = message

    def is_permitted(self, _):
        """Extend this method to check if the action is allowed."""
        # TODO: check if executing player is current player
        return True

    def execute(self, _):
        """Override this method to execute the action."""
        pass

    def next_turn(self, player):
        """Extend/ Override this method to prepare for the next turn."""
        self.current_player = player

    def answer(self, message, player=None):
        """
        Store an answer to be sent to the given player.
        If player is None, the current player is used as recipient.
        """
        if player is None:
            player = self.current_player
        self.current_message.add_answer(player.ident, message)

    @property
    def success(self):
        # Proxies the success flag on the message currently being processed.
        return self.current_message.success

    @success.setter
    def success(self, success):
        self.current_message.success = success
class BonusAction(Action):
    """Trade in a set of bonus cards for additional troops."""

    def prepare(self, message):
        super().prepare(message)
        # NOTE(review): the bonus is never read from the message, so
        # is_permitted() will crash on self.bonus.value — feature incomplete.
        self.bonus = None # TODO

    def is_permitted(self, _):
        # The player must hold every card of the combination they claim.
        player_cards = self.current_player.cards.copy()
        bonus_cards, _ = self.bonus.value
        # check if player has every card needed to redeem what he claims
        for card in bonus_cards:
            try:
                player_cards.remove(card)
            except ValueError:
                # player wants to trade in card but does not have it
                return False
        # player has all cards
        return True

    def execute(self, _):
        # Remove the traded cards and credit the bonus troops.
        player = self.current_player
        player_cards = player.cards
        bonus_cards, value = self.bonus.value
        player.available_troops += value
        for card in bonus_cards:
            player_cards.remove(card)
        self.success = True
class DeployAction(Action):
    """Place available troops onto a country the current player owns."""

    def prepare(self, message):
        """Cache the target country and troop count from the message."""
        super().prepare(message)
        self.country = self.board.country_for_name(message.country)
        self.troops = message.troops

    def is_permitted(self, _):
        """Allowed only on an owned country and within the troop budget."""
        owns_country = self.current_player == self.country.owner
        within_budget = self.current_player.available_troops >= self.troops
        return owns_country and within_budget

    def execute(self, _):
        """Move the troops from the player's pool onto the country."""
        self.country.troops += self.troops
        self.current_player.available_troops -= self.troops
        self.success = True
class AttackAction(Action):
    """Attack a foreign country from an owned one with a Risk-style dice fight."""

    def prepare(self, message):
        """Resolve origin/destination countries and the attacking troop count."""
        super().prepare(message)
        self.origin = self.board.country_for_name(message.origin)
        self.destination = self.board.country_for_name(message.destination)
        self.attack_troops = message.attack_troops

    def is_permitted(self, _):
        """Check ownership of both sides and the 1..3 attacking-dice limit."""
        return (self._current_player_is_owner(self.origin)
                and not self._current_player_is_owner(self.destination)
                # use > since one troop must remain on attacking country
                and self.origin.troops > self.attack_troops
                and self.attack_troops >= 1
                and self.attack_troops <= 3)

    def _current_player_is_owner(self, country):
        return self.current_player == country.owner

    def execute(self, _):
        """Roll the dice, apply losses, and hand over the country if conquered."""
        attacker = self.current_player
        defender = self.destination.owner
        attack_troops = self.attack_troops
        # The defender rolls at most 2 dice, never more than troops present.
        defend_troops = min(self.attack_troops, self.destination.troops, 2)
        attack_losses, defend_losses = self._fight_for_country(
            attack_troops, defend_troops
        )
        self.origin.troops -= attack_losses
        self.destination.troops -= defend_losses
        if self.destination.troops == 0:
            # defending country is conquered: transfer ownership and troops
            attacker.owned_countries += 1
            defender.owned_countries -= 1
            self.destination.troops = attack_troops
            self.destination.owner = attacker
            attacker.conquered_country_in_turn = True
            # NOTE(review): success is not set on this branch (as before);
            # confirm whether a conquest should also mark the message successful.
            self.answer(m.Conquered(self.destination), attacker)
            self.answer(m.Defeated(self.destination), defender)
        else:
            self.success = True
            self.answer(m.Defended(self.destination, defend_losses), defender)

    def _fight_for_country(self, attack_troops, defend_troops):
        """Pair the highest dice of both sides; ties favour the defender."""
        attack_dice = self._roll_dice(attack_troops)
        defend_dice = self._roll_dice(defend_troops)
        attack_losses = 0
        defend_losses = 0
        for (attack_score, defend_score) in zip(attack_dice, defend_dice):
            if attack_score > defend_score:  # attacker won
                defend_losses += 1
            else:  # defender won (defender wins ties)
                attack_losses += 1
        return attack_losses, defend_losses

    @staticmethod
    def _roll_dice(n):
        # BUG FIX: list.sort() sorts in place and returns None, so the old
        # "[...].sort(reverse=True)" returned None and zip() crashed in
        # _fight_for_country. sorted() returns the descending list itself.
        return sorted((random.randint(1, 6) for _ in range(n)), reverse=True)
class MoveAction(Action):
    """Relocate troops between two countries owned by the current player."""

    def prepare(self, message):
        """Resolve both countries and remember the troop count."""
        super().prepare(message)
        self.origin = self.board.country_for_name(message.origin)
        self.destination = self.board.country_for_name(message.destination)
        self.troops = message.troops

    def is_permitted(self, _):
        """Both countries must be owned and one troop must stay behind."""
        if not self._current_player_is_owner(self.origin):
            return False
        if not self._current_player_is_owner(self.destination):
            return False
        # strictly greater: at least one troop must remain in the origin
        return self.origin.troops > self.troops
        # TODO: do countries have to be neighbours/ connected?

    def _current_player_is_owner(self, country):
        return self.current_player == country.owner

    def execute(self, _):
        """Carry out the move; always succeeds once permitted."""
        self.origin.troops -= self.troops
        self.destination.troops += self.troops
        self.success = True
class GetCardAction(Action):
    """Draw one random bonus card after conquering a country this turn."""

    def is_permitted(self, _):
        # Only players that conquered at least one country this turn may draw.
        return self.current_player.conquered_country_in_turn

    def execute(self, _):
        # Uniform draw over all card types; the conquest flag is not reset
        # here — the state machine's attacking -> drew_card transition limits
        # drawing to once per turn.
        new_card = random.choice(list(Card))
        self.current_player.cards.append(new_card)
        self.success = True
class NextTurnAction(Action):
    """Rotate to the next player and reset their per-turn state."""

    def __init__(self, board, players, actions):
        super().__init__(board)
        # contains all players except the current one
        self.players = players[1:]
        # player whose turn it is now
        self.current_player = players[0]
        # the other actions; each is notified via next_turn() on rotation
        self.actions = actions

    def next_turn(self, player):
        # This action drives the turn change itself, so the notification is a no-op.
        pass

    def execute(self, _):
        # rotate list with current player
        self.players.append(self.current_player)
        self.current_player = self.players[0]
        self.players = self.players[1:]
        # TODO: probably more logic to set up the next player's turn
        player = self.current_player
        player.conquered_country_in_turn = False
        # NOTE(review): Risk normally grants owned_countries // 3 reinforcements;
        # dividing the leftover available_troops by 3 looks like a typo — confirm.
        player.available_troops = player.available_troops // 3
        for action in self.actions:
            action.next_turn(player)
class Logic(object):
    """
    Contains the Logic for a game of Risk.
    Each turn in a game is a sequence of actions by the current
    player. Each action is modelled by the Action class and used
    here in a state machine that allows all possible actions only
    if they are allowed according to the game's rules.
    Attributes:
        board, players, machine
    Public Methods:
        distribute_countries, is_ingame, kick
    """

    def __init__(self, board, players):
        """ Create a new Logic for the given board and players."""
        # Store the board of the game.
        self.board = board
        # Store all participating players, wrapped in Player objects.
        self.players = []
        for ident in players:
            self.players.append(Player(ident))
        self.distribute_countries()
        # State machine that checks if an action is
        # allowed for the current player right now
        # and if so performs the action.
        # states
        before_start = 'before_start' # before the first move
        start_of_turn = 'start_of_turn' # at the start of a player's turn
        got_bonus = 'got_bonus' # player traded cards for a bonus
        deploying = 'deploying' # player is deploying troops
        attacking = 'attacking' # player is attacking others
        drew_card = 'drew_card' # player drew a card after attacking
        moved = 'moved' # player moved troops
        # actions
        bonus = BonusAction(board)
        deploy = DeployAction(board)
        attack = AttackAction(board)
        get_card = GetCardAction(board)
        move = MoveAction(board)
        actions = [bonus, deploy, attack, get_card, move]
        # NOTE(review): 'players' here is the raw ident list, while the other
        # actions work with Player objects (self.players); NextTurnAction.execute
        # sets Player attributes, so this probably should be self.players — confirm.
        next_turn = NextTurnAction(board, players, actions)
        states = [before_start, start_of_turn, got_bonus,
                  deploying, attacking, moved, drew_card]

        def make_transition(trigger, source, dest, action):
            # Wires one Action's prepare / permission check / execute hooks
            # into a transitions-style transition dict.
            return {
                'trigger': trigger,
                'source': source,
                'dest': dest,
                'prepare': action.prepare,
                'conditions': action.is_permitted,
                'after': action.execute,
            }
        trans = [
            make_transition('bonus', start_of_turn, got_bonus, bonus),
            make_transition('deploy', [start_of_turn, got_bonus, deploying],
                            deploying, deploy),
            make_transition('attack', [deploying, attacking],
                            attacking, attack),
            make_transition('draw_card', attacking, drew_card, get_card),
            make_transition('move', [deploying, attacking, drew_card],
                            moved, move),
            make_transition('next_turn', [before_start, deploying, attacking,
                            drew_card, moved], start_of_turn, next_turn)
        ]
        self.machine = Machine(
            self,
            states=states,
            transitions=trans,
            initial=before_start,
            auto_transitions=False
        )

    def distribute_countries(self):
        """
        Distribute the board's countries to the participating players.
        Changes the owner attribute of the countries stored in the board.
        """
        # TODO: is this a fair distribution?
        # Round-robin assignment over a shuffled country list.
        num_players = len(self.players)
        countries = self.board.countries_list()
        random.shuffle(countries)
        for i, country in enumerate(countries):
            player = self.players[i % num_players]
            country.owner = player
            player.owned_countries += 1

    def is_ingame(self, player):
        """Check if a player participates in the game."""
        participants = self.players
        if not isinstance(player, Player):
            # compare by raw identifier when given an ident instead of a Player
            participants = map(lambda p: p.ident, participants)
        return player in participants

    def kick(self, player):
        """Kick a player from the game."""
        # TODO: remove player from game, board, etc.
        # TODO: unskip test
        print('kick idiot', self.players, player)
|
import pyttsx3
import components.greet as greet
import components.commands as commands
# Initialise the TTS engine and select the first installed voice.
# NOTE(review): 'sapi5' is the Windows speech driver — confirm target platform.
engine = pyttsx3.init('sapi5')
voices = engine.getProperty('voices')
engine.setProperty('voice', voices[0].id)
def speak(audio):
    """Speak the given text through the module-level TTS engine (blocks until done)."""
    engine.say(audio)
    engine.runAndWait()
if __name__ == "__main__":
    # Greet the user once, then process voice commands until interrupted.
    greeting = greet.greetings()
    speak(greeting)
    while True:
        commands.commands()
|
# Count overlapping occurrences of 'bob' in s (classic exercise).
# BUG FIX: the original index-juggling version reset its position after every
# match attempt and effectively re-checked the same characters, producing a
# wrong count. A length-3 sliding window is both correct and readable.
# The parenthesised print works under both Python 2 and 3.
s = 'bobobobstqfbobbdobboobobobobbmb'
count = sum(1 for i in range(len(s) - 2) if s[i:i + 3] == 'bob')
print("Number of times bob occurs is: " + str(count))
def doc(func):
    """Decorator that prints the wrapped function's help text before each call.

    BUG FIX: the original used print(help(func)); help() prints the page
    itself and returns None, so an extra "None" line was emitted every call.
    """
    def wrap(request):
        help(func)  # prints the pydoc page for the wrapped function
        return func(request)
    return wrap
@doc
def hello(request):
    """
    Quick smoke test for the doc decorator.
    :param request: arbitrary value, ignored
    :return: the string '123'
    """
    return '123'
if __name__ == '__main__':
    # Prints hello's help page (via the decorator); the '123' result is discarded.
    hello('ddd')
#!/usr/bin/env python
# coding=utf-8
from pymongo import MongoClient
from app import HOST, PORT, DATABASE, USERNAME, PASSWORD
def update_user(user):
    """Upsert a user document (matched on its 'id') into the account collection.

    Returns True on success and False on any database error (best effort,
    matching the original semantics).
    """
    client = MongoClient(host=HOST, port=int(PORT))
    db = client[DATABASE]
    if USERNAME is not None:
        db.authenticate(USERNAME, PASSWORD, DATABASE, mechanism='MONGODB-CR')
    try:
        # NOTE(review): collection.update() is deprecated in modern pymongo;
        # replace_one({'id': ...}, user, upsert=True) is the successor.
        db['account'].update(
            {
                'id': user['id']
            },
            user,
            upsert=True
        )
        return True
    except Exception:
        # BUG FIX (narrowed): the bare 'except:' also swallowed SystemExit and
        # KeyboardInterrupt; real errors still fall through to return False.
        pass
    return False
def get_user(id):
    """Fetch the account document with the given id.

    Returns a plain dict, or None when the user is absent or any database
    error occurs (best effort, matching the original semantics).
    """
    client = MongoClient(host=HOST, port=int(PORT))
    db = client[DATABASE]
    if USERNAME is not None:
        db.authenticate(USERNAME, PASSWORD, DATABASE, mechanism='MONGODB-CR')
    try:
        userinfo = db['account'].find_one({'id': id})
        if userinfo is not None:
            userinfo = dict(userinfo)
    except Exception:
        # narrowed from a bare except; treat any driver error as "not found"
        userinfo = None
    return userinfo
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-02-09 18:28
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated schema migration: replaces the AnswerRating/QuestionRating
    # join models with plain integer 'rating' fields on Answer and Question,
    # and adds a Category model that Question references via a nullable FK
    # (SET_NULL keeps questions alive when their category is deleted).

    dependencies = [
        ('qa', '0003_answerrating_questionrating'),
    ]
    operations = [
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(help_text='Enter the category', max_length=200)),
            ],
        ),
        migrations.RemoveField(
            model_name='answerrating',
            name='answer',
        ),
        migrations.RemoveField(
            model_name='answerrating',
            name='contributer',
        ),
        migrations.RemoveField(
            model_name='questionrating',
            name='contributer',
        ),
        migrations.RemoveField(
            model_name='questionrating',
            name='question',
        ),
        migrations.AddField(
            model_name='answer',
            name='rating',
            field=models.IntegerField(default=0, help_text='Rate the answer'),
        ),
        migrations.AddField(
            model_name='question',
            name='rating',
            field=models.IntegerField(default=0, help_text='Rate the question'),
        ),
        migrations.DeleteModel(
            name='AnswerRating',
        ),
        migrations.DeleteModel(
            name='QuestionRating',
        ),
        migrations.AddField(
            model_name='question',
            name='category',
            field=models.ForeignKey(blank=True, help_text='Choose category', null=True, on_delete=django.db.models.deletion.SET_NULL, to='qa.Category'),
        ),
    ]
|
# For each symmetric pair (arr[i], arr[t-1-i]) print the pair sum as two
# digits: tens then units. For odd t the middle element is skipped (as before).
t = int(input())
arr = list(map(int, input().split()))
for i in range(t // 2):
    # BUG FIX (idiom): the original shadowed the built-in 'sum'.
    pair_sum = arr[i] + arr[-i - 1]
    print(pair_sum // 10, pair_sum % 10)
|
def discount(price):
    """Return the price after applying a flat 5% discount."""
    return price * 0.95
|
#!/usr/bin/env python
# coding: utf-8
"""
Created on Tue Jun 10 15:56:48 2019
Modified Tue Jun 11 2019
@author: jnsofini
Program to convert the bin files from out phytopet system to castor data format cdf.
The bin file contains a sequence of data in the form [detector coincidence {ABCD}]
: [X_position det1 {ABCD}] : [Y_position det1 {ABCD}] : [Energy det1 {ABCD}]
: [X_position det2 {ABCD}] : [Y_position det2 {ABCD}] : [Energy det2 {ABCD}]
: [Time-Stamp {ABCD abcd jklm pqrs}].
"""
import os.path as path
# The input file is given below. It should be edited as desired. The output file has the same name as the input but with a .txt extension
# Path to the raw .bin acquisition file; edit per run.
infile = '/home/jnsofini/R2019/Data/Cyl-cont-rot-6-May8.bin'
# Seconds per clock tick (presumably the DAQ's 4 ns resolution -- TODO confirm).
time_units = 4e-9
default_path, input_file_name = path.split(infile)
# Output keeps the input's base name with a .txt extension, in the same folder.
oufile_name = path.splitext(input_file_name)[0]+".txt"
outfile = path.join(default_path, oufile_name)
#=====================================================================================================================================
# ### Helper functions implementation:
#
# There are three functions which include:
#
# 1. getCoincidence to return the coincidence event
# 2. getData which gets the coordinate of x,y position and energy e
# 3. getTime which returns the time at which the event occurred
#
#-------------------------------------------------------------------------------------------------------------------
# Get coincidence to read the coincidence and return a boolean mask with four entries representing det4,det3,det2,det1
def getCoincidence(single_line):
    """
    Decode the two coincidence bytes of an event.

    Parameters
    ----------
    single_line:
        2-byte buffer; each nibble encodes whether one of the four
        detectors fired (0xF means fired).

    Returns
    -------
    co_buffer, mask:
        co_buffer is the four nibble values in the order
        [hi(byte0), hi(byte1), lo(byte0), lo(byte1)];
        mask holds 1/0 flags, one per detector, in the same order.
    """
    byte0, byte1 = single_line[0], single_line[1]
    # Nibble order mirrors the original wire layout: high nibbles first.
    co_buffer = [(byte0 >> 4) & 0xF, (byte1 >> 4) & 0xF, byte0 & 0xF, byte1 & 0xF]
    mask = [1 if nibble == 15 else 0 for nibble in co_buffer]
    return co_buffer, mask
#-------------------------------------------------------------------------------------------------------------------
def getData(binary_file):
    """
    Read one detector's coordinate triple from the event stream.

    Parameters
    ----------
    binary_file:
        Open binary file positioned at an (x, y, e) record; six bytes
        are consumed, two per value.

    Returns
    -------
    list of int
        [x, y, e] where each value combines two bytes big-endian.
    """
    raw = binary_file.read(6)
    return [(raw[k] << 8) | (raw[k + 1] & 0xFF) for k in range(0, len(raw), 2)]
#-------------------------------------------------------------------------------------------------------------------
def getTime(binary_file):
    """
    Read the 8-byte big-endian time stamp of an event.

    Returns
    -------
    int
        The combined 64-bit time stamp; byte 0 is the most significant.
    """
    timestamp = binary_file.read(8)
    time = 0
    # Fold the eight bytes together, most significant first (shifts 56..0).
    for k, shift in zip(range(8), range(56, -1, -8)):
        time |= (timestamp[k] & 0xFF) << shift
    return time
#=====================================================================================================================================
# ## Main program:
# Converting any file from bin to text and then saving in the same folders. the following ask for the file names:
'''
Convert the binary event stream to a tab-separated text file: one event
per line, written as [detector mask, (x, y, e) per firing detector,
time relative to the first real event].
'''
with open(infile, "rb") as fbin:
    with open(outfile, "w") as ftext:
        #----Get first machine event which is not good and discard--------
        coin = fbin.read(2);
        _ , mask = getCoincidence(coin)
        # One coordinate triple per firing detector, then one time stamp.
        for k in range(sum(mask)):
            getData(fbin)
        getTime(fbin)
        #----------------------------------------------------------------
        #----The real first event and use it to set the time-------------
        coin = fbin.read(2);
        coincidence, mask = getCoincidence(coin)
        event_and_data = mask
        for k in range(sum(mask)):
            #read coordinates
            event_and_data.extend(getData(fbin))
        #times: the first event defines the time origin, so it is written as 0.
        initial_time = 0
        time = getTime(fbin)
        event_and_data.extend([initial_time])
        for events in event_and_data:
            ftext.write(str(events)+'\t')
        ftext.write('\n') # write new line to move to the next line and represent the next event
        initial_time = time
        coin = fbin.read(2) #proceed to next
        while coin:
            """Decode the coincidence mask, then read the coordinates and
            the time stamp for every remaining event."""
            coincidence, mask = getCoincidence(coin)
            event_and_data = mask
            for k in range(sum(mask)):
                #read coordinates
                event_and_data.extend(getData(fbin))
            #times, written relative to the first event in seconds
            time = getTime(fbin)
            event_and_data.extend([round((time - initial_time)*time_units, 6)])
            for events in event_and_data:
                ftext.write(str(events)+'\t')
            ftext.write('\n') # write new line to move to the next line and represent the next event
            #Proceed to the next event
            coin = fbin.read(2)
|
import network
import time
def connect_wifi(essid: str, password: str) -> bool:
    """Connect the MicroPython station interface to *essid*.

    Waits up to 5 seconds (polling once per second) for the connection
    to come up, then prints the interface configuration.

    Returns True when the interface ends up connected, False otherwise.
    """
    sta_if = network.WLAN(network.STA_IF)
    if not sta_if.isconnected():
        print('connecting to network...')
        sta_if.active(True)
        sta_if.connect(essid, password)
        # Countdown poll: check once per second for up to 5 seconds.
        for i in reversed(range(5)):
            if sta_if.isconnected():
                break
            print("Timing out in... " + str(i) + "s")
            time.sleep(1)
    # BUG FIX: the function previously returned False when the interface
    # was already connected on entry; report the actual state instead.
    connected = sta_if.isconnected()
    print('network config:', sta_if.ifconfig())
    return connected
def connect_wifi_ben() -> bool:
    """Connect to the hard-coded 'IPF' network and return the result.

    FIX: the return annotation was ``-> None`` although the function
    propagates connect_wifi's bool result.
    """
    return connect_wifi('IPF', 'fuckno1234')
|
# Convert a dog's age to "dog years": 10.5 human years per dog year for the
# first two years, then 4 human years per year afterwards.
dog_age = int(input("How old is your dog? "))
if dog_age > 2:
    dog_years = 2 * 10.5 + (dog_age - 2) * 4
else:
    dog_years = dog_age * 10.5
# Single print shared by both branches (was duplicated in each branch).
print("Your dog is", int(dog_years), "in dog years.")
|
# Generated by Django 2.2.4 on 2019-09-06 14:29
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: adds the Power model, removes the pei model,
    # and normalises every department's ``number`` column to IntegerField.
    dependencies = [
        ('webapp', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='Power',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=20)),
                ('qualification', models.CharField(max_length=50)),
                ('designation', models.CharField(max_length=50)),
                ('email', models.CharField(max_length=50)),
                ('number', models.IntegerField()),
            ],
        ),
        migrations.DeleteModel(
            name='pei',
        ),
        migrations.AlterField(
            model_name='bsc_chem',
            name='number',
            field=models.IntegerField(),
        ),
        migrations.AlterField(
            model_name='bsc_it',
            name='number',
            field=models.IntegerField(),
        ),
        migrations.AlterField(
            model_name='bsc_maths',
            name='number',
            field=models.IntegerField(),
        ),
        migrations.AlterField(
            model_name='bsc_phys',
            name='number',
            field=models.IntegerField(),
        ),
        migrations.AlterField(
            model_name='civil',
            name='number',
            field=models.IntegerField(),
        ),
        migrations.AlterField(
            model_name='etc',
            name='number',
            field=models.IntegerField(),
        ),
        migrations.AlterField(
            model_name='mecha',
            name='number',
            field=models.IntegerField(),
        ),
        migrations.AlterField(
            model_name='msc_chem',
            name='number',
            field=models.IntegerField(),
        ),
        migrations.AlterField(
            model_name='msc_maths',
            name='number',
            field=models.IntegerField(),
        ),
        migrations.AlterField(
            model_name='msc_phys',
            name='number',
            field=models.IntegerField(),
        ),
    ]
|
#!/usr/bin/env python3
import os.path
import tensorflow as tf
import helper
import warnings
from distutils.version import LooseVersion
import project_tests as tests
KEEP_PROB = 0.7          # dropout keep probability fed during training
LEARNING_RATE = 0.0001   # Adam learning rate fed during training
# Module-level placeholders shared by optimize()/train_nn() through run().
correct_label = tf.placeholder(tf.float32)
learning_rate = tf.placeholder(tf.float32)
keep_prob = tf.placeholder(tf.float32)
# Check TensorFlow Version
assert LooseVersion(tf.__version__) >= LooseVersion('1.0'), 'Please use TensorFlow version 1.0 or newer. You are using {}'.format(tf.__version__)
print('TensorFlow Version: {}'.format(tf.__version__))
# Check for a GPU
if not tf.test.gpu_device_name():
    warnings.warn('No GPU found. Please use a GPU to train your neural network.')
else:
    print('Default GPU Device: {}'.format(tf.test.gpu_device_name()))
def load_vgg(sess, vgg_path):
    """
    Load Pretrained VGG Model into TensorFlow.
    :param sess: TensorFlow Session
    :param vgg_path: Path to vgg folder, containing "variables/" and "saved_model.pb"
    :return: Tuple of Tensors from VGG model (image_input, keep_prob, layer3_out, layer4_out, layer7_out)
    """
    vgg_tag = 'vgg16'
    vgg_input_tensor_name = 'image_input:0'
    vgg_keep_prob_tensor_name = 'keep_prob:0'
    vgg_layer3_out_tensor_name = 'layer3_out:0'
    vgg_layer4_out_tensor_name = 'layer4_out:0'
    vgg_layer7_out_tensor_name = 'layer7_out:0'
    # FIX: use the named constants above instead of re-typed string literals
    # (they were previously defined but never used), and drop the unused
    # ``model`` binding.
    tf.saved_model.loader.load(sess, [vgg_tag], vgg_path)
    graph = tf.get_default_graph()
    image_input = graph.get_tensor_by_name(vgg_input_tensor_name)
    keep_prob = graph.get_tensor_by_name(vgg_keep_prob_tensor_name)
    layer3_out = graph.get_tensor_by_name(vgg_layer3_out_tensor_name)
    layer4_out = graph.get_tensor_by_name(vgg_layer4_out_tensor_name)
    layer7_out = graph.get_tensor_by_name(vgg_layer7_out_tensor_name)
    return image_input, keep_prob, layer3_out, layer4_out, layer7_out
tests.test_load_vgg(load_vgg, tf)
def layers(vgg_layer3_out, vgg_layer4_out, vgg_layer7_out, num_classes):
    """
    Create the layers for a fully convolutional network. Build skip-layers using the vgg layers.
    :param vgg_layer3_out: TF Tensor for VGG Layer 3 output
    :param vgg_layer4_out: TF Tensor for VGG Layer 4 output
    :param vgg_layer7_out: TF Tensor for VGG Layer 7 output
    :param num_classes: Number of classes to classify
    :return: The Tensor for the last layer of output
    """
    # 1x1 convolutions squash each VGG feature map down to num_classes channels.
    conv1x1_7 = tf.layers.conv2d(inputs=vgg_layer7_out, filters=num_classes,
                                 kernel_size=(1, 1), strides=(1, 1), name="conv1x1_7")
    conv1x1_4 = tf.layers.conv2d(inputs=vgg_layer4_out, filters=num_classes,
                                 kernel_size=(1, 1), strides=(1, 1), name="conv1x1_4")
    conv1x1_3 = tf.layers.conv2d(inputs=vgg_layer3_out, filters=num_classes,
                                 kernel_size=(1, 1), strides=(1, 1), name="conv1x1_3")
    # FCN-8 decoder.  BUG FIX: the skip sums were computed but never fed
    # forward (upsample2 consumed conv1x1_4 and the final layer consumed
    # conv1x1_3 directly, so the skip connections had no effect); chain
    # them properly instead.
    upsample1 = tf.layers.conv2d_transpose(inputs=conv1x1_7, filters=num_classes,
                                           kernel_size=(4, 4), strides=(2, 2),
                                           padding='same', name="upsample1")
    skip1 = tf.add(upsample1, conv1x1_4, name="skip1")
    upsample2 = tf.layers.conv2d_transpose(inputs=skip1, filters=num_classes,
                                           kernel_size=(4, 4), strides=(2, 2),
                                           padding='same', name="upsample2")
    # BUG FIX: the second skip was also named "skip1", colliding in the graph.
    skip2 = tf.add(upsample2, conv1x1_3, name="skip2")
    # Final x8 upsample back to the input resolution.
    return tf.layers.conv2d_transpose(inputs=skip2, filters=num_classes,
                                      kernel_size=(16, 16), strides=(8, 8),
                                      padding='same', name="upsample3")
tests.test_layers(layers)
def optimize(nn_last_layer, correct_label, learning_rate, num_classes):
    """
    Build the TensorFLow loss and optimizer operations.
    :param nn_last_layer: TF Tensor of the last layer in the neural network
    :param correct_label: TF Placeholder for the correct label image
    :param learning_rate: TF Placeholder for the learning rate
    :param num_classes: Number of classes to classify
    :return: Tuple of (logits, train_op, cross_entropy_loss)
    """
    # 2d from 4d, pixel x class: flatten spatial dims so each row is one pixel.
    logits = tf.reshape(nn_last_layer, (-1, num_classes),name="logits")
    labels = tf.reshape(correct_label, (-1, num_classes))
    # Per-pixel softmax cross entropy, averaged over all pixels.
    cross_entropy = tf.nn.softmax_cross_entropy_with_logits(logits = logits, labels = labels)
    cross_entropy_loss = tf.reduce_mean(cross_entropy)
    train_op = tf.train.AdamOptimizer(learning_rate).minimize(cross_entropy_loss)
    return logits, train_op, cross_entropy_loss
tests.test_optimize(optimize)
def train_nn(sess, epochs, batch_size, get_batches_fn, train_op, cross_entropy_loss, input_image,
             correct_label, keep_prob, learning_rate):
    """
    Train neural network and print out the loss during training.
    :param sess: TF Session
    :param epochs: Number of epochs
    :param batch_size: Batch size
    :param get_batches_fn: Function to get batches of training data.  Call using get_batches_fn(batch_size)
    :param train_op: TF Operation to train the neural network
    :param cross_entropy_loss: TF Tensor for the amount of loss
    :param input_image: TF Placeholder for input images
    :param correct_label: TF Placeholder for label images
    :param keep_prob: TF Placeholder for dropout keep probability
    :param learning_rate: TF Placeholder for learning rate
    """
    for epoch in range(epochs):
        i = 0; losses = []
        for image, label in get_batches_fn(batch_size):
            # KEEP_PROB / LEARNING_RATE are the module-level training constants.
            _, loss = sess.run([train_op, cross_entropy_loss],feed_dict = {\
                input_image: image, correct_label: label,\
                keep_prob: KEEP_PROB, learning_rate: LEARNING_RATE })
            i += 1
            print("-------------- ",i, ". partial loss: ", loss)
            losses.append(loss)
        # NOTE(review): raises ZeroDivisionError if get_batches_fn yields no
        # batches -- confirm the training data directory is non-empty.
        training_loss = sum(losses) / len(losses)
        print("--------------")
        print("epoch: ", epoch + 1, " of ", epochs, "training loss: ", training_loss)
        print("--------------")
tests.test_train_nn(train_nn)
def run():
    """Train the FCN on the KITTI road dataset and save inference samples."""
    num_classes = 2
    image_shape = (160, 576)
    epochs = 20
    batch_size = 1
    data_dir = './data'
    runs_dir = './runs'
    tests.test_for_kitti_dataset(data_dir)
    # Download pretrained vgg model
    helper.maybe_download_pretrained_vgg(data_dir)
    # OPTIONAL: Train and Inference on the cityscapes dataset instead of the Kitti dataset.
    # You'll need a GPU with at least 10 teraFLOPS to train on.
    #  https://www.cityscapes-dataset.com/
    with tf.Session() as sess:
        # Path to vgg model
        vgg_path = os.path.join(data_dir, 'vgg')
        # Create function to get batches
        get_batches_fn = helper.gen_batch_function(os.path.join(data_dir, 'data_road/training'), image_shape)
        # FIX: use vgg_path (was a duplicated hard-coded './data/vgg' literal).
        image_input, keep, layer3_out, layer4_out, layer7_out = load_vgg(sess, vgg_path)
        model_output = layers(layer3_out, layer4_out, layer7_out, num_classes)
        logits, train_op, cross_entropy_loss = optimize(model_output, correct_label, learning_rate, num_classes)
        sess.run(tf.global_variables_initializer())
        sess.run(tf.local_variables_initializer())
        saver = tf.train.Saver()
        # BUG FIX: feed the VGG graph's own keep_prob tensor (``keep``) rather
        # than the unused module-level placeholder, so the dropout layers
        # inside VGG actually receive KEEP_PROB during training.
        train_nn(sess, epochs, batch_size, get_batches_fn, train_op, cross_entropy_loss,
                 image_input, correct_label, keep, learning_rate)
        saver.save(sess, './save/model')
        helper.save_inference_samples(runs_dir, data_dir, sess, image_shape, logits, keep, image_input)
if __name__ == '__main__':
    run()
|
"""dnstools library."""
import ipaddress
from django.utils.translation import gettext as _
from modoboa.admin import lib as admin_lib
from . import constants
def _get_record_type_value(records, rr_type):
if records is None:
return None
for record in records:
# Multiple strings are separated by a space as described in:
# https://tools.ietf.org/html/rfc4408#section-3.1.3
value = str(record).replace('" "', '').strip('"')
if value.startswith('v={}'.format(rr_type)):
return value
return None
def get_spf_record(domain):
    """Return SPF record for domain (if any)."""
    records = admin_lib.get_dns_records(domain, "TXT")
    return _get_record_type_value(records, 'spf1')
def get_dkim_record(domain, selector):
    """Return DKIM record for domain (if any)."""
    # DKIM keys live under <selector>._domainkey.<domain>.
    name = "{}._domainkey.{}".format(selector, domain)
    records = admin_lib.get_dns_records(name, "TXT")
    return _get_record_type_value(records, 'DKIM1')
def get_dmarc_record(domain):
    """Return DMARC record for domain (if any)."""
    # DMARC policies live under _dmarc.<domain>.
    name = "_dmarc.{}".format(domain)
    records = admin_lib.get_dns_records(name, "TXT")
    return _get_record_type_value(records, 'DMARC1')
def _get_simple_record(name):
    """We just want to know if name is declared.

    Returns the first A/CNAME/AAAA record value for *name* (quotes
    stripped), or None when no record type yields anything.
    """
    for rdtype in ["A", "CNAME", "AAAA"]:
        records = admin_lib.get_dns_records(name, rdtype)
        if records is not None:
            break
    else:
        return None
    # BUG FIX: previously raised NameError when ``records`` was an empty
    # iterable, because ``value`` was never assigned before being returned.
    for record in records:
        return str(record).strip('"')
    return None
def get_autoconfig_record(domain):
    """Return autoconfig record for domain (if any)."""
    # Thunderbird-style autoconfiguration host.
    return _get_simple_record("autoconfig.{}".format(domain))
def get_autodiscover_record(domain):
    """Return autodiscover record for domain (if any)."""
    # Outlook-style autodiscover host.
    return _get_simple_record("autodiscover.{}".format(domain))
class DNSSyntaxError(Exception):
    """Custom exception for DNS errors."""
    pass
def check_spf_ip4(value):
    """Check syntax of ip4 mechanism.

    Raises DNSSyntaxError on malformed input; returns None when valid.
    """
    # Consistency fix: require the explicit "ip4:" prefix, exactly like
    # check_spf_ip6 requires "ip6:".
    if not value.startswith("ip4:"):
        raise DNSSyntaxError(_("Wrong ip4 mechanism syntax"))
    parts = value.split(":")
    if len(parts) != 2:
        raise DNSSyntaxError(_("Wrong ip4 mechanism syntax"))
    try:
        ipaddress.ip_network(parts[1], False)
    except ValueError:
        raise DNSSyntaxError(_("Wrong IPv4 address format"))
def check_spf_ip6(value):
    """Check syntax of ip6 mechanism."""
    if not value.startswith("ip6:"):
        raise DNSSyntaxError(_("Wrong ip6 mechanism syntax"))
    # Strip the mechanism prefix and validate the remaining network spec.
    network = value[len("ip6:"):]
    try:
        ipaddress.ip_network(network, False)
    except ValueError:
        raise DNSSyntaxError(_("Wrong IPv6 address format"))
def _check_domain_and_mask(value, mechanism):
    """Check for valid domain / mask.

    Accepts ``mech:domain``, ``mech:domain/mask`` and ``mech/mask`` forms;
    raises DNSSyntaxError otherwise or when the mask is not 0-32.
    """
    domain = None
    mask = None
    if ":" in value:
        mechanism, domain = value.split(":")
        if "/" in domain:
            domain, mask = domain.split("/")
    elif "/" in value:
        mechanism, mask = value.split("/")
    else:
        raise DNSSyntaxError(
            _("Invalid syntax for {} mechanism").format(mechanism))
    # Mask must be a plain decimal CIDR length no larger than 32.
    if mask and (not mask.isdigit() or int(mask) > 32):
        raise DNSSyntaxError(_("Invalid mask found {}").format(mask))
def check_spf_a(value):
    """Check syntax of a mechanism."""
    # Bare "a" is valid on its own; otherwise expect a:domain[/mask].
    if value == "a":
        return
    _check_domain_and_mask(value, "a")
def check_spf_mx(value):
    """Check syntax of mx mechanism."""
    # Bare "mx" is valid on its own; otherwise expect mx:domain[/mask].
    if value == "mx":
        return
    _check_domain_and_mask(value, "mx")
def _check_simple(value, mechanism):
"""Simple check."""
if value == mechanism:
return
parts = value.split(":")
if len(parts) != 2:
raise DNSSyntaxError(
_("Invalid syntax for {} mechanism").format(mechanism))
def check_spf_ptr(value):
    """Check syntax of ptr mechanism."""
    _check_simple(value, "ptr")
def check_spf_exists(value):
    """Check syntax of exists mechanism."""
    _check_simple(value, "exists")
def check_spf_include(value):
    """Check syntax of include mechanism."""
    _check_simple(value, "include")
def check_spf_syntax(record):
    """Check if record has a valid SPF syntax.

    Dispatches every mechanism to its check_spf_<name> validator and
    accepts only the ``redirect``/``exp`` modifiers; raises
    DNSSyntaxError on any violation.
    """
    if not record.startswith("v=spf1"):
        raise DNSSyntaxError(_("Not an SPF record"))
    parts = record.split(" ")[1:]
    modifiers = []
    mechanisms = []
    for part in parts:
        if part == "":
            continue
        # Strip the optional leading qualifier (+, -, ~, ?).
        # NOTE(review): ``qualifier`` is captured but never used afterwards.
        qualifier = None
        if part[0] in ["+", "-", "~", "?"]:
            qualifier = part[0]
            part = part[1:]
        if part == "all":
            continue
        for mechanism in constants.SPF_MECHANISMS:
            if part.startswith(mechanism):
                # Dispatch to the module-level check_spf_<mechanism> function.
                globals()["check_spf_{}".format(mechanism)](part)
                mechanisms.append(mechanism)
                break
        else:
            # Look for modifier
            modifier = part.split("=")
            if len(modifier) != 2:
                raise DNSSyntaxError(_("Unknown mechanism {}").format(part))
            if modifier[0] not in ["redirect", "exp"]:
                raise DNSSyntaxError(_("Unknown modifier {}").format(
                    modifier[0]))
            if modifier[0] in modifiers:
                raise DNSSyntaxError(_("Duplicate modifier {} found").format(
                    modifier[0]))
            modifiers.append(modifier[0])
    if not len(mechanisms) and not len(modifiers):
        raise DNSSyntaxError(_("No mechanism found"))
    return None
def check_dkim_syntax(record):
    """Check if record has a valid DKIM syntax.

    Returns the public key material found in the ``p=`` tag.
    """
    if not record.startswith("v=DKIM1"):
        raise DNSSyntaxError(_("Not a valid DKIM record"))
    key = None
    for raw_tag in record.split(";")[1:]:
        raw_tag = raw_tag.strip(" ")
        if not raw_tag:
            continue
        name, sep, tag_value = raw_tag.partition("=")
        if not sep:
            raise DNSSyntaxError(_("Invalid tag {}").format(raw_tag))
        if name.strip(" ") == "p":
            # The key may be split across several quoted chunks.
            key = "".join(chunk.strip('"') for chunk in tag_value.split(" "))
    if key is None:
        raise DNSSyntaxError(_("No key found in record"))
    return key
def check_dmarc_tag_string_value(tag, value):
    """Check if value is valid for tag.

    Validates against the ``values`` list or ``regex`` declared for the
    tag in constants.DMARC_TAGS.
    """
    tdef = constants.DMARC_TAGS[tag]
    error = _("Wrong value {} for tag {}").format(value, tag)
    if "values" in tdef and value not in tdef["values"]:
        raise DNSSyntaxError(error)
    elif "regex" in tdef and tdef["regex"].match(value) is None:
        raise DNSSyntaxError(error)
def check_dmarc_tag(tag, value):
    """Check if tag is valid.

    Dispatches on the tag's declared type: list, int (with optional
    min/max bounds) or plain string.
    """
    tdef = constants.DMARC_TAGS[tag]
    ttype = tdef.get("type", "string")
    if ttype == "list":
        # Each comma-separated element is validated independently.
        for svalue in value.split(","):
            check_dmarc_tag_string_value(tag, svalue)
    elif ttype == "int":
        error = _("Wrong value {} for tag {}:").format(value, tag)
        try:
            value = int(value)
        except ValueError:
            raise DNSSyntaxError(error + _(" not an integer"))
        if "min_value" in tdef and value < tdef["min_value"]:
            raise DNSSyntaxError(
                error + _(" less than {}").format(tdef["min_value"]))
        if "max_value" in tdef and value > tdef["max_value"]:
            raise DNSSyntaxError(
                error + _(" greater than {}").format(tdef["max_value"]))
    else:
        check_dmarc_tag_string_value(tag, value)
def check_dmarc_syntax(record):
    """Check if record has a valid DMARC syntax.

    Every ``name=value`` tag must be known and valid, and the required
    ``p`` (policy) tag must be present.
    """
    if not record.startswith("v=DMARC1"):
        raise DNSSyntaxError(_("Not a valid DMARC record"))
    tags = {}
    for tag in record.split(";")[1:]:
        if tag == "":
            continue
        tag = tag.strip(" ")
        parts = tag.split("=")
        if len(parts) != 2:
            raise DNSSyntaxError(_("Invalid tag {}").format(tag))
        name = parts[0].strip(" ")
        if name not in constants.DMARC_TAGS:
            raise DNSSyntaxError(_("Unknown tag {}").format(name))
        value = parts[1].strip(" ")
        check_dmarc_tag(name, value)
        tags[name] = value
    if "p" not in tags:
        raise DNSSyntaxError(_("Missing required p tag"))
|
from random import randint
from random import seed
def roll_dice():
    """ roll_dice
    Generate random string from '一' to '六'
    Arguments: None
    Returns: a string
    """
    # ASCII-art dice faces drawn with the Chinese numerals 一..六.
    # NOTE(review): the art relies on exact spacing inside the string
    # literals -- do not reformat these lines.
    chinese_dice_book = {
        1:'\
\n\
\n\
\n\
一一一一一一一一\n\
\n\
\n',
        2:'\
\n\
二二二二二二 \n\
\n\
\n\
二二二二二二二二\n\
\n',
        3:'\
\n\
三三三三三三 \n\
\n\
三三三三 \n\
\n\
三三三三三三三三\n',
        4:'\
四四四四四四四四\n\
四 四 四 四\n\
四 四 四 四\n\
四四 四四\n\
四 四\n\
四四四四四四四四\n',
        5:'\
五五五五五五五\n\
五 \n\
五五五五五五 \n\
五 五 \n\
五 五 \n\
五五五五五五五五\n',
        6:'\
六 \n\
六 \n\
六六六六六六六六\n\
六 六 \n\
六 六 \n\
六 六 \n',
    }
    # Uniformly pick one of the six faces.
    roll = chinese_dice_book[randint(1, 6)]
    return roll
if __name__ == "__main__":
    # Seed from the system clock, then print one roll.
    seed()
    print(roll_dice())
#!/usr/bin/env python2
# -*- coding: UTF-8 -*-
# File: config_example.py
# Date: Fri Feb 21 12:21:57 2014 +0800
# Author: Yuxin Wu <ppwwyyxxc@gmail.com>
from os import path
# Example bot configuration: chat commands that switch the device on/off,
# and a scratch directory located next to this file.
config = {'onpassword': '/haha',
          'offpassword': '/hehe',
          'temp_exe_path': path.join(path.dirname(path.abspath(__file__)), 'temp')}
|
from django.test import TestCase
from organisations.tests.factories import (
DivisionGeographyFactory,
OrganisationDivisionFactory,
OrganisationDivisionSetFactory,
OrganisationFactory,
)
class TestElectionIDs(TestCase):
    """Smoke tests: each factory builds objects whose slugs follow the
    ``org-<official_identifier>`` convention."""
    def test_organisation_factory(self):
        o = OrganisationFactory()
        assert o.slug == "org-{}".format(o.official_identifier)
    def test_organisation_division_set_factory(self):
        ods = OrganisationDivisionSetFactory()
        assert ods.organisation.slug.startswith("org-")
    def test_organisation_division_factory(self):
        od = OrganisationDivisionFactory()
        assert od.organisation.slug.startswith("org-")
    def test_division_geography_factory(self):
        # Only asserts that construction succeeds without raising.
        DivisionGeographyFactory()
|
import numpy
# Read "rows cols" from stdin and print an identity matrix of that shape.
entry = list(map(int, input().split()))
# FIX: numpy.eye already returns an ndarray, so the extra numpy.array copy
# the original made was redundant and has been dropped.
print(numpy.eye(entry[0], entry[1], k=0))
import unittest
import homoglyphs2ascii
class TestHomoglyphs(unittest.TestCase):
    # Visually identical strings: Cyrillic М/А/Р/К vs Latin M/A/P/K.
    cyrillic = 'МАРК8'
    latin = 'MAPK8'
    def test_latin_and_cyrillic_are_homoglyphs(self):
        # Different code points, so a plain comparison must fail.
        self.assertNotEqual(self.cyrillic, self.latin)
    def test_to_ascii_makes_equal(self):
        # After normalisation both spellings collapse to the same ASCII form.
        self.assertEqual(homoglyphs2ascii.homoglyphs2ascii(
            self.cyrillic), homoglyphs2ascii.homoglyphs2ascii(self.latin))
if __name__ == '__main__':
    unittest.main()
|
import copy
import typing
import random
import numpy as np
from pylo import Image
from pylo import CameraInterface
class DummyCamera(CameraInterface):
    """This class represents a dummy camera that records images with random
    data.
    Attributes
    ----------
    tags : dict
        Any values that should be saved for the camera
    controller : Controller
        The controller
    imagesize : tuple of int
        The image size
    use_dummy_images : bool
        Whether to use image objects created from the `DummyImage` class or
        normal `pylo.Image` objects
    """
    def __init__(self, *args, **kwargs) -> None:
        """Create a new camera interface object."""
        super(DummyCamera, self).__init__(*args, **kwargs)
        self.imagesize = (32, 32)
        self.tags = {"Camera": "Dummy Camera"}
        self.use_dummy_images = False
    def recordImage(self, additional_tags: typing.Optional[dict]=None, **kwargs) -> "Image":
        """Get the image of the current camera.
        Parameters
        ----------
        additional_tags : dict, optional
            Additonal tags to add to the image, note that they will be
            overwritten by other tags if there are set tags in this method
        Returns
        -------
        Image
            The image object
        """
        # Random 8-bit grayscale image of the configured size.
        image_data = np.random.rand(self.imagesize[0], self.imagesize[1])
        image_data = (image_data * 255).astype(dtype=np.uint8)
        if isinstance(additional_tags, dict):
            # Deep-copy so the caller's dict is never mutated.
            image_tags = copy.deepcopy(additional_tags)
        else:
            image_tags = {}
        # Add 2-6 random tags keyed 'A', 'B', ... with int or str values.
        for i in range(random.randint(2, 6)):
            if random.randint(0, 1) == 0:
                image_tags[chr(i + 65)] = random.randint(0, 65535)
            else:
                image_tags[chr(i + 65)] = "Test value {}".format(i)
        if self.use_dummy_images:
            return DummyImage(image_data, image_tags)
        else:
            return Image(image_data, image_tags)
    def resetToSafeState(self) -> None:
        # Nothing to reset for a dummy camera.
        pass
class DummyImage(Image):
    """An image object that cannot save itself."""
    # FIX: the docstring above was previously a no-op string expression
    # placed after super().__init__() inside __init__; it is now a proper
    # class docstring.
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
    def saveTo(self, *args, **kwargs):
        """Do nothing: dummy images are never written to disk."""
        pass
import argparse
def load_arg_parser():
    """Build the full CLI parser and parse sys.argv into a Namespace."""
    parser = argparse.ArgumentParser()
    parser = add_base_args(parser)
    parser = add_gnn_args(parser)
    args = parser.parse_args()
    return args
def add_base_args(parser):
    """Register experiment-wide options on *parser* and return it."""
    add = parser.add_argument
    add('--gnn', type=str, default='GCN', choices=['GCN', 'GAT'])
    add('--hidden_dim', type=int, default=100)
    add('--num_layers', type=int, default=2)
    add('--num_epochs', type=int, default=300)
    add('--lr', type=float, default=0.001)
    add('--early_stop', type=int, default=0)
    add('--visualize', action='store_true')
    add('--seed', type=int, default=2020)
    return parser
def add_gnn_args(parser):
    """Register GNN hyper-parameters on *parser* and return it."""
    add = parser.add_argument
    add('--dropout', type=float, default=0.6)
    # GCN takes no extra arguments; the following apply to GAT only.
    add('--num_heads', type=int, default=4)
    add('--merge', type=str, default='cat', choices=['cat', 'mean'])
    return parser
# CATEGORY DROPDOWN MENU SHORTCUT - for views.py
# def get_context_data(self, *args, **kwargs):
# cat_menu = Categories.objects.all()
# context = super(HomeView, self).get_context_data(*args, **kwargs)
# context['cat_menu'] = cat_menu
# return context |
class Solution:
    def minBitFlips(self, start: int, goal: int) -> int:
        """Return the number of bit positions where start and goal differ."""
        # XOR marks exactly the differing bits; counting its set bits handles
        # integers of any size (the previous fixed range(31) loop silently
        # ignored bit 31 and above).
        return bin(start ^ goal).count("1")
|
"""
Unit tests. Run with `pytest`.
"""
from lib import *
import pytest, os
def test_settings_paths():
    """
    Test whether each of the settings paths resolve to locations that exist.
    """
    # NOTE: Python 2 file -- dict.iteritems() and print statements below.
    settings = loadSettings()
    for k,v in settings.iteritems():
        path, _ = os.path.split(v)
        print "Testing setting {} with base path '{}'".format(k, path)
        if path:
            assert(os.path.exists(path))
def test_scrubPrefixes():
    # Honorific prefixes (Mr., Dr, Mrs, ...) must be removed; bare names pass through.
    names = ["Mr. Winnie", "Dr Crypto", "Mr Du-Pont", "Mrs De Marisole", "Paul"]
    expected = ["Winnie", "Crypto", "Du-Pont", "De Marisole", "Paul"]
    actual = [scrubPrefixes(x) for x in names]
    assert(expected == actual)
def test_scrubSuffixes():
    # Generational/degree suffixes (III, Jr., MA) must be removed, with whitespace tolerated.
    names = ["Rodriguez", "Walters III", "Hinami Jr. ", "Fausto Patel MA", "Sanchez-Johnson"]
    expected = ["Rodriguez", "Walters", "Hinami", "Fausto Patel", "Sanchez-Johnson"]
    actual = [scrubSuffixes(x) for x in names]
    assert(expected == actual)
def test_normalize():
    """
    Simple known-answer tests
    """
    assert(normalize(["Avery","Bales"]) == ["AVERY", "BALES"])
    assert(normalize(["Idell", "Leggett"]) == ["IDELL", "LEGGETT"])
    assert(normalize(["Farah", "Sharkey"]) == ["FARAH", "SHARKEY"])
    assert(normalize(["Alla", "Creamer"]) == ["ALLA", "CREAMER"])
def test_normalizeWithNone():
    """
    Tests normalize if one param is None
    """
    # None entries are dropped rather than producing an empty string.
    assert(normalize([None,"Bales"]) == ["BALES"])
    assert(normalize(["Idell", None]) == ["IDELL"])
def test_normalizeNonAlpha():
    """
    Tests that normalize handles non-alphanumeric characters
    """
    assert(normalize(["Ave3ry","Bales1"]) == ["AVERY","BALES"])
    assert(normalize(["Idell", "Legg`ett"]) == ["IDELL","LEGGETT"])
    assert(normalize(["2Farah", "Sharkey"]) == ["FARAH","SHARKEY"])
    assert(normalize(["Alla", "Creamer4"]) == ["ALLA","CREAMER"])
    # Includes a non-ASCII byte to exercise the encoding-scrub path.
    assert(normalize(["Lavinia\xe2", "Barnhart"]) == ["LAVINIA","BARNHART"])
def test_normalizeSpaces():
    """
    Tests that normalize removes whitespace: spaces, tabs, newlines
    """
    assert(normalize(["Florance", " Arevalo"]) == ["FLORANCE","AREVALO"])
    assert(normalize(["Trinidad ", "Langley\n"]) == ["TRINIDAD","LANGLEY"])
    assert(normalize(["Ro mona", "Da ly"]) == ["ROMONA","DALY"])
    assert(normalize([" El freda ", " Micha ud "]) == ["ELFREDA","MICHAUD"])
    assert(normalize([" Tamela", "Garris"]) == ["TAMELA","GARRIS"])
def test_normalizePrefixSuffix():
    """
    Tests that normalize also scrubs prefixes and suffixes
    """
    assert(normalize(["Dr Avery","Bales"]) == ["AVERY","BALES"])
    assert(normalize([" Idell", " Leggett III "]) == ["IDELL","LEGGETT"])
    assert(normalize(["Farah", "Sharkey Jr "]) == ["FARAH","SHARKEY"])
    assert(normalize(["Mrs Alla", "Creamer"]) == ["ALLA","CREAMER"])
    assert(normalize(["Ms. Lavinia ", "Barnhart"]) == ["LAVINIA","BARNHART"])
def test_alternateDates():
    d = date(2000, 5, 20)
    # NOTE(review): swapMonthDay=True would yield month 20, which is invalid,
    # so no swapped date appears in the expected set.
    expected = set([
        # year +-10
        date(1990, 5, 20),
        date(2010, 5, 20),
        # day +-1
        date(2000, 5, 19),
        date(2000, 5, 21)
    ])
    actual = alternateDates(d,
        dayOffsets=[1,-1],
        yearOffsets=[10,-10],
        swapMonthDay=True)
    assert(set(actual) == expected)
def test_alternateDatesSwap():
    # With offsets disabled, only the month/day-swapped variant remains.
    founded = date(1848, 6, 5)
    actual = [x for x in alternateDates(founded, yearOffsets=None, dayOffsets=None)]
    assert(actual == [date(1848, 5, 6)])
def test_alternateDatesOutOfRange():
    """
    Ensures that alternateDates exclude dates that are out of range.
    """
    # NOTE: Python 2 file -- the leading-zero literals below are legal there.
    founded = date(1817, 01, 29)
    expected = [date(1817, 01, 24)]
    actual = [x for x in alternateDates(founded, yearOffsets=None,
        dayOffsets=[-5,+5], swapMonthDay=False)]
    assert(actual == expected)
def test_plusminus():
    """
    Known-answer tests
    """
    # plusminus(5, 5) yields 0..10 excluding the center value itself.
    assert([x for x in plusminus(5,5)] == [0,1,2,3,4,6,7,8,9,10])
def test_dt():
    """
    Known-answer tests
    """
    assert(dt('2000-12-31') == date(2000, 12, 31))
    assert(dt('1955-05-07') == date(1955, 5, 7))
def test_dtBadDate():
    """
    Dates with out-of-range values
    """
    with pytest.raises(ValueError):
        dt('1999-31-12')
    with pytest.raises(ValueError):
        dt('99-12-12')
    # NOTE(review): this repeats the first case verbatim -- possibly a third
    # distinct bad input was intended here.
    with pytest.raises(ValueError):
        dt('1999-31-12')
def test_pkDecryptWithPubkey():
    # Decrypting with the PUBLIC key must fail cleanly, not raise.
    message = "Three may keep a secret, if two of them are dead."
    jee = publicKeyEncrypt("samples/testkey-public.pem", message)
    err, recoveredMessage = publicKeyDecrypt("samples/testkey-public.pem", jee)
    print "err: ", err
    print "recoveredMessage: ", recoveredMessage
    assert(recoveredMessage is None)
    assert(err == publicKeyDecryptError)
def test_pkEncryptRoundTrip():
    # Encrypt with the public key, decrypt with the matching private key.
    message = "Three may keep a secret, if two of them are dead."
    jee = publicKeyEncrypt("samples/testkey-public.pem", message)
    err, recoveredMessage = publicKeyDecrypt("samples/testkey-private.pem", jee)
    assert(err is None)
    assert(message == recoveredMessage)
# This test is slow -- disabling
def _test_createKeyPair():
    createPubkeyPair("./testkey")
def test_aesEncryptRoundTrip():
    message = "This is a long message that we want to verify can be decrypted exactly"
    key, ctext = aesEncrypt(message)
    recoveredMessage = aesDescrypt(key, ctext)
    assert(message == recoveredMessage)
def test_aesDetectsCtextError():
    # Flipping one ciphertext byte must break the authentication tag.
    message = "This message is expected to fail verification"
    key, ctext = aesEncrypt(message)
    ctext = ctext[:20] + b'\x55' + ctext[21:]
    # NOTE(review): pytest.raises(message=...) was removed in pytest 4+;
    # modern pytest would need match= instead -- confirm pinned version.
    with pytest.raises(ValueError, message="MAC check failed"):
        recoveredMessage = aesDescrypt(key, ctext)
# -*- coding: utf-8 -*-
import scrapy
from selenium import webdriver
from scrapy.http import request
from urllib import parse
from spider.cctv.cctv.items import CctvWorldItem
from spider.views import get_md5,clean_tag
import datetime
class NewsWorldSpider(scrapy.Spider):
    """Spider for the CCTV world-news section.

    Crawls the listing at ``start_urls``, follows each article link and
    persists one ``CctvWorldItem`` per article.
    """
    name = 'news_world'
    # Fix: allowed_domains entries must be bare host names; with the
    # "http://" scheme prefix scrapy's offsite middleware filters out
    # every followed request.
    allowed_domains = ['news.cctv.com']
    start_urls = ['http://news.cctv.com/world/']
    agent = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36'
    headers = {
        "HOST": "news.cctv.com",
        "Referer": "http://news.cctv.com/world/",
        "User-Agent": agent
    }

    def __init__(self):
        # Selenium Chrome driver, presumably for JS-rendered pages
        # (not used in the methods visible here).
        self.brower = webdriver.Chrome(executable_path="D:/python/chromedriver.exe")
        super(NewsWorldSpider, self).__init__()

    def parse(self, response):
        # Parse the listing page: one node per article teaser.
        url_nodes = response.css(".col_w660 .main #leftContent .ecoA9805_con02 ")
        for url in url_nodes:
            post_url = url.css("h3 .l a::attr(href)").extract_first()
            title = url.css("h3 .l a::text").extract_first()
            image_url = url.css(".text_box .l a img::attr(src)").extract_first()
            summary = url.css(".text_box p::text").extract_first()
            label = url.css(".text_box h4 a::text").extract()
            release_time = url.css(".text_box h5 i::text").extract_first()
            print(post_url)
            # Carry the teaser metadata to the detail page via meta.
            yield request.Request(url=parse.urljoin(response.url, post_url), callback=self.parse_detail,
                                  meta={'front_image_url': image_url, "title": title, "summary": summary,
                                        'label': label, 'release_time': release_time}, headers=self.headers,
                                  dont_filter=True)

    def parse_detail(self, response):
        # Extract a single article into a CctvWorldItem and save it.
        world_info = CctvWorldItem()
        world_info['url'] = response.url
        world_info['news_id'] = get_md5(response.url)
        world_info['title'] = response.meta.get("title", "")
        world_info['from_news'] = response.css(".cnt_bd .function .info i a::text").extract_first("央视网")
        world_info['summary'] = response.meta.get("summary", "")
        world_info['label'] = ','.join(response.meta.get("label", ""))
        world_info['release_time'] = response.meta.get("release_time", "")
        world_info['front_image_url'] = response.meta.get("front_image_url", "")
        world_info['content_image_url'] = response.css("p img::attr(src)").extract_first()
        world_info['content'] = clean_tag(response.css(".cnt_bd p::text").extract())
        # Fallback when the body is only a Flash-player placeholder.
        # NOTE(review): the nested overwrite below replaces the first fallback
        # whenever it is non-empty -- confirm this is the intended behaviour.
        if world_info['content']== '请点此安装最新Flash' :
            world_info['content'] = response.xpath("/html/body/div[12]/div[1]/div[1]/p[4]/span/text()").extract_first()
            if world_info['content']:
                world_info['content'] = response.xpath("/html/body/div[12]/div[1]/div[1]/p[4]/span[2]/text()").extract_first()
        world_info['create_date'] = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        world_info.save()
        # Re-request the same URL routed back to parse() to continue crawling.
        yield scrapy.Request(response.url, callback=self.parse, headers=self.headers)
|
import aiohttp
import asyncio
import pydantic
from typing import Dict, List
class ProxyServer(pydantic.BaseModel):
    """Validated view of one proxy record returned by the foxtools API."""
    # NOTE(review): field meanings inferred from names and from the filters in
    # list_proxy (country['iso3166a2'] is read there) -- confirm against the
    # http://api.foxtools.ru/v2/Proxy/ response schema.
    country: Dict[str, str]
    ip: str
    anonymity: str
    uptime: float
    port: int
async def request(url):
    """GET *url* and return the response body decoded as JSON."""
    async with aiohttp.ClientSession() as http:
        async with http.get(url) as response:
            payload = await response.json()
            return payload
class ProxyCreator:
    # NOTE(review): this class is shadowed by the second ``class ProxyCreator``
    # defined below, so this ``list_proxy`` is unreachable through the
    # module-level name.
    def list_proxy(self, country=None, port=None) -> List[dict]:
        """Return proxies from foxtools, optionally filtered by ISO-3166
        country code and/or port.  (Annotation says List[dict], but the
        actual return is a list of ProxyServer models.)"""
        # ``ProxyCreator`` resolves at call time to the *later* class, which
        # is the one providing ``create_list_proxy``.
        list_proxy = ProxyCreator().create_list_proxy()
        list_proxy = [ProxyServer(**proxy) for proxy in list_proxy]
        if type(country) == str:
            country = country.upper()
            list_proxy = list(filter(lambda x: x.country['iso3166a2'] == country, list_proxy))
        if type(port) == int or type(port) == str:
            port = int(port)
            list_proxy = list(filter(lambda x: x.port == port, list_proxy))
        return list_proxy
class ProxyCreator:
    """Fetches proxy records from the foxtools API and filters them.

    Fix: the module previously declared ``ProxyCreator`` twice, so this later
    definition (which only had ``create_list_proxy``) hid the earlier one that
    carried ``list_proxy``.  Both methods are merged here so either entry
    point works on the name that is actually visible at runtime.
    """

    def create_list_proxy(self):
        """Return the raw proxy dicts from the foxtools API (blocking call)."""
        res = asyncio.run(request('http://api.foxtools.ru/v2/Proxy/'))['response']['items']
        return res

    def list_proxy(self, country=None, port=None) -> List[dict]:
        """Return ProxyServer models, optionally filtered by ISO-3166 country
        code (case-insensitive) and/or port."""
        proxies = [ProxyServer(**proxy) for proxy in self.create_list_proxy()]
        if isinstance(country, str):
            country = country.upper()
            proxies = [p for p in proxies if p.country['iso3166a2'] == country]
        if isinstance(port, (int, str)):
            port = int(port)
            proxies = [p for p in proxies if p.port == port]
        return proxies
if __name__ == '__main__':
    # Fix: ``ProxyO`` was never defined (NameError); the intended class is
    # ProxyCreator.
    proxy_creator = ProxyCreator()
    # Unfiltered, country-filtered, and port-filtered listings.
    print(proxy_creator.list_proxy())
    print(proxy_creator.list_proxy(country='RU'))
    print(proxy_creator.list_proxy(port=80))
|
import unittest
import sys
import os
from abc import ABC, abstractmethod

# Fix: extend sys.path BEFORE importing project modules -- previously the
# drawers import ran first and could not be resolved from the package root.
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..'))

from drawers.ASCIIDrawer import ASCIIDrawer

# Shared TestCase instance, used only for its assert* helper methods.
tc = unittest.TestCase('__init__')
class AbstractBaseGeneratorTest(ABC):
    """Shared test battery for maze generators.

    Concrete subclasses implement ``setUp`` and are expected to provide
    ``self.maze``, ``self.generator``, ``self.size``, ``self.seed`` and
    ``self.log`` (assumed from usage below -- confirm in each subclass).

    Fixes: ``assertEquals`` (deprecated alias, removed in Python 3.12) is
    replaced by ``assertEqual``; the local name ``exit`` no longer shadows
    the builtin.
    """

    @abstractmethod
    def setUp(self):
        pass

    # check whether the maze has a valid entry point
    def test_valid_entry(self):
        self.log.debug("test_valid_entry")
        entry = self.maze.get_entrance()
        tc.assertIsNotNone(entry, 'Entry is None.')
        tc.assertTrue(entry.get_x() == 0 or entry.get_y() == 0,
                      'Entry is neither on the left nor on top side of the Maze')

    # check whether the maze has only one entry point
    def test_one_entry(self):
        self.log.debug("test_one_entry")
        entries = 0
        # Count openings along the top row and the left column.
        for i in range(self.size):
            cell = self.maze.get_cell(i, 0)
            if cell.get_left().is_removed():
                entries += 1
            cell = self.maze.get_cell(0, i)
            if cell.get_top().is_removed():
                entries += 1
        # Draws the maze for debugging when the assertion below fails.
        drawer = ASCIIDrawer()
        drawer.draw_maze(self.maze)
        tc.assertEqual(entries, 1, 'There is more than one entry')

    # check whether the maze has a valid exit point
    def test_valid_exit(self):
        self.log.debug("test_valid_exit")
        maze_exit = self.maze.get_exit()
        tc.assertIsNotNone(maze_exit, 'Exit is None.')
        tc.assertTrue(maze_exit.get_x() == self.size - 1 or maze_exit.get_y() == self.size - 1,
                      'Exit is neither on the right nor on the bottom side of the Maze')

    # check whether the maze has only one exit point
    def test_one_exit(self):
        self.log.debug("test_one_exit")
        exits = 0
        # Count openings along the bottom row and the right column.
        for i in range(self.size):
            cell = self.maze.get_cell(i, self.size - 1)
            if cell.get_right().is_removed():
                exits += 1
            cell = self.maze.get_cell(self.size - 1, i)
            if cell.get_bottom().is_removed():
                exits += 1
        tc.assertEqual(exits, 1, 'There is more than one exit')

    def test_custom_entry_exit(self):
        self.log.debug("test_custom_entry_exit")
        maze = self.generator.generate_custom_maze(self.size, 0, 3, 4, 4, seed=self.seed)
        entry = maze.get_entrance()
        tc.assertEqual(entry.get_x(), 0, 'Custom entry is not set correctly')
        tc.assertEqual(entry.get_y(), 3, 'Custom entry is not set correctly')
        maze_exit = maze.get_exit()
        tc.assertEqual(maze_exit.get_x(), 4, 'Custom exit is not set correctly')
        tc.assertEqual(maze_exit.get_y(), 4, 'Custom exit is not set correctly')

    def test_custom_entry_exit_with_coordinates(self):
        self.log.debug("test_custom_entry_exit_with_coordinates")
        maze = self.generator.generate_custom_maze(self.size, 0, 3, 2, 0, seed=self.seed, coordinates=True)
        entry = maze.get_entrance()
        tc.assertEqual(entry.get_x(), 1, 'Custom entry is not set correctly')
        tc.assertEqual(entry.get_y(), 0, 'Custom entry is not set correctly')
        maze_exit = maze.get_exit()
        tc.assertEqual(maze_exit.get_x(), 4, 'Custom exit is not set correctly')
        tc.assertEqual(maze_exit.get_y(), 2, 'Custom exit is not set correctly')
|
import lasagne
import numpy as np
from braindecode.analysis.kaggle import (transform_to_time_activations,
transform_to_cnt_activations)
import logging
from braindecode.veganlasagne.layers import create_pred_fn,\
get_input_time_length, get_n_sample_preds, get_all_paths
from braindecode.datahandling.batch_iteration import compute_trial_start_end_samples
log = logging.getLogger(__name__)
def compute_trial_acts(model, i_layer, iterator, train_set):
    """Compute activations per trial per sample for given layer of the model.

    Parameters
    ----------
    model: Lasagne layer
        Final layer of the model.
    i_layer: int
        Index of layer to compute activations for.
    iterator: DatasetIterator
        Iterator to get batches from.
    train_set: Dataset (Cnt)
        Dataset to use.

    Returns
    -------
    trial_acts: 3darray of float
        Activations per trial per sample. #trialx#channelx#sample
    """
    # compute number of inputs per trial
    i_trial_starts, i_trial_ends = compute_trial_start_end_samples(
        train_set.y, check_trial_lengths_equal=True,
        input_time_length=iterator.input_time_length)
    # +1 since trial ends is inclusive
    n_trial_len = i_trial_ends[0] - i_trial_starts[0] + 1
    # Windows needed to cover one trial when each window predicts
    # n_sample_preds samples.
    n_inputs_per_trial = int(np.ceil(n_trial_len / float(iterator.n_sample_preds)))
    log.info("Create theano function...")
    all_layers = lasagne.layers.get_all_layers(model)
    all_out_fn = create_pred_fn(all_layers[i_layer])
    # Sanity check: iterator and model must agree on the window geometry.
    assert(iterator.input_time_length == get_input_time_length(model))
    assert(iterator.n_sample_preds == get_n_sample_preds(model))
    log.info("Compute activations...")
    all_outs_per_batch = [all_out_fn(batch[0])
          for batch in iterator.get_batches(train_set, False)]
    # NOTE(review): iterates the dataset a second time just to read batch
    # sizes; could be collected during the first pass.
    batch_sizes = [len(batch[0]) for batch in iterator.get_batches(train_set, False)]
    all_outs_per_batch = np.array(all_outs_per_batch)
    n_trials = len(i_trial_starts)
    log.info("Transform to trial activations...")
    trial_acts = get_trial_acts(all_outs_per_batch,
                                batch_sizes, n_trials=n_trials,
                                n_inputs_per_trial=n_inputs_per_trial,
                                n_trial_len=n_trial_len,
                                n_sample_preds=iterator.n_sample_preds)
    log.info("Done.")
    return trial_acts
def get_receptive_field_size(layer):
    """Receptive field size of a single output of the given layer.

    Parameters
    ----------
    layer: Lasagne layer
        Layer to compute receptive field size of the outputs from.

    Returns
    -------
    receptive_field_size:
        How many samples one output has "seen"/is influenced by.
    """
    # The first output's receptive field ends at index ends[0]; indices are
    # zero-based, so the field covers ends[0] + 1 samples.
    starts, ends = get_receptive_field_start_ends(layer)
    return ends[0] + 1
def get_receptive_field_start_ends(layer):
    """First and last samples of the receptive field of a single output of the given layer.

    Parameters
    ----------
    layer: Lasagne layer
        Layer to compute receptive field size of the outputs from.

    Returns
    -------
    receptive_field_starts: 1darray
        Per output, index of the first input sample influencing it
        (minimum over all paths through the network).
    receptive_field_ends: 1darray
        Per output, index of the last input sample influencing it
        (maximum over all paths).
    """
    all_paths = get_all_paths(layer)
    all_starts = []
    all_ends = []
    # Compute per-path receptive fields, then merge across paths.
    for path in all_paths:
        starts, ends = get_receptive_field_start_ends_for_path(path)
        all_starts.append(starts)
        all_ends.append(ends)
    # All paths must yield the same number of outputs.
    for starts, ends in zip(all_starts, all_ends):
        assert len(starts) == len(all_starts[0])
        assert len(ends) == len(starts)
    # Widest field wins: earliest start, latest end over all paths.
    min_starts = np.min(np.array(all_starts), axis=0)
    max_ends = np.max(np.array(all_ends), axis=0)
    return min_starts, max_ends
def get_receptive_field_start_ends_for_path(all_layers):
    """Per-output receptive field [start, end] sample indices along one
    linear path of layers, starting from the input layer's time axis."""
    input_layer = all_layers[0]
    n_samples = input_layer.shape[2]
    field_ends = np.arange(n_samples)
    field_starts = np.arange(n_samples)
    for layer in all_layers:
        # A convolution or pooling of width w merges w consecutive positions.
        width = None
        if hasattr(layer, 'filter_size'):
            width = layer.filter_size[0]
        if hasattr(layer, 'pool_size'):
            width = layer.pool_size[0]
        if width is not None and width > 1:
            field_ends = field_ends[width - 1:]
            field_starts = field_starts[:-(width - 1)]
        # A stride of s keeps every s-th output position.
        step = None
        if hasattr(layer, 'stride'):
            step = layer.stride[0]
        if hasattr(layer, 'n_stride'):
            step = layer.n_stride
        if step is not None and step > 1:
            field_ends = field_ends[::step]
            field_starts = field_starts[::step]
        # can happen that there is a partial pooling /conv region
        # assume this is removed (maybe not correct for stridereshape?)
        field_starts = field_starts[:len(field_ends)]
        assert len(field_starts) == len(field_ends)
    return field_starts, field_ends
def get_trial_acts(all_outs_per_batch, batch_sizes, n_trials, n_inputs_per_trial,
                   n_trial_len, n_sample_preds):
    """Compute trial activations from activations of a specific layer.

    Parameters
    ----------
    all_outs_per_batch: list of 1darray
        All activations of a specific layer for all batches from the iterator.
    batch_sizes: list
        All batch sizes of all batches.
    n_trials: int
    n_inputs_per_trial: int
        How many inputs/rows are used to predict all samples of one trial.
        Depends on trial length, number of predictions per input window.
    n_trial_len: int
        Number of samples per trial
    n_sample_preds: int
        Number of predictions per input window.

    Returns
    --------
    trial_acts: 3darray (final empty dim removed)
        Activations of this layer for all trials.
    """
    # Re-arrange batch-wise outputs into time-ordered activations.
    time_acts = transform_to_time_activations(all_outs_per_batch,batch_sizes)
    # Group the window rows by trial: (trial, window, chan, time, 1).
    trial_batch_acts = np.concatenate(time_acts, axis=0).reshape(n_trials,n_inputs_per_trial,
        time_acts[0].shape[1], time_acts[0].shape[2], 1)
    # Collapse overlapping windows of each trial into one continuous
    # activation sequence of n_trial_len samples.
    trial_acts = [transform_to_cnt_activations(t[np.newaxis], n_sample_preds,
                                               n_samples = n_trial_len)
                  for t in trial_batch_acts]
    trial_acts = np.array(trial_acts)
    return trial_acts
def model_structure_equal(final_layer_1, final_layer_2):
    """Compare if two networks have the same structure, i.e. same layers
    with same sizes etc. Ignores if they have different parameters.

    Returns
    -------
    all_equal: bool
        True if layer counts, classes and (non-ignored) attributes match.
    """
    # Parameter/state attributes are expected to differ and are skipped.
    ignore_keys = ['yaml_src', 'input_var', 'input_layer', '_srng', 'b', 'W', 'params',
                   'std', 'beta', 'mean', 'gamma', 'input_layers', 'reshape_layer']
    all_equal = True
    all_layers_1 = lasagne.layers.get_all_layers(final_layer_1)
    all_layers_2 = lasagne.layers.get_all_layers(final_layer_2)
    if len(all_layers_1) != len(all_layers_2):
        # log.warn is a deprecated alias of log.warning.
        log.warning("Unequal number of layers: {:d} and {:d}".format(
            len(all_layers_1), len(all_layers_2)))
        return False
    for l1, l2 in zip(all_layers_1, all_layers_2):
        if l1.__class__.__name__ != l2.__class__.__name__:
            log.warning("Different classnames {:s} and {:s}".format(
                l1.__class__.__name__, l2.__class__.__name__))
            all_equal = False
        for key in l1.__dict__:
            if key in ignore_keys:
                continue
            if l1.__dict__[key] != l2.__dict__[key]:
                all_equal = False
                # str() conversion: attribute values are often tuples/ints,
                # which "{:s}" would reject on Python 3.
                log.warning("Different attributes:\n{:s}: {:s} and {:s}".format(
                    key, str(l1.__dict__[key]), str(l2.__dict__[key])))
    return all_equal
def print_layers(model):
    """Print all layers, including all output shapes """
    description = layers_to_str(model)
    print(description)
def layers_to_str(final_layer):
    """Build a one-line-per-layer text summary of the network ending at
    *final_layer*: index, class name, filter/stride/dilation info,
    nonlinearity and (when it changes) the output shape.

    NOTE(review): uses ``func_name`` attributes, i.e. Python 2-era code.
    """
    all_layers_str = ""
    all_layers = lasagne.layers.get_all_layers(final_layer)
    # Track the last printed shape so only shape *changes* are shown.
    cur_shape = None
    for i, layer in enumerate(all_layers):
        layer_str = "{:25s}".format(layer.__class__.__name__)
        # Add filter sizes and strides
        filter_size = None
        if hasattr(layer, 'filter_size'):
            filter_size = layer.filter_size
        if hasattr(layer, 'pool_size'):
            filter_size = layer.pool_size
        if filter_size is not None:
            filter_str = "{:d}x{:d}".format(filter_size[0],
                filter_size[1])
            if (hasattr(layer,'stride') and layer.stride != (1,1)):
                filter_str += " ::{:d} ::{:d}".format(layer.stride[0],
                    layer.stride[1])
            if (hasattr(layer,'dilation') and layer.dilation != (1,1)):
                filter_str += " ::{:d} ::{:d}".format(layer.dilation[0],
                    layer.dilation[1])
            layer_str += "{:15s}".format(filter_str)
        if hasattr(layer, 'min_val'):
            # For Clip layers
            layer_str += "{:f} {:f}".format(layer.min_val,
                layer.max_val)
        # Also for stride reshape layer
        if hasattr(layer, 'n_stride'):
            filter_str = " ::{:d} ::1".format(layer.n_stride)
            layer_str += "{:15s}".format(filter_str)
        # Dropout probability.
        if hasattr(layer, 'p'):
            p_str = "{:.3f}".format(layer.p)
            layer_str += "{:15s}".format(p_str)
        layer_str = "{:2d}-{:50s}".format(i, layer_str)
        # Possibly add nonlinearities
        if (hasattr(layer, 'nonlinearity') and
                hasattr(layer.nonlinearity, 'func_name') and
                layer.nonlinearity.func_name != 'linear'):
            layer_str += " {:15s}".format(layer.nonlinearity.func_name)
        elif (hasattr(layer, 'nonlinearity') and
                not hasattr(layer.nonlinearity, 'func_name') and
                hasattr(layer.nonlinearity, 'name')):
            layer_str += " {:15s}".format(layer.nonlinearity.name)
        elif hasattr(layer, 'nonlinearity') and hasattr(layer.nonlinearity,
                'func') and hasattr(layer.nonlinearity, 'kwargs'):
            # ignoring args for now as usually not used
            if hasattr(layer.nonlinearity.func, 'func_name'):
                func_str = "{:s} {:s}".format(layer.nonlinearity.func.func_name,
                    str(layer.nonlinearity.kwargs))
            elif hasattr(layer.nonlinearity.func, 'name'):
                func_str = "{:s} {:s}".format(layer.nonlinearity.func.name,
                    str(layer.nonlinearity.kwargs))
            layer_str += " {:15s}".format(func_str)
        elif (hasattr(layer, 'merge_function')):
            if hasattr(layer.merge_function, 'func_name'):
                layer_str += " {:15s}".format(layer.merge_function.func_name)
            elif hasattr(layer.merge_function, 'name'):
                layer_str += " {:15s}".format(layer.merge_function.name)
        elif (hasattr(layer, 'pool_size')):
            layer_str += " {:15s}".format(layer.mode)
        else:
            layer_str += " {:15s}".format("")
        # Possibly add changing output shape
        if layer.output_shape != cur_shape:
            layer_str += " {:s}".format(layer.output_shape)
            cur_shape = layer.output_shape
        all_layers_str += layer_str + "\n"
    return all_layers_str
def recompute_bnorm_layer_statistics(final_layer, dataset, iterator):
    """Recompute the stored mean/inv_std of every BatchNormLayer from the
    given *dataset*, iterating with *iterator* (no shuffling).

    For each bnorm layer: predict its (already-normalized) outputs, invert
    the affine transform and the normalization to recover the raw inputs,
    then overwrite the layer's running statistics with fresh ones.
    """
    all_layers = lasagne.layers.get_all_layers(final_layer)
    bnorm_layers = [l for l in all_layers
                    if l.__class__.__name__ == 'BatchNormLayer']
    for bnorm_layer in bnorm_layers:
        log.info("Compiling bnorm layer...")
        this_layer_pred_fn = create_pred_fn(bnorm_layer)
        log.info("Predictions for bnorm layer...")
        outs = [this_layer_pred_fn(b[0]) for b in iterator.get_batches(dataset,
                                                                       shuffle=False)]
        outs = np.concatenate(outs)
        # Undo the learned affine transform: out = gamma * norm + beta.
        outs_before_transform = ((outs -
            bnorm_layer.beta.get_value()[None,:,None,None]) /
            bnorm_layer.gamma.get_value()[None,:,None,None])
        # Undo the normalization: norm = (x - mean) * inv_std.
        outs_before_transform = ((outs_before_transform /
            bnorm_layer.inv_std.get_value()[None,:,None,None]) +
            bnorm_layer.mean.get_value()[None,:,None,None])
        # Fresh statistics over batch and spatial axes, per channel.
        mean_this_layer = np.mean(outs_before_transform, axis=(0,2,3))
        stds_this_layer = np.std(outs_before_transform, axis=(0,2,3))
        bnorm_layer.mean.set_value(mean_this_layer)
        bnorm_layer.inv_std.set_value(1.0 / stds_this_layer)
def set_to_new_input_layer(final_layer, new_input_layer):
    """Rewire every layer that reads from an InputLayer so it reads from
    *new_input_layer* instead.  No-op when already wired to it."""
    layers = lasagne.layers.get_all_layers(final_layer)
    old_inputs = [lay for lay in layers
                  if lay.__class__.__name__ == 'InputLayer']
    if all(lay == new_input_layer for lay in old_inputs):
        return  # nothing to replace
    for lay in layers[1:]:
        if hasattr(lay, 'input_layer') and lay.input_layer in old_inputs:
            # Single-input layer: swap the reference directly.
            lay.input_layer = new_input_layer
        elif hasattr(lay, 'input_layers'):
            # Multi-input layer: rebuild the tuple with replacements.
            lay.input_layers = tuple(
                new_input_layer if cur in old_inputs else cur
                for cur in lay.input_layers)
|
import time
import logging
import sys
from mythril.mythril import Mythril
from web3 import Web3
from karl.exceptions import RPCError
from karl.sandbox.sandbox import Sandbox
from karl.sandbox.exceptions import SandboxBaseException
logging.basicConfig(level=logging.INFO)
class Karl:
    """
    Karl main interface class.

    Polls an Ethereum node for new blocks, runs Mythril on every newly
    deployed contract and (optionally) replays findings in a sandbox.
    """

    def __init__(
        self,
        rpc=None,
        rpctls=False,
        block_number=None,
        output=None,
        verbosity=logging.INFO,
    ):
        """
        Initialize Karl with the received parameters.

        :param rpc: "ganache", "infura-[mainnet, rinkeby, kovan, ropsten]"
            or "HOST:PORT"
        :param rpctls: use HTTPS when rpc is given as HOST:PORT
        :param block_number: first block to scan (defaults to the chain head)
        :param output: reporter exposing ``send(report, contract_address=...)``
        :param verbosity: log level for Karl's own logger
        :raises RPCError: when no usable endpoint can be derived from ``rpc``
        """
        if rpc is None:
            raise (
                RPCError("Must provide a valid --rpc connection to an Ethereum node")
            )
        # Ethereum node to connect to
        self.rpc = rpc
        self.rpc_tls = rpctls
        # Send results to this output (could be stdout or restful url)
        self.output = output
        # ! hack to stop mythril logging
        logging.getLogger().setLevel(logging.CRITICAL)
        # Set logging verbosity
        self.logger = logging.getLogger("Karl")
        self.logger.setLevel(verbosity)
        # Derive the actual HTTP(S) endpoint from the rpc shorthand.
        web3_rpc = None
        if rpc == "ganache":
            web3_rpc = "http://127.0.0.1:8545"
        else:
            infura_network = (
                rpc.split("infura-")[1] if rpc.startswith("infura-") else None
            )
            if infura_network in ["mainnet", "rinkeby", "kovan", "ropsten"]:
                web3_rpc = "https://{net}.infura.io".format(net=infura_network)
            else:
                try:
                    host, port = rpc.split(":")
                    if rpctls:
                        web3_rpc = "https://{host}:{port}".format(host=host, port=port)
                    else:
                        web3_rpc = "http://{host}:{port}".format(host=host, port=port)
                except ValueError:
                    raise RPCError(
                        "Invalid RPC argument provided '{}', use "
                        "'ganache', 'infura-[mainnet, rinkeby, kovan, ropsten]' "
                        "or HOST:PORT".format(rpc)
                    )
        if web3_rpc is None:
            raise RPCError(
                "Invalid RPC argument provided {}, use "
                "'ganache', 'infura-[mainnet, rinkeby, kovan, ropsten]' "
                "or HOST:PORT".format(rpc)
            )
        self.web3_rpc = web3_rpc
        self.web3 = Web3(Web3.HTTPProvider(web3_rpc, request_kwargs={"timeout": 60}))
        # NOTE(review): Web3(...) does not return None, so this guard is
        # effectively dead code; kept as a safety net.
        if self.web3 is None:
            raise RPCError(
                "Invalid RPC argument provided {}, use "
                "'ganache', 'infura-[mainnet, rinkeby, kovan, ropsten]' "
                "or HOST:PORT".format(rpc)
            )
        self.block_number = block_number or self.web3.eth.blockNumber

    def run(self, forever=True):
        """Scrape blocks in a loop, analyzing every freshly created contract."""
        self.logger.info("Starting scraping process")
        # TODO: Refactor try-except statements
        try:
            while forever:
                block = self.web3.eth.getBlock(
                    self.block_number, full_transactions=True
                )
                # If new block is not yet mined sleep and retry
                if block is None:
                    time.sleep(1)
                    continue
                self.logger.info(
                    "Processing block {block}".format(block=block.get("number"))
                )
                # Next block to scrape
                self.block_number += 1
                # For each transaction get the newly created accounts
                for t in block.get("transactions", []):
                    # If there is no to defined or to is reported as address(0x0)
                    # a new contract is created
                    if (t["to"] is not None) and (t["to"] != "0x0"):
                        continue
                    try:
                        receipt = self.web3.eth.getTransactionReceipt(t["hash"])
                        if (receipt is None) or (
                            receipt.get("contractAddress", None) is None
                        ):
                            self.logger.error(
                                "Receipt invalid for hash = {}".format(t["hash"].hex())
                            )
                            self.logger.error(receipt)
                            continue
                        address = str(receipt.get("contractAddress", None))
                        report = self._run_mythril(contract_address=address)
                        issues_num = len(report.issues)
                        if issues_num:
                            self.logger.info("Found %s issue(s)", issues_num)
                            self.output.send(report, contract_address=address)
                            self.logger.info("Firing up sandbox tester")
                            exploitable = self._run_sandbox(
                                block_number=block.get("number", None),
                                contract_address=address,
                                report=report,
                                rpc=self.web3_rpc,
                            )
                            if exploitable:
                                # TODO: Nice output
                                pass
                            else:
                                pass
                        else:
                            self.logger.info("No issues found")
                    except Exception as e:
                        # Per-transaction failures must not stop the scan.
                        self.logger.error("Exception: %s\n%s", e, sys.exc_info()[2])
        except Exception as e:
            self.logger.error("Exception: %s\n%s", e, sys.exc_info()[2])

    def _run_mythril(self, contract_address=None):
        """Run Mythril's ether_thief/suicide modules against one contract."""
        myth = Mythril(onchain_storage_access=True, enable_online_lookup=True)
        myth.set_api_rpc(rpc=self.rpc, rpctls=self.rpc_tls)
        self.logger.info("Analyzing %s", contract_address)
        myth.load_from_address(contract_address)
        self.logger.debug("Running Mythril")
        return myth.fire_lasers(
            strategy="dfs",
            modules=["ether_thief", "suicide"],
            address=contract_address,
            execution_timeout=45,
            create_timeout=10,
            max_depth=22,
            transaction_count=3,
            verbose_report=True,
        )

    def _run_sandbox(
        self, block_number=None, contract_address=None, report=None, rpc=None
    ):
        """Verify a Mythril report in a sandbox; False when unverifiable."""
        try:
            sandbox = Sandbox(
                block_number=block_number,
                contract_address=contract_address,
                report=report,
                rpc=rpc,
                verbosity=self.logger.level,
            )
        except SandboxBaseException as e:
            self.logger.error(e)
            # Fix: execution previously fell through to the line below with
            # ``sandbox`` unbound, raising UnboundLocalError on top of the
            # original failure.  Treat a broken sandbox as "not exploitable".
            return False
        return sandbox.check_exploitability()
|
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
# Natoms = 2000
# Beta values (x10) for which AAA_bE_Beta_data{B}.xlsx data files exist;
# trailing commented values were excluded from this run.
Beta = [0,2,4,6,8,10,14,18,22]#26,30,35,40]
#)path = r"C:\\Users\\Daniel White\\Beta_data500000.csv" # 5 0's
# Number of datasets to load/plot.
L = len(Beta)
def Data(a):
    '''[0] is data, [1] is Beta value'''
    frame = pd.read_excel('AAA_bE_Beta_data{}.xlsx'.format(a))
    beta_label = frame.columns[1]
    return frame, beta_label
# Per-beta histograms: BallD holds bin positions, HallD the densities.
HallD,BallD = [],[]
for i in range(L):
    B = []
    H = []
    # Column 0: position bin, column 1: particle density.
    for index,row in Data(Beta[i])[0].iterrows():
        B.append(row[0])
        H.append(row[1])
    BallD.append(B)
    HallD.append(H)
# Figure labels for the density-distribution plot.
plt.title('Density Distribution of Increasing Beta\nw/ 2000 Atoms, 150MHz AOM, RF 15MHz ',size=19)
plt.xlabel('Distance from source / m',size=17)
plt.ylabel('Particle Density',size=17)
# # # # # # # # # # ##
''' F W H M ''' # ##
# Estimate the full width at half maximum of each beta's density peak.
FWHMs = []
for i in range(L):
    HallD_ = HallD[i]
    BallD_ = BallD[i]
    # Peak height and (median) index of the peak bin.
    Hpeak = [max(HallD_), int(round( np.median(np.where(HallD_==max(HallD_))) ))]
    # Indices of the minima on either side of the peak.
    # NOTE(review): Lmax is an index *within* the slice HallD_[Hpeak[1]:] but
    # is later used as an absolute index into HallD_ -- confirm intended.
    Lmin = max(np.where(HallD_[:Hpeak[1]] == min(HallD_[:Hpeak[1]] ) )[0])
    Lmax = max(np.where(HallD_[Hpeak[1]:] == min(HallD_[Hpeak[1]:] ) )[0])
    #print('Index Distance from center to edge R L= ', BallD[Hpeak[1]]-BallD[Lmin], BallD[Lmax]-BallD[Hpeak[1]])
    #vLmin,vLmax = BinFactor* IS IT POSSIBLE TO CONVERT INTO ORGINAL VELOCITY = not needed right now
    FWi = Lmax-Lmin
    Bot = max(HallD_[Lmax],HallD_[Lmin])
    # NOTE(review): half-maximum is conventionally Bot + (peak - Bot)/2;
    # this adds (peak + Bot)/2 to Bot -- confirm the intended formula.
    HM = Bot + (Hpeak[0]+Bot)/2
    # Bins closest to the half-maximum level on each side of the peak.
    lHM = np.abs(HallD_[:Hpeak[1]]-HM).argmin()
    rHM = np.abs(HallD_[Hpeak[1]:]-HM).argmin()+Hpeak[1]
    #print(lHM,rHM)
    # Asymmetry of the half-max crossings about the peak.
    Skew = -1*(BallD_[Hpeak[1]]-BallD_[lHM]-BallD_[rHM]+BallD_[Hpeak[1]])
    #print('Skew =',Skew,' +=MB')
    FWHM = BallD_[rHM]-BallD_[lHM]
    FWHMs.append(FWHM)
#print(FWHMs)
c = 3e8  # speed of light, m/s
def IrE(b):
    """Intensity from field amplitude b: (c * eps0 / 2) * b**2, scaled by 1e-4."""
    return c*8.85e-12/2*b**2/10000
# Plot every dataset except the first, with a colour ramp over i.
for i in range(1,L):
    # RGB components vary smoothly with the dataset index.
    col = ( float((i/(L+1)+0.0001)), float((i/(L+1)+0.0001)**0.8), 0.5*float((i/(L+5)+0.0001)**0.7))
    plt.plot(BallD[i], HallD[i],c=col,linewidth=5,label='Beta = '+str(Beta[i]/10)+', Vp-p = '+str(Beta[i]/20)+'V {}cm'.format(round(FWHMs[i]*100,4)))
    plt.fill_between(BallD[i], HallD[i],color=col,alpha=0.2)
plt.legend(fontsize=25)
plt.show()
# FWHM is a bit broken, need way more bins
'''
3 D A t t e m p t
# # # # # # #
from mpl_toolkits.mplot3d import Axes3D
fig = plt.figure()
ax = fig.add_suBallD(1,1,1,projection='3d')
'''
#ax.plot_wireframe(BallD,HallD,Beta)
|
#################################################################################################################
#################################################################################################################
## Made by: Brandon Shaver
#################################################################################################################
#################################################################################################################
import random
import sys
#################################################################################################################
## Global Varible Declarations
#################################################################################################################
# Map dimensions: inclusive index bounds used for spawn positions and moves.
map_x = 15
map_y = 20
# Current UI mode; switched between 'move', 'shop' and 'attack'.
map_stage = 'move'
#################################################################################################################
## Classes
#################################################################################################################
class Hero():
    """The player character and all of its run-time stats."""
    def __init__( self ):
        self.type = 'hero'
        # Combat stats.
        self.max_health = 25
        self.health = self.max_health
        self.attack = 6
        self.accuracy = 75 # he hits 75% of the time
        # Inventory and wealth.
        self.has_sword = False
        self.health_potions = 0
        self.gold = 10
        self.is_running = False
        # Kill tally; kill_count is a snapshot taken at creation time.
        self.wolves_killed = 0
        self.orcs_killed = 0
        self.dragons_killed = 0
        self.kill_count = self.wolves_killed + self.orcs_killed + self.dragons_killed
        # Map representation: sprite character and [x, y] position.
        self.sprite = 'H'
        self.pos = [0,0]
class Wolf():
    """A wolf enemy; stats scale with the randomly drawn degree."""
    degrees = ['starved ','scarred ','winter ','forest ','dire ']
    def __init__( self ):
        self.type = 'enemy'
        self.race = 'wolf'
        # The degree's position in the list acts as a difficulty multiplier.
        self.degree = random.choice(Wolf.degrees)
        self.degree_level = Wolf.degrees.index(self.degree) + 1
        self.name = self.degree + self.race
        level = self.degree_level
        self.health = random.randint(3,6) * level
        self.attack = random.randint(1,3) * level
        self.accuracy = random.randint(25,60) * level
        self.reward = random.randint(5,15)
        self.sprite = 'W'
        # Random spawn anywhere on the map (inclusive bounds).
        self.pos = [random.randint(0,map_x),random.randint(0,map_y)]
class Orc():
    """An orc enemy; stats scale with the randomly drawn degree."""
    degrees = ['starved ','scarred ','bloodthirsty ','warlord ']
    def __init__( self ):
        self.type = 'enemy'
        self.race = 'orc'
        # The degree's position in the list acts as a difficulty multiplier.
        self.degree = random.choice(Orc.degrees)
        self.degree_level = Orc.degrees.index(self.degree) + 1
        self.name = self.degree + self.race
        level = self.degree_level
        self.health = random.randint(3,18) * level
        self.attack = random.randint(1,16) * level
        self.accuracy = random.randint(25,60) * level
        self.reward = random.randint(5,45)
        self.sprite = 'O'
        # Random spawn anywhere on the map (inclusive bounds).
        self.pos = [random.randint(0,map_x),random.randint(0,map_y)]
class Dragon():
    """A dragon boss enemy; by far the strongest, with the biggest reward."""
    degrees = ['starved ','scarred ','bloodthirsty ','reverend ','elder ','ancient ','final ']
    def __init__( self ):
        self.type = 'enemy'
        self.race = 'dragon'
        # The degree's position in the list acts as a difficulty multiplier.
        self.degree = random.choice(Dragon.degrees)
        self.degree_level = Dragon.degrees.index(self.degree) + 1
        self.name = self.degree + self.race
        level = self.degree_level
        self.health = random.randint(75,100) * level
        self.attack = random.randint(10,15) * level
        self.accuracy = random.randint(50,75) * level
        self.reward = random.randint( 500,1000 )
        self.sprite = 'D'
        # Random spawn anywhere on the map (inclusive bounds).
        self.pos = [random.randint(0,map_x),random.randint(0,map_y)]
class Store():
    """An inn/store stocking a random subset of the possible items."""
    possible_inventory = ['health_potion','excalibur','golden_apple']
    def __init__( self ):
        stock_pool = Store.possible_inventory
        # Prices; only the sleep price is randomised per store.
        self.cost_for_sleep = random.randint(5,10)
        self.cost_for_excalibur = 100
        self.cost_for_health_potion = 15
        self.cost_for_golden_apple = 60
        self.cost_for_everything = self.cost_for_excalibur + self.cost_for_health_potion + self.cost_for_golden_apple
        # Draw a random number of items; duplicate draws are skipped, so the
        # store may stock fewer items than drawn.
        self.inventory = []
        self.inventory_items = random.randint(1,len(stock_pool))
        for _ in range( self.inventory_items ):
            candidate = random.choice(stock_pool)
            if candidate not in self.inventory:
                self.inventory.append( candidate )
        self.type = 'store'
        # Random spawn anywhere on the map (inclusive bounds).
        self.pos = [random.randint(0,map_x),random.randint(0,map_y)]
        self.sprite = 'S'
#################################################################################################################
## Functions
#################################################################################################################
def Display_Map():
    """Render the world grid to stdout; '_' marks an empty tile.

    Objects later in map_objects overwrite earlier ones on the same tile.
    """
    grid = [['_'] * (map_x + 1) for _ in range(map_y + 1)]
    for obj in map_objects:
        grid[obj.pos[1]][obj.pos[0]] = obj.sprite
    # Space-separated sprites, one row per line (same output the original
    # produced by stripping quotes/commas/brackets from str(list)).
    for row in grid:
        print(' '.join(row))
    print('')
def Get_Map_Action():
    """Prompt for and execute one map-mode command.

    Mutates the global ``hero``.  Returns a short status string
    ('help', 'out_of_range', 'at_edge', 'health', ..., 'invalid') for the
    caller to report, or None when a move succeeded.
    """
    global valid_input
    action = input('Enter your command: ')
    if action == 'help':
        return 'help'
    elif action == 'move':
        # Verbose move command: wraps around to the opposite map edge.
        direction = input('What direction(U,D,L,R): ')
        if direction == 'U':
            if hero.pos[1] - 1 < 0:
                hero.pos[1] = map_y
                return 'out_of_range'
            else:
                hero.pos[1] -= 1
        elif direction == 'D':
            if hero.pos[1] + 1 > map_y:
                hero.pos[1] = 0
                return 'out_of_range'
            else:
                hero.pos[1] += 1
        elif direction == 'L':
            if hero.pos[0] - 1 < 0:
                hero.pos[0] = map_x
                return 'out_of_range'
            else:
                hero.pos[0] -= 1
        elif direction == 'R':
            if hero.pos[0] + 1 > map_x:
                hero.pos[0] = 0
                return 'out_of_range'
            else:
                hero.pos[0] += 1
        else:
            # NOTE(review): sets the global flag but falls through returning
            # None rather than 'invalid' -- confirm intended.
            valid_input = False
    # Single-key WASD moves: do NOT wrap; stop at the edge and also clear
    # the hero's running flag.
    elif action == 'w':
        if hero.pos[1] - 1 < 0:
            return 'at_edge'
        else:
            hero.pos[1] -= 1
            hero.is_running = False
    elif action == 'a':
        if hero.pos[0] - 1 < 0:
            return 'at_edge'
        else:
            hero.pos[0] -= 1
            hero.is_running = False
    elif action == 's':
        if hero.pos[1] + 1 > map_y:
            return 'at_edge'
        else:
            hero.pos[1] += 1
            hero.is_running = False
    elif action == 'd':
        if hero.pos[0] + 1 > map_x:
            return 'at_edge'
        else:
            hero.pos[0] += 1
            hero.is_running = False
    # Stat queries: the caller presumably maps these tokens to output.
    elif action == 'show health':
        return 'health'
    elif action == 'show attack':
        return 'attack'
    elif action == 'show gold':
        return 'gold'
    elif action == 'show stats' or action == 'show my stats':
        return 'stats'
    elif action == 'show wolf kills':
        return 'wolf'
    elif action == 'show orc kills':
        return 'orc'
    elif action == 'show dragon kills':
        return 'dragon'
    elif action == 'show kills':
        return 'kills'
    ## Debugging Codes:
    elif action == '``':
        # Cheat: max out the hero.
        hero.gold = 1000
        hero.max_health = 1000
        hero.health = 1000
        hero.attack = 1000
        hero.accuracy = 100
        hero.health_potions = 10
        hero.has_sword = True
    elif action == 'make_da_superstore':
        # Cheat: spawn a fully-stocked store on the hero's tile.
        da_shop = Store()
        da_shop.__init__()
        da_shop.inventory = da_shop.possible_inventory
        da_shop.pos = hero.pos[:]
        map_objects.append( da_shop )
    elif action == 'quit' or action == 'exit':
        sys.exit()
    else:
        return 'invalid'
def Check_Hero_Collision():
    """Check whether the hero shares a tile with a map object and, if so,
    switch the global map_stage to 'shop' or 'attack' and record the object
    in the global ``collision``."""
    global map_stage
    global collision
    hero_pos = hero.pos  # NOTE(review): unused local
    # Iterate objects in reverse insertion order; the list order is restored
    # by the second reverse() before returning.
    map_objects.reverse()
    if hero.is_running == False and map_stage == 'move':
        for item in map_objects:
            if hero != item:
                if hero.pos == item.pos:
                    if item.type == 'store':
                        print("You have found an inn.")
                        map_stage = 'shop'
                    elif item.type == 'enemy':
                        Clear_Screen()
                        if item.health <= 0:
                            print('You have found a dead '+item.name+'.')
                        else:
                            print('You have encountered a '+item.name+'!')
                        map_stage = 'attack'
                    else:
                        print("Something is not working correctly.")
                    collision = item
    map_objects.reverse()
    return None
def Clear_Screen():
    """Scroll previous output out of view by printing 30 blank lines."""
    for _ in range(30):
        print('')
def Get_Encounter_Action():
    """Prompt for one action while standing on an encounter tile.

    Uses the globals ``hero`` and ``collision``; 'run' switches map_stage
    back to 'move' and redraws the map.
    """
    encounter_move = input('What would you like to do(examine,attack,use potion,run): ')
    if encounter_move == 'examine':
        if collision.health > 0:
            print('The '+ collision.race +' has '+ str(collision.health) +' health and '+ str(collision.attack) +' attack power.')
        else:
            print('You stand atop a dead '+ collision.race +'.')
    elif encounter_move == 'attack':
        if collision.health > 0:
            Attack()
        else:
            print('The '+ collision.race +' is already dead.')
    elif encounter_move == 'use potion':
        potion_type = input('What kind of potion(health potion): ')
        if potion_type == 'health potion' and hero.health_potions > 0:
            # Fix: restore health BEFORE printing -- previously the message
            # reported the pre-potion health value.
            hero.health = hero.max_health
            hero.health_potions -= 1
            print("Your hero's health has been restored. You now have "+str(hero.health)+" health.")
        elif potion_type == 'health potion' and hero.health_potions == 0:
            print("You do not have any of these potions.")
    elif encounter_move == 'run':
        global map_stage
        map_stage = 'move'
        hero.is_running = True
        Clear_Screen()
        Display_Map()
def Attack():
    """Resolve one combat round: the hero swings at `collision`, then the
    enemy (if still alive) counter-attacks.

    Uses globals `hero`, `collision`, `map_objects` and `random`.  A kill
    grants gold, bumps the per-race kill counter and spawns a replacement
    enemy of the same race.
    NOTE(review): hero.kill_count (read by Main's 'kills' command) is never
    incremented here — confirm where it was meant to be updated.
    """
    print("You attempt to attack the "+ collision.race +".")
    # Percentile roll: the hit lands when accuracy >= roll.
    hit_success = random.randint(0,100)
    if hero.accuracy >= hit_success:
        collision.health -= hero.attack
        if collision.health > 0:
            print("Your attack succeeded. The "+ collision.race +" now has "+ str(collision.health) +" health.")
        elif collision.health <= 0:
            hero.gold += collision.reward
            print("Your attack succeeded.")
            print("You have killed the "+ collision.race +". You gained "+ str(collision.reward) +" gold "+
                  "and now have a total of "+ str(hero.gold) +" gold.")
            # Corpses render as 'x' on the map.
            collision.sprite = 'x'
            if collision.race == 'orc':
                hero.orcs_killed += 1
                orc = Orc()
                orc.__init__()
                map_objects.append(orc)
            elif collision.race == 'wolf':
                hero.wolves_killed += 1
                wolf = Wolf()
                wolf.__init__()
                map_objects.append(wolf)
            elif collision.race == 'dragon':
                hero.dragons_killed += 1
                dragon = Dragon()
                dragon.__init__()
                map_objects.append(dragon)
                # NOTE(review): these permanent stat bonuses sit inside the
                # dragon branch, so only dragon kills raise max health and
                # attack — confirm wolf/orc kills were not meant to count too.
                hero.max_health += 1
                hero.attack += 1
    elif hero.accuracy < hit_success:
        print("Your attack missed.")
    # Counter-attack: only a living enemy strikes back, with its own roll.
    if collision.health > 0:
        hit_success = random.randint(0,100)
        if collision.accuracy >= hit_success:
            hero.health -= collision.attack
            if hero.health > 0:
                print('The '+ collision.race +' has counter attacked and dealt '+ str(collision.attack) +' damage. '+
                      'You now have '+ str(hero.health) +' health left.')
            elif hero.health <= 0:
                print('The '+ collision.race +' has counter attacked.')
                print('Your hero has been killed.')
                # Death ends the game immediately.
                sys.exit()
        elif collision.accuracy < hit_success:
            pass
def Shop():
    """Handle one round of interaction with the store the hero stands on.

    Uses globals `hero`, `collision` (the store object) and `map_stage`.
    """
    action = input("What would you like to do here(buy,sell,sleep/rest,leave):")
    if action == 'help':
        print('Here you can buy, sell, sleep, or leave.')
    elif action == 'buy':
        print('This store has '+ str(collision.inventory) +' for sale.')
        print("What would you like to buy?")
        item = input()
        if item in collision.inventory:
            if item == 'excalibur':
                print('This item costs '+ str(collision.cost_for_excalibur) +' gold, and will boost your attack damage by 100')
                if hero.has_sword == True:
                    print('You already own this sword.')
                elif hero.gold >= collision.cost_for_excalibur:
                    answer = input('Would you like to buy this(yes,no): ')
                    if answer == 'yes':
                        hero.attack += 100
                        hero.gold -= collision.cost_for_excalibur
                        # FIX: record ownership — otherwise the unique sword
                        # could be bought (and paid for) again and again.
                        hero.has_sword = True
            elif item == 'health_potion':
                print("This item costs "+ str(collision.cost_for_health_potion) +" gold, and will restore your hero's health.")
                answer = input('Would you like to buy this(yes, no): ')
                if answer == 'yes':
                    hero.health_potions += 1
                    hero.gold -= collision.cost_for_health_potion
            elif item == 'golden_apple':
                print("This item costs "+ str(collision.cost_for_golden_apple) +" gold, and will raise your hero's max health by 40.")
                answer = input('Would you like to buy this(yes, no): ')
                if answer == 'yes':
                    hero.max_health += 40
                    hero.health = hero.max_health
                    hero.gold -= collision.cost_for_golden_apple
            elif item == 'nothing':
                print("It would be a shame to come all the way here and not buy something.")
            elif item == 'everything':
                if hero.gold >= collision.cost_for_everything:
                    hero.has_sword = True
                    hero.attack += 100
                    hero.health_potions += 1
                    # NOTE(review): the golden_apple branch raises max_health,
                    # this bundle raises current health — confirm intent.
                    hero.health += 40
                    # FIX: the bundle was handed out for free — charge for it.
                    hero.gold -= collision.cost_for_everything
        else:
            print('This store does not sell that.')
    elif action == 'sell':
        print("What would you like to sell?")
        item = input()
    elif action == 'sleep' or action == 'rest':
        if hero.gold >= collision.cost_for_sleep:
            print('It will cost '+ str(collision.cost_for_sleep) +' gold to sleep here. '+
                  'You currently have '+ str(hero.gold) +'. Are you sure you want to sleep here?')
            answer = input()
            if answer == 'yes':
                hero.gold -= collision.cost_for_sleep
                hero.health = hero.max_health
                print("Your hero's health has been restored!")
        else:
            print("Sorry but you don't have enough gold.")
    elif action == 'leave':
        global map_stage
        map_stage = 'move'
def Move_Map_Characters():
    """Nudge every living enemy one random step, undoing moves off the map."""
    for critter in map_objects:
        if critter.type != 'enemy':
            continue
        if critter.health <= 0:
            continue
        dx = random.randint(-1, 1)
        dy = random.randint(-1, 1)
        axis = random.randint(0, 1)
        if axis == 0:
            critter.pos[0] += dx
        else:
            critter.pos[1] += dy
        # Revert a step that left the map bounds.
        if critter.pos[0] < 0 or critter.pos[0] > map_x:
            critter.pos[0] -= dx
        if critter.pos[1] < 0 or critter.pos[1] > map_y:
            critter.pos[1] -= dy
def Check_Map_Objects_Collision():
    """Resolve collisions between non-hero map objects.

    A store that shares a tile with anything is destroyed; two living enemies
    on the same tile fight a skirmish and the weaker one dies.  Rebuilds the
    global `current_positions` dict (tile tuple -> first occupant).
    """
    global current_positions
    # Kept global: the loop variable was global in the original and other
    # module code may inspect the last-processed object.
    global character
    current_positions = {}
    # FIX: iterate over a snapshot — the body removes destroyed stores from
    # map_objects, and mutating a list while iterating it skips elements.
    for character in list(map_objects):
        the_pos = tuple(character.pos)
        if character.type != 'hero':
            if the_pos not in current_positions:
                current_positions[the_pos] = character
            else:
                other_char = current_positions[the_pos]
                if character.type == 'store':
                    print("A store has been destroyed!")
                    map_objects.remove(character)
                elif other_char.type == 'store':
                    print("A store has been destroyed!")
                    map_objects.remove(other_char)
                elif character.type == 'enemy' and other_char.type == 'enemy':
                    if character.health > 0 and other_char.health > 0:
                        print("There has been a skirmish between a "+character.race+" and a "+other_char.race+"!")
                        if character.health > other_char.health:
                            print("The "+character.race+" won the skirmish.")
                            other_char.health = 0
                            other_char.sprite = 'x'
                        else:
                            print("The "+other_char.race+" won the skirmish.")
                            character.health = 0
                            character.sprite = 'x'
#################################################################################################################
## Initial Character Creation
#################################################################################################################
# FIX: Hero() already runs __init__; the explicit second call re-initialized
# the hero (and re-consumed any randomness inside __init__) for no benefit.
hero = Hero()
def CreateMapCharacters():
    """(Re)populate the global `map_objects` list with the hero plus a fresh
    random set of wolves, orcs, one store and one dragon.

    FIX: the constructors already run __init__; the explicit second
    `obj.__init__()` calls double-initialized every object.
    """
    global map_objects
    map_objects = [hero]
    for _ in range(random.randint(2, 5)):
        map_objects.append(Wolf())
    for _ in range(random.randint(2, 4)):
        map_objects.append(Orc())
    map_objects.append(Store())
    map_objects.append(Dragon())
CreateMapCharacters()
#################################################################################################################
## Main
#################################################################################################################
def Main():
    """Top-level game loop: show the intro, then dispatch on `map_stage`
    ('move' / 'attack' / 'shop') once per turn until the player quits or dies.
    """
    global map_stage
    global valid_input
    print('')
    print('-----------------------------------------------------------------------')
    print('You are the hero(H) and there are many things to be found on the map.')
    print('You will start of in the top left corner. Your goal is to kill a dragon(D).')
    print('To move, use W,A,S,D or type "move".')
    print('Try typing "help" for a complete list of commands.')
    print('Press "enter" to enter a command.')
    print('-----------------------------------------------------------------------')
    print('')
    print('Press "enter" to begin.')
    print('')
    begin = input()
    Clear_Screen()
    Display_Map()
    # Percent chance per turn that a new store appears on the map.
    build_store_chance = 5
    while True:
        build_store = random.randint(1,100)
        if map_stage == 'move':
            action = Get_Map_Action()
            if build_store <= build_store_chance:
                # FIX: Store() already runs __init__; the explicit second
                # call double-initialized the store.
                map_objects.append(Store())
            if action == 'out_of_range':
                # Walking off the map generates a fresh area.
                CreateMapCharacters()
            Clear_Screen()
            Display_Map()
            Check_Hero_Collision()
            if action == 'invalid':
                print('That was not a valid input.')
            elif action == 'health':
                print('Your hero currently has '+ str(hero.health) +' health.')
            elif action == 'attack':
                print("Your hero currently has "+ str(hero.attack) +" attack damage.")
            elif action == 'gold':
                print("Your hero currently has "+ str(hero.gold) +" gold.")
            elif action == 'stats':
                # FIX: `hero.potions` does not exist (AttributeError); the
                # attribute used everywhere else is `health_potions` — and the
                # message was missing its trailing label.
                print('Your hero has '+str(hero.health)+' health, '+str(hero.attack)+' attack, '+str(hero.health_potions)+' health potions.')
            elif action == 'wolf':
                print('Your hero has slain '+str(hero.wolves_killed)+' wolves.')
            elif action == 'orc':
                print('Your hero has slain '+str(hero.orcs_killed)+' orcs.')
            elif action == 'dragon':
                print('Your hero has slain '+str(hero.dragons_killed)+' dragons.')
            elif action == 'kills':
                print('Your hero has slain '+str(hero.wolves_killed)+' wolves, '+str(hero.orcs_killed)+' orcs, '
                      +str(hero.dragons_killed)+' dragons, and '+str(hero.kill_count)+' beasts in total.')
            elif action == 'help':
                print('Possible actions are: ')
                print('    move, w, a, s, d, show health, show attack, show gold,')
                print('    show wolf kills, show orc kills, show dragon kills, show kills')
                print('')
                print('To enter these commands, type them and then press "enter".')
                print('')
            elif action == 'at_edge':
                print("You are at the edge of this map. In order to move into the next area, "+
                      "you must use the 'move' command.")
            Move_Map_Characters()
            Check_Map_Objects_Collision()
            Check_Hero_Collision()
        elif map_stage == 'attack':
            Get_Encounter_Action()
        elif map_stage == 'shop':
            Shop()
        print('')
Main()
|
def reverse(a):
    """Reverse the concatenation of the strings in `a` and re-split the result
    into chunks whose lengths match the original strings, in order.

    Example: ["ab", "c"] -> joined "abc" -> reversed "cba" -> ["cb", "a"].
    Fix: the original used `iter` as a local name, shadowing the builtin.
    """
    flipped = "".join(a)[::-1]
    output = []
    pos = 0
    for s in a:
        output.append(flipped[pos:pos + len(s)])
        pos += len(s)
    return output
'''
Task
Given an array of strings, reverse them and their order in such way that
their length stays the same as the length of the original inputs.
Example:
Input: {"I", "like", "big", "butts", "and", "I", "cannot", "lie!"}
Output: {"!", "eilt", "onn", "acIdn", "ast", "t", "ubgibe", "kilI"}
'''
|
# Print an hourglass made of '* ' cells: n rows shrinking, then n rows growing.
n = int(input('Masukkan tinggi : '))
print(' ')
# Upper half: row of width `stars` indented so the right edge stays aligned.
for stars in range(n, 0, -1):
    print(' ' * (n - stars) + '* ' * stars)
# Lower half: mirror image growing back to full width.
for row in range(1, n + 1):
    print(' ' * (n - row) + '* ' * row)
|
import pandas as pd
import dgl
from time import time
import torch
from sklearn.decomposition import PCA
import numpy as np
from torchlight import set_seed
def load_tissue(params=None):
    """Load a mouse-brain scRNA-seq matrix and build a cell/gene bipartite DGL graph.

    Args:
        params: object exposing `random_seed` and `dense_dim` attributes.
            NOTE(review): the default `params=None` immediately raises
            AttributeError below — a populated namespace is required.

    Returns:
        (num_cells, num_genes, graph, feat, labels, train_mask, test_mask)
    """
    random_seed = params.random_seed
    dense_dim = params.dense_dim
    set_seed(random_seed)
    # Reference points: PCA dense_dim -> (presumably) explained variance ratio.
    # 400 0.7895
    # 200 0.5117
    # 100 0.3203
    # 50 0.2083
    """
    root = '../data/mammary_gland'
    num = 2915
    data_path = f'{root}/mouse_Mammary_gland{num}_data.csv'
    type_path = f'{root}/mouse_Mammary_gland{num}_celltype.csv'
    """
    data_path = '../data/mouse_data/mouse_brain_2915_data.csv'
    type_path = '../data/mouse_data/mouse_brain_2915_celltype.csv'
    # load celltype file then update labels accordingly
    cell2type = pd.read_csv(type_path, index_col=0)
    cell2type.columns = ['cell', 'type']
    # Map each distinct cell-type string to a stable integer id.
    id2label = cell2type['type'].drop_duplicates(keep='first').tolist()
    label2id = {label: idx for idx, label in enumerate(id2label)}
    print(f'{len(id2label)} classes in total')
    cell2type['id'] = cell2type['type'].map(label2id)
    assert not cell2type['id'].isnull().any(), 'something wrong about celltype file.'
    # load data file (transposed so rows = cells, columns = genes)
    data = pd.read_csv(data_path, index_col=0)
    data = data.transpose(copy=True)
    assert cell2type['cell'].tolist() == data.index.tolist()
    print(f'{data.shape[0]} cells, {data.shape[1]} genes.')
    # genes
    id2gene = data.columns.tolist()
    gene2id = {gene: idx for idx, gene in enumerate(id2gene)}
    # construct graph and add nodes and edges
    graph = dgl.DGLGraph()
    start = time()
    # 1. add all genes as nodes (gene nodes occupy ids [0, num_genes))
    num_genes = len(id2gene)
    graph.add_nodes(num_genes)
    # maintain a kind of sparse idx for Graph
    row_idx, col_idx = data.to_numpy().nonzero()
    # Offset cell rows so cell node ids start after the gene node ids.
    row_idx = row_idx + num_genes
    # 2. add cell nodes and edges (both directions -> undirected bipartite graph)
    num_cells = data.shape[0]
    graph.add_nodes(num_cells)
    graph.add_edges(row_idx, col_idx)
    graph.add_edges(col_idx, row_idx)
    print(f'Added {num_cells} nodes and {len(row_idx)} edges.')
    print(f'#Nodes: {graph.number_of_nodes()}, #Edges: {graph.number_of_edges()}.')
    print(data.head())
    # reduce sparse features to dense features via PCA (cells and genes separately)
    cell_pca = PCA(n_components=dense_dim, random_state=random_seed)
    cell_pca.fit(data.values)
    cell_feat = cell_pca.transform(data.values)
    cell_feat = torch.FloatTensor(cell_feat)
    gene_pca = PCA(n_components=dense_dim, random_state=random_seed)
    gene_pca.fit(data.T.values)
    gene_feat = gene_pca.transform(data.T.values)
    gene_feat = torch.FloatTensor(gene_feat)
    # Node feature matrix ordered like the node ids: genes first, then cells.
    feat = torch.cat([gene_feat, cell_feat], dim=0)
    # feat = torch.zeros(graph.number_of_nodes(), dense_dim).normal_()
    cell_evr = sum(cell_pca.explained_variance_ratio_) * 100
    gene_evr = sum(gene_pca.explained_variance_ratio_) * 100
    print(f'[PCA] Cell EVR: {cell_evr:.2f}%. Gene EVR: {gene_evr:.2f} %.')
    # generate labels for training and testing (random 80/20 split over cells)
    labels = torch.LongTensor(cell2type['id'].tolist())
    train_mask = torch.zeros(num_cells, dtype=torch.bool)
    train_randidx = torch.randperm(num_cells)[:int(num_cells * 0.8)]
    # generate mask
    train_mask[train_randidx] = True
    test_mask = ~train_mask
    return num_cells, num_genes, graph, feat, labels, train_mask, test_mask
if __name__=='__main__':
    # NOTE(review): load_tissue() requires a params object exposing
    # `random_seed`/`dense_dim`; calling it bare raises AttributeError.
    load_tissue()
|
# Now let's look at Python's arithmetic operations.
# ' ** ' raises a number to a power (square, cube, ...).
s=3**2
print(s)
a=544
b=3
c=a//b # floor division: keeps only the integer part of the quotient (this is not rounding)
print(c)
d=a%b # modulo: the remainder left over after division
print(d)
#!/usr/bin/python3
from ftplib import FTP
'''
ftp=FTP("linux.linuxidc.com")
ftp.login(user='www.linuxidc.com',passwd='www.linuxidc.com')
ftp.cwd('/2017年资料/1月/2日/在Ubuntu 14.04上Sublime Text无法输入中文的解决方法/')
def grabfile():
filename ="sublime-imfix-master.zip"
localfile=open(filename,'wb')
ftp.retrbinary('RETR '+filename, localfile.write,1024)
ftp.quit()
localfile.close()
def placefile():
filename= 'filename.txt'
ftp.storbinary('STOR '+filename,open(filename,"rb"))
ftp.quit()
grabfile()
print("over!")
'''
# Connect and fetch README.txt from the python.org FTP mirror.
# NOTE(review): these credentials look copy-pasted from the example above;
# public FTP servers normally expect anonymous login — confirm.
ftp = FTP("www.python.org")
ftp.login(user='www.linuxidc.com', passwd='www.linuxidc.com')
ftp.cwd('/ftp/')
def grabfile():
    """Download README.txt from the current FTP directory, then close the session."""
    filename = "README.txt"
    # FIX: the file handle was closed *after* ftp.quit() and leaked on error;
    # the context manager closes it first and on any exception.
    with open(filename, 'wb') as localfile:
        ftp.retrbinary('RETR ' + filename, localfile.write, 1024)
    ftp.quit()
def placefile():
    """Upload filename.txt to the current FTP directory, then close the session."""
    filename = 'filename.txt'
    # FIX: the original never closed the file it opened for upload.
    with open(filename, "rb") as localfile:
        ftp.storbinary('STOR ' + filename, localfile)
    ftp.quit()
grabfile()
print("over!")
|
# Author: ambiguoustexture
# Date: 2020-03-11
import pickle
from scipy import io
from sklearn.cluster import KMeans
# Cluster country vectors into 5 groups and print them sorted by cluster id.
file_t_index_dict = './stuffs_96/t_index_dict_countries'
file_matrix = './stuffs_96/matrix_countries'
# Load the country -> index mapping (handle bound to a distinct name so the
# pickle result does not shadow the file object).
with open(file_t_index_dict, 'rb') as fh:
    t_index_dict = pickle.load(fh)
matrix = io.loadmat(file_matrix)['matrix_countries']
predictions = KMeans(n_clusters=5).fit_predict(matrix)
pairs = sorted(zip(t_index_dict.keys(), predictions), key=lambda pair: pair[1])
for country, group in pairs:
    print('{}\t{}'.format(group, country))
|
#!/usr/bin/env python
# http://click.pocoo.org/6/commands/#group-invocation-without-command
import click
# FIX: with invoke_without_command=False the group refuses to run without a
# subcommand, so the `invoked_subcommand is None` branch below was dead code.
# The linked docs example uses True, which lets both branches execute.
@click.group(invoke_without_command=True)
@click.pass_context
def cli(ctx):
    """Group callback: reports whether a subcommand was supplied."""
    if ctx.invoked_subcommand is None:
        click.echo('I was invoked without subcommand')
    else:
        click.echo('I am about to invoke %s' % ctx.invoked_subcommand)
@cli.command()
def sync():
    """The demo subcommand."""
    click.echo('The subcommand')
if __name__ == '__main__':
    cli()
|
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
import functools
import inspect
import re
from abc import ABCMeta
from typing import TYPE_CHECKING, Any, Callable, ClassVar, Iterable, Sequence, TypeVar, cast
from pants.engine.env_vars import EnvironmentVars, EnvironmentVarsRequest
from pants.engine.internals.selectors import AwaitableConstraints, Get
from pants.engine.unions import UnionMembership, UnionRule, distinct_union_type_per_subclass
from pants.option.errors import OptionsError
from pants.option.option_types import OptionsInfo, collect_options_info
from pants.option.option_value_container import OptionValueContainer
from pants.option.options import Options
from pants.option.scope import Scope, ScopedOptions, ScopeInfo, normalize_scope
from pants.util.strutil import softwrap
if TYPE_CHECKING:
# Needed to avoid an import cycle.
from pants.core.util_rules.environments import EnvironmentTarget
from pants.engine.rules import Rule
_SubsystemT = TypeVar("_SubsystemT", bound="Subsystem")
class _SubsystemMeta(ABCMeta):
    """Metaclass to link inner `EnvironmentAware` class with the enclosing subsystem.

    For every Subsystem subclass that declares its own `EnvironmentAware`,
    this rewrites that inner class so it (a) derives from
    `Subsystem.EnvironmentAware` even if the author didn't say so, and
    (b) carries a `subsystem` back-reference to the enclosing class.
    """
    def __init__(self, name, bases, namespace, **k):
        super().__init__(name, bases, namespace, **k)
        if (
            # Skip the root `Subsystem` class itself (identified by name and
            # empty bases, since `Subsystem` isn't defined yet at this point).
            not (name == "Subsystem" and bases == ())
            and self.EnvironmentAware is not Subsystem.EnvironmentAware
        ):
            # Only `EnvironmentAware` subclasses should be linked to their enclosing scope
            if Subsystem.EnvironmentAware not in self.EnvironmentAware.__bases__:
                # Allow for `self.EnvironmentAware` to not need to explicitly derive from
                # `Subsystem.EnvironmentAware` (saving needless repetitive typing)
                self.EnvironmentAware = type(
                    "EnvironmentAware",
                    (
                        self.EnvironmentAware,
                        Subsystem.EnvironmentAware,
                        *self.EnvironmentAware.__bases__,
                    ),
                    {},
                )
            # Back-reference so an EnvironmentAware can find its options scope.
            self.EnvironmentAware.subsystem = self
class Subsystem(metaclass=_SubsystemMeta):
    """A separable piece of functionality that may be reused across multiple tasks or other code.
    Subsystems encapsulate the configuration and initialization of things like JVMs,
    Python interpreters, SCMs and so on.
    Set the `help` class property with a description, which will be used in `./pants help`. For the
    best rendering, use soft wrapping (e.g. implicit string concatenation) within paragraphs, but
    hard wrapping (`\n`) to separate distinct paragraphs and/or lists.
    """
    options_scope: str
    help: ClassVar[str | Callable[[], str]]
    # Subclasses may override these to specify a deprecated former name for this Subsystem's scope.
    # Option values can be read from the deprecated scope, but a deprecation warning will be issued.
    # The deprecation warning becomes an error at the given Pants version (which must therefore be
    # a valid semver).
    deprecated_options_scope: str | None = None
    deprecated_options_scope_removal_version: str | None = None
    _scope_name_re = re.compile(r"^(?:[a-z0-9_])+(?:-(?:[a-z0-9_])+)*$")
    # Memoized rules for this subsystem; populated lazily by `rules()`.
    _rules: ClassVar[Sequence[Rule] | None] = None
    class EnvironmentAware(metaclass=ABCMeta):
        """A separate container for options that may be redefined by the runtime environment.
        To define environment-aware options, create an inner class in the `Subsystem` called
        `EnvironmentAware`. Option fields share their scope with their enclosing `Subsystem`,
        and the values of fields will default to the values set through Pants' configuration.
        To consume environment-aware options, inject the `EnvironmentAware` inner class into
        your rule.
        Optionally, it is possible to specify environment variables that are required when
        post-processing raw values provided by users (e.g. `<PATH>` special strings) by specifying
        `env_vars_used_by_options`, and consuming `_options_env` in your post-processing property.
        These environment variables will be requested at construction time.
        """
        subsystem: ClassVar[type[Subsystem]]
        env_vars_used_by_options: ClassVar[tuple[str, ...]] = ()
        options: OptionValueContainer
        env_tgt: EnvironmentTarget
        _options_env: EnvironmentVars = EnvironmentVars()
        def __getattribute__(self, __name: str) -> Any:
            from pants.core.util_rules.environments import resolve_environment_sensitive_option
            # Will raise an `AttributeError` if the attribute is not defined.
            # MyPy should stop that from ever happening.
            default = super().__getattribute__(__name)
            # Check to see whether there's a definition of this attribute at the class level.
            # If it returns `default` then the attribute on the instance is the same object
            # as defined at the class, or the attribute does not exist on the class,
            # and we don't really need to go any further.
            v = getattr(type(self), __name, default)
            if v is default:
                return default
            # Resolving an attribute on the class object will return the underlying descriptor.
            # If the descriptor is an `OptionsInfo`, we can resolve it against the environment
            # target.
            if isinstance(v, OptionsInfo):
                # If the the value is not defined in the `EnvironmentTarget`, return the value
                # from the options system.
                override = resolve_environment_sensitive_option(v.flag_names[0], self)
                return override if override is not None else default
            # We should just return the default at this point.
            return default
        def _is_default(self, __name: str) -> bool:
            """Returns true if the value of the named option is unchanged from the default."""
            from pants.core.util_rules.environments import resolve_environment_sensitive_option
            v = getattr(type(self), __name)
            assert isinstance(v, OptionsInfo)
            return (
                # vars beginning with `_` are exposed as option names with the leading `_` stripped
                self.options.is_default(__name.lstrip("_"))
                and resolve_environment_sensitive_option(v.flag_names[0], self) is None
            )
    @classmethod
    def rules(cls: Any) -> Iterable[Rule]:
        # NB: This avoids using `memoized_classmethod` until its interaction with `mypy` can be improved.
        if cls._rules is None:
            from pants.core.util_rules.environments import add_option_fields_for
            from pants.engine.rules import Rule
            # nb. `rules` needs to be memoized so that repeated calls to add these rules
            # return exactly the same rule objects. As such, returning this generator
            # directly won't work, because the iterator needs to be replayable.
            def inner() -> Iterable[Rule]:
                yield cls._construct_subsystem_rule()
                if cls.EnvironmentAware is not Subsystem.EnvironmentAware:
                    yield cls._construct_env_aware_rule()
                    yield from (cast(Rule, i) for i in add_option_fields_for(cls.EnvironmentAware))
            cls._rules = tuple(inner())
        return cast("Sequence[Rule]", cls._rules)
    @distinct_union_type_per_subclass
    class PluginOption:
        pass
    @classmethod
    def register_plugin_options(cls, options_container: type) -> UnionRule:
        """Register additional options on the subsystem.
        In the `rules()` register.py entry-point, include `OtherSubsystem.register_plugin_options(<OptionsContainer>)`.
        `<OptionsContainer>` should be a type with option class attributes, similar to how they are
        defined for subsystems.
        This will register the option as a first-class citizen.
        Plugins can use this new option like any other.
        """
        return UnionRule(cls.PluginOption, options_container)
    @classmethod
    def _construct_subsystem_rule(cls) -> Rule:
        """Returns a `TaskRule` that will construct the target Subsystem."""
        # Global-level imports are conditional, we need to re-import here for runtime use
        from pants.engine.rules import TaskRule
        partial_construct_subsystem: Any = functools.partial(_construct_subsytem, cls)
        # NB: We must populate several dunder methods on the partial function because partial
        # functions do not have these defined by default and the engine uses these values to
        # visualize functions in error messages and the rule graph.
        snake_scope = normalize_scope(cls.options_scope)
        name = f"construct_scope_{snake_scope}"
        partial_construct_subsystem.__name__ = name
        partial_construct_subsystem.__module__ = cls.__module__
        partial_construct_subsystem.__doc__ = cls.help
        _, class_definition_lineno = inspect.getsourcelines(cls)
        partial_construct_subsystem.__line_number__ = class_definition_lineno
        return TaskRule(
            output_type=cls,
            input_selectors=(),
            input_gets=(
                AwaitableConstraints(
                    output_type=ScopedOptions, input_types=(Scope,), is_effect=False
                ),
            ),
            masked_types=(),
            func=partial_construct_subsystem,
            canonical_name=name,
        )
    @classmethod
    def _construct_env_aware_rule(cls) -> Rule:
        """Returns a `TaskRule` that will construct the target Subsystem.EnvironmentAware."""
        # Global-level imports are conditional, we need to re-import here for runtime use
        from pants.core.util_rules.environments import EnvironmentTarget
        from pants.engine.rules import TaskRule
        snake_scope = normalize_scope(cls.options_scope)
        name = f"construct_env_aware_scope_{snake_scope}"
        # placate the rule graph visualizer.
        @functools.wraps(_construct_env_aware)
        async def inner(*a, **k):
            return await _construct_env_aware(*a, **k)
        inner.__line_number__ = 0  # type: ignore[attr-defined]
        return TaskRule(
            output_type=cls.EnvironmentAware,
            input_selectors=(cls, EnvironmentTarget),
            input_gets=(
                AwaitableConstraints(
                    output_type=EnvironmentVars,
                    input_types=(EnvironmentVarsRequest,),
                    is_effect=False,
                ),
            ),
            masked_types=(),
            func=inner,
            canonical_name=name,
        )
    @classmethod
    def is_valid_scope_name(cls, s: str) -> bool:
        return s == "" or cls._scope_name_re.match(s) is not None
    @classmethod
    def validate_scope(cls) -> None:
        options_scope = getattr(cls, "options_scope", None)
        if options_scope is None:
            raise OptionsError(f"{cls.__name__} must set options_scope.")
        if not cls.is_valid_scope_name(options_scope):
            raise OptionsError(
                # FIX: this was a plain (non-f) string, so the literal text
                # "{options_scope}" was shown to the user instead of the
                # actual offending scope name.
                softwrap(
                    f"""
                    Options scope "{options_scope}" is not valid.
                    Replace in code with a new scope name consisting of only lower-case letters,
                    digits, underscores, and non-consecutive dashes.
                    """
                )
            )
    @classmethod
    def create_scope_info(cls, **scope_info_kwargs) -> ScopeInfo:
        """One place to create scope info, to allow subclasses to inject custom scope args."""
        return ScopeInfo(**scope_info_kwargs)
    @classmethod
    def get_scope_info(cls) -> ScopeInfo:
        """Returns a ScopeInfo instance representing this Subsystem's options scope."""
        cls.validate_scope()
        return cls.create_scope_info(scope=cls.options_scope, subsystem_cls=cls)
    @classmethod
    def register_options_on_scope(cls, options: Options, union_membership: UnionMembership):
        """Trigger registration of this Subsystem's options.
        Subclasses should not generally need to override this method.
        """
        register = options.registration_function_for_subsystem(cls)
        plugin_option_containers = union_membership.get(cls.PluginOption)
        for options_info in collect_options_info(cls):
            register(*options_info.flag_names, **options_info.flag_options)
        for options_info in collect_options_info(cls.EnvironmentAware):
            register(*options_info.flag_names, environment_aware=True, **options_info.flag_options)
        for options_info in (
            option
            for container in plugin_option_containers
            for option in collect_options_info(container)
        ):
            register(*options_info.flag_names, **options_info.flag_options)
        # NB: If the class defined `register_options` we should call it
        if "register_options" in cls.__dict__:
            cls.register_options(register)  # type: ignore[attr-defined]
    def __init__(self, options: OptionValueContainer) -> None:
        self.validate_scope()
        self.options = options
    def __eq__(self, other: Any) -> bool:
        if type(self) != type(other):
            return False
        return bool(self.options == other.options)
async def _construct_subsytem(subsystem_typ: type[_SubsystemT]) -> _SubsystemT:
    """Rule body: resolve the subsystem's scoped options and instantiate it.

    (Name typo "subsytem" kept: it is referenced by _construct_subsystem_rule.)
    """
    scoped_options = await Get(ScopedOptions, Scope(str(subsystem_typ.options_scope)))
    return subsystem_typ(scoped_options.options)
async def _construct_env_aware(
    subsystem_instance: _SubsystemT,
    env_tgt: EnvironmentTarget,
) -> Subsystem.EnvironmentAware:
    """Rule body: build the subsystem's EnvironmentAware companion instance.

    Copies the resolved options and environment target onto the instance and,
    when the class declares `env_vars_used_by_options`, eagerly fetches those
    environment variables into `_options_env`.
    """
    t: Subsystem.EnvironmentAware = type(subsystem_instance).EnvironmentAware()
    # `_SubSystemMeta` metaclass should ensure that `EnvironmentAware` actually subclasses
    # `EnvironmentAware`, but if an implementer does something egregious, it's best we
    # catch it.
    assert isinstance(t, Subsystem.EnvironmentAware)
    t.options = subsystem_instance.options
    t.env_tgt = env_tgt
    if t.env_vars_used_by_options:
        t._options_env = await Get(
            EnvironmentVars, EnvironmentVarsRequest(t.env_vars_used_by_options)
        )
    return t
|
import random
# 1: print 1..50 tab-separated, breaking the line after every multiple of 5
for i in range(1, 51):
    if i % 5 == 0:
        print(i)
    else:
        print(i, end='\t')
# 2: remove the spaces from a sentence
st = "Python basic program language"
st2 = ""
for i in range(0, len(st)):
    if st[i] == " ":
        continue
    else:
        st2 += st[i]
print(st2)
# 3: build a random 10-letter upper-case string
st = ""
for i in range(0, 10):
    # FIX: random.randint is inclusive at both ends, so the old upper bound
    # ord("Z")+1 could yield '[' instead of a letter.
    st += chr(random.randint(ord("A"), ord("Z")))
print(st)
# 4
st = ""
# NOTE(review): the results of the last three calls are discarded, and the
# +1 upper bounds overshoot by one for the same inclusive-randint reason.
a = random.randint(0, ord("Z") + 1)
random.randint(ord("A"), ord("Z") + 1)
random.randint(ord("a"), ord("z") + 1)
random.randint(0, 10)
|
# This is a hacky way of re-plotting graphs...
# (Python 2 script — note the statement-form `print` below.)
from plot import Plot
from bandit_algorithms import IncrementalUniformAlgorithm
from bandit_algorithms import UCBAlgorithm
from bandit_algorithms import EpsilonGreedyAlgorithm
from bandit import SBRDBandit
# load old plot
arm_params = [(1,1)] # dummy params
b = SBRDBandit(arm_params, 'custom_bandit')
num_pulls = 10001
num_trials = 1000
plot_sample_rate = 1
# Algorithm list only supplies names for the plot legend; the saved data is
# what actually gets drawn.
algorithms = [IncrementalUniformAlgorithm(b), UCBAlgorithm(b), EpsilonGreedyAlgorithm(b)]
plot = Plot(num_pulls, num_trials, [a.get_name() for a in algorithms], plot_sample_rate)
print "loading data..."
plot.load('custom_bandit_data.npz')
# new plot: re-render regret curves from the loaded data, truncated to the
# first `end_index` samples.
print "creating plots..."
sample_rate = 1
end_index = 501
plot.plot_cumulative_regret('new_'+b.get_name(), sample_rate, end_index)
plot.plot_simple_regret('new_'+b.get_name(), sample_rate, end_index)
|
from __future__ import print_function
import DecisionTree
import csv
import time
main_folder = "/Users/mengqizhou/Desktop/datamining"
folder1 = "/Users/mengqizhou/Desktop/datamining/datasplit_by_3_fold"
folder2 = "/Users/mengqizhou/Desktop/datamining/datasplit_by_5fold"
address = []
address.append(folder1)
address.append(folder2)
num_class = 119
num_feature = 456
#num_article = 8391
for folder in address:
feature_num_class = []
result = open(folder+"/results.txt","w")
#training_data = []
with open(main_folder+"/feature_num_of_classes.csv", 'rU') as f:
reader=csv.reader(f)
for row in reader:
feature_num_class.append(row)
f.close()
#with open(folder+"decision_tree_training_dataset.csv",'rb') as f:
# reader = csv.reader(f)
#for row in reader:
#training_data.append(row)
#f.close()
start_time = time.time()
root_nodes = []
decision_trees =[]
file=folder+"/decision_tree_training_dataset.csv"
for i in range(0,num_class):
feature_num = []
for number in feature_num_class[i]:
if number == '':
break
feature_num.append(int(number)+1)
dt = DecisionTree.DecisionTree( training_datafile = file,
csv_class_column_index = (1+num_feature+i),
csv_columns_for_features = feature_num,
entropy_threshold = 1,
max_depth_desired = 100,
symbolic_to_numeric_cardinality_threshold = 10,
)
dt.get_training_data()
dt.calculate_first_order_probabilities()
dt.calculate_class_priors()
root_node = dt.construct_decision_tree_classifier()
decision_trees.append(dt)
root_nodes.append(root_node)
modeling_time = time.time()-start_time
print("modeling_time = ",modeling_time," s", file = result)
print("number of decision tree created = ", len(decision_trees), file = result)
#print modeling_time
test_data = []
features = []
with open(main_folder +"/features_after_trim.csv", 'rb') as f:
reader = csv.reader(f)
for row in reader:
features.append(row)
f.close()
with open(folder + "/test_feature_vectors.csv", 'rb') as f:
reader = csv.reader(f)
for row in reader:
test_data.append(row)
f.close()
start_time = time.time()
i = 0
with open(folder+"/classified_binary_labels.csv",'wb') as f:
writer = csv.writer(f)
for article_feature in test_data:
binary_labels = []
for j in range(0,num_class):#feature_numbers in feature_num_class:
data = []
for number in feature_num_class[j]:
if number=='':
break
data.append(features[0][int(number)]+" = "+article_feature[int(number)])
prob_hash = decision_trees[j].classify(root_nodes[j],data)
for item in prob_hash:
if (item is not 'solution_path') and (prob_hash[item] >=0.5):
binary_label = item[-1:]
break
#print ("test = : ", (binary_label),file = result )
binary_labels.append(binary_label)
#print ("article number = ", i, " labels: ", binary_label, file = result)
writer.writerow(binary_labels)
i+=1
f.close()
classifying_time = time.time()-start_time
print("classifying_time = ",classifying_time," s", file = result)
true_binary_labels = []
classified_binary_labels = []
with open(folder+"/training_binary_class_labels.csv", 'rb') as f:
reader = csv.reader(f)
for row in reader:
true_binary_labels.append(row)
f.close()
with open(folder+"/classified_binary_labels.csv", 'rb') as f:
reader = csv.reader(f)
for row in reader:
classified_binary_labels.append(row)
f.close()
total = 0
correct = 0
num_article = len(classified_binary_labels)
for i in range(0,num_article):
if classified_binary_labels[i][0]=='':
break
for j in range(0,num_class):
total += 1
if (classified_binary_labels[i][j]==true_binary_labels[i][j]):
correct +=1
accuracy = float (correct)/total
print("accuracy = ",accuracy, file = result)
|
from math import *
import hdr_g
import cv2
# Load the 6 grayscale exposure brackets 00.png .. 05.png.
images = []
for i in range(6):
    image_r = "./data/0"+str(i)+".png"
    image = cv2.imread(image_r,0)
    images.append(image)
# FIX: the values were wrapped in int(), truncating e.g. log(1/1000) = -6.91
# down to -6 and collapsing distinct exposures — keep the exact log times.
# (Requires Python 3 true division: 1/1000 must not be integer division.)
log_exposure_times = [log(t) for t in (1/1000, 1/500, 1/250, 1/125, 1/64, 1/32)]
image_g = hdr_g.computeHDR(images, log_exposure_times, smoothing_lambda=100., gamma=0.8)
image_w = "./data/result.png"
cv2.imwrite(image_w,image_g)
import os
import sys
# this is for python 27, in py 3 this changed to tkinter i believe
import Tkinter as tkm
import ttk as ttkm
import tkFileDialog
import os
import threading
import time
from librf import arkivemanager
#-----------------------------------------------------------------------------------------------------------------------
#-------------------------------------------------------------------------------------------------- global synced state
# rf_mutex = threading.BoundedSemaphore(value=1)
# rf_mutex guards rf_mutex_next_job and rf_mutex_progress_pct, which are the
# only channel between the GUI (main) thread and the worker thread.
rf_mutex = threading.RLock()
# the next RFJob for the worker thread to pick up; None when no job is queued
rf_mutex_next_job = None
# rf_mutex_progress_pct can be a number between 0 and 100 to indicate percentage of current job done
# None to indicate no job is running right now
rf_mutex_progress_pct = None
#-----------------------------------------------------------------------------------------------------------------------
#-----------------------------------------------------------------------------------------------------------------------
# width is in chars
_ENTRY_BOX_DEFAULT_WIDTH = 60
# spinbox choices: step 1 up to 100, step 10 up to ~1040, then step 100
_REPLICA_COUNT_POSSIBLE_VALUES = tuple(range(2,100)) + tuple(range(100, 1040, 10)) + tuple(range(1050, 15100, 100))
# width is in chars
_REPLICA_COUNT_BOX_DEFAULT_WIDTH = 10
_PHYSICAL_LAYOUTS = ("Distributed", "Sequential", "Random")
# selectable block sizes, in bytes
_BLOCK_SIZES = (256, 512, 1024, 2048, 4096, 8192, 16384, 32768)
# progress bar refresh period, in milliseconds
_PROGRESS_BAR_UPDATE_PERIOD = 500
def _get_current_version():
version = None
try:
import librf._version as vm
version = vm.__version__
except:
pass
if not version:
version = 'unknown'
return version
def _progress_report_callback(pct_complete=None):
    """ Given a number between 0 and 100, indicating percentage of progress done so far,
    update the UIs progress state, if pct_complete was not supplied or was None do nothing. """
    #print "_progress_report_callback() called with: " + str(pct_complete)
    global rf_mutex
    global rf_mutex_progress_pct
    # validate before touching shared state: must be a number in [0, 100]
    if pct_complete is None:
        return
    if not isinstance(pct_complete, (int, float)):
        return
    if pct_complete < 0 or pct_complete > 100:
        return
    rf_mutex.acquire()
    rf_mutex_progress_pct = pct_complete
    rf_mutex.release()
def _worker_thread_entry():
    """Background worker loop: poll for a queued RFJob twice a second and run it.

    Runs forever (daemon thread). Jobs arrive through the rf_mutex-protected
    rf_mutex_next_job global; progress is reported back the same way.
    """
    print "worker thread entry point called. "
    global rf_mutex
    global rf_mutex_next_job
    global rf_mutex_progress_pct
    while True:
        # sleep for some time, argument is in seconds. (0.5 == half a second)
        time.sleep(0.5)
        tmp_job = None
        # take the pending job (if any) and clear the slot atomically
        rf_mutex.acquire()
        tmp_job = rf_mutex_next_job
        rf_mutex_next_job = None
        rf_mutex.release()
        # if tmp_job existed, process this job wait for it to finish (ok to block the worker thread) and then continue
        # if tmp job did not exist continue to the top of the loop right away
        if (tmp_job) and (isinstance(tmp_job, RFJob) ):
            print "worker thread found work"
            # UNPACK the job and call _make_arkive or _xtract_arkive
            if Action.CREATE == tmp_job.action:
                _make_arkive(src_filename=tmp_job.src, out_filename=tmp_job.dest, replica_count=tmp_job.replica_count)
            elif Action.XTRACT == tmp_job.action:
                _xtract_arkive(src_filename=tmp_job.src, out_directory=tmp_job.dest)
            # wait for librf to finish (that is make or xtract returns)
            # mark the job complete so the GUI re-enables the Go button
            rf_mutex.acquire()
            rf_mutex_progress_pct = 100
            rf_mutex.release()
def _make_arkive(src_filename, out_filename, replica_count):
    """ Create a new redundant arkive.

    src_filename -- path of the input file to make redundant
    out_filename -- path where the arkive will be written
    replica_count -- number of replicas to store in the arkive

    Blocks until librf finishes; progress is reported via
    _progress_report_callback.
    """
    print "------------------------------------------------------------------------------------------------------------"
    print "creating arkive, plz standby. "
    print "input file: " + str(src_filename)
    print "output file: " + str(out_filename)
    print "replica count: " + str(replica_count)
    #
    arkiver = arkivemanager.RFArkiver(replica_count=replica_count, progress_callback=_progress_report_callback)
    arkiver.redundantize_and_save(src_filename=src_filename, out_filename=out_filename)
    print "Done. librf arkiver returned."
def _xtract_arkive(src_filename, out_directory):
    """Recover the original data from an arkive.

    src_filename -- path of the arkive file
    out_directory -- directory where the recovered file will be written

    Blocks until librf finishes; progress is reported via
    _progress_report_callback.
    """
    print "------------------------------------------------------------------------------------------------------------"
    print "xtracting arkive, plz standby. "
    print "input file: " + str(src_filename)
    print "output folder: " + str(out_directory)
    xtractor = arkivemanager.RFUnarkiver( progress_callback=_progress_report_callback)
    xtractor.recover_and_save(src_filename=src_filename, out_directory=out_directory)
    print "Done. librf xtractor returned."
class RFJob(object):
    """A plain record (struct-like) describing one unit of work for the worker
    thread. `action` must be Action.CREATE or Action.XTRACT."""
    def __init__(self, action=None, src=None, dest=None, replica_count=None,
                 physical_layout=None, block_size=None):
        super(RFJob, self).__init__()
        # TODO more error checking here, also figure out how to take out asserts in the pyinstaller version
        assert (action == Action.CREATE) or (action == Action.XTRACT)
        self.action = action
        # path of the input file
        self.src = src
        # output file (CREATE) or output directory (XTRACT)
        self.dest = dest
        # CREATE-only options; None for XTRACT jobs
        self.replica_count = replica_count
        self.physical_layout = physical_layout
        self.block_size = block_size
class Action(object):
    """Enumerate the different modes of operation of the GUI."""
    CREATE = 1
    XTRACT = 2
class RedFileGui(object):
    """Tkinter front end for the redfile tool.

    Lets the user create a redundant arkive from a file, or recover the
    original data from an arkive. Long-running work is handed to the daemon
    worker thread through the rf_mutex-protected module globals; the progress
    bar polls that shared state periodically.
    """
    def __init__(self):
        """Start the worker thread and build the whole widget tree."""
        super(RedFileGui, self).__init__()
        # --------------------------------------------------------------------------------------------------------------
        # --------------------------------------------------------------------------------------------------------------
        # --------------------------------------------------------------------------------------------------------------
        # ----------------------------------------------------------------------------------- create the worker thread
        print "main thread will now create the worker thread"
        self.rf_worker_thread = threading.Thread(target=_worker_thread_entry)
        # daemon thread: dies automatically when the main thread exits
        self.rf_worker_thread.daemon = True
        self.rf_worker_thread.start()
        print "main thread has created worker thread successfully and started it."
        # --------------------------------------------------------------------------------------------------------------
        # --------------------------------------------------------------------------------------------------------------
        # --------------------------------------------------------------------------------------------------------------
        # ---------------------------------------------------------------------------------------
        self.version = _get_current_version()
        self.root = tkm.Tk()
        self.root.title('redfile ' + self.version)
        # 2x2 grid of groups, all cells resizable
        self.root.grid_rowconfigure(0, weight=1)
        self.root.grid_columnconfigure(0, weight=1)
        self.root.grid_rowconfigure(1, weight=1)
        self.root.grid_columnconfigure(1, weight=1)
        # --------------------------------------------------------------------------------------------------------------
        # --------------------------------------------------------------------------------------------------------------
        # --------------------------------------------------------------------------------------------------------------
        # --------------------------------------------------------------------------------------- choose xtract/create
        action_group = ttkm.LabelFrame(self.root, text="Action")
        # anchor is set to CENTER by default.
        #action_group.pack(padx=10, pady=10, anchor=ttkm.CENTER)
        action_group.grid(row=0, column=0, padx=10, pady=10)
        # w = ttkm.Entry(action_group)
        # w.grid()
        self.action_control_var = tkm.IntVar()
        self.last_action = None
        option1_text = "Recover original data from redundant file."
        option2_text = "Make a redundant file"
        xtract_radio_btn = ttkm.Radiobutton(action_group, text=option1_text, variable=self.action_control_var,
                                            value=Action.XTRACT, command=self.action_changed_callback)
        xtract_radio_btn.pack(anchor=tkm.W)
        create_radio_btn = ttkm.Radiobutton(action_group, text=option2_text, variable=self.action_control_var,
                                            value=Action.CREATE, command=self.action_changed_callback)
        create_radio_btn.pack(anchor=tkm.W)
        # --------------------------------------------------------------------------------------------------------------
        # --------------------------------------------------------------------------------------------------------------
        # --------------------------------------------------------------------------------------------------------------
        # ------------------------------------------------------------------------------------------ file path box
        file_names_group = ttkm.Frame(self.root)
        file_names_group.grid(row=0, column=1, padx=10, pady=10)
        self.source_file_control_var = tkm.StringVar()
        self.output_file_control_var = tkm.StringVar()
        self.autoname_checkbox_control_var = tkm.IntVar()
        src_label = ttkm.Label(file_names_group, text='Source file:')
        src_btn = ttkm.Button(file_names_group, text='Browse', command=self.browse_source_file_btn_clicked)
        src_entry = ttkm.Entry(file_names_group, textvariable=self.source_file_control_var, width=_ENTRY_BOX_DEFAULT_WIDTH)
        # output label text changes with the selected action (file vs folder)
        self.output_label_control_var = tkm.StringVar()
        self.output_label_control_var.set('Output: ')
        output_label = ttkm.Label(file_names_group, textvariable=self.output_label_control_var)
        self.browse_output_filename_btn = ttkm.Button(file_names_group, text='Browse', command=self.browse_output_file_btn_clicked)
        autoname_checkbox = ttkm.Checkbutton(file_names_group, var=self.autoname_checkbox_control_var,
                                             text='Auto name output', command=self.autoname_checkbox_clicked)
        self.output_filename_entry = ttkm.Entry(file_names_group, textvariable=self.output_file_control_var, width=_ENTRY_BOX_DEFAULT_WIDTH)
        src_label.grid(row=0, column=0, padx=5, pady=5, sticky=tkm.W)
        src_btn.grid(row=0, column=1, padx=5, pady=5, sticky=tkm.W)
        src_entry.grid(row=1, column=0, padx=5, pady=1, columnspan=3)
        output_label.grid(row=2, column=0, padx=5, pady=(20, 5), sticky=tkm.W)
        self.browse_output_filename_btn.grid(row=2, column=1, padx=5, pady=(20, 5), sticky=tkm.W)
        autoname_checkbox.grid(row=2, column=2, padx=5, pady=(20, 5), sticky=tkm.W)
        self.output_filename_entry.grid(row=3, column=0, padx=5, pady=1, columnspan=3)
        file_names_group.grid_columnconfigure(0, weight=1)
        file_names_group.grid_columnconfigure(1, weight=1)
        file_names_group.grid_columnconfigure(2, weight=5)
        # --------------------------------------------------------------------------------------------------------------
        # --------------------------------------------------------------------------------------------------------------
        # --------------------------------------------------------------------------------------------------------------
        # -------------------------------------------------------------------------------------- new redfile options box
        hint_color = '#999'
        new_arkive_options_group = ttkm.Frame(self.root)
        new_arkive_options_group.grid(row=1, column=0, padx=10, pady=10)
        # --------- replica count widgets
        # sticky='w' or sticky=tkm.W to make it west aligned with its master
        replica_count_label = ttkm.Label(new_arkive_options_group, text='Replica count')
        self.replica_count_spinbox = tkm.Spinbox(new_arkive_options_group, values= _REPLICA_COUNT_POSSIBLE_VALUES,
                                                 width=_REPLICA_COUNT_BOX_DEFAULT_WIDTH)
        self.replica_count_spinbox.delete(0, tkm.END)
        self.replica_count_spinbox.insert(0, 4)
        self.replica_count_spinbox.configure(state='readonly')
        # u can pass css/html like color to foreground. i.e. #fff is white #ffffff is also white.
        replica_count_desc_label = ttkm.Label(new_arkive_options_group, text='Min: 2, recommended: 4 or more',
                                              foreground=hint_color)
        # --------- physical layout widgets
        phy_layout_desc = ttkm.Label(new_arkive_options_group, text='Default: distributed', foreground=hint_color)
        phy_layout_desc2 = ttkm.Label(new_arkive_options_group, text='Random is not recommended', foreground=hint_color)
        phy_layout = ttkm.Label(new_arkive_options_group, text='Physical layout')
        self.phy_layout_combo = ttkm.Combobox(new_arkive_options_group, values=_PHYSICAL_LAYOUTS)
        self.phy_layout_combo.delete(0, tkm.END)
        self.phy_layout_combo.insert(0, _PHYSICAL_LAYOUTS[0])
        self.phy_layout_combo.configure(state='readonly')
        # --------- block size widgets
        block_size_desc = ttkm.Label(new_arkive_options_group, text='Default: 512 bytes', foreground=hint_color)
        block_size_label = ttkm.Label(new_arkive_options_group, text='Block Size')
        self.block_size_combo = ttkm.Combobox(new_arkive_options_group, values= _BLOCK_SIZES)
        self.block_size_combo.delete(0, tkm.END)
        self.block_size_combo.insert(0, _BLOCK_SIZES[1])
        # --------- position them all
        replica_count_label.grid(row=0, column=0, padx=5, pady=2, sticky=tkm.W)
        self.replica_count_spinbox.grid(row=0, column=1, padx=5, pady=2, sticky=tkm.W)
        replica_count_desc_label.grid(row=1, column=0, padx=5, pady=2, columnspan=2, sticky=tkm.W)
        phy_layout.grid(row=2, column=0, padx=5, pady=(20, 5), sticky=tkm.W)
        self.phy_layout_combo.grid(row=2, column=1, padx=5, pady=(20, 5))
        phy_layout_desc.grid(row=3, column=0, padx=5, pady=(2, 1), columnspan=2, sticky=tkm.W)
        phy_layout_desc2.grid(row=4, column=0, padx=5, pady=(1, 5), columnspan=2, sticky=tkm.W)
        block_size_label.grid(row=5, column=0, padx=5, pady=5, sticky=tkm.W)
        self.block_size_combo.grid(row=5, column=1, padx=5, pady=5, sticky=tkm.W)
        block_size_desc.grid(row=6, column=0, padx=5, pady=5, columnspan=2, sticky=tkm.W)
        # options start disabled until the user picks the Create action
        self.replica_count_spinbox.configure(state=tkm.DISABLED)
        self.phy_layout_combo.configure(state=tkm.DISABLED)
        self.block_size_combo.configure(state=tkm.DISABLED)
        # --------------------------------------------------------------------------------------------------------------
        # --------------------------------------------------------------------------------------------------------------
        # --------------------------------------------------------------------------------------------------------------
        # ------------------------------------------------------------------------------------------ go box
        go_group = ttkm.Frame(self.root)
        go_group.grid(row=1, column=1, padx=10, pady=10)
        #ttkm.Label(go_group, text='progress bar coming soon :D')
        self.go_btn = ttkm.Button(go_group, text='Go', command=self.go_btn_clicked)
        self.progress_control_var = tkm.DoubleVar()
        self.progress_bar = ttkm.Progressbar(go_group, orient=tkm.HORIZONTAL, variable=self.progress_control_var,
                                             mode='determinate', maximum=100, length=250)
        self.progress_control_var.set(0)
        self.go_btn.grid(row=0, column=0, padx=5, pady=(5,10) )
        self.progress_bar.grid(row=1, column=0, padx=5, pady=(10,5) )
        # TODO remove this. should be done when some1 clicks go button.
        # or just schedule it to run once every while.
        self.progress_bar.after(ms=_PROGRESS_BAR_UPDATE_PERIOD, func=self.update_progress_bar)
    def update_progress_bar(self):
        """ Update the progress bar. Called by progress bar itself on the main loop to update itself. """
        global rf_mutex
        global rf_mutex_progress_pct
        # snapshot the shared progress value under the lock
        rf_mutex.acquire()
        temp_progress = rf_mutex_progress_pct
        rf_mutex.release()
        if None == temp_progress:
            # no job running: empty bar, Go button available
            self.progress_control_var.set(0)
            self.go_btn.configure(state=tkm.NORMAL)
        else:
            self.progress_control_var.set( temp_progress )
            if 100 == temp_progress:
                # job finished: re-enable the Go button
                self.go_btn.configure(state=tkm.NORMAL)
        # re-schedule another update
        self.progress_bar.after(ms=_PROGRESS_BAR_UPDATE_PERIOD, func=self.update_progress_bar)
    def run_main_loop(self):
        """Block in the Tk main loop until the window is closed."""
        self.root.mainloop()
    def action_changed_callback(self):
        """Radio button callback: enable/disable the create-only options and
        update the output label to match the newly selected action."""
        print "action radio button callback called"
        selected_action = self.action_control_var.get()
        if selected_action == Action.XTRACT:
            # create-only options make no sense when extracting
            self.replica_count_spinbox.configure(state=tkm.DISABLED)
            self.phy_layout_combo.configure(state=tkm.DISABLED)
            self.block_size_combo.configure(state=tkm.DISABLED)
            self.output_label_control_var.set('Output Folder:')
        elif selected_action == Action.CREATE:
            self.replica_count_spinbox.configure(state='readonly')
            self.phy_layout_combo.configure(state='readonly')
            self.block_size_combo.configure(state='readonly')
            self.output_label_control_var.set('Output File: ')
        if (selected_action == Action.XTRACT) and (selected_action != self.last_action):
            print "user switched to xtract"
        if (selected_action == Action.CREATE) and (selected_action != self.last_action):
            print "user switched to create"
        # remember last action.
        self.last_action = selected_action
        if self.autoname_checkbox_control_var.get():
            self._find_unused_output_name_if_possible()
    def go_btn_clicked(self):
        """Validate the inputs and queue an RFJob for the worker thread.

        NOTE(review): the early returns below leave the Go button disabled
        with no job queued (update_progress_bar only re-enables it when
        progress is None or 100) -- confirm this is the intended behavior.
        """
        self.go_btn.configure(state=tkm.DISABLED)
        global rf_mutex
        global rf_mutex_next_job
        global rf_mutex_progress_pct
        src = self.source_file_control_var.get()
        if None == self.last_action :
            # TODO maybe show the user why this go click is being ignored (i.e. ask them to choose action)
            print "last action does not exist returning."
            return
        if not os.path.isfile(src):
            print "src file does not exist returning."
            return
        # now figure out the output filename/dirname
        dest = self.output_file_control_var.get()
        if self.last_action == Action.CREATE:
            rc = None
            try:
                rc = int(self.replica_count_spinbox.get())
            except:
                pass
            block_size = None
            try:
                block_size = int(self.block_size_combo.get())
            except:
                pass
            phy_layout = self.phy_layout_combo.get()
            print "adding job to make a new arkive plz standby"
            print "replica count is: " + str(rc)
            print "src is: " + str(src)
            print "dest is: " + str(dest)
            print "physical layout is: " + str(self.phy_layout_combo.get())
            print "block size is: " + str(self.block_size_combo.get())
            # _make_arkive(src_filename=src_filename, out_filename=output_filename, replica_count=rc)
            # hand the job to the worker thread and reset progress
            rf_mutex.acquire()
            rf_mutex_next_job = RFJob(action=Action.CREATE, src=src, dest=dest, replica_count=rc,
                                      block_size=block_size, physical_layout=phy_layout)
            rf_mutex_progress_pct = 0
            rf_mutex.release()
        elif self.last_action == Action.XTRACT:
            print "adding job to recover original data from arkive plz standby"
            print "src filename is: " + str(src)
            print "output filename is: " + str(dest)
            #_xtract_arkive(src_filename=src_filename, out_filename=output_filename)
            rf_mutex.acquire()
            rf_mutex_next_job = RFJob(action=Action.XTRACT, src=src, dest=dest)
            rf_mutex_progress_pct = 0
            rf_mutex.release()
    def browse_source_file_btn_clicked(self):
        """Ask the user for the source file and remember the choice."""
        user_chosen_filename = tkFileDialog.askopenfilename()
        print "user chosen source filename: " + str(user_chosen_filename)
        self.source_file_control_var.set(user_chosen_filename)
        if self.autoname_checkbox_control_var.get():
            self._find_unused_output_name_if_possible()
    def browse_output_file_btn_clicked(self):
        """Ask the user for the output path: a file for CREATE, a directory otherwise."""
        user_chosen_path = None
        if self.last_action == Action.CREATE:
            user_chosen_path = tkFileDialog.asksaveasfilename()
            print "user chosen output filename: " + str(user_chosen_path)
        else:
            user_chosen_path = tkFileDialog.askdirectory()
            print "user chosen output directory: " + str(user_chosen_path)
        self.output_file_control_var.set(user_chosen_path)
    def autoname_checkbox_clicked(self):
        """Toggle between automatic and manual naming of the output path."""
        print "autoname toggled. "
        print "autoname contorl var: " + str(self.autoname_checkbox_control_var.get())
        # TODO scan the dir for existing files and add -1 -2 -21 -2221 if need be.
        if self.autoname_checkbox_control_var.get():
            # autoname on, disable the filename chooser
            self.output_filename_entry.configure(state=tkm.DISABLED)
            self.browse_output_filename_btn.configure(state=tkm.DISABLED)
            self._find_unused_output_name_if_possible()
        else:
            # autoname off, enable the filename chooser
            self.output_filename_entry.configure(state=tkm.NORMAL)
            self.browse_output_filename_btn.configure(state=tkm.NORMAL)
    def _find_unused_output_name_if_possible(self):
        """ Find an unused filename for output if possible and set the corresponding control variable. """
        if self.last_action == Action.XTRACT:
            # extraction output is the directory containing the source arkive
            src_filename = self.source_file_control_var.get()
            if not os.path.isfile(src_filename):
                print "src path is not a file. cant generate output name for it."
                return
            src_directory = os.path.dirname(src_filename)
            self.output_file_control_var.set(src_directory)
            return
        elif self.last_action == Action.CREATE:
            postfix = '.rff'
            src_filename = self.source_file_control_var.get()
            if not os.path.isfile(src_filename):
                print "src path is not a file. cant generate output name for it."
                return
            # try "<src>.rff" first, then "<src>-2.rff" .. "<src>-99.rff"
            candidate_name = src_filename + postfix
            if not os.path.isfile(candidate_name):
                self.output_file_control_var.set(candidate_name)
                return
            for i in range(2, 100):
                candidate_name = src_filename + '-' + str(i) + postfix
                if not os.path.isfile(candidate_name):
                    self.output_file_control_var.set(candidate_name)
                    return
        else:
            print "no action is selected yet. wont try to autoname anything atm."
            return
def _start_gui():
    """Build the GUI and block in the Tk main loop until the window closes."""
    gui = RedFileGui()
    gui.run_main_loop()
if __name__ == '__main__':
    _start_gui()
|
import math
def inicializaMatrizQuadrada(tamanho, matriz):
    """Append `tamanho` rows of `tamanho` math.inf entries to `matriz` in place."""
    for _ in range(tamanho):
        matriz.append([math.inf] * tamanho)
def printMatriz(matriz, tamanho):
    """Print the first `tamanho` rows of `matriz`, one row per line."""
    linha = 0
    while linha < tamanho:
        print(matriz[linha])
        linha += 1
def custoTotal(matriz, solucao):
    """Sum the edge costs along the route `solucao` using the cost matrix."""
    return sum(matriz[origem][destino]
               for origem, destino in zip(solucao, solucao[1:]))
def copiaLista(lista):
    """Return a shallow copy of `lista` (the manual index loop was just list())."""
    return list(lista)
def melhorConstrucao(matriz, maisProximo, maisAfastado, maisBarato):
    """Return (copy of the cheapest route, its cost) among the three constructions.

    Ties keep the earlier candidate (proximo, then afastado, then barato),
    matching the original strict '<' comparisons. Each cost is now computed
    once instead of twice per candidate.
    """
    melhor = maisProximo
    melhorCusto = custoTotal(matriz, maisProximo)
    for candidato in (maisAfastado, maisBarato):
        custo = custoTotal(matriz, candidato)
        if custo < melhorCusto:
            melhor, melhorCusto = candidato, custo
    return copiaLista(melhor), melhorCusto
def exibeTempo(tempo):
    """Format a duration in seconds as 'H h: M m: S s'.

    Uses integer divmod instead of repeated float division/truncation; the
    old float chain accumulated rounding error and could drop a second
    (e.g. 3661 s rendered as '1 h: 1 m: 0 s').
    """
    horas, resto = divmod(int(tempo), 3600)
    minutos, segundos = divmod(resto, 60)
    return "{} h: {} m: {} s".format(horas, minutos, segundos)
# coding=utf-8
import sys
from ualfred import Workflow3,notify
log = None
def main(wf):
    """Workflow entry point: parse the JSON city payload from argv and cache it."""
    import json
    # Get args from Workflow, already in normalized Unicode
    args = wf.args
    # NOTE(review): args[0] should already be a str, making str() a no-op -- confirm
    result = json.loads(str(args[0]))
    # persist the parsed city data for later workflow steps
    wf.store_data('cy-city', result)
    # Add an item to Alfred feedback
    # NOTE(review): assumes the payload is a sequence with at least 4 items -- confirm
    log.debug(result[3])
    print(result)
if __name__ == '__main__':
    # Create a global `Workflow` object
    wf = Workflow3()
    # Call your entry function via `Workflow.run()` to enable its helper
    # functions, like exception catching, ARGV normalization, magic
    # arguments etc.
    # bind the module-level logger before main() runs so log.debug works
    log = wf.logger
    sys.exit(wf.run(main))
|
from PIL import Image
import numpy
import scipy.signal
import matplotlib.pyplot as plt
def gaussian(dimension=5, sigma=1):
    """Build a normalized 2-D Gaussian kernel.

    Parameters
    ----------
    dimension : int
        Side length of the square kernel.
    sigma : float
        Standard deviation of the Gaussian.

    Returns
    -------
    numpy.ndarray
        ``dimension x dimension`` kernel whose entries sum to 1.
    """
    axis = numpy.linspace(-1 * (dimension - 1) / 2.0, (dimension - 1) / 2.0, dimension)
    grid_x, grid_y = numpy.meshgrid(axis, axis)
    weights = numpy.exp((-1 / 2.0) * (numpy.square(grid_x) + numpy.square(grid_y)) / numpy.square(sigma))
    return weights / numpy.sum(weights)
def box(dimension=5):
    """Build a normalized box (mean) filter kernel.

    Parameters
    ----------
    dimension : int
        Side length of the square kernel.

    Returns
    -------
    numpy.ndarray
        ``dimension x dimension`` array where every entry is 1/dimension**2.
    """
    return numpy.full((dimension, dimension), 1 / float(dimension ** 2))
# Retrieving images
dog = Image.open('dog.jpg').convert("L")
snoop_dog = Image.open('snoopdogg.jpg').convert("L")
# Making image dimensions consistent
# NOTE(review): size is hard-coded to match dog.jpg -- confirm if inputs change
snoop_dog = snoop_dog.resize((271, 361))
# Creating kernels
gaussian_kernel = gaussian(13, 4)
box_kernel = box(13)
# Low-pass filtering images
dog_low_pass = scipy.signal.convolve2d(dog, gaussian_kernel, boundary='symm', mode='same')
snoop_dog_low_pass = scipy.signal.convolve2d(snoop_dog, gaussian_kernel, boundary='symm', mode='same')
# Constructing low and high frequency portions of hybrid image
low_freq = dog_low_pass
high_freq = snoop_dog - snoop_dog_low_pass
# Creating hybrid image
# NOTE(review): hybrid values may fall outside [0, 255]; imshow's default
# normalization rescales them for display -- confirm acceptable
hybrid = low_freq + high_freq
# Displaying result
plt.subplot(1, 3, 1)
plt.imshow(dog, cmap='gray')
plt.title('Far View Component')
plt.axis('off')
plt.subplot(1, 3, 2)
plt.imshow(snoop_dog, cmap='gray')
plt.title('Close View Component')
plt.axis('off')
plt.subplot(1, 3, 3)
plt.imshow(hybrid, cmap='gray')
plt.title('Hybrid Image')
plt.axis('off')
plt.show()
# Printing kernel used
numpy.set_printoptions(precision=3)
print(f"\n\nKernel used:\n ", gaussian_kernel)
|
# Copyright 2021 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import rclpy
from rclpy.executors import ExternalShutdownException
from rclpy.node import Node
from rclpy.qos_overriding_options import QosCallbackResult
from rclpy.qos_overriding_options import QoSOverridingOptions
from std_msgs.msg import String
class Talker(Node):
    """Publishes an incrementing 'Hello World' string once per second on
    'qos_overrides_chatter', with QoS overrides enabled and validated by
    :meth:`qos_callback`."""

    def __init__(self):
        super().__init__('qos_overrides_talker')
        self.i = 0  # message counter
        self.pub = self.create_publisher(
            String, 'qos_overrides_chatter', 10,
            qos_overriding_options=QoSOverridingOptions.with_default_policies(
                callback=self.qos_callback,
                # entity_id='my_custom_id', # Use this if you want a custom qos override id.
            ))
        timer_period = 1.0  # seconds
        self.tmr = self.create_timer(timer_period, self.timer_callback)

    def timer_callback(self):
        """Publish the next numbered greeting and log it."""
        msg = String()
        msg.data = 'Hello World: {0}'.format(self.i)
        self.i += 1
        self.get_logger().info('Publishing: "{0}"'.format(msg.data))
        self.pub.publish(msg)

    def qos_callback(self, qos):
        """Accept an overridden QoS profile only when its depth is at most 10.

        The rejection reason now matches the actual ``<= 10`` check (the old
        text claimed 'less than 10').
        """
        result = QosCallbackResult()
        if qos.depth <= 10:
            result.successful = True
            return result
        result.successful = False
        result.reason = 'expected qos depth less than or equal to 10'
        return result
def main(args=None):
    """Initialize rclpy, spin the Talker node, and shut down cleanly."""
    rclpy.init(args=args)
    node = Talker()
    try:
        rclpy.spin(node)
    except (KeyboardInterrupt, ExternalShutdownException):
        # normal termination paths: Ctrl-C or external shutdown request
        pass
    finally:
        node.destroy_node()
        rclpy.try_shutdown()
    return 0
if __name__ == '__main__':
    sys.exit(main())
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pandas as pd
import tweepy
import csv
# Twitter API credentials (fill in before running)
consumer_key = ""
consumer_secret = ""
access_key = ""
access_secret = ""
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_key, access_secret)
# wait_on_rate_limit makes tweepy sleep through rate-limit windows instead of failing
api = tweepy.API(auth,wait_on_rate_limit=True)
def analyze_status(text):
    """Return 0 if the tweet text looks like a retweet ('RT' anywhere in the
    first three characters), otherwise 1."""
    return 0 if 'RT' in text[:3] else 1
def time_analyzer(time):
    """Print the creation time of a tweet.

    `time` here is just the timestamp argument (the stdlib `time` module is
    not imported in this script, so nothing is shadowed)."""
    print("the tweets was created in",time)
class MyStreamListener(tweepy.StreamListener):
    """Stream listener that records non-retweet statuses to the module-level csv writer."""
    def on_status(self, status):
        # analyze_status returns 0 for retweets, which are skipped
        if(analyze_status(status.text)==1):
            # NOTE(review): relies on the module-level `writer` created in the
            # with-block below being open while the stream runs
            writer.writerow([status.created_at,status.text,status.id])
            print(status.created_at,status.text)
    def on_error(self, status_code):
        # 420 == rate limited by Twitter
        if status_code == 420:
            # returning False in on_data disconnects the stream
            return False
# Open the output csv and stream tweets matching the tracked hashtag into it.
with open('UCL_match2.csv', 'w',newline='', encoding='utf-8') as tf:
    writer = csv.writer(tf, delimiter=',')
    writer.writerow(['created_at','Text','Tweet_ID' ])
    myStreamListener = MyStreamListener()
    myStream = tweepy.Stream(auth = api.auth, listener=myStreamListener)
    # filter() blocks until the stream disconnects
    myStream.filter(track=['#ليفربول_توتنهام'])
# NOTE(review): tf is already closed by the with-block; this close is a no-op
tf.close()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from time import sleep
__mtime__ = '2019/5/11'
from page.login_page import LoginPge
from selenium import webdriver
driver = webdriver.Firefox()
a = LoginPge(driver)
a.login()
# open the bug-creation page directly
driver.get("http://127.0.0.1:82/zentao/bug-create-1-0-moduleID=0.html")
# the page can load slowly and cause errors, so sleep here to wait a moment
sleep(3)
body = "步骤:" \
"结果" \
"期望结果"
# inject the bug description into the KindEditor iframe body via JavaScript
js='document.getElementsByClassName("ke-edit-iframe")[0].contentWindow.document.body.innerHTML=("%s")'% body
driver.execute_script(js)
|
# Given an array of integers, find the first missing positive integer in linear
# time and constant space. In other words, find the lowest positive integer that
# does not exist in the array. The array can contain duplicates and negative
# numbers as well. For example, the input [3, 4, -1, 1] should give 2. The input
# [1, 2, 0] should give 3.
# NOTE(review): the implementation below sorts and deduplicates first, so it is
# O(n log n) time / O(n) extra space rather than the linear-time constant-space
# goal stated above.
print("Enter a list of integers that can include postive and negative values incremented by 1 with atleast one postive missing postive ")
a=[int(x) for x in input().split()]
a.sort()
b=[]
x1=0
# b collects the sorted non-negative values without duplicates
for i in a:
    if i not in b and i>=0:
        b.append(i)
print(b)
# walk candidate values starting at the smallest non-negative entry; the first
# candidate that is smaller than the value at the cursor is the missing one
for j in range(b[0], len(b)+1):
    if x1==len(b)-1:
        print("All values are in order, nothing missing")
        break
    elif (b[x1]-j>0):
        print("The first missing number is", j)
        break
    else:
        x1=x1+1
        continue
print(x1)
|
import pymysql as db
import numpy as np
import os.path as op
import time
from sklearn.externals import joblib
from numba import jit
from sklearn.neighbors import KNeighborsClassifier
from sklearn.naive_bayes import GaussianNB, MultinomialNB, BernoulliNB
from sklearn.tree import DecisionTreeClassifier
from sklearn.linear_model import SGDClassifier
import lightgbm.sklearn as gbm
from sklearn.ensemble import RandomForestClassifier
# Utility module: shared helpers for the mall wifi-positioning scripts.
root_path = '../'  # base directory for cached data files
# Return the current time as a readable string.
def get_time():
    """Return the current local time formatted by time.asctime().

    time.asctime() already defaults to the current local time, so the old
    asctime(localtime(time())) chain was redundant.
    """
    return time.asctime()
# Normalize a wifi signal strength (dB) onto a 1-10 scale.
def normal(val):
    """Map a signal strength in dB to an integer score: 10 (strong, >= -10 dB)
    down to 1 (weak, <= -90 dB)."""
    if val >= -10:
        return 10
    if val <= -90:
        return 1
    return 10 - int(-val / 10)
# Open a new connection to the local MySQL database.
def get_db_conn():
    """Return a fresh pymysql connection to the `tianchi2` database.

    NOTE(review): credentials are hard-coded here; consider moving them to
    configuration or environment variables.
    """
    return db.connect(host='localhost', port=3306, user='root', passwd='199477', db='tianchi2')
# Fetch the list of all malls.
# @jit
def get_malls():
    """Return every distinct mall_id found in the shop_info table."""
    conn = get_db_conn()
    cur = conn.cursor()
    cur.execute('SELECT DISTINCT mall_id FROM shop_info')
    mall_ids = [row[0] for row in cur.fetchall()]
    cur.close()
    conn.close()
    return mall_ids
# Build the cache file path for a mall (fixed naming scheme).
def get_file(type, mall_id):
    """Return the cache path under data/ for `type` in ('data', 'tar');
    'no_data' for any other type."""
    if type == 'data':
        return root_path + 'data/' + mall_id + '_data'
    if type == 'tar':
        return root_path + 'data/' + mall_id + '_tar'
    return 'no_data'
# Build the cache file path for a mall's location-augmented data (fixed naming scheme).
def get_file_loc(type, mall_id):
    """Return the cache path under data_loc/ for `type` in ('data', 'tar');
    'no_data' for any other type."""
    if type == 'data':
        return root_path + 'data_loc/' + mall_id + '_data'
    if type == 'tar':
        return root_path + 'data_loc/' + mall_id + '_tar'
    return 'no_data'
# Fetch the data; if it is already cached on disk, read it directly, otherwise
# query the database and build it.
# This variant adds latitude/longitude information.
def get_data_loc(mall_id):
    """Return (matrix, tar) training data for `mall_id` with location columns.

    matrix rows are [lat, lon, wifi-strength vector...]; tar holds the shop
    ids. Results are cached as .npy files under data_loc/.
    """
    if op.exists(get_file_loc('data', mall_id) + '.npy') and op.exists(get_file_loc('tar', mall_id) + '.npy'):
        matrix = np.load(get_file_loc('data', mall_id) + '.npy')
        tar = np.load(get_file_loc('tar', mall_id) + '.npy')
    else:
        print('start to storage data with location of ',mall_id)
        conn = get_db_conn()
        cur = conn.cursor()
        # query all wifi ssids, sorted
        sql = 'SELECT DISTINCT wifi_ssid FROM {m} ORDER BY wifi_ssid'.format(m=mall_id)
        cur.execute(sql)
        wifi_ssids = [r[0] for r in cur.fetchall()]
        # template vector: one zeroed slot per known ssid
        vec_mod = [0 for x in range(0, len(wifi_ssids))]
        # print(wifi_ssids)
        # build the final matrix
        matrix = []
        matrix_loc = []
        weight_conn = 1.5  # weight applied when the wifi connection flag is true
        # matrix holds the wifi vectors, matrix_loc the lat/lon pairs; they are
        # merged at the end to form the full data set
        # build the answer (target) array
        tar = []
        # query all rows, ordered by data_id, wifi_ssid
        sql = 'SELECT row_id,wifi_ssid,wifi_db,shop_id,wifi_conn,latitude,longitude ' \
              'FROM {m} ORDER BY row_id,wifi_ssid'.format(m=mall_id)
        cur.execute(sql)
        r = cur.fetchone()
        data_id = r[0]
        vec = vec_mod[:]
        matrix_loc.append([int(float(r[5])),int(float(r[6]))])
        # vec.append(r[5])
        # vec.append(r[6])
        shop_id = r[3]
        vec[wifi_ssids.index(r[1])] = normal(r[2]) if r[4] == 'false' else int(weight_conn * normal(r[2]) )
        for r in cur.fetchall():
            if r[0] != data_id:
                # row id changed: flush the finished vector and start a new one
                matrix.append(vec)
                # NOTE(review): unlike the first row above, this append does not
                # cast lat/lon through int(float(...)) -- confirm intended
                matrix_loc.append([r[5],r[6]])
                tar.append(shop_id)
                data_id = r[0]
                vec = vec_mod[:]
                # vec.append(r[5])
                # vec.append(r[6])
            shop_id = r[3]
            vec[wifi_ssids.index(r[1])] = normal(r[2]) if r[4] == 'false' else int(weight_conn * normal(r[2]) )
        # flush the last pending vector
        matrix.append(vec)
        tar.append(shop_id)
        tar = np.array(tar)
        matrix = np.hstack([matrix_loc,matrix])
        # cache to disk and record the mall as stored
        np.save(get_file_loc('data', mall_id), matrix)
        np.save(get_file_loc('tar', mall_id), tar)
        sql = "INSERT INTO storaged_data SET mall_id='{m}' ON duplicate KEY UPDATE mall_id='{m}'".format(m=mall_id)
        cur.execute(sql)
        conn.commit()
        cur.close()
        conn.close()
        print(mall_id, ' is finished')
    return matrix, tar
# 获取数据,如果数据已经存储在文件中,直接读取文件,否则进行数据库查询并构建数据
# @jit
def get_data(mall_id):
    """Return (feature matrix, target array) for one mall.

    Feature columns are [day-of-week one-hot | hour one-hot | wifi signals].
    Results are cached on disk as .npy files; on a cache hit the database is
    never touched.
    """
    if op.exists(get_file('data', mall_id) + '.npy') and op.exists(get_file('tar', mall_id) + '.npy'):
        matrix = np.load(get_file('data', mall_id) + '.npy')
        tar = np.load(get_file('tar', mall_id) + '.npy')
    else:
        print('start to storage data of ',mall_id)
        conn = get_db_conn()
        cur = conn.cursor()
        # Fetch all wifi SSIDs, sorted, to fix the signal vector's column order
        sql = 'SELECT DISTINCT wifi_ssid FROM {m} ORDER BY wifi_ssid'.format(m = mall_id)
        cur.execute(sql)
        wifi_ssids = [r[0] for r in cur.fetchall()]
        vec_mod = [0 for x in range(0,len(wifi_ssids))]
        vec_mod_day = [0 for x in range(0,7)]
        vec_mod_hour = [0 for x in range(0,24)]
        # Build the three matrices: wifi signals, day-of-week, hour-of-day;
        # they are concatenated column-wise at the end
        matrix = []
        weight_conn = 1.5  # weight applied when the wifi connection flag is true
        matrix_day = []
        weight_day = 3  # e.g. [0, 0, 3, 0, 0, 0, 0]
        matrix_hour = []
        weight_hour = 3  # e.g. [0, ..., 3, ..., 0] over 24 slots
        # Target labels (one shop_id per visit)
        tar = []
        # Fetch all rows ordered by row_id, wifi_ssid so one visit's rows are contiguous
        sql = 'SELECT row_id,wifi_ssid,wifi_db,shop_id,wifi_conn,DAYOFWEEK(time_stamp),HOUR(time_stamp),MINUTE(time_stamp) FROM {m} ORDER BY row_id,wifi_ssid'.format(m = mall_id)
        cur.execute(sql)
        r = cur.fetchone()
        data_id = r[0]
        vec = vec_mod[:]
        vec_day = vec_mod_day[:]
        vec_day[ r[5] - 1 ] = weight_day
        vec_hour = vec_mod_hour[:]
        hour = (r[6]+1) if r[7]>=30 else r[6]  # round to the nearest hour, wrapping 24 -> 0
        vec_hour[0 if hour > 23 else hour] = weight_hour
        shop_id = r[3]
        vec[wifi_ssids.index(r[1])] = normal(r[2]) if r[4] == 'false' else weight_conn * normal(r[2])
        for r in cur.fetchall():
            if r[0] != data_id:
                # New visit: flush the vectors accumulated for the previous one
                matrix.append(vec)
                matrix_day.append(vec_day)
                matrix_hour.append(vec_hour)
                tar.append(shop_id)
                data_id = r[0]
                vec = vec_mod[:]
                vec_day = vec_mod_day[:]
                vec_day[r[5] - 1] = weight_day
                vec_hour = vec_mod_hour[:]
                hour = (r[6] + 1) if r[7] >= 30 else r[6]
                vec_hour[0 if hour > 23 else hour] = weight_hour
            shop_id = r[3]
            # BUGFIX: the original dropped the weight_conn factor inside this
            # loop, inconsistently with the first row above and with
            # get_data_loc(); connected-wifi readings lost their boost.
            vec[wifi_ssids.index(r[1])] = normal(r[2]) if r[4] == 'false' else weight_conn * normal(r[2])
        # Flush the last visit
        matrix.append(vec)
        matrix_day.append(vec_day)
        matrix_hour.append(vec_hour)
        tar.append(shop_id)
        matrix = np.hstack([matrix_day,matrix_hour,matrix])
        tar = np.array(tar)
        np.save(get_file('data', mall_id), matrix)
        np.save(get_file('tar', mall_id), tar)
        # Record that this mall's data has been materialised
        sql = "INSERT INTO storaged_data SET mall_id='{m}'".format(m=mall_id)
        cur.execute(sql)
        conn.commit()
        cur.close()
        conn.close()
        print(mall_id, ' is finished')
    return matrix,tar
# Path where a mall's xgboost model is stored and loaded from.
def get_model_path_xgb(mall_id):
    """Return the on-disk path of the xgboost model for *mall_id*."""
    return root_path + 'model/xgb_{}_model.m'.format(mall_id)
# Load a mall's xgboost model.
def get_model_xgb(mall_id):
    """Load the stored xgboost model for *mall_id*; return 0 when absent."""
    path = get_model_path_xgb(mall_id)
    if not op.exists(path):
        return 0
    return joblib.load(path)
# Path where a mall's random-forest model is stored and loaded from.
def get_model_path_rf(mall_id):
    """Return the RF model path; a few large malls are kept on the D: drive."""
    d_drive_malls = {'m_690', 'm_1293', 'm_1377', 'm_1920', 'm_2467', 'm_3005', 'm_3839', 'm_4079', 'm_4094', 'm_4422', 'm_5825', 'm_6337', 'm_7168', 'm_7800'}
    if mall_id in d_drive_malls:
        return 'D:/RF_1000_%s_model.m' % mall_id
    return root_path + 'model/RF_1000_%s_model.m' % mall_id
# Load a mall's random-forest model.
def get_model_rf(mall_id):
    """Load the stored random-forest model for *mall_id*; return 0 when absent."""
    path = get_model_path_rf(mall_id)
    if not op.exists(path):
        return 0
    return joblib.load(path)
def get_model(algorithm_name):
    """Map an algorithm name to a fresh, unfitted classifier instance.

    An unknown name prints an error and terminates the process, exactly
    like the original if/elif chain.
    """
    factories = {
        'knn_5': lambda: KNeighborsClassifier(n_neighbors=5),
        'RF_1000': lambda: RandomForestClassifier(n_estimators=1000, random_state=0, n_jobs=7),
        'DT': lambda: DecisionTreeClassifier(),
        'SGD': lambda: SGDClassifier(),
        'MNB': lambda: MultinomialNB(),
        'GBM': lambda: gbm.LGBMClassifier(),
        'GNB': lambda: GaussianNB(),
        'BNB': lambda: BernoulliNB(),
    }
    if algorithm_name not in factories:
        print('wrong input!')  # bad input aborts the process
        exit()
    return factories[algorithm_name]()
# Interactive coffee-shop ordering script: show the menu, collect orders,
# total them up, then validate an optional promotion code.
print("Welcome to the coffee shop. Check out beverages we offer today: ")
menu = {
    'espresso': {'price': 3, 'mililiters': 60, 'code': 1},
    'cappuccino': {'price': 8, 'mililiters': 180, 'code': 2},
    'latte_machiato': {'price': 15, 'mililiters': 200, 'code': 3},
    'tea': {'price': 5, 'mililiters': 250, 'code': 4}
}
# Display every beverage with its attributes.
for x, y in menu.items():
    print(x)
    for d in y:
        print(d + ':', y[d])

def _add_to_basket(code, prices, products):
    """Look up *code* in the menu and record the matching beverage."""
    for key in menu:
        if menu[key]['code'] == int(code):
            print("beverage was added to your basket")
            print(key)
            print("price: %s" % menu[key]['price'])
            prices.append(menu[key]['price'])
            products.append(key)

bar = int(input("enter code of the item you wish to order: "))
price = []
products = []
_add_to_basket(bar, price, products)
while True:
    choice = input("Would you like to buy another beverage? y/n ")
    if choice == 'y':
        bar = int(input("enter code of the item you wish to order: "))
        _add_to_basket(bar, price, products)
    if choice == 'n':
        # `total` replaces the original `sum` variable, which shadowed the
        # built-in; the built-in now does the summing.
        total = sum(price)
        print("Total cost: ", total)
        break
promocode = input("Please input your promotion code: ")
promocode_list = ["1234", "DISCOUNT", "IloveCoffee"]
if promocode in promocode_list:
    print("Discount was added")
else:
    print("Your promotion code is not valid")
    choice = input("Would you like to retype? y/n ")
    if choice == 'y':
        promocode_two = input("Please input your promotion code: ")
        # BUGFIX: the original re-checked the first `promocode` here, so a
        # corrected retyped code was never actually validated.
        if promocode_two in promocode_list:
            print("Discount was added")
        else:
            print("There's been an error")
    if choice == 'n':
        print("You will be redirected to checkout")
|
#!/usr/bin/env python
# coding: utf-8
# Copyright (c) Qotto, 2019
from .coffee_started import CoffeeStarted
__all__ = [
'CoffeeStarted'
]
|
# Score-sheet post-processor: read a whitespace-separated marks file, append a
# per-student total/average and a rank, add per-subject averages, replace
# failing scores, and write the combined table to score_all.txt.
# NOTE(review): the path relies on '\m' not being a string escape; a raw
# string (r'exercise\marks.txt') would be safer.
f = open('exercise\marks.txt',encoding='utf-8')
lines = f.readlines()
f.close()
# Per-student score collection
score_and_name = {}
score_collect = []
item = lines[0].split()
item.append("总分")
item.append("平均分")
#print(item) # header row
for line in lines[1:]:
    personal_record = line.split()
    element = personal_record[1:]
    sum = 0
    # NOTE(review): starting at index 1 skips element[0]; this assumes the
    # second column of each record is not a score — verify the file layout.
    for i in range(1,len(element)):# compute the total score
        sum += int(element[i])
    personal_record.append(sum) # append the total
    average = sum/int(len(element))
    personal_record.append(float('%.2f'%average))# append the average
    score_collect.append(personal_record)
# Re-sort by average score (last column) and prepend a rank column
def order(personal_record):
    # Sort key: the trailing average column.
    return personal_record[-1]
score_collect.sort(key = order, reverse=True)
for personal_record in score_collect:
    rank = score_collect.index(personal_record) + 1
    personal_record.insert(0,rank)
#print(score_collect)
# Per-subject averages
item_average = [0,'平均']
sum_average = 0
for j in range(2,len(element)+2):
    sum_course = 0
    for personal_record in score_collect:
        sum_course += int(personal_record[j])
    average = sum_course/len(score_collect)
    item_average.append(float('%.2f'%average))
    sum_average += average
average_all = sum_average/len(element)
item_average.append(float('%.2f' % average_all))
#print(item_average)
# Merge the header row and the averages row into the table
score_collect.insert(0,item_average)
score_collect.insert(0,item)
for n in score_collect[2:]: # convert the score strings back to numbers
    n[2:-2]=[int(x) for x in n[2:-2]]
#print(score_collect)
# Replace failing scores (below 60) with the "failed" marker
for n in score_collect[1:]:
    for j in range(2,len(n)):
        if n[j] < 60:
            n[j] = '不及格'
# Render every row as a space-joined string
# and write the result to a new file
score_all = []
for record in score_collect:
    new_record = " ".join('%s' % id for id in record)
    score_all.append(new_record)
    score_all.append('\n')
output = open('score_all.txt', 'w', encoding='utf-8')
output.writelines(score_all)
output.close()
|
# coding=utf-8
"""
Problem:
Given an integer array, rearrange its elements in place so that all odd
numbers come before all even numbers.
"""
def is_even(number):
    """Return True when *number* is even (its lowest bit is clear)."""
    return (number & 1) == 0
def record_odd_even(nums, func):
    """Partition *nums* in place so elements where func(x) is False come first.

    With func=is_even this moves all odd numbers before all even numbers.
    Returns *nums* in every case (the original returned it only for the
    trivial empty/single-element cases and None otherwise).
    """
    # Empty list or single element: nothing to reorder
    if not nums or len(nums) == 1:
        return nums
    begin = 0
    end = len(nums) - 1
    while begin < end:
        # Advance past elements already on the correct front side,
        # without running past `end`
        while begin < end and not func(nums[begin]):
            begin += 1
        # Retreat past elements already on the correct back side,
        # without running past `begin`
        while begin < end and func(nums[end]):
            end -= 1
        if begin != end:
            nums[begin], nums[end] = nums[end], nums[begin]
    return nums
if __name__ == '__main__':
    nums = [1, 2, 3, 4, 5, 0]
    record_odd_even(nums, is_even)
    # BUGFIX: `print nums` is a Python-2-only statement and a syntax error
    # under Python 3; the call form below works on both.
    print(nums)
|
import requests
from bs4 import BeautifulSoup
# Scrape The Hindu tag page: print the section name, each story headline,
# and its link, collecting story texts and links along the way.
url = 'https://www.thehindu.com/tag/1142-1138-1073/'
resp = requests.get(url).content
soup = BeautifulSoup(resp, 'html.parser')
e_news = soup.find('span', class_='fts-menu')
print(e_news.text)
link = []
storys = []
story_card = soup.find_all('div', class_='story-card')
k = 1
for story in story_card:
    a_tag = story.find('a')
    h3_tag = story.find('h3')
    # BUGFIX: list.append returns None, so the original
    # `st_cards = storys.append(...)` / `tag_link = link.append(...)`
    # bindings were meaningless; append is called for its side effect only.
    storys.append(story.text)
    print(k, h3_tag.text)
    a_link = a_tag.get('href')
    link.append(a_link)
    print('see more at :', a_link)
    k += 1
    print(18 * '-------------')
|
import html.parser
import os
import time
import urllib
import urllib.request

from bs4 import BeautifulSoup
from selenium import webdriver
def execute_times(times, driver):
    """Scroll to the page bottom *times* times, clicking "load more" when present.

    Best-effort: a missing "load more" button is expected and skipped.
    """
    for i in range(times):
        # Scroll to the bottom of the browser window
        driver.execute_script("window.scrollTo(0, document.body.scrollHeight);")
        time.sleep(2)  # wait for the page to load
        try:
            # Click the "load more" button at the bottom of the page, if any
            driver.find_element_by_css_selector('button.QuestionMainAction').click()
            time.sleep(1)  # wait for the page to load
        except Exception:
            # BUGFIX: narrowed from a bare except so KeyboardInterrupt and
            # SystemExit still propagate; missing button is simply skipped.
            continue
def save_info(driver, name, url):
    """Scrape one Zhihu question page and save its raw HTML, <noscript>
    metadata, and every referenced image.

    Skips the question entirely (with a message) when its folder exists.
    """
    txt_path = "D:\\workspace\\Python\\Zhihu_Girls\\" + name + "\\"
    pic_path = "D:\\workspace\\Python\\Zhihu_Girls\\" + name + "\\pic\\"
    if not os.path.exists(txt_path):
        os.mkdir(txt_path)
    else:
        print("the folder has existed!")
        return
    if not os.path.exists(pic_path):
        os.mkdir(pic_path)
    driver.get(url)
    execute_times(10, driver)
    result_raw = driver.page_source
    result_soup = BeautifulSoup(result_raw, "html.parser")
    result_bf = result_soup.prettify()
    # Save the prettified page source (the with-block closes the file).
    with open(txt_path+name+".txt", 'w', encoding="utf8") as savetxt:
        savetxt.write(result_bf)
    print("save raw data successfully!")
    # Collect and unescape the <noscript> payloads (they carry image markup).
    with open(txt_path+"noscript_meta.txt", 'w', encoding="utf8") as savenoscript:
        noscript_nodes = result_soup.find_all("noscript")
        noscript_inner_all = ""
        for noscript in noscript_nodes:
            noscript_inner = noscript.get_text()
            noscript_inner_all += noscript_inner + "\n"
        # BUGFIX: HTMLParser.unescape() was deprecated and removed in
        # Python 3.9; html.unescape() is the supported replacement.
        noscript_all = html.unescape(noscript_inner_all)
        savenoscript.write(noscript_all)
    print("save noscript meta data successfully!")
    img_soup = BeautifulSoup(noscript_all, "html.parser")
    img_nodes = img_soup.find_all("img")
    # Record every image URL and download it, numbering files sequentially.
    with open(txt_path + "pic_url.txt", 'w', encoding="utf8") as savepic:
        cnt = 0
        for img in img_nodes:
            if img.get("src") is not None:
                img_url = img.get("src")
                line = str(cnt) + "\t" + img_url + "\n"
                savepic.write(line)
                urllib.request.urlretrieve(img_url, pic_path + str(cnt) + ".jpg")
                cnt += 1
    print("save url and pic successfully!")
def main():
    """Scrape and save every Zhihu question listed in the hard-coded dict."""
    driver = webdriver.Chrome()
    mydict = dict(你的日常搭配是什么样子="https://www.zhihu.com/question/35931586",
                  女生腿好看胸平是一种什么体验="https://www.zhihu.com/question/61235373",
                  腿长是一种什么体验="https://www.zhihu.com/question/28481779",
                  拍照时怎样摆姿势好看="https://www.zhihu.com/question/19671417",
                  女性胸部过大会有哪些困扰与不便="https://www.zhihu.com/question/20196263",
                  短发女孩要怎么拍照才性感="https://www.zhihu.com/question/46458423",
                  身材好是一种怎样的体验="https://www.zhihu.com/question/26037846")
    for Name, url in mydict.items():
        try:
            save_info(driver, Name, url)
        except Exception:
            # BUGFIX: narrowed from a bare except — best-effort scraping of
            # the remaining questions, without swallowing Ctrl-C/SystemExit.
            print("error")
            continue

if __name__ == "__main__":
    main()
|
from __future__ import unicode_literals
import re
from pyaib.plugins import keyword, observe, plugin_class
@plugin_class('karma')
class Karma(object):
    """pyaib plugin tracking per-entity karma in the bot's key/value store."""

    def __init__(self, ctx, config):
        self._db = ctx.db.get('karma')
        # BUGFIX: raw string — the original non-raw '\w'/'\+'/'\-' escapes
        # trigger invalid-escape warnings; '-' needs no escaping in a group.
        self._re = re.compile(r'^(\w+)(\+{2}|-{2})$')

    @staticmethod
    def is_set(value):
        # Pyaib's default value is apparently an empty dict.
        # Luckily, we know that in this case all values are ints.
        return type(value) is int

    @keyword('karma')
    @keyword.autohelp
    def get(self, ctx, msg, trigger, args, kwargs):
        '''[entity] :: !karma [entity] to show karma for an entity. Adjust karma by typing ${thing}++ or ${thing--}.'''
        if len(args) != 1:
            return msg.reply('I can only show karma for one (1) entity at a time!')
        db = self._db
        key = args[0]
        value = db.get(key).value
        if not Karma.is_set(value):
            return msg.reply('(karma for `%(key)s` not found)' % { 'key': key })
        msg.reply('%(key)s == %(value)d' % { 'key': key, 'value': value })

    @observe('IRC_MSG_PRIVMSG')
    def update(self, ctx, msg):
        # Watch every channel message for `thing++` / `thing--` and adjust.
        message = msg.message.strip()
        # BUGFIX: reuse the stripped message computed above instead of
        # recomputing it (the original left `message` unused).
        matches = self._re.match(message)
        if matches is None:
            return
        db = self._db
        key = matches.group(1)
        value = db.get(key).value
        direction = matches.group(2)
        if not Karma.is_set(value):
            value = 0
        if direction == '--':
            value -= 1
        elif direction == '++':
            value += 1
        db.set(key, value)
        msg.reply('%(key)s == %(value)d' % { 'key': key, 'value': value })
|
from newsletter.views import SubscribeView
from django.views.generic.base import TemplateView
from django.conf.urls import url
urlpatterns = [
    url(r'^$', SubscribeView.as_view(), name='subscribe'),
    url(
        r'^success/',
        TemplateView.as_view(template_name='success.html'),
        # NOTE(review): both routes share name='subscribe', so
        # reverse('subscribe') resolves only to the later entry — the success
        # page was probably meant to be name='success'; confirm against the
        # templates before renaming.
        name='subscribe'
    ),
]
|
from collections import defaultdict
import pandas as pd
import trueskill as ts
from typing import Iterable, Tuple
DATE_COLUMN = "Date"
def compute_rank(score1: float, score2: float) -> Tuple[float, float]:
    """Return TrueSkill ranks for two scores.

    Lower rank is better; a draw gives both teams rank 0.
    BUGFIX: return tuples to match the declared Tuple annotation
    (the original returned lists despite annotating Tuple).
    """
    if score1 > score2:
        return (0, 1)
    if score1 < score2:
        return (1, 0)
    return (0, 0)
def append(
    name: str, players: dict, team: Iterable[str], skills: Iterable[ts.Rating]
) -> Tuple[Iterable[str], Iterable[ts.Rating]]:
    """Return (team, skills) extended with *name* and its current rating.

    A missing name (None or "") leaves both sequences untouched.
    """
    if not name:
        return team, skills
    return team + [name], skills + [players[name]]
def update(players: dict, team: Iterable[str], ratings: Iterable[ts.Rating]):
    """Write each team member's new rating back into the *players* mapping."""
    for member, new_rating in zip(team, ratings):
        players[member] = new_rating
def main(games_file: str, ratings_file: str):
    """Replay all games chronologically, updating TrueSkill ratings, then
    dump every player's (Mu, Sigma) to *ratings_file* as CSV.

    NOTE(review): the iterrows unpacking assumes the games CSV columns are
    exactly (Date, player1, player2, score1, score2, player3, player4) —
    verify the schema before changing the file format.
    """
    # Load data.  Unknown players default to a fresh ts.Rating().
    players = defaultdict(lambda: ts.Rating())
    games = pd.read_csv(games_file)
    # Sort data based on date.
    games.sort_values(by=[DATE_COLUMN], inplace=True)
    # Empty player slots become "" so append() can skip them.
    games.fillna("", inplace=True)
    for _, (date, pl1, pl2, sc1, sc2, pl3, pl4) in games.iterrows():
        team1 = []
        skills1 = []
        team2 = []
        skills2 = []
        team1, skills1 = append(pl1, players, team1, skills1)
        team1, skills1 = append(pl2, players, team1, skills1)
        team2, skills2 = append(pl3, players, team2, skills2)
        team2, skills2 = append(pl4, players, team2, skills2)
        ranks = compute_rank(sc1, sc2)
        ratings1, ratings2 = ts.rate([skills1, skills2], ranks=ranks)
        update(players, team1, ratings1)
        update(players, team2, ratings2)
    # Save ratings.
    result = []
    for name, rating in players.items():
        result.append({"Name": name, "Mu": rating.mu, "Sigma": rating.sigma})
    df = pd.DataFrame(result)
    df.to_csv(ratings_file, index=False)
# Script entry point: read the games log, emit per-player TrueSkill ratings.
if __name__ == "__main__":
    main("data/games.csv", "data/results.csv")
|
__version__ = "0.1.0"
__author__ = "Sam Ireland"
from .matrix import Matrix
from .functions import create_vertex
|
from xml.etree import ElementTree as ET
def message(**kwargs):
    """Build a jabber:component:accept <message> stanza from keyword args.

    mfrom/mto/mtype become the from/to/type attributes, body becomes a
    child <body> element, and any other keyword is set verbatim.
    """
    elem = ET.Element("message")
    elem.set("xmlns", "jabber:component:accept")
    attr_names = {'mfrom': 'from', 'mto': 'to', 'mtype': 'type'}
    for key, val in kwargs.items():
        if key == 'body':
            child = ET.Element("body")
            child.text = val
            elem.append(child)
        else:
            elem.set(attr_names.get(key, key), val)
    return elem
def presence(**kwargs):
    """Build a jabber:component:accept <presence> stanza from keyword args.

    pfrom/pto/ptype become the from/to/type attributes; any other keyword
    is set verbatim.
    """
    elem = ET.Element("presence")
    elem.set("xmlns", "jabber:component:accept")
    attr_names = {'pfrom': 'from', 'pto': 'to', 'ptype': 'type'}
    for key, val in kwargs.items():
        elem.set(attr_names.get(key, key), val)
    return elem
def iq(**kwargs):
    """Build a jabber:component:accept <iq> stanza from keyword args.

    ifrom/ito/itype become the from/to/type attributes; any other keyword
    is set verbatim.
    """
    elem = ET.Element("iq")
    elem.set("xmlns", "jabber:component:accept")
    attr_names = {'ifrom': 'from', 'ito': 'to', 'itype': 'type'}
    for key, val in kwargs.items():
        elem.set(attr_names.get(key, key), val)
    return elem
|
import cv2
import os
import imutils
def record(recording):
    """Capture webcam video, annotate the (single) detected face with its
    predicted emotion, and write the annotated stream to an output movie.

    Runs until *recording* becomes falsy (user presses 'q').
    NOTE(review): paths to the cascade/model/output are machine-specific.
    """
    cap = cv2.VideoCapture(0)
    ret, frame = cap.read()
    frame = imutils.resize(frame, width=600)
    (height, width) = frame.shape[:2]
    path = "/Users/yenji/Desktop/Emotion-Detection"
    # NOTE(review): "CJPG" is not a standard FOURCC code — "MJPG" was likely
    # intended; confirm the VideoWriter actually opens on the target machine.
    fourcc = cv2.VideoWriter_fourcc("C","J","P","G")
    Output = cv2.VideoWriter(os.path.join(path, "Output" + "2" + ".mov"), fourcc, 20, (width, height))
    # Use pretrained face detection cascade classifier available with OpenCV
    faceCascade = cv2.CascadeClassifier("/Users/yenji/opencv/data/haarcascades/haarcascade_frontalface_default.xml")
    # Use fisher_face face detector that has been trained to detect emotions.
    fisher_face = cv2.face.FisherFaceRecognizer_create()
    fisher_face.read('/Users/yenji/Desktop/Emotion-Detection/emotion_detection_model_Haar(fisher).xml')
    emotions = ["neutral", "anger", "disgust", "fear", "happy", "sadness", "surprise"] # Removed Contempt
    while(recording == True):
        # Capture frame-by-frame
        ret, frame = cap.read()
        frame = imutils.resize(frame, width=600)
        gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
        face = faceCascade.detectMultiScale(gray)
        # Only annotate when exactly one face is detected
        if len(face) == 1:
            # Draw rectangle around face
            for (x, y, w, h) in face: # get coordinates and size of rectangle containing face
                cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 2)
                gray = gray[y:y + h, x:x + w] # Cut rectangle to face size
                gray = cv2.resize(gray, (350, 350))
                label, confidence = fisher_face.predict(gray) # Get current emotion in face
                cv2.putText(frame, emotions[label], (x, y),
                            cv2.FONT_HERSHEY_SIMPLEX, 0.65, (0, 255, 0),
                            1) # Put emotion found in face on rectangle containing face
        Output.write(frame)
        # Display the resulting frame
        #cv2.imshow("Frame", frame)
        key = cv2.waitKey(1) & 0xFF
        if key == ord('q'):
            recording = False
            break
    # When everything done, release the capture
    cap.release()
    Output.release()
    cv2.destroyAllWindows()
# Build-platform label -> canonical platform name.
platform_map = dict(Linux64="Linux")
# Reverse lookup: canonical platform name -> build-platform label.
arch_platform_map = {v: k for k, v in platform_map.items()}
|
#! /Users/jaeseoklee/Documents/Programming/Scraping/scraping/bin/python3
from bs4 import BeautifulSoup
from urllib.request import urlopen
import sys
import re
# Print every internal article link found in the page body.
html = urlopen("https://en.wikipedia.org/wiki/C_(programming_language)")
bsObj = BeautifulSoup(html, "html.parser")
# BUGFIX: the original pattern "^(/wiki/)(?!:)*$" quantified a zero-width
# lookahead, so it could only ever match the literal string "/wiki/".
# Match article links (paths under /wiki/ with no namespace colon) instead.
for link in bsObj.find("div", {"id": "bodyContent"}).findAll("a", {"href": re.compile(r"^(/wiki/)((?!:).)*$")}):
    if 'href' in link.attrs:
        print(link.attrs['href'])
|
from django import forms
from .custom_formfields import pdfFileUpload
class FileUploadform(forms.Form):
    """Upload form exposing a single multi-file PDF field.

    The input itself is hidden (style: display none) — presumably a styled
    label/button triggers it in the template; confirm against the template.
    """
    # file = forms.FileField(widget=forms.ClearableFileInput(attrs={'multiple': True,'class': 'browse-ip'}))
    # Custom field (pdfFileUpload) restricted to .pdf, allowing multiple files.
    file = pdfFileUpload(label="",widget=forms.ClearableFileInput(
        attrs={'style':'display: none;','accept':'.pdf','multiple': True})
    )
# -*- coding: utf-8 -*-
"""
Created on Mon May 13 12:45:35 2019
@author: Markus.Meister1
"""
def xlsx(fname, sheet, skip=0, header=0):
    """Read one worksheet of an .xlsx file into a list of {column-letter: value} dicts.

    Cell values stored via the shared-strings table are resolved to their text.
    Returns [] when *sheet* is not present in the workbook.
    NOTE(review): *skip* and *header* are accepted but currently unused.
    """
    import zipfile
    from xml.etree.ElementTree import iterparse
    import re
    z = zipfile.ZipFile(fname)
    # BUGFIX: default to an empty table so a workbook without
    # xl/sharedStrings.xml cannot hit an undefined `strings` name below.
    strings = []
    if 'xl/sharedStrings.xml' in z.namelist():
        # Get shared strings
        strings = [element.text for event, element
                   in iterparse(z.open('xl/sharedStrings.xml'))
                   if element.tag.endswith('}t')]
    # Map sheet display names to their numeric file ids.
    sheetdict = {element.attrib['name']: element.attrib['sheetId']
                 for event, element in iterparse(z.open('xl/workbook.xml'))
                 if element.tag.endswith('}sheet')}
    rows = []
    row = {}
    value = ''
    if sheet in sheetdict:
        sheetfile = 'xl/worksheets/sheet' + sheetdict[sheet] + '.xml'
        for event, element in iterparse(z.open(sheetfile)):
            # <v>/<t> hold the raw cell payload (or a shared-string index)
            if element.tag.endswith('}v') or element.tag.endswith('}t'):
                value = element.text
            # <c> closes a cell: resolve shared strings, file under the column letter
            if element.tag.endswith('}c'):
                if element.attrib.get('t') == 's':
                    value = strings[int(value)]
                # Strip the digits from the cell reference (e.g. "B12" -> "B")
                letter = re.sub(r'\d', '', element.attrib['r'])
                row[letter] = value
                value = ''
            # </row>: flush the accumulated cells as one record
            if element.tag.endswith('}row'):
                rows.append(row)
                row = {}
    return rows
def no_idea(read_line=input):
    """HackerRank "No Idea!": happiness is +1 for each of the first n array
    elements found in set A, -1 for each found in set B.

    read_line may be overridden (e.g. for tests) and defaults to the built-in
    input, so existing no_idea() callers are unaffected.
    """
    n, m = read_line().split()
    array = read_line().split()
    liked = set(read_line().split())
    disliked = set(read_line().split())
    # `happiness` replaces the original `sum`, which shadowed the built-in.
    happiness = 0
    for i in range(int(n)):
        if array[i] in liked:
            happiness += 1
        elif array[i] in disliked:
            happiness -= 1
    return happiness
print(no_idea()) |
'''
Created on Jan 18, 2016
@author: Andrei Padnevici
'''
from builtins import input

# Read a file named by the user and report its most frequent word.
name = input('Enter file:')
# BUGFIX: the original opened the file without ever closing it; the context
# manager guarantees the handle is released.
with open(name, 'r') as handle:
    text = handle.read()
words = text.split()
counts = dict()
for word in words:
    counts[word] = counts.get(word, 0) + 1
# Track the word with the highest count (None when the file is empty).
bigcount = None
bigword = None
for word, count in counts.items():
    if bigcount is None or count > bigcount:
        bigword = word
        bigcount = count
print(bigword, bigcount)
|
""" Handles all passlib crypto for the project """
from passlib.context import CryptContext
from os import urandom
# Single hashing context for the project: argon2 only, with automatic
# deprecation handling for any future scheme changes.
HASHER = CryptContext(schemes=["argon2"], deprecated="auto")
#hashedPass should be the hasher .hash string output
def verify_password(password : str, hashed_pass : str) -> bool:
    """Return True when *password* matches the stored argon2 *hashed_pass*."""
    return HASHER.verify(password, hashed_pass)
def hash_password(password : str) -> str:
    """Hash *password* with argon2 and return the full hash string."""
    hashed_pass = HASHER.hash(password)
    # NOTE(review): printing the salted hash leaks it to stdout/logs — confirm
    # this is debug-only before shipping.
    print('argon2 salted hash is: '+hashed_pass)
    return hashed_pass
|
from random import shuffle
def rota(rooms):
    """Return a 7-slot cleaning rota built from shuffled copies of *rooms*.

    Shuffles *rooms* in place (side effect preserved from the original).
    Raises ValueError when *rooms* is empty (zero range step), as before.
    """
    result = []
    # range(0, 7, len(rooms)) yields ceil(7/len(rooms)) iterations — enough
    # shuffled copies to fill 7 slots.
    # BUGFIX: xrange is Python-2-only and a NameError under Python 3.
    for _ in range(0, 7, len(rooms)):
        shuffle(rooms)
        result.extend(rooms)
    return result[:7]
|
import itertools
from dataclasses import dataclass
from typing import Union
@dataclass(frozen=True)
class Point:
    """Immutable 3-D lattice point."""
    xpos: int
    ypos: int
    zpos: int

    def __add__(self, rhs: 'Point') -> 'Point':
        """Component-wise sum of two points."""
        return Point(self.xpos + rhs.xpos, self.ypos + rhs.ypos, self.zpos + rhs.zpos)

    def get_neighbors(self) -> list['Point']:
        """Return the 26 points whose coordinates each differ by at most 1 (self excluded)."""
        deltas = [-1, 0, 1]
        return [self + Point(dx, dy, dz)
                for dx, dy, dz in itertools.product(deltas, repeat=3)
                if (dx, dy, dz) != (0, 0, 0)]
@dataclass(frozen=True)
class Point4D:
    """Immutable 4-D lattice point; all coordinates default to 0."""
    xpos: int = 0
    ypos: int = 0
    zpos: int = 0
    wpos: int = 0

    def __add__(self, rhs: 'Point4D') -> 'Point4D':
        """Component-wise sum of two 4-D points."""
        return Point4D(self.xpos + rhs.xpos, self.ypos + rhs.ypos,
                       self.zpos + rhs.zpos, self.wpos + rhs.wpos)

    def get_neighbors(self) -> list['Point4D']:
        """Return the 80 points whose coordinates each differ by at most 1 (self excluded)."""
        deltas = [-1, 0, 1]
        return [self + Point4D(dx, dy, dz, dw)
                for dx, dy, dz, dw in itertools.product(deltas, repeat=4)
                if (dx, dy, dz, dw) != (0, 0, 0, 0)]
PointType = Union[Point, Point4D]
class Grid():
    """Sparse Conway-cubes grid over 3-D or 4-D points.

    grid maps point -> '#' (active) / '.' (inactive); unlisted points are '.'.
    active_points always mirrors the set of '#' cells in grid.
    """
    def __init__(self, initial_data: list[str], point_type: type):
        self.grid: dict[PointType, str] = {}
        self.active_points: list[PointType] = []
        for y_pos, line in enumerate(initial_data):
            for x_pos, char in enumerate(line):
                self.grid[point_type(x_pos, y_pos, 0)] = char
                if char == '#':
                    self.active_points.append(point_type(x_pos, y_pos, 0))
    def transform(self):
        """Advance one cycle: active cells survive with 2-3 active neighbors,
        inactive cells activate with exactly 3."""
        # BUGFIX (two related defects in the original):
        #  1. active cells themselves were never re-checked (only their
        #     neighbors were), so an isolated '#' could survive forever;
        #  2. active_points was only appended to, never rebuilt, so stale and
        #     duplicate entries accumulated every cycle.
        points_to_check = set(self.active_points)
        points_to_check.update(itertools.chain.from_iterable(
            point.get_neighbors() for point in self.active_points
        ))
        new_grid = dict(self.grid)
        new_active: list[PointType] = []
        for point in points_to_check:
            nearby_active_states = [self.grid.get(n, '.') for n in point.get_neighbors()].count('#')
            char = self.grid.get(point, '.')
            if ((char == '#' and nearby_active_states in (2, 3)) or
                    (char == '.' and nearby_active_states == 3)):
                new_grid[point] = '#'
                new_active.append(point)
            else:
                new_grid[point] = '.'
        self.grid = new_grid
        self.active_points = new_active
    def get_number_of_active_states(self):
        """Count the '#' cells currently recorded in the grid."""
        return list(self.grid.values()).count('#')
def get_active_squares_after_n_cycles(data: list[str], num: int,
                                      point_type: type = Point) -> int:
    """Run *num* automaton cycles on *data* and report the active-cell count."""
    simulation = Grid(data, point_type)
    for _cycle in range(num):
        simulation.transform()
    return simulation.get_number_of_active_states()
# Puzzle input is read at import time; only the printing is guarded.
with open("input/input17.txt") as input_file:
    STARTING_DATA = input_file.read().strip().split("\n")
if __name__ == "__main__":
    # Part 1 (3-D) and part 2 (4-D), both after 6 cycles.
    print(get_active_squares_after_n_cycles(STARTING_DATA, 6))
    print(get_active_squares_after_n_cycles(STARTING_DATA, 6, Point4D))
|
# -*- coding: utf-8 -*-
"""Tests for the API views."""
from django.core.urlresolvers import reverse
from .base import TestCase
class TestViews(TestCase):
    """Smoke tests: each core page should respond with HTTP 200."""
    def test_home(self):
        # Root URL, requested by literal path rather than reverse().
        response = self.client.get('')
        self.assertEqual(response.status_code, 200)
    def test_view_feature(self):
        # NOTE(review): assumes a feature with id=1 exists in the test
        # fixtures/setup — confirm against the base TestCase.
        response = self.client.get(
            reverse('view_feature', kwargs={'feature_id': 1}))
        self.assertEqual(response.status_code, 200)
    def test_browse_app(self):
        response = self.client.get(reverse('browse'))
        self.assertEqual(response.status_code, 200)
|
import tensorflow as tf
import argparse
from data import mergeData
from model import multiTaskModel
import logging
import datetime
from sklearn.metrics import classification_report, accuracy_score, confusion_matrix
import os
import time
# configure the logger
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
# Date stamp (YYYYMMDD) recorded alongside each results row.
today = datetime.date.today().strftime("%Y%m%d")
def tuning_parameters(
    data_path,
    learning_rate=0.001,
    num_classes=4,
    max_epochs=10,
    display_step=100,
    batch_size=128,
    dropout_ratio=0.0,
    train_ratio=0.7,
    val_ratio=0.15,
    dense_units=128,
    lstm_units=64,
    lstm_num_layers=1,
    pos_weight=1.0
):
    """Train and evaluate the multi-task model once with the given
    hyper-parameters, log the metrics, and append one row to the results CSV.

    Uses the TensorFlow 1.x graph/session API (tf.reset_default_graph,
    tf.Session); not compatible with TF 2.x eager mode as written.
    """
    tf.reset_default_graph()
    with tf.Session() as sess:
        # Millisecond timestamp used to identify this training run in logs.
        model_timestamp = int(time.time() * 1000)
        logger.info("Model timestamp: {}".format(str(model_timestamp)))
        data = mergeData(
            path = data_path,
            num_classes = num_classes,
            batch_size = batch_size,
            train_ratio = train_ratio,
            val_ratio = val_ratio,
            display_step=100,
            useless_columns = ["stock_code", "time", "time_rank", "fin_rank", "news_rank", "time_rank_x", "time_rank_y"],
            target_news = "mood",
            target_fin = "st"
        )
        input_dimenion = data.input_dimension
        max_steps = data.max_steps
        multi_task_model = multiTaskModel(
            sess = sess,
            max_steps = max_steps,
            input_dimenion = input_dimenion,
            learning_rate=learning_rate,
            num_classes=num_classes,
            max_epochs=max_epochs,
            display_step=display_step,
            batch_size=batch_size,
            dropout_ratio=dropout_ratio,
            dense_units=dense_units,
            lstm_units = lstm_units,
            lstm_num_layers = lstm_num_layers,
            pos_weight = pos_weight
        )
        test_pred, test_y, test_loss, test_acc = multi_task_model.train(data)
        # evaluation metrics
        acc = accuracy_score(test_y, test_pred)
        logger.info("Accuracy: {}".format(acc))
        logger.info("Classification report: ")
        logger.info(classification_report(test_y, test_pred))
        logger.info("Confusion matrix: ")
        logger.info(confusion_matrix(test_y, test_pred))
        # specify results path
        # NOTE(review): these are absolute paths at the filesystem root
        # ("/results"); a project-relative path was probably intended —
        # os.mkdir("/results") will need root permissions on most systems.
        result_path = "/results/results_notes.csv"
        if not os.path.exists("/results"):
            os.mkdir("/results")
        if not os.path.exists(result_path):
            # First run: write the CSV header line
            with open(result_path, 'a') as file:
                file.write("input_dimenion,dense_units,max_num_steps,"
                           "learning_rate,num_classes,max_epochs,"
                           "batch_size,lstm_units,dropout_ratio,pos_weight,"
                           "accuracy,date")
                file.write("\n")
        # save results
        with open(result_path, 'a') as file:
            file.write(",".join([str(i) for i in [input_dimenion, dense_units,max_steps,
                                                  learning_rate, num_classes, max_epochs,
                                                  batch_size, lstm_units, dropout_ratio, pos_weight,
                                                  acc, today]]))
            file.write("\n")
if __name__ == "__main__":
    # Command-line front end: every hyper-parameter has a default, so all
    # flags are optional.
    parser = argparse.ArgumentParser()
    parser.add_argument("--learning_rate", help="learning rate", required=False, default=0.001, type=float)
    parser.add_argument("--num_classes", help="number of classes", required=False, default=4, type=int)
    parser.add_argument("--max_epochs", help="max epochs", required=False, default=10, type=int)
    parser.add_argument("--display_step", help="number of steps to display", required=False, default=100, type=int)
    parser.add_argument("--batch_size", help="batch size", required=False, default=64, type=int)
    parser.add_argument("--dropout_ratio", help="dropout ratio", required=False, default=0.0, type=float)
    parser.add_argument("--train_ratio", help="training ratio", required=False, default=0.7, type=float)
    parser.add_argument("--val_ratio", help="validation ratio", required=False, default=0.15, type=float)
    parser.add_argument("--data_path", help="data path", required=False, default="./Data/merge_fin_news_try.pkl", type=str)
    parser.add_argument("--dense_units", help="dense_units", required=False, default=128, type=int)
    parser.add_argument("--lstm_units", help="number of lstm units", required=False, default=64, type=int)
    parser.add_argument("--lstm_num_layers", help="number of lstm layers", required=False, default=1, type=int)
    parser.add_argument("--pos_weight", help="positive weights", required=False, default=1.0, type=float)
    args = parser.parse_args()
    # train and test
    tuning_parameters(
        data_path=args.data_path,
        learning_rate=args.learning_rate,
        num_classes=args.num_classes,
        max_epochs=args.max_epochs,
        display_step=args.display_step,
        batch_size=args.batch_size,
        dropout_ratio=args.dropout_ratio,
        train_ratio=args.train_ratio,
        val_ratio=args.val_ratio,
        dense_units=args.dense_units,
        lstm_units=args.lstm_units,
        lstm_num_layers=args.lstm_num_layers,
        pos_weight=args.pos_weight
    )
    logger.info("Finished.")
|
import json
import os
import database
from sqlalchemy import select
from database import Watcher, Price
from selenium import webdriver
from webdriver_manager.chrome import ChromeDriverManager
from webdriver_manager.utils import ChromeType
from selenium.webdriver.chrome.options import Options
from selenium.common.exceptions import NoSuchElementException
from datetime import date, datetime
from types import SimpleNamespace
# Config
# Resolve paths relative to this file so the script can be scheduled/run
# from any working directory.
project_path = os.path.dirname(__file__)
with open(os.path.join(project_path, 'config/config.json'), 'r') as c:
    config = json.load(c)
# Constants
# When True the Chrome driver runs without a visible window.
HEADLESS_DRIVER=config['use_headless_driver']
def main():
    """Fetch the current price for every stored watcher and persist them."""
    print('Setting up database and session')
    session = database.setup_db_session()
    print('DB and session setup complete')
    browser = setup_driver()
    all_watchers = session.query(Watcher).all()
    latest_prices = check_prices_for_watchers(browser, all_watchers)
    database.commit_prices(latest_prices, session)
def check_prices_for_watchers(driver, watchers):
    """Visit each watcher's URL and collect one Price row per scraped value.

    Watchers whose XPath yields nothing are reported and skipped.
    """
    collected = []
    for watcher in watchers:
        driver.get(watcher.url)
        element = get_element_by_xpath(driver, watcher.xpath)
        if not element:
            print("Couldn't find price object for product: [{}{}]".format(watcher.name, watcher.url))
            continue
        amount = float(format_price(element.text))
        collected.append(Price(watcher_id=watcher.id, price=amount, date_time=datetime.now()))
    return collected
# surrounding in try catch when we need to call from multiple places
def get_element_by_xpath(driver, xpath):
    """Return the first element matching *xpath*, or None (implicit) when absent."""
    try:
        return driver.find_element_by_xpath(xpath)
    except NoSuchElementException as err:
        # Swallow the miss: callers treat a falsy result as "price not found".
        print(err.msg)
# Helper functions
def format_price(price):
    """Remove leading/trailing pound signs from a scraped price string."""
    cleaned = price.strip('£')
    return cleaned
def setup_driver():
    """Create a Chrome driver (headless when configured), auto-installing
    a Chromium-flavoured chromedriver via webdriver_manager."""
    options = Options()
    if HEADLESS_DRIVER:
        options.headless = True
        options.add_argument("--window-size=1920,1200")
    # NOTE(review): webdriver_manager's `utils.ChromeType` import path and
    # Selenium's positional executable-path argument are deprecated in newer
    # releases — confirm the pinned versions before upgrading.
    driver = webdriver.Chrome(ChromeDriverManager(chrome_type=ChromeType.CHROMIUM).install(), options=options)
    return driver
# Run the full price check when executed as a script.
if __name__ == "__main__":
    main()
#!/usr/bin/python
"""
Copyright 1999 Illinois Institute of Technology
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL ILLINOIS INSTITUTE OF TECHNOLOGY BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Except as contained in this notice, the name of Illinois Institute
of Technology shall not be used in advertising or otherwise to promote
the sale, use or other dealings in this Software without prior written
authorization from Illinois Institute of Technology.
"""
import os
import sys
import unittest
from musclex import __version__
from musclex.ui.pyqt_utils import *
from musclex.utils.file_manager import getImgFiles
from musclex.utils.exception_handler import handlers
from musclex.tests.module_test import MuscleXTest
from musclex.tests.musclex_tester import MuscleXGlobalTester
from musclex.tests.environment_tester import EnvironmentTester
# Install the platform-specific crash handler when one is defined.
if sys.platform in handlers:
    sys.excepthook = handlers[sys.platform]
def main(arguments=None):
    """Command-line entry point for MuscleX.

    Dispatches on the argument list (``sys.argv`` when *arguments* is None):

    * ``musclex <prog>`` (exactly 2 args) launches one of the GUI programs
      (eq, qf, di, dc, ddf, pt, aise, aime, xv, gui) or runs a unittest
      suite (test_global, test_impl, test_env, test_gpu).
    * ``musclex eq|di|qf|pt -h ...`` (5+ args) runs the headless version of
      the corresponding program, parsing ``-i <file>`` / ``-f <folder>``
      (input), ``-s <settings.json>`` and ``-d`` (delete cache) flags.

    Any unrecognized form prints a usage message. Most paths terminate the
    process via ``sys.exit``.
    """
    # Raster image extensions accepted directly by the headless batch runners.
    in_types = ['.adsc', '.cbf', '.edf', '.fit2d', '.mar345', '.marccd', '.pilatus', '.tif', '.tiff', '.smv']
    # HDF5 container extensions; these are expanded via getImgFiles instead.
    h5_types = ['.h5', '.hdf5']
    if arguments is None:
        arguments = sys.argv
    run = True
    # ---- Two-argument form: GUI launchers and test suites -----------------
    if len(arguments) == 2:
        prog = arguments[1]
        if prog == 'eq':
            app = QApplication(sys.argv)
            from musclex.ui.EQStartWindow import EQStartWindow
            myapp = EQStartWindow() # Even if 'myapp' isn't used after, it is necessary for the windows to show on the screen
            sys.exit(app.exec_())
        elif prog == 'qf':
            app = QApplication(sys.argv)
            from musclex.ui.QuadrantFoldingGUI import QuadrantFoldingGUI
            myapp = QuadrantFoldingGUI()
            sys.exit(app.exec_())
        elif prog == 'di':
            app = QApplication(sys.argv)
            from musclex.ui.ScanningDiffractionGUI import \
                ScanningDiffractionGUI
            myapp = ScanningDiffractionGUI()
            sys.exit(app.exec_())
        elif prog == 'dc':
            # Diffraction Centroids is deprecated (see usage text below) but
            # still launchable.
            from musclex.ui.diffraction_centroids import \
                DiffractionCentroidStartWindow
            app = QApplication(sys.argv)
            myapp = DiffractionCentroidStartWindow()
            sys.exit(app.exec_())
        elif prog == 'ddf':
            from musclex.ui.ddf_processor import DDFWindow
            app = QApplication(sys.argv)
            myapp = DDFWindow()
            sys.exit(app.exec_())
        elif prog == 'pt':
            from musclex.ui.ProjectionTracesGUI import ProjectionTracesGUI
            app = QApplication(sys.argv)
            myapp = ProjectionTracesGUI()
            sys.exit(app.exec_())
        elif prog == 'aise':
            from musclex.ui.AddIntensitiesSingleExp import AddIntensitiesSingleExp
            app = QApplication(sys.argv)
            myapp = AddIntensitiesSingleExp()
            sys.exit(app.exec_())
        elif prog == 'aime':
            from musclex.ui.AddIntensitiesMultExp import AddIntensitiesMultExp
            app = QApplication(sys.argv)
            myapp = AddIntensitiesMultExp()
            sys.exit(app.exec_())
        elif prog == 'xv':
            from musclex.ui.XRayViewerGUI import XRayViewerGUI
            app = QApplication(sys.argv)
            myapp = XRayViewerGUI()
            sys.exit(app.exec_())
        elif prog == 'gui':
            from musclex.launcher import LauncherForm
            app = QApplication(sys.argv)
            myapp = LauncherForm.main()
            sys.exit(app.exec_())
        elif prog == 'test_global':
            # Headless end-to-end tests over the three detector formats.
            suite = unittest.TestSuite()
            suite.addTest(MuscleXGlobalTester("testHeadlessMarEquator"))
            suite.addTest(MuscleXGlobalTester("testHeadlessEigerEquator"))
            suite.addTest(MuscleXGlobalTester("testHeadlessPilatusEquator"))
            suite.addTest(MuscleXGlobalTester("testHeadlessMarQuadrantFolder"))
            suite.addTest(MuscleXGlobalTester("testHeadlessEigerQuadrantFolder"))
            suite.addTest(MuscleXGlobalTester("testHeadlessPilatusQuadrantFolder"))
            suite.addTest(MuscleXGlobalTester("testHeadlessMarDiffraction"))
            suite.addTest(MuscleXGlobalTester("testHeadlessEigerDiffraction"))
            suite.addTest(MuscleXGlobalTester("testHeadlessPilatusDiffraction"))
            suite.addTest(MuscleXGlobalTester("testHeadlessMarProjectionTraces"))
            suite.addTest(MuscleXGlobalTester("testHeadlessEigerProjectionTraces"))
            suite.addTest(MuscleXGlobalTester("testHeadlessPilatusProjectionTraces"))
            runner = unittest.TextTestRunner()
            runner.run(suite)
            sys.exit()
        elif prog == 'test_impl':
            # Detailed implementation tests; reference pickles are downloaded
            # next to this module first.
            from musclex.utils.zip_download import download_zip_pickles
            download_zip_pickles(os.path.dirname(__file__))
            suite = unittest.TestSuite()
            suite.addTest(MuscleXTest("testEquatorImage"))
            suite.addTest(MuscleXTest("testQuadrantFolder"))
            suite.addTest(MuscleXTest("testDiffractionCentroids"))
            suite.addTest(MuscleXTest("testProjectionTraces"))
            suite.addTest(MuscleXTest("testScanningDiffraction"))
            suite.addTest(MuscleXTest("testHDFRead"))
            suite.addTest(MuscleXTest("testOpenCLDevice"))
            suite.addTest(MuscleXTest("testGPUIntegratePyFAI"))
            runner = unittest.TextTestRunner()
            runner.run(suite)
            sys.exit()
        elif prog == 'test_env':
            suite = unittest.TestSuite()
            suite.addTest(EnvironmentTester("testEnvironment"))
            runner = unittest.TextTestRunner()
            runner.run(suite)
            sys.exit()
        elif prog == 'test_gpu':
            suite = unittest.TestSuite()
            suite.addTest(MuscleXTest("testOpenCLDevice"))
            suite.addTest(MuscleXTest("testGPUIntegratePyFAI"))
            runner = unittest.TextTestRunner()
            runner.run(suite)
            sys.exit()
        else:
            run = False
    # ---- Headless Equator: musclex eq -h -i|-f <input> [-s cfg.json] [-d] --
    elif len(arguments) >= 5 and arguments[1]=='eq' and arguments[2]=='-h':
        inputsetting=False
        delcache=False
        run=True
        i=3
        settingspath="empty"
        while i < len(arguments):
            if arguments[i]=='-s':
                inputsetting=True
                # len(...)>5 is the minimum for a "x.json" name.
                if i+1<len(arguments) and len(arguments[i+1])>5:
                    _, ext = os.path.splitext(str(arguments[i+1]))
                    if ext==".json" and os.path.isfile(arguments[i+1]):
                        i=i+1
                        settingspath=arguments[i]
                    else:
                        print("Please provide the right settings file")
                        run=False
            elif arguments[i]=='-d':
                delcache=True
            elif arguments[i]=='-i' or arguments[i]=='-f':
                i=i+1
                filename=arguments[i]
            else:
                run=False
                break
            i=i+1
        if run:
            # NOTE(review): if neither -i nor -f was supplied, 'filename' is
            # unbound here and this raises NameError — confirm intended.
            from musclex.headless.EQStartWindowh import EQStartWindowh
            EQStartWindowh(filename, inputsetting, delcache, settingspath)
            sys.exit()
    # ---- Headless Scanning Diffraction: musclex di -h ... ------------------
    elif len(arguments)>=5 and arguments[1]=='di' and arguments[2]=='-h':
        inputsetting=False
        delcache=False
        run=True
        i=3
        settingspath='empty'
        processFolder=False
        while i < len(arguments):
            if arguments[i]=='-s':
                inputsetting=True
                if i+1<len(arguments) and len(arguments[i+1])>5:
                    _, ext = os.path.splitext(str(arguments[i+1]))
                    if ext==".json" and os.path.isfile(arguments[i+1]):
                        i=i+1
                        settingspath=arguments[i]
                    else:
                        print("Please provide the right settings file")
                        run=False
            elif arguments[i]=='-d':
                delcache=True
            elif arguments[i]=='-i' or arguments[i]=='-f':
                if arguments[i]=='-f':
                    processFolder=True
                i=i+1
                fullfilename=arguments[i]
                is_hdf5 = os.path.splitext(fullfilename)[1] in h5_types
                if not processFolder and not is_hdf5:
                    # Single plain image: split into directory + file name.
                    filePath, fileName = os.path.split(fullfilename)
                else:
                    filePath=fullfilename
            else:
                run=False
                break
            i=i+1
        if run:
            # NOTE(review): 'is_hdf5'/'filePath'/'fileName' are only bound if
            # -i/-f was seen; otherwise this raises NameError — confirm.
            if not processFolder and not is_hdf5:
                from musclex.headless.DIImageWindowh import DIImageWindowh
                DIImageWindowh(str(fileName), str(filePath), inputsetting, delcache, settingspath)
                sys.exit()
            else:
                from musclex.headless.DIBatchWindowh import DIBatchWindowh
                DIBatchWindowh(str(filePath), inputsetting, delcache, settingspath)
                sys.exit()
    # ---- Headless Quadrant Folding: musclex qf -h ... ----------------------
    elif len(arguments) >= 5 and arguments[1]=='qf' and arguments[2]=='-h':
        inputsetting=False
        delcache=False
        run=True
        i=3
        settingspath="empty"
        while i < len(arguments):
            if arguments[i]=='-s':
                inputsetting=True
                if i+1<len(arguments) and len(arguments[i+1])>5:
                    _, ext = os.path.splitext(str(arguments[i+1]))
                    if ext==".json" and os.path.isfile(arguments[i+1]):
                        i=i+1
                        settingspath=arguments[i]
                    else:
                        print("Please provide the right settings file")
                        run=False
            elif arguments[i]=='-d':
                delcache=True
            elif arguments[i]=='-i' or arguments[i]=='-f':
                is_file = arguments[i]=='-i'
                i=i+1
                filename=arguments[i]
            else:
                run=False
                break
            i=i+1
        if run:
            from musclex.headless.QuadrantFoldingh import QuadrantFoldingh
            # NOTE(review): 'is_file'/'filename' are unbound if -i/-f was
            # never supplied — confirm intended.
            if is_file and os.path.splitext(str(filename))[1] not in h5_types:
                # Single plain image: process in this process.
                QuadrantFoldingh(filename, inputsetting, delcache, settingspath)
            else:
                # Folder or HDF5 input: fan out one worker process per image,
                # joining in batches of cpu_count() to bound concurrency.
                from multiprocessing import Lock, Process, cpu_count
                lock = Lock()
                procs = []
                imgList = os.listdir(filename) if not is_file else [filename]
                imgList.sort()
                for image in imgList:
                    file_name=os.path.join(filename,image) if not is_file else filename
                    if os.path.isfile(file_name):
                        _, ext = os.path.splitext(str(file_name))
                        if ext in in_types:
                            print("filename is", file_name)
                            # QuadrantFoldingh(file_name, inputsetting, delcache, settingspath)
                            proc = Process(target=QuadrantFoldingh, args=(file_name, inputsetting, delcache, settingspath, lock,))
                            procs.append(proc)
                            proc.start()
                        elif ext in h5_types:
                            # Expand the HDF5 container into its frames and
                            # spawn one worker per frame index.
                            hdir_path, himgList, _, hfileList, _ = getImgFiles(str(file_name), headless=True)
                            for ind in range(len(himgList)):
                                print("filename is", himgList[ind])
                                proc = Process(target=QuadrantFoldingh, args=(file_name, inputsetting, delcache, settingspath, lock, hdir_path, himgList, ind, hfileList, ext,))
                                procs.append(proc)
                                proc.start()
                                if len(procs) % cpu_count() == 0:
                                    for proc in procs:
                                        proc.join()
                                    procs = []
                    if len(procs) % cpu_count() == 0:
                        for proc in procs:
                            proc.join()
                        procs = []
                # Join any remaining workers before exiting.
                for proc in procs:
                    proc.join()
            sys.exit()
    # ---- Headless Projection Traces: musclex pt -h ... ---------------------
    # Mirrors the qf branch above, dispatching to ProjectionTracesh instead.
    elif len(arguments) >= 5 and arguments[1]=='pt' and arguments[2]=='-h':
        inputsetting=False
        delcache=False
        run=True
        i=3
        settingspath="empty"
        while i < len(arguments):
            if arguments[i]=='-s':
                inputsetting=True
                if i+1<len(arguments) and len(arguments[i+1])>5:
                    _, ext = os.path.splitext(str(arguments[i+1]))
                    if ext==".json" and os.path.isfile(arguments[i+1]):
                        i=i+1
                        settingspath=arguments[i]
                    else:
                        print("Please provide the right settings file")
                        run=False
            elif arguments[i]=='-d':
                delcache=True
            elif arguments[i]=='-i' or arguments[i]=='-f':
                is_file = arguments[i]=='-i'
                i=i+1
                filename=arguments[i]
            else:
                run=False
                break
            i=i+1
        if run:
            from musclex.headless.ProjectionTracesh import ProjectionTracesh
            if is_file and os.path.splitext(str(filename))[1] not in h5_types:
                ProjectionTracesh(filename, inputsetting, delcache, settingspath)
            else:
                from multiprocessing import Lock, Process, cpu_count
                lock = Lock()
                procs = []
                imgList = os.listdir(filename) if not is_file else [filename]
                imgList.sort()
                for image in imgList:
                    file_name=os.path.join(filename,image) if not is_file else filename
                    if os.path.isfile(file_name):
                        _, ext = os.path.splitext(str(file_name))
                        if ext in in_types:
                            print("filename is", file_name)
                            # QuadrantFoldingh(file_name, inputsetting, delcache, settingspath)
                            proc = Process(target=ProjectionTracesh, args=(file_name, inputsetting, delcache, settingspath, lock,))
                            procs.append(proc)
                            proc.start()
                        elif ext in h5_types:
                            hdir_path, himgList, _, hfileList, _ = getImgFiles(str(file_name), headless=True)
                            for ind in range(len(himgList)):
                                print("filename is", himgList[ind])
                                proc = Process(target=ProjectionTracesh, args=(file_name, inputsetting, delcache, settingspath, lock, hdir_path, himgList, ind, hfileList, ext,))
                                procs.append(proc)
                                proc.start()
                                if len(procs) % cpu_count() == 0:
                                    for proc in procs:
                                        proc.join()
                                    procs = []
                    if len(procs) % cpu_count() == 0:
                        for proc in procs:
                            proc.join()
                        procs = []
                for proc in procs:
                    proc.join()
            sys.exit()
    else:
        run = False
    # ---- Fallback: print version and usage ---------------------------------
    if not run:
        print("\nYou're using Muscle X version "+str(__version__))
        print("\nPlease specify the program shortcut that you want to run")
        print("")
        print(" $ musclex [program]")
        print("")
        print(" xv - X-Ray Viewer")
        print(" eq [<-h>] - Equator (-h for headless version)")
        print(" di [<-h>] - Scanning Diffraction (-h for headless version)")
        print(" qf [<-h>] - Quadrant Folding (-h for headless version)")
        print(" pt [<-h>] - Projection Traces (-h for headless version)")
        print(" ddf - DDF Processor")
        print(" aise - Add Intensities Single Experiment")
        print(" aime - Add Intensities Multiple Experiments")
        print(" dc - Diffraction Centroids (DEPRECATED)")
        print("")
        print(" gui - GUI Launcher")
        print(" test_global - Run Global Tests")
        print(" test_impl - Run Detailed Implementation Tests")
        print(" test_env - Run Environment Tests")
        print(" test_gpu - Run GPU Testing Module")
        print("")
        print("For example,")
        print("\t$ musclex eq")
        print("\t$ musclex eq -h -i test.tif -s config.json")
        print("")
        print("** Musclex headless arguments (works for eq, di, qf and pt):")
        print(" $ musclex eq|di|qf|pt -h -i|-f <file.tif|testfolder> [-s config.json] [-d] ")
        print("arguments:")
        print("-f <foldername> or -i <filename>")
        print("-d (optional) delete existing cache")
        print("-s (optional) <input setting file>")
        print("")
        print("Note: To generate the setting file, use the interactive muclex, set parameter in it, then select save the current settings. \nThis will create the necessary setting file. If a setting file is not provided, default settings will be used")
        print("Note: If a hdf file does not exist, the program will use the default file. You can generate a hdf step size file using the interactive version (set step size, click ok, the file will be automaticly saved)")
        print("")
        print("More details : https://musclex.readthedocs.io")
        print("Submit Feedback or issues : https://www.github.com/biocatiit/musclex/issues\n\n")
# Allow running this module directly as well as via the installed console
# script entry point.
if __name__ == "__main__":
    main(sys.argv)
|
from django.urls import path
# Fix: 'Charts_controllers' was imported twice on the original single-line
# import; the duplicate is removed and the imports are formatted one-per-line.
# (Module names 'Composer_contollers'/'Radio_contollers' are kept as spelled —
# renaming them would break the actual controller modules.)
from playlist.Controllers import (
    Album_controllers,
    Charts_controllers,
    Composer_contollers,
    Genre_controllers,
    Index_controllers,
    Music_controllers,
    Radio_contollers,
    registration_controller,
)

# URL routes for the playlist app: one list/add/edit/delete group per entity.
urlpatterns = [
    path('', Index_controllers.index, name='index'),
    # Composers
    path('composers/', Composer_contollers.list_composers, name='composers'),
    path('composer/add/', Composer_contollers.add_composer, name='add_composer'),
    path('composer/edit/<int:composer_id>', Composer_contollers.edit_composer, name='edit_composer'),
    path('composer/delete/<int:composer_id>', Composer_contollers.delete_composer, name='delete_composer'),
    # Music
    path('music/add/', Music_controllers.add_music, name='add_music'),
    path('music/edit/<int:music_id>', Music_controllers.edit_music, name='edit_music'),
    path('music/delete/<int:music_id>', Music_controllers.delete_music, name='delete_music'),
    # Radio
    path('radio/add/', Radio_contollers.add_radio, name='add_radio'),
    path('radio/edit/<int:radio_id>', Radio_contollers.edit_radio, name='edit_radio'),
    path('radio/delete/<int:radio_id>', Radio_contollers.delete_radio, name='delete_radio'),
    # Listing pages
    path('genres/', Genre_controllers.list_genres, name='genres'),
    path('music/', Music_controllers.list_musics, name='musics'),
    path('radio/', Radio_contollers.list_radio, name='radios'),
    path('albums/', Album_controllers.list_albums, name='albums'),
    # Charts
    path('charts/', Charts_controllers.list_Charts, name='charts'),
    path('charts/add/', Charts_controllers.add_Charts, name='add_charts'),
    path('charts/edit/<int:charts_id>', Charts_controllers.edit_Charts, name='edit_charts'),
    path('charts/delete/<int:charts_id>', Charts_controllers.delete_Charts, name='delete_charts'),
    # Albums
    path('album/add/', Album_controllers.add_album, name='add_album'),
    path('album/edit/<int:album_id>', Album_controllers.edit_album, name='edit_album'),
    path('album/delete/<int:album_id>', Album_controllers.delete_album, name='delete_album'),
    # Registration (no trailing slash in the original pattern — preserved)
    path('register', registration_controller.index, name='register'),
]
|
#!/usr/bin/env python3
"""Small demo maintaining a list of network protocol names."""

proto = ["ssh", "http", "https"]
print (proto)
print (proto[1])
# Bug fix: list.extend() iterates its argument, so extend("dns") appended the
# individual characters 'd', 'n', 's'. append() adds "dns" as one element
# (extend(["dns"]) would be equivalent).
proto.append("dns")
print(proto)
|
"""GPSTakip URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.conf import settings
from django.conf.urls.static import static
from django.urls import path,include
from django.contrib.auth import views
from users.forms import LoginForm
import users.urls as user_urls
import home.urls as home_urls
import iot.urls as iot_urls
urlpatterns = [
path('',views.login,{'template_name': 'users/login.html','authentication_form':LoginForm,
'redirect_authenticated_user': True},name='login'),
path('logout',views.logout,name='logout'),
path('home', include(home_urls)),
path('users/',include(user_urls)),
path('iot/',include(iot_urls))
]
urlpatterns += static(settings.STATIC_URL,document_root=settings.STATIC_ROOT)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.