# NOTE: this file is a concatenated dump of several unrelated Python scripts
# (dataset artifact); "|" separators and the original header were not code.
class Solution:
    def findDisappearedNumbers(self, nums: List[int]) -> List[int]:
        """Return every value in [1, len(nums)] that is missing from nums.

        Presence of a value v is recorded by negating nums[v - 1] in place;
        the slots that stay positive mark the missing values.
        O(n) time, O(1) extra space (the input list is mutated).
        """
        if not nums:
            return []
        for value in nums:
            slot = abs(value) - 1
            if nums[slot] > 0:
                nums[slot] = -nums[slot]
        return [idx + 1 for idx, value in enumerate(nums) if value > 0]
import urllib.request, re, time, random, winsound, webbrowser

# Page to poll, and the text whose appearance we are waiting for.
TARGET = "June"
URL = "https://store.htcvivecart.com/store/htcus/en_US/quickcart/ThemeID.40533800/OfferID.48383055501"
isFound = False
count = 0


def check():
    """Fetch URL once; on the first sighting of TARGET, beep and open a browser.

    Sets the module-level ``isFound`` flag so the browser tab is only
    opened once, while the audible beep repeats on every sighting.
    """
    # BUGFIX: `global` must precede any use of the name in the function;
    # the original declared it after `if isFound==False:`, a SyntaxError.
    global isFound
    # Close the HTTP connection when done (the original leaked it).
    with urllib.request.urlopen(URL) as f:
        source = f.read()
    res = re.match(".*" + TARGET, str(source), re.DOTALL)
    if res != None:
        print(TARGET + " found!")
        winsound.Beep(1000, 400)  # audible alert (Windows-only stdlib module)
        if isFound == False:
            isFound = True
            webbrowser.open(URL)
    else:
        print(TARGET + " not found." + str(count))


# Poll once per second forever.
while True:
    check()
    count += 1
    time.sleep(1)
#!/usr/bin/python
# -*- coding:utf-8 -*-
# This is a dictionary script
# Author: Eason
# Interactive Python 2 script: repeatedly prompts the user to either add a
# person record (name/age/department) to a dict or search it by name.
import json

# NOTE(review): `dict` shadows the builtin type; a single shared record dict
# is reused, so each add overwrites the previous entries.
dict = {}
flag = 'a'
tod = 'p'
di = 'n'
while flag == 'a' or tod == 'p':
    # Prompt (Chinese): choose (a) to add a name, (s) to search for a name.
    flag = raw_input("请输入选择项,(a)添加姓名,(s)查找姓名: ")
    if flag == 'a':
        # Prompt: enter name, age and department.
        print "请输入姓名、年龄和部门,谢谢。"
        dict ['姓名'] = raw_input("姓名: ")        # key: name
        dict['年龄'] = raw_input("年龄: ")         # key: age
        dict['部门'] = raw_input("所属部门:")      # key: department
        print "添加成功。"  # "added successfully"
        tod = raw_input("查看添加的字典,(p)查看:")  # (p) to view the dict
        if tod == 'p':
            # Pretty-print the record without escaping non-ASCII characters.
            dicts = json.dumps(dict,encoding='utf-8',ensure_ascii=False)
            print "该字典为:", dicts
        else:
            continue
    elif flag == 's':
        ch_word = raw_input("请输入查找的姓名:")  # name to search for
        # Searches the VALUES (the stored name is a value, not a key).
        for key in sorted(dict.values()):
            if str(ch_word) == key:
                dicts = json.dumps(dict, encoding='utf-8', ensure_ascii=False)
                print key, dicts
                break
        else:
            # NOTE(review): `di == 'n'` is a no-op comparison — presumably
            # an assignment (`di = 'n'`) was intended; verify intent.
            di == 'n'
            print "字典中不存在该姓名。"  # "name not found in the dictionary"
    else:
        print "输入出错,执行结束。"  # "invalid input, exiting"
        break
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
import django.core.validators
class Migration(migrations.Migration):
    """Initial migration creating the custom ``Member`` user model.

    Mirrors Django's AbstractBaseUser/PermissionsMixin field set (password,
    last_login, is_superuser, groups, user_permissions) plus email-based
    login and personal/company contact fields.
    """

    # Depends on django.contrib.auth so Group/Permission tables exist first.
    dependencies = [
        ('auth', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Member',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('password', models.CharField(max_length=128, verbose_name='password')),
                ('last_login', models.DateTimeField(default=django.utils.timezone.now, verbose_name='last login')),
                ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
                # Email is the unique login identifier (no username field).
                ('email', models.EmailField(unique=True, max_length=255, verbose_name='email address', db_index=True)),
                ('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
                ('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
                ('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
                ('first_name', models.CharField(max_length=200)),
                ('last_name', models.CharField(max_length=200)),
                ('phone_number', models.CharField(blank=True, max_length=15, validators=[django.core.validators.RegexValidator(regex=b'^\\+?1?\\d{9,15}$', message=b"Phone number must be entered in the format: '+999999999'. Up to 15 digits allowed.")])),
                ('street_address', models.CharField(max_length=128)),
                ('city', models.CharField(max_length=64)),
                ('zip', models.CharField(max_length=5)),
                ('date_updated', models.DateField()),
                # Employer / workplace contact details.
                ('company_name', models.CharField(max_length=200)),
                ('company_email_address', models.EmailField(max_length=255)),
                ('company_street_address', models.CharField(max_length=128)),
                ('company_city', models.CharField(max_length=64)),
                ('company_phone_number', models.CharField(blank=True, max_length=15, validators=[django.core.validators.RegexValidator(regex=b'^\\+?1?\\d{9,15}$', message=b"Phone number must be entered in the format: '+999999999'. Up to 15 digits allowed.")])),
                ('company_extension', models.CharField(max_length=5)),
                ('company_cubicle_or_mail_room', models.CharField(max_length=5)),
                ('groups', models.ManyToManyField(related_query_name='user', related_name='user_set', to='auth.Group', blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of his/her group.', verbose_name='groups')),
                ('user_permissions', models.ManyToManyField(related_query_name='user', related_name='user_set', to='auth.Permission', blank=True, help_text='Specific permissions for this user.', verbose_name='user permissions')),
            ],
            options={
                'verbose_name': 'Member account',
                'verbose_name_plural': 'Member accounts',
            },
            bases=(models.Model,),
        ),
    ]
|
# encoding:utf-8
__author__ = 'hanzhao'
import urllib
def run(msg):
    """Chat-bot command handler (Python 2): map a WoW-related text command
    to a URL (or a usage hint), returning None for unrecognized input.
    """
    if '<br/>' in msg:  # group-chat message: strip the "<sender><br/>" prefix
        # NOTE(review): raises ValueError if the message contains more than
        # one '<br/>' — presumably never happens upstream; verify.
        [FromUser,msg] = msg.split('<br/>')
    else:  # direct message: use as-is
        pass
    # "打开英雄榜" / ".英雄榜" -> open the armory landing page.
    if msg in ['打开英雄榜','.英雄榜']:
        return 'http://www.battlenet.com.cn/wow/zh/'
    # "英雄榜 <server> <character>" -> build an armory search / profile URL.
    if msg.startswith('英雄榜') and ' ' in msg:
        command = msg.split(' ')
        if len(command)==2 :
            if command[1]=='':
                return '请输入服务器以及角色名称\t 例如 英雄榜 塞拉摩 宿舍再见'
            else:
                # Single argument: free-text armory search.
                return 'http://www.battlenet.com.cn/wow/zh/search?q=' + urllib.quote(command[1])
        elif len(command)==3 :
            if command[2]=='':
                return '请输入服务器以及角色名称\t 例如 英雄榜 塞拉摩 宿舍再见'
            else:
                # Two arguments: direct server/character profile page.
                return 'http://www.battlenet.com.cn/wow/zh/character/'+urllib.quote(command[1])+'/'+urllib.quote(command[2])+'/simple'
        else:
            return '正确格式为 英雄榜+空格+服务器名+空格+角色名\n 例如<英雄榜 塞拉摩 宿舍再见>'
    elif msg in ['打开多玩魔兽']:
        return 'http://wow.duowan.com/'
    elif msg in ['打开NGA','打开nga','.nga']:
        return 'http://bbs.ngacn.cc/'
    elif msg in ['baidu','打开百度','.baidu']:
        return 'http://www.baidu.com/'
    else :
        # Unrecognized command: signal "no reply" to the caller.
        return None
#coding=utf-8
# CTF helper (Python 2): first ENCODES the flag into a self-similar bit
# string, then DECODES the known ciphertext back to the flag.

# Flag -> hex -> int -> binary string (without the '0b' prefix).
flag = bin(int('flag{0123456789abcdef}'.encode('hex'),16))[2:]
s='01' # or '10'
# Encoding: for each flag bit, append the last two bits of s either
# reversed (bit '1') or unchanged (bit '0').
for i in range(len(flag)):
    if flag[i]=='1':
        s+=s[-2:][::-1]
    else:
        s+=s[-2:]
print hex(int(s,2))[2:-1]
#6565659565569a99665959555956a6a55959596aa696a69aa69959aaa6569aa9655a9aa69a95656965656669
# Decoding: a flag bit is '1' exactly when consecutive bits s[2i-1], s[2i]
# differ... here recovered by comparing each even-indexed bit with its
# predecessor ('1' if equal, '0' otherwise).
r=""
tmp = 0
for i in xrange(len(s)/2):
    c = s[i*2]
    # NOTE: for i == 0 this compares s[0] with s[-1] (Python index wrap).
    if c == s[i*2 - 1]:
        r += '1'
    else:
        r += '0'
print hex(int(r,2))[2:-1].decode('hex')
#flag{0123456789abcdef}
|
"""
views has functions that are mapped to the urls in urls.py
"""
import datetime
import io
from collections import OrderedDict
import xlsxwriter
from fuzzywuzzy import fuzz
from django.core import serializers
from django.core.exceptions import ObjectDoesNotExist
from django.http import HttpResponseForbidden, HttpResponse
from django.contrib.auth import logout, authenticate, get_user_model
from django.contrib.auth.mixins import LoginRequiredMixin
from django.db.models import Q
from django.views import generic
from django.views.generic import TemplateView, View
from django.contrib.auth import update_session_auth_hash
from django.contrib.auth.forms import PasswordChangeForm
from django.shortcuts import render, redirect
from django.contrib import messages
from django.http import HttpResponseNotFound
from django.utils.dateparse import parse_datetime
from notifications.signals import notify
from notifications.models import Notification
from .forms import UpdateAdminProfileForm
from .models import Announcement, EventInviteeRelation, EventAttendeeRelation, Ally, StudentCategories, \
AllyStudentCategoryRelation, Event, AllyMentorRelation, AllyMenteeRelation
User = get_user_model()
def make_notification(request, notifications, user, msg, action_object=''):
    """
    Makes notifications based on the request, the users existing notifications, the recipient user, and the message.
    Limiting notificaions to 10 based on database usage concerns.
    @param notifications: notifications have recipient id = user.id
    @param request: request that came from the client
    @param user: user notification being sent to
    @param msg: message to send
    @action_object: django object (optional)
    """
    if notifications.exists():
        # Collect the user's existing announcement/event notifications,
        # deleting any duplicate for the same action_object first.
        announcements_and_events = []
        for notification in notifications:
            if notification.action_object:
                if notification.action_object == action_object:
                    notification.delete()
                elif notification.action_object._meta.verbose_name == 'event' or \
                        notification.action_object._meta.verbose_name == 'announcement':
                    announcements_and_events.append(notification)
        # Trim oldest entries until fewer than 10 remain (DB usage cap).
        length = len(announcements_and_events)
        while length >= 10:
            announcements_and_events[length - 1].delete()
            length -= 1
    # Send the new notification, attaching the action object when given.
    if action_object == '':
        notify.send(request.user, recipient=user, verb=msg)
    else:
        notify.send(request.user, recipient=user, verb=msg, action_object=action_object)
def login_success(request):
    """Send a freshly authenticated user to the dashboard matching their role.

    Staff land on the admin dashboard, other authenticated users on the
    ally dashboard, and anonymous visitors are bounced back home.
    """
    user = request.user
    if not user.is_authenticated:
        return redirect('sap:home')
    target = 'sap:sap-dashboard' if user.is_staff else 'sap:ally-dashboard'
    return redirect(target)
def logout_request(request):
    """
    Log the current user out and redirect them to the home page.
    """
    logout(request)
    return redirect('sap:home')
class AccessMixin(LoginRequiredMixin):
    """
    View mixin restricting access to logged-in STAFF users: anonymous users
    get the LoginRequiredMixin treatment, and authenticated non-staff users
    receive the standard no-permission response.
    """
    def dispatch(self, request, *args, **kwargs):
        # LoginRequiredMixin has already ensured authentication by the time
        # a non-staff check here can be meaningful.
        if not request.user.is_staff:
            return self.handle_no_permission()
        return super().dispatch(request, *args, **kwargs)
def add_mentor_relation(ally_id, mentor_id):
    """
    helper function for adding mentor relation
    NOTE(review): uses create() while add_mentee_relation uses
    get_or_create(); duplicates are therefore possible here — confirm
    whether that asymmetry is intentional.
    """
    AllyMentorRelation.objects.create(ally_id=ally_id,
                                      mentor_id=mentor_id)
def add_mentee_relation(ally_id, mentee_id):
    """
    helper function for adding mentee relation
    (idempotent: get_or_create avoids duplicate rows for the same pair)
    """
    AllyMenteeRelation.objects.get_or_create(ally_id=ally_id,
                                             mentee_id=mentee_id)
class ViewAllyProfileFromAdminDashboard(View):
    """
    Admin-facing view of a single ally's profile, including their mentor
    (if any) and their list of mentees.
    """
    @staticmethod
    def get(request, ally_username=''):
        """
        Render the ally profile page for ``ally_username``; 404s when the
        user or ally record does not exist.
        """
        try:
            req_user = User.objects.get(username=ally_username)
            ally = Ally.objects.get(user=req_user)
            # Resolve this ally's mentor; an empty list means "no mentor"
            # (the template presumably treats [] as falsy — verify).
            try:
                mentor = AllyMentorRelation.objects.get(ally_id=ally.id)
                mentor = Ally.objects.get(pk=mentor.mentor_id)
            except ObjectDoesNotExist:
                mentor = []
            # Collect all mentees attached to this ally.
            try:
                mentees_queryset = AllyMenteeRelation.objects.filter(ally_id=ally.id)
                mentees = []
                for mentee in mentees_queryset:
                    mentees.append(
                        Ally.objects.get(pk=mentee.mentee_id))
            except ObjectDoesNotExist:  # pragma: no cover
                mentees = []
            return render(request, 'sap/admin_ally_table/view_ally.html', {
                'ally': ally,
                'mentor': mentor,
                'mentees': mentees
            })
        except ObjectDoesNotExist:  # pragma: no cover
            # NOTE(review): this prints the exception CLASS, not an instance.
            print(ObjectDoesNotExist)
            return HttpResponseNotFound()
class CreateAnnouncement(AccessMixin, HttpResponse):
    """
    Create an announcement from POSTed title/description and notify every
    non-staff user about it.
    NOTE(review): inheriting from HttpResponse (rather than View) is
    unusual for a view class — confirm how this is wired in urls.py.
    """
    @classmethod
    def create_announcement(cls, request):
        """
        Persist a new Announcement (staff only) and fan out a notification
        to each non-staff user, then redirect to the admin dashboard.
        Non-staff callers get a 403.
        """
        notifications = Notification.objects.all()
        users = User.objects.all()
        if request.user.is_staff:
            post_dict = dict(request.POST)
            curr_user = request.user
            # request.POST values arrive as lists; take the first element.
            title = post_dict['title'][0]
            description = post_dict['desc'][0]
            announcement = Announcement.objects.create(
                username=curr_user.username,
                title=title,
                description=description,
                created_at=datetime.datetime.utcnow()
            )
            for user in users:
                if not user.is_staff:
                    # Per-recipient notification list, capped/deduped by
                    # make_notification.
                    user_notifications = notifications.filter(recipient=user.id)
                    msg = 'Announcement: ' + announcement.title
                    make_notification(request, user_notifications, user, msg, action_object=announcement)
            messages.success(request, 'Annoucement created successfully !!')
            return redirect('sap:sap-dashboard')
        return HttpResponseForbidden()
class DeleteAllyProfileFromAdminDashboard(AccessMixin, View):
    """
    Staff-only deletion of an ally: removes the Ally row, its User, and the
    first linked StudentCategories record.
    """
    def get(self, request):
        """Delete the user named in ?username=... and redirect to the
        dashboard; 404 when the user/ally does not exist."""
        username = request.GET['username']
        try:
            user = User.objects.get(username=username)
            ally = Ally.objects.get(user=user)
            ally_categories=AllyStudentCategoryRelation.objects.filter(ally_id=ally.id)
            # NOTE(review): assumes at least one category relation exists —
            # an ally with none would raise IndexError here, not 404.
            categories=StudentCategories.objects.filter(id=ally_categories[0].student_category_id)
            ally.delete()
            user.delete()
            categories[0].delete()
            messages.success(request, 'Successfully deleted the user ' + username)
            return redirect('sap:sap-dashboard')
        except ObjectDoesNotExist:
            return HttpResponseNotFound("")
class ChangeAdminPassword(View):
    """
    Change the password for admin
    """
    def get(self, request):
        """Render the change-password form for the current user."""
        form = PasswordChangeForm(request.user)
        return render(request, 'sap/change_password.html', {
            'form': form
        })

    def post(self, request):
        """Validate and apply the password change; on success keep the
        session alive and redirect back, otherwise re-render with errors."""
        form = PasswordChangeForm(request.user, request.POST)
        if form.is_valid():
            user = form.save()
            # Keep the user logged in after the password change.
            update_session_auth_hash(request, user)  # Important!
            messages.success(
                request, 'Password Updated Successfully !')
            return redirect('sap:change_password')
        messages.error(request, "Could not Update Password !")
        return render(request, 'sap/change_password.html', {
            'form': form
        })
class CalendarView(TemplateView):
    """
    Show calendar to allies so that they can signup for events
    """
    def get(self, request):
        """
        Render the calendar: staff see every event, allies only the events
        they were invited to. Invite/attendance counts are refreshed and
        persisted on each event before serializing.
        """
        if request.user.is_staff:
            role = "admin"
        else:
            role = "ally"
        events_list = []
        curr_user = request.user
        if not curr_user.is_staff:
            # Allies: only events they have an invite relation for.
            curr_ally = Ally.objects.get(user_id=curr_user.id)
            curr_events = EventInviteeRelation.objects.filter(ally_id=curr_ally.id)
            for event in curr_events:
                events_list.append(Event.objects.get(id=event.event_id))
        else:
            events_list = Event.objects.all()
        # Recompute and save per-event counters (write-on-read).
        for event in events_list:
            event.num_invited = EventInviteeRelation.objects.filter(event_id=event.id).count()
            event.num_attending = EventAttendeeRelation.objects.filter(event_id=event.id).count()
            event.save()
        events = serializers.serialize('json', events_list)
        return render(request, 'sap/calendar.html',
                      context={
                          "events": events,
                          "user": curr_user,
                          "role": role,
                      })
class EditAdminProfile(View):
    """
    Change the profile for admin
    """
    def get(self, request):
        """Render an empty profile-update form."""
        form = UpdateAdminProfileForm()
        return render(request, 'sap/profile.html', {
            'form': form
        })

    def post(self, request):
        """Update the current user's username/email when the new username is
        not already taken; otherwise re-render the form with an error.
        NOTE(review): reads form.data directly — form.is_valid() is never
        called, so no field validation runs; confirm this is intended."""
        curr_user = request.user
        form = UpdateAdminProfileForm(request.POST)
        new_username = form.data['username']
        new_email = form.data['email']
        if not User.objects.filter(username=new_username).exists():
            curr_user.username = new_username
            curr_user.email = new_email
            curr_user.save()
            messages.success(request, "Profile Updated !")
            return redirect('sap:sap-admin_profile')
        messages.error(
            request, "Could not Update Profile ! Username already exists")
        return render(request, 'sap/profile.html', {
            'form': form
        })
class Announcements(TemplateView):
    """List all announcements, newest first, for both admins and allies."""
    def get(self, request):
        """Render the announcements page with creation times pre-formatted
        for display."""
        announcments_list = Announcement.objects.order_by('-created_at')
        if request.user.is_staff:
            role = "admin"
        else:
            role = "ally"
        # Format timestamps on the in-memory objects only (no save()), so
        # nothing is written back to the database.
        for announcment in announcments_list:
            announcment.created_at = announcment.created_at.strftime(
                "%m/%d/%Y, %I:%M %p")
        return render(request, 'sap/announcements.html', {'announcments_list': announcments_list, 'role': role})
class AlliesListView(AccessMixin, TemplateView):
    """Staff dashboard listing active allies, with POSTable filters for area
    of research, undergrad year, student category, role, mentorship status,
    and (fuzzy-matched) major."""
    def get(self, request):
        """Renders the dashboard with the allies and categories as django template variables."""
        allies_list = Ally.objects.order_by('-id')
        # Template expects a {ally: bool} mapping; only active users shown.
        tmp = {}
        for ally in allies_list:
            if ally.user.is_active:
                tmp[ally] = False
        allies_list = tmp
        return render(request, 'sap/dashboard.html', {'allies_list': allies_list})

    def post(self, request):
        """Filters and returns allies based on selected criteria"""
        if request.POST.get("form_type") == 'filters':
            post_dict = dict(request.POST)
            # For each filter facet: if the facet was submitted, allies must
            # match it to stay; *_default=True means "facet absent", which
            # counts as an exclusion vote (an ally is dropped only when ALL
            # facets vote to exclude it — see the final condition).
            if 'stemGradCheckboxes' in post_dict:
                stemfields = post_dict['stemGradCheckboxes']
                exclude_from_aor_default = False
            else:
                exclude_from_aor_default = True
                stemfields = []
            if 'undergradYear' in post_dict:
                exclude_from_year_default = False
                undergrad_year = post_dict['undergradYear']
            else:
                exclude_from_year_default = True
                undergrad_year = []
            if 'idUnderGradCheckboxes' in post_dict:
                student_categories = post_dict['idUnderGradCheckboxes']
                exclude_from_sc_default = False
            else:
                exclude_from_sc_default = True
                student_categories = []
            if 'roleSelected' in post_dict:
                user_types = post_dict['roleSelected']
                exclude_from_ut_default = False
            else:
                exclude_from_ut_default = True
                user_types = []
            if 'mentorshipStatus' in post_dict:
                mentorship_status = post_dict['mentorshipStatus'][0]
                exclude_from_ms_default = False
            else:
                exclude_from_ms_default = True
                mentorship_status = []
            major = post_dict['major'][0]
            if major != '':
                exclude_from_major_default = False
            else:
                exclude_from_major_default = True
            allies_list = Ally.objects.order_by('-id')
            # Skip filtering entirely when no facet was submitted at all.
            if not (
                    exclude_from_year_default and exclude_from_aor_default and exclude_from_sc_default and exclude_from_ut_default
                    and exclude_from_ms_default and exclude_from_major_default):
                for ally in allies_list:
                    exclude_from_aor = exclude_from_aor_default
                    exclude_from_year = exclude_from_year_default
                    exclude_from_sc = exclude_from_sc_default
                    exclude_from_ut = exclude_from_ut_default
                    exclude_from_ms = exclude_from_ms_default
                    exclude_from_major = exclude_from_major_default
                    # Fuzzy major match: keep allies whose major is >= 90
                    # similar to the requested one.
                    if (major != '') and (fuzz.ratio(ally.major, major) < 90):
                        exclude_from_major = True
                    # Area of research is stored comma-separated.
                    if ally.area_of_research:
                        aor = ally.area_of_research.split(',')
                    else:
                        aor = []
                    if stemfields and (not bool(set(stemfields) & set(aor))):
                        exclude_from_aor = True
                    if mentorship_status != []:
                        if (mentorship_status == 'Mentor') and (ally.interested_in_mentoring is False):
                            exclude_from_ms = True
                        elif (mentorship_status == 'Mentee') and (ally.interested_in_being_mentored is False):
                            exclude_from_ms = True
                    try:
                        categories = AllyStudentCategoryRelation.objects.filter(
                            ally_id=ally.id).values()[0]
                        categories = StudentCategories.objects.filter(
                            id=categories['student_category_id'])[0]
                    except KeyError:  # pragma: no cover
                        # NOTE(review): this clears the submitted filter list
                        # for ALL later allies, not just this one — confirm.
                        student_categories = []
                    if student_categories:
                        for cat in student_categories:
                            if (cat == 'First generation college-student') and (categories.first_gen_college_student is False):
                                exclude_from_sc = True
                            elif (cat == 'Low-income') and (categories.low_income is False):
                                exclude_from_sc = True
                            elif (cat == 'Underrepresented racial/ethnic minority') and \
                                    (categories.under_represented_racial_ethnic is False):
                                exclude_from_sc = True
                            elif (cat == 'LGBTQ') and (categories.lgbtq is False):
                                exclude_from_sc = True
                            elif (cat == 'Rural') and (categories.rural is False):
                                exclude_from_sc = True
                            elif (cat == 'Disabled') and (categories.disabled is False):
                                exclude_from_sc = True
                    if undergrad_year and (ally.year not in undergrad_year):
                        exclude_from_year = True
                    if user_types and (ally.user_type not in user_types):
                        exclude_from_ut = True
                    exclude_from_ms_major = exclude_from_ms and exclude_from_major
                    # Drop the ally only if every facet voted to exclude it.
                    if exclude_from_aor and exclude_from_year and exclude_from_sc and exclude_from_ut \
                            and exclude_from_ms_major:
                        allies_list = allies_list.exclude(id=ally.id)
            # Same {ally: bool} shape as get(), active users only.
            tmp = {}
            for ally in allies_list:
                if ally.user.is_active:
                    tmp[ally] = False
            allies_list = tmp
            return render(request, 'sap/dashboard.html', {'allies_list': allies_list})
        return HttpResponse()
class MentorsListView(generic.ListView):
    """Ally-facing dashboard: lists active allies annotated with whether each
    already has a mentor, plus the current user's own ally record and mentor.
    POST applies area-of-research / undergrad-year / mentorship filters and,
    for mentee searches, orders allies sharing the viewer's identity
    categories first."""
    template_name = 'sap/dashboard_ally.html'
    context_object_name = 'allies_list'

    def get(self, request):
        """Returns a view of allies"""
        allies_list = Ally.objects.order_by('-id')
        mentees = AllyMenteeRelation.objects.all()
        try:
            user_ally = Ally.objects.get(user=request.user)
        except ObjectDoesNotExist:
            # BUGFIX: return an instance, not the class itself.
            return HttpResponseNotFound()
        try:
            mentor = AllyMentorRelation.objects.get(ally_id=user_ally.id)
            mentor = Ally.objects.get(id=mentor.mentor_id)
        except ObjectDoesNotExist:
            mentor = None
        # BUGFIX: the original nested `if ally.user.is_active:` around
        # `if not ally.user.is_active:` — a contradiction that never ran,
        # so inactive allies were shown. Exclude them, matching the
        # pattern used elsewhere in this module (e.g. CreateEventView).
        for ally in allies_list:
            if not ally.user.is_active:
                allies_list = allies_list.exclude(id=ally.id)
        # Annotate each ally with whether somebody already mentors them.
        tmp = {}
        for ally in allies_list:
            has_mentor = False
            for mentee in mentees:
                if mentee.mentee_id == ally.id:
                    has_mentor = True
            tmp[ally] = has_mentor
        allies_list = tmp
        return render(request, 'sap/dashboard_ally.html', {'allies_list': allies_list,
                                                           'user_ally': user_ally,
                                                           'mentor': mentor})

    def post(self, request):
        """Returns filtered version of allies on the dashboard"""
        if request.POST.get("form_type") == 'filters':
            post_dict = dict(request.POST)
            # *_default=True means "facet not submitted" and counts as an
            # exclusion vote; an ally is dropped only when all facets vote.
            if 'stemGradCheckboxes' in post_dict:
                stemfields = post_dict['stemGradCheckboxes']
                exclude_from_aor_default = False
            else:
                exclude_from_aor_default = True
                stemfields = []
            if 'undergradYear' in post_dict:
                exclude_from_year_default = False
                undergrad_year = post_dict['undergradYear']
            else:
                exclude_from_year_default = True
                undergrad_year = []
            if 'mentorshipStatus' in post_dict:
                mentorship_status = post_dict['mentorshipStatus'][0]
                exclude_from_ms_default = False
            else:
                exclude_from_ms_default = True
                mentorship_status = []
            allies_list = Ally.objects.order_by('-id')
            if not (exclude_from_year_default and exclude_from_aor_default and exclude_from_ms_default):
                for ally in allies_list:
                    exclude_from_aor = exclude_from_aor_default
                    exclude_from_year = exclude_from_year_default
                    exclude_from_ms = exclude_from_ms_default
                    if mentorship_status != []:
                        if (mentorship_status == 'Mentor') and (ally.interested_in_mentoring is False) \
                                and (ally.openings_in_lab_serving_at is False) and (ally.willing_to_offer_lab_shadowing is False):
                            exclude_from_ms = True
                        elif (mentorship_status == 'Mentee') and (ally.interested_in_being_mentored is False):
                            exclude_from_ms = True
                    # Area of research is stored comma-separated.
                    if ally.area_of_research:
                        aor = ally.area_of_research.split(',')
                    else:
                        aor = []
                    if (stemfields) and (not bool(set(stemfields) & set(aor))):
                        exclude_from_aor = True
                    if (undergrad_year) and (ally.year not in undergrad_year):
                        exclude_from_year = True
                    if exclude_from_aor and exclude_from_year and exclude_from_ms:
                        allies_list = allies_list.exclude(id=ally.id)
            # BUGFIX: same dead `is_active`/`not is_active` nesting as in
            # get() — inactive allies were never excluded here either.
            for ally in allies_list:
                if not ally.user.is_active:
                    allies_list = allies_list.exclude(id=ally.id)
            user = request.user
            ally = Ally.objects.get(user=user)
            try:
                categories = AllyStudentCategoryRelation.objects.filter(
                    ally_id=ally.id).values()[0]
                categories = StudentCategories.objects.filter(
                    id=categories['student_category_id'])[0]
            except KeyError:  # pragma: no cover
                categories = []
            if (categories) and (exclude_from_ms_default is False):
                # Order allies that share at least one of the viewer's
                # identity categories to the front of the list.
                identity_wise_list = []
                curr_identity_list = []
                if categories.first_gen_college_student is True:
                    curr_identity_list.append('First generation college-student')
                if categories.low_income is True:
                    curr_identity_list.append('Low-income')
                if categories.under_represented_racial_ethnic is True:
                    curr_identity_list.append('Underrepresented racial/ethnic minority')
                if categories.lgbtq is True:
                    curr_identity_list.append('LGBTQ')
                if categories.rural is True:
                    curr_identity_list.append('Rural')
                if categories.disabled is True:
                    curr_identity_list.append('Disabled')
                for ally in allies_list:
                    try:
                        categories_from_list = AllyStudentCategoryRelation.objects.filter(
                            ally_id=ally.id).values()[0]
                        categories_from_list = StudentCategories.objects.filter(
                            id=categories_from_list['student_category_id'])[0]
                    except KeyError:  # pragma: no cover
                        categories_from_list = []
                    not_found = True
                    if categories_from_list:
                        if (categories_from_list.first_gen_college_student is True) and \
                                ('First generation college-student' in curr_identity_list):
                            not_found = False
                        if (categories_from_list.low_income is True) and ('Low-income' in curr_identity_list):
                            not_found = False
                        if (categories_from_list.under_represented_racial_ethnic is True) and \
                                ('Underrepresented racial/ethnic minority' in curr_identity_list):
                            not_found = False
                        if (categories_from_list.lgbtq is True) and ('LGBTQ' in curr_identity_list):
                            not_found = False
                        if (categories_from_list.rural is True) and ('Rural' in curr_identity_list):
                            not_found = False
                        if (categories_from_list.disabled is True) and ('Disabled' in curr_identity_list):
                            not_found = False
                    if not_found:
                        identity_wise_list.append(ally)
                    else:
                        identity_wise_list.insert(0, ally)
            else:
                identity_wise_list = allies_list
            # Annotate each ally with whether somebody already mentors them,
            # preserving the identity-based ordering.
            ordered_dict = OrderedDict()
            mentees = AllyMenteeRelation.objects.all()
            for ally in identity_wise_list:
                has_mentor = False
                for mentee in mentees:
                    if mentee.mentee_id == ally.id:
                        has_mentor = True
                ordered_dict[ally] = has_mentor
            identity_wise_list = ordered_dict
            try:
                user_ally = Ally.objects.get(user=request.user)
            except ObjectDoesNotExist:
                # BUGFIX: return an instance, not the class itself.
                return HttpResponseNotFound()
            try:
                mentor = AllyMentorRelation.objects.get(ally_id=user_ally.id)
                mentor = Ally.objects.get(id=mentor.mentor_id)
            except ObjectDoesNotExist:
                mentor = None
            return render(request, 'sap/dashboard_ally.html', {'allies_list': identity_wise_list,
                                                               'user_ally': user_ally,
                                                               'mentor': mentor})
        return HttpResponse()
class AnalyticsView(AccessMixin, TemplateView):
    """Staff analytics page: signups per year by user type, category counts
    for students vs. mentors, and undergrads per school year."""
    template_name = "sap/analytics.html"

    @staticmethod
    def clean_undergrad_dic(undergrad_dic):
        """Split a {year-string: count} dict into parallel (years, numbers)
        lists, sorted by year descending."""
        years = []
        numbers = []
        if undergrad_dic != {}:
            for key in sorted(undergrad_dic, reverse=True):
                years.append(int(key))
                numbers.append(undergrad_dic[key])
        return years, numbers

    @staticmethod
    def clean_other_dic(other_dic):
        """Split a {year-string: [staff, grad, faculty]} dict into (years,
        [staff_counts, grad_counts, faculty_counts]), years descending."""
        years = []
        numbers = [[], [], []]
        if other_dic != {}:
            for key in sorted(other_dic, reverse=True):
                years.append(int(key))
                for i in range(0, 3):
                    numbers[i].append(other_dic[key][i])
        return years, numbers

    @staticmethod
    def year_helper(ally):
        """turns datetime object into a string (just the year)"""
        user = ally.user
        joined = user.date_joined
        joined = datetime.datetime.strftime(joined, '%Y')
        return joined

    @staticmethod
    def find_years(allies):
        """Initialize per-year counter dicts: one keyed by the join years of
        non-undergrads (value [staff, grad, faculty]) and one keyed by the
        join years of undergrads (value 0)."""
        year_and_number = {}
        undergrad_number = {}
        for ally in allies:
            joined = AnalyticsView.year_helper(ally)
            if ally.user_type != 'Undergraduate Student':
                year_and_number[joined] = [0, 0, 0]  # Staff,Grad,Faculty
            else:
                undergrad_number[joined] = 0  # num undergrad in a particular year
        return year_and_number, undergrad_number

    @staticmethod
    def user_type_per_year(allies, year_and_number, undergrad_number):
        """Finds the number of each type of ally that signup per year"""
        for ally in allies:
            joined = AnalyticsView.year_helper(ally)
            if ally.user_type == 'Staff':
                year_and_number[joined][0] += 1
            elif ally.user_type == 'Graduate Student':
                year_and_number[joined][1] += 1
            elif ally.user_type == 'Undergraduate Student':
                undergrad_number[joined] += 1
            elif ally.user_type == 'Faculty':
                year_and_number[joined][2] += 1
        return year_and_number, undergrad_number

    @staticmethod
    def find_the_categories(allies, relation, categories):
        """Resolve each ally's first category relation to its
        StudentCategories row; allies without one are skipped."""
        categories_list = []
        for ally in allies:
            category_relation = relation.filter(ally_id=ally.id)
            if category_relation.exists():
                category = categories.filter(id=category_relation[0].student_category_id)
                if category.exists():
                    categories_list.append(category[0])
        return categories_list

    @staticmethod
    def determine_num_per_category(category_list):
        """
        Count how many StudentCategories rows have each boolean flag set.
        Returns [lgbtq, minority, rural, disabled, first-gen, transfer,
        low-income] — an ally can be counted in several buckets.
        """
        per_category = [0, 0, 0, 0, 0, 0, 0]  # lbtq,minorities,rural,disabled,firstGen,transfer,lowIncome
        for category in category_list:
            if category.lgbtq:
                per_category[0] += 1
            if category.under_represented_racial_ethnic:
                per_category[1] += 1
            if category.rural:
                per_category[2] += 1
            if category.disabled:
                per_category[3] += 1
            if category.first_gen_college_student:
                per_category[4] += 1
            if category.transfer_student:
                per_category[5] += 1
            if category.low_income:
                per_category[6] += 1
        return per_category

    @staticmethod
    def undergrad_per_year(allies):
        """
        Count undergrads per school year as
        [Freshman, Sophomore, Junior, Senior].
        """
        per_category = [0, 0, 0, 0]  # Freshman,Sophmore,Junior,Senior
        for ally in allies:
            if ally.year == "Freshman":
                per_category[0] += 1
            if ally.year == "Sophomore":
                per_category[1] += 1
            if ally.year == "Junior":
                per_category[2] += 1
            if ally.year == "Senior":
                per_category[3] += 1
        return per_category

    def get(self, request):
        """Assemble all analytics series and render the page; redirects to
        the dashboard with an error when there are no allies at all."""
        if request.user.is_staff:
            role = "admin"
        else:
            role = "ally"
        allies = Ally.objects.all()
        if len(allies) != 0:
            categories = StudentCategories.objects.all()
            relation = AllyStudentCategoryRelation.objects.all()
            other_year, undergrad_year = AnalyticsView.find_years(allies)
            other_joined_per_year, undergrad_joined_per_year = AnalyticsView.user_type_per_year(allies, other_year, undergrad_year)
            undergrad_years, undergrad_numbers = AnalyticsView.clean_undergrad_dic(undergrad_joined_per_year)
            other_years, other_numbers = AnalyticsView.clean_other_dic(other_joined_per_year)
            # Partition allies: undergrads vs. everyone else ("mentors").
            students = allies.filter(user_type="Undergraduate Student")
            mentors = allies.filter(~Q(user_type="Undergraduate Student"))
            student_categories = AnalyticsView.find_the_categories(students, relation, categories)
            mentor_categories = AnalyticsView.find_the_categories(mentors, relation, categories)
            num_student_categories = AnalyticsView.determine_num_per_category(student_categories)
            num_mentor_categories = AnalyticsView.determine_num_per_category(mentor_categories)
            num_undergrad_per_year = AnalyticsView.undergrad_per_year(students)
            return render(request, 'sap/analytics.html', {"numStudentCategories": num_student_categories,
                                                          "numMentorCategories": num_mentor_categories,
                                                          "numUndergradPerYear": num_undergrad_per_year,
                                                          "undergradYears": undergrad_years,
                                                          "undergradNumbers": undergrad_numbers,
                                                          "otherYears": other_years,
                                                          "staffNumbers": other_numbers[0],
                                                          "gradNumbers": other_numbers[1],
                                                          "facultyNumbers": other_numbers[2],
                                                          "role": role, })
        messages.error(request, "No allies to display!")
        return redirect('sap:sap-dashboard')
class AdminProfileView(TemplateView):
    """Static template view for the admin profile page."""
    template_name = "sap/profile.html"
class AboutPageView(TemplateView):
    """Static template view for the about page."""
    template_name = "sap/about.html"
class ResourcesView(TemplateView):
    """Static template view for the resources page."""
    template_name = "sap/resources.html"
class SupportPageView(TemplateView):
    """Static template view for the support page."""
    template_name = "sap/support.html"
class CreateAdminView(AccessMixin, TemplateView):
    """Staff-only form for creating another admin (staff) account after
    re-authenticating the current admin's credentials."""
    template_name = "sap/create_iba_admin.html"

    def get(self, request):
        """Render the create-admin form."""
        return render(request, self.template_name)

    def post(self, request):
        """Validate the form and create a new staff user.

        Checks, in order: all fields non-empty; current credentials
        re-authenticate; new username not taken; passwords match. Each
        failure redirects back with an error message.
        """
        new_admin_dict = dict(request.POST)
        valid = True
        # request.POST values are lists; reject if any field is empty.
        for key in new_admin_dict:
            if new_admin_dict[key][0] == '':
                valid = False
        if valid:
            # Check if username credentials are correct
            if authenticate(request, username=new_admin_dict['current_username'][0],
                            password=new_admin_dict['current_password'][0]) is not None:
                # if are check username exists in database
                if User.objects.filter(username=new_admin_dict['new_username'][0]).exists():
                    messages.add_message(request, messages.ERROR, 'Account was not created because username exists')
                    return redirect('/create_iba_admin')
                # Check if repeated password is same
                if new_admin_dict['new_password'][0] != new_admin_dict['repeat_password'][0]:
                    messages.add_message(request, messages.ERROR, 'New password was not the same as repeated password')
                    return redirect('/create_iba_admin')
                messages.add_message(request, messages.SUCCESS, 'Account Created')
                user = User.objects.create_user(new_admin_dict['new_username'][0],
                                                new_admin_dict['new_email'][0], new_admin_dict['new_password'][0])
                user.is_staff = True
                user.save()
                return redirect('/dashboard')
            messages.add_message(request, messages.ERROR, 'Invalid Credentials entered')
            return redirect('/create_iba_admin')
        messages.add_message(request, messages.ERROR,
                             'Account was not created because one or more fields were not entered')
        return redirect('/create_iba_admin')
class CreateEventView(AccessMixin, TemplateView):
    """Create a new event functions.

    Staff-only view: builds an Event from the posted form, works out which
    allies should be invited (either everyone, or the union of several
    filter categories), and either creates the event + notifications or
    returns an XLSX of the would-be invitees.
    """
    template_name = "sap/create_event.html"

    def get(self, request):
        """Render create event page (staff only; everyone else is bounced to resources)."""
        if request.user.is_staff:
            return render(request, self.template_name)
        return redirect('sap:resources')

    def post(self, request):
        """Creates a new event if when the admin clicks on create event button on create event page.

        Two modes, selected by the form:
        - 'email_list' present: return an XLSX of invitee usernames/emails
          instead of creating anything.
        - otherwise: create the Event, invite the computed set of allies and
          notify them.
        """
        # request.POST is a QueryDict; dict() gives {key: [values...]}.
        new_event_dict = dict(request.POST)
        event_title = new_event_dict['event_title'][0]
        event_description = new_event_dict['event_description'][0]
        event_start_time = new_event_dict['event_start_time'][0]
        event_end_time = new_event_dict['event_end_time'][0]
        event_location = new_event_dict['event_location'][0]
        invite_all = True
        # The *_selected / mentor_status / special_category / research_field
        # strings are persisted on the Event so the invite criteria can be
        # reconstructed later; None means the category was not used.
        mentor_status = None
        special_category = None
        research_field = None
        school_year_selected = None
        role_selected = None
        allies_list = Ally.objects.order_by('-id')
        # Drop allies whose user accounts are deactivated.
        for ally in allies_list:
            if not ally.user.is_active:
                allies_list = allies_list.exclude(id=ally.id)
        allies_list = list(allies_list)
        if 'role_selected' in new_event_dict:
            invite_ally_user_types = new_event_dict['role_selected']
            role_selected = ','.join(new_event_dict['role_selected'])
        else:
            invite_ally_user_types = []
        if 'school_year_selected' in new_event_dict:
            invite_ally_school_years = new_event_dict['school_year_selected']
            school_year_selected = ','.join(new_event_dict['school_year_selected'])
        else:
            invite_ally_school_years = []
        if 'mentor_status' in new_event_dict:
            invite_mentor_mentee = new_event_dict['mentor_status']
            mentor_status = ','.join(new_event_dict['mentor_status'])
        else:
            invite_mentor_mentee = []
        if 'special_category' in new_event_dict:
            invite_ally_belonging_to_special_categories = new_event_dict['special_category']
            special_category = ','.join(new_event_dict['special_category'])
        else:
            invite_ally_belonging_to_special_categories = []
        if 'research_area' in new_event_dict:
            invite_ally_belonging_to_research_area = new_event_dict['research_area']
            research_field = ','.join(new_event_dict['research_area'])
        else:
            invite_ally_belonging_to_research_area = []
        if 'invite_all' in new_event_dict:
            invite_all_selected = True
            invite_all = new_event_dict['invite_all'][0] == 'invite_all'
        else:
            # NOTE: an empty list is used as the falsy "not selected" value here.
            invite_all_selected = []
            invite_all = False
        allday = 'event_allday' in new_event_dict
        # String comparison of the raw datetime fields; assumes both come in
        # the same sortable format from the form -- TODO confirm.
        if event_end_time < event_start_time:
            messages.warning(request, 'End time cannot be less than start time!')
            return redirect('/create_event')
        if invite_all_selected:
            # If all allies are invited
            allies_to_be_invited = allies_list
        else:
            # Union of every selected filter category.
            allies_to_be_invited = []
            allies_to_be_invited.extend(Ally.objects.filter(user_type__in=invite_ally_user_types))
            allies_to_be_invited.extend(Ally.objects.filter(year__in=invite_ally_school_years))
            if 'Mentors' in invite_mentor_mentee:
                allies_to_be_invited.extend(Ally.objects.filter(interested_in_mentoring=True))
            if 'Mentees' in invite_mentor_mentee:
                allies_to_be_invited.extend(Ally.objects.filter(interested_in_mentor_training=True))
            allies_to_be_invited.extend(Ally.objects.filter(area_of_research__in=invite_ally_belonging_to_research_area))
            # Map each human-readable special-category label onto its boolean
            # column in StudentCategories.
            student_categories_to_include_for_event = []
            for category in invite_ally_belonging_to_special_categories:
                if category == 'First generation college-student':
                    student_categories_to_include_for_event.extend(StudentCategories.objects.filter(first_gen_college_student=True))
                elif category == 'Low-income':
                    student_categories_to_include_for_event.extend(StudentCategories.objects.filter(low_income=True))
                elif category == 'Underrepresented racial/ethnic minority':
                    student_categories_to_include_for_event.extend(StudentCategories.objects.filter(under_represented_racial_ethnic=True))
                elif category == 'LGBTQ':
                    student_categories_to_include_for_event.extend(StudentCategories.objects.filter(lgbtq=True))
                elif category == 'Rural':
                    student_categories_to_include_for_event.extend(StudentCategories.objects.filter(rural=True))
                elif category == 'Disabled':
                    student_categories_to_include_for_event.extend(StudentCategories.objects.filter(disabled=True))
            invited_allies_ids = AllyStudentCategoryRelation.objects.filter(student_category__in=
                                                                            student_categories_to_include_for_event).values('ally')
            allies_to_be_invited.extend(
                Ally.objects.filter(id__in=invited_allies_ids)
            )
        # Deduplicate: the same ally may match several filter categories.
        allies_to_be_invited = set(allies_to_be_invited)
        try:
            # 'email_list' present => caller wants the XLSX preview, not a
            # created event (KeyError means normal event creation).
            junk = new_event_dict['email_list']
            if junk[0] == 'get_email_list':
                return CreateEventView.build_response(allies_to_be_invited, event_title)
            return redirect('/calendar')
        except KeyError:
            event = Event.objects.create(title=event_title,
                                         description=event_description,
                                         start_time=parse_datetime(event_start_time + '-0500'),
                                         # converting time to central time before storing in db
                                         end_time=parse_datetime(event_end_time + '-0500'),
                                         location=event_location,
                                         allday=allday,
                                         invite_all=invite_all,
                                         mentor_status=mentor_status,
                                         special_category=special_category,
                                         research_field=research_field,
                                         school_year_selected=school_year_selected,
                                         role_selected=role_selected)
            CreateEventView.invite_and_notify(request, allies_to_be_invited, event)
            messages.success(request, "Event successfully created!")
            return redirect('/calendar')

    @staticmethod
    def invite_and_notify(request, allies_to_be_invited, event):
        """
        invite the users, notify users

        Bulk-creates EventInviteeRelation rows for every active invitee and
        sends an in-app notification to each non-staff invitee.
        """
        invited_allies = set()
        all_event_ally_objs = []
        notifications = Notification.objects.all()
        for ally in allies_to_be_invited:
            if ally.user.is_active:
                event_ally_rel_obj = EventInviteeRelation(event=event, ally=ally)
                all_event_ally_objs.append(event_ally_rel_obj)
                invited_allies.add(event_ally_rel_obj.ally)
                ally_user = ally.user
                # Staff members are invited but not notified.
                if not ally_user.is_staff:
                    user_notify = notifications.filter(recipient=ally_user.id)
                    msg = 'Event Invitation: ' + event.title
                    make_notification(request, user_notify, ally_user, msg, event)
        # Single bulk insert instead of one INSERT per invitee.
        EventInviteeRelation.objects.bulk_create(all_event_ally_objs)

    @staticmethod
    def build_response(ally_list, event_title):
        "Creates an httpresponse object containing a file that will be returned"
        # Build the XLSX entirely in memory -- no temp file on disk.
        byte_stream = io.BytesIO()
        workbook = xlsxwriter.Workbook(byte_stream)
        emails = workbook.add_worksheet('Ally Invite Emails')
        emails.write(0, 0, 'Username')
        emails.write(0, 1, 'Email')
        rows = 1
        for ally in ally_list:
            emails.write(rows, 0, ally.user.username)
            emails.write(rows, 1, ally.user.email)
            rows += 1
        workbook.close()
        # Rewind so HttpResponse streams from the start of the buffer.
        byte_stream.seek(0)
        today = datetime.date.today()
        today = today.strftime("%b-%d-%Y")
        file_name = today + "_SAP_Invitees_" + event_title + ".xlsx"
        response = HttpResponse(
            byte_stream,
            content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        )
        response['Content-Disposition'] = 'attachment; filename=' + file_name
        return response
|
import mysql.connector
from mysql.connector import Error

# Fetch and print every row of tbl_products.
#
# Fix: `con` and `cur` are pre-initialised to None so the `finally` block can
# safely inspect them -- previously, if mysql.connector.connect() itself
# raised, `con` was never bound and the finally clause crashed with a
# NameError instead of reporting the connection error.
con = None
cur = None
try:
    con = mysql.connector.connect(host='localhost', database='db_products', username='root', password='')
    query = "SELECT * FROM tbl_products"
    cur = con.cursor()
    cur.execute(query)
    records = cur.fetchall()
    print("Number of records in the table: ", cur.rowcount)
    for row in records:
        print("ID : ", row[0])
        print("NAME : ", row[1])
        print("DESCRIPTION : ", row[2])
        print("PRICE : ", row[3])
        print("QUANTITY : ", row[4])
        print("----------------------")
except Error as error:
    print("Error in the program {}".format(error))
finally:
    # Only clean up what was actually created.
    if con is not None and con.is_connected():
        if cur is not None:
            cur.close()
        con.close()
        print("MySQL Connection is now CLOSED!")
from config import config, desarollo
from flask_script import Manager, Server
# import the application factory
from src import ini_app

# Selected configuration profile (note: 'desarollo' spelling comes from config).
configuracion = config['desarollo']
app = ini_app()

# Server configuration: register a `runserver` command with flask-script.
# Fix: use a lowercase `manager` instance name so the Manager *class* is not
# shadowed by its own instance.
manager = Manager(app)
manager.add_command('runserver', Server(host='127.0.0.1', port=9200))

if __name__ == '__main__':
    # Fix: dispatch through flask-script so `python <script> runserver` works.
    # The previous app.run() ignored the registered commands entirely.
    manager.run()
|
import os
import sys
import csv
import json
import jsonschema
import requests
from pyelasticsearch import ElasticSearch
import xlrd
import xlwt
from base64 import b64encode
# set headers. UNCLEAR IF THIS IS USED PROPERLY
HEADERS = {'content-type': 'application/json'}
# get object from server
def get_ENCODE(obj_id):
    '''GET an ENCODE object as JSON and return as dict.

    Relies on the module-level SERVER/AUTHID/AUTHPW globals set in __main__.
    On a non-200 response the error body is printed to stderr, but the
    (possibly error) JSON is still returned.  NOTE: Python 2 syntax.
    '''
    # ?limit=all so collection endpoints return every member.
    url = SERVER+obj_id+'?limit=all'
    response = requests.get(url, auth=(AUTHID, AUTHPW), headers=HEADERS)
    if not response.status_code == 200:
        print >> sys.stderr, response.text
    return response.json()
# patch object to server
def patch_ENCODE(obj_id, patch_json):
    '''PATCH an existing ENCODE object and return the response JSON.

    Uses SERVER/AUTHID/AUTHPW globals.  The HTTP status is always printed;
    a non-200 response additionally dumps the body to stderr.
    NOTE: Python 2 syntax; no content-type header is sent here -- presumably
    the server defaults to JSON, verify against the API.
    '''
    url = SERVER+obj_id
    json_payload = json.dumps(patch_json)
    response = requests.patch(url, auth=(AUTHID, AUTHPW), data=json_payload)
    print "Patch:"
    print response.status_code
    if not response.status_code == 200:
        print >> sys.stderr, response.text
    return response.json()
# post object to server
def new_ENCODE(collection_id, object_json):
    '''POST an ENCODE object as JSON and return the response JSON.

    Uses SERVER/AUTHID/AUTHPW globals.  A successful POST returns 201;
    anything else dumps the response body to stderr (the JSON is returned
    either way).  NOTE: Python 2 syntax.
    '''
    url = SERVER+'/'+collection_id+'/'
    json_payload = json.dumps(object_json)
    response = requests.post(url, auth=(AUTHID, AUTHPW), headers=HEADERS, data=json_payload)
    if not response.status_code == 201:
        print >> sys.stderr, response.text
    return response.json()
# write new json obect. SHOULD BE MODIFIED TO CUSTOM OUTPUT FORMAT (FOR HUMAN VIEWING)
def WriteJSON(new_object, object_file):
    '''Serialize new_object as JSON into the file at path object_file.

    Fix: the previous version called outfile.close() inside the `with`
    block, which is redundant -- the context manager already closes the
    file on exit.
    '''
    with open(object_file, 'w') as outfile:
        json.dump(new_object, outfile)
if __name__ == "__main__":
    '''
    This script will read in all objects in the objects folder, determine if they are different from the database object, and post or patch them to the database.
    Authentication is determined from the keys.txt file.
    NOTE: this script uses Python 2 syntax (print statements above).
    '''
    # FUTURE: Should also be deal with errors that are only dependency based.
    # set server name. MODIFY TO HAVE USER CHOOSE SERVER (ENUM LIST FROM THE FILE)
    server_name = 'staging'
    # get ID, PW. MODIFY TO USE USERNAME/PASS TO GAIN ACCESS TO CREDENTIALS
    # keys.txt is a tab-separated table with Server/User/ID/PW columns.
    key_file = open('keys.txt')
    keys = csv.DictReader(key_file,delimiter = '\t')
    for key in keys:
        if key.get('Server') == server_name:
            USER = key.get('User')
            SERVER = ('http://' + key.get('Server') + '.encodedcc.org')
            AUTHID = key.get('ID')
            AUTHPW = key.get('PW')
    key_file.close()
    # let user know the server/user that is set for running script
    print(USER + ' will be running this update on ' + SERVER)
    #print(AUTHID,AUTHPW)
    # load objects in object folder. MODIFY TO HAVE USER VIEW AND SELECT OBJECTS
    object_filenames = os.listdir('objects/')
    # run for each object in objects folder
    for object_filename in object_filenames:
        if '.json' in object_filename:
            # define object parameters. SHOULD NOT RELY ON FILENAME. NEED WAY TO IDENTIFY OBJECT TYPE/NAME BY REVIEWING DATA
            # Filenames are expected to look like "<type>;<name>.json".
            object_type,object_name = object_filename.strip('.json').split(';')
            object_file = ('objects/' + object_type + ';' + object_name + '.json')
            object_collection = (object_type.replace('_','-') + 's')
            object_id = ('/' + object_collection + '/' + object_name + '/')
            # load object SHOULD HANDLE ERRORS GRACEFULLY
            json_object = open(object_file)
            new_object = json.load(json_object)
            json_object.close()
            # check to see if object already exists
            # PROBLEM: SHOULD CHECK UUID AND NOT USE ANY SHORTCUT METADATA THAT MIGHT NEED TO CHANGE
            # BUT CAN'T USE UUID IF NEW... HENCE PROBLEM
            old_object = get_ENCODE(object_id)
            # if object is not found, verify and post it
            if old_object.get(u'title') == u'Not Found':
                # get relevant schema
                object_schema = get_ENCODE(('/profiles/' + object_type + '.json'))
                # test the new object. SHOULD HANDLE ERRORS GRACEFULLY
                try:
                    jsonschema.validate(new_object,object_schema)
                # did not validate
                except Exception as e:
                    print('Validation of ' + object_id + ' failed.')
                    print(e)
                # did validate
                else:
                    # inform the user of the success
                    print('Validation of ' + object_id + ' succeeded.')
                    # post the new object(s). SHOULD HANDLE ERRORS GRACEFULLY
                    response = new_ENCODE(object_collection,new_object)
            # if object is found, check for differences and patch it if needed.
            else:
                # compare new object to old one, remove identical fields.
                # NOTE: popping while iterating .keys() is only safe on
                # Python 2, where keys() returns a list snapshot.
                for key in new_object.keys():
                    if new_object.get(key) == old_object.get(key):
                        new_object.pop(key)
                # if there are any different fields, patch them. SHOULD ALLOW FOR USER TO VIEW/APPROVE DIFFERENCES
                if new_object:
                    # inform user of the updates
                    print(object_id + ' has updates.')
                    print(new_object)
                    # patch object
                    response = patch_ENCODE(object_id, new_object)
                # inform user there are no updates
                else:
                    print(object_id + ' has no updates.')
|
# Generated by Django 3.0.7 on 2020-10-09 07:42
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated Django migration: creates the `Stock` model/table.

    Maps onto an existing legacy table (db_table='Stock'); most columns keep
    their legacy names via db_column.  Do not hand-edit the field list --
    regenerate with `makemigrations` instead.
    """

    dependencies = [
        ('cl_table', '0005_auto_20201009_0730'),
    ]

    operations = [
        migrations.CreateModel(
            name='Stock',
            fields=[
                ('item_no', models.AutoField(db_column='Item_no', primary_key=True, serialize=False)),
                ('item_code', models.CharField(blank=True, max_length=20, null=True)),
                ('itm_icid', models.FloatField(blank=True, db_column='Itm_ICID', null=True)),
                ('itm_code', models.CharField(blank=True, db_column='Itm_Code', max_length=20, null=True)),
                ('item_div', models.CharField(blank=True, db_column='Item_Div', max_length=20, null=True)),
                ('item_dept', models.CharField(blank=True, db_column='Item_Dept', max_length=20, null=True)),
                ('item_class', models.CharField(blank=True, db_column='Item_Class', max_length=20, null=True)),
                ('item_barcode', models.CharField(blank=True, db_column='Item_Barcode', max_length=20, null=True)),
                ('onhand_cst', models.FloatField(blank=True, db_column='ONHAND_CST', null=True)),
                ('item_margin', models.FloatField(blank=True, db_column='Item_Margin', null=True)),
                ('item_isactive', models.BooleanField()),
                ('item_name', models.CharField(blank=True, db_column='Item_Name', max_length=60, null=True)),
                ('item_abbc', models.CharField(blank=True, db_column='Item_abbc', max_length=60, null=True)),
                ('item_desc', models.CharField(blank=True, db_column='Item_Desc', max_length=60, null=True)),
                ('cost_price', models.DecimalField(blank=True, db_column='COST_PRICE', decimal_places=4, max_digits=19, null=True)),
                ('item_price', models.DecimalField(blank=True, db_column='Item_Price', decimal_places=4, max_digits=19, null=True)),
                ('onhand_qty', models.FloatField(blank=True, db_column='ONHAND_QTY', null=True)),
                ('itm_promotionyn', models.CharField(blank=True, db_column='Itm_PromotionYN', max_length=20, null=True)),
                ('itm_disc', models.FloatField(blank=True, db_column='Itm_Disc', null=True)),
                ('itm_commission', models.FloatField(blank=True, db_column='Itm_Commission', null=True)),
                ('item_type', models.CharField(blank=True, db_column='Item_Type', max_length=20, null=True)),
                ('itm_duration', models.FloatField(blank=True, db_column='Itm_Duration', null=True)),
                ('item_price2', models.FloatField(blank=True, db_column='Item_Price2', null=True)),
                ('item_price3', models.FloatField(blank=True, db_column='Item_Price3', null=True)),
                ('item_price4', models.FloatField(blank=True, db_column='Item_Price4', null=True)),
                ('item_price5', models.FloatField(blank=True, db_column='Item_Price5', null=True)),
                ('itm_remark', models.CharField(blank=True, db_column='Itm_Remark', max_length=100, null=True)),
                ('itm_value', models.CharField(blank=True, db_column='Itm_Value', max_length=10, null=True)),
                ('itm_expiredate', models.DateTimeField(blank=True, db_column='Itm_ExpireDate', null=True)),
                ('itm_status', models.CharField(blank=True, db_column='Itm_Status', max_length=10, null=True)),
                ('item_minqty', models.IntegerField(blank=True, null=True)),
                ('item_maxqty', models.IntegerField(blank=True, null=True)),
                ('item_onhandcost', models.CharField(blank=True, db_column='item_OnHandCost', max_length=20, null=True)),
                ('item_barcode1', models.CharField(blank=True, db_column='item_Barcode1', max_length=20, null=True)),
                ('item_barcode2', models.CharField(blank=True, db_column='item_Barcode2', max_length=20, null=True)),
                ('item_barcode3', models.CharField(blank=True, db_column='item_Barcode3', max_length=20, null=True)),
                ('item_marginamt', models.FloatField(blank=True, null=True)),
                ('item_date', models.DateTimeField(blank=True, null=True)),
                ('item_time', models.DateTimeField(blank=True, null=True)),
                ('item_moddate', models.DateTimeField(blank=True, db_column='item_ModDate', null=True)),
                ('item_modtime', models.DateTimeField(blank=True, db_column='item_ModTime', null=True)),
                ('item_createuser', models.CharField(blank=True, max_length=60, null=True)),
                ('item_supp', models.CharField(blank=True, max_length=10, null=True)),
                ('item_parentcode', models.CharField(blank=True, db_column='Item_Parentcode', max_length=20, null=True)),
                ('item_color', models.CharField(blank=True, max_length=10, null=True)),
                ('item_sizepack', models.CharField(blank=True, db_column='item_SizePack', max_length=10, null=True)),
                ('item_size', models.CharField(blank=True, db_column='item_Size', max_length=10, null=True)),
                ('item_season', models.CharField(blank=True, db_column='item_Season', max_length=10, null=True)),
                ('item_fabric', models.CharField(blank=True, max_length=10, null=True)),
                ('item_brand', models.CharField(blank=True, db_column='item_Brand', max_length=10, null=True)),
                ('lstpo_ucst', models.FloatField(blank=True, db_column='LSTPO_UCST', null=True)),
                ('lstpo_no', models.CharField(blank=True, db_column='LSTPO_NO', max_length=20, null=True)),
                ('lstpo_date', models.DateTimeField(blank=True, db_column='LSTPO_Date', null=True)),
                ('item_havechild', models.BooleanField(db_column='item_haveChild')),
                ('value_applytochild', models.BooleanField(db_column='Value_ApplyToChild')),
                ('package_disc', models.FloatField(blank=True, db_column='Package_Disc', null=True)),
                ('have_package_disc', models.BooleanField(db_column='Have_Package_Disc')),
                ('pic_path', models.CharField(blank=True, db_column='PIC_Path', max_length=255, null=True)),
                ('item_foc', models.BooleanField(db_column='Item_FOC')),
                ('item_uom', models.CharField(blank=True, db_column='Item_UOM', max_length=20, null=True)),
                ('mixbrand', models.BooleanField(db_column='MIXBRAND')),
                ('serviceretail', models.BooleanField(blank=True, db_column='SERVICERETAIL', null=True)),
                ('item_range', models.CharField(blank=True, db_column='Item_Range', max_length=20, null=True)),
                ('commissionable', models.BooleanField(blank=True, db_column='Commissionable', null=True)),
                ('trading', models.BooleanField(blank=True, db_column='Trading', null=True)),
                ('cust_replenish_days', models.CharField(blank=True, db_column='Cust_Replenish_Days', max_length=10, null=True)),
                ('cust_advance_days', models.CharField(blank=True, db_column='Cust_Advance_Days', max_length=10, null=True)),
                ('salescomm', models.CharField(blank=True, db_column='SalesComm', max_length=20, null=True)),
                ('workcomm', models.CharField(blank=True, db_column='WorkComm', max_length=20, null=True)),
                ('reminder_active', models.BooleanField(blank=True, db_column='Reminder_Active', null=True)),
                ('disclimit', models.FloatField(blank=True, db_column='DiscLimit', null=True)),
                ('disctypeamount', models.BooleanField(blank=True, db_column='DiscTypeAmount', null=True)),
                ('autocustdisc', models.BooleanField(db_column='AutoCustDisc')),
                ('reorder_active', models.BooleanField(blank=True, db_column='ReOrder_Active', null=True)),
                ('reorder_minqty', models.FloatField(blank=True, db_column='ReOrder_MinQty', null=True)),
                ('service_expire_active', models.BooleanField(db_column='Service_Expire_Active')),
                ('service_expire_month', models.FloatField(blank=True, db_column='Service_Expire_Month', null=True)),
                ('treatment_limit_active', models.BooleanField(db_column='Treatment_Limit_Active')),
                ('treatment_limit_count', models.FloatField(blank=True, db_column='Treatment_Limit_Count', null=True)),
                ('limitservice_flexionly', models.BooleanField(db_column='LimitService_FlexiOnly')),
                ('salescommpoints', models.FloatField(blank=True, db_column='SalesCommPoints', null=True)),
                ('workcommpoints', models.FloatField(blank=True, db_column='WorkCommPoints', null=True)),
                ('item_price_floor', models.FloatField(blank=True, db_column='Item_Price_Floor', null=True)),
                ('voucher_value', models.FloatField(blank=True, db_column='Voucher_Value', null=True)),
                ('voucher_value_is_amount', models.BooleanField(db_column='Voucher_Value_Is_Amount')),
                ('voucher_valid_period', models.CharField(blank=True, db_column='Voucher_Valid_Period', max_length=20, null=True)),
                ('prepaid_value', models.FloatField(blank=True, db_column='Prepaid_Value', null=True)),
                ('prepaid_sell_amt', models.FloatField(blank=True, db_column='Prepaid_Sell_Amt', null=True)),
                ('prepaid_valid_period', models.CharField(blank=True, db_column='Prepaid_Valid_Period', max_length=20, null=True)),
                ('membercardnoaccess', models.BooleanField(blank=True, db_column='MemberCardNoAccess', null=True)),
                ('rpt_code', models.CharField(blank=True, db_column='Rpt_Code', max_length=20, null=True)),
                ('is_gst', models.BooleanField(db_column='IS_GST')),
                ('account_code', models.CharField(blank=True, db_column='Account_Code', max_length=20, null=True)),
                ('stock_pic_b', models.BinaryField(blank=True, db_column='Stock_PIC_B', null=True)),
                ('is_open_prepaid', models.BooleanField(db_column='IS_OPEN_PREPAID')),
                ('appt_wd_min', models.FloatField(blank=True, db_column='Appt_WD_Min', null=True)),
                ('service_cost', models.FloatField(blank=True, db_column='Service_Cost', null=True)),
                ('service_cost_percent', models.BooleanField(db_column='Service_Cost_Percent')),
                ('account_code_td', models.CharField(blank=True, db_column='Account_Code_TD', max_length=20, null=True)),
                ('voucher_isvalid_until_date', models.BooleanField(db_column='Voucher_IsValid_Until_Date')),
                ('voucher_valid_until_date', models.DateTimeField(blank=True, db_column='Voucher_Valid_Until_Date', null=True)),
                ('equipmentcost', models.FloatField(blank=True, null=True)),
                ('is_have_tax', models.BooleanField(db_column='IS_HAVE_TAX')),
                ('is_allow_foc', models.BooleanField(db_column='IS_ALLOW_FOC')),
                ('vilidity_from_date', models.DateTimeField(blank=True, db_column='Vilidity_From_Date', null=True)),
                ('vilidity_to_date', models.DateTimeField(blank=True, db_column='Vilidity_To_date', null=True)),
                ('vilidity_from_time', models.DateTimeField(blank=True, db_column='Vilidity_From_Time', null=True)),
                ('vilidity_to_time', models.DateTimeField(blank=True, db_column='Vilidity_To_Time', null=True)),
                ('t1_tax_code', models.CharField(blank=True, db_column='T1_Tax_Code', max_length=20, null=True)),
                ('t2_tax_code', models.CharField(blank=True, db_column='T2_Tax_Code', max_length=20, null=True)),
                ('prepaid_disc_type', models.CharField(blank=True, db_column='Prepaid_Disc_Type', max_length=20, null=True)),
                ('prepaid_disc_percent', models.FloatField(blank=True, db_column='Prepaid_Disc_Percent', null=True)),
                ('srv_duration', models.FloatField(blank=True, db_column='Srv_Duration', null=True)),
                ('voucher_template_name', models.CharField(blank=True, db_column='Voucher_Template_Name', max_length=50, null=True)),
                ('autoproportion', models.BooleanField(db_column='AutoProportion')),
                ('item_pingying', models.CharField(db_column='Item_PingYing', max_length=250, null=True)),
                ('process_remark', models.CharField(db_column='Process_Remark', max_length=250, null=True)),
            ],
            options={
                'db_table': 'Stock',
            },
        ),
    ]
|
from django.contrib import admin
from django.conf.urls import include
from django.urls import re_path
from stored_messages.tests.views import message_view, message_create, message_create_mixed
# Discover admin modules of installed apps (required for older Django setups).
admin.autodiscover()

# URL routes used by the stored_messages test suite.
urlpatterns = [
    re_path(r'^consume$', message_view),
    re_path(r'^create$', message_create),
    re_path(r'^create_mixed$', message_create_mixed),
    # include() with a (module, app_name) tuple; the instance namespace
    # 'stored_messages' is what reverse() calls should use.
    re_path(r'^messages', include(('stored_messages.urls', 'reviews'), namespace='stored_messages'))
]
|
from django.db import models
from django.utils import timezone
# from django.contrib.auth.models import User
from account.models import customUser
from django.urls import reverse
# Create your models here.
class Plant(models.Model):
    """A plant listing put up for sale by a manager (customUser)."""
    name = models.CharField(max_length=100)
    description = models.TextField()
    # default=timezone.now (no parentheses) so the timestamp is taken at save time.
    date_posted = models.DateTimeField(default=timezone.now)
    # Quantity currently available for purchase.
    q_avail = models.IntegerField(default=0)
    price = models.FloatField(default=0.0)
    # Deleting the manager account removes their plant listings too.
    manager = models.ForeignKey(customUser, on_delete=models.CASCADE)
    plant_image = models.ImageField(default='default.jpg', upload_to='plant_pics')

    def __str__(self):
        """Human-readable label shown in admin/listings."""
        return f'{self.name} , available : {self.q_avail}'

    def get_absolute_url(self):
        """Canonical detail-page URL for this plant."""
        return reverse('plant-detail', kwargs={'pk':self.pk})
class Orders(models.Model):
    """A purchase order for a Plant, linking a buyer to a seller."""
    plant = models.ForeignKey(Plant, on_delete=models.CASCADE)
    # Seller/buyer are stored as plain usernames, not FK references --
    # presumably to survive account deletion; verify against callers.
    seller = models.CharField(max_length=100)
    buyer = models.CharField(max_length=100)
    quantity = models.IntegerField(default=1)
    # False = pending, True = fulfilled (assumption -- TODO confirm).
    status = models.BooleanField(default=False)

    def __str__(self):
        """Human-readable summary of the order."""
        return f'[ plant:{ self.plant.name}, seller:{self.seller}, buyer={self.buyer} for {self.quantity} ]'

    def get_absolute_url(self):
        """URL of the buyer's order list after create/update."""
        return reverse('myorders',kwargs={ 'pk' : self.pk })
from xmind.tests import logging_configuration as lc
from xmind.core.topic import TopicElement
from xmind.tests import base
from unittest.mock import patch, Mock, PropertyMock, call
from xmind.core.const import (
TAG_TOPIC,
TAG_TOPICS,
TAG_TITLE,
TAG_MARKERREF,
TAG_MARKERREFS,
TAG_POSITION,
TAG_CHILDREN,
TAG_SHEET,
ATTR_ID,
ATTR_HREF,
ATTR_BRANCH,
VAL_FOLDED,
TOPIC_ROOT,
TOPIC_ATTACHED,
ATTR_TYPE,
TAG_NOTES)
class TestTopicElement(base.Base):
"""Test class for TopicElement class"""
def getLogger(self):
    """Return the test logger for 'TopicElement', creating and caching it on first use."""
    _cached = getattr(self, '_logger', None)
    if not _cached:
        self._logger = lc.get_logger('TopicElement')
    return self._logger
def setUp(self):
    """Patch TopicElement's base-class __init__ and id-attribute helper before each test."""
    super(TestTopicElement, self).setUp()
    # Stub out WorkbookMixinElement.__init__ so constructing TopicElement
    # never touches a real DOM/workbook.
    self._workbook_mixin_element_init = self._init_patch_with_name(
        '_mixin_init', 'xmind.core.topic.WorkbookMixinElement.__init__')
    # Stub addIdAttribute; return_value=True keeps callers happy.
    self._add_attribute = self._init_patch_with_name(
        '_add_attribute', 'xmind.core.topic.TopicElement.addIdAttribute', return_value=True)
def _assert_init_methods(self):
    """Shared check: the patched constructor hooks were each called exactly once."""
    self._workbook_mixin_element_init.assert_called_once_with(None, None)
    self._add_attribute.assert_called_once_with(ATTR_ID)
def test_excessive_parameters(self):
    """Every public method must reject extra positional parameters.

    Each (method_name, arity) pair is fed to the base-class helper; a tuple
    arity like (1, False) marks methods with optional/keyword parameters.
    """
    _element = TopicElement()
    self.assertEqual(TAG_TOPIC, _element.TAG_NAME)
    _parameters = [
        ('_get_title', 0),
        ('_get_markerrefs', 0),
        ('_get_position', 0),
        ('_get_children', 0),
        ('_set_hyperlink', 1),
        ('getOwnerSheet', 0),
        ('getTitle', 0),
        ('setTitle', 1),
        ('getMarkers', 0),
        ('addMarker', 1),
        ('setFolded', 0),
        ('getPosition', 0),
        ('setPosition', 2),
        ('removePosition', 0),
        ('getType', 0),
        ('getTopics', (1, False)),
        ('getSubTopics', (1, False)),
        ('getSubTopicByIndex', 2),
        ('addSubTopic', (3, False)),
        ('getIndex', 0),
        ('getHyperlink', 0),
        ('setFileHyperlink', 1),
        ('setTopicHyperlink', 1),
        ('setURLHyperlink', 1),
        ('getNotes', 0),
        ('_set_notes', 0),
        ('setPlainNotes', 1),
    ]
    # subTest keeps one failing method from hiding the rest.
    for pair in _parameters:
        with self.subTest(pair=pair):
            self._test_method_by_excessive_parameters(pair, _element)
    self._assert_init_methods()
def test_init_has_no_node_has_no_owner_workbook(self):
    """Default construction forwards (None, None) to the mixin __init__."""
    _element = TopicElement()
    self._assert_init_methods()
def test_init_by_excessive_parameters(self):
    """Constructor accepts at most (node, ownerWorkbook); a third positional arg raises."""
    with self.assertRaises(TypeError) as _ex:
        _element = TopicElement(1, 2, 3)
    self.assertEqual(
        '__init__() takes from 1 to 3 positional arguments but 4 were given', _ex.exception.args[0])
def test_init_has_no_node_has_owner_workbook(self):
    """ownerWorkbook-only construction forwards (None, workbook) to the mixin __init__."""
    _element = TopicElement(ownerWorkbook=5)
    self._workbook_mixin_element_init.assert_called_once_with(None, 5)
    self._add_attribute.assert_called_once_with(ATTR_ID)
def test_init_has_node_has_no_owner_workbook(self):
    """node-only construction forwards (node, None) to the mixin __init__."""
    _element = TopicElement(3)
    self._workbook_mixin_element_init.assert_called_once_with(3, None)
    self._add_attribute.assert_called_once_with(ATTR_ID)
def test_init_has_node_has_owner_workbook(self):
    """Construction with both args forwards (node, workbook) to the mixin __init__."""
    _element = TopicElement(3, 5)
    self._workbook_mixin_element_init.assert_called_once_with(3, 5)
    self._add_attribute.assert_called_once_with(ATTR_ID)
def test_get_title(self):
    """_get_title() must delegate to getFirstChildNodeByTagName(TAG_TITLE)."""
    _topic = TopicElement()
    with patch.object(_topic, 'getFirstChildNodeByTagName') as _child_lookup:
        _child_lookup.return_value = 10
        self.assertEqual(10, _topic._get_title())
        _child_lookup.assert_called_once_with(TAG_TITLE)
    self._assert_init_methods()
def test_get_markerrefs(self):
    """_get_markerrefs() must delegate to getFirstChildNodeByTagName(TAG_MARKERREFS)."""
    _topic = TopicElement()
    with patch.object(_topic, 'getFirstChildNodeByTagName') as _child_lookup:
        _child_lookup.return_value = 10
        self.assertEqual(10, _topic._get_markerrefs())
        _child_lookup.assert_called_once_with(TAG_MARKERREFS)
    self._assert_init_methods()
def test_get_position(self):
    """_get_position() must delegate to getFirstChildNodeByTagName(TAG_POSITION)."""
    _topic = TopicElement()
    with patch.object(_topic, 'getFirstChildNodeByTagName') as _child_lookup:
        _child_lookup.return_value = 10
        self.assertEqual(10, _topic._get_position())
        _child_lookup.assert_called_once_with(TAG_POSITION)
    self._assert_init_methods()
def test_get_children(self):
    """_get_children() must delegate to getFirstChildNodeByTagName(TAG_CHILDREN)."""
    _topic = TopicElement()
    with patch.object(_topic, 'getFirstChildNodeByTagName') as _child_lookup:
        _child_lookup.return_value = 10
        self.assertEqual(10, _topic._get_children())
        _child_lookup.assert_called_once_with(TAG_CHILDREN)
    self._assert_init_methods()
def test_set_hyperlink(self):
    """_set_hyperlink() stores the url under ATTR_HREF and returns None."""
    _topic = TopicElement()
    with patch.object(_topic, 'setAttribute') as _set_attr:
        _set_attr.return_value = 10
        self.assertIsNone(_topic._set_hyperlink('url'))
        _set_attr.assert_called_once_with(ATTR_HREF, 'url')
    self._assert_init_methods()
def test_getOwnerSheet_has_no_parent(self):
    """getOwnerSheet() returns None when the topic has no parent node at all."""
    _element = TopicElement()
    _get_parent_node_mock = patch.object(_element, 'getParentNode').start()
    _get_owner_workbook_mock = patch.object(
        _element, 'getOwnerWorkbook').start()
    _get_parent_node_mock.return_value = None
    self.assertIsNone(_element.getOwnerSheet())
    _get_parent_node_mock.assert_called_once_with()
    # Without a parent the workbook must never be consulted.
    _get_owner_workbook_mock.assert_not_called()
    self._assert_init_methods()
def test_getOwnerSheet_has_parent_no_parent_of_parent(self):
    """getOwnerSheet() returns None when the parent chain ends before a sheet node."""
    _element = TopicElement()
    _parent = Mock(tagName=TAG_MARKERREFS)
    # parentNode is exposed as a property so we can assert it was read.
    _parent_node = PropertyMock(return_value=None)
    type(_parent).parentNode = _parent_node
    _get_parent_node_mock = patch.object(_element, 'getParentNode').start()
    _get_owner_workbook_mock = patch.object(
        _element, 'getOwnerWorkbook').start()
    _get_parent_node_mock.return_value = _parent
    self.assertIsNone(_element.getOwnerSheet())
    _get_parent_node_mock.assert_called_once_with()
    # The walk stopped before reaching a sheet, so no workbook lookup.
    _get_owner_workbook_mock.assert_not_called()
    _parent_node.assert_called_once()
    self._assert_init_methods()
def test_getOwnerSheet_has_parent_has_no_owner_workbook(self):
    """getOwnerSheet() returns None when a sheet node is found but there is no owner workbook."""
    _element = TopicElement()
    _parent_of_parent = Mock(tagName=TAG_SHEET)
    _parent = Mock(tagName=TAG_MARKERREFS)
    _parent_node = PropertyMock(return_value=_parent_of_parent)
    type(_parent).parentNode = _parent_node
    _get_parent_node_mock = patch.object(_element, 'getParentNode').start()
    _get_owner_workbook_mock = patch.object(
        _element, 'getOwnerWorkbook').start()
    # Sheet node exists in the ancestry, but the workbook lookup yields None.
    _get_owner_workbook_mock.return_value = None
    _get_parent_node_mock.return_value = _parent
    self.assertIsNone(_element.getOwnerSheet())
    _get_parent_node_mock.assert_called_once_with()
    _get_owner_workbook_mock.assert_called_once_with()
    _parent_node.assert_called_once()
    self._assert_init_methods()
def test_getOwnerSheet_has_parent_has_owner_workbook_has_no_sheets(self):
    """getOwnerSheet() returns None when the workbook exists but owns no sheets."""
    _element = TopicElement()
    _parent_of_parent = Mock(tagName=TAG_SHEET)
    _parent = Mock(tagName=TAG_MARKERREFS)
    _parent_node = PropertyMock(return_value=_parent_of_parent)
    type(_parent).parentNode = _parent_node
    _owner_workbook = Mock()
    # Workbook reports an empty sheet list, so nothing can match.
    _owner_workbook.getSheets.return_value = []
    _get_parent_node_mock = patch.object(_element, 'getParentNode').start()
    _get_owner_workbook_mock = patch.object(
        _element, 'getOwnerWorkbook').start()
    _get_owner_workbook_mock.return_value = _owner_workbook
    _get_parent_node_mock.return_value = _parent
    self.assertIsNone(_element.getOwnerSheet())
    _get_parent_node_mock.assert_called_once_with()
    _get_owner_workbook_mock.assert_called_once_with()
    _parent_node.assert_called_once()
    _owner_workbook.getSheets.assert_called_once()
    self._assert_init_methods()
def test_getOwnerSheet_has_parent_has_owner_workbook_has_sheets_parent_is_no_sheet_impl(self):
    """getOwnerSheet() returns None when no sheet's implementation *is* the found sheet node.

    The production code compares with identity (`is`), not equality; see
    https://stackoverflow.com/questions/132988/is-there-a-difference-between-and-is-in-python
    """
    _element = TopicElement()
    _parent_of_parent = Mock(tagName=TAG_SHEET)
    _parent = Mock(tagName=TAG_MARKERREFS)
    _parent_node = PropertyMock(return_value=_parent_of_parent)
    type(_parent).parentNode = _parent_node
    _sheet = Mock()
    _sheet.getImplementation.return_value = 10  # << parent is NOT 10 in our test
    _owner_workbook = Mock()
    _owner_workbook.getSheets.return_value = [_sheet]
    _get_parent_node_mock = patch.object(_element, 'getParentNode').start()
    _get_owner_workbook_mock = patch.object(
        _element, 'getOwnerWorkbook').start()
    _get_owner_workbook_mock.return_value = _owner_workbook
    _get_parent_node_mock.return_value = _parent
    self.assertIsNone(_element.getOwnerSheet())
    _get_parent_node_mock.assert_called_once_with()
    _get_owner_workbook_mock.assert_called_once_with()
    _parent_node.assert_called_once()
    _owner_workbook.getSheets.assert_called_once()
    _sheet.getImplementation.assert_called_once()
    self._assert_init_methods()
    def test_getOwnerSheet_has_parent_has_owner_workbook_has_sheets_parent_is_sheet_impl(self):
        """getOwnerSheet returns the sheet whose implementation node is
        identical (identity comparison, 'is') to the topic's grandparent node.
        """
        _element = TopicElement()
        _parent_of_parent = Mock(tagName=TAG_SHEET)
        _parent = Mock(tagName=TAG_MARKERREFS)
        _parent_node = PropertyMock(return_value=_parent_of_parent)
        type(_parent).parentNode = _parent_node
        _sheet = Mock()
        # << parent is _parent_of_parent in our test
        _sheet.getImplementation.return_value = _parent_of_parent
        _owner_workbook = Mock()
        _owner_workbook.getSheets.return_value = [_sheet]
        _get_parent_node_mock = patch.object(_element, 'getParentNode').start()
        _get_owner_workbook_mock = patch.object(
            _element, 'getOwnerWorkbook').start()
        _get_owner_workbook_mock.return_value = _owner_workbook
        _get_parent_node_mock.return_value = _parent
        self.assertEqual(_sheet, _element.getOwnerSheet())
        _get_parent_node_mock.assert_called_once_with()
        _get_owner_workbook_mock.assert_called_once_with()
        _parent_node.assert_called_once()
        _owner_workbook.getSheets.assert_called_once()
        _sheet.getImplementation.assert_called_once()
        self._assert_init_methods()
def test_getTitle_has_no_title(self):
_element = TopicElement()
_create_title_element = self._init_patch_with_name(
'_title_element', 'xmind.core.topic.TitleElement')
with patch.object(_element, '_get_title') as _mock:
_mock.return_value = None
self.assertIsNone(_element.getTitle())
_create_title_element.assert_not_called()
_mock.assert_called_once_with()
self._assert_init_methods()
    def test_getTitle_has_title(self):
        """getTitle wraps the raw title node in a TitleElement and returns
        that element's text content."""
        _element = TopicElement()
        _title = Mock()
        _title.getTextContent.return_value = 'NewValue'
        _create_title_element = self._init_patch_with_name(
            '_title_element', 'xmind.core.topic.TitleElement',
            return_value=_title)
        _wb_mock = patch.object(_element, 'getOwnerWorkbook').start()
        _wb_mock.return_value = 'SomeWorkbook'
        _get_title_mock = patch.object(_element, '_get_title').start()
        _get_title_mock.return_value = 'SomeValue'
        self.assertEqual('NewValue', _element.getTitle())
        # TitleElement must be built from the raw title and the owner workbook.
        _create_title_element.assert_called_once_with(
            'SomeValue', 'SomeWorkbook')
        _wb_mock.assert_called_once_with()
        _get_title_mock.assert_called_once_with()
        _title.getTextContent.assert_called_once_with()
        self._assert_init_methods()
    def test_setTitle_title_is_None(self):
        """setTitle builds a fresh TitleElement and appends it to the topic
        when no title element exists yet."""
        _element = TopicElement()
        _title = Mock()
        _title.setTextContent.return_value = None
        _get_title_mock = self._init_patch_with_name('_get_title',
            'xmind.core.topic.TopicElement._get_title',
            return_value=None)
        _title_element_mock = self._init_patch_with_name('_title_element',
            'xmind.core.topic.TitleElement',
            return_value=_title)
        _append_child_mock = self._init_patch_with_name('_append_child',
            'xmind.core.topic.TopicElement.appendChild')
        _get_owner_workbook_mock = self._init_patch_with_name('_get_owner_wb',
            'xmind.core.topic.TopicElement.getOwnerWorkbook',
            return_value='owner')
        _element.setTitle('someTitle')
        _get_title_mock.assert_called_once()
        # Constructed from scratch (None) since no title existed.
        _title_element_mock.assert_called_once_with(None, 'owner')
        _title.setTextContent.assert_called_once_with('someTitle')
        _get_owner_workbook_mock.assert_called_once()
        _append_child_mock.assert_called_once_with(_title)
        # NOTE(review): unlike the sibling tests, this one does not end with
        # self._assert_init_methods() — presumably an oversight; confirm.
    def test_setTitle_title_is_not_None(self):
        """setTitle reuses the existing title element (no appendChild) and
        only replaces its text content."""
        _element = TopicElement()
        _title = Mock()
        _title.setTextContent.return_value = None
        _get_title_mock = self._init_patch_with_name('_get_title',
            'xmind.core.topic.TopicElement._get_title',
            return_value='NiceTitle')
        _title_element_mock = self._init_patch_with_name('_title_element',
            'xmind.core.topic.TitleElement',
            return_value=_title)
        _append_child_mock = self._init_patch_with_name('_append_child',
            'xmind.core.topic.TopicElement.appendChild')
        _get_owner_workbook_mock = self._init_patch_with_name('_get_owner_wb',
            'xmind.core.topic.TopicElement.getOwnerWorkbook',
            return_value='owner')
        _element.setTitle('someTitle')
        _get_title_mock.assert_called_once()
        # Wraps the existing raw title node rather than creating a new one.
        _title_element_mock.assert_called_once_with('NiceTitle', 'owner')
        _title.setTextContent.assert_called_once_with('someTitle')
        _get_owner_workbook_mock.assert_called_once()
        _append_child_mock.assert_not_called()
        # NOTE(review): unlike the sibling tests, this one does not end with
        # self._assert_init_methods() — presumably an oversight; confirm.
def test_getMarkers_refs_are_None(self):
_element = TopicElement()
_marker_refs_element_constructor_mock = self._init_patch_with_name(
'_marker_refs_element_constructor_mock',
'xmind.core.topic.MarkerRefsElement'
)
with patch.object(_element, '_get_markerrefs') as _mock:
_mock.return_value = None
self.assertIsNone(_element.getMarkers())
_mock.assert_called_once()
_marker_refs_element_constructor_mock.assert_not_called()
self._assert_init_methods()
    def test_getMarkers_markers_are_None(self):
        """getMarkers yields an empty list when the marker-refs element has
        no child marker-ref nodes."""
        _element = TopicElement()
        _marker_fefs_element = Mock()
        _marker_fefs_element.getChildNodesByTagName.return_value = None
        _marker_refs_element_constructor_mock = self._init_patch_with_name(
            '_marker_refs_element_constructor_mock',
            'xmind.core.topic.MarkerRefsElement',
            return_value=_marker_fefs_element,
            autospec=True
        )
        _refs_mock = Mock()
        _get_wb_mock = patch.object(_element, 'getOwnerWorkbook').start()
        _get_wb_mock.return_value = 'OwnerWorkbook'
        _get_markerrefs_mock = patch.object(
            _element, '_get_markerrefs').start()
        _get_markerrefs_mock.return_value = _refs_mock
        self.assertListEqual([], _element.getMarkers())
        _get_markerrefs_mock.assert_called_once()
        _marker_refs_element_constructor_mock.assert_called_once_with(
            _refs_mock,
            'OwnerWorkbook')
        _get_wb_mock.assert_called_once()
        _marker_fefs_element.getChildNodesByTagName.assert_called_once_with(
            TAG_MARKERREF)
        self._assert_init_methods()
    def test_getMarkers_markers_are_not_list(self):
        """getMarkers raises TypeError when the marker-refs children come
        back as a non-iterable value (here: the int 12)."""
        _element = TopicElement()
        _marker_fefs_element = Mock()
        _marker_fefs_element.getChildNodesByTagName.return_value = 12
        _marker_refs_element_constructor_mock = self._init_patch_with_name(
            '_marker_refs_element_constructor_mock',
            'xmind.core.topic.MarkerRefsElement',
            return_value=_marker_fefs_element,
            autospec=True
        )
        _refs_mock = Mock()
        _get_wb_mock = patch.object(_element, 'getOwnerWorkbook').start()
        _get_wb_mock.return_value = 'OwnerWorkbook'
        _get_markerrefs_mock = patch.object(
            _element, '_get_markerrefs').start()
        _get_markerrefs_mock.return_value = _refs_mock
        with self.assertRaises(TypeError) as _ex:
            _element.getMarkers()
        _get_markerrefs_mock.assert_called_once()
        _marker_refs_element_constructor_mock.assert_called_once_with(
            _refs_mock,
            'OwnerWorkbook')
        self.assertEqual("'int' object is not iterable", _ex.exception.args[0])
        _get_wb_mock.assert_called_once()
        _marker_fefs_element.getChildNodesByTagName.assert_called_once_with(
            TAG_MARKERREF)
        self._assert_init_methods()
    def test_getMarkers(self):
        """getMarkers wraps every child marker-ref node in a MarkerRefElement
        and returns the wrapped list."""
        _element = TopicElement()
        _marker_fefs_element = Mock()
        _marker_fefs_element.getChildNodesByTagName.return_value = [11, 12, 13]
        _marker_refs_element_constructor_mock = patch(
            'xmind.core.topic.MarkerRefsElement').start()
        _marker_refs_element_constructor_mock.return_value = _marker_fefs_element
        _marker_ref_element_constructor_mock = patch(
            'xmind.core.topic.MarkerRefElement').start()
        # One wrapped value per raw child node, consumed in order.
        _marker_ref_element_constructor_mock.side_effect = [
            111,
            112,
            113
        ]
        _refs_mock = Mock()
        _get_wb_mock = patch.object(_element, 'getOwnerWorkbook').start()
        _get_wb_mock.return_value = 'OwnerWorkbook'
        _get_markerrefs_mock = patch.object(
            _element, '_get_markerrefs').start()
        _get_markerrefs_mock.return_value = _refs_mock
        self.assertListEqual(
            [111, 112, 113], _element.getMarkers())
        _get_markerrefs_mock.assert_called_once()
        _marker_refs_element_constructor_mock.assert_called_once_with(
            _refs_mock,
            'OwnerWorkbook')
        self.assertEqual(3, _marker_ref_element_constructor_mock.call_count)
        self.assertListEqual([
            call(11, 'OwnerWorkbook'),
            call(12, 'OwnerWorkbook'),
            call(13, 'OwnerWorkbook')], _marker_ref_element_constructor_mock.call_args_list)
        # One workbook lookup for the refs element + one per marker-ref.
        self.assertEqual(4, _get_wb_mock.call_count)
        _marker_fefs_element.getChildNodesByTagName.assert_called_once_with(
            TAG_MARKERREF)
        self._assert_init_methods()
def test_addMarker_markerId_is_none(self):
_element = TopicElement()
_get_markerrefs = patch.object(_element, '_get_markerrefs').start()
self.assertIsNone(_element.addMarker(None))
_get_markerrefs.assert_not_called()
self._assert_init_methods()
    def test_addMarker_markerId_is_str(self):
        """A plain-string markerId is wrapped in MarkerId before the refs
        lookup; the exception injected into _get_markerrefs proves execution
        reached that lookup."""
        _element = TopicElement()
        _get_markerrefs = patch.object(_element, '_get_markerrefs').start()
        _get_markerrefs.side_effect = Exception('test exception')
        _marker_refs_element_constructor_mock = self._init_patch_with_name(
            '_marker_refs_element_constructor_mock',
            'xmind.core.topic.MarkerRefsElement'
        )
        _marker_id_constructor = self._init_patch_with_name(
            '_marker_id_constructor',
            'xmind.core.topic.MarkerId',
            return_value='new_marker_id'
        )
        with self.assertRaises(Exception) as _ex_mock:
            _element.addMarker('marker_test')
        self.assertTrue(_ex_mock.exception.args[0].find(
            "test exception") != -1)
        _get_markerrefs.assert_called_once()
        _marker_refs_element_constructor_mock.assert_not_called()
        _marker_id_constructor.assert_called_once_with('marker_test')
        self._assert_init_methods()
    def test_addMarker_markerId_is_object(self):
        """A non-string markerId object is used as-is — the MarkerId
        constructor must not be invoked."""
        _element = TopicElement()
        _get_markerrefs = patch.object(_element, '_get_markerrefs').start()
        _get_markerrefs.side_effect = Exception('test exception')
        _marker_refs_element_constructor_mock = self._init_patch_with_name(
            '_marker_refs_element_constructor_mock',
            'xmind.core.topic.MarkerRefsElement'
        )
        _marker_id_constructor = self._init_patch_with_name(
            '_marker_id_constructor',
            'xmind.core.topic.MarkerId',
            return_value='new_marker_id'
        )
        with self.assertRaises(Exception) as _ex_mock:
            _element.addMarker(Mock())
        self.assertTrue(_ex_mock.exception.args[0].find(
            "test exception") != -1)
        _get_markerrefs.assert_called_once()
        _marker_refs_element_constructor_mock.assert_not_called()
        _marker_id_constructor.assert_not_called()
        self._assert_init_methods()
    def test_addMarker_markerrefs_are_none(self):
        """When _get_markerrefs yields None, a MarkerRefsElement is created
        from scratch and appended to the topic before its children are read."""
        _element = TopicElement()
        _get_markerrefs = patch.object(_element, '_get_markerrefs').start()
        _get_markerrefs.return_value = None
        _marker_refs_element = Mock()
        # Injected exception stops execution right after the child lookup.
        _marker_refs_element.getChildNodesByTagName.side_effect = Exception(
            'test exception')
        _marker_refs_element_constructor_mock = self._init_patch_with_name(
            '_marker_refs_element_constructor_mock',
            'xmind.core.topic.MarkerRefsElement',
            return_value=_marker_refs_element
        )
        _get_owner_workbook_mock = patch.object(
            _element, 'getOwnerWorkbook').start()
        _get_owner_workbook_mock.return_value = 'ownerWorkbook'
        _append_child_mock = patch.object(_element, 'appendChild').start()
        _marker_id_constructor = self._init_patch_with_name(
            '_marker_id_constructor',
            'xmind.core.topic.MarkerId',
            return_value='new_marker_id'
        )
        with self.assertRaises(Exception) as _ex_mock:
            _element.addMarker('marker_test')
        self.assertTrue(_ex_mock.exception.args[0].find(
            "test exception") != -1)
        _marker_id_constructor.assert_called_once_with('marker_test')
        _get_markerrefs.assert_called_once()
        _get_owner_workbook_mock.assert_called_once()
        # Built from None because no refs node existed...
        _marker_refs_element_constructor_mock.assert_called_once_with(
            None, 'ownerWorkbook')
        # ...and therefore appended as a new child of the topic.
        _append_child_mock.assert_called_once_with(_marker_refs_element)
        _marker_refs_element.getChildNodesByTagName.assert_called_once_with(
            TAG_MARKERREF)
        self._assert_init_methods()
    def test_addMarker_markerrefs_are_object(self):
        """When _get_markerrefs yields an existing node, the MarkerRefsElement
        wraps it and nothing is appended to the topic."""
        _element = TopicElement()
        _get_markerrefs = patch.object(_element, '_get_markerrefs').start()
        _get_markerrefs.return_value = 'refs_value'
        _marker_refs_element = Mock()
        # Injected exception stops execution right after the child lookup.
        _marker_refs_element.getChildNodesByTagName.side_effect = Exception(
            'test exception')
        _marker_refs_element_constructor_mock = self._init_patch_with_name(
            '_marker_refs_element_constructor_mock',
            'xmind.core.topic.MarkerRefsElement',
            return_value=_marker_refs_element
        )
        _get_owner_workbook_mock = patch.object(
            _element, 'getOwnerWorkbook').start()
        _get_owner_workbook_mock.return_value = 'ownerWorkbook'
        _append_child_mock = patch.object(_element, 'appendChild').start()
        _marker_id_constructor = self._init_patch_with_name(
            '_marker_id_constructor',
            'xmind.core.topic.MarkerId',
            return_value='new_marker_id'
        )
        with self.assertRaises(Exception) as _ex_mock:
            _element.addMarker('marker_test')
        self.assertTrue(_ex_mock.exception.args[0].find(
            "test exception") != -1)
        _marker_id_constructor.assert_called_once_with('marker_test')
        _get_markerrefs.assert_called_once()
        _get_owner_workbook_mock.assert_called_once()
        _marker_refs_element_constructor_mock.assert_called_once_with(
            'refs_value', 'ownerWorkbook')
        # Existing refs node -> no new child appended.
        _append_child_mock.assert_not_called()
        _marker_refs_element.getChildNodesByTagName.assert_called_once_with(
            TAG_MARKERREF)
        self._assert_init_methods()
    def test_addMarker_markers_are_none(self):
        """With no existing marker-ref children, addMarker builds a fresh
        MarkerRefElement and sets the new MarkerId on it; the injected
        exception stops execution at setMarkerId."""
        _element = TopicElement()
        _get_markerrefs = patch.object(_element, '_get_markerrefs').start()
        _get_markerrefs.return_value = 'refs_value'
        _marker_refs_element = Mock()
        _marker_refs_element.getChildNodesByTagName.return_value = None
        _marker_refs_element_constructor_mock = self._init_patch_with_name(
            '_marker_refs_element_constructor_mock',
            'xmind.core.topic.MarkerRefsElement',
            return_value=_marker_refs_element
        )
        _get_owner_workbook_mock = patch.object(
            _element, 'getOwnerWorkbook').start()
        _get_owner_workbook_mock.return_value = 'ownerWorkbook'
        _append_child_mock = patch.object(_element, 'appendChild').start()
        _marker_id_constructor = self._init_patch_with_name(
            '_marker_id_constructor',
            'xmind.core.topic.MarkerId',
            return_value='new_marker_id'
        )
        _marker_ref_element = Mock()
        _marker_ref_element.setMarkerId.side_effect = Exception(
            'test exception')
        _marker_ref_element.appendChild.side_effect = Exception
        _marker_ref_element_constructor_mock = self._init_patch_with_name(
            '_marker_ref_element_constructor_mock',
            'xmind.core.topic.MarkerRefElement',
            return_value=_marker_ref_element
        )
        with self.assertRaises(Exception) as _ex_mock:
            _element.addMarker('marker_test')
        self.assertTrue(_ex_mock.exception.args[0].find(
            "test exception") != -1)
        _marker_id_constructor.assert_called_once_with('marker_test')
        _get_markerrefs.assert_called_once()
        # Workbook fetched for the refs element and again for the new ref.
        self.assertEqual(2, _get_owner_workbook_mock.call_count)
        _marker_refs_element_constructor_mock.assert_called_once_with(
            'refs_value', 'ownerWorkbook')
        _append_child_mock.assert_not_called()
        _marker_refs_element.getChildNodesByTagName.assert_called_once_with(
            TAG_MARKERREF)
        # New MarkerRefElement is built empty, then receives the MarkerId.
        _marker_ref_element_constructor_mock.assert_called_once_with(
            None, 'ownerWorkbook')
        _marker_ref_element.setMarkerId.assert_called_once_with(
            'new_marker_id')
        _marker_ref_element.appendChild.assert_not_called()
        self._assert_init_methods()
    def test_addMarker_markers_are_not_list(self):
        """addMarker raises when the marker-ref children come back as a
        non-iterable value (here: the int 12) — iteration fails before any
        MarkerRefElement is built."""
        _element = TopicElement()
        _get_markerrefs = patch.object(_element, '_get_markerrefs').start()
        _get_markerrefs.return_value = 'refs_value'
        _marker_refs_element = Mock()
        _marker_refs_element.getChildNodesByTagName.return_value = 12
        _marker_refs_element_constructor_mock = self._init_patch_with_name(
            '_marker_refs_element_constructor_mock',
            'xmind.core.topic.MarkerRefsElement',
            return_value=_marker_refs_element
        )
        _get_owner_workbook_mock = patch.object(
            _element, 'getOwnerWorkbook').start()
        _get_owner_workbook_mock.return_value = 'ownerWorkbook'
        _append_child_mock = patch.object(_element, 'appendChild').start()
        _marker_id_constructor = self._init_patch_with_name(
            '_marker_id_constructor',
            'xmind.core.topic.MarkerId',
            return_value='new_marker_id'
        )
        _marker_ref_element = Mock()
        _marker_ref_element.setMarkerId.side_effect = Exception("exception1")
        _marker_ref_element.appendChild.side_effect = Exception("exception2")
        _marker_ref_element_constructor_mock = self._init_patch_with_name(
            '_marker_ref_element_constructor_mock',
            'xmind.core.topic.MarkerRefElement',
            return_value=_marker_ref_element
        )
        with self.assertRaises(Exception) as _ex_mock:
            _element.addMarker('marker_test')
        self.assertTrue(_ex_mock.exception.args[0].find(
            "'int' object is not iterable") != -1, _ex_mock.exception.args[0])
        _marker_id_constructor.assert_called_once_with('marker_test')
        _get_markerrefs.assert_called_once()
        self.assertEqual(1, _get_owner_workbook_mock.call_count)
        _marker_refs_element_constructor_mock.assert_called_once_with(
            'refs_value', 'ownerWorkbook')
        _append_child_mock.assert_not_called()
        _marker_refs_element.getChildNodesByTagName.assert_called_once_with(
            TAG_MARKERREF)
        # Failure happens during iteration, before any ref element exists.
        _marker_ref_element_constructor_mock.assert_not_called()
        _marker_ref_element.setMarkerId.assert_not_called()
        _marker_ref_element.appendChild.assert_not_called()
        self._assert_init_methods()
    def test_addMarker_mre_family_equals_to_markerid(self):
        """When an existing marker-ref shares the MarkerId's family, addMarker
        updates that ref in place and returns it (no new ref is appended).
        NOTE: 'getFamilly' mirrors the production API's spelling."""
        _element = TopicElement()
        _get_markerrefs = patch.object(_element, '_get_markerrefs').start()
        _get_markerrefs.return_value = 'refs_value'
        _marker_refs_element = Mock()
        _marker_refs_element.getChildNodesByTagName.return_value = ['m1', 'm2']
        _marker_refs_element_constructor_mock = self._init_patch_with_name(
            '_marker_refs_element_constructor_mock',
            'xmind.core.topic.MarkerRefsElement',
            return_value=_marker_refs_element
        )
        _get_owner_workbook_mock = patch.object(
            _element, 'getOwnerWorkbook').start()
        _get_owner_workbook_mock.return_value = 'ownerWorkbook'
        _append_child_mock = patch.object(_element, 'appendChild').start()
        _marker_id_element = Mock()
        _marker_id_element.getFamilly.return_value = 15
        _marker_id_constructor = self._init_patch_with_name(
            '_marker_id_constructor',
            'xmind.core.topic.MarkerId',
            return_value=_marker_id_element
        )
        _marker_ref_element = Mock()
        _marker_ref_element.setMarkerId.side_effect = Exception
        _marker_ref_element.appendChild.side_effect = Exception
        _marker_ref_element_constructor_mock = patch(
            'xmind.core.topic.MarkerRefElement'
        ).start()
        # Family comparison: first existing ref mismatches (5), second
        # matches (15) — side_effect values are consumed in that order.
        _marker_with_family = Mock()
        _marker_with_family.getFamilly.side_effect = [5, 15]
        _element_not_equal = Mock()
        _element_not_equal.getMarkerId.return_value = _marker_with_family
        _element_equal = Mock()
        _element_equal.getMarkerId.return_value = _marker_with_family
        _element_equal.setMarkerId.return_value = None
        _marker_ref_element_constructor_mock.side_effect = [
            _element_not_equal,
            _element_equal
        ]
        self.assertEqual(_element_equal, _element.addMarker('marker_test'))
        _marker_id_constructor.assert_called_once_with('marker_test')
        _get_markerrefs.assert_called_once()
        self.assertEqual(3, _get_owner_workbook_mock.call_count)
        _marker_refs_element_constructor_mock.assert_called_once_with(
            'refs_value', 'ownerWorkbook')
        _append_child_mock.assert_not_called()
        _marker_refs_element.getChildNodesByTagName.assert_called_once_with(
            TAG_MARKERREF)
        self.assertEqual(2, _marker_ref_element_constructor_mock.call_count)
        self.assertListEqual(
            [call('m1', 'ownerWorkbook'), call('m2', 'ownerWorkbook')],
            _marker_ref_element_constructor_mock.call_args_list
        )
        _marker_ref_element.setMarkerId.assert_not_called()
        _marker_ref_element.appendChild.assert_not_called()
        self.assertEqual(2, _marker_id_element.getFamilly.call_count)
        self.assertEqual(2, _marker_with_family.getFamilly.call_count)
        # Matching ref gets the new MarkerId assigned in place.
        _element_equal.setMarkerId.assert_called_once_with(_marker_id_element)
        self._assert_init_methods()
    def test_addMarker_mre_family_does_not_equal_to_markerid(self):
        """When no existing marker-ref shares the MarkerId's family, addMarker
        creates a new MarkerRefElement, sets the id on it, and appends it to
        the refs element. NOTE: 'getFamilly' mirrors the production API's
        spelling."""
        _element = TopicElement()
        _get_markerrefs = patch.object(_element, '_get_markerrefs').start()
        _get_markerrefs.return_value = 'refs_value'
        _marker_refs_element = Mock()
        _marker_refs_element.getChildNodesByTagName.return_value = ['m1', 'm2']
        _marker_refs_element_constructor_mock = self._init_patch_with_name(
            '_marker_refs_element_constructor_mock',
            'xmind.core.topic.MarkerRefsElement',
            return_value=_marker_refs_element
        )
        _get_owner_workbook_mock = patch.object(
            _element, 'getOwnerWorkbook').start()
        _get_owner_workbook_mock.return_value = 'ownerWorkbook'
        _append_child_mock = patch.object(_element, 'appendChild').start()
        _marker_id_element = Mock()
        _marker_id_element.getFamilly.return_value = 15
        _marker_id_constructor = self._init_patch_with_name(
            '_marker_id_constructor',
            'xmind.core.topic.MarkerId',
            return_value=_marker_id_element
        )
        _marker_ref_element = Mock()
        _marker_ref_element_constructor_mock = patch(
            'xmind.core.topic.MarkerRefElement'
        ).start()
        # Both existing refs report non-matching families (5, then 6); the
        # third construction is the brand-new ref element.
        _marker_with_family = Mock()
        _marker_with_family.getFamilly.side_effect = [5, 6]
        _element_not_equal = Mock()
        _element_not_equal.getMarkerId.return_value = _marker_with_family
        _marker_ref_element_constructor_mock.side_effect = [
            _element_not_equal,
            _element_not_equal,
            _marker_ref_element
        ]
        self.assertEqual(_marker_ref_element,
                         _element.addMarker('marker_test'))
        _marker_id_constructor.assert_called_once_with('marker_test')
        _get_markerrefs.assert_called_once()
        self.assertEqual(4, _get_owner_workbook_mock.call_count)
        _marker_refs_element_constructor_mock.assert_called_once_with(
            'refs_value', 'ownerWorkbook')
        _append_child_mock.assert_not_called()
        _marker_refs_element.getChildNodesByTagName.assert_called_once_with(
            TAG_MARKERREF)
        self.assertEqual(3, _marker_ref_element_constructor_mock.call_count)
        self.assertListEqual(
            [
                call('m1', 'ownerWorkbook'),
                call('m2', 'ownerWorkbook'),
                call(None, 'ownerWorkbook')
            ],
            _marker_ref_element_constructor_mock.call_args_list
        )
        _marker_ref_element.setMarkerId.assert_called_once_with(
            _marker_id_element)
        # The new ref is appended to the refs element, not to the topic.
        _marker_refs_element.appendChild.assert_called_once_with(
            _marker_ref_element)
        self.assertEqual(2, _marker_id_element.getFamilly.call_count)
        self.assertEqual(2, _marker_with_family.getFamilly.call_count)
        self._assert_init_methods()
def test_setFolded(self):
_element = TopicElement()
with patch.object(_element, 'setAttribute') as _mock:
self.assertIsNone(_element.setFolded())
_mock.assert_called_once_with(ATTR_BRANCH, VAL_FOLDED)
self._assert_init_methods()
    def test_getPosition_position_is_none(self):
        """getPosition returns None and never constructs a PositionElement
        when no position node exists."""
        _element = TopicElement()
        _position_element_construction_mock = self._init_patch_with_name(
            '_position_element',
            'xmind.core.topic.PositionElement',
            thrown_exception=Exception,
            autospec=True)
        with patch.object(_element, '_get_position') as _mock:
            _mock.return_value = None
            self.assertIsNone(_element.getPosition())
            _mock.assert_called_once()
            _position_element_construction_mock.assert_not_called()
        self._assert_init_methods()
def test_getPosition_x_is_none_and_y_is_none(self):
# import ipdb; ipdb.set_trace()
_element = TopicElement()
_position_element_mock = Mock()
_position_element_mock.getX.return_value = None
_position_element_mock.getY.return_value = None
_position_element_construction_mock = self._init_patch_with_name(
'_position_element',
'xmind.core.topic.PositionElement',
return_value=_position_element_mock,
autospec=True)
_get_position_mock = patch.object(_element, '_get_position').start()
_get_position_mock.return_value = 'position'
_get_owner_workbook_mock = patch.object(_element, 'getOwnerWorkbook').start()
_get_owner_workbook_mock.return_value = 'ownerWorkbook'
self.assertIsNone(_element.getPosition())
_get_owner_workbook_mock.assert_called_once()
_get_position_mock.assert_called_once()
_position_element_construction_mock.assert_called_once_with(
'position',
'ownerWorkbook'
)
_position_element_mock.getX.assert_called_once()
_position_element_mock.getY.assert_called_once()
self._assert_init_methods()
    def test_getPosition_position_x_is_none(self):
        """A missing X coordinate defaults to 0 while Y is kept, so the
        result is the tuple (0, 5)."""
        _element = TopicElement()
        _position_element_mock = Mock()
        _position_element_mock.getX.return_value = None
        _position_element_mock.getY.return_value = 5
        _position_element_construction_mock = self._init_patch_with_name(
            '_position_element',
            'xmind.core.topic.PositionElement',
            return_value=_position_element_mock,
            autospec=True)
        _get_position_mock = patch.object(_element, '_get_position').start()
        _get_position_mock.return_value = 'position'
        _get_owner_workbook_mock = patch.object(_element, 'getOwnerWorkbook').start()
        _get_owner_workbook_mock.return_value = 'ownerWorkbook'
        self.assertEqual((0, 5), _element.getPosition())
        _get_owner_workbook_mock.assert_called_once()
        _get_position_mock.assert_called_once()
        _position_element_construction_mock.assert_called_once_with(
            'position',
            'ownerWorkbook'
        )
        _position_element_mock.getX.assert_called_once()
        _position_element_mock.getY.assert_called_once()
        self._assert_init_methods()
    def test_setPosition_position_is_none(self):
        """setPosition creates a brand-new PositionElement (keyword-only
        ownerWorkbook) and appends it when no position node exists, then
        writes both coordinates."""
        _element = TopicElement()
        _position_element_mock = Mock()
        _position_element_mock.setX.return_value = None
        _position_element_mock.setY.return_value = None
        _position_element_construction_mock = self._init_patch_with_name(
            '_position_element',
            'xmind.core.topic.PositionElement',
            return_value=_position_element_mock,
            autospec=True)
        _append_child_mock = patch.object(_element, 'appendChild').start()
        _get_position_mock = patch.object(_element, '_get_position').start()
        _get_position_mock.return_value = None
        _get_owner_workbook_mock = patch.object(_element, 'getOwnerWorkbook').start()
        _get_owner_workbook_mock.return_value = 'ownerWorkbook'
        self.assertIsNone(_element.setPosition(0, 6))
        _get_owner_workbook_mock.assert_called_once()
        _get_position_mock.assert_called_once()
        _position_element_construction_mock.assert_called_once_with(
            ownerWorkbook='ownerWorkbook'
        )
        _append_child_mock.assert_called_once_with(_position_element_mock)
        _position_element_mock.setX.assert_called_once_with(0)
        _position_element_mock.setY.assert_called_once_with(6)
        self._assert_init_methods()
    def test_setPosition_position_is_not_none(self):
        """setPosition wraps the existing position node (no appendChild) and
        writes both coordinates onto it."""
        _element = TopicElement()
        _position_element_mock = Mock()
        _position_element_mock.setX.return_value = None
        _position_element_mock.setY.return_value = None
        _position_element_construction_mock = self._init_patch_with_name(
            '_position_element',
            'xmind.core.topic.PositionElement',
            return_value=_position_element_mock,
            autospec=True)
        _append_child_mock = patch.object(_element, 'appendChild').start()
        _get_position_mock = patch.object(_element, '_get_position').start()
        _get_position_mock.return_value = 'newPosition'
        _get_owner_workbook_mock = patch.object(_element, 'getOwnerWorkbook').start()
        _get_owner_workbook_mock.return_value = 'ownerWorkbook'
        self.assertIsNone(_element.setPosition(0, 6))
        _get_owner_workbook_mock.assert_called_once()
        _get_position_mock.assert_called_once()
        _position_element_construction_mock.assert_called_once_with(
            'newPosition',
            'ownerWorkbook'
        )
        _append_child_mock.assert_not_called()
        _position_element_mock.setX.assert_called_once_with(0)
        _position_element_mock.setY.assert_called_once_with(6)
        self._assert_init_methods()
def test_removePosition_position_is_none(self):
_element = TopicElement()
_impl_mock = Mock()
_impl_mock.removeChild.side_effect = Exception
_get_position_mock = patch.object(_element, '_get_position').start()
_get_position_mock.return_value = None
_get_impl_mock = patch.object(_element, 'getImplementation').start()
_get_impl_mock.return_value = _impl_mock
self.assertIsNone(_element.removePosition())
_get_position_mock.assert_called_once()
_get_impl_mock.assert_not_called()
_impl_mock.removeChild.assert_not_called()
self._assert_init_methods()
def test_removePosition_position_is_not_none(self):
_element = TopicElement()
_impl_mock = Mock()
_get_position_mock = patch.object(_element, '_get_position').start()
_get_position_mock.return_value = 'newPosition'
_get_impl_mock = patch.object(_element, 'getImplementation').start()
_get_impl_mock.return_value = _impl_mock
self.assertIsNone(_element.removePosition())
_get_position_mock.assert_called_once()
_get_impl_mock.assert_called_once()
_impl_mock.removeChild.assert_called_once_with('newPosition')
self._assert_init_methods()
def test_getType_parent_is_none(self):
_element = TopicElement()
_topics_element_constructor_mock = self._init_patch_with_name(
'_topics_element_contructor',
'xmind.core.topic.TopicsElement',
thrown_exception=Exception
)
with patch.object(_element, 'getParentNode') as _mock:
_mock.return_value = None
self.assertIsNone(_element.getType())
_mock.assert_called_once()
_topics_element_constructor_mock.assert_not_called()
self._assert_init_methods()
def test_getType_parent_tagName_is_tag_sheet(self):
_element = TopicElement()
_topics_element_constructor_mock = self._init_patch_with_name(
'_topics_element_contructor',
'xmind.core.topic.TopicsElement',
thrown_exception=Exception
)
_parent_mock = Mock(tagName = TAG_SHEET)
with patch.object(_element, 'getParentNode') as _mock:
_mock.return_value = _parent_mock
self.assertEqual(TOPIC_ROOT, _element.getType())
_mock.assert_called_once()
_topics_element_constructor_mock.assert_not_called()
self._assert_init_methods()
    def test_getType_parent_tagName_is_tag_topics(self):
        """When the parent is a topics node, getType wraps it in a
        TopicsElement and delegates to that element's getType."""
        _element = TopicElement()
        _topics_mock = Mock()
        _topics_mock.getType.return_value = 'newType'
        _topics_element_constructor_mock = self._init_patch_with_name(
            '_topics_element_contructor',
            'xmind.core.topic.TopicsElement',
            return_value=_topics_mock
        )
        _parent_mock = Mock(tagName = TAG_TOPICS)
        _getParentNode_mock = patch.object(_element, 'getParentNode').start()
        _getParentNode_mock.return_value = _parent_mock
        _get_owner_workbook_mock = patch.object(_element, 'getOwnerWorkbook').start()
        _get_owner_workbook_mock.return_value = 'ownerWorkbook'
        self.assertEqual('newType', _element.getType())
        _getParentNode_mock.assert_called_once()
        _get_owner_workbook_mock.assert_called_once()
        _topics_element_constructor_mock.assert_called_once_with(_parent_mock, 'ownerWorkbook')
        _topics_mock.getType.assert_called_once()
        self._assert_init_methods()
def test_getTopics_topic_children_is_none(self):
_element = TopicElement()
_get_children_mock = patch.object(_element, '_get_children').start()
_get_children_mock.return_value = None
_children_element_constructor_mock = self._init_patch_with_name(
'_children_element_constructor',
'xmind.core.topic.ChildrenElement',
thrown_exception=Exception
)
_get_owner_workbook_mock = patch.object(_element, 'getOwnerWorkbook').start()
_get_owner_workbook_mock.side_effect = Exception
self.assertIsNone(_element.getTopics('newType'))
_get_children_mock.assert_called_once()
_children_element_constructor_mock.assert_not_called()
_get_owner_workbook_mock.assert_not_called()
self._assert_init_methods()
def test_getTopics_topic_children_is_not_none(self):
_element = TopicElement()
_get_children_mock = patch.object(_element, '_get_children').start()
_get_children_mock.return_value = 'some_topic_children'
_topic_children_mock = Mock()
_topic_children_mock.getTopics.return_value = 'newTopics'
_children_element_constructor_mock = self._init_patch_with_name(
'_children_element_constructor',
'xmind.core.topic.ChildrenElement',
return_value=_topic_children_mock
)
_get_owner_workbook_mock = patch.object(_element, 'getOwnerWorkbook').start()
_get_owner_workbook_mock.return_value = 'ownerWorkbook'
self.assertEqual('newTopics', _element.getTopics('newType'))
_get_children_mock.assert_called_once()
_get_owner_workbook_mock.assert_called_once()
_children_element_constructor_mock.assert_called_once_with(
'some_topic_children',
'ownerWorkbook'
)
_topic_children_mock.getTopics.assert_called_once_with('newType')
self._assert_init_methods()
def test_getSubTopics_topics_are_none(self):
_element = TopicElement()
with patch.object(_element, 'getTopics') as _getTopics_mock:
_getTopics_mock.return_value = None
self.assertIsNone(_element.getSubTopics())
_getTopics_mock.assert_called_once_with(TOPIC_ATTACHED)
self._assert_init_methods()
def test_getSubTopics_topics_are_not_none(self):
_element = TopicElement()
_topics_mock = Mock()
_topics_mock.getSubTopics.return_value = 12
with patch.object(_element, 'getTopics') as _getTopics_mock:
_getTopics_mock.return_value = _topics_mock
self.assertEqual(12, _element.getSubTopics())
_getTopics_mock.assert_called_once_with(TOPIC_ATTACHED)
_topics_mock.getSubTopics.assert_called_once()
self._assert_init_methods()
def test_getSubTopicByIndex_sub_topics_are_none(self):
    """getSubTopicByIndex() returns None when there are no sub-topics."""
    _element = TopicElement()
    with patch.object(_element, 'getSubTopics') as _getSubTopics_mock:
        _getSubTopics_mock.return_value = None
        self.assertIsNone(_element.getSubTopicByIndex(0))
    _getSubTopics_mock.assert_called_once_with(TOPIC_ATTACHED)
    self._assert_init_methods()

def test_getSubTopicByIndex_index_less_than_zero(self):
    """A negative index makes getSubTopicByIndex() return the whole list."""
    _element = TopicElement()
    with patch.object(_element, 'getSubTopics') as _getSubTopics_mock:
        _getSubTopics_mock.return_value = [1, 2]
        self.assertListEqual([1, 2], _element.getSubTopicByIndex(-1))
    _getSubTopics_mock.assert_called_once_with(TOPIC_ATTACHED)
    self._assert_init_methods()

def test_getSubTopicByIndex_index_greater_than_list_len(self):
    """An out-of-range index makes getSubTopicByIndex() return the whole list."""
    _element = TopicElement()
    with patch.object(_element, 'getSubTopics') as _getSubTopics_mock:
        _getSubTopics_mock.return_value = [1, 2]
        self.assertListEqual([1, 2], _element.getSubTopicByIndex(4))
    _getSubTopics_mock.assert_called_once_with(TOPIC_ATTACHED)
    self._assert_init_methods()

def test_getSubTopicByIndex_returns_sub_topic_by_index(self):
    """A valid index returns the corresponding sub-topic."""
    _element = TopicElement()
    with patch.object(_element, 'getSubTopics') as _getSubTopics_mock:
        _getSubTopics_mock.return_value = [1, 2]
        self.assertEqual(1, _element.getSubTopicByIndex(0))
    _getSubTopics_mock.assert_called_once_with(TOPIC_ATTACHED)
    self._assert_init_methods()
def test_addSubTopic_topic_is_none_get_children_throws(self):
    """With no topic argument a new TopicElement is constructed before the
    exception raised by _get_children propagates to the caller."""
    _element = TopicElement()
    # patch.object(...).start() keeps the patch active for the whole test;
    # cleanup is assumed to happen in the test-case teardown.
    _getOwnerWorkbook_mock = patch.object(_element, 'getOwnerWorkbook').start()
    _getOwnerWorkbook_mock.return_value = 'owner'
    _get_children_mock = patch.object(_element, '_get_children').start()
    _get_children_mock.side_effect = Exception('our exception')
    __class__mock = self._init_patch_with_name('_class_mock', 'xmind.core.topic.TopicElement.__init__')
    with self.assertRaises(Exception) as _ex:
        _element.addSubTopic()
    self.assertEqual("our exception", _ex.exception.args[0])
    _getOwnerWorkbook_mock.assert_called_once()
    # topic=None => a fresh TopicElement(None, owner) must have been built
    __class__mock.assert_called_once_with(None, 'owner')
    _get_children_mock.assert_called_once()
    self._assert_init_methods()

def test_addSubTopic_topic_is_not_none_get_children_throws(self):
    """With an explicit topic no new TopicElement is constructed; the
    exception raised by _get_children still propagates."""
    _element = TopicElement()
    _getOwnerWorkbook_mock = patch.object(_element, 'getOwnerWorkbook').start()
    _getOwnerWorkbook_mock.return_value = 'owner'
    _get_children_mock = patch.object(_element, '_get_children').start()
    _get_children_mock.side_effect = Exception('our exception')
    __class__mock = self._init_patch_with_name('_class_mock', 'xmind.core.topic.TopicElement.__init__')
    with self.assertRaises(Exception) as _ex:
        _element.addSubTopic('value')
    self.assertEqual("our exception", _ex.exception.args[0])
    _getOwnerWorkbook_mock.assert_called_once()
    __class__mock.assert_not_called()
    _get_children_mock.assert_called_once()
    self._assert_init_methods()
def test_addSubTopic_get_children_returns_none_append_child_throws(self):
    """When _get_children returns None a ChildrenElement is created and
    appended; the appendChild failure propagates before getTopics runs."""
    _element = TopicElement()
    _topic_children_element = Mock()
    _getOwnerWorkbook_mock = patch.object(_element, 'getOwnerWorkbook').start()
    _getOwnerWorkbook_mock.return_value = 'owner'
    _get_children_mock = patch.object(_element, '_get_children').start()
    _get_children_mock.return_value = None
    _ChildrenElement_mock = self._init_patch_with_name(
        '_ChildrenElement_mock',
        'xmind.core.topic.ChildrenElement',
        return_value=_topic_children_element
    )
    _appendChild_mock = patch.object(_element, 'appendChild').start()
    _appendChild_mock.side_effect = Exception("appendChildException")
    _topic_children_element.getTopics.side_effect = Exception('getTopicsException')
    with self.assertRaises(Exception) as _ex:
        _element.addSubTopic('value')
    # the first exception (from appendChild) is the one that escapes
    self.assertEqual('appendChildException', _ex.exception.args[0])
    _getOwnerWorkbook_mock.assert_called_once()
    _get_children_mock.assert_called_once()
    _ChildrenElement_mock.assert_called_once_with(ownerWorkbook='owner')
    _appendChild_mock.assert_called_once_with(_topic_children_element)
    _topic_children_element.getTopics.assert_not_called()
    self._assert_init_methods()

def test_addSubTopic_get_children_returns_value_getTopics_throws(self):
    """When _get_children returns a node the ChildrenElement wraps it (no
    appendChild); the getTopics failure propagates."""
    _element = TopicElement()
    _topic_children_element = Mock()
    _getOwnerWorkbook_mock = patch.object(_element, 'getOwnerWorkbook').start()
    _getOwnerWorkbook_mock.return_value = 'owner'
    _get_children_mock = patch.object(_element, '_get_children').start()
    _get_children_mock.return_value = 'topic_children_value'
    _ChildrenElement_mock = self._init_patch_with_name(
        '_ChildrenElement_mock',
        'xmind.core.topic.ChildrenElement',
        return_value=_topic_children_element
    )
    _appendChild_mock = patch.object(_element, 'appendChild').start()
    _appendChild_mock.side_effect = Exception('appendChildException')
    _topic_children_element.getTopics.side_effect = Exception('getTopicsException')
    with self.assertRaises(Exception) as _ex:
        _element.addSubTopic('value')
    self.assertEqual('getTopicsException', _ex.exception.args[0])
    _getOwnerWorkbook_mock.assert_called_once()
    _get_children_mock.assert_called_once()
    _ChildrenElement_mock.assert_called_once_with('topic_children_value', 'owner')
    _appendChild_mock.assert_not_called()
    _topic_children_element.getTopics.assert_called_once_with(TOPIC_ATTACHED)
    self._assert_init_methods()
def test_addSubTopic_getTopics_returns_none_appendChild_throws(self):
    """When getTopics returns None a TopicsElement is created, typed and
    appended to the children element; that appendChild failure propagates."""
    _element = TopicElement()
    _topic_children_element = Mock()
    _topic_children_element.getTopics.return_value = None
    _topic_children_element.appendChild.side_effect = Exception('appendChildExceptionTopic')
    _getOwnerWorkbook_mock = patch.object(_element, 'getOwnerWorkbook').start()
    _getOwnerWorkbook_mock.return_value = 'owner'
    _get_children_mock = patch.object(_element, '_get_children').start()
    _get_children_mock.return_value = 'topic_children_value'
    _ChildrenElement_mock = self._init_patch_with_name(
        '_ChildrenElement_mock',
        'xmind.core.topic.ChildrenElement',
        return_value=_topic_children_element
    )
    _appendChild_mock = patch.object(_element, 'appendChild').start()
    _appendChild_mock.side_effect = Exception('appendChildException')
    _topics_element = Mock()
    _TopicsElement_mock = self._init_patch_with_name(
        '_TopicsElement_mock',
        'xmind.core.topic.TopicsElement',
        return_value=_topics_element
    )
    with self.assertRaises(Exception) as _ex:
        _element.addSubTopic('value')
    self.assertEqual('appendChildExceptionTopic', _ex.exception.args[0])
    _getOwnerWorkbook_mock.assert_called_once()
    _get_children_mock.assert_called_once()
    _ChildrenElement_mock.assert_called_once_with('topic_children_value', 'owner')
    _appendChild_mock.assert_not_called()
    _topic_children_element.getTopics.assert_called_once_with(TOPIC_ATTACHED)
    _TopicsElement_mock.assert_called_once_with(ownerWorkbook='owner')
    _topics_element.setAttribute.assert_called_once_with(ATTR_TYPE, TOPIC_ATTACHED)
    _topic_children_element.appendChild.assert_called_once_with(_topics_element)
    self._assert_init_methods()

def test_addSubTopic_getTopics_returns_value_topics_appendChild_called(self):
    """With an existing (empty) topics element, the new topic is appended to
    it directly and returned; no new TopicsElement is constructed."""
    _element = TopicElement()
    _topics_element = Mock()
    _topics_element.getChildNodesByTagName.return_value = []
    _topic_children_element = Mock()
    _topic_children_element.getTopics.return_value = _topics_element
    _getOwnerWorkbook_mock = patch.object(_element, 'getOwnerWorkbook').start()
    _getOwnerWorkbook_mock.return_value = 'owner'
    _get_children_mock = patch.object(_element, '_get_children').start()
    _get_children_mock.return_value = 'topic_children_value'
    _ChildrenElement_mock = self._init_patch_with_name(
        '_ChildrenElement_mock',
        'xmind.core.topic.ChildrenElement',
        return_value=_topic_children_element
    )
    _appendChild_mock = patch.object(_element, 'appendChild').start()
    _appendChild_mock.side_effect = Exception
    _TopicsElement_mock = self._init_patch_with_name(
        '_TopicsElement_mock',
        'xmind.core.topic.TopicsElement',
        return_value=_topics_element
    )
    self.assertEqual('value', _element.addSubTopic('value'))
    _getOwnerWorkbook_mock.assert_called_once()
    _get_children_mock.assert_called_once()
    _ChildrenElement_mock.assert_called_once_with('topic_children_value', 'owner')
    _appendChild_mock.assert_not_called()
    _topic_children_element.getTopics.assert_called_once_with(TOPIC_ATTACHED)
    _TopicsElement_mock.assert_not_called()
    _topics_element.setAttribute.assert_not_called()
    _topic_children_element.appendChild.assert_not_called()
    _topics_element.getChildNodesByTagName.assert_called_once_with(TAG_TOPIC)
    _topics_element.appendChild.assert_called_once_with('value')
    self._assert_init_methods()
def test_addSubTopic_getTopics_returns_value_insertBefore_called(self):
    """With an index inside an existing topic list, the new topic is inserted
    before the wrapped sibling at that index rather than appended."""
    _element = TopicElement()
    _topics_element = Mock()
    # two existing child topic nodes -> index 0 triggers the insertBefore path
    _topics_element.getChildNodesByTagName.return_value = [311, 322]
    _topic_children_element = Mock()
    _topic_children_element.getTopics.return_value = _topics_element
    _getOwnerWorkbook_mock = patch.object(_element, 'getOwnerWorkbook').start()
    _getOwnerWorkbook_mock.return_value = 'owner'
    _get_children_mock = patch.object(_element, '_get_children').start()
    _get_children_mock.return_value = 'topic_children_value'
    _ChildrenElement_mock = self._init_patch_with_name(
        '_ChildrenElement_mock',
        'xmind.core.topic.ChildrenElement',
        return_value=_topic_children_element
    )
    _appendChild_mock = patch.object(_element, 'appendChild').start()
    _appendChild_mock.side_effect = Exception
    _TopicsElement_mock = self._init_patch_with_name(
        '_TopicsElement_mock',
        'xmind.core.topic.TopicsElement',
        return_value=_topics_element
    )
    with patch('xmind.core.topic.TopicElement') as _TopicElement_mock:
        # each raw child node is wrapped in a TopicElement; 66 is the wrapper
        # of node 311 (index 0), the insertion reference point
        _TopicElement_mock.side_effect = [66, 77]
        self.assertEqual('value', _element.addSubTopic('value', 0))
    _getOwnerWorkbook_mock.assert_called_once()
    _get_children_mock.assert_called_once()
    _ChildrenElement_mock.assert_called_once_with('topic_children_value', 'owner')
    _appendChild_mock.assert_not_called()
    _topic_children_element.getTopics.assert_called_once_with(TOPIC_ATTACHED)
    _TopicsElement_mock.assert_not_called()
    _topics_element.setAttribute.assert_not_called()
    _topic_children_element.appendChild.assert_not_called()
    self.assertEqual(2, _TopicElement_mock.call_count)
    self.assertListEqual([call(311, 'owner'), call(322, 'owner')], _TopicElement_mock.call_args_list)
    _topics_element.getChildNodesByTagName.assert_called_once_with(TAG_TOPIC)
    _topics_element.appendChild.assert_not_called()
    _topics_element.insertBefore.assert_called_once_with('value', 66)
    self._assert_init_methods()
def test_getIndex_parent_is_none(self):
    """getIndex() returns -1 when the element has no parent node."""
    _element = TopicElement()
    _getParentNode_mock = patch.object(_element, 'getParentNode').start()
    _getParentNode_mock.return_value = None
    self.assertEqual(-1, _element.getIndex())
    _getParentNode_mock.assert_called_once()
    self._assert_init_methods()

def test_getIndex_parent_tagName_is_not_topic(self):
    """getIndex() returns -1 when the parent is not a topics container."""
    _element = TopicElement()
    _tagName_mock = PropertyMock(return_value=TAG_CHILDREN)
    _parent = Mock()
    type(_parent).tagName = _tagName_mock
    _getParentNode_mock = patch.object(_element, 'getParentNode').start()
    _getParentNode_mock.return_value = _parent
    self.assertEqual(-1, _element.getIndex())
    _getParentNode_mock.assert_called_once()
    _tagName_mock.assert_called_once()
    self._assert_init_methods()

def test_getIndex_parent_childNodes_is_empty_list(self):
    """getIndex() returns -1 when the parent has no child nodes."""
    _element = TopicElement()
    _tagName_mock = PropertyMock(return_value=TAG_TOPICS)
    _childNodes_mock = PropertyMock(return_value=[])
    _parent = Mock()
    type(_parent).tagName = _tagName_mock
    type(_parent).childNodes = _childNodes_mock
    _getParentNode_mock = patch.object(_element, 'getParentNode').start()
    _getParentNode_mock.return_value = _parent
    self.assertEqual(-1, _element.getIndex())
    _getParentNode_mock.assert_called_once()
    _tagName_mock.assert_called_once()
    _childNodes_mock.assert_called_once()
    self._assert_init_methods()

def test_getIndex_none_of_childs_in_childNodes_equals_by_implementation(self):
    """getIndex() returns -1 when no sibling matches the implementation node."""
    _element = TopicElement()
    _tagName_mock = PropertyMock(return_value=TAG_TOPICS)
    _childNodes_mock = PropertyMock(return_value=['a', 'b', 'c'])
    _parent = Mock()
    type(_parent).tagName = _tagName_mock
    type(_parent).childNodes = _childNodes_mock
    _getParentNode_mock = patch.object(_element, 'getParentNode').start()
    _getParentNode_mock.return_value = _parent
    _getImplementation_mock = patch.object(_element, 'getImplementation').start()
    _getImplementation_mock.return_value = 'd'
    self.assertEqual(-1, _element.getIndex())
    _getParentNode_mock.assert_called_once()
    _tagName_mock.assert_called_once()
    _childNodes_mock.assert_called_once()
    # getImplementation is consulted once per sibling comparison
    self.assertEqual(3, _getImplementation_mock.call_count)
    self._assert_init_methods()

def test_getIndex_third_child_in_childNodes_equals_by_implementation(self):
    """getIndex() returns the zero-based position of the matching sibling."""
    _element = TopicElement()
    _tagName_mock = PropertyMock(return_value=TAG_TOPICS)
    _childNodes_mock = PropertyMock(return_value=['a', 'b', 'c', 'd', 'e'])
    _parent = Mock()
    type(_parent).tagName = _tagName_mock
    type(_parent).childNodes = _childNodes_mock
    _getParentNode_mock = patch.object(_element, 'getParentNode').start()
    _getParentNode_mock.return_value = _parent
    _getImplementation_mock = patch.object(_element, 'getImplementation').start()
    _getImplementation_mock.return_value = 'c'
    self.assertEqual(2, _element.getIndex())
    _getParentNode_mock.assert_called_once()
    _tagName_mock.assert_called_once()
    _childNodes_mock.assert_called_once()
    # iteration stops at the match: 'a', 'b', then 'c'
    self.assertEqual(3, _getImplementation_mock.call_count)
    self._assert_init_methods()
def test_getHyperlink(self):
    """getHyperlink() reads the href attribute of the topic element."""
    _element = TopicElement()
    with patch.object(_element, 'getAttribute') as _mock:
        _mock.return_value = 'http://go.here/'
        self.assertEqual('http://go.here/', _element.getHyperlink())
    _mock.assert_called_once_with(ATTR_HREF)
    self._assert_init_methods()
def test_setFileHyperlink_protocol_is_none(self):
    """A path without a protocol is made absolute and prefixed with file://."""
    _element = TopicElement()
    _split_hyperlink_mock = self._init_patch_with_name(
        '_split_hyperlink_mock',
        'xmind.core.topic.split_hyperlink',
        return_value=(None, 'someContent')
    )
    _get_abs_path_mock = self._init_patch_with_name(
        '_get_abs_path_mock',
        'xmind.core.topic.utils.get_abs_path',
        return_value='/some/file/here'
    )
    _set_hyperlink_mock = patch.object(_element, '_set_hyperlink').start()
    self.assertIsNone(_element.setFileHyperlink('here'))
    _split_hyperlink_mock.assert_called_once_with('here')
    _get_abs_path_mock.assert_called_once_with('here')
    _set_hyperlink_mock.assert_called_once_with('file:///some/file/here')
    self._assert_init_methods()

def test_setFileHyperlink_protocol_is_not_none(self):
    """A path that already carries a protocol is stored unchanged."""
    _element = TopicElement()
    _split_hyperlink_mock = self._init_patch_with_name(
        '_split_hyperlink_mock',
        'xmind.core.topic.split_hyperlink',
        return_value=('http://', 'someContent')
    )
    _get_abs_path_mock = self._init_patch_with_name(
        '_get_abs_path_mock',
        'xmind.core.topic.utils.get_abs_path',
        return_value='/some/file/here'
    )
    _set_hyperlink_mock = patch.object(_element, '_set_hyperlink').start()
    self.assertIsNone(_element.setFileHyperlink('http://here'))
    _split_hyperlink_mock.assert_called_once_with('http://here')
    _get_abs_path_mock.assert_not_called()
    _set_hyperlink_mock.assert_called_once_with('http://here')
    self._assert_init_methods()
def test_setTopicHyperlink_protocol_is_not_none(self):
    """A link that already carries a protocol is stored unchanged."""
    _element = TopicElement()
    _split_hyperlink_mock = self._init_patch_with_name(
        '_split_hyperlink_mock',
        'xmind.core.topic.split_hyperlink',
        return_value=('http://', 'someContent')
    )
    _set_hyperlink_mock = patch.object(_element, '_set_hyperlink').start()
    self.assertIsNone(_element.setTopicHyperlink('http://here'))
    _split_hyperlink_mock.assert_called_once_with('http://here')
    _set_hyperlink_mock.assert_called_once_with('http://here')
    self._assert_init_methods()

def test_setTopicHyperlink_protocol_is_none_tid_starts_with_sharp(self):
    """A topic id already starting with '#' only gains the xmind: prefix."""
    _element = TopicElement()
    _split_hyperlink_mock = self._init_patch_with_name(
        '_split_hyperlink_mock',
        'xmind.core.topic.split_hyperlink',
        return_value=(None, 'someContent')
    )
    _set_hyperlink_mock = patch.object(_element, '_set_hyperlink').start()
    self.assertIsNone(_element.setTopicHyperlink('#TheBest'))
    _split_hyperlink_mock.assert_called_once_with('#TheBest')
    _set_hyperlink_mock.assert_called_once_with('xmind:#TheBest')
    self._assert_init_methods()

def test_setTopicHyperlink_protocol_is_none(self):
    """A bare topic id is stored as 'xmind:#<id>'."""
    _element = TopicElement()
    _split_hyperlink_mock = self._init_patch_with_name(
        '_split_hyperlink_mock',
        'xmind.core.topic.split_hyperlink',
        return_value=(None, 'someContent')
    )
    _set_hyperlink_mock = patch.object(_element, '_set_hyperlink').start()
    self.assertIsNone(_element.setTopicHyperlink('TheBest'))
    _split_hyperlink_mock.assert_called_once_with('TheBest')
    _set_hyperlink_mock.assert_called_once_with('xmind:#TheBest')
    self._assert_init_methods()
def test_setURLHyperlink_protocol_is_none(self):
    """A URL without a protocol gets an http:// prefix before storing."""
    _element = TopicElement()
    _split_hyperlink_mock = self._init_patch_with_name(
        '_split_hyperlink_mock',
        'xmind.core.topic.split_hyperlink',
        return_value=(None, 'TheBest')
    )
    _set_hyperlink_mock = patch.object(_element, '_set_hyperlink').start()
    self.assertIsNone(_element.setURLHyperlink('TheBest'))
    _split_hyperlink_mock.assert_called_once_with('TheBest')
    _set_hyperlink_mock.assert_called_once_with('http://TheBest')
    self._assert_init_methods()

def test_setURLHyperlink_protocol_is_not_none(self):
    """A URL that already carries a protocol is stored unchanged."""
    _element = TopicElement()
    _split_hyperlink_mock = self._init_patch_with_name(
        '_split_hyperlink_mock',
        'xmind.core.topic.split_hyperlink',
        return_value=('someProtocol://', 'TheBest')
    )
    _set_hyperlink_mock = patch.object(_element, '_set_hyperlink').start()
    self.assertIsNone(_element.setURLHyperlink('someProtocol://TheBest'))
    _split_hyperlink_mock.assert_called_once_with('someProtocol://TheBest')
    _set_hyperlink_mock.assert_called_once_with('someProtocol://TheBest')
    self._assert_init_methods()
def test_getNotes_notes_are_none(self):
    """getNotes() returns None when the element has no notes child node."""
    _element = TopicElement()
    _NotesElement_mock = self._init_patch_with_name(
        '_NotesElement_mock',
        'xmind.core.topic.NotesElement',
        return_value='notes'
    )
    _getFirstChildNodeByTagName_mock = patch.object(_element, 'getFirstChildNodeByTagName').start()
    _getFirstChildNodeByTagName_mock.return_value = None
    self.assertIsNone(_element.getNotes())
    _NotesElement_mock.assert_not_called()
    _getFirstChildNodeByTagName_mock.assert_called_once_with(TAG_NOTES)
    self._assert_init_methods()

def test_getNotes_notes_are_not_none(self):
    """getNotes() wraps the found notes node in a NotesElement."""
    _element = TopicElement()
    _NotesElement_mock = self._init_patch_with_name(
        '_NotesElement_mock',
        'xmind.core.topic.NotesElement',
        return_value='notes'
    )
    _getFirstChildNodeByTagName_mock = patch.object(_element, 'getFirstChildNodeByTagName').start()
    _getFirstChildNodeByTagName_mock.return_value = 'someNotes'
    self.assertEqual('notes', _element.getNotes())
    _NotesElement_mock.assert_called_once_with('someNotes', _element)
    _getFirstChildNodeByTagName_mock.assert_called_once_with(TAG_NOTES)
    self._assert_init_methods()
def test__set_notes_notes_are_none(self):
    """_set_notes() creates and appends a fresh NotesElement when absent."""
    _element = TopicElement()
    _getNotes_mock = patch.object(_element, 'getNotes').start()
    _getNotes_mock.return_value = None
    _NotesElement_mock = self._init_patch_with_name(
        '_NotesElement_mock',
        'xmind.core.topic.NotesElement',
        return_value='notes'
    )
    _appendChild_mock = patch.object(_element, 'appendChild').start()
    self.assertEqual('notes', _element._set_notes())
    _getNotes_mock.assert_called_once()
    _NotesElement_mock.assert_called_once_with(ownerTopic=_element)
    _appendChild_mock.assert_called_once_with('notes')
    self._assert_init_methods()

def test__set_notes_notes_are_not_none(self):
    """_set_notes() reuses the existing notes element without appending."""
    _element = TopicElement()
    _getNotes_mock = patch.object(_element, 'getNotes').start()
    _getNotes_mock.return_value = 'someNotes'
    _NotesElement_mock = self._init_patch_with_name(
        '_NotesElement_mock',
        'xmind.core.topic.NotesElement',
        return_value='notes'
    )
    _appendChild_mock = patch.object(_element, 'appendChild').start()
    self.assertEqual('someNotes', _element._set_notes())
    _getNotes_mock.assert_called_once()
    _NotesElement_mock.assert_not_called()
    _appendChild_mock.assert_not_called()
    self._assert_init_methods()
def test_setPlainNotes_old_is_not_none(self):
    """setPlainNotes() removes an existing notes node of the same format
    before appending the new PlainNotes element."""
    _element = TopicElement()
    _notes_elemet = Mock()
    _notes_elemet.getFirstChildNodeByTagName.return_value = 'value'
    _getImplementation_mock = Mock()
    _notes_elemet.getImplementation.return_value = _getImplementation_mock
    _set_notes_mock = patch.object(_element, '_set_notes').start()
    _set_notes_mock.return_value = _notes_elemet
    _plain_notes_element = Mock()
    _plain_notes_element.getFormat.return_value = 'format'
    _PlainNotes_mock = self._init_patch_with_name(
        '_PlainNotes_mock',
        'xmind.core.topic.PlainNotes',
        return_value=_plain_notes_element
    )
    self.assertIsNone(_element.setPlainNotes('notesContent'))
    _set_notes_mock.assert_called_once()
    _PlainNotes_mock.assert_called_once_with('notesContent', None, _element)
    _notes_elemet.getFirstChildNodeByTagName.assert_called_once_with('format')
    # the stale node is detached from the underlying DOM implementation
    _getImplementation_mock.removeChild.assert_called_once_with('value')
    _notes_elemet.appendChild.assert_called_once_with(_plain_notes_element)
    self._assert_init_methods()

def test_setPlainNotes_old_is_none(self):
    """setPlainNotes() appends directly when no previous notes node exists."""
    _element = TopicElement()
    _notes_elemet = Mock()
    _notes_elemet.getFirstChildNodeByTagName.return_value = None
    _getImplementation_mock = Mock()
    _notes_elemet.getImplementation.return_value = _getImplementation_mock
    _set_notes_mock = patch.object(_element, '_set_notes').start()
    _set_notes_mock.return_value = _notes_elemet
    _plain_notes_element = Mock()
    _plain_notes_element.getFormat.return_value = 'format'
    _PlainNotes_mock = self._init_patch_with_name(
        '_PlainNotes_mock',
        'xmind.core.topic.PlainNotes',
        return_value=_plain_notes_element
    )
    self.assertIsNone(_element.setPlainNotes('notesContent'))
    _set_notes_mock.assert_called_once()
    _PlainNotes_mock.assert_called_once_with('notesContent', None, _element)
    _notes_elemet.getFirstChildNodeByTagName.assert_called_once_with('format')
    _getImplementation_mock.removeChild.assert_not_called()
    _notes_elemet.appendChild.assert_called_once_with(_plain_notes_element)
    self._assert_init_methods()
|
#!/usr/bin/env python3
# Meson dist script: rewrites prog.c inside the dist tree so that the
# string literal "incorrect" becomes "correct" before packaging.
import os
import pathlib

dist_root = pathlib.Path(os.environ['MESON_DIST_ROOT'])
prog_source = dist_root / 'prog.c'
patched = prog_source.read_text().replace('"incorrect"', '"correct"')
prog_source.write_text(patched)
|
# This file is part of the pyMOR project (http://www.pymor.org).
# Copyright 2013-2020 pyMOR developers and contributors. All rights reserved.
# License: BSD 2-Clause License (http://opensource.org/licenses/BSD-2-Clause)
from pymor.core.base import ImmutableObject
class InstationaryProblem(ImmutableObject):
    """Instationary problem description.

    This class describes an instationary problem of the form ::

        | ∂_t u(x, t, μ) + A(u(x, t, μ), t, μ) = f(x, t, μ),
        |                              u(x, 0, μ) = u_0(x, μ)

    where A, f are given by the problem's `stationary_part` and
    t is allowed to vary in the interval [0, T].

    Parameters
    ----------
    stationary_part
        The stationary part of the problem.
    initial_data
        |Function| providing the initial values u_0.
    T
        The final time T.
    parameter_space
        |ParameterSpace| for the problem.
    name
        Name of the problem.

    Attributes
    ----------
    T
    stationary_part
    parameter_space
    name
    """

    def __init__(self, stationary_part, initial_data, T=1., parameter_space=None, name=None):
        # Derive a default name from the stationary part when none is given.
        name = name or ('instationary_' + stationary_part.name)
        # __auto_init (pyMOR base-class machinery) stores all constructor
        # arguments as attributes; it inspects locals(), so it must run after
        # every local has its final value.
        self.__auto_init(locals())

    def with_stationary_part(self, **kwargs):
        # Return a copy of this (immutable) problem whose stationary part has
        # been updated via the stationary part's own `with_` method.
        return self.with_(stationary_part=self.stationary_part.with_(**kwargs))
|
from ..base import *
from ..button import *
from ..toolbar import *
from ..dialog import *
class SnapButton(ToolbarButton):
    """Toolbar button that toggles object snapping (hotkey: S)."""

    def __init__(self, toolbar):

        def toggle_snap():
            Mgr.update_locally("object_snap", "snap")

        ToolbarButton.__init__(self, toolbar, "", "icon_snap",
                               "Enable snapping", toggle_snap)
        self.set_hotkey(("s", 0), "S")
class OptionsButton(ToolbarButton):
    """Toolbar button that opens the snap options dialog."""

    def __init__(self, toolbar):

        def show_options():
            SnapDialog()

        ToolbarButton.__init__(self, toolbar, "", "icon_snap_options",
                               "Set snap options", show_options)
class SnapToolbar(Toolbar):
    """Toolbar hosting the snap toggle and snap options buttons, kept in sync
    with the app-wide "object_snap" updater and the Tools menus."""

    def __init__(self, parent, toolbar_id, name):
        Toolbar.__init__(self, parent, toolbar_id, name)
        self._btns = btns = {}
        borders = (0, 5, 0, 0)
        # snap toggle button; starts disabled until a tool enables snapping
        btn = SnapButton(self)
        btn.enable(False)
        self.add(btn, borders=borders, alignment="center_v")
        btns["snap"] = btn
        # snap options button; also starts disabled
        btn = OptionsButton(self)
        btn.enable(False)
        self.add(btn, borders=borders, alignment="center_v")
        btns["snap_options"] = btn
        # mirror the snap toggle as a check item in the context Tools menu
        tools_menu = Mgr.get("main_gui_components")["main_context_tools_menu"]
        item = tools_menu.add("snap", "Snap", self._btns["snap"].press, item_type="check")
        item.enable(False)
        self._tools_menu_item = item
        # entry in the tool options menu that opens the snap options dialog
        tools_menu = Mgr.get("tool_options_menu")
        item = tools_menu.add("snap", "Snap", lambda: self.__update_snapping("show_options"))
        item.enable(False)
        self._tool_options_menu_item = item
        Mgr.add_app_updater("object_snap", self.__update_snapping)

    def setup(self):
        """Register the state that disables the GUI while picking a
        transform start snap point."""

        def enter_transf_start_snap_mode(prev_state_id, active):
            Mgr.do("enable_gui", False)

        def exit_transf_start_snap_mode(next_state_id, active):
            Mgr.do("enable_gui")

        add_state = Mgr.add_state
        add_state("transf_start_snap_mode", -1, enter_transf_start_snap_mode,
                  exit_transf_start_snap_mode)

    def __update_snapping(self, update_type, *args):
        """App updater for "object_snap"; dispatches on `update_type`
        ("reset", "enable", "show_options" or "snap")."""
        if update_type == "reset":
            # disable and uncheck everything snap-related
            self._btns["snap"].active = False
            self._btns["snap"].enable(False)
            self._btns["snap_options"].enable(False)
            self._tools_menu_item.enable(False)
            self._tool_options_menu_item.enable(False)
        elif update_type == "enable":
            enable, force_snap_on = args
            if enable:
                self._btns["snap"].enable()
                self._btns["snap_options"].enable()
                self._tools_menu_item.enable()
                self._tool_options_menu_item.enable()
                if force_snap_on:
                    self._btns["snap"].active = True
                    self._tools_menu_item.check()
                else:
                    # reflect the stored on/off state for the current snap type
                    snap_settings = GD["snap"]
                    snap_type = snap_settings["type"]
                    active = snap_settings["on"][snap_type]
                    self._btns["snap"].active = active
                    self._tools_menu_item.check(active)
            else:
                if not (Mgr.is_state_active("creation_mode")
                        or GD["active_transform_type"]):
                    self._btns["snap"].enable(False)
                    self._btns["snap_options"].enable(False)
                    self._tools_menu_item.enable(False)
                    self._tools_menu_item.check(False)
                    self._tool_options_menu_item.enable(False)
                else:
                    # fall back to the previous snap type's on/off state
                    snap_settings = GD["snap"]
                    snap_type = snap_settings["prev_type"]
                    active = snap_settings["on"][snap_type]
                    self._btns["snap"].active = active
                    self._tools_menu_item.check(active)
        elif update_type == "show_options":
            SnapDialog()
        elif update_type == "snap":
            snap_settings = GD["snap"]
            snap_type = snap_settings["type"]
            if snap_type in ("transf_center", "coord_origin"):
                # these snap types cannot be toggled off via the button
                self._btns["snap"].active = True
                self._tools_menu_item.check()
                return
            # toggle the stored state and mirror it in button and menu item
            snap_on = not snap_settings["on"][snap_type]
            snap_settings["on"][snap_type] = snap_on
            self._btns["snap"].active = snap_on
            self._tools_menu_item.check(snap_on)
            transf_type = GD["active_transform_type"]
            state_id = Mgr.get_state_id()
            # update the status bar text to reflect the new snap state
            if transf_type and state_id == "selection_mode":
                if GD["snap"]["on"][transf_type]:
                    Mgr.update_app("status", ["select", transf_type, "snap_idle"])
                else:
                    Mgr.update_app("status", ["select", transf_type, "idle"])
            elif state_id == "creation_mode":
                creation_type = GD["active_creation_type"]
                if GD["snap"]["on"]["creation"]:
                    Mgr.update_app("status", ["create", creation_type, "snap_idle"])
                else:
                    Mgr.update_app("status", ["create", creation_type, "idle"])
            Mgr.update_remotely("object_snap")
class SnapDialog(Dialog):
def __init__(self):
old_options = GD["snap"]
self._snap_type = snap_type = old_options["type"]
if snap_type == "creation":
title = 'Snap options (object creation)'
elif snap_type == "transf_center":
title = 'Snap options (transform center)'
elif snap_type == "coord_origin":
title = 'Snap options (ref. coord. origin)'
elif snap_type == "translate":
title = 'Snap options (translation)'
elif snap_type == "rotate":
title = 'Snap options (rotation)'
elif snap_type == "scale":
title = 'Snap options (scaling)'
Dialog.__init__(self, title, "okcancel", on_yes=self.__on_yes)
self._options = new_options = {}
if snap_type == "creation":
checkbtns = {}
fields = {}
toggle_btns = ToggleButtonGroup()
creation_phase_radio_btns = {}
new_creation_options = new_options["creation_start"] = {}
for option_id in ("on", "tgt_type", "size", "show_marker", "marker_size"):
new_creation_options[option_id] = old_options[option_id]["creation_start"]
for creation_snap_type in ("creation_phase_1", "creation_phase_2", "creation_phase_3"):
new_creation_options = new_options[creation_snap_type] = {}
for option_id in ("on", "tgt_type", "size", "show_marker", "marker_size",
"show_proj_line", "show_proj_marker", "proj_marker_size", "increment"):
new_creation_options[option_id] = old_options[option_id][creation_snap_type]
else:
for option_id in ("src_type", "tgt_type", "size", "show_marker", "marker_size"):
new_options[option_id] = old_options[option_id][snap_type]
if snap_type not in ("transf_center", "coord_origin", "creation"):
for option_id in ("show_rubber_band", "show_proj_line", "show_proj_marker",
"proj_marker_size", "use_axis_constraints", "increment"):
new_options[option_id] = old_options[option_id][snap_type]
client_sizer = self.get_client_sizer()
if snap_type in ("translate", "rotate", "scale"):
group = DialogWidgetGroup(self, "Snap from:")
borders = (20, 20, 0, 10)
client_sizer.add(group, expand=True, borders=borders)
def get_command(target_type):
def command():
self._options["src_type"] = target_type
return command
columns = 4 if snap_type == "translate" else 3
radio_btns = DialogRadioButtonGroup(group, columns=columns, gap_h=10, gap_v=5)
if snap_type == "translate":
btn_id = "transf_center"
radio_btns.add_button(btn_id, "transform center")
radio_btns.set_button_command(btn_id, get_command(btn_id))
btn_ids = ("grid_point", "obj_center", "obj_pivot", "vert", "edge", "poly")
texts = ("grid point", "object center", "object pivot", "vertex",
"edge center", "polygon center")
for btn_id, text in zip(btn_ids, texts):
radio_btns.add_button(btn_id, text)
radio_btns.set_button_command(btn_id, get_command(btn_id))
radio_btns.set_selected_button(old_options["src_type"][snap_type])
borders = (5, 0, 0, 0)
group.add(radio_btns.sizer, expand=True, borders=borders)
def add_target_options(parent, parent_sizer, borders, for_creation_phase=False):
group = DialogWidgetGroup(parent, "Snap to:")
parent_sizer.add(group, expand=True, borders=borders)
add_incr_option = ((snap_type == "creation" and for_creation_phase)
or snap_type in ("translate", "rotate", "scale"))
columns = 4 if add_incr_option else 3
radio_btns = DialogRadioButtonGroup(group, columns=columns, gap_h=10, gap_v=5)
if add_incr_option:
def command():
if for_creation_phase:
self._options[self._creation_phase_id]["tgt_type"] = "increment"
else:
self._options["tgt_type"] = "increment"
if snap_type == "rotate":
btn_txt = "angle increment"
elif snap_type == "scale":
btn_txt = "scale increment"
else:
btn_txt = "offset increment"
radio_btns.add_button("increment", btn_txt)
radio_btns.set_button_command("increment", command)
def get_command(target_type):
def command():
if for_creation_phase:
self._options[self._creation_phase_id]["tgt_type"] = target_type
elif snap_type == "creation":
self._options["creation_start"]["tgt_type"] = target_type
else:
self._options["tgt_type"] = target_type
return command
btn_ids = ("grid_point", "obj_center", "obj_pivot", "vert", "edge", "poly")
texts = ("grid point", "object center", "object pivot", "vertex",
"edge center", "polygon center")
for btn_id, text in zip(btn_ids, texts):
radio_btns.add_button(btn_id, text)
radio_btns.set_button_command(btn_id, get_command(btn_id))
if for_creation_phase:
tgt_type = old_options["tgt_type"][self._creation_phase_id]
creation_phase_radio_btns["tgt_type"] = radio_btns
elif snap_type == "creation":
tgt_type = old_options["tgt_type"]["creation_start"]
else:
tgt_type = old_options["tgt_type"][snap_type]
radio_btns.set_selected_button(tgt_type)
if snap_type == "creation" and not for_creation_phase:
h_subsizer = Sizer("horizontal")
borders = (5, 0, 0, 0)
group.add(h_subsizer, expand=True, proportion=1., borders=borders)
h_subsizer.add(radio_btns.sizer, proportion=1., borders=borders)
subsizer = Sizer("vertical")
borders = (50, 0, 0, 0)
h_subsizer.add(subsizer, expand=True, proportion=1., borders=borders)
parent_sizer.add((0, 5))
else:
borders = (5, 0, 0, 0)
group.add(radio_btns.sizer, expand=True, borders=borders)
subsizer = Sizer("horizontal")
borders = (5, 0, 0, 10)
group.add(subsizer, expand=True, borders=borders)
if add_incr_option:
if snap_type == "rotate":
incr_type = "Angle"
incr_unit_descr = " (degr.)"
input_parser = self.__parse_angle_incr_input
val_rng = (.001, 180.)
elif snap_type == "scale":
incr_type = "Scale"
incr_unit_descr = " (%)"
input_parser = self.__parse_input
val_rng = (.001, None)
else:
incr_type = "Offset"
incr_unit_descr = ""
input_parser = self.__parse_input
val_rng = (.001, None)
text = DialogText(group, f"{incr_type} increment{incr_unit_descr}:")
borders = (5, 0, 0, 0)
subsizer.add(text, alignment="center_v", borders=borders)
val_id = "increment"
if for_creation_phase:
handler = self.__get_value_handler()
incr = old_options[val_id][self._creation_phase_id]
else:
handler = self.__handle_value
incr = old_options[val_id][snap_type]
field = DialogSpinnerField(group, val_id, "float", val_rng,
.001, handler, 100)
if for_creation_phase:
fields[val_id] = field
field.set_value(incr)
field.set_input_parser(input_parser)
subsizer.add(field, proportion=1., alignment="center_v", borders=borders)
subsizer.add((10, 0), proportion=.2)
text = DialogText(group, "Target point size:")
borders = (5, 0, 0, 0)
subsizer.add(text, alignment="center_v", borders=borders)
val_id = "size"
if for_creation_phase:
handler = self.__get_value_handler()
size = old_options[val_id][self._creation_phase_id]
elif snap_type == "creation":
handler = self.__get_value_handler("creation_start")
size = old_options[val_id]["creation_start"]
else:
handler = self.__handle_value
size = old_options[val_id][snap_type]
field = DialogSpinnerField(group, val_id, "float", (.001, None),
.001, handler, 100)
if for_creation_phase:
fields[val_id] = field
field.set_value(size)
field.set_input_parser(self.__parse_input)
subsizer.add(field, expand=True, proportion=1., alignment="center_v", borders=borders)
def add_marker_display_options(group, text, for_creation_phase=False):
def command(show):
if for_creation_phase:
self._options[self._creation_phase_id]["show_marker"] = show
elif snap_type == "creation":
self._options["creation_start"]["show_marker"] = show
else:
self._options["show_marker"] = show
widgets = []
checkbtn = DialogCheckButton(group, command, text)
val_id = "show_marker"
if for_creation_phase:
show = old_options[val_id][self._creation_phase_id]
elif snap_type == "creation":
show = old_options[val_id]["creation_start"]
else:
show = old_options[val_id][snap_type]
checkbtn.check(show)
widgets.append(checkbtn)
widgets.append(DialogText(group, "Size:"))
val_id = "marker_size"
if for_creation_phase:
handler = self.__get_value_handler()
size = old_options[val_id][self._creation_phase_id]
elif snap_type == "creation":
handler = self.__get_value_handler("creation_start")
size = old_options[val_id]["creation_start"]
else:
handler = self.__handle_value
size = old_options[val_id][snap_type]
field = DialogSpinnerField(group, val_id, "float", (.001, None), .001,
handler, 100)
field.set_value(size)
field.set_input_parser(self.__parse_input)
widgets.append(field)
return widgets
def add_marker_display_group(parent, parent_sizer, borders, proportion=0.):
group = DialogWidgetGroup(parent, "Marker display")
parent_sizer.add(group, expand=True, proportion=proportion, borders=borders)
subsizer = Sizer("horizontal")
borders = (5, 0, 0, 0)
group.add(subsizer, expand=True, borders=borders)
checkbtn, text, field = add_marker_display_options(group, "Show")
subsizer.add(checkbtn, alignment="center_v")
subsizer.add((10, 0), proportion=.1)
subsizer.add(text, alignment="center_v", borders=borders)
subsizer.add(field, proportion=1., alignment="center_v", borders=borders)
if snap_type == "creation":
self._creation_phase_id = "creation_phase_1"
group = DialogWidgetGroup(self, "Creation start")
borders = (20, 20, 0, 10)
client_sizer.add(group, expand=True, borders=borders)
subsizer = Sizer("horizontal")
borders = (5, 0, 0, 0)
group.add(subsizer, expand=True, borders=borders)
def enable_snapping(enable):
self._options["creation_start"]["on"] = enable
text = "Enable snapping"
checkbtn = DialogCheckButton(group, enable_snapping, text)
checkbtn.check(old_options["on"]["creation_start"])
subsizer.add(checkbtn, alignment="center_v")
borders = (20, 5, 0, 0)
add_marker_display_group(group, subsizer, borders, proportion=1.)
borders = (5, 5, 0, 10)
add_target_options(group, group.get_client_sizer(), borders)
group = DialogWidgetGroup(self, "Creation phases")
borders = (20, 20, 0, 10)
client_sizer.add(group, expand=True, borders=borders)
subsizer = Sizer("horizontal")
borders = (5, 0, 0, 0)
group.add(subsizer, expand=True, borders=borders)
def get_checkbox_command(phase_id):
def enable_snapping(enable):
self._options[f"creation_{phase_id}"]["on"] = enable
return enable_snapping
def get_btn_command(phase_id):
def command():
toggle_btns.set_active_button(phase_id)
options = self._options[f"creation_{phase_id}"]
creation_phase_radio_btns["tgt_type"].set_selected_button(options["tgt_type"])
for option_id in ("show_marker", "show_proj_marker", "show_proj_line"):
checkbtns[option_id].check(options[option_id])
for option_id in ("increment", "size", "marker_size", "proj_marker_size"):
fields[option_id].set_value(options[option_id])
self._creation_phase_id = f"creation_{phase_id}"
return command
for index in range(3):
phase_id = f"phase_{index + 1}"
checkbtn = DialogCheckButton(group, get_checkbox_command(phase_id))
checkbtn.check(old_options["on"][f"creation_{phase_id}"])
subsizer.add(checkbtn, alignment="center_v")
text = f"Phase {index + 1}"
tooltip_text = f"Creation phase {index + 1} settings"
btn = DialogButton(group, text, "", tooltip_text)
toggle = (get_btn_command(phase_id), lambda: None)
toggle_btns.add_button(btn, phase_id, toggle)
subsizer.add(btn, alignment="center_v", borders=borders)
subsizer.add((10, 0), proportion=1.)
toggle_btns.set_active_button("phase_1")
borders = (5, 5, 0, 10)
add_target_options(group, group.get_client_sizer(), borders, True)
subgroup = DialogWidgetGroup(group, "Display")
borders = (5, 5, 5, 10)
group.add(subgroup, expand=True, borders=borders)
subsizer = GridSizer(columns=7, gap_h=5, gap_v=2)
borders = (5, 0, 0, 0)
subgroup.add(subsizer, expand=True, borders=borders)
checkbtn, text, field = add_marker_display_options(subgroup,
"Target point marker", for_creation_phase=True)
checkbtns["show_marker"] = checkbtn
fields["marker_size"] = field
subsizer.add(checkbtn, alignment_v="center_v")
subsizer.add((10, 0), proportion_h=.5)
subsizer.add(text, alignment_v="center_v")
subsizer.add(field, alignment_v="center_v")
subsizer.add((10, 0), proportion_h=1.)
subsizer.add((0, 0))
subsizer.add((0, 0))
def command(show):
self._options[self._creation_phase_id]["show_proj_marker"] = show
val_id = "show_proj_marker"
text = "Projected point marker"
checkbtn = DialogCheckButton(subgroup, command, text)
checkbtn.check(old_options[val_id][self._creation_phase_id])
checkbtns["show_proj_marker"] = checkbtn
subsizer.add(checkbtn, alignment_v="center_v")
subsizer.add((10, 0), proportion_h=.5)
text = DialogText(subgroup, "Size:")
subsizer.add(text, alignment_v="center_v")
val_id = "proj_marker_size"
field = DialogSpinnerField(subgroup, val_id, "float", (.001, None), .001,
self.__get_value_handler(), 100)
fields[val_id] = field
field.set_value(old_options[val_id][self._creation_phase_id])
field.set_input_parser(self.__parse_input)
subsizer.add(field, alignment_v="center_v")
subsizer.add((10, 0), proportion_h=1.)
def command(show):
self._options[self._creation_phase_id]["show_proj_line"] = show
val_id = "show_proj_line"
text = "Projection line"
checkbtn = DialogCheckButton(subgroup, command, text)
checkbtn.check(old_options[val_id][self._creation_phase_id])
checkbtns["show_proj_line"] = checkbtn
subsizer.add(checkbtn, alignment_v="center_v")
else:
borders = (20, 20, 0, 10)
add_target_options(self, client_sizer, borders)
if snap_type in ("transf_center", "coord_origin"):
add_marker_display_group(self, client_sizer, borders)
else:
group = DialogWidgetGroup(self, "Display")
client_sizer.add(group, expand=True, borders=borders)
subsizer = GridSizer(columns=6, gap_h=5, gap_v=2)
borders = (5, 0, 0, 0)
group.add(subsizer, expand=True, borders=borders)
checkbtn, text, field = add_marker_display_options(group,
"Target point marker")
subsizer.add(checkbtn, alignment_v="center_v")
subsizer.add((10, 0), proportion_h=.5)
subsizer.add(text, alignment_v="center_v")
subsizer.add(field, alignment_v="center_v")
subsizer.add((10, 0), proportion_h=1.)
def command(show):
self._options["show_rubber_band"] = show
val_id = "show_rubber_band"
text = "Rubber band"
checkbtn = DialogCheckButton(group, command, text)
checkbtn.check(old_options[val_id][snap_type])
subsizer.add(checkbtn, alignment_v="center_v")
def command(show):
self._options["show_proj_marker"] = show
val_id = "show_proj_marker"
text = "Projected point marker"
checkbtn = DialogCheckButton(group, command, text)
checkbtn.check(old_options[val_id][snap_type])
subsizer.add(checkbtn, alignment_v="center_v")
subsizer.add((10, 0), proportion_h=.5)
text = DialogText(group, "Size:")
subsizer.add(text, alignment_v="center_v")
val_id = "proj_marker_size"
field = DialogSpinnerField(group, val_id, "float", (.001, None), .001,
self.__handle_value, 100)
field.set_value(old_options[val_id][snap_type])
field.set_input_parser(self.__parse_input)
subsizer.add(field, alignment_v="center_v")
subsizer.add((10, 0), proportion_h=1.)
def command(show):
self._options["show_proj_line"] = show
val_id = "show_proj_line"
text = "Projection line"
checkbtn = DialogCheckButton(group, command, text)
checkbtn.check(old_options[val_id][snap_type])
subsizer.add(checkbtn, alignment_v="center_v")
def command(use):
self._options["use_axis_constraints"] = use
text = "Use axis constraints (snap to projection of target " \
"point onto transform plane/axis)"
checkbtn = DialogCheckButton(self, command, text)
checkbtn.check(old_options["use_axis_constraints"][snap_type])
borders = (25, 20, 0, 10)
client_sizer.add(checkbtn, borders=borders)
client_sizer.add((0, 20))
self.finalize()
def __get_value_handler(self, snap_type="creation_phase"):
    """Build a value-handler callback bound to a snap type.

    For the default "creation_phase", the returned handler writes into
    the options of whichever creation phase is active at call time;
    otherwise it writes into the options of the fixed snap type given.
    """

    def handle_value(value_id, value, state="done"):
        target = (self._options[self._creation_phase_id]
                  if snap_type == "creation_phase"
                  else self._options[snap_type])
        target[value_id] = value

    return handle_value
def __handle_value(self, value_id, value, state="done"):
    """Store a top-level snap option; `state` is accepted but unused."""
    self._options.update({value_id: value})
def __parse_input(self, input_text):
    """Parse a spinner-field entry as a positive float (>= .001).

    The text is evaluated as a Python expression so simple arithmetic
    like "1/3" is accepted. NOTE(review): `eval` on user input is unsafe
    in general; tolerable only because it comes from a local dialog.

    Returns None when the text is not a valid numeric expression.
    """
    try:
        return max(.001, abs(float(eval(input_text))))
    except Exception:  # was a bare `except:`, which also swallowed
        return None    # SystemExit/KeyboardInterrupt
def __parse_angle_incr_input(self, input_text):
    """Parse a field entry as an angle increment in degrees.

    The text is evaluated as a Python expression; the absolute value is
    clamped to the [.001, 180.] range. Returns None for invalid input.
    """
    try:
        return max(.001, min(180., abs(float(eval(input_text)))))
    except Exception:  # was a bare `except:`; don't swallow SystemExit
        return None    # or KeyboardInterrupt
def __on_yes(self):
    """Commit the edited snap options into the global settings (GD["snap"]).

    Called when the dialog is confirmed. While the new options are copied
    over, any currently active snap-related input state is suppressed so
    the running mode does not react to half-updated settings.
    """
    snap_type = self._snap_type
    state_id = Mgr.get_state_id()
    # Temporarily suppress an active snap/creation mode during the update.
    if ((state_id == "transf_center_snap_mode" and snap_type == "transf_center")
            or (state_id == "coord_origin_snap_mode" and snap_type == "coord_origin")
            or state_id == "creation_mode"):
        Mgr.enter_state("suppressed")
    old_options = GD["snap"]
    new_options = self._options
    if snap_type == "creation":
        # Creation snapping stores one option set for the creation start
        # plus one per creation phase; copy each set separately.
        new_creation_options = new_options["creation_start"]
        for option_id in ("on", "tgt_type", "size", "show_marker", "marker_size"):
            old_options[option_id]["creation_start"] = new_creation_options[option_id]
        for creation_snap_type in ("creation_phase_1", "creation_phase_2", "creation_phase_3"):
            new_creation_options = new_options[creation_snap_type]
            for option_id in ("on", "tgt_type", "size", "show_marker", "marker_size",
                              "show_proj_line", "show_proj_marker", "proj_marker_size", "increment"):
                old_options[option_id][creation_snap_type] = new_creation_options[option_id]
    else:
        # Non-creation snap types keep their options directly on self._options.
        for option_id in ("src_type", "tgt_type", "size", "show_marker", "marker_size"):
            old_options[option_id][snap_type] = new_options[option_id]
        # Transform-center / coord-origin snapping has no projection or
        # increment options; skip those for these snap types.
        if snap_type not in ("transf_center", "coord_origin", "creation"):
            for option_id in ("show_rubber_band", "show_proj_line", "show_proj_marker",
                              "proj_marker_size", "use_axis_constraints", "increment"):
                old_options[option_id][snap_type] = new_options[option_id]
    # Restore the mode suppressed above.
    if ((state_id == "transf_center_snap_mode" and snap_type == "transf_center")
            or (state_id == "coord_origin_snap_mode" and snap_type == "coord_origin")
            or state_id == "creation_mode"):
        Mgr.exit_state("suppressed")
|
import numpy as np
import PIL.Image
import PIL.ImageDraw
import PIL.ImageFilter
import PIL.ImageFont
import PIL.ImageOps
from ..image import image_line
from .mullerlyer_parameters import _mullerlyer_parameters
def _mullerlyer_image(parameters=None, width=800, height=600, outline=20, background="white", **kwargs):
    """Draw a Mueller-Lyer illusion stimulus.

    parameters -- pre-computed coordinate dict; generated from **kwargs
                  via _mullerlyer_parameters() when None
    width, height -- canvas size in pixels
    outline    -- line thickness passed to image_line()
    background -- canvas fill color

    Returns a PIL.Image in RGB mode.
    """
    if parameters is None:
        # BUG FIX: the original called the undefined name
        # `mullerlyer_parameters`; only `_mullerlyer_parameters` is
        # imported at the top of this module.
        parameters = _mullerlyer_parameters(**kwargs)
    # Background canvas
    image = PIL.Image.new('RGB', (width, height), color=background)
    # Distractor fins: two line segments at each end of both target lines
    for which in ["TopLeft", "TopRight", "BottomLeft", "BottomRight"]:
        for side in ["1", "2"]:
            image = image_line(
                image=image,
                x1=parameters["Distractor_" + which + side + "_x1"],
                y1=parameters["Distractor_" + which + side + "_y1"],
                x2=parameters["Distractor_" + which + side + "_x2"],
                y2=parameters["Distractor_" + which + side + "_y2"],
                color="black",
                size=outline)
    # Target lines (horizontal)
    for position in ["Bottom", "Top"]:
        image = image_line(image=image,
                           x1=parameters[position + "_x1"],
                           y1=parameters[position + "_y1"],
                           x2=parameters[position + "_x2"],
                           y2=parameters[position + "_y2"],
                           color="red",
                           size=outline)
    return image
|
import sys


def build_signal(ops):
    """Simulate the CPU and return X-register values indexed by cycle.

    ops -- iterable of instruction strings ("noop" or "addx <v>")

    signal[c] is the value of X *during* cycle c (1-based); index 0 is a
    sentinel so lookups line up with the 1-based cycle numbering.
    """
    signal = [1, 1]
    for op in ops:
        # Both noop (1 cycle) and addx (2 cycles) leave X unchanged first.
        signal.append(signal[-1])
        if not op.startswith("noop"):
            _, value = op.split()
            signal.append(signal[-1] + int(value))
    return signal


def render(signal):
    """Rasterize the 40x6 CRT.

    Pixel i is lit ("#") when the 3-pixel-wide sprite, centred on the X
    value during that cycle, overlaps column i % 40.
    Returns the six scanlines as strings.
    """
    rows = [[" "] * 40 for _ in range(6)]
    for i in range(240):
        row, col = divmod(i, 40)
        if abs(signal[i + 1] - col) <= 1:
            rows[row][col] = "#"
    return ["".join(r) for r in rows]


if __name__ == "__main__":
    # Refactored into functions for testability; the script behavior
    # (read program from stdin, print the rendered display) is unchanged.
    ops = [l.strip() for l in sys.stdin]
    for line in render(build_signal(ops)):
        print(line)
|
import os


def main():
    """Print every file name found in out/itemTrendDetail."""
    # BUG FIX: the original called `str(filename).decode()`, a Python-2
    # idiom; in Python 3, `str` has no `.decode` and os.listdir() already
    # returns str, so the loop raised AttributeError on the first entry.
    # The unused enumerate()/count was dropped as well.
    for filename in os.listdir("out/itemTrendDetail"):
        print(filename)


if __name__ == '__main__':
    main()
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
def Postorder_Traverse(root):
    """Return the nodes of a binary tree in post-order (left, right, root).

    Iterative two-state stack approach: each node is pushed tagged 'L'
    (left subtree not yet visited), re-pushed tagged 'R' (right subtree
    pending), and emitted once popped with tag 'R'.

    BUG FIXES vs. the original: `now` was read before assignment
    (NameError), the `while True` loop had no exit (infinite loop once
    the stack drained), and `ret` was never returned.
    """
    ret = []
    stack = []
    now = root
    while True:
        # Walk down the left spine, tagging each node 'L'.
        while now:
            stack.append([now, 'L'])
            now = now.left
        # Pop until a node whose right subtree is still pending is found;
        # nodes popped with tag 'R' are complete and get emitted.
        continuel = True
        while continuel and stack:
            now, tag = stack.pop()
            if tag == 'L':
                stack.append([now, 'R'])
                now = now.right
                continuel = False
            else:
                ret.append(now)
        if continuel:
            # Stack exhausted without finding a pending right subtree:
            # the traversal is complete.
            return ret
# -*- coding: utf-8 -*-
"""
Created on Sun Jun 2 18:36:46 2019
@author: Thomas
"""
import os

# The project modules below are expected to live in this directory.
os.chdir('C:\\Users\\Thomas\\Documents\\Uni_masters\\Masterpraktikum')

# Standard library
import copy  # snapshot predictions before in-place post-processing
import pickle
import time

# Third-party
import numpy as np
import sklearn.metrics as metrics
import torch
import torch as nn  # legacy alias used throughout this script
from torch.utils.data import DataLoader

# Project modules
from CustomDataset import CustomDataset
from CNN import SimpleCNN
from CreatePlots import create_plts
# Wall-clock start of the whole script run.
timer = time.time()
# Number of cross-validation partitions the dataset is divided into.
splits = 5
# Residue classes: 0 = no signal peptide (I/M/O), 1 = S, 2 = T, 3 = L
# (see the annotation mapping in loaddata()).
num_classes = 4
#--------------- parameterize hyperparameters ---------------
# NOTE: `nn` is an alias for torch in this script -- this seeds torch's RNG.
nn.manual_seed(10)
# Root directory for datasets, pickles and outputs.
root = 'C:\\Users\\Thomas\\Documents\\Uni_masters\\MasterPrak_Data\\'
# DataLoader settings used for every train/validation/benchmark loader.
params = {'batch_size': 128,
          'shuffle': True,
          'num_workers': 0}
# Batch-size grid; presumably for manual experiments -- not referenced in
# the visible code paths below (TODO confirm before removing).
all_params_list = [{'batch_size': 50,
                    'shuffle': True,
                    'num_workers': 0}, {'batch_size': 100,
                                        'shuffle': True,
                                        'num_workers': 0}, {'batch_size': 150,
                                                            'shuffle': True,
                                                            'num_workers': 0}, {'batch_size': 200,
                                                                                'shuffle': True,
                                                                                'num_workers': 0}, {'batch_size': 250,
                                                                                                    'shuffle': True,
                                                                                                    'num_workers': 0}]
num_epochs = 21
learning_rate = 1e-3
# Per-class loss weights, one per residue class.
weights = [7.554062537062119e-07,
           1.2681182393446364e-05,
           6.0745960393633824e-05,
           3.161855376735068e-05] # 1/occurence(type of residue) see calcClassimbalance
# Computation device; switch to the commented line for CPU-only runs.
dev = torch.device('cuda')
#dev = torch.device('cpu')
class_weights = torch.FloatTensor(weights).to(dev) # only for Cross entropy loss
# Compute/print metrics (and run validation) every N epochs.
printafterepoch = 8
#--------------- Disable/Enable the addition of a crf ---------------
no_crf = False
#--------------- Cross Validation ---------------
cross_validation = False
benchmarked_cross_validation, benchplots = True,False
#--------------- Parameterize grid search here ---------------
gridsearch = False
all_num_epochs = [33,34,35,36,37]
all_learning_rate = (1e-3, 1e-4, 5e-4 ,1e-5)
#--------------- Benchmark ---------------
benchmark = False
normal_run = False
#---Selected split to benchmark/validate upon (0-4, !must not be the same!)---
selected_split = 1
benchmark_split = 0
# =============================================================================
# Main functions
# =============================================================================
def cross_validate():
    """Plain k-fold cross-validation: train/validate once per split.

    Returns (per-split metric tuples, labels, predictions); the label and
    prediction lists are not populated by this routine and stay empty.
    """
    print('Starting cross-validation...')
    collected = [main(split, benchmark_split, False) for split in range(splits)]
    return collected, [], []
def cross_benchmark():
    """Benchmarked cross-validation: train and benchmark once per split.

    For each split a model is trained with that split held out, then the
    saved model is benchmarked on the same split. The per-protein labels
    and predictions (after and before post-processing) are aggregated
    across all splits and passed to create_plts().

    Returns (per-split benchmark outputs, labels, predictions).
    """
    params_list = []
    labels = []
    predictions = []
    labels_pre = []
    predictions_pre =[]
    print('Starting cross-validation...')
    for split in range(splits):
        # Validate and benchmark on the same split for this round.
        benchmark_split = split
        # First call trains (and pickles) a model with this split held
        # out; the second reloads that model and benchmarks it. The first
        # result is intentionally discarded -- presumably only the
        # benchmark output matters here (TODO confirm).
        out = main(split, benchmark_split)
        out = main(split, benchmark_split, True)
        params_list.append(out)
        # out[8]/out[9] hold (labels, predictions) after/before
        # post-processing (see main's benchmark branch).
        labels += out[8][0]
        predictions += out[8][1]
        labels_pre += out[9][0]
        predictions_pre += out[9][1]
    create_plts(params_list, cross_validation, False, split, root, learning_rate,
                num_epochs,benchmark_crossvalid = True,labels = labels, predictions = predictions,
                labels_pre= labels_pre, predictions_pre= predictions_pre )
    return params_list, labels, predictions
def main(split, benchmark_split, benchmark = False):
    """Train one cross-validation model, or benchmark a saved one.

    split           -- index of the validation split (0-4)
    benchmark_split -- index of the split held out for benchmarking
    benchmark       -- when True, evaluate the pickled model on the
                       benchmark split instead of training a new one

    Returns the metric collection produced by training or benchmarking
    (the two modes return differently shaped results), or None when
    benchmarking is requested but no saved model exists.
    """
    # Create data folders if non-existent.
    # BUG FIX: this was `if`/`elif`, so 'pickled_files' was never created
    # when 'Pictures' was also missing; both directories must be checked.
    if not os.path.isdir(root + 'Pictures'):
        os.mkdir(root + 'Pictures')
    if not os.path.isdir(root + 'pickled_files'):
        os.mkdir(root + 'pickled_files')
    if benchmark:
        print('Benchmarkset is:', benchmark_split)
        # BUG FIX: the original wrapped the entire benchmark body in a
        # bare `except:`, so *any* evaluation error was misreported as a
        # missing model; only the model load is guarded now.
        try:
            model = torch.load(root + 'model.pickle')
        except Exception:
            print('No model found for benchmarking! Start a new run with benchmark = False!')
            return None
        print('Your existing model will be benchmarked')
        bench_data, bench_labels, bench_orga = de_serializeBenchmark(benchmark_split)
        bench_dataset = CustomDataset(bench_data, bench_labels, bench_orga)
        bench_loader = DataLoader(bench_dataset, **params)
        acc, true_mcc, loss_ave, cm, mcc_orga, cm_orga, label_predicted_batch = validate(bench_loader, model, dev)
        # BUG FIX: keep a deep copy -- createcompfile() post-processes the
        # predictions in place, so the plain alias used before no longer
        # held the pre-processing values afterwards.
        label_predicted_batch_pre = copy.deepcopy(label_predicted_batch)
        print('Confusion matrix is:\n', cm)
        mcc_res_post, mcc_glob_post, mcc_glob_pre, cs_pre, cs_post, csreldiff_pre, csreldiff_post = createcompfile(
            root, label_predicted_batch, benchmark_split, true_mcc)
        if benchplots:
            create_plts(cm, cross_validation, benchmark, benchmark_split, root,
                        learning_rate, num_epochs, mcc_orga = mcc_orga, cm_orga = cm_orga)
        out_params = [true_mcc, mcc_res_post, mcc_glob_pre, mcc_glob_post, cs_pre, cs_post,
                      csreldiff_pre, csreldiff_post, label_predicted_batch,
                      label_predicted_batch_pre, mcc_orga, cm_orga, cm]
        return out_params
    else:
        print('Validationset is:', split, 'Benchmarkset is:', benchmark_split)
        # train on the GPU or on the CPU, if a GPU is not available
        train_data, train_labels, validation_data, validation_labels, info, train_orga, validation_orga = de_serializeInput(split, benchmark_split)
        train_dataset = CustomDataset(train_data, train_labels, train_orga)
        validation_dataset = CustomDataset(validation_data, validation_labels, validation_orga)
        train_loader = DataLoader(train_dataset, **params)
        validation_loader = DataLoader(validation_dataset, **params)
        model = SimpleCNN(num_classes)
        model = model.to(dev)
        model, out_params, label_predicted_batch = train(model, train_loader, validation_loader,
                                                         num_epochs, learning_rate, dev)
        if normal_run and not cross_validation and not gridsearch:
            create_plts(out_params, cross_validation, benchmark, split, root, learning_rate, num_epochs)
        # Persist the trained model so benchmark runs can reload it.
        torch.save(model, root + 'model.pickle')
        return out_params
# =============================================================================
# Load / preprocess data
# =============================================================================
def de_serializeInput(validation_split, benchmark_split):
    """Load the pickled per-split training data, creating it on first use.

    The dataset is pre-partitioned into `splits` parts; the part with
    index `validation_split` becomes the validation set and all other
    parts are pooled into the training set.

    NOTE(review): `benchmark_split` is accepted but never excluded from
    the training pool here -- confirm benchmark data is not leaking into
    training when benchmark_split != validation_split.

    Returns (train_data, train_labels, validation_data, validation_labels,
    info, train_orga, validation_orga).
    """
    all_features, info = loaddata('signalP4.npz', 'train_set.fasta')
    train_data,train_labels,train_orga = [],[],[]
    try:
        print('Loading pickled files...')
        for split in range(splits):
            split_data = pickle.load(open(root+"pickled_files\\split_"+str(split)+"_data.pickle", "rb"))
            split_labels = pickle.load(open(root+"pickled_files\\split_"+str(split)+"_labels.pickle", "rb"))
            split_orga = pickle.load(open(root+"pickled_files\\split_"+str(split)+"_orga.pickle", "rb"))
            if split == validation_split:
                validation_data,validation_labels, validation_orga = split_data, split_labels, split_orga
            else:
                train_data.extend(split_data)
                train_labels.extend(split_labels)
                train_orga.extend(split_orga)
        print('Done!')
    except (OSError, IOError):
        # First run: vectorize every split from the raw data and pickle it.
        print('Pickled files not found!\nCreating new train/validation dataset...')
        for split in range(splits):
            split_keys = selectTestTrainSplit(info,split)
            split_data, split_labels, split_orga = createDataVectors(info,all_features,split_keys)
            pickle.dump(split_data, open( root+"pickled_files\\split_"+str(split)+"_data.pickle", "wb" ))
            pickle.dump(split_labels, open( root+"pickled_files\\split_"+str(split)+"_labels.pickle", "wb" ))
            pickle.dump(split_orga, open( root+"pickled_files\\split_"+str(split)+"_orga.pickle", "wb" ))
            if split == validation_split:
                validation_data,validation_labels, validation_orga = split_data, split_labels, split_orga
            else:
                train_data.extend(split_data)
                train_labels.extend(split_labels)
                train_orga.extend(split_orga)
        print('Saved and Done!')
    return train_data,train_labels,validation_data,validation_labels,info, train_orga, validation_orga
def de_serializeBenchmark(bench_split):
    """Load the pickled benchmark split, creating all splits on first use.

    Uses the separate 'benchmark_set.fasta' annotation file. On the first
    run every split is vectorized and pickled; on later runs the requested
    split is loaded directly from its pickle files.

    Returns (bench_data, bench_labels, bench_orga) for `bench_split`.
    """
    all_features, info = loaddata('signalP4.npz', 'benchmark_set.fasta')
    try:
        print('Loading pickled benchmark files...')
        bench_data = pickle.load(open(root+"pickled_files\\bench_"+str(bench_split)+"_data.pickle", "rb"))
        bench_labels = pickle.load(open(root+"pickled_files\\bench_"+str(bench_split)+"_labels.pickle", "rb"))
        bench_orga = pickle.load(open(root+"pickled_files\\bench_"+str(bench_split)+"_orga.pickle", "rb"))
        print('Done!')
    except (OSError, IOError):
        # First run: vectorize and pickle every benchmark split, keeping
        # the one that was requested.
        print('Pickled files not found!\nCreating new benchmark dataset...')
        for split in range(splits):
            split_keys = selectTestTrainSplit(info,split)
            split_data, split_labels, split_orga = createDataVectors(info,all_features,split_keys)
            pickle.dump(split_data, open( root+"pickled_files\\bench_"+str(split)+"_data.pickle", "wb" ))
            pickle.dump(split_labels, open( root+"pickled_files\\bench_"+str(split)+"_labels.pickle", "wb" ))
            pickle.dump(split_orga, open( root+"pickled_files\\bench_"+str(split)+"_orga.pickle", "wb" ))
            if split == bench_split:
                bench_data, bench_labels, bench_orga = split_data, split_labels, split_orga
        print('Saved and Done!')
    return bench_data, bench_labels, bench_orga
def loaddata (data_name , training_name):
    """Parse the FASTA-like annotation file and load the embedding archive.

    data_name     -- .npz archive with one feature matrix per protein id
    training_name -- text file of 3-line records:
                     ">id|organism|sp_type|partition", sequence, annotation

    Each per-residue annotation character is mapped to a class id
    (I/M/O -> 0 "no SP", S -> 1, T -> 2, L -> 3); label lists of proteins
    shorter than 70 residues are padded with -100, which the training
    loss ignores.

    Returns (npz archive, info dict) where info[id] =
    [sp_type, partition, seq, annotation, class_ids, length, organism].
    """
    train_data = open(root+training_name, 'r')
    train_data = train_data.read().split('\n')
    tmp = np.load(root+data_name)
    info = {}
    # Parse the very first record's header ">id|organism|sp_type|partition".
    header = train_data[0].split('|')[0].replace('>','')
    org = train_data[0].split('|')[1]
    signalp = train_data[0].split('|')[2]
    partition = train_data[0].split('|')[3]
    seq = train_data[1]
    sig = train_data[2]
    # Alternative 6-class mapping, kept for reference:
    # sigbin = list(map(int,sig.replace('I','0').replace('M','1').replace('O','2')
    # .replace('S','3').replace('T','4').replace('L','5')))
    sigbin = list(map(int,sig.replace('I','0').replace('M','0').replace('O','0')
    .replace('S','1').replace('T','2').replace('L','3')))
    count = 3
    # Store the previously parsed record, then read the next one.
    # NOTE(review): the record count `(len-4)/3` depends on the exact
    # number of trailing newlines; the final record may be dropped if the
    # file ends differently -- TODO confirm against the data files.
    for x in range(int((len(train_data)-4)/3)):
        lenprot = 70
        if (len(seq) == lenprot):
            info[header] = [signalp, partition,seq,sig,sigbin,lenprot,org]
        else:
            # padding of proteins < 70 aminoacids
            lenprot = len(seq)
            [sigbin.append(-100) for x in range (70 - lenprot)]
            info[header] = [signalp, partition,seq,sig,sigbin,lenprot,org]
        seq = train_data[count+1]
        sig = train_data[count+2]
        # sigbin = list(map(int,sig.replace('I','0').replace('M','1').replace('O','2')
        # .replace('S','3').replace('T','4').replace('L','5')))
        sigbin = list(map(int,sig.replace('I','0').replace('M','0').replace('O','0')
        .replace('S','1').replace('T','2').replace('L','3')))
        header = train_data[count].split('|')[0].replace('>','')
        org = train_data[count].split('|')[1]
        signalp = train_data[count].split('|')[2]
        partition = train_data[count].split('|')[3]
        count += 3
    # remove invalid Proteinidentifiers (which changed over time)
    for e in (set(list(info.keys()))-set(tmp.files)):
        info.pop(e)
    return tmp, info
def createDataVectors(info, all_features, keys):
    """Collect (embedding, labels, organism) triples for the given keys.

    info         -- dict: id -> [sp_type, partition, seq, annotation,
                    class_ids, length, organism]
    all_features -- mapping: id -> per-residue feature matrix
    keys         -- protein ids to vectorize

    Feature matrices of proteins shorter than 70 residues are zero-padded
    into a fixed (70, 1024) array; longer ones are truncated to 70 rows.
    Returns (data, label, orga) as parallel lists.
    """
    data, label, orga = [], [], []
    for key in keys:
        entry = info[key]
        seq_len = entry[5]
        label.append(entry[4])
        orga.append(entry[6])
        features = all_features[key]
        if seq_len < 70:
            # Zero-pad short proteins into the fixed-size embedding array.
            trimmed = features[:seq_len]
            padded = np.zeros([70, 1024])
            padded[:trimmed.shape[0], :trimmed.shape[1]] = trimmed
            data.append(padded)
        else:
            data.append(features[:70])
    return data, label, orga
def selectTestTrainSplit(train_data, x):
    """Return all protein ids whose partition field equals split index x."""
    wanted = str(x)
    return [key for key, entry in train_data.items() if entry[1] == wanted]
def createcompfile(root, label_predicted_batch,split, mcc_pre):
    """Write a per-protein comparison report and return summary metrics.

    Post-processes the predictions (gap filling / mixed-SP-type
    resolution), computes global and residue-level MCCs plus
    cleavage-site deviations before and after post-processing, and writes
    everything to comparison<split>.txt.

    NOTE(review): mutates `label_predicted_batch` in place via
    postProcess(); callers needing the raw predictions must copy first.

    Returns (mcc_post, mcc_glob_post, mcc_glob_pre, csdiff_pre,
    csdiff_post, csreldiff_pre, csreldiff_post).
    """
    # j/k are cursors into the saved original predictions for gap-affected
    # and mixed-type-affected proteins respectively.
    k,j = 0, 0
    f = open(root+"comparison"+str(split)+".txt","w+")
    # Metrics before post-processing.
    mcc_glob_pre = calcGlobMCC(label_predicted_batch)
    csdiff_pre, csreldiff_pre = csdiff(label_predicted_batch)
    # postProcess repairs the predictions in place and reports which
    # proteins had gaps / mixed SP types (org_pred keeps their originals).
    gaps, mixed, label_predicted_batch, org_pred = postProcess(label_predicted_batch)
    # Metrics after post-processing.
    mcc_glob_post = calcGlobMCC(label_predicted_batch)
    csdiff_post, csreldiff_post = csdiff(label_predicted_batch)
    mcc_post, cm, acc = calcResMCC(label_predicted_batch)
    f.write("Mean residue cleavage residue deviation of predicted to true label before postprocessing: " + str(round(csdiff_pre,3)) + " and after " + str(round(csdiff_post,3)) +
            "\nGlobulal signal peptide MCC before post-processing: " + str(round(mcc_glob_pre,3)) + " and after: " + str(round(mcc_glob_post,3)) +
            "\nResidue MCC before post-processing: " + str(round(mcc_pre,3)) + " and after: " + str(round(mcc_post,3)) +
            "\nGaps have been found at protein predictions: "+ str(gaps) + " and have been post-processed" +
            "\nMixed Signal peptide predictions have been found at: "+ str(mixed) + " and have been post-processed\n")
    for i in range(len(label_predicted_batch[0])):
        f.write("Protein "+ str(i)+ "\n")
        f.write("True labels: " + str(label_predicted_batch[0][i].tolist()) + "\n")
        # For repaired proteins, also log the original prediction.
        if i in gaps:
            f.write("Orig pred labels: " + str(org_pred[0][j].tolist()) + "\n")
            j += 1
        if i in mixed:
            f.write("Orig pred labels: " + str(org_pred[1][k].tolist()) + "\n")
            k += 1
        f.write("Predicted labels: " + str(label_predicted_batch[1][i].astype(int).tolist()) + "\n")
    f.close()
    return mcc_post, mcc_glob_post, mcc_glob_pre, csdiff_pre, csdiff_post, csreldiff_pre, csreldiff_post
def postProcess(label_predicted_batch):
    """Clean up raw predictions: close gaps and unify mixed SP types.

    label_predicted_batch -- [true label arrays, prediction arrays];
    the prediction arrays are repaired in place via processPrediction().

    NOTE(review): the true labels are passed through processPrediction()
    too (with a throwaway collector) for logging purposes -- that call can
    modify a truth array in place as well; confirm this is intended.

    Returns (gap indices, mixed-type indices, the modified batch, and
    org_pred = [original predictions of gap cases, of mixed-type cases]).
    """
    gaps, mixed, org_pred = [], [], [[], []]
    gapstr,mixedstr = [],[]
    for i in range (len(label_predicted_batch[0])):
        truth, prediction = label_predicted_batch[0][i], label_predicted_batch[1][i]
        # Repair the prediction; org_pred collects the untouched original
        # whenever a gap or mixed type was detected.
        gap, mixedtype, prediction = processPrediction(prediction,org_pred)
        # Analyse the ground truth the same way, purely for the log below.
        gapsi,mixedsi,_ = processPrediction(truth,[[],[]])
        if gap:
            gaps.append(i)
        if gapsi:
            gapstr.append(i)
        if mixedsi:
            mixedstr.append(i)
        if mixedtype:
            mixed.append(i)
        label_predicted_batch[1][i] = prediction
    print('The prediction contains gaps at : ' + str(gaps))
    print('The true labels contain gaps at : ' + str(gapstr))
    print('The prediction contains mixed SP types at : ' + str(mixed))
    print('The true labels contain mixed SP types at : ' + str(mixedstr))
    return gaps, mixed, label_predicted_batch, org_pred
def processPrediction (prediction, org_pr):
    """Detect and repair gaps / mixed SP types in a single prediction.

    prediction -- 1-D numpy array of residue class ids; assumed to be
                  exactly 70 residues long (index 69 = last residue).
                  Modified in place.
    org_pr     -- two lists collecting int copies of the original
                  prediction: [0] for gap cases, [1] for mixed-type cases.

    Returns (had_gap, had_mixed_type, repaired_prediction).
    """
    gap = False
    mixedtype = False
    # A non-zero final residue means the SP region runs off the end (or a
    # spurious tail exists after a zero region).
    endnotNull = (prediction[69] != 0)
    # Indices where the zero/non-zero pattern flips.
    x = (prediction == 0)
    x = np.where(x[:-1] != x[1:])[0]
    if x.size != 0:
        # More than one transition (or a non-zero tail) means the SP
        # region is interrupted by a gap.
        if x.size > 1 or endnotNull:
            gap = True
            org_pr[0].append(prediction.astype(int))
        # Several distinct classes without a gap -> mixed SP-type call.
        if np.unique(prediction).size > 2 and not gap:
            mixedtype = True
            org_pr[1].append(prediction.astype(int))
        if endnotNull:
            # Zero out everything after the first real SP segment.
            gapstart = x[0]+1
            if prediction[x[0]] == 0 and x.size > 1 : gapstart = x[1]+1
            prediction[gapstart:] = 0
        # Re-evaluate the transitions after the tail was cleared; fill any
        # remaining gap/mixed region with its most common class.
        x = (prediction == 0)
        x = np.where(x[:-1] != x[1:])[0]
        if x.size > 1 or np.unique(prediction).size > 2:
            gap_end = x[x.size-1]+1
            most_common_residue = np.bincount(prediction[:gap_end].astype(int)).argmax()
            prediction[:gap_end] = most_common_residue
    return gap, mixedtype, prediction
def calcGlobMCC(label_predicted_batch):
    """Global per-protein MCC based on each protein's first residue label."""
    truths, preds = label_predicted_batch
    first_true = [row[0] for row in truths]
    first_pred = [row[0] for row in preds]
    return metrics.matthews_corrcoef(first_true, first_pred)
def csdiff(label_predicted_batch):
    """Cleavage-site deviation between true and predicted labelings.

    The signal-peptide length of a protein is taken as its number of
    non-zero residue labels. Returns (mean absolute deviation,
    list of signed per-protein deviations true - predicted).
    """
    truths, preds = label_predicted_batch
    signed = []
    total = 0
    for truth, pred in zip(truths, preds):
        diff = truth[truth != 0].size - pred[pred != 0].size
        signed.append(diff)
        total += abs(diff)
    return total / len(truths), signed
def calcResMCC(label_predicted_batch):
    """Residue-level (MCC, confusion matrix, accuracy) over all proteins."""
    truth_rows = [list(row) for row in label_predicted_batch[0]]
    pred_rows = [list(row) for row in label_predicted_batch[1]]
    return calcMCCbatch(truth_rows, pred_rows)
def calcClassImbalance(info):
    """Inverse residue-class frequencies over the dataset (loss weights).

    Class 0 pools the non-SP annotation states I/M/O; classes 1-3 count
    the S, T and L states. Returns [1/count] per class.
    """
    # for 6 classes use six counters instead of four
    totals = [0, 0, 0, 0]
    grouping = (("I", "M", "O"), ("S",), ("T",), ("L",))
    for key in info:
        annotation = info[key][3]
        for idx, letters in enumerate(grouping):
            for letter in letters:
                totals[idx] += annotation.count(letter)
    return [1 / n for n in totals]
# =============================================================================
# Functions for training/validation
# =============================================================================
def orgaBatch (labels, predicted, orga, predicted_batch, labels_batch, label_predicted_batch):
    """Sort one batch of residue labels/predictions into organism buckets.

    labels, predicted -- per-protein tensors (moved to CPU numpy here)
    orga              -- organism group string per protein
    predicted_batch, labels_batch -- four buckets each:
                         [0] Archaea, [1] Eukarya, [2] Gram-, [3] Gram+
    label_predicted_batch -- [all label rows, all prediction rows]

    Returns the three (mutated) accumulators.
    """
    predicted, labels = predicted.to('cpu').numpy(), labels.to('cpu').numpy()
    bucket_of = {'ARCHAEA': 0, 'EUKARYA': 1, 'NEGATIVE': 2, 'POSITIVE': 3}
    for idx in range(len(labels)):
        slot = bucket_of.get(orga[idx])
        if slot is not None:
            predicted_batch[slot].extend(predicted[idx])
            labels_batch[slot].extend(labels[idx])
        # Regardless of organism, keep the full per-protein rows too.
        label_predicted_batch[0].append(labels[idx])
        label_predicted_batch[1].append(predicted[idx])
    return predicted_batch, labels_batch, label_predicted_batch
def calcMCCbatch (labels_batch, predicted_batch):
    """Compute MCC, confusion matrix and accuracy over accumulated batches.

    labels_batch / predicted_batch -- lists of per-bucket label lists;
    they are flattened into one residue-level sequence each.

    NOTE(review): the predicted values are passed as the first argument
    (y_true position). MCC and accuracy are symmetric, but the confusion
    matrix comes out transposed relative to sklearn's convention --
    confirm downstream consumers expect this orientation.

    Returns (mcc, confusion matrix, accuracy).
    """
    # Flatten the per-organism buckets into flat residue lists.
    x = sum(predicted_batch, [])
    y = sum(labels_batch,[])
    mcc = metrics.matthews_corrcoef(x, y)
    # BUG FIX: `labels` must be passed by keyword -- the positional form
    # was deprecated in scikit-learn 0.22 and later removed, so the old
    # call crashes on current releases.
    cm = metrics.confusion_matrix(x, y, labels=[0, 1, 2, 3])  # 6-class: [0..5]
    acc = metrics.accuracy_score(x,y)
    return mcc,cm,acc
def calcMCCorga(labels_batch, predicted_batch):
    """Per-organism-group MCC and confusion matrix.

    Bucket mapping: [0] Archaea, [1] Eukarya, [2] Gram negative,
    [3] Gram positive. Returns (list of MCCs, list of confusion
    matrices), one entry per group.
    """
    mcc_list, cm_list = [],[]
    for x in range(len(labels_batch)):
        mcc = metrics.matthews_corrcoef(predicted_batch[x], labels_batch[x])
        # BUG FIX: `labels` must be passed by keyword -- the positional
        # form was deprecated in scikit-learn 0.22 and later removed.
        cm = metrics.confusion_matrix(predicted_batch[x], labels_batch[x],
                                      labels=[0, 1, 2, 3])  # 6-class: [0..5]
        mcc_list.append(mcc)
        cm_list.append(cm)
    return mcc_list, cm_list
def train(model, train_loader, validation_loader, num_epochs, learning_rate, dev):
    """Train the CNN (optionally with a CRF head) with Adam.

    Every `printafterepoch` epochs the training metrics are computed, a
    validation pass is run, and the resulting tuple is collected.

    Returns (trained model, list of per-evaluation metric tuples, the
    accumulated [labels, predictions] rows from training batches).
    """
    print('Starting to learn...')
    total_step = len(train_loader)
    predicted_batch = [[],[],[],[]] # [0]:Archea, [1]:Eukaryot, [2]:Gram negative, [3]:Gram positive
    labels_batch= [[],[],[],[]]
    label_predicted_batch = [[],[]]
    out_params = []
    # -100 marks padded residues (see loaddata) and is ignored by the loss.
    criterion = torch.nn.CrossEntropyLoss(weight = class_weights, ignore_index = -100, reduction = 'mean')
    optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)
    for epoch in range(num_epochs):
        loss_train_list = []
        correct = 0
        total = 0
        for i, (train, labels, mask, orga) in enumerate(train_loader):
            # Run the forward pass
            train, labels, mask = train.to(dev), labels.to(dev), mask.to(dev)
            outputs = model(train.unsqueeze(3))
            outputs = outputs.squeeze_()
            if no_crf:
                loss = criterion(outputs, labels)
            else:
                # Combine the cross-entropy loss with the CRF negative
                # log-likelihood; the CRF expects sequence-first ordering,
                # hence the permutes.
                loss = criterion(outputs, labels)
                outputs = outputs.permute(2,0,1)
                loss = -model.crf(outputs, labels.permute(1,0), mask = mask.permute(1,0))+loss
            # Backprop and perform Adam optimisation
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
            # Track the accuracy, mcc and cm
            if (epoch%printafterepoch) == 0:
                if no_crf:
                    _, predicted = torch.max(outputs.data, 1)
                    predicted = predicted.squeeze_()
                    correct += (predicted == labels).sum().item()
                else:
                    # NOTE: `nn` is an alias for torch in this script;
                    # decode() returns the Viterbi label sequences.
                    predicted = nn.Tensor(model.crf.decode(outputs)).cuda()
                    correct += (predicted == labels.float()).sum().item()
                total += labels.size(0)* labels.size(1)
                predicted_batch, labels_batch, label_predicted_batch = orgaBatch(labels, predicted, orga, predicted_batch, labels_batch, label_predicted_batch)
            loss_train_list.append(loss.item())
        # and print the results
        if (epoch%printafterepoch) == 0:
            mcc_train, cm_train, a = calcMCCbatch(labels_batch, predicted_batch)
            acc_train = (correct / total)*100
            loss_ave = sum(loss_train_list)/len(loss_train_list)
            print('Epoch [{}/{}], Step [{}/{}], Loss: {:.4f}, Accuracy: {:.2f}%, MCC: {:.2f}'
                  .format(epoch+1, num_epochs, i + 1, total_step, loss_ave,
                          acc_train, mcc_train))
            acc_valid, mcc_valid, loss_valid, cm_valid, mcc_orga, cm_orga, label_predicted_batch_val = validate(validation_loader, model, dev)
            out_params.append((loss_valid, loss_ave, epoch, acc_valid, acc_train, mcc_valid, mcc_train , mcc_orga, cm_orga, cm_train, cm_valid))
    # check overfitting
    # min() compares the tuples lexicographically, so this reports the
    # evaluation with the lowest validation loss (first tuple element).
    print('Best validation loss:', min(out_params)[0] ,' at epoch:', min(out_params)[2])
    return model, out_params, label_predicted_batch
def validate(validation_loader, model, dev):
    """Run one evaluation pass over *validation_loader*.

    :param validation_loader: DataLoader yielding (inputs, labels, mask, organism).
    :param model: trained network; must expose ``crf`` when the global
        ``no_crf`` is False.
    :param dev: torch device to move batches to.
    :returns: (accuracy%, mcc, mean loss, confusion matrix, per-organism mcc,
        per-organism confusion matrices, [labels, predictions]).

    NOTE(review): relies on module-level globals ``class_weights``, ``no_crf``,
    ``orgaBatch``, ``calcMCCbatch`` and ``calcMCCorga`` — confirm they are set
    before calling. An empty loader would leave ``result`` unbound.
    """
    with torch.no_grad():
        model.eval()
        correct = 0
        total = 0
        predicted_batch = [[],[],[],[]] # [0]:Archea, [1]:Eukaryot, [2]:Gram negative, [3]:Gram positive
        labels_batch= [[],[],[],[]]
        label_predicted_batch = [[],[]]
        loss_list = []
        criterion = torch.nn.CrossEntropyLoss(weight = class_weights, ignore_index = -100, reduction = 'mean')
        for validation, labels, mask, orga in validation_loader:
            # preprocess outputs to correct format (1024*70*1)
            validation, labels, mask = validation.to(dev), labels.to(dev), mask.to(dev)
            outputs = model(validation.unsqueeze(3))
            outputs.squeeze_()
            if no_crf:
                # use CrossEntropyloss minimalization
                loss = criterion(outputs, labels)
                _, predicted = torch.max(outputs.data, 1)
                correct += (predicted == labels).sum().item()
            else:
                # apply conditional random field and decode via Viterbi algorithm
                loss = criterion(outputs, labels)
                outputs = outputs.permute(2,0,1)
                loss = -model.crf(outputs, labels.permute(1,0), mask = mask.permute(1,0))+loss
                # fix: torch.nn has no attribute Tensor; was `nn.Tensor(...)`
                predicted = torch.Tensor(model.crf.decode(outputs)).cuda()
                correct += (predicted == labels.float()).sum().item()
            # calculate quality measurements
            total = total + (labels.size(0) * labels.size(1))
            result = ((correct / total) * 100)
            predicted_batch, labels_batch, label_predicted_batch = orgaBatch(labels, predicted, orga, predicted_batch, labels_batch, label_predicted_batch)
            loss_list.append(loss.item())
        mcc, cm, a = calcMCCbatch(labels_batch, predicted_batch)
        mcc_orga, cm_orga = calcMCCorga(labels_batch, predicted_batch)
        loss_ave = sum(loss_list)/len(loss_list)
        print('Accuracy of the model on the validation proteins is: {:.2f}%, Loss:{:.3f} and MCC is: {:.2f}'.format(result,loss_ave,mcc))
    return result, mcc, loss_ave, cm, mcc_orga, cm_orga, label_predicted_batch
# =============================================================================
# Execute when running script
# =============================================================================
if __name__ == "__main__":
    # Refuse to benchmark on the validation split: that would evaluate on data
    # already used for model selection.
    if selected_split == benchmark_split and benchmark:
        # fix: the original wrapped `raise SystemExit` in a bare except that
        # caught the SystemExit itself, so the run never actually aborted.
        print('Benchmark and validation split cannot be the same when doing a normal run with benchmarking because of continous biased evaluation.')
        raise SystemExit
    if cross_validation and not normal_run and not gridsearch:
        print("Starting normal cross-validation run...")
        out1, labels, predictions = cross_validate()
        create_plts(out1, cross_validation, False, selected_split, root, learning_rate, num_epochs)
    else:
        print('Disable normal run and gridsearch to do simple cross validation!')
    if gridsearch:
        if cross_validation:
            print("Disable cross validation to do gridsearch.")
        else:
            # Grid over epoch counts at a fixed learning rate; each setting is
            # cross-validated and plotted on its own.
            cross_valid_params = []
            print("Starting gridsearch... This can take up to a day or two...")
            learning_rate = 1e-3
            for y in range(len(all_num_epochs)):
                num_epochs = all_num_epochs[y]
                params = all_params_list[y]
                out = cross_validate()
                cross_valid_params.append(out)
                create_plts(out, cross_validation, False, selected_split, root, learning_rate, num_epochs)
    if normal_run:
        cross_validation, gridsearch = False, False
        out = main(selected_split, benchmark_split)
    if benchmark:
        out = main(0, benchmark_split, benchmark = True)
    if benchmarked_cross_validation:
        out, labels, predictions = cross_benchmark()
print("Runtime: ", time.time() - timer) |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-04-20 21:45
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema: Group <-> Person many-to-many linked through Membership."""
    initial = True
    dependencies = [
        ('contenttypes', '0002_remove_content_type_name'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        # Group: simple named container of people.
        migrations.CreateModel(
            name='Group',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=12)),
            ],
        ),
        # Membership: the through-model of the M2M, carries a gold_member flag.
        migrations.CreateModel(
            name='Membership',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('gold_member', models.BooleanField(default=False)),
                ('group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tests.Group')),
            ],
        ),
        migrations.CreateModel(
            name='Person',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=12)),
            ],
        ),
        # FK added after Person exists to break the creation-order cycle.
        migrations.AddField(
            model_name='membership',
            name='person',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tests.Person'),
        ),
        migrations.AddField(
            model_name='group',
            name='people',
            field=models.ManyToManyField(through='tests.Membership', to='tests.Person'),
        ),
    ]
|
import unittest
class Solution:
    def lengthOfLongestSubstring(self, s):
        """
        Return the length of the longest substring of *s* with no
        repeating characters.

        Sliding-window scan: ``window_start`` is the left edge of the current
        duplicate-free window, while ``next_index`` remembers, per character,
        the position just past its most recent occurrence.

        :type s: str
        :rtype: int
        """
        next_index = {}
        window_start = 0
        best = 0
        for pos, ch in enumerate(s):
            if ch in next_index:
                # Seen before: slide the window past the previous occurrence
                # (never backwards).
                window_start = max(window_start, next_index[ch])
            best = max(best, pos - window_start + 1)
            next_index[ch] = pos + 1
        return best
class CaseCheck(unittest.TestCase):
    """Unit tests for Solution.lengthOfLongestSubstring."""
    def testEmpty(self):
        # Empty string: no substring at all.
        s = Solution()
        actual = s.lengthOfLongestSubstring('')
        expected = 0
        self.assertEqual(actual, expected)
    def testSimple0(self):
        # Single character.
        s = Solution()
        actual = s.lengthOfLongestSubstring('c')
        expected = 1
        self.assertEqual(actual, expected)
    def testSimple1(self):
        # Repeating pattern; longest run is "abc".
        s = Solution()
        actual = s.lengthOfLongestSubstring('abcabcbb')
        expected = 3
        self.assertEqual(actual, expected)
    def testSimple2(self):
        # All identical characters.
        s = Solution()
        actual = s.lengthOfLongestSubstring('bbbbb')
        expected = 1
        self.assertEqual(actual, expected)
    def testSimple3(self):
        # Answer is "wke", not the substring "pwke".
        s = Solution()
        actual = s.lengthOfLongestSubstring('pwwkew')
        expected = 3
        self.assertEqual(actual, expected)
    def testSimple4(self):
        # Regression for the window-start-moves-backwards bug class.
        s = Solution()
        actual = s.lengthOfLongestSubstring('abba')
        expected = 2
        self.assertEqual(actual, expected)
if __name__ == "__main__":
    # Run the unittest suite when executed as a script.
    unittest.main()
|
# coding: utf-8
# 导入数据集mnist
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets("./../data/MNIST/", one_hot=True)
import tensorflow as tf
import os
INPUT_NODE = 784
OUTPUT_NODE = 10
IMAGE_SIZE = 28
NUM_CHANNELS = 1
NUM_LABEL = 10
# 第一层卷积层的尺寸和深度
CONV1_DEEP = 32
CONV1_SIZE = 5
# 第二层卷积层的尺寸和深度
CONV2_DEEP = 64
CONV2_SIZE = 5
# 全连接层的结点个数
FC_SIZE = 512
# 定义卷积神经网络的前向传播过程。这里添加了一个新的参数train,用于区分训练过程和测试过程。在这个程序中将用到dropout方法,
# dropout方法可进一步提升模型的可靠性并防止过拟合,dropout过程只在训练时使用
def inference(input_tensor, regularizer, train=True, ):
    """Forward pass of the LeNet-5 style CNN.

    Args:
        input_tensor: flattened image batch, shape (batch, 784); reshaped
            internally to (batch, 28, 28, 1).
        regularizer: optional L2 regularizer; when given, its value on each
            fully-connected weight matrix is added to the 'losses' collection.
        train: True during training — dropout is applied only then.

    Returns:
        Unscaled logits of shape (batch, NUM_LABEL).
    """
    input_tensor = tf.reshape(input_tensor, [-1, 28, 28, 1])
    # Layer 1: 5x5 conv, 32 filters, SAME padding -> 28x28x32.
    with tf.variable_scope('layer1-conv1'):
        conv1_weights = tf.get_variable('weight', [CONV1_SIZE, CONV1_SIZE, NUM_CHANNELS, CONV1_DEEP],
                                        initializer=tf.truncated_normal_initializer(stddev=0.1))
        conv1_biases = tf.get_variable('bias', [CONV1_DEEP], initializer=tf.constant_initializer(0.0))
        conv1 = tf.nn.conv2d(input_tensor, conv1_weights, strides=[1, 1, 1, 1], padding='SAME')
        relu1 = tf.nn.relu(tf.nn.bias_add(conv1, conv1_biases))
    # Layer 2: 2x2 max-pool -> 14x14x32.
    with tf.name_scope('layer2-pool1'):
        pool1 = tf.nn.max_pool(relu1, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')
    # Layer 3: 5x5 conv, 64 filters -> 14x14x64.
    with tf.variable_scope('layer3-conv2'):
        conv2_weights = tf.get_variable('weight', [CONV2_SIZE, CONV2_SIZE, CONV1_DEEP, CONV2_DEEP],
                                        initializer=tf.truncated_normal_initializer(stddev=0.1))
        conv2_biases = tf.get_variable('bias', [CONV2_DEEP], initializer=tf.constant_initializer(0.0))
        conv2 = tf.nn.conv2d(pool1, conv2_weights, strides=[1, 1, 1, 1], padding='SAME')
        relu2 = tf.nn.relu(tf.nn.bias_add(conv2, conv2_biases))
    # Layer 4: 2x2 max-pool -> 7x7x64.
    with tf.name_scope('layer4-pool2'):
        pool2 = tf.nn.max_pool(relu2, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')
    pool_shape = pool2.get_shape().as_list()
    nodes = pool_shape[1] * pool_shape[2] * pool_shape[3]
    # fix: use -1 for the batch dimension; pool_shape[0] is None when the
    # graph is fed from a placeholder with unknown batch size, and
    # tf.reshape rejects None.
    reshaped = tf.reshape(pool2, [-1, nodes])
    # Layer 5: fully connected, 512 units, ReLU, dropout in training only.
    with tf.variable_scope('layer5-fc1'):
        fc1_weights = tf.get_variable('weight', [nodes, FC_SIZE],
                                      initializer=tf.truncated_normal_initializer(stddev=0.1))
        if regularizer is not None:
            tf.add_to_collection('losses', regularizer(fc1_weights))
        fc1_biases = tf.get_variable('bias', [FC_SIZE], initializer=tf.constant_initializer(0.0))
        fc1 = tf.nn.relu(tf.matmul(reshaped, fc1_weights) + fc1_biases)
        if train:
            # fix: dropout was applied unconditionally; per the module comment
            # it must run only during training.
            fc1 = tf.nn.dropout(fc1, 0.5)
    # Layer 6: fully connected output layer producing logits.
    with tf.variable_scope('layer6-fc2'):
        fc2_weights = tf.get_variable('weight', [FC_SIZE, NUM_LABEL],
                                      initializer=tf.truncated_normal_initializer(stddev=0.1))
        if regularizer is not None:
            tf.add_to_collection('losses', regularizer(fc2_weights))
        fc2_biases = tf.get_variable('bias', [NUM_LABEL], initializer=tf.constant_initializer(0.0))
        # fix: tf.nn.matmul does not exist; matrix multiply is tf.matmul.
        logit = tf.matmul(fc1, fc2_weights) + fc2_biases
    return logit
# Training hyperparameters.
BATCH_SIZE = 100
LEARNING_RATE_BASE = 0.8
LEARNING_RATE_DECAY = 0.99
REGULARAZTION_RATE = 0.0001
TRAINING_STEPS = 30000
MOVING_AVERAGE_DECAY = 0.99
MODEL_SAVE_PATH = './model'
MODEL_NAME = 'LeNet5.ckpt'
# Build the training graph: placeholders, forward pass, EMA shadow variables.
x = tf.placeholder(tf.float32, [None, INPUT_NODE], name="x-input")
y_ = tf.placeholder(tf.float32, [None, OUTPUT_NODE], name="y-input")
regularizer = tf.contrib.layers.l2_regularizer(REGULARAZTION_RATE)
y = inference(x, regularizer=regularizer)
global_step = tf.Variable(0, trainable=False)
variable_averages = tf.train.ExponentialMovingAverage(MOVING_AVERAGE_DECAY, global_step)
variable_averages_op = variable_averages.apply(tf.trainable_variables())
# NOTE(review): the 'losses' collection filled inside inference() is never
# added to this objective, so L2 regularization has no effect — confirm intent.
cross_entropy_mean = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(logits=y, labels=tf.argmax(y_, 1)))
# Exponentially decayed learning rate, one decay step per epoch.
learning_rate = tf.train.exponential_decay(learning_rate=LEARNING_RATE_BASE,
                                           global_step=global_step,
                                           decay_steps=mnist.train.num_examples / BATCH_SIZE,
                                           decay_rate=LEARNING_RATE_DECAY)
train_step = tf.train.GradientDescentOptimizer(learning_rate).minimize(cross_entropy_mean, global_step=global_step)
# Couple the SGD step and the EMA update into a single no-op train_op.
with tf.control_dependencies([train_step, variable_averages_op]):
    train_op = tf.no_op(name='train')
saver = tf.train.Saver()
# Model training loop: log loss every 1000 steps, checkpoint at the end.
with tf.Session() as sess:
    tf.global_variables_initializer().run()
    for i in range(TRAINING_STEPS):
        xs, ys = mnist.train.next_batch(BATCH_SIZE)
        _, loss_value, step = sess.run([train_op, cross_entropy_mean, global_step], feed_dict={x: xs, y_: ys})
        if i % 1000 == 0:
            print("After %d training step(s), loss on training batch is %g." % (step, loss_value))
    saver.save(sess, os.path.join(MODEL_SAVE_PATH, MODEL_NAME), global_step=global_step)
# Evaluation: rebuild the graph, restore the EMA-shadowed weights from the
# latest checkpoint, and score the validation set.
with tf.Graph().as_default() as g:
    x = tf.placeholder(tf.float32, [None, INPUT_NODE], name='x_eval_input')
    y_ = tf.placeholder(tf.float32, [None, OUTPUT_NODE], name='y_eval_input')
    # NOTE(review): inference() defaults to train=True here, so dropout is
    # active during evaluation — confirm that is intended.
    y = inference(x, None)
    correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))
    accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
    # Restore the moving-average shadow values into the live variables.
    saver = tf.train.Saver(tf.train.ExponentialMovingAverage(MOVING_AVERAGE_DECAY).variables_to_restore())
    with tf.Session() as sess:
        ckpt = tf.train.get_checkpoint_state(MODEL_SAVE_PATH)
        if ckpt and ckpt.model_checkpoint_path:
            saver.restore(sess, ckpt.model_checkpoint_path)
            # Step number is encoded in the checkpoint filename suffix.
            global_step = ckpt.model_checkpoint_path.split('/')[-1].split('-')[-1]
            accuracy_score = sess.run(accuracy, feed_dict={x: mnist.validation.images, y_: mnist.validation.labels})
            print("After %s training step(s), loss on training batch is %g." % (global_step, accuracy_score))
        else:
            print("no checkpoint file found")
|
import peri0
import time
def touchsound():
    """Play one short (eighth-note) MI beep as touch feedback."""
    bz = peri0.Buzzer()
    bz.set_tempo(180)
    bz.tone(4, "MI", 1 / 8)
def opensound():
    """Play the ascending DO-MI-SOL jingle for an open event."""
    bz = peri0.Buzzer()
    bz.set_tempo(120)
    melody = ((4, "DO", 1 / 4), (4, "MI", 1 / 4), (4, "SOL", 1 / 4))
    bz.play(melody)
def closesound():
    """Play the descending SOL-MI-DO jingle for a close event."""
    bz = peri0.Buzzer()
    bz.set_tempo(120)
    melody = ((4, "SOL", 1 / 4), (4, "MI", 1 / 4), (4, "DO", 1 / 4))
    bz.play(melody)
def callsound():
    """Play the alternating SI-MI ring pattern for an incoming call."""
    bz = peri0.Buzzer()
    bz.set_tempo(120)
    melody = ((4, "SI", 1 / 4), (4, "MI", 1 / 4), (4, "SI", 1 / 4), (4, "MI", 1 / 4))
    bz.play(melody)
def errorsound():
    """Play three low, short SI notes to signal an error."""
    bz = peri0.Buzzer()
    bz.set_tempo(120)
    melody = ((2, "SI", 1 / 8), (2, "SI", 1 / 8), (2, "SI", 1 / 8))
    bz.play(melody)
|
import torch
import torch.nn as nn
import torch.nn.functional as F
from vdgnn.units import DynamicRNN
class DiscriminativeDecoder(nn.Module):
    """Scores answer options against an encoder output via dot-product similarity.

    Shares the word embedding and the option RNN with the given encoder, and
    returns log-probabilities over the candidate options.
    """
    def __init__(self, args, encoder):
        super(DiscriminativeDecoder, self).__init__()
        self.args = args
        # share word embedding
        self.word_embed = encoder.word_embed
        self.embed_size = args.embed_size
        self.rnn_hidden_size = args.rnn_hidden_size
        self.num_layers = args.num_layers
        # share embedding lstm
        self.option_rnn = encoder.node_rnn
        self.log_softmax = nn.LogSoftmax(dim=1)
    def init_weights(self, init_type='kaiming'):
        # NOTE(review): self.similarity_score is not defined in this class —
        # confirm it is added elsewhere before init_weights is called.
        self.similarity_score.init_weights(init_type=init_type)
    def forward(self, enc_out, batch):
        """Return log-probabilities of shape (batch*rounds, num_options).

        :param enc_out: encoder output, dotted against each option embedding.
        :param batch: dict with 'opt' (B, rounds, options, max_len) token ids
            and 'opt_len' option lengths.
        """
        options = batch['opt']
        options_len = batch['opt_len']
        # word embed options
        batch_size, num_rounds, num_options, max_opt_len = options.size()
        # options = options.view(batch_size * num_rounds, num_options, max_opt_len)
        options_len = options_len.view(-1, num_options)
        # batch_size, num_options, max_opt_len = options.size()
        options = options.contiguous().view(-1, num_options * max_opt_len)
        options = self.word_embed(options)
        options = options.view(-1, num_options, max_opt_len, self.embed_size)
        # score each option: run it through the shared RNN and dot with enc_out
        scores = []
        for opt_id in range(num_options):
            opt = options[:, opt_id, :, :]
            opt_len = options_len[:, opt_id]
            opt_embed = self.option_rnn(opt, opt_len)
            scores.append(torch.sum(opt_embed * enc_out, 1))
        # return scores
        scores = torch.stack(scores, 1)
        # print(scores.size())
        log_probs = self.log_softmax(scores)
        return log_probs
|
import re
import psutil
import misc
import socket
from mylogger import iotlogger
logger = iotlogger(loggername="DevStatus")
def handle_exception(function):
    """Decorator: run *function*, log any exception, and return {} on failure.

    :param function: callable to guard.
    :returns: wrapper that forwards all arguments and the return value.
    """
    import functools

    @functools.wraps(function)  # preserve __name__/__doc__ of the wrapped fn
    def wrapper_function(*args, **kwargs):
        try:
            return function(*args, **kwargs)
        except Exception as e:
            # fix: function.func_name is Python 2 only and raises
            # AttributeError on Python 3; __name__ works on both.
            logger.error("Error with " + str(function.__name__) + ":" + str(e), exc_info=True)
            return {}
    return wrapper_function
@handle_exception
def cpuStats():
    """Return {'cpu_load': system-wide CPU percent}; {} on error via decorator."""
    logger.debug('Obtaining cpustats')
    return {'cpu_load': psutil.cpu_percent(percpu=False)}
def stats():
    """Collect all device status metrics (currently only CPU load)."""
    status_ping = {}
    # Merge each metric group into the combined status dict.
    for metrics in (cpuStats(),):
        status_ping.update(metrics)
    return status_ping
|
# Generated by Django 2.2.3 on 2019-10-24 17:21
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema for the UI app: a single Background image model."""
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Background',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Image stored under MEDIA_ROOT/background/.
                ('main_background', models.ImageField(upload_to='background', verbose_name='Фон главной страницы')),
            ],
            options={
                'verbose_name': 'Интерфейс',
                'verbose_name_plural': 'Интерфейсы',
            },
        ),
    ]
|
# Minimal two-route Flask demo app.
# NOTE(review): render_template is imported but unused — confirm before removing.
from flask import Flask, render_template
app = Flask(__name__)
@app.route('/')
def index():
    # Landing page.
    return 'Hello'
@app.route('/about')
def about():
    # About page.
    return 'Built by Chris Grant'
if __name__ == '__main__':
    # Debug server on localhost:3000 when run directly.
    app.debug = True
    app.run('localhost', port=3000)
|
from mod1 import City
# Demo: create a City with an initial population of 10,000.
mayor = City(10000)
mayor.gradual_peace(5000) |
# start pt2 8:55 - paused 9:30
# unpaused 13:30 - solved pt2 13:50
# Brute-force search for the (noun, verb) pair producing the target output.
# fix: the input file was re-read on every one of the 10,000 iterations, and
# `break` only exited the inner loop, so the search kept running after a hit.
filepath = 'input_2019-2.txt'
with open(filepath) as fp:
    base_intcode = [int(x) for x in fp.readline().split(",")]

found = False
for noun in range(100):
    for verb in range(100):
        xintcode = base_intcode[:]  # fresh program copy per attempt
        xintcode[1] = noun
        xintcode[2] = verb
        # NOTE(review): calcPt1 is not defined in this file — confirm it is
        # imported/defined elsewhere before running.
        result = calcPt1(xintcode)
        if result[0] == 19690720:
            print( (100*noun)+verb )
            found = True
            break
    if found:
        break
|
import scipy as sp
import OpenPNM
import pytest
def test_find_connected_pores():
    """find_connected_pores: index list, flatten flag, boolean mask, empty input."""
    pn = OpenPNM.Network.Cubic(shape=(10,10,10))
    a = pn.find_connected_pores(throats=[0,1])
    assert sp.all(a.flatten() == [0, 1, 1, 2])
    # flatten=True returns the unique pore set.
    a = pn.find_connected_pores(throats=[0,1], flatten=True)
    assert sp.all(a == [0, 1, 2])
    # Boolean throat mask must behave like the index list.
    Tind = sp.zeros((pn.Nt,), dtype=bool)
    Tind[[0,1]] = True
    a = pn.find_connected_pores(throats=Tind, flatten=True)
    assert sp.all(a == [0, 1, 2])
    # Empty selection yields an empty (0, 2) array.
    a = pn.find_connected_pores(throats=[], flatten=True)
    assert sp.shape(a) == (0,2)
def test_find_neighbor_pores():
    """find_neighbor_pores: empty input, mask input, all set-logic modes."""
    pn = OpenPNM.Network.Cubic(shape=(10,10,10))
    a = pn.find_neighbor_pores(pores=[])
    assert sp.size(a) == 0
    Pind = sp.zeros((pn.Np,), dtype=bool)
    Pind[[0,1]] = True
    a = pn.find_neighbor_pores(pores=Pind)
    assert sp.all(a == [2, 10, 11, 100, 101])
    # Set-logic modes over neighbours of pores 0 and 2.
    a = pn.find_neighbor_pores(pores=[0, 2], mode='union')
    assert sp.all(a == [1, 3, 10, 12, 100, 102])
    a = pn.find_neighbor_pores(pores=[0, 2], mode='intersection')
    assert sp.all(a == [1])
    a = pn.find_neighbor_pores(pores=[0, 2], mode='not_intersection')
    assert sp.all(a == [3, 10, 12, 100, 102])
    # excl_self=False keeps the query pores themselves in the result.
    a = pn.find_neighbor_pores(pores=[0, 2],
                               mode='union',
                               excl_self=False)
    assert sp.all(a == [ 0,   1,   2,   3,  10,  12, 100, 102])
    a = pn.find_neighbor_pores(pores=[0, 2],
                               mode='intersection',
                               excl_self=False)
    assert sp.all(a == [1])
    a = pn.find_neighbor_pores(pores=[0, 2],
                               mode='not_intersection',
                               excl_self=False)
    assert sp.all(a == [0, 2, 3, 10, 12, 100, 102])
def test_find_neighbor_throats():
    """find_neighbor_throats: empty input, mask input, set-logic modes."""
    pn = OpenPNM.Network.Cubic(shape=(10,10,10))
    a = pn.find_neighbor_throats(pores=[])
    assert sp.size(a) == 0
    Pind = sp.zeros((pn.Np,), dtype=bool)
    Pind[[0,1]] = True
    a = pn.find_neighbor_throats(pores=Pind)
    assert sp.all(a == [ 0,    1,  900,  901, 1800, 1801])
    a = pn.find_neighbor_throats(pores=[0, 2], mode='union')
    assert sp.all(a == [   0,    1,    2,  900,  902, 1800, 1802])
    # Pores 0 and 2 share no throat, so the intersection is empty.
    a = pn.find_neighbor_throats(pores=[0, 2], mode='intersection')
    assert sp.size(a) == 0
    a = pn.find_neighbor_throats(pores=[0, 2], mode='not_intersection')
    assert sp.all(a == [   0,    1,    2,  900,  902, 1800, 1802])
def test_num_neighbors():
    """num_neighbors: empty input, boolean mask, flatten behaviour and dtype."""
    pn = OpenPNM.Network.Cubic(shape=(10,10,10))
    a = pn.num_neighbors(pores=[])
    assert sp.size(a) == 0
    Pind = sp.zeros((pn.Np,), dtype=bool)
    Pind[0] = True
    # Corner pore has exactly 3 neighbours in a cubic network.
    a = pn.num_neighbors(pores=Pind)
    assert a == 3
    # flatten=True sums the counts into a single int.
    a = pn.num_neighbors(pores=[0,2], flatten=True)
    assert a == 6
    assert isinstance(a, int)
    a = pn.num_neighbors(pores=[0,2], flatten=False)
    assert sp.all(a == [3, 4])
    a = pn.num_neighbors(pores=0, flatten=False)
    # fix: `sp.all(a = [3])` passed `a` as a keyword argument instead of
    # comparing, so the assertion never tested the value.
    assert sp.all(a == [3])
    assert isinstance(a, sp.ndarray)
def test_find_interface_throats():
    """find_interface_throats: throats between labelled domains, or none."""
    pn = OpenPNM.Network.Cubic(shape=(3,3,3))
    pn['pore.domain1'] = False
    pn['pore.domain2'] = False
    pn['pore.domain3'] = False
    # Label three disjoint pore groups.
    pn['pore.domain1'][[0, 1, 2]] = True
    pn['pore.domain2'][[5, 6, 7]] = True
    pn['pore.domain3'][18:26] = True
    # domain1 and domain2 touch through exactly one throat.
    a = pn.find_interface_throats(labels=['domain1', 'domain2'])
    assert a == [20]
    # domain1 and domain3 are not adjacent.
    a = pn.find_interface_throats(labels=['domain1', 'domain3'])
    assert sp.size(a) == 0
|
import cv2
import numpy as np
from time import sleep
width_min=80 #MIN WIDHT
height_min=80 #min height
offset=6
pos_line=550 #LINE POSITION
delay= 60 # VIDEO FPS
detect = []
cars= 0 # NO of CARS
def takes_center(x, y, w, h):  # FRAME CENTER
    """Return the centre (cx, cy) of the bounding box (x, y, w, h)."""
    half_w = int(w / 2)
    half_h = int(h / 2)
    return x + half_w, y + half_h
cap = cv2.VideoCapture('video.mp4') #Importing Video
subraction = cv2.bgsegm.createBackgroundSubtractorMOG() #Subraction Creation
while True:
    ret, frame1 = cap.read() # read frames from video
    if not ret:
        # fix: the return flag was ignored, so cvtColor crashed on a None
        # frame once the video ended (or failed to open).
        break
    sleep(float(1 / delay))  # pace playback to roughly the source FPS
    grey = cv2.cvtColor(frame1,cv2.COLOR_BGR2GRAY) # converts frame to GREY Scale
    blur = cv2.GaussianBlur(grey,(3,3),5) # converts gaussian blur
    img_sub = subraction.apply(blur)
    dilate = cv2.dilate(img_sub,np.ones((5,5))) #apply morphological filter to image
    kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (5, 5)) #Ellipse morphing image
    # Two closing passes to fill holes inside the moving blobs.
    detected = cv2.morphologyEx (dilate, cv2. MORPH_CLOSE , kernel)
    detected = cv2.morphologyEx (detected, cv2. MORPH_CLOSE , kernel)
    contour,h=cv2.findContours(detected,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE) #find Contours in frame
    cv2.line(frame1, (25, pos_line), (1200, pos_line), (255,127,0), 3) #draw lines on the frames
    for(i,c) in enumerate(contour):
        (x,y,w,h) = cv2.boundingRect(c) #used to draw rect on the frame ROI
        valid_contour = (w >= width_min) and (h >= height_min) #rect valid only if it is in the frame
        if not valid_contour: #Not valid if it is outside
            continue
        cv2.rectangle(frame1,(x,y),(x+w,y+h),(0,255,0),2) #drawing rectangle on ROI
        centre = takes_center(x, y, w, h) #while crossing center
        detect.append(centre) #Appending detect list
        cv2.circle(frame1, centre, 4, (0, 0,255), -1)
        for (x,y) in detect: #if cars crossing the line
            if y<(pos_line+offset) and y>(pos_line-offset):
                cars+=1 # add Count
                cv2.line(frame1, (25, pos_line), (1200, pos_line), (0,127,255), 3) #draw line in frame
                detect.remove((x,y)) # remove rect after crossing the line
                print("car is detected : "+str(cars))
    cv2.putText(frame1, "VEHICLE COUNT : "+str(cars), (450, 70), cv2.FONT_HERSHEY_SIMPLEX, 2, (0, 0, 255),5)
    cv2.imshow("Video Original" , frame1) #DIsplay Video and COUNT
    cv2.imshow("Detected",detected)
    if cv2.waitKey(1) == 27:
        break
cv2.destroyAllWindows()
cap.release() |
#! /usr/bin/python
class HelloWorld():
    # Demo class (Python 2 syntax — note the print statements).
    def __init__(self):
        # Cities to report on, and the known-capitals lookup list.
        self.words = ['Beijing','Chongqing','Shanghai']
        self.capitals = ['Beijing','WashtionDC','Berlin']
    def printword(self):
        # For each city: Beijing is special-cased; otherwise scan capitals.
        for w in self.words:
            if w == 'Beijing':
                print 'Beijing is the capital of China'
            else:
                # NOTE(review): this prints once per non-Beijing city
                # regardless of w — confirm the intended logic.
                for c in self.capitals:
                    if c == 'WashtionDC':
                        print 'this is the capital of America'
if __name__ == '__main__':
h = HelloWorld()
h.printword() |
import Functions.datafunctions as df
import Functions.vasicek_loop as vl
import numpy as np
import scipy.stats as stats
import matplotlib.pyplot as plt
delinq_data = df.get_data()
# Per credit bucket: latest-quarter delinquency rate, split by loan age.
last_pd = {}
score_b = delinq_data["CREDIT_BUCKET"].unique()[delinq_data["CREDIT_BUCKET"].unique() != "No Score"]
for c in score_b:
    delinq_vect_old = (delinq_data.loc[np.logical_and(delinq_data["CREDIT_BUCKET"] == c, np.logical_and(delinq_data["new_loan"] == 0, delinq_data["cur_qtr"] == "2020-Q1")),["DELINQ_IND"]]).to_numpy()
    delinq_vect_new = (delinq_data.loc[np.logical_and(delinq_data["CREDIT_BUCKET"] == c, np.logical_and(delinq_data["new_loan"] == 1, delinq_data["cur_qtr"] == "2020-Q1")),["DELINQ_IND"]]).to_numpy()
    #print(c,np.sum(delinq_vect_new),len(delinq_vect_new))
    try:
        # fix: the keys were swapped — the new-loan vector (new_loan == 1) was
        # stored under "Old Loan" and vice versa.
        last_pd[c] = {"Old Loan":np.sum(delinq_vect_old) / len(delinq_vect_old), "New Loan":np.sum(delinq_vect_new) / len(delinq_vect_new)}
    except ZeroDivisionError:
        # Bucket with no loans in one of the age groups: report and skip.
        print(c)
# Portfolio snapshot: all scored loans outstanding in 2020-Q1.
curr = delinq_data.loc[np.logical_and(delinq_data["cur_qtr"] == "2020-Q1", delinq_data["CREDIT_BUCKET"] != "No Score"),:]
curr = curr.reset_index()
tot_volume = np.sum(curr["CURR_UPB"].to_numpy())
# Per-loan inputs for the Vasicek copula loop: balance weight, a flat 0.15
# asset correlation, and the bucket/age-specific default probability.
loan_weight = np.zeros(len(curr.index))
loan_corr = np.zeros(len(curr.index)) + 0.15
loan_pd = np.zeros(len(curr.index))
for index, row in curr.iterrows():
    loan_weight[index] = row["CURR_UPB"] / tot_volume
    c_bucket = row["CREDIT_BUCKET"]
    new_ind = "New Loan"
    if row["new_loan"] == 0:
        new_ind = "Old Loan"
    loan_pd[index] = last_pd[c_bucket][new_ind]
# Gaussian copula: simulate portfolio losses, then plot the distribution
# annotated with mean, 99% VaR, expected shortfall and standard error.
res = vl.Copula_Loop(loan_weight, loan_corr, loan_pd, lambda x: stats.norm.rvs(size = x), lambda x: stats.norm.ppf(x), lambda x: stats.norm.cdf(x), 100)
plt.figure(figsize=(20,15))
plt.hist(res, bins = 5000, density = True)
plt.title("Gaussian Distribution of Losses")
# NOTE(review): the VaR/ES indexing assumes `res` is sorted worst-first — confirm.
textstr = r"Mean = {}".format(np.round(np.mean(res)*100,3)) + \
"\n" + \
r'$99\%$ Var = {}'.format(np.round(res[int(np.floor(len(res) * 0.01))] * 100,3)) + \
"\n Expected Shortfall = {}".format(np.round(np.mean(res[0:int(np.floor(len(res) * 0.01))])*100, 3)) + \
"\n Standard Error = {}".format(np.round(stats.sem(res, axis = None), 5))
#Style
props = dict(boxstyle='round', facecolor='wheat', alpha=0.5)
#Append the Var and ES
plt.text((plt.xlim()[1] - plt.xlim()[0]) * 0.05 + plt.xlim()[0], plt.ylim()[1] - (plt.ylim()[1] - plt.ylim()[0])*0.05, textstr, fontsize=14,verticalalignment='top', bbox=props)
#Save and Clear
plt.savefig(r"Plots\Gaussian_Distribution.png", dpi = 600)
plt.cla()
# Student-t copula (df = 2): same simulation and plot with fatter tails.
res = vl.Copula_Loop(loan_weight, loan_corr, loan_pd, lambda x: stats.t.rvs(size = x, df = 2), lambda x: stats.t.ppf(x, df = 2), lambda x: stats.t.cdf(x, df = 2), 100)
plt.figure(figsize=(20,15))
plt.hist(res, bins = 5000, density = True)
plt.title("Student Distribution of Losses")
textstr = r"Mean = {}".format(np.round(np.mean(res)*100,3)) + \
"\n" + \
r'$99\%$ Var = {}'.format(np.round(res[int(np.floor(len(res) * 0.01))] * 100,3)) + \
"\n Expected Shortfall = {}".format(np.round(np.mean(res[0:int(np.floor(len(res) * 0.01))])*100, 3)) + \
"\n Standard Error = {}".format(np.round(stats.sem(res, axis = None), 5))
#Style
props = dict(boxstyle='round', facecolor='wheat', alpha=0.5)
#Append the Var and ES
plt.text((plt.xlim()[1] - plt.xlim()[0]) * 0.05 + plt.xlim()[0], plt.ylim()[1] - (plt.ylim()[1] - plt.ylim()[0])*0.05, textstr, fontsize=14,verticalalignment='top', bbox=props)
plt.savefig(r"Plots\Student_Distribution.png", dpi = 600) |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Sep 1 17:13:02 2020
@author: baxcruiser
"""
# Count pairs of equal values (sock-merchant problem).
n = int(input())  # declared count of entries; the list itself is read next
# fix: `input().strip.split()` referenced the strip method without calling it,
# raising AttributeError; strip() must be invoked before split().
ar = list(map(int, input().strip().split()))
pairs = 0
# Every two occurrences of the same value form one pair.
for element in set(ar):
    pairs += ar.count(element) // 2
print(pairs)
|
import os
class Node:
    """Node of an n-ary tree: a word plus its 1-based index among siblings."""

    def __init__(self, word, index=1, parent=None):
        self.index = index
        self.word = word
        self.parent = parent
        self.children = []

    def add_child(self, word):
        """Append a child for *word* and return it; None if it already exists."""
        if self.has_child(word):
            return None
        child = Node(word, len(self.children) + 1, self)
        self.children.append(child)
        return child

    def has_child(self, word):
        """True if a direct child already holds *word*."""
        return self.get_child(word) is not None

    def get_child(self, word):
        """Return the direct child holding *word*, or None."""
        for candidate in self.children:
            if candidate.word == word:
                return candidate
        return None

    def is_leaf(self):
        """True when this node has no children."""
        return not self.children

    def get_path(self):
        """Return ancestor indices, root-first; the root itself yields ['1']."""
        ancestor = self.parent
        if ancestor is None:
            # Current node is the root.
            return ['1']
        indices = []
        while ancestor is not None:
            indices.append(str(ancestor.index))
            ancestor = ancestor.parent
        return list(reversed(indices))

    def get_path_string(self, depth=None):
        """Return "<word> <path...>", zero-padded to *depth* entries if given."""
        path = self.get_path()
        if depth is not None:
            path.extend(['0'] * (depth - len(path)))
        return self.word + " " + " ".join(path)
class Tree:
    """Tree Datastructure for creating word-hypernym trees.
    """
    def __init__(self):
        # Sentinel root; depth tracks the longest inserted path, words the
        # set of node labels already present.
        self.root = Node('*root*')
        self.depth = 1
        self.words = set()
    def add_hypernym_path(self, ordered_path, embedded_words, ignore_duplicates):
        """Adds a hypernym path of a word.
        :param ordered_path: ordered list of parents of a word/synset, starting from root
        :param embedded_words: set containing word-embeddings
        :param ignore_duplicates: True if duplicate nodes with different hypernym paths should be ignored, else False
        """
        node = self.root
        current_len = 0
        for synset in ordered_path[1:]:
            current_len += 1
            # Strip the "Synset('...')" wrapper from the repr.
            child = synset.__str__()[7:-1]
            #avoids nodes with multiple word-compositions
            if len(child.split()) > 1:
                break
            elif child.split('.')[0] not in embedded_words:
                break
            elif node.has_child(child):
                node = node.get_child(child)
            elif ignore_duplicates and child in self.words:
                break
            else:
                self.words.add(child)
                node = node.add_child(child)
        # NOTE(review): compares the number of accepted nodes (current_len)
        # but stores len(ordered_path) — confirm this mismatch is intended.
        if current_len > self.depth:
            self.depth = len(ordered_path)
    def write_parent_location_code(self, outputfile):
        """Writes parent locations of all words into a file.
        :param outputfile: file to write into
        """
        # Skip if the file already exists (treated as a cache).
        if os.path.isfile(outputfile):
            return
        def traverse(node, file):
            # Depth-first dump of "<word> <padded path>" lines.
            code = node.get_path_string(self.depth)
            file.write(code+"\n")
            for child in node.children:
                traverse(child, file)
        node = self.root
        with open(outputfile, 'w') as file:
            traverse(node, file)
    def write_tree(self, outputfile):
        """Writes the elements of the tree into a file.
        The structure of the output follows the breadth-first-search approach.
        :param outputfile: the file to write into
        """
        def traverse(node, visited, file):
            # `visited` keys on the path string so each position is written once.
            code = node.get_path_string()
            if node.is_leaf():
                if not code in visited:
                    file.write(node.word+"\n")
                    visited.add(code)
            else:
                if not code in visited:
                    file.write(node.word +" "+" ".join(map(lambda n: n.word, node.children)))
                    file.write('\n')
                    visited.add(code)
            for child in node.children:
                traverse(child, visited, file)
        visited = {'0'}
        node = self.root
        with open(outputfile, 'w') as file:
traverse(node, visited, file) |
# Greeting banner followed by two blank lines.
greeting = "Welcome to hello world! \n\n"
print(greeting)
|
import requests
from lxml import etree
from urllib import request
import os
import re
import threading
from queue import Queue
class Producer(threading.Thread):
    """Worker thread: pops list-page URLs and queues (img_url, filename) pairs."""
    # Browser-like UA so the site serves the normal page markup.
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.90 Safari/537.36'
    }
    def __init__(self, page_que, img_que, *args, **kwargs):
        super(Producer, self).__init__(*args, **kwargs)
        self.page_que = page_que
        self.img_que = img_que
    def run(self):
        # Drain the page queue; exit when no pages remain.
        while True:
            if self.page_que.empty():
                break
            url = self.page_que.get()
            self.parse_page(url)
    def parse_page(self, url):
        """Extract non-gif image URLs from one list page into the image queue."""
        response = requests.get(url, headers=self.headers)
        text = response.text
        html = etree.HTML(text)
        imgs = html.xpath("//div[@class='page-content text-center']//img[@class!='gif']")
        for img in imgs:
            img_url = img.get('data-original')
            alt = img.get('alt')
            # Sanitize the alt text so it is usable as a filename.
            alt = re.sub(r'[\??\.,。!!\*]', '', alt)
            suffix = os.path.splitext(img_url)[1]
            filename = alt + suffix
            self.img_que.put((img_url, filename))
class Consumer(threading.Thread):
    """Worker thread: downloads queued images into the images/ directory."""
    def __init__(self, page_que, img_que, *args, **kwargs):
        super(Consumer, self).__init__(*args, **kwargs)
        self.page_que = page_que
        self.img_que = img_que
    def run(self):
        # Stop once both queues are drained.
        while True:
            if self.img_que.empty() and self.page_que.empty():
                break
            # NOTE(review): between the empty() check and this get() another
            # consumer may drain the queue, blocking this call forever —
            # consider get(timeout=...) if that matters.
            img_url, filename = self.img_que.get()
            request.urlretrieve(img_url, 'images/' + filename)
            print(filename + ' 下载完成!')
def main():
    """Queue 100 list pages and launch 5 producer + 5 consumer threads."""
    page_que = Queue(100)
    img_que = Queue(1000)
    # Seed the work queue with every list-page URL.
    for page in range(1, 101):
        page_que.put('https://www.doutula.com/article/list/?page=%d' % page)
    for _ in range(5):
        Producer(page_que, img_que).start()
    for _ in range(5):
        Consumer(page_que, img_que).start()
|
import solve
# Each case: (N, stages, expected result of solve.solution).
data = [
    [5, [2, 1, 2, 6, 2, 4, 3, 3], [3,4,2,1,5]],
    [4, [4,4,4,4,4], [4,1,2,3]]
]
def test(N, stages, res):
    # Assert solve.solution reproduces the expected ordering.
    ans = solve.solution(N, stages)
    assert ans == res
for d in data:
test(d[0], d[1], d[2]) |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Author: Gusseppe Bravo <gbravor@uni.pe>
# License: BSD 3 clause
"""
This module provides the logic of the whole project.
"""
import define
#import analyze
import prepare
import feature_selection
import evaluate
import time
import os
from pyspark.ml import Pipeline
from pyspark.sql import SparkSession
#from pyspark import SparkContext, SparkConf
# NOTE(review): this try block is a no-op left over from a spark.stop()
# guard — the bare except never fires because the body only passes.
try:
    # spark.stop()
    pass
except:
    pass
#name = "datasets/buses_10000_filtered.csv"
name = "hdfs://King:9000/user/bdata/buses_10000_filtered.csv"
response = "tiempoRecorrido"
# Connect to the standalone Spark master on King.
spark_session = SparkSession.builder \
    .master('spark://King:7077') \
    .appName("Sparkmach") \
    .config("spark.driver.allowMultipleContexts", "true")\
    .getOrCreate()
# conf = SparkConf()\
#         .setMaster("local")\
#         .setAppName("sparkmach")\
#         .set("spark.driver.allowMultipleContexts", "true")
#sparkContext = SparkContext(conf=conf)
currentDir = os.getcwd()
# Ship the pipeline modules to every executor.
spark_session.sparkContext.addPyFile(currentDir + "/define.py")
#spark_session.sparkContext.addPyFile("/home/vagrant/tesis/sparkmach/sparkmach/sparkmach/analyze.py")
spark_session.sparkContext.addPyFile(currentDir + "/prepare.py")
spark_session.sparkContext.addPyFile(currentDir + "/feature_selection.py")
spark_session.sparkContext.addPyFile(currentDir + "/evaluate.py")
# STEP 0: Define workflow parameters
definer = define.Define(spark_session, data_path=name, response=response).pipeline()
# STEP 1: Analyze data by ploting it
# analyze.Analyze(definer).pipeline()
# STEP 2: Prepare data by scaling, normalizing, etc.
preparer = prepare.Prepare(definer).pipeline()
#STEP 3: Feature selection
featurer = feature_selection.FeatureSelection(definer).pipeline()
#STEP4: Evalute the algorithms by using the pipelines
evaluator = evaluate.Evaluate(definer, preparer, featurer).pipeline()
# start = time.time()
# result = main()
# end = time.time()
# print()
# print("Execution time for all the steps: ", end-start)
|
#!/usr/bin/python3
class Point:
    """A point in the plane at coordinates (x, y)."""

    def __init__(self, x=0, y=0):
        """Create a new point at (x, y); defaults to the origin."""
        self.x = x
        self.y = y

    def distance_from_origin(self):
        """Return the Euclidean distance from (0, 0)."""
        return (self.x ** 2 + self.y ** 2) ** 0.5

    def slope_from_origin(self):
        """Return the slope of the line joining the origin to this point."""
        return self.y / self.x

    def get_line_to(self, point):
        """Return (a, b) of the line y = a*x + b through self and *point*."""
        dx = self.x - point.x
        dy = self.y - point.y
        slope = dy / dx
        intercept = (dx * point.y - dy * point.x) / dx
        return slope, intercept
class Rectangle:
    """A class to manufacture axis-aligned rectangle objects.

    `corner` is the lower-left corner (any object exposing .x and .y),
    `width` and `height` extend toward positive x and y.
    """
    def __init__(self, posn, w, h):
        """ Initialize rectangle at posn, with width w, height h """
        self.corner = posn
        self.width = w
        self.height = h
    def __str__(self):
        return "({0}, {1}, {2})".format(self.corner, self.width, self.height)
    def get_points(self):
        """Return the four corner Points of this rectangle."""
        corner2 = Point(self.corner.x + self.width, self.corner.y)
        corner3 = Point(self.corner.x, self.corner.y + self.height)
        corner4 = Point(self.corner.x + self.width, self.corner.y + self.height)
        return self.corner, corner2, corner3, corner4
    def __is_interior_point(self, point):
        """Return True when *point* lies inside or on this rectangle's edge."""
        inX = False
        inY = False
        if (point.x >= self.corner.x) and (point.x <= self.corner.x + self.width):
            inX = True
        if (point.y >= self.corner.y) and (point.y <= self.corner.y + self.height):
            inY = True
        return inX and inY
    def is_colliding(self, rectangle):
        """Return True when this rectangle and *rectangle* overlap or touch.

        Bug fix: the original tested only whether a corner of *rectangle*
        lay inside self, which misses cross-shaped overlaps (where neither
        rectangle contains a corner of the other) and makes the relation
        asymmetric when one rectangle contains the other.  The standard
        axis-aligned interval-overlap test handles every case and is
        symmetric; touching edges still count as colliding, as before.
        """
        return (self.corner.x <= rectangle.corner.x + rectangle.width
                and rectangle.corner.x <= self.corner.x + self.width
                and self.corner.y <= rectangle.corner.y + rectangle.height
                and rectangle.corner.y <= self.corner.y + self.height)
# Demo: two 2x3 rectangles offset vertically by 2 units overlap; both
# directions are printed because the original corner-based test was
# not guaranteed to be symmetric.
rect1 = Rectangle(Point(0, 0), 2, 3)
rect2 = Rectangle(Point(0, 2), 2, 3)
print(rect1.is_colliding(rect2))
print(rect2.is_colliding(rect1))
|
import sys
import multiprocessing
import threading
import ipyparallel
import numpy as np
from time import time
import pickle
import argparse
def timer(fn):
    """Timing decorator.

    The wrapped callable prints ``fn.__name__, args, elapsed`` and returns
    a tuple ``(result, elapsed_seconds)``.
    """
    from functools import wraps  # local import keeps the module imports unchanged

    @wraps(fn)  # fix: preserve the wrapped function's name/docstring
    def timed(*args, **kwargs):
        start = time()
        result = fn(*args, **kwargs)
        end = time()
        print(fn.__name__, args, end - start)
        return result, end - start
    return timed
def dart(*args, **kwargs):
    """Throw one random dart at the unit square.

    Returns 1 when the dart lands in the circle of radius 0.5 centred at
    (0.5, 0.5), else 0.  All arguments are ignored so the function can be
    mapped over a dummy range.
    """
    x, y = np.random.rand(), np.random.rand()
    inside = np.sqrt((x - 0.5) ** 2 + (y - 0.5) ** 2) <= 0.5
    return 1 if inside else 0
@timer
def pi_serial(n_tot):
    """Estimate pi by throwing n_tot darts one after another."""
    hits = sum(dart() for _ in range(n_tot))
    return 4 * hits / n_tot
@timer
def pi_proc(n_tot, n_procs=4):
    """
    Throw n_tot darts, distributing the work among n_procs processes.
    """
    # Fix: the context manager terminates the pool even if map() raises;
    # the original leaked the pool's worker processes on error.
    with multiprocessing.Pool(processes=n_procs) as pool:
        n_in = sum(pool.map(dart, range(n_tot)))
    return 4 * n_in / n_tot
@timer
def pi_cluster(n_tot):
    # Relies on the module-global ipyparallel client `c`, which is created
    # in the __main__ block only when --doall is passed; calling this
    # without that setup raises NameError.
    darts = c[:].map(dart, range(n_tot))
    n_in = sum(darts)
    pi_approx = 4 * n_in / n_tot
    return pi_approx
def do_experiment(fn, n_tot_range, reps=3, **kw):
    """Run *fn* ``reps`` times over each size in *n_tot_range*.

    *fn* must return ``(result, elapsed_seconds)`` (the shape produced by
    the ``timer`` decorator).  Returns the per-size mean and standard
    deviation of the elapsed times.
    """
    samples = np.array(
        [[fn(n_tot, **kw)[1] for n_tot in n_tot_range] for _ in range(reps)]
    )
    return np.mean(samples, axis=0), np.std(samples, axis=0)
def do_all(nmax=7, save='./experiment_times.pkl', reps=3):
    """Do all of the experiments for the plot and save the data to file.

    Parameters
    ----------
    nmax : log10 of the largest dart count to test.
    save : pickle path for the results, or None to skip saving.
    reps : repetitions per experiment (for the error bars).

    Returns the results dict: 'n_tot' plus one
    {'mean_time', 'std_time'} entry per experiment.
    """
    # argparse passes None when a flag is given without a value
    # (nargs='?'); fall back to the defaults.  Fix: identity comparison
    # `is None` instead of `== None`.
    if nmax is None:
        nmax = 7
    if reps is None:
        reps = 3
    n_tot_range = [int(i) for i in np.logspace(1, nmax, (nmax * 2) - 1)]
    # Fix: dropped the dead pre-initialization loop that filled info with
    # empty dicts which were immediately overwritten below.
    info = {'n_tot': n_tot_range}
    mean, std = do_experiment(pi_serial, n_tot_range, reps=reps)
    info['serial'] = {'mean_time': mean, 'std_time': std}
    mean, std = do_experiment(pi_proc, n_tot_range, reps=reps, n_procs=2)
    info['multiprocess (2 procs)'] = {'mean_time': mean, 'std_time': std}
    mean, std = do_experiment(pi_proc, n_tot_range, reps=reps, n_procs=4)
    info['multiprocess (4 procs)'] = {'mean_time': mean, 'std_time': std}
    mean, std = do_experiment(pi_cluster, n_tot_range, reps=reps)
    info['cluster (4 cores)'] = {'mean_time': mean, 'std_time': std}
    if save is not None:
        # Fix: `with` closes the file handle the original left to the GC.
        with open(save, 'wb') as f:
            pickle.dump(info, f)
    return info
def make_plot(fname='./experiment_times.pkl', save=None):
    """Make a plot with the data from the saved experiment.

    Parameters
    ----------
    fname : pickle file written by ``do_all``.
    save : optional figure path; when given the figure is saved instead
        of shown interactively.
    """
    try:
        # Fix: `with` closes the file even on unpickling errors.
        with open(fname, 'rb') as f:
            info = pickle.load(f)
    except (OSError, pickle.UnpicklingError, EOFError):
        # Fix: narrowed from a bare `except:`, which also swallowed
        # KeyboardInterrupt and SystemExit.
        print('No experiment saved under', fname)
        sys.exit()
    import matplotlib.pyplot as plt
    import seaborn as sns
    sns.set_style('white')
    sns.set_context('poster')
    palette = sns.color_palette('Set2')
    fig, ax1 = plt.subplots(figsize=(12, 9))
    ax2 = ax1.twinx()  # second y-axis for the darts/second rate
    n_tot_range = info['n_tot']
    lines = []
    for k in ['serial', 'multiprocess (2 procs)', 'multiprocess (4 procs)', 'cluster (4 cores)']:
        v = info[k]
        color = palette.pop(0)  # one palette colour per series
        lines += ax1.plot(n_tot_range, v['mean_time'], '-', label=k, color=color, linewidth=2)
        # Shade +/- one standard deviation around the mean time.
        ax1.fill_between(n_tot_range, v['mean_time']-v['std_time'], v['mean_time']+v['std_time'], alpha=0.5, color=color)
        ax2.plot(n_tot_range, n_tot_range/v['mean_time'], '--', color=color, linewidth=2)
    ax1.set_xscale('log')
    ax1.set_yscale('log')
    ax2.set_yscale('log')
    labels = [l.get_label() for l in lines]
    leg = ax2.legend(lines, labels, loc='lower right')
    ax1.set_xlabel('Number of darts')
    ax1.set_ylabel('Execution time [s] (solid)')
    ax2.set_ylabel('Simulation rate [darts/s] (dotted)')
    ax1.set_title('MacBook Air w/ 1.3 GHz Core i5 (2 cores)')
    if save is not None:
        plt.savefig(save, bbox_inches='tight')
    else:
        plt.show()
if __name__ == '__main__':
    # CLI entry point: optionally run every timing experiment, then plot
    # whatever results file exists.
    parser = argparse.ArgumentParser(description='Test parallel computing methods while calculating pi')
    parser.add_argument('--doall', action='store_true', help='Run all tests and save a pickle file of the results')
    parser.add_argument('-n', dest='nmax', type=int, default=7, nargs='?', help='log10(max number of darts to throw). Default: 7')
    parser.add_argument('-r', dest='reps', type=int, default=3, nargs='?', help='Number of repitiions of each experiment (for error). Default: 3')
    parser.add_argument('-s', dest='savetmp', type=str, default='experiment_times.pkl', nargs='?', help='Pickle file name for timing data. Default: experiment_times.pkl')
    parser.add_argument('-o', dest='output', type=str, help='Save figure path. Default: parallel.pdf')
    args = parser.parse_args()
    if args.doall:
        # Initialize IPyParallel client.  NOTE(review): `c` is the
        # module-level client that pi_cluster reads; it exists only when
        # --doall was passed.
        c = ipyparallel.Client()
        # Do experiments
        do_all(nmax=args.nmax, save=args.savetmp, reps=args.reps)
    make_plot(fname=args.savetmp, save=args.output)
|
class Solution(object):
    """LeetCode 326 "Power of Three" — three alternative solutions."""

    def isPowerOfThreeLoop(self, n):
        """Iterative check: divide by 3 while it divides evenly.

        Fix: the original defined `isPowerOfThree` twice, so this first
        version was silently shadowed and unreachable; it now has its own
        name.  Floor division (`//=`) keeps n an int on Python 3 too.

        :type n: int
        :rtype: bool
        """
        if n == 0: return False
        while n % 3 == 0:
            n //= 3
        return n == 1

    def isPowerOfThreeR(self, n):
        """Recursive check.

        :type n: int
        :rtype: bool
        """
        if n <= 0: return False
        if n == 1: return True
        # Fix: floor division — `n / 3` is float division on Python 3.
        return n % 3 == 0 and self.isPowerOfThreeR(n // 3)

    def isPowerOfThree(self, n):
        """Loop-free check: 3**19 is the largest power of 3 below 2**31,
        so n is a power of three iff n > 0 and n divides 3**19.  The
        chained comparison reads `n > 0 and 0 == 3**19 % n`."""
        return n > 0 == 3**19 % n
if __name__ == '__main__':
    test = Solution()
    # Fix: the print() call form works under both Python 2 and 3; the
    # original `print test.isPowerOfThree(1)` statement is a SyntaxError
    # on Python 3.
    print(test.isPowerOfThree(1))
import argparse
import base64
import json
import sys
import functools
import requests
def generate_json_data(image_filename, output_filename):
    """Build a Vision API annotate request for one image and write it as JSON.

    Args:
        image_filename: path of the image file to annotate.
            (Fix: the docstring previously described a nonexistent
            `input_file` parameter.)
        output_filename: the name of the file to output the json to.
    """
    request_list = []
    # Feature name and maximum result count, colon-separated.
    DETECTION_TYPES = [
        'FACE_DETECTION:10',
        'CROP_HINTS:10',
        'LOGO_DETECTION:10',
        'LABEL_DETECTION:10',
        'TEXT_DETECTION:10',
        'WEB_DETECTION:10'
    ]
    # Fix: removed the unused `features = "4:10"` variable.
    with open(image_filename, 'rb') as image_file:
        # The API expects the raw image bytes base64-encoded.
        content_json_obj = {
            'content': base64.b64encode(image_file.read()).decode('UTF-8')
        }
        feature_json_obj = []
        for detection_type in DETECTION_TYPES:
            feature, max_results = detection_type.split(':', 1)
            feature_json_obj.append({
                'type': feature,
                'maxResults': int(max_results),
            })
        request_list.append({
            'features': feature_json_obj,
            'image': content_json_obj,
        })
    with open(output_filename, 'w') as output_file:
        json.dump({'requests': request_list}, output_file)
def get_google_analysis(image_filename):
    # Will give a response in the form of a dictionary with
    # ["crop"] = approximate bounding box (x,y,width,height) if not found
    # ["items"] = possible names of the item
    # ["best_guess"] = Hopefully string readible title for the object
    output_filename = "google_image_data.json"
    generate_json_data(image_filename, output_filename)
    data = open(output_filename, 'rb').read()
    # SECURITY NOTE(review): the API key is hard-coded in the URL below
    # and therefore exposed to anyone with this source; move it to
    # configuration and revoke the published key.
    response = requests.post(url='https://vision.googleapis.com/v1/images:annotate?key=AIzaSyBgalC41vkCLty97Je2bmgd9nXH8GeIyJA', data=data, headers={'Content-Type': 'application/json'})
    response_json = response.json()["responses"][0]
    results = {}
    if "cropHintsAnnotation" in response_json:
        # Reduce the first crop hint's polygon to an axis-aligned
        # (x, y, width, height) box; missing vertex coords default to 0.
        vertices = response_json["cropHintsAnnotation"]["cropHints"][0]["boundingPoly"]["vertices"]
        print("Raw Cropping Vertices --- {}".format(vertices))
        x_vars = []
        y_vars = []
        for vertex_pair in vertices:
            x_vars.append(vertex_pair.get("x", 0))
            y_vars.append(vertex_pair.get("y", 0))
        results["crop"] = (min(x_vars), min(y_vars), max(x_vars) - min(x_vars),max(y_vars) - min(y_vars))
    else:
        results["crop"] = (0,0,0,0)
    best_guess = ""
    items = set()
    if "labelAnnotations" in response_json:
        # Flatten every label description into individual lower-case words.
        entities = list(functools.reduce(lambda a, b: a + b ,map(lambda x: x.get("description").lower().split(" "), filter(lambda d: "description" in d, response_json["labelAnnotations"]))))
        items.update(entities)
        best_guess = response_json["labelAnnotations"][0]["description"]
    if "webDetection" in response_json:
        if "webEntities" in response_json["webDetection"]:
            entities = list(functools.reduce(lambda a, b: a + b ,map(lambda x: x.get("description").lower().split(" "), filter(lambda d: "description" in d, response_json["webDetection"]["webEntities"]))))
            items.update(entities)
        if "bestGuessLabels" in response_json["webDetection"]:
            # The web-detection best guess wins over label annotations.
            best_guess = response_json["webDetection"]["bestGuessLabels"][0]["label"]
    results["items"] = list(items)
    results["best_guess"] = best_guess
    return results
# result = get_google_analysis("water-bottle.jpg")
# print(result)
|
class DuplicateHandlerError(Exception):
    """ Raised when a handler with a duplicate id or shortcode exists. """
    # No extra state — details travel in the message passed at raise time.
class InvalidStateChange(Exception):
    """ Raised when an invalid state change is executed (e.g. closing an open
    ticket without the intermediary 'pending' step).
    """
    # NOTE(review): name breaks the *Error suffix convention used by
    # DuplicateHandlerError; renaming it would be an interface change.
|
from django.contrib import admin
from import_export import resources
from import_export.admin import ImportExportModelAdmin
from products.models import Product
from . import models
class ProductInline(admin.StackedInline):
    """Stacked inline editor for Products attached to an Order admin page."""
    model = Product
    # NOTE(review): 'id' is usually a non-editable field — confirm the
    # admin accepts it in `fields` for this model.
    fields = ('id', 'name', 'price')
    extra = 0  # no blank extra forms
    show_change_link = True
class OrderResource(resources.ModelResource):
    """django-import-export resource describing Order import/export columns."""
    class Meta:
        model = models.Order
        fields = ('id', 'customer_name', 'customer_city', 'customer_country', 'longitude', 'latitude',)
        # Export columns in the same order they are declared above.
        export_order = fields
@admin.register(models.Order)
class OrderAdmin(ImportExportModelAdmin):
    """Admin for Orders with import/export support via OrderResource."""
    list_display = ('id', 'customer_name', 'customer_city', 'customer_country', 'longitude', 'latitude',)
    fields = ('id', 'customer_name', 'customer_city', 'customer_country', 'products', 'longitude', 'latitude',)
    resource_class = OrderResource
|
import csv
import re
def get_category_list(category_string):
    """Split a '*'-separated category string into a cleaned list.

    Returns [] when the string contains no letters at all; otherwise the
    string is stripped, lower-cased, and split on '*'.
    """
    if not re.search('[a-zA-Z]', category_string):
        return []
    # Bug fix: str.strip()/str.lower() return new strings — the original
    # discarded their results, so categories kept case and whitespace.
    category_string = category_string.strip().lower()
    return re.split(r'\*', category_string)
def title_string_to_file_name(title_string):
    """Convert a movie title to a slug-style file name.

    Strips surrounding whitespace, removes the characters '"/.,() entirely,
    collapses internal whitespace runs to a single '-', and lower-cases
    the result.
    """
    # Bug fix: the original called .strip() without assigning the result,
    # so leading/trailing spaces became stray '-' characters.
    title_string = title_string.strip()
    # One C-level pass instead of seven chained .replace() calls.
    title_string = title_string.translate(str.maketrans('', '', '\'"/.,()'))
    title_string = re.sub(r"\s+", '-', title_string)
    return title_string.lower()
def get_annotation_map():
    """Read imdb_annotation.csv and map slugified titles to category lists.

    Returns:
        dict: file-name slug of column 0 -> category list parsed from
        column 2 (via title_string_to_file_name / get_category_list).
    """
    annotation_map = {}
    filename = 'imdb_annotation.csv'
    # filename = 'test.csv'
    with open(filename, encoding="ISO-8859-1") as csvfile:
        readCSV = csv.reader(csvfile, delimiter=',')
        header = True
        for row in readCSV:
            if header:
                # Skip the single header row.
                header = False
                continue
            # Remove all apostrophes, \, . from the title to build the key.
            file_name = title_string_to_file_name(row[0])
            annotation_map[file_name] = get_category_list(row[2])
        # Fix: removed the redundant csvfile.close() — the `with` block
        # already closes the file.
    return annotation_map
# annotation_map = get_annotation_map()
# for a in annotation_map:
# print(a)
# print(annotation_map[a])
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from plugins.plugin import Plugin
from components.constants import *
class Dilatacao(Plugin):
    """Dilation plugin: exposes an x/y/w/h geometry to the host editor
    through the get_properties/set_properties protocol."""
    def __init__(self):
        # Geometry values, all default to 0.
        self.x = 0
        self.y = 0
        self.w = 0
        self.h = 0
        pass
    def set_properties(self, data):
        # Store values coming from the host (original comment:
        # "Escrever o XML" — write the XML).
        self.x = data["0"]
        self.y = data["1"]
        self.w = data["2"]
        self.h = data["3"]
        print "Este é o dilatação" + str(data)
    def get_properties(self):
        # Describe each property for the host's editor (original comment:
        # "Ler do XML" — read from the XML): name, type, bounds, step,
        # and current value.
        return {"0":{"name": "X",
                     "type": HARPIA_INT,
                     "lower":0,
                     "upper":10,
                     "step" :1,
                     "value":self.x},
                "1":{"name": "Y",
                     "type": HARPIA_INT,
                     "lower":0,
                     "upper":10,
                     "step" :1,
                     "value":self.y},
                "2":{"name":"Width",
                     "type": HARPIA_INT,
                     "lower":0,
                     "upper":10,
                     "step" :1,
                     "value":self.w},
                "3": {"name":"Height",
                      "type": HARPIA_INT,
                      "lower":0,
                      "upper":10,
                      "step" :1,
                      "value":self.h}
                }
    def getHelp(self):
        return "Operação dilatenta profunda cartesiana e poliritmica.\n Rulez the world!"
|
import sys, time, json, time, random, string
from argparse import ArgumentParser
from datetime import datetime
from kafka import KafkaProducer
def randomname(n):
    """Return a random alphanumeric string of length *n*."""
    alphabet = string.ascii_letters + string.digits
    return ''.join(random.choices(alphabet, k=n))
def get_option(topic, num):
    # Parse -t/--topic and -n/--num from the command line, using the
    # arguments passed in as the defaults.
    argparser = ArgumentParser(description='This script generates JSON for Kafka stream with multiple topic')
    argparser.add_argument('-t', '--topic',
                           default=topic,
                           help='''
                           Specify name of topic. Topic Name will be topic1,topic2... \n
                           Default: topic\n
                           ''')
    argparser.add_argument('-n', '--num', type=int,
                           default=num,
                           help='''
                           Specify number of topic.\n
                           Default: None\n
                           ''')
    return argparser.parse_args()
def main():
    """Emit a random JSON record to a (possibly random) Kafka topic every 3 s."""
    TOPIC_NAME = 'topic'
    NUM_OF_TOPIC = 1
    args = get_option(TOPIC_NAME, NUM_OF_TOPIC)
    t = str(args.topic)
    n = args.num + 1  # randrange upper bound is exclusive
    producer = KafkaProducer(bootstrap_servers='localhost:9092')
    while True:
        # Bug fix: `n is 1` compared identity, not value (and emits a
        # SyntaxWarning on modern CPython); use equality.
        if n == 1:
            topic = t
        else:
            # Pick one of the topic1..topicN variants at random.
            topic = t + str(random.randrange(1, n, 1))
        # Fix: local renamed from `id`, which shadowed the builtin.
        record_id = random.randrange(1000)
        utime = int(time.time())
        word = randomname(5)
        val1 = randomname(15)
        val2 = random.choice(('hoge', 'fuga', 'piyo'))
        value = {
            "id" : record_id,
            "utime" : utime,
            "word": word,
            "val1": val1,
            "val2": val2
        }
        msg = json.dumps(value)
        producer.send(topic, msg.encode("utf-8"))
        producer.flush()
        print(str(topic) + "::" + str(msg))
        time.sleep(3)
if __name__ == '__main__':
    main()  # script entry point
|
import os
import joblib
import pandas as pd
import statsmodels.api as sm
class Modeler_Price:
    """OLS price model: trains on a fixed CSV and persists via joblib."""
    def __init__(self):
        # NOTE(review): absolute Windows path with mixed separators —
        # this only resolves on the author's machine.
        self.df = pd.read_csv('D:/MY DATA\Desktop/DB/Proposal/new Senior/Models Deployment/All Models/modeler/Price_Deployment_Data.csv')
        # NOTE(review): bare except silently leaves the model unset on
        # any failure (missing file, version mismatch) — narrow this.
        try: self.model = joblib.load('models/price.model')
        except: self.model = None
    def fit(self):
        """Fit an OLS of 'price' on all other columns and persist it."""
        X = self.df.drop('price', axis=1)
        Y = self.df['price']
        X = sm.add_constant(X)  # statsmodels OLS needs an explicit intercept
        self.model = sm.OLS(Y, X).fit()
        joblib.dump(self.model, 'models/price.model')
    def predict(self, measurement):
        """Predict the price for one observation; the model file must exist."""
        if not os.path.exists('models/price.model'):
            raise Exception('Model not trained yet. Fit the model first')
        #if len(measurement[0]) != 7:
        #raise Exception(f'Expected six parameter for predictions but got {measurement}')
        prediction = self.model.predict(measurement)
        return prediction[0]
|
# Read the two grid dimensions from stdin.
N = int (input ())
M = int (input ())
# N(N+1)/2 * M(M+1)/2 — presumably the number of axis-aligned
# sub-rectangles of an N x M grid; // keeps the result an integer.
print (N * M * (N + 1) * (M + 1) // 4)
######saral que 4
# NOTE(review): dead code — the loop tracks the minimum of `number` in
# `sum`, but the final print reports `a` (the last element visited) and
# labels it "second greater number"; neither matches the other.
# number=[50,40,23,70,56,12,5,10,7]
# i=0
# sum=number[i]
# length=len(number)
# while i<length:
#     a=number[i]
#     if a<sum:
#         sum=a
#     i=i+1
# print(a,"is second greater number")
import zipfile,re
# Python Challenge "channel" puzzle: follow the chain of numbers through
# the zip's text files, collecting each entry's zip comment on the way.
zf = zipfile.ZipFile("channel.zip","r")
num = 90052  # starting file number from the puzzle page
comments = []
while True:
    try:
        # Each file's text contains the number of the next file.
        num = int(re.findall('\d+',zf.read(str(num)+".txt"))[0])
    except:
        # No number left: print the final message and stop following.
        print zf.read(str(num)+".txt")
        break
    comments.append(zf.getinfo(str(num)+".txt").comment)
print "".join(comments)
comments = []
#collect the comments
for info in zf.infolist():
    comments.append(info.comment)
print "".join(comments)
###################################################
## By Dan Melacon, Jeff Ong, and Kat Sullivan
###################################################
import serial, sys, binascii
# 64-bit serial numbers (hex) of the known XBee radios, by alias.
addresses={
    'radio1': '0013A200409756B8',
    'radio2': '0013A200409756BD',
    'radio3': '0013A20040975703',
    'radio4': '0013A200409756E2'
}
# Command-status byte (two hex chars) -> human-readable result.
responseType = {
    '00' : "OK",
    '01' : "ERROR",
    '02' : "INVALID COMMAND",
    '03' : "INVALID PARAMETER"
}
def checksum(st):
    """Return the API checksum (two hex chars) for the hex frame string *st*.

    Sums every byte after the first three (start delimiter + two length
    bytes), keeps only the lowest eight bits, and subtracts from 0xFF.
    """
    pairs = [st[i:i + 2] for i in range(0, len(st), 2)]
    total = sum(int(pair, 16) for pair in pairs[3:])
    # Keep only the lowest eight bits (last two hex characters).
    low_byte = hex(total)[-2:]
    # Subtract from 0xff.
    result = hex(int('0xff', 16) - int(low_byte, 16))
    return str(result)[2:]
def length(st):
    """Return the 4-hex-digit byte count of hex string *st* (2 chars/byte)."""
    n_bytes = (len(st) + 1) // 2  # same as counting 2-char chunks
    return '{0:#0{1}x}'.format(n_bytes, 6)[2:]
def messageToHex(message):
    """Return the hex encoding of *message* via binascii.hexlify."""
    hex_form = binascii.hexlify(message)
    return hex_form
class RemoteAT():
    """Remote AT command frame (API type 0x17) addressed to another radio."""
    command_type = '1701'  # frame type 0x17, frame id 0x01
    def __init__(self, message, radio):
        message = message.upper()
        # Hex-encode the two AT command letters (Python 2 str.encode("hex")).
        new_str = message[0].encode("hex") + message[1].encode("hex")
        if len(message) == 3:
            # Single-digit parameter: left-pad to one full byte.
            new_str += '0' + message[2]
        else:
            new_str += message[2:4]
        self.message = new_str
        try:
            self.address = addresses[radio]
        except KeyError:
            # Unknown alias: assume the caller passed a raw 64-bit address.
            self.address = radio
    def update(self):
        # Frame body: type + 64-bit dest + 0xFFFE (16-bit unknown) +
        # options 0x01 + command; wrapped with delimiter/length/checksum.
        request = self.command_type + self.address + 'FFFE' + '01' + self.message
        request = '7E' + length(request) + request
        request = request + checksum(request)
        self.frame = request.decode("hex")
    def send(self, serial, response_length):
        # Write the frame, then map the reply's status byte to text.
        serial.write(self.frame)
        try:
            response = serial.read(response_length)
            response = binascii.hexlify(response)
            response = response[-4:-2]
            return responseType[response]
        except KeyError:
            return 'Invalid response from remote radio'
#for version commands only
class RemoteAT2():
    """Remote AT frame (API type 0x17) for parameterless two-letter queries
    (e.g. version commands); send() also returns two data bytes."""
    command_type = '1701'
    def __init__(self, message, radio):
        message = message.upper()
        # Hex-encode the two AT command letters; no parameter byte here.
        new_str = message[0].encode("hex") + message[1].encode("hex")
        # if len(message) == 3:
        #     new_str += '0' + message[2]
        # else:
        #     new_str += message[2:4]
        self.message = new_str
        try:
            self.address = addresses[radio]
        except KeyError:
            # Unknown alias: assume a raw 64-bit address was passed.
            self.address = radio
    def update(self):
        # type + 64-bit dest + 0xFFFE + options 0x01 + command.
        request = self.command_type + self.address + 'FFFE' + '01' + self.message
        request = '7E' + length(request) + request
        request = request + checksum(request)
        self.frame = request.decode("hex")
    def send(self, serial, response_length):
        serial.write(self.frame)
        try:
            response = serial.read(response_length)
            response = binascii.hexlify(response)
            # Status byte, followed by two bytes of returned data.
            status = response[-8:-6]
            data = response[-6:-2]
            return [ responseType[status], data ]
        except KeyError:
            return 'Invalid response from remote radio'
class Transmit():
    """Transmit-request frame (API type 0x10) carrying a text payload."""
    command_type = '1001'
    def __init__(self, message, radio):
        # The payload travels hex-encoded inside the frame string.
        self.message = messageToHex(message)
        try:
            self.address = addresses[radio]
        except KeyError:
            # Unknown alias: assume a raw 64-bit address was passed.
            self.address = radio
    def update(self):
        # type + 64-bit dest + 0xFFFE + radius/options 0x0000 + payload.
        request = self.command_type + self.address + 'FFFE' + '0000' + self.message
        request = '7E' + length(request) + request
        request = request + checksum(request)
        self.frame = request.decode("hex")
    def send(self, serial, response_length):
        serial.write(self.frame)
        try:
            response = serial.read(response_length)
            response = binascii.hexlify(response)
            # Delivery-status byte of the transmit-status reply.
            response = response[-6:-4]
            return responseType[response]
        except KeyError:
            return 'Invalid response from remote radio'
class ATCommand():
    """Local AT command frame (API type 0x08).

    With a parameter, send() returns the reply's status text; for a bare
    two-letter query it returns [status, queried value].
    """
    command_type = '0801'
    parameter = True  # class default; overridden per instance for queries
    def __init__(self, message):
        message = message.upper()
        new_str = message[0].encode("hex") + message[1].encode("hex")
        if len(message) == 3:
            # Single-digit parameter: left-pad to one full byte.
            new_str += '0' + message[2]
            self.message = new_str
        elif len(message) == 2:
            # Bare two-letter command: a register query, no parameter.
            self.parameter = False
            self.message = new_str
        else:
            new_str += message[2:4]
            self.message = new_str
    def update(self):
        request = self.command_type + self.message
        request = '7E' + length(request) + request
        request = request + checksum(request)
        # print request
        self.frame = request.decode("hex")
    def send(self, serial, response_length):
        serial.write(self.frame)
        try:
            if self.parameter:
                response = serial.read(response_length)
                response = binascii.hexlify(response)
                response = response[-4:-2]
                return responseType[response]
            else:
                response = serial.read(response_length)
                response = binascii.hexlify(response)
                # Query reply: status byte followed by one data byte.
                status = response[-6:-4]
                data = response[-4:-2]
                data = int(data,16)
                return [ responseType[status], data ]
        except KeyError:
            return 'Invalid response from remote radio'
class Distance():
    """Local AT frame sending command 'RG' (0x52 0x47) with a target
    64-bit address — presumably a ranging/distance query; confirm against
    the radio firmware documentation."""
    command_type = '08015247'  # frame type 0x08, id 0x01, AT command 'RG'
    def __init__(self, radio):
        try:
            self.address = addresses[radio]
        except KeyError:
            # Unknown alias: assume a raw 64-bit address was passed.
            self.address = radio
    def update(self):
        request = self.command_type + self.address
        request = '7E' + length(request) + request
        request = request + checksum(request)
        # print request
        self.frame = request.decode("hex")
    def send(self, serial, response_length):
        serial.write(self.frame)
        try:
            response = serial.read(response_length)
            response = binascii.hexlify(response)
            # Status byte followed by a three-byte value.
            status = response[-10:-8]
            data = response[-8:-2]
            return [ responseType[status], int(data,16) ]
        except KeyError:
            return 'Invalid response from remote radio'
class RemoteDistance():
    """Remote AT frame (API type 0x17) asking *radio* to run the 'RG'
    command against *target*'s 64-bit address — presumably a remote
    ranging query; confirm against the radio firmware documentation."""
    command_type = '1701'
    def __init__(self, radio, target):
        # 'RG' (0x52 0x47) followed by the target's 64-bit address.
        message = '5247'+addresses[target]
        self.message = message
        try:
            self.address = addresses[radio]
        except KeyError:
            # Unknown alias: assume a raw 64-bit address was passed.
            self.address = radio
    def update(self):
        request = self.command_type + self.address + 'FFFE' + '01' + self.message
        request = '7E' + length(request) + request
        request = request + checksum(request)
        # return request
        self.frame = request.decode("hex")
    def send(self, serial, response_length):
        serial.write(self.frame)
        try:
            response = serial.read(response_length)
            response = binascii.hexlify(response)
            # return response
            # Status byte followed by a three-byte value.
            status = response[-10:-8]
            data = response[-8:-2]
            return [ responseType[status], int(data,16) ]
        except KeyError:
            return 'Invalid response from remote radio'
# ex016.py: accessing private (name-mangled) members
class A:
    """Demonstrates 'protected' (`_value1`) vs name-mangled private
    (`__value2`, stored as `_A__value2`) attributes."""
    def __init__(self, value1=0, value2=0):
        # Single underscore: conventionally non-public, still accessible.
        self._value1 = value1
        # Double underscore: name-mangled to _A__value2 outside the class.
        self.__value2 = value2
    def setValue(self, value1, value2):
        """Update both stored values."""
        self._value1 = value1
        self.__value2 = value2
    def show(self):
        """Print both values, one per line."""
        print(self._value1)
        print(self.__value2)
|
import torch
import torch.nn as nn
import numpy as np
from edflow.util import retrieve
from iin.models.ae import FeatureLayer, DenseEncoderLayer, weights_init
class Distribution(object):
    """Degenerate distribution wrapper: sampling and the mode both yield
    the stored value unchanged."""

    def __init__(self, value):
        self.value = value

    def sample(self):
        """Return the stored value (a deterministic 'sample')."""
        return self.value

    def mode(self):
        """Return the stored value."""
        return self.value
class Model(nn.Module):
    """Convolutional encoder with a linear classification head.

    Stacks `n_down` FeatureLayer blocks, densely encodes the spatial
    bottleneck into a z_dim vector, and classifies it into `n_classes`.
    """

    def __init__(self, config):
        super().__init__()
        import torch.backends.cudnn as cudnn
        cudnn.benchmark = True  # let cuDNN pick the fastest conv algorithms
        n_down = retrieve(config, "Model/n_down")
        # Fix: z_dim was retrieved twice; one lookup suffices.
        z_dim = retrieve(config, "Model/z_dim")
        in_size = retrieve(config, "Model/in_size")
        # Each downsampling stage halves the spatial size.
        bottleneck_size = in_size // 2**n_down
        in_channels = retrieve(config, "Model/in_channels")
        norm = retrieve(config, "Model/norm")
        n_classes = retrieve(config, "n_classes")
        self.feature_layers = nn.ModuleList()
        self.feature_layers.append(FeatureLayer(0, in_channels=in_channels, norm=norm))
        for scale in range(1, n_down):
            self.feature_layers.append(FeatureLayer(scale, norm=norm))
        self.dense_encode = DenseEncoderLayer(n_down, bottleneck_size, z_dim)
        self.classifier = torch.nn.Linear(z_dim, n_classes)
        self.apply(weights_init)
        self.n_down = n_down
        self.bottleneck_size = bottleneck_size

    def forward(self, input):
        """Return class logits for a batch of inputs."""
        h = self.encode(input).mode()
        # The dense encoder must have collapsed spatial dims to 1x1.
        assert h.shape[2] == h.shape[3] == 1
        h = h[:, :, 0, 0]
        h = self.classifier(h)
        return h

    def encode(self, input):
        """Run the feature stack + dense encoder; wrap in a Distribution."""
        h = input
        for layer in self.feature_layers:
            h = layer(h)
        h = self.dense_encode(h)
        return Distribution(h)
|
import keras
from keras import models, layers
from keras import backend
class CNN(models.Sequential):
    """Small convnet: two 3x3 conv layers, max-pool, dropout, then a dense
    softmax head over `numOfClass` classes; compiled with RMSprop and
    categorical cross-entropy."""
    def __init__(self, inputShape, numOfClass):
        super().__init__()
        # Only the first layer needs the input shape.
        self.add(layers.Conv2D(32, kernel_size = (3, 3),
                               activation = 'relu',
                               input_shape = inputShape))
        self.add(layers.Conv2D(32, kernel_size = (3, 3),
                               activation = 'relu'))
        self.add(layers.MaxPooling2D(pool_size = (2, 2)))
        self.add(layers.Dropout(0.25))
        self.add(layers.Flatten())
        self.add(layers.Dense(128, activation = 'relu'))
        self.add(layers.Dropout(0.5))
        self.add(layers.Dense(numOfClass, activation = 'softmax'))
        self.compile(loss=keras.losses.categorical_crossentropy,
                     optimizer='rmsprop',
                     metrics=['accuracy'])
from django import forms
from django.contrib.auth.models import User
from .models import *
from django.contrib.auth.forms import UserCreationForm, AuthenticationForm, UsernameField
from django.contrib.admin.widgets import AdminDateWidget
from django.forms.fields import DateField
from django.forms import CharField, ModelMultipleChoiceField, ModelChoiceField
from tour.models import *
from activity.models import *
from training.models import *
class UserRegisterForm(UserCreationForm):
    """Sign-up form: username plus password/confirmation on Django's User."""
    # username = UsernameField(
    #     widget = forms.TextInput(
    #         attrs={
    #             'placeholder' : 'Username',
    #             'id': 'username'
    #             # 'class' : 'form-control',
    #         }))
    # password1 = forms.CharField(
    #     widget = forms.PasswordInput(
    #         attrs={
    #             'placeholder' : 'Password',
    #         }))
    # password2 = forms.CharField(
    #     widget = forms.PasswordInput(
    #         attrs={
    #             'placeholder' : 'Confirm password',
    #         }))
    class Meta:
        model = User
        fields = ['username', 'password1', 'password2']
class OrganizerRegisterForm(forms.ModelForm):
    """Full registration form for an Organizer profile."""
    # email = forms.EmailField(
    #     widget = forms.EmailInput(
    #         attrs={
    #             'placeholder' : 'Email',
    #         }))
    # avatar = forms.ImageField(
    #     widget = forms.FileInput(
    #         attrs={
    #             'class': 'input-file'
    #         }
    #     )
    # )
    class Meta:
        model = Organizer
        fields = ['name', 'email', 'descriptionen', 'type', 'about', 'website', 'facebook', 'instagram', 'adress', 'number1', 'number2', 'avatar', 'cover']
class LoginForm(AuthenticationForm):
    """Login form (username + password).

    NOTE(review): AuthenticationForm is not a ModelForm, so this Meta is
    presumably informational only — confirm it has any effect.
    """
    # username = forms.CharField(widget=forms.TextInput(attrs={'class': 'login-txt'}))
    # password = forms.CharField(widget=forms.TextInput(attrs={'class': 'login-psw'}))
    class Meta:
        model = User
        fields = ['username', 'password']
class ProfileEditForm(forms.ModelForm):
    """Edit a subset of Organizer profile fields."""
    class Meta:
        model = Organizer
        fields = ['name', 'email', 'type', 'about', 'adress', 'number1', 'number2', 'avatar']
class OrganizerImageRegisterForm(forms.ModelForm):
    """Single gallery image upload for an Organizer."""
    class Meta:
        model = OrganizerImage
        fields = ('image', )
class OrganizerTourForm(forms.ModelForm):
    """Create/edit form for a Tour; the date range uses dropdown widgets."""
    # type = ModelMultipleChoiceField(queryset=Type.objects.all(),required=False)
    datefrom = forms.DateField(widget = forms.SelectDateWidget())
    dateto = forms.DateField(widget = forms.SelectDateWidget())
    class Meta:
        model = Tour
        fields = ['title', 'descriptionen', 'type', 'country', 'city', 'price', 'pricefor', 'currency', 'durationday', 'durationnight', 'datefrom', 'dateto', 'avatar', 'cover', 'guide', ]
class OrganizerTourDetailForm(forms.ModelForm):
    """Title/text detail section for a Tour."""
    class Meta:
        model = TourDetailEN
        fields = ('title', 'text', )
class OrganizerTourImageForm(forms.ModelForm):
    """Single gallery image upload for a Tour."""
    class Meta:
        model = TourImage
        fields = ('image', )
class OrganizerTourScheduleForm(forms.ModelForm):
    """Schedule image upload for a Tour."""
    class Meta:
        model = TourSchedule
        fields = ('image', )
class OrganizerActivityForm(forms.ModelForm):
    """Create/edit form for an Activity."""
    class Meta:
        model = Activity
        fields = ['title', 'descriptionen', 'type', 'country', 'city', 'price', 'pricefor', 'currency', 'durationday', 'durationnight', 'datefrom', 'dateto', 'avatar', 'cover', ]
class OrganizerActivityDetailForm(forms.ModelForm):
    """Title/text detail section for an Activity."""
    class Meta:
        model = ActivityDetailEN
        fields = ('title', 'text', )
class OrganizerActivityImageForm(forms.ModelForm):
    """Single gallery image upload for an Activity."""
    class Meta:
        model = ActivityImage
        fields = ('image', )
class OrganizerActivityScheduleForm(forms.ModelForm):
    """Schedule image upload for an Activity."""
    class Meta:
        model = ActivitySchedule
        fields = ('image', )
class OrganizerTrainingForm(forms.ModelForm):
    """Create/edit form for a Training."""
    class Meta:
        model = Training
        fields = ['title', 'descriptionen', 'type', 'country', 'city', 'price', 'pricefor', 'currency', 'durationday', 'durationnight', 'datefrom', 'dateto', 'avatar', 'cover', ]
class OrganizerTrainingDetailForm(forms.ModelForm):
    """Title/text detail section for a Training."""
    class Meta:
        model = TrainingDetailEN
        fields = ('title', 'text', )
class OrganizerTrainingImageForm(forms.ModelForm):
    """Single gallery image upload for a Training."""
    class Meta:
        model = TrainingImage
        fields = ('image', )
class OrganizerTrainingScheduleForm(forms.ModelForm):
    """Schedule image upload for a Training."""
    class Meta:
        model = TrainingSchedule
        fields = ('image', )
# class ProfileEditForm(forms.ModelForm):
# class Meta:
# model = Organizer
# fields = ['name', 'email', 'descriptionen', 'type', 'about', 'website', 'facebook', 'instagram', 'adress', 'number1', 'number2', 'avatar', 'cover']
|
def do():
    """Read a month number from stdin and print its season twice,
    once via list membership and once via a dict lookup.
    (User-facing strings are in Russian and left untouched.)"""
    month = int(input('Введите номер месяца: '))
    print('Решение через списки')
    # Month numbers grouped by season.
    winter = [1, 2, 12]
    spring = [3, 4, 5]
    summer = [6, 7, 8]
    autumn = [9, 10, 11]
    if month in winter:
        print('Зима')
    elif month in spring:
        print('Весна')
    elif month in summer:
        print('Лето')
    elif month in autumn:
        print('Осень')
    else:
        print('Похоже, что это не месяц')
    print('Решение через словарь')
    # Same mapping as a dict; note: prints nothing for an invalid month.
    seasons = {'Зима': [1, 2, 12], 'Весна': [3, 4, 5], 'Лето': [6, 7, 8], 'Осень': [9, 10, 11]}
    for key in seasons:
        if month in seasons[key]:
            print(key)
if __name__ == '__main__':
    do()  # script entry point
import cv2
import time
from invoke import run
# Keystroke sent to the Chrome window (space — presumably the jump key
# of the browser dino game; confirm).
cmd = "xdotool search --onlyvisible --class 'Chrome' windowfocus key 'space'"
cap = cv2.VideoCapture(0)  # default webcam
# Region of interest in the frame: top-left and bottom-right corners.
r_t = (70,200)
r_b = (200,370)
last_jump = time.time()
while (True):
    _, frame = cap.read()
    frame_copy = frame.copy()
    frame = cv2.cvtColor(frame, cv2.COLOR_RGB2GRAY)
    cv2.rectangle(frame_copy, r_t,r_b, (0,255,0), 2)
    cv2.imshow("Original_With_ROI", frame_copy)
    # Crop the ROI (rows = y range, cols = x range) and binarize it.
    roi = frame[200:370,70:200]
    ret,thresh1 = cv2.threshold(roi,100,255,cv2.THRESH_BINARY)
    thresh1 = 255 - thresh1;
    im2, contours, hierarchy = cv2.findContours(thresh1,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)
    cv2.drawContours(frame_copy, contours, -1, (0,255,0), 3)
    if len(contours) != 0:
        x,y,w,h = 0,0,0,0
        # Largest dark blob in the ROI.
        c = max(contours, key = cv2.contourArea)
        x,y,w,h = cv2.boundingRect(c)
        diff = time.time() - last_jump
        # Trigger at most every 0.5 s when the blob is small enough.
        # NOTE(review): `&` is bitwise here — `and` was likely intended
        # (it works only because both operands are bools).
        if(h*w < 12000) & (diff > 0.5) :
            print("jump",diff)
            run(cmd, hide=True, warn=True)
            last_jump = time.time()
    k = cv2.waitKey(33)
    if k == 27:  # ESC quits
        cv2.destroyAllWindows()
        break
|
r"""
Composition Statistics (:mod:`skbio.stats.composition`)
=======================================================
.. currentmodule:: skbio.stats.composition
This module provides functions for compositional data analysis.
Many 'omics datasets are inherently compositional - meaning that they
are best interpreted as proportions or percentages rather than
absolute counts.
Formally, :math:`x` is a composition if :math:`\sum_{i=0}^D x_{i} = c`
and :math:`x_{i} > 0`, :math:`1 \leq i \leq D` and :math:`c` is a real
valued constant and there are :math:`D` components for each
composition. In this module :math:`c=1`. Compositional data can be
analyzed using Aitchison geometry. [1]_
However, in this framework, standard real Euclidean operations such as
addition and multiplication no longer apply. Only operations such as
perturbation and power can be used to manipulate this data.
This module allows two styles of manipulation of compositional data.
Compositional data can be analyzed using perturbation and power
operations, which can be useful for simulation studies. The
alternative strategy is to transform compositional data into the real
space. Right now, the centre log ratio transform (clr) and
the isometric log ratio transform (ilr) [2]_ can be used to accomplish
this. This transform can be useful for performing standard statistical
tools such as parametric hypothesis testing, regressions and more.
The major caveat of using this framework is dealing with zeros. In
the Aitchison geometry, only compositions with nonzero components can
be considered. The multiplicative replacement technique [3]_ can be
used to substitute these zeros with small pseudocounts without
introducing major distortions to the data.
Functions
---------
.. autosummary::
:toctree: generated/
closure
multiplicative_replacement
perturb
perturb_inv
power
inner
clr
clr_inv
ilr
ilr_inv
centralize
References
----------
.. [1] V. Pawlowsky-Glahn. "Lecture Notes on Compositional Data Analysis"
.. [2] J. J. Egozcue. "Isometric Logratio Transformations for
Compositional Data Analysis"
.. [3] J. A. Martin-Fernandez. "Dealing With Zeros and Missing Values in
Compositional Data Sets Using Nonparametric Imputation"
Examples
--------
>>> import numpy as np
>>> from skbio.stats.composition import perturb
Consider a very simple environment with only 3 species. The species
in the environment are equally distributed and their proportions are
equivalent:
>>> otus = np.array([1./3, 1./3., 1./3])
Suppose that an antibiotic kills off half of the population for the
first two species, but doesn't harm the third species. Then the
perturbation vector would be as follows
>>> antibiotic = np.array([1./2, 1./2, 1])
And the resulting perturbation would be
>>> perturb(otus, antibiotic)
array([ 0.25, 0.25, 0.5 ])
"""
# ----------------------------------------------------------------------------
# Copyright (c) 2013--, scikit-bio development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
import numpy as np
import scipy.stats as ss
from skbio.diversity.alpha import lladser_pe, robbins
def closure(mat):
    """Normalise each composition (row) so its components add up to 1.

    Parameters
    ----------
    mat : array_like
        a matrix of proportions where
        rows = compositions
        columns = components; all entries must be non-negative

    Returns
    -------
    array_like, np.float64
        The row-normalised matrix; a single composition is squeezed
        back to one dimension.

    Raises
    ------
    ValueError
        If any entry is negative, or the input has more than two
        dimensions.

    Examples
    --------
    >>> import numpy as np
    >>> from skbio.stats.composition import closure
    >>> X = np.array([[2, 2, 6], [4, 4, 2]])
    >>> closure(X)
    array([[ 0.2,  0.2,  0.6],
           [ 0.4,  0.4,  0.2]])
    """
    comps = np.atleast_2d(mat)
    if np.any(comps < 0):
        raise ValueError("Cannot have negative proportions")
    if comps.ndim > 2:
        raise ValueError("Input matrix can only have two dimensions or less")
    row_totals = comps.sum(axis=1, keepdims=True)
    return (comps / row_totals).squeeze()
def multiplicative_replacement(mat, delta=None):
    r"""Replace all zeros with small non-zero values

    It uses the multiplicative replacement strategy [1]_ ,
    replacing zeros with a small positive :math:`\delta`
    and ensuring that the compositions still add up to 1.

    Parameters
    ----------
    mat: array_like
        a matrix of proportions where
        rows = compositions and
        columns = components
    delta: float, optional
        a small number to be used to replace zeros
        If delta is not specified, then the default delta is
        :math:`\delta = \frac{1}{N^2}` where :math:`N`
        is the number of components

    Returns
    -------
    numpy.ndarray, np.float64
        A matrix of proportions where all of the values
        are nonzero and each composition (row) adds up to 1

    Raises
    ------
    ValueError
        If ``delta`` is so large that scaling the non-zero entries to
        keep the row sum at 1 would require negative proportions.

    References
    ----------
    .. [1] J. A. Martin-Fernandez. "Dealing With Zeros and Missing Values in
       Compositional Data Sets Using Nonparametric Imputation"

    Examples
    --------
    >>> import numpy as np
    >>> from skbio.stats.composition import multiplicative_replacement
    >>> X = np.array([[.2,.4,.4, 0],[0,.5,.5,0]])
    >>> multiplicative_replacement(X)
    array([[ 0.1875,  0.375 ,  0.375 ,  0.0625],
           [ 0.0625,  0.4375,  0.4375,  0.0625]])
    """
    mat = closure(mat)
    z_mat = (mat == 0)

    num_feats = mat.shape[-1]
    # number of zeros per composition
    tot = z_mat.sum(axis=-1, keepdims=True)

    if delta is None:
        delta = (1. / num_feats)**2

    # fraction of mass left for the non-zero entries after substitution
    zcnts = 1 - tot * delta
    # Guard: a delta large enough that tot*delta > 1 would force the
    # non-zero part of the composition to become negative.
    if np.any(zcnts < 0):
        raise ValueError("The multiplicative replacement created negative "
                         "proportions. Consider using a smaller delta.")
    mat = np.where(z_mat, delta, zcnts * mat)
    return mat.squeeze()
def coverage_replacement(count_mat, uncovered_estimator=robbins):
    r"""Replace all zeros with small non-zero values
    using a coverage estimator

    It uses the multiplicative replacement strategy [1]_ ,
    replacing zeros with a small positive :math:`\delta`
    and ensuring that the compositions still add up to 1.
    However, :math:`\delta` is determined using a coverage
    estimator such that all of the non-zero values add up
    to the coverage probability

    Parameters
    ----------
    count_mat: array_like
        a matrix of counts where
        rows = samples and
        columns = components
    uncovered_estimator : function, optional
        function to estimate the uncovered probability
        (default: ``skbio.diversity.alpha.robbins``)

    Returns
    -------
    numpy.ndarray, np.float64
        A matrix of proportions where all of the values
        are nonzero and each composition (row) adds up to 1
    """
    mat = closure(count_mat)
    mat = np.atleast_2d(mat)
    z_mat = (mat == 0)
    # number of zero entries in each sample
    tot = z_mat.sum(axis=-1)

    def func(x):
        # Clamp the estimated uncovered probability into (0, 1) so the
        # replacement below never zeroes out or inverts the observed mass.
        up = uncovered_estimator(x)
        if up >= 1:
            return 1 - 0.999999 / x.sum()
        elif up <= 0:
            return 0.999999 / x.sum()
        else:
            return up

    # per-sample uncovered probability (estimated from the raw counts,
    # not the closed proportions)
    p_unobs = np.apply_along_axis(func,
                                  -1, count_mat)
    # NOTE(review): for a 1-d count_mat, apply_along_axis returns a 0-d
    # result and len(p_unobs) below would fail -- this function appears to
    # expect a 2-d input; confirm against callers.
    delta = np.zeros(len(p_unobs))
    for i in range(len(p_unobs)):
        # spread the uncovered probability evenly across that sample's zeros;
        # a sample without zeros gets delta 0 (avoids division by zero)
        if tot[i] == 0:
            delta[i] = 0
        else:
            delta[i] = p_unobs[i] / tot[i]
    # delta = p_unobs / tot
    p_obs = 1 - p_unobs
    # broadcast the per-sample scalars across all components
    p_obs = np.repeat(p_obs[np.newaxis, :],
                      mat.shape[-1], 0).T
    delta = np.repeat(delta[np.newaxis, :],
                      mat.shape[-1], 0).T
    # zeros get delta, non-zeros are rescaled by the observed probability
    rounded_zeros = np.multiply(z_mat, delta)
    non_zeros = np.multiply(mat, p_obs)
    mat = rounded_zeros + non_zeros
    return mat.squeeze()
def perturb(x, y):
    r"""
    Performs the perturbation operation.

    This operation is defined as

    .. math::
        x \oplus y = C[x_1 y_1, \ldots, x_D y_D]

    where :math:`C[x]` is the closure operation, for :math:`D` dimensional
    compositions.

    Parameters
    ----------
    x : array_like, float
        a matrix of proportions where
        rows = compositions and
        columns = components
    y : array_like, float
        a matrix of proportions where
        rows = compositions and
        columns = components

    Returns
    -------
    numpy.ndarray, np.float64
        A matrix of proportions where all of the values
        are nonzero and each composition (row) adds up to 1

    Examples
    --------
    >>> import numpy as np
    >>> from skbio.stats.composition import perturb
    >>> x = np.array([.1,.3,.4, .2])
    >>> y = np.array([1./6,1./6,1./3,1./3])
    >>> perturb(x,y)
    array([ 0.0625,  0.1875,  0.5   ,  0.25  ])
    """
    cx = closure(x)
    cy = closure(y)
    return closure(cx * cy)
def perturb_inv(x, y):
    r"""
    Performs the inverse perturbation operation.

    This operation is defined as

    .. math::
        x \ominus y = C[x_1 y_1^{-1}, \ldots, x_D y_D^{-1}]

    where :math:`C[x]` is the closure operation, for :math:`D` dimensional
    compositions.

    Parameters
    ----------
    x : array_like
        a matrix of proportions where
        rows = compositions and
        columns = components
    y : array_like
        a matrix of proportions where
        rows = compositions and
        columns = components

    Returns
    -------
    numpy.ndarray, np.float64
        A matrix of proportions where all of the values
        are nonzero and each composition (row) adds up to 1

    Examples
    --------
    >>> import numpy as np
    >>> from skbio.stats.composition import perturb_inv
    >>> x = np.array([.1,.3,.4, .2])
    >>> y = np.array([1./6,1./6,1./3,1./3])
    >>> perturb_inv(x,y)
    array([ 0.14285714,  0.42857143,  0.28571429,  0.14285714])
    """
    cx = closure(x)
    cy = closure(y)
    return closure(cx / cy)
def power(x, a):
    r"""
    Performs the power operation.

    This operation is defined as follows

    .. math::
        x \odot a = C[x_1^a, \ldots, x_D^a]

    where :math:`C[x]` is the closure operation, for :math:`D` dimensional
    compositions.

    Parameters
    ----------
    x : array_like, float
        a matrix of proportions where
        rows = compositions and
        columns = components
    a : float
        a scalar float

    Returns
    -------
    numpy.ndarray, np.float64
        A matrix of proportions where all of the values
        are nonzero and each composition (row) adds up to 1

    Examples
    --------
    >>> import numpy as np
    >>> from skbio.stats.composition import power
    >>> x = np.array([.1,.3,.4, .2])
    >>> power(x, .1)
    array([ 0.23059566,  0.25737316,  0.26488486,  0.24714631])
    """
    base = closure(x)
    powered = base ** a
    return closure(powered).squeeze()
def inner(x, y):
    r"""
    Calculates the Aitchison inner product.

    This inner product is defined as follows

    .. math::
        \langle x, y \rangle_a =
        \frac{1}{2D} \sum\limits_{i=1}^{D} \sum\limits_{j=1}^{D}
        \ln\left(\frac{x_i}{x_j}\right) \ln\left(\frac{y_i}{y_j}\right)

    Parameters
    ----------
    x : array_like
        a matrix of proportions where
        rows = compositions and
        columns = components
    y : array_like
        a matrix of proportions where
        rows = compositions and
        columns = components

    Returns
    -------
    numpy.ndarray
        inner product result

    Examples
    --------
    >>> import numpy as np
    >>> from skbio.stats.composition import inner
    >>> x = np.array([.1, .3, .4, .2])
    >>> y = np.array([.2, .4, .2, .2])
    >>> inner(x, y)
    0.21078524737545556
    """
    # The Aitchison inner product equals the Euclidean inner product of
    # the clr-transformed compositions.
    clr_x = clr(closure(x))
    clr_y = clr(closure(y))
    return clr_x.dot(clr_y.T)
def norm(x):
    r"""
    Calculates the Aitchison norm

    The norm is calculated as follows

    .. math::
        \norm{x}_a = \sqrt{\langle x, x \rangle_a}

    Parameters
    ----------
    x : array_like
        a matrix of proportions where
        rows = compositions and
        columns = components

    Returns
    -------
    numpy.ndarray
        list of norms

    Notes
    -----
    The docstring is now a raw string: previously ``\norm`` embedded a
    literal newline in the rendered documentation.
    """
    # NOTE(review): for a single 1-d composition, inner() returns a scalar
    # and np.diag would fail -- looks like this expects 2-d input; confirm.
    return np.sqrt(np.diag(inner(x, x)))
def distance(x, y):
    r"""
    Calculates the Aitchison distance. This is a measure
    of distance or dissimiliarity between two compositions

    The norm is calculated as follows

    .. math::
        d_a(x, y) = \norm{ x \ominus y }

    Parameters
    ----------
    x : array_like
        a matrix of proportions where
        rows = compositions and
        columns = components
    y : array_like
        a matrix of proportions where
        rows = compositions and
        columns = components

    Returns
    -------
    numpy.ndarray
        list of distances

    Notes
    -----
    The docstring is now a raw string: previously ``\norm`` embedded a
    literal newline in the rendered documentation.
    """
    # distance is the Aitchison norm of the inverse perturbation x ominus y
    return norm(perturb_inv(x, y))
def clr(mat):
    r"""
    Performs centre log ratio transformation.

    This function transforms compositions from Aitchison geometry to
    the real space. It is defined for a composition :math:`x` as follows:

    .. math::
        clr(x) = \ln\left[\frac{x_1}{g_m(x)}, \ldots, \frac{x_D}{g_m(x)}\right]

    where :math:`g_m(x) = (\prod\limits_{i=1}^{D} x_i)^{1/D}` is the geometric
    mean of :math:`x`.

    Parameters
    ----------
    mat : array_like, float
        a matrix of proportions where
        rows = compositions and
        columns = components

    Returns
    -------
    numpy.ndarray
        clr transformed matrix

    Examples
    --------
    >>> import numpy as np
    >>> from skbio.stats.composition import clr
    >>> x = np.array([.1, .3, .4, .2])
    >>> clr(x)
    array([-0.79451346,  0.30409883,  0.5917809 , -0.10136628])

    Notes
    -----
    If there are zeros present, only the nonzero components are considered
    """
    mat = closure(mat)
    lmat = np.atleast_2d(np.log(mat))
    # If zeros are present, only consider the nonzero components when
    # computing the per-row geometric mean.
    # ``int`` instead of ``np.int``: the alias was removed in NumPy 1.24.
    idx = (lmat != -np.inf).astype(int)
    lmat[lmat == -np.inf] = 0
    gm = np.diag(lmat.dot(idx.T) / idx.sum(axis=1))
    gm = np.atleast_2d(gm).T
    res = lmat - gm
    # Zero components stay zero.  Use a 2-d mask so this also works when
    # ``closure`` squeezed a single composition down to 1-d (the original
    # 1-d mask raised an IndexError against the 2-d ``res``).
    res[np.atleast_2d(mat) == 0] = 0
    return res.squeeze()
def clr_inv(mat):
    r"""
    Performs inverse centre log ratio transformation.

    This function transforms compositions from the real space back to
    Aitchison geometry:

    .. math::
        clr^{-1}(x) = C[\exp( x_1, \ldots, x_D)]

    Parameters
    ----------
    mat : numpy.ndarray, float
        a matrix of real values where
        rows = transformed compositions and
        columns = components

    Returns
    -------
    numpy.ndarray
        inverse clr transformed matrix

    Examples
    --------
    >>> import numpy as np
    >>> from skbio.stats.composition import clr_inv
    >>> x = np.array([.1, .3, .4, .2])
    >>> clr_inv(x)
    array([ 0.21383822,  0.26118259,  0.28865141,  0.23632778])
    """
    exponentiated = np.exp(mat)
    return closure(exponentiated)
def ilr(mat, basis=None, check=True):
    r"""
    Performs isometric log ratio transformation.

    Transforms compositions from the Aitchison simplex to
    :math:`\mathbb{R}^{D-1}`:

    .. math::
        ilr(x) =
        [\langle x, e_1 \rangle_a, \ldots, \langle x, e_{D-1} \rangle_a]

    where :math:`[e_1,\ldots,e_{D-1}]` is an orthonormal basis in the simplex.
    If no basis is given, the J. J. Egozcue orthonormal basis derived from
    Gram-Schmidt orthogonalization is used.

    Parameters
    ----------
    mat: numpy.ndarray
        a matrix of proportions where
        rows = compositions and
        columns = components
    basis: numpy.ndarray, optional
        orthonormal basis for Aitchison simplex
        defaults to J.J.Egozcue orthonormal basis
    check: bool, optional
        verify that a user-supplied basis is orthonormal

    Examples
    --------
    >>> import numpy as np
    >>> from skbio.stats.composition import ilr
    >>> x = np.array([.1, .3, .4, .2])
    >>> ilr(x)
    array([-0.7768362 , -0.68339802,  0.11704769])
    """
    comps = closure(mat)
    if basis is not None:
        # only validate user-supplied bases; the default is orthonormal
        # by construction
        if check:
            _check_orthogonality(basis)
    else:
        basis = clr_inv(_gram_schmidt_basis(comps.shape[-1]))
    return inner(comps, basis)
def ilr_inv(mat, basis=None, check=True):
    r"""
    Performs inverse isometric log ratio transform.

    Transforms coordinates from :math:`\mathbb{R}^{D-1}` back to the
    Aitchison simplex:

    .. math::
        ilr^{-1}(x) = \bigoplus\limits_{i=1}^{D-1} x \odot e_i

    where :math:`[e_1,\ldots, e_{D-1}]` is an orthonormal basis in the simplex.
    If no basis is given, the J. J. Egozcue orthonormal basis derived from
    Gram-Schmidt orthogonalization is used.

    Parameters
    ----------
    mat: numpy.ndarray
        a matrix of transformed proportions where
        rows = compositions and
        columns = components
    basis: numpy.ndarray, optional
        orthonormal basis for Aitchison simplex
        defaults to J.J.Egozcue orthonormal basis
    check: bool, optional
        verify that a user-supplied basis is orthonormal

    Examples
    --------
    >>> import numpy as np
    >>> from skbio.stats.composition import ilr
    >>> x = np.array([.1, .3, .6,])
    >>> ilr_inv(x)
    array([ 0.34180297,  0.29672718,  0.22054469,  0.14092516])
    """
    if basis is not None:
        if check:
            _check_orthogonality(basis)
    else:
        # transformed coordinates have D-1 components for a D-part basis
        basis = _gram_schmidt_basis(mat.shape[-1] + 1)
    return clr_inv(np.dot(mat, basis))
def centralize(mat):
    r"""Center data around its geometric average.

    Parameters
    ----------
    mat : array_like, float
        a matrix of proportions where
        rows = compositions and
        columns = components

    Returns
    -------
    numpy.ndarray
        centered composition matrix

    Examples
    --------
    >>> import numpy as np
    >>> from skbio.stats.composition import centralize
    >>> X = np.array([[.1,.3,.4, .2],[.2,.2,.2,.4]])
    >>> centralize(X)
    array([[ 0.17445763,  0.30216948,  0.34891526,  0.17445763],
           [ 0.32495488,  0.18761279,  0.16247744,  0.32495488]])
    """
    comps = closure(mat)
    # geometric mean of each component across all compositions
    center = ss.gmean(comps, axis=0)
    return perturb_inv(comps, center)
def phylogenetic_basis(treenode):
    """
    Determines the basis based on phylogenetic tree

    Parameters
    ----------
    treenode : skbio.TreeNode
        Phylogenetic tree.  MUST be a bifurcating tree

    Returns
    -------
    basis : dict, {str, np.array}
        Returns a set of orthonormal bases in the Aitchison simplex
        corresponding to the phylogenetic tree. The order of the
        basis is index by the level order of the internal nodes

    Raises
    ------
    ValueError
        The tree doesn't contain two branches
    ValueError
        The tree doesn't have unique node names

    Examples
    --------
    >>> from skbio.stats.composition import phylogenetic_basis
    >>> from six import StringIO
    >>> from skbio import TreeNode
    >>> tree = "((b,c)a, d)root;"
    >>> t = TreeNode.read(StringIO(tree))
    >>> phylogenetic_basis(t)
    array([[ 0.62985567,  0.18507216,  0.18507216],
           [ 0.28399541,  0.57597535,  0.14002925]])
    """
    nodes = [n for n in treenode.levelorder(include_self=True)]
    D = len(nodes)
    n_tips = sum([n.is_tip() for n in nodes])
    # keeps track of k, r, s, t for all of the internal nodes
    # (rows: k, r, s, t; one column per internal node in level order)
    history = np.zeros((4, D-1))
    basis = np.zeros((n_tips-1, n_tips))
    # Fill in r and s for all of the nodes, walking the level order
    # backwards so children are processed before their parents
    for i in range(1, D):
        j = D-i
        # left or right child
        child_idx = int(nodes[j].parent.children[0] == nodes[j])
        # index of the parent in a level-order complete-binary layout
        parent_idx = (j+1)//2-1
        if len(nodes[j].children) == 0:
            # a tip contributes a count of 1 to its parent's r or s
            history[child_idx+1, parent_idx] = 1
        else:
            # number of tips in child node
            # NOTE(review): ``j`` can equal D-1 here while ``history`` has
            # only D-1 columns -- confirm this branch is unreachable for
            # the last level-order node.
            parent_history = history[1, j] + history[2, j]
            history[child_idx+1, parent_idx] = parent_history
    # Fill in k and t for all of the nodes
    # and find the basis
    idx = 0
    for n in nodes:
        if len(n.children) == 0:
            idx += 1
            continue
        if len(n.children) != 2:
            raise ValueError("Not a bifurcating tree!")
        # NOTE(review): this reuses the ``j`` left over from the previous
        # loop rather than one derived from ``idx`` -- looks like a bug;
        # verify against the intended parent indexing.
        parent_idx = (j+1)//2-1
        r = history[1, idx]
        s = history[2, idx]
        # get parent values
        _k = history[0, parent_idx]
        _r = history[1, parent_idx]
        _s = history[2, parent_idx]
        _t = history[3, parent_idx]
        # balance-element coefficients for the r left tips and s right tips
        a = np.sqrt(s / (r*(r+s)))
        b = -1*np.sqrt(r / (s*(r+s)))
        if n.parent is None:
            # NOTE(review): ``r`` and ``s`` are numpy floats pulled from
            # ``history``; list repetition ``[a]*r`` requires ints --
            # confirm this works with the numpy version in use.
            basis[idx, :] = clr_inv([a]*r + [b]*s)
            idx += 1
            continue
        if n.parent.children[0] == n:  # right child
            k = _r + _k
            t = _t
        else:  # left child
            k = _k
            t = _s + _t
        # pad with k zeros on the left and t zeros on the right
        basis[idx, :] = clr_inv([0]*k + [a]*r + [b]*s + [0]*t)
        history[0, idx] = k
        history[1, idx] = r
        history[2, idx] = s
        history[3, idx] = t
        idx += 1
    return basis
def _merge_two_dicts(x, y):
'''
Given two dicts, merge them into a new dict as a shallow copy.
'''
z = x.copy()
z.update(y)
if len(z) < len(x) + len(y):
raise ValueError("Non unique node names!")
return z
def _gram_schmidt_basis(n):
"""
Builds clr transformed basis derived from
gram schmidt orthogonalization
Parameters
----------
n : int
Dimension of the Aitchison simplex
"""
basis = np.zeros((n, n-1))
for j in range(n-1):
i = j + 1
e = np.array([(1/i)]*i + [-1] +
[0]*(n-i-1))*np.sqrt(i/(i+1))
basis[:, j] = e
return basis.T
def _check_orthogonality(basis):
    """
    Checks to see if basis is truly orthonormal in the
    Aitchison simplex

    Parameters
    ----------
    basis: numpy.ndarray
        basis in the Aitchison simplex

    Raises
    ------
    ValueError
        if the Gram matrix of the basis is not the identity
    """
    gram = inner(basis, basis)
    expected = np.identity(len(basis))
    if not np.allclose(gram, expected, rtol=1e-4, atol=1e-6):
        raise ValueError("Aitchison basis is not orthonormal")
|
#!/usr/bin/python
# Copyright (C) 2009, Sugar Labs
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""Activity information microformat parser.
Activity information is embedded in HTML/XHTML/XML pages using a
Resource Description Framework (RDF) http://www.w3.org/RDF/ .
An example::
<?xml version="1.0" encoding="UTF-8"?>
<RDF:RDF xmlns:RDF="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:em="http://www.mozilla.org/2004/em-rdf#"><RDF:Description about="urn:mozilla:extension:bounce">
<em:updates>
<RDF:Seq>
<RDF:li resource="urn:mozilla:extension:bounce:7"/>
</RDF:Seq>
</em:updates>
</RDF:Description>
<RDF:Description about="urn:mozilla:extension:bounce:7">
<em:version>7</em:version>
<em:targetApplication>
<RDF:Description>
<em:id>{3ca105e0-2280-4897-99a0-c277d1b733d2}</em:id>
<em:minVersion>0.82</em:minVersion>
<em:maxVersion>0.84</em:maxVersion>
<em:updateLink>http://activities.sugarlabs.org/downloads/file/25986/bounce-7.xo</em:updateLink>
<em:updateHash>sha256:816a7c43b4f1ea4769c61c03fea24842ec5fa566b7d41626ffc52ec37b37b6c5</em:updateHash>
</RDF:Description>
</em:targetApplication>
</RDF:Description></RDF:RDF>
"""
import urllib2
from urllib2 import HTTPError
import socket
from xml.etree.ElementTree import ElementTree, XML
from jarabe import config
class ASLOParser():
    """XML parser to pull out data expressed in our aslo format.

    After ``parse()``, ``version`` holds the advertised version (or 0)
    and ``link`` the download URL (or None).
    """

    def __init__(self, xml_data):
        # xml_data: string containing the RDF update descriptor
        self.elem = XML(xml_data)

    def parse(self):
        """Extract version and update link from the descriptor.

        Falls back to ``version = 0`` / ``link = None`` when either
        element is missing.
        """
        try:
            self.version = self.elem.find(".//{http://www.mozilla.org/2004/em-rdf#}version").text
            self.link = self.elem.find(".//{http://www.mozilla.org/2004/em-rdf#}updateLink").text
        except AttributeError:
            # find() returns None for a missing element; the bare except
            # here previously hid every other error as well.
            self.version = 0
            self.link = None
def parse_aslo(xml_data):
    """Parse the activity information embedded in the given string
    containing XML data. Returns a tuple of (activity version, url).
    """
    parser = ASLOParser(xml_data)
    parser.parse()
    return parser.version, parser.link
def parse_url(url):
    """Parse the activity information at the given URL. Returns the same
    information as `parse_xml` does, and raises the same exceptions.

    The `urlopen_args` can be any keyword arguments accepted by
    `bitfrost.util.urlrange.urlopen`."""
    # NOTE(review): the docstring mentions `urlopen_args`, but no such
    # parameter exists here -- likely left over from an older API.
    response = urllib2.urlopen(url)
    return parse_aslo(response.read())
def fetch_update_size(url):
    """Return the Content-Length header value for `url` as a string,
    or 0 when the file cannot be reached.
    """
    try:
        site = urllib2.urlopen(url)
        meta = site.info()
        # getheaders() returns a list of values; take the first one
        return meta.getheaders("Content-Length")[0]
    except (HTTPError, IOError, socket.error):
        return 0  # there is no update file at url.
def fetch_update_info(bundle):
    """Return a tuple of new version, url for new version.

    All the information about the new version is `None` if no newer
    update can be found.
    """
    # Query the ASLO update service for this bundle id at the running
    # Sugar version.
    url = 'http://activities.sugarlabs.org/services/update.php?id=' + bundle.get_bundle_id() + '&appVersion=' + config.version
    new_version, new_url = parse_url(url)
    if new_url is None:
        return 0, None  # no updates
    return new_version, new_url
#########################################################################
# Self-test code.
def _main():
    """Self-test: query the live ASLO service for the bounce activity."""
    # Python 2 print statement; performs real network I/O.
    print parse_url('http://activities.sugarlabs.org/services/update.php?id=bounce')

if __name__ == '__main__': _main ()
|
import re
from DisplayFile import DisplayFile
from Figuras import Poligono
class DescritorOBj:
    """Imports and exports the display file in a Wavefront-OBJ-like format.

    ``v`` lines hold vertex coordinates, ``o`` lines object names, and
    ``p`` / ``l`` lines reference vertices by index to build points,
    segments and polygons.
    """

    def __init__(self):
        self.DisplayFile = DisplayFile()

    def importFile(self, path):
        """Load objects from *path*, replacing the display-file contents."""
        self.DisplayFile.limpar()
        vertices = dict()
        vertice_counter = 0
        nome = ""
        # Keep the handle on ``self.file`` for backwards compatibility, but
        # guarantee it is closed even if parsing fails (it used to leak).
        self.file = open(path, "r+")  # read and write
        try:
            for line in self.file:
                if line[0] == "v":  # store vertices in a dictionary
                    vertice_counter += 1
                    vertices[vertice_counter] = line
                elif line[0] == "o":
                    # object name is the token after "o"
                    match = re.findall(r"\S+", line)
                    nome = match[1]
                elif line[0] == "p":
                    # single-vertex point object
                    match = re.findall(r"\S+", line)
                    # float index hashes equal to the int key (1.0 == 1)
                    vertice_for_point = vertices[float(match[1])]
                    match = re.findall(r"\S+", vertice_for_point)
                    coord = [float(match[1]), float(match[2])]
                    p1 = Poligono(nome)
                    p1.addPonto(coord[0], coord[1])
                    p1.setTipo("ponto")
                    self.DisplayFile.addObjeto(p1)
                elif line[0] == "l":
                    # polyline: every token after "l" is a vertex index
                    match = re.findall(r"\S+", line)
                    l = Poligono(nome)
                    for item in match:
                        if item != "l":
                            vertice_for_point = vertices[float(item)]
                            match = re.findall(r"\S+", vertice_for_point)
                            coord = [float(match[1]), float(match[2])]
                            l.addPonto(coord[0], coord[1])
                    # classify by vertex count: >2 polygon, ==2 segment
                    if int(len(l.getPontosNormalizados())) > 2:
                        l.setTipo("poligono")
                    elif int(len(l.getPontosNormalizados())) == 2:
                        l.setTipo("reta")
                    self.DisplayFile.addObjeto(l)
        finally:
            self.file.close()

    def exportFile(self, path):
        """Write all display-file objects to *path*."""
        temp = ""  # buffers object records until every vertex is written
        vertice_counter = 0
        # ``with`` closes the file even if formatting fails
        with open(path, "w+") as output_file:  # write, overwrite and create if needed
            # Access objetos through the instance; the original read the
            # class attribute directly, which breaks if ``objetos`` is set
            # per-instance (and is identical otherwise).
            for obj in self.DisplayFile.objetos:
                tipo_obj = obj.getTipo()
                pontos_m = obj.getPontosMundo()
                if tipo_obj == "ponto":
                    vertice_counter += 1
                    output_file.write("v {} {} 0\n".format(pontos_m[0][0], pontos_m[0][1]))
                    temp += "o {}\n".format(obj.getNome())
                    temp += "p {}\n".format(vertice_counter)
                elif tipo_obj == "reta":
                    vertice_counter += 1
                    output_file.write("v {} {} 0\n".format(pontos_m[0][0], pontos_m[0][1]))
                    vertice_counter += 1
                    output_file.write("v {} {} 0\n".format(pontos_m[1][0], pontos_m[1][1]))
                    temp += "o {}\n".format(obj.getNome())
                    temp += "l {} {}\n".format(vertice_counter-1, vertice_counter)
                elif tipo_obj == "poligono":
                    temp += "o {}\n".format(obj.getNome())
                    temp += "l"
                    for ponto in pontos_m:
                        vertice_counter += 1
                        output_file.write("v {} {} 0\n".format(ponto[0], ponto[1]))
                        temp += " {}".format(vertice_counter)
                    temp += "\n"
            output_file.write("{}\n".format(temp))
|
#Keyboard row
class Solution(object):
    def findWords(self, words):
        """Return the words that can be typed using letters from a
        single row of a QWERTY keyboard."""
        keyboard_rows = (set('qwertyuiop'), set('asdfghjkl'), set('zxcvbnm'))
        typable = []
        for word in words:
            letters = set(word.lower())
            # a word qualifies when its letters are a subset of one row
            if any(letters <= row for row in keyboard_rows):
                typable.append(word)
        return typable
# Quick demo run of the keyboard-row filter.
solver = Solution()
sample_words = ["Hello", "Alaska", "Dad", "Peace"]
print(solver.findWords(sample_words))
|
from collections import OrderedDict
import numpy as np
from typing import Dict
from module import Module, Parameter
from operators import PlusOperator, MulOperator
class FullyConnectedLayer(Module):
    """Affine layer computing ``y = x @ W + B``.

    Gradients are accumulated into the parameters on each backward pass.
    """

    def __init__(self,
                 input_size: int,
                 output_size: int,
                 w_init_parameter: Parameter = None,
                 b_init_parameter: Parameter = None) -> None:
        super().__init__()
        # Use provided parameters (shape-checked) or small random ones.
        if w_init_parameter:
            assert w_init_parameter.value.shape == (input_size, output_size)
            weight = w_init_parameter
        else:
            weight = Parameter(0.001 * np.random.randn(input_size, output_size))
        if b_init_parameter:
            assert b_init_parameter.value.shape == (1, output_size)
            bias = b_init_parameter
        else:
            bias = Parameter(0.001 * np.random.randn(1, output_size))
        # cached input of the last forward pass (needed for the W gradient)
        self.x_input: np.ndarray = None
        self.register_parameter('W', weight)
        self.register_parameter('B', bias)

    def forward(self, x_input: np.ndarray) -> np.ndarray:
        self.x_input = x_input
        params = self.parameters()
        return x_input @ params['W'].value + params['B'].value

    def backward(self, d_output: np.ndarray) -> np.ndarray:
        params = self.parameters()
        # accumulate (do not overwrite) gradients, as in the original
        params['W'].grad = params['W'].grad + self.x_input.T @ d_output
        params['B'].grad = params['B'].grad + np.sum(d_output, axis=0)[np.newaxis, ...]
        return d_output @ params['W'].value.T
# FIXME : ugly packing-unpacking
class Sequential(Module):
    """Chain of sub-modules: forward runs them in insertion order,
    backward in reverse order.  Multi-valued intermediate results are
    passed as tuples and unpacked between stages.
    """

    def __init__(self, modules: Dict[str, Module]) -> None:
        """
        Arguments:
        ----------
        modules {OrderedDict} -- name -> module mapping; insertion order
            defines the forward-pass order
        """
        super().__init__()
        assert isinstance(modules, OrderedDict)
        self.modules = modules
        for name, module in self.modules.items():
            self.register_module_parameters(name, module)

    def forward(self, *x_input: np.ndarray):
        out = x_input
        for module in self.modules.values():
            out = module.forward(*out)
            # normalise to a tuple so the next stage can be star-called
            if not isinstance(out, tuple):
                out = (out,)
        return out[0] if len(out) == 1 else out

    def backward(self, *d_output: np.ndarray):
        df = d_output
        # removed an unused enumerate() index from the original loop
        for module in reversed(self.modules.values()):
            df = module.backward(*df)
            if not isinstance(df, tuple):
                df = (df, )
        return df[0] if len(df) == 1 else df

    def append(self, name: str, module: Module, add_params: bool = True) -> None:
        """Append a module at the end of the chain.

        Raises:
            ValueError -- if ``name`` is already in use (more specific than
                the bare Exception raised before; still caught by callers
                catching Exception).
        """
        if name in self.modules.keys():
            raise ValueError('Module name already exists')
        self.modules[name] = module
        if add_params:
            self.register_module_parameters(name, module)
class LstmLayer(Module):
class _LstmTimeStamp(Module):
def __init__(self, input_size: int, hidden_size: int, prev=None) -> None:
"""
Arguments:
----------
hidden_size {int} -- hidden state size
Keyword Arguments:
------------------
prev {_LstmTimeStamp} -- use weights of previous lstm state
if None, create weights (default: {None})
"""
super().__init__()
self.input_size = input_size
self.hidden_size = hidden_size
self.d_out_hidden_2: np.ndarray = None
# print("init")
if prev is not None:
# print("prev")
assert isinstance(prev, type(self))
self.hidden_state: np.ndarray = prev.hidden_state
self.cell_state: np.ndarray = prev.cell_state
self.forget_gate = prev.forget_gate
self.input_gate_sg = prev.input_gate_sg
self.input_gate_th = prev.input_gate_th
self.output_gate = prev.output_gate
else:
self.hidden_state: np.ndarray = np.zeros((1, hidden_size))
self.cell_state: np.ndarray = np.zeros((1, hidden_size))
# self.hidden_state: np.ndarray = np.zeros((self.input_size, hidden_size))
# self.cell_state: np.ndarray = np.zeros((self.input_size, hidden_size))
self.forget_gate: Sequential = Sequential(OrderedDict({
'fc': FullyConnectedLayer(self.input_size + hidden_size, hidden_size),
'sigmoid': SigmoidLayer()
}))
self.input_gate_sg: Sequential = Sequential(OrderedDict({
'fc': FullyConnectedLayer(self.input_size + hidden_size, hidden_size),
'sigmoid': SigmoidLayer()
}))
self.input_gate_th: Sequential = Sequential(OrderedDict({
'fc': FullyConnectedLayer(self.input_size + hidden_size, hidden_size),
'activate': TanHLayer() # ReLULayer() #TanHLayer()
}))
self.output_gate: Sequential = Sequential(OrderedDict({
'fc': FullyConnectedLayer(self.input_size + hidden_size, hidden_size),
'sigmoid': SigmoidLayer()
}))
# print("self.forget_gate.W :", self.forget_gate.modules['fc'].parameters()['W'].value)
self.forget_gate_mul: MulOperator = MulOperator()
self.input_gate_mul: MulOperator = MulOperator()
self.input_gate_sum: PlusOperator = PlusOperator()
self.cell_output: TanHLayer = TanHLayer() # ReLULayer = ReLULayer()#TanHLayer = TanHLayer()
self.cell_output_mul: MulOperator = MulOperator()
self.register_module_parameters('forget_gate', self.forget_gate)
self.register_module_parameters('input_gate_sg', self.input_gate_sg)
self.register_module_parameters('input_gate_th', self.input_gate_th)
self.register_module_parameters('output_gate', self.output_gate)
def forward(self, x_input: np.ndarray) -> np.ndarray:
"""lstm forward propagation
Arguments:
----------
x_input {np.ndarray} -- input value of shape !TODO
Returns:
--------
np.ndarray -- result
"""
assert isinstance(x_input, (np.ndarray))
assert x_input.shape == (self.input_size,)
# arr_input = np.array([[x_input]])
cat: np.ndarray = np.concatenate([self.hidden_state, x_input[np.newaxis, ...]], axis=1)
forget_gate_out = self.forget_gate.forward(cat)
forgot_cell = self.forget_gate_mul.forward(self.cell_state, forget_gate_out)
input_gate_sg_out = self.input_gate_sg.forward(cat)
input_gate_th_out = self.input_gate_th.forward(cat)
input_gate_out = self.input_gate_mul.forward(input_gate_sg_out, input_gate_th_out)
updated_cell = self.input_gate_sum.forward(forgot_cell, input_gate_out)
updated_cell_tanh = self.cell_output.forward(updated_cell)
output_gate_out = self.output_gate.forward(cat)
updated_hidden = self.cell_output_mul.forward(updated_cell_tanh, output_gate_out)
self.hidden_state = updated_hidden
self.cell_state = updated_cell
return updated_hidden
def backward(self, d_out_hidden: np.ndarray, d_out_cell: np.ndarray = None,
d_out_x: np.ndarray = None) -> (np.ndarray, np.ndarray, np.ndarray):
"""back propagation
Arguments:
----------
d_out_hidden {np.ndarray} -- result of next layer back propagation
d_out_cell {np.ndarray} -- result of future cell_state derivative
if None, create zeros array (default: {None})
d_out_hidden_2 {np.ndarray} -- result of next layer back propagation
used with time distributed sequential model (default: {None})
Returns:
--------
(np.ndarray, np.ndarray) -- derivative of hidden and cell states
"""
assert isinstance(d_out_hidden, np.ndarray)
assert d_out_hidden.shape == (1, self.hidden_size)
if self.d_out_hidden_2 is not None:
assert isinstance(self.d_out_hidden_2, np.ndarray)
assert self.d_out_hidden_2.shape == (1, self.hidden_size)
d_out_hidden = d_out_hidden + self.d_out_hidden_2
self.d_out_hidden_2 = None
if d_out_cell is not None:
assert isinstance(d_out_cell, np.ndarray)
assert d_out_cell.shape == (1, self.hidden_size,)
else:
d_out_cell = np.zeros((1, self.hidden_size))
d_cell_output_mul_cell, d_cell_output_mul_hidden = self.cell_output_mul.backward(d_out_hidden)
d_result = self.output_gate.backward(d_cell_output_mul_hidden)
d_cell_output = self.cell_output.backward(d_cell_output_mul_cell)
d_cell_output = d_cell_output + d_out_cell
d_forgot_cell, d_input_gate_out = self.input_gate_sum.backward(d_cell_output)
d_input_gate_sg_out, d_input_gate_th_out = self.input_gate_mul.backward(d_input_gate_out)
d_result = d_result + self.input_gate_sg.backward(d_input_gate_sg_out)
d_result = d_result + self.input_gate_th.backward(d_input_gate_th_out)
d_cell_state, d_forget_gate_out = self.forget_gate_mul.backward(d_forgot_cell)
d_result = d_result + self.forget_gate.backward(d_forget_gate_out)
return d_result[:, :self.hidden_size], d_cell_state, d_result[0, self.hidden_size:]
def set_time_distributed(self, d_out_hidden_2: np.ndarray) -> None:
assert isinstance(d_out_hidden_2, np.ndarray)
assert d_out_hidden_2.shape == (1, self.hidden_size)
self.d_out_hidden_2 = d_out_hidden_2
def __init__(self, input_size: int, hidden_size: int) -> None:
"""LSTM layer
Arguments:
----------
input_size {int} -- input value size
hidden_size {int} -- hidden / cell state size
"""
super().__init__()
assert isinstance(input_size, int)
assert isinstance(hidden_size, int)
assert input_size >= 1
assert hidden_size >= 1
self.input_size = input_size
self.hidden_size = hidden_size
self.cell = self._LstmTimeStamp(input_size, hidden_size)
self.register_module_parameters('LSTM', self.cell)
self.history: Sequential = None
def forward(self, x_input: np.ndarray) -> (np.ndarray, np.ndarray):
"""forward propagation
Arguments:
x_input {np.ndarray} -- input sequence of shape !TODO
Returns:
np.ndarray -- predicted output embedding
"""
assert isinstance(x_input, np.ndarray)
assert x_input.ndim == 2
assert x_input.shape[-1] == self.input_size
future = self._LstmTimeStamp(self.input_size, self.hidden_size, self.cell)
self.history = Sequential(OrderedDict({}))
out: np.ndarray = None
hidden_history = []
for i, sample in enumerate(x_input):
out = future.forward(sample)
hidden_history.append(out)
past = future
future = self._LstmTimeStamp(self.input_size, self.hidden_size, past)
self.history.append(str(i), past)
return out, np.array(hidden_history)
def backward(self, d_output: np.ndarray, d_time_distributed: np.ndarray = None) -> np.ndarray:
    """Backpropagate through the recorded time steps.

    d_output is the gradient w.r.t. the final output; d_time_distributed,
    when given, provides one additional upstream gradient per time step
    (row i goes to step i).
    """
    assert self.history is not None
    if d_time_distributed is not None:
        # Hand each recorded step its own per-timestep gradient.
        for i, timestamp in enumerate(self.history.modules.values()):
            timestamp.set_time_distributed(d_time_distributed[i][np.newaxis, ...])
    # Sequential.backward presumably walks the steps in reverse order — confirm.
    d_result = self.history.backward(d_output)
    return d_result
def reload(self):
    """Reset per-sequence state so the layer can process a new sequence."""
    self.step_num = 0  # NOTE(review): step_num is not referenced elsewhere in this view
    self.history = None
class DropoutLayer(Module):
    """Inverted dropout.

    During training each activation is zeroed with probability
    `dropout_chance` and the survivors are scaled by 1 / (1 - dropout_chance)
    so the expected activation is unchanged; at inference the input passes
    through untouched.
    """

    def __init__(self, dropout_chance: float):
        assert isinstance(dropout_chance, float)
        super().__init__()
        # BUG fix: original assigned the builtin type `float` instead of the value.
        self.dropout_chance = dropout_chance
        # Mask built in forward(), reused in backward().
        self.dropout: np.ndarray = None

    def forward(self, x_input: np.ndarray):
        assert isinstance(x_input, np.ndarray)
        if not self.is_train:
            # Inference mode: dropout disabled.
            return x_input
        self.dropout = np.random.rand(*x_input.shape)
        self.dropout[self.dropout < self.dropout_chance] = 0
        # BUG fix: scale surviving units by the scalar keep-probability; the
        # original divided by the full mask array, which raises a shape error
        # when assigned to the boolean-indexed subset.
        self.dropout[self.dropout > 0] = 1 / (1 - self.dropout_chance)
        result = self.dropout * x_input
        return result

    def backward(self, d_output: np.ndarray):
        assert self.dropout is not None, "forward propagation required"
        assert isinstance(d_output, np.ndarray)
        assert d_output.shape == self.dropout.shape
        # Same mask/scale applied to the upstream gradient.
        return self.dropout * d_output
class TanHLayer(Module):
    """Element-wise tanh activation; caches its derivative for backprop."""

    def __init__(self) -> None:
        super().__init__()
        self.grad: np.ndarray = None  # d tanh(x)/dx from the last forward pass

    def forward(self, x_input: np.ndarray) -> np.ndarray:
        assert isinstance(x_input, np.ndarray)
        activated = np.tanh(x_input)
        # tanh'(x) = 1 - tanh(x)^2
        self.grad = 1. - np.square(activated)
        return activated

    def backward(self, d_output: np.ndarray) -> np.ndarray:
        assert d_output.shape == self.grad.shape
        return self.grad * d_output
class SigmoidLayer(Module):
    """Element-wise logistic sigmoid activation; caches its derivative for backprop."""

    def __init__(self) -> None:
        super().__init__()
        self.x_input: np.ndarray = None  # NOTE(review): never written in this class — appears unused
        self.grad: np.ndarray = None  # sigmoid'(x) from the last forward pass

    def forward(self, x_input: np.ndarray) -> np.ndarray:  # annotation fixed: returns the activation, not None
        assert isinstance(x_input, np.ndarray)
        sigmoid = 1. / (1 + np.exp(-x_input))
        # sigmoid'(x) = s * (1 - s)
        self.grad = sigmoid * (1 - sigmoid)
        return sigmoid

    def backward(self, d_output: np.ndarray) -> np.ndarray:
        assert isinstance(d_output, np.ndarray)
        assert d_output.shape == self.grad.shape
        return d_output * self.grad
class ReLULayer(Module):
    """Element-wise ReLU activation; caches a sub-gradient mask for backprop.

    The sub-gradient at exactly zero is taken as 0.5.
    """

    def __init__(self):
        super().__init__()

    def forward(self, X):
        # BUG fix: np.float was deprecated in NumPy 1.20 and removed in 1.24;
        # the builtin float is the documented replacement.
        self.__grad = np.array(X > 0, dtype=float)
        self.__grad[X == 0] = 0.5
        # NOTE(review): clamps the caller's array in place (original behavior kept).
        X[X < 0] = 0
        return X

    def backward(self, d_out):
        assert d_out.shape == self.__grad.shape
        d_result = d_out * self.__grad
        return d_result
|
# -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# https://doc.scrapy.org/en/latest/topics/items.html
from scrapy import Item,Field
class ScrapyItem(Item):
    """Scrapy item carrying a single scraped image URL."""
    image_url = Field()
class JianshuItem(Item):
    """Scrapy item for a Jianshu user profile.

    Field semantics are presumably the profile-page statistics
    (followers, articles, likes, ...) — confirm against the spider.
    """
    nickname = Field()
    description = Field()
    followed = Field()
    following = Field()
    articles = Field()
    charlength = Field()
    likes = Field()
import maya.cmds as cmds
import os
from functools import partial
import Utils.Utils_File as fileUtils
#NOTE: Remove this import!
import Utils.Utils_Part as Utils_Part
reload(Utils_Part)
class PartParam_UI:
    """Maya window for authoring a chain of rig 'parts'.

    __init__ builds the window; createPart reads the UI values and builds the
    part/joint/IK network via Utils_Part, then collects every created node
    into an asset container.

    NOTE(review): this module is Python 2 (print statements, reload()) as
    used by Maya's legacy interpreter.
    """

    def __init__(self, *args):
        """ Create a dictionary to store UI elements """
        self.UIElements = {}
        """ Check to see if the UI exists """
        self.windowName = "PartParams"
        if cmds.window(self.windowName, exists=True):
            # Rebuild from scratch rather than showing a stale window.
            cmds.deleteUI(self.windowName)
        """ Define UI elements width and height """
        self.windowWidth = 200
        self.windowHeight = 400
        self.rowHeight = 40
        buttonWidth = 100
        buttonHeight = 40
        """ Define a window"""
        self.UIElements["window"] = cmds.window(self.windowName, width=self.windowWidth, height=self.windowHeight, title="Window", sizeable=True)
        self.UIElements["guiFlowLayout1"] = cmds.flowLayout(v=True, width=self.windowWidth, height=self.windowHeight, bgc=[0.2, 0.2, 0.2])
        """ Edit and Create botton row """
        self.UIElements["guiFlowLayout2"] = cmds.flowLayout(v=False, width=self.windowWidth, height=self.rowHeight, bgc=[0.4, 0.4, 0.4])
        cmds.setParent(self.UIElements["guiFlowLayout1"])
        self.UIElements["edit_button"] = cmds.button(label="Edit", width=buttonWidth, height=buttonHeight, p=self.UIElements["guiFlowLayout2"])
        self.UIElements["createbutton"] = cmds.button(label="Create", width=buttonWidth, height=buttonHeight, p=self.UIElements["guiFlowLayout2"], command=self.createPart)
        """ Name Row """
        self.UIElements["guiFlowLayout3"] = cmds.flowLayout(v=False, width=self.windowWidth, height=self.rowHeight, bgc=[0.4, 0.4, 0.4])
        cmds.setParent(self.UIElements["guiFlowLayout1"])
        self.UIElements["name_text"] = cmds.textField(tx="Test", width=buttonWidth, height=buttonHeight, bgc=[1.0, 1.0, 1.0], p=self.UIElements["guiFlowLayout3"])
        """ NumParts Row """
        self.UIElements["guiFlowLayout4"] = cmds.flowLayout(v=False, width=self.windowWidth, height=self.rowHeight, bgc=[0.4, 0.4, 0.4])
        cmds.setParent(self.UIElements["guiFlowLayout1"])
        self.UIElements["num_text"] = cmds.intField( minValue=3, maxValue=10, step=1, width=buttonWidth, height=buttonHeight, bgc=[1.0, 1.0, 1.0], p=self.UIElements["guiFlowLayout4"])
        """ Orientation Row """
        self.UIElements["guiFlowLayout5"] = cmds.flowLayout(v=False, width=self.windowWidth, height=self.rowHeight, bgc=[0.4, 0.4, 0.4])
        cmds.setParent(self.UIElements["guiFlowLayout1"])
        self.UIElements["radio_grp1"] = cmds.radioButtonGrp( label='Orientation', labelArray3=['X', 'Y', 'Z'], width=110, numberOfRadioButtons=3, adj=1, p=self.UIElements["guiFlowLayout5"] )
        """ Generate and Mirror Row """
        self.UIElements["guiFlowLayout6"] = cmds.flowLayout(v=False, width=self.windowWidth, height=self.rowHeight, bgc=[0.4, 0.4, 0.4])
        cmds.setParent(self.UIElements["guiFlowLayout1"])
        self.UIElements["generate_button"] = cmds.button(label="Generate", width=buttonWidth, height=buttonHeight, p=self.UIElements["guiFlowLayout6"])
        self.UIElements["mirror_button"] = cmds.button(label="Mirror", width=buttonWidth, height=buttonHeight, p=self.UIElements["guiFlowLayout6"])
        cmds.showWindow(self.windowName)

    def createPart(self, *args):
        """Build the part chain from the current UI values and containerize it."""
        contained_nodes = []
        # Collect info from the UI to build part
        userDefinedName = cmds.textField(self.UIElements["name_text"], q=True, text=True)
        numParts = cmds.intField(self.UIElements["num_text"], q=True, v=True)
        partRoot = Utils_Part.rigNodeRoot(numParts, userDefinedName)
        contained_nodes.append(partRoot)
        parts = Utils_Part.rigNode(userDefinedName, numParts, partRoot)
        partsLen = len(parts)
        for p in range(len(parts)):
            contained_nodes.append(parts[p])
            if p < partsLen-1:
                # Each consecutive pair of parts gets its own joint pair plus stretchy IK.
                partList = (parts[p], parts[p+1])
                partJoint = Utils_Part.createPJoints(partList)
                for j in partJoint:
                    contained_nodes.append(j)
                    # Set drawing override on joints
                    cmds.setAttr(j + '.overrideEnabled', 1)
                    cmds.setAttr(j + '.overrideDisplayType', 1)
                ikHandleName = partJoint[0].replace('pjnt', 'ikh')
                ikInfo = Utils_Part.scStretchyIk(partList, partJoint, ikHandleName)
                for i in ikInfo[0]:
                    contained_nodes.append(i)
                # Connect ikHandles, parts, and joints
                ptca = cmds.pointConstraint(partList[0], partJoint[0], mo=True)
                cmds.connectAttr(partJoint[0] + '.rotate', partList[0] +'.rotateAxis')
                #cmds.parent(partJoint[0], partList[0])
                ptcb = cmds.pointConstraint(partList[1], ikInfo[0][0])
                contained_nodes.append(ptca[0])
                contained_nodes.append(ptcb[0])
        # Cleanup nodes and add to a container.
        print contained_nodes
        containerName = (userDefinedName+'_container')
        con1 = cmds.container(n=containerName)
        for i in contained_nodes:
            print i
            cmds.container(containerName, edit=True, addNode=i, inc=True, ish=True, ihb=True, iha=True)
|
# Demo: negative indexing on a mixed-type list.
one = ["Primeiro", "Segundo", 3, 4]
print("Lista eh", one)
# Index -4 counts from the end, so it is the first element here.
print("Olha o ", one[-4], " position -4")
|
#coding: utf8
import util_pickle as up
from char_feature import *
from char_feature_lib_builder import *
from switcher import *
# FeatureDiv: full scale of a single feature value.
FDIV = 10000


def get_allowance_list():
    """Build the list of tolerated feature offsets: [0, 0, 0, 1, -1, ..., 999, -999].

    Covers a +/-10% tolerance band of the feature scale (FDIV).
    """
    lst = [0]
    # BUG fix: range() requires an int — 10000 * 0.10 is a float (TypeError on Py3).
    for k in range(int(10000 * 0.10)):  # 10% tolerance
        lst.extend([k, -k])
    return lst
def search_recur(ftree, tgf, selection, sum_offset=0):
    '''
    Given target feature vector tgf, depth-first search the feature tree for
    near matches, accumulating the absolute offset at each level.  Leaves
    append (accumulated_offset, char) tuples to `selection`.

    Branches whose per-level offset exceeds 20% of the feature scale are
    pruned; since keys are visited nearest-first, all later keys are worse.
    '''
    rlist = []
    # BUG fix: materialize the keys — Py3 dict views support neither
    # indexing nor .sort() (ftree.keys()[0] raised TypeError).
    keys = list(ftree.keys())
    if len(tgf) == 0:
        # Leaf level: the remaining key is the recognized character.
        selection.append((sum_offset, keys[0]))
        return None
    keys.sort(key=lambda x: abs(x - tgf[0]))
    for k1 in keys:
        new_offset = abs(k1 - tgf[0])
        if new_offset > 10000 * 0.2:  # 20% tolerance
            return None
        new_sum_offset = sum_offset + new_offset
        new_inst = search_recur(ftree[k1], tgf[1:], selection, new_sum_offset)
        if new_inst is not None:
            rlist.append(new_inst)
    if rlist == []:
        return None
    else:
        return rlist
# Result shape: [cumulative_offset, [cumulative_offset, ... [cumulative_offset, char]]] ...]
def search_for(ftree, tgf):
    """Return all (accumulated_offset, char) candidates matching feature tgf."""
    matches = []
    search_recur(ftree, tgf, matches, 0)
    return matches
def do_recognize(cimg, ftree):
    """Recognize the character drawn in image cimg against feature tree ftree.

    Returns the best-matching character (smallest accumulated offset),
    or a single space when nothing matched.
    """
    candidates = search_for(ftree, get_feature(cimg))
    candidates.sort(key=lambda c: c[0])  # smallest accumulated offset first
    if len(candidates) == 0:
        # Parenthesized print: valid as both a Py2 statement and a Py3 call.
        print('fail found 1')
        return ' '
    else:
        return candidates[0][1]
def strip_touch(fimg, ra, rb, func=lambda x, y: (x, y)):
    """Scan coordinates in the order given by ra (outer) and rb (inner);
    return the first outer coordinate whose pixel is brighter than CR_EPCLR.

    `func` maps (outer, inner) to an (x, y) pixel position, so the same scan
    works along either axis.  Returns None when no pixel qualifies.
    """
    for outer in ra:
        for inner in rb:
            if ToGrey(fimg.getpixel(func(outer, inner))) > CR_EPCLR:
                return outer
def img_strip(fimg):
    """Crop away the blank border around the glyph in fimg.

    Scans from each edge toward the center with strip_touch to find the
    bounding box, then crops to it.
    """
    w, h = fimg.size
    # BUG fix: materialize as lists — Py3 range objects have no .reverse().
    wr = list(range(w))
    hr = list(range(h))
    xmin = strip_touch(fimg, wr, hr)
    ymin = strip_touch(fimg, hr, wr, lambda x, y: (y, x))
    wr.reverse()
    xmax = strip_touch(fimg, wr, hr)
    hr.reverse()
    ymax = strip_touch(fimg, hr, wr, lambda x, y: (y, x))
    return fimg.crop((xmin, ymin, xmax, ymax))
if __name__ == '__main__':
    # TESTIM selects which manual experiment to run.
    TESTIM = 1
    if TESTIM == 1:
        # Test recognition accuracy across different fonts.
        ftree = up.load('ftree.tre')
        fnt = ['msyh.ttf', 'simsun.ttc']
        cimg = get_char_img(u'你', fnt[0])
        print do_recognize(cimg, ftree)
    if TESTIM == 2:
        # Test the white-border stripping function.
        #cimg = get_char_img(u'你', 'simsun.ttc')
        cimg = Image.open('Z:/out1.png')
        cimg = img_strip(cimg)
        cimg.show()
# This file is only intended for development purposes
from kubeflow.kubeflow.cd import base_runner

# Kick off the CI/CD workflow that builds the notebook-controller component.
base_runner.main(component_name="notebook_controller",
                 workflow_name="nb-c-build")
|
from selenium import webdriver
from time import sleep

# Demo: basic browser-window controls and navigation against baidu.com.
driver = webdriver.Chrome()
driver.get("https://www.baidu.com")
driver.set_window_size(480, 800)  # set the browser window size
sleep(2)
driver.refresh()
sleep(2)
driver.maximize_window()  # maximize the browser window
sleep(2)
driver.get("https://www.baidu.com")
sleep(1)
driver.back()  # browser back
sleep(1)
driver.forward()  # browser forward
driver.quit()
import math # 调用数学库
# from math import pi
def main(): # 计算圆的面积
r = eval(input("请输入待求圆的半径")) # input()输入圆的半径 并对输入的字符串进行格式转换(eval())
SquareR = pow(r, 2)
# pow()函数为幂计算所用 其中pow(a,b)代表的是a的b次幂
# pow(Exp,x)->e^x
S = SquareR * math.pi
# 此处的math.pi为精度很高的常数 存放在math库中
# S = math.pi*r*r
# print(S)
print("{:.2f}".format(S))
if __name__ == '__main__':
main() # 调用库的练习
|
# Guess the number the computer is thinking of.
# The player gets 6 tries; after the 6th miss the answer is printed.
import random as r

# BUG fix: the original created `root = tk.Tk()` without ever importing
# tkinter (immediate NameError) and never used the window — removed.
q_num = r.randint(1, 100)
print("----숫자 맞추기---", q_num)
for num in range(1, 7):
    u_ans = int(input("%d번째 예상 숫자: " % num))
    if u_ans == q_num:
        print("정답이야!!")
        break
    if u_ans > q_num:
        print("더 작은 수를 넣어봐!!")
    else:
        print("더 큰 수를 넣어봐!")
    if num == 6:
        print("정답은 %d!!" % q_num)
|
# Generated by Django 3.2.6 on 2021-08-30 04:52
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Replace the old `product_option` model with `ProductOption`, linked to
    Product through a nullable FK (SET_NULL preserves the option row when the
    referenced product is deleted)."""

    dependencies = [
        ('product_register', '0001_initial'),
    ]

    operations = [
        # New model superseding the removed `product_option`.
        migrations.CreateModel(
            name='ProductOption',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('product_name', models.CharField(max_length=100)),
                ('product_description', models.CharField(max_length=100)),
            ],
        ),
        # Drop the old field and model before wiring in the replacement.
        migrations.RemoveField(
            model_name='product',
            name='product_option',
        ),
        migrations.DeleteModel(
            name='product_option',
        ),
        migrations.AddField(
            model_name='productoption',
            name='Product',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='product_register.product'),
        ),
    ]
|
"""
Type definition for model parameters
"""
from pydantic import BaseModel as _BaseModel, Extra, root_validator, validator
from pydantic.dataclasses import dataclass
from functools import partial
from datetime import date
from typing import Any, Dict, List, Optional, Union
from autumn.settings.constants import (
COVID_BASE_DATETIME,
GOOGLE_MOBILITY_LOCATIONS,
COVID_BASE_AGEGROUPS,
)
from autumn.core.inputs.social_mixing.constants import LOCATIONS
from summer2.experimental.model_builder import (
ParamStruct,
parameter_class as pclass,
parameter_array_class as parray,
)
from numpyro.distributions import constraints
from numbers import Real
from math import inf
# Mysterious missing constraint in numpyro...
constraints.non_negative = constraints.interval(0.0, inf)
BASE_DATE = COVID_BASE_DATETIME.date()
# Forbid additional arguments to prevent extraneous parameter specification
_BaseModel.Config.extra = Extra.forbid
# ModelBuilder requires all parameters to be embedded in ParamStruct objects
class BaseModel(_BaseModel, ParamStruct):
    """Pydantic base model that also satisfies summer2's ParamStruct interface."""
    pass
"""
Commonly used checking processes
"""
def validate_expected(field: str, expected: str):
    """Returns a validator that asserts that the member field {field}
    has value {expected}

    Args:
        field: Member field to validate
        expected: Expected value
    """

    def _check(value_to_check):
        assert value_to_check == expected, f"Invalid {field}: {value_to_check}"
        return value_to_check

    return validator(field)(_check)
def get_check_prop(name):
    """Return a validator asserting a scalar is a proportion in [0, 1]."""

    def _validate(value: float) -> float:
        assert 0.0 <= value <= 1.0, (
            f"Parameter '{name}' not in domain [0, 1], but is intended as a proportion"
        )
        return value

    return _validate
def get_check_non_neg(name):
    """Return a validator asserting a scalar is non-negative."""

    def _validate(value: float) -> float:
        assert 0.0 <= value, (
            f"Parameter '{name}' is negative, but is intended to be non-negative"
        )
        return value

    return _validate
def get_check_all_prop(name):
    """Return a validator asserting every element of a list lies in [0, 1]."""

    def _validate(values: list) -> float:
        assert all(0.0 <= v <= 1.0 for v in values), (
            f"Parameter '{name}' contains values outside [0, 1], but is intended as a list of proportions"
        )
        return values

    return _validate
def get_check_all_non_neg(name):
    """Return a validator asserting every element of a list is non-negative."""
    # BUG fix: the message wrongly said "list of proportions" (copy-paste from
    # the proportion checker); this checker only enforces non-negativity.
    msg = f"Parameter '{name}' contains negative values, but is intended as a list of non-negative values"

    def check_all_non_neg(values: list) -> float:
        assert all([0.0 <= i_value for i_value in values]), msg
        return values

    return check_all_non_neg
def get_check_all_dict_values_non_neg(name):
    """Return a validator asserting every value of a dict is non-negative."""
    # BUG fix: the message wrongly said "list of proportions" (copy-paste);
    # this checker validates dict values for non-negativity.
    msg = f"Dictionary parameter '{name}' contains negative values, but all values are intended to be non-negative"

    def check_non_neg_values(dict_param: dict) -> float:
        assert all([0.0 <= i_value for i_value in dict_param.values()]), msg
        return dict_param

    return check_non_neg_values
def get_check_all_non_neg_if_present(name):
    """Return a validator asserting all elements are non-negative, skipping
    the check entirely when the value is None/empty (optional parameter)."""
    # BUG fix: the message wrongly said "list of proportions" (copy-paste);
    # only non-negativity is enforced here.
    msg = f"Parameter '{name}' contains negative values, but is intended as a list of non-negative values"

    def check_all_non_neg(values: float) -> float:
        if values:
            assert all([0.0 <= i_value for i_value in values]), msg
        return values

    return check_all_non_neg
"""
Parameter validation models
"""
class Time(BaseModel):
    """
    Parameters to define the model time period and evaluation steps.
    """

    start: float  # simulation start time
    end: float  # simulation end time (must be >= start)
    step: float  # evaluation step size

    @root_validator(pre=True, allow_reuse=True)
    def check_lengths(cls, values):
        # Reject a period that ends before it starts.
        # (Named check_lengths for symmetry with the other models.)
        start, end = values.get("start"), values.get("end")
        assert end >= start, f"End time: {end} before start: {start}"
        return values
class TimeSeries(BaseModel):
    """
    A set of values with associated time points.
    """

    times: List[float]  # time points; date inputs become day offsets from BASE_DATE
    values: List[float]  # one value per time point

    @root_validator(pre=True, allow_reuse=True)
    def check_lengths(cls, inputs):
        # times and values must be parallel lists of equal length.
        value_series, time_series = inputs.get("values"), inputs.get("times")
        msg = f"TimeSeries length mismatch, times length: {len(time_series)}, values length: {len(value_series)}"
        assert len(time_series) == len(value_series), msg
        return inputs

    @validator("times", pre=True, allow_reuse=True)
    def parse_dates_to_days(dates):
        # Accept datetime.date entries and convert them to integer days since BASE_DATE.
        return [(d - BASE_DATE).days if isinstance(d, date) else d for d in dates]
class Country(BaseModel):
    """
    The country that the model is based in. (The country may be, and often is, the same as the region.)
    """

    iso3: str  # three-letter ISO 3166-1 alpha-3 country code

    @validator("iso3", pre=True, allow_reuse=True)
    def check_length(iso3):
        assert len(iso3) == 3, f"ISO3 codes should have three digits, code is: {iso3}"
        return iso3
class Population(BaseModel):
    """
    Model population parameters.
    """

    region: Optional[str]  # None/null means default to parent country
    year: int  # Year to use to find the population data in the database

    @validator("year", pre=True, allow_reuse=True)
    def check_year(year):
        # Sanity bound on the population reference year.
        msg = f"Year before 1900 or after 2050: {year}"
        assert 1900 <= year <= 2050, msg
        return year
class CompartmentSojourn(BaseModel):
    """
    Compartment sojourn times, i.e. the mean period of time spent in a compartment.
    """

    total_time: pclass(constraints.non_negative)
    # Presumably the fraction of total_time in an 'early' sub-compartment — confirm.
    proportion_early: Optional[pclass(constraints.non_negative)]
class Sojourns(BaseModel):
    """
    Parameters for determining how long a person stays in a given compartment.
    """

    active: pclass(constraints.non_negative)  # sojourn in the active compartment
    latent: pclass(constraints.non_negative)  # sojourn in the latent compartment
class LatencyInfectiousness(BaseModel):
    """
    Parameters to define how many latent compartments are infectious and their relative
    infectiousness compared to the active disease compartments
    """

    n_infectious_comps: int  # number of latent compartments treated as infectious
    rel_infectiousness: pclass(constraints.non_negative)  # relative to active compartments
class MixingLocation(BaseModel):
    """Time series of mixing adjustments for one contact location."""

    append: bool  # Whether to append or overwrite times / values
    times: List[int]  # Times for dynamic mixing func
    values: List[Any]  # Values for dynamic mixing func

    @root_validator(pre=True, allow_reuse=True)
    def check_lengths(cls, values):
        # times and values must be parallel lists.
        value_series, time_series = values.get("values"), values.get("times")
        assert len(time_series) == len(value_series), f"Mixing series length mismatch."
        return values

    @validator("times", pre=True, allow_reuse=True)
    def parse_dates_to_days(dates):
        # Accept date objects and convert them to integer days since BASE_DATE.
        return [(d - BASE_DATE).days if isinstance(d, date) else d for d in dates]
class Mobility(BaseModel):
    """Mobility and mixing inputs: per-location mixing series, Google-mobility
    weighting, and UNESCO school opening adjustments."""

    region: Optional[str]  # None/null means default to parent country
    mixing: Dict[str, MixingLocation]
    age_mixing: Optional[Dict[str, TimeSeries]]
    smooth_google_data: bool
    square_mobility_effect: bool
    google_mobility_locations: Dict[str, Dict[str, float]]  # model location -> {google category: weight}
    microdistancing: Optional[
        dict
    ]  # this is not used for the sm_covid model. Still included to prevent crash in mixing matrix code
    apply_unesco_school_data: bool
    unesco_partial_opening_value: pclass()
    unesco_full_closure_value: Optional[float]

    @validator("google_mobility_locations", allow_reuse=True)
    def check_location_weights(val):
        # Each location's weights must sum to one and reference only known
        # Google mobility categories.
        for location in val:
            location_total = sum(val[location].values())
            msg = f"Mobility weights don't sum to one: {location_total}"
            assert abs(location_total - 1.0) < 1e-6, msg
            msg = "Google mobility key not recognised"
            assert all([key in GOOGLE_MOBILITY_LOCATIONS for key in val[location].keys()]), msg
        return val
class AgeSpecificProps(BaseModel):
    """Age-indexed proportions plus a scalar multiplier."""

    values: Dict[int, float]  # keyed by age group — confirm against usage
    multiplier: pclass(constraints.non_negative)
class AgeStratification(BaseModel):
    """
    Parameters used in age based stratification.
    """

    susceptibility: Optional[
        Union[Dict[int, float], float]
    ]  # Dictionary that represents each age group, single float or None
    prop_symptomatic: Optional[Union[Dict[int, float], float]]  # As for susceptibility
    prop_hospital: AgeSpecificProps  # age-specific hospitalisation proportions
    ifr: AgeSpecificProps  # age-specific infection fatality rates
class VaccineEffects(BaseModel):
    """Vaccine effectiveness against each outcome."""

    ve_infection: pclass()  # effectiveness against infection
    ve_hospitalisation: pclass()  # effectiveness against hospitalisation
    ve_death: pclass()  # effectiveness against death
class VocSeed(BaseModel):
    """Timing of a variant's seeding, anchored to its GISAID report time."""

    time_from_gisaid_report: pclass()  # offset from the GISAID report time
    seed_duration: pclass(constraints.non_negative)  # duration of the seeding period
class VocComponent(BaseModel):
    """
    Parameters defining the emergence profile of the Variants of Concerns
    """

    starting_strain: bool  # whether this is the initial 'wild type' strain
    seed_prop: float
    new_voc_seed: Optional[VocSeed]
    contact_rate_multiplier: pclass()  # must be 1.0 for the starting strain
    incubation_overwrite_value: Optional[float]
    vacc_immune_escape: pclass(constraints.unit_interval)
    cross_protection: Dict[str, pclass()]  # protection conferred against other strains
    hosp_risk_adjuster: Optional[pclass(constraints.non_negative)]
    death_risk_adjuster: Optional[pclass(constraints.non_negative)]
    icu_risk_adjuster: Optional[pclass(constraints.non_negative)]

    @root_validator(pre=True, allow_reuse=True)
    def check_starting_strain_multiplier(cls, values):
        # The reference strain anchors the contact rate, so its multiplier is fixed at one.
        if values["starting_strain"]:
            multiplier = values["contact_rate_multiplier"]
            msg = f"Starting or 'wild type' strain must have a contact rate multiplier of one: {multiplier}"
            assert multiplier == 1.0, msg
        return values
validate_dist = partial(validate_expected, "distribution")
@dataclass
class GammaDistribution(ParamStruct):
    """Gamma delay distribution, parameterized by shape and mean."""

    distribution: str  # discriminator; must be "gamma"
    shape: pclass(constraints.positive, desc="shape")
    mean: pclass(desc="mean")

    _check_dist = validate_dist("gamma")

    def __repr__(self):
        return f"Gamma: {self.shape},{self.mean}"


# Currently the only supported delay distribution type.
TimeDistribution = GammaDistribution
class TimeToEvent(BaseModel):
    """Delay distributions from onset to each clinical event
    (see Parameters.time_from_onset_to_event)."""

    hospitalisation: TimeDistribution
    icu_admission: TimeDistribution
    death: TimeDistribution
class HospitalStay(BaseModel):
    """Length-of-stay distributions for hospital overall and for ICU."""

    hospital_all: TimeDistribution
    icu: TimeDistribution
class RandomProcessParams(BaseModel):
    """Configuration of the model's time-varying random process."""

    coefficients: Optional[List[float]]  # process coefficients, when pre-specified
    noise_sd: Optional[float]
    delta_values: Optional[parray()]
    order: int  # order of the process
    time: Time  # period over which the process applies
    affected_locations: List[str]  # contact locations modified by the process
class ParamConfig:
    """
    Config for parameter models.
    """

    anystr_strip_whitespace = True  # Strip whitespace
    allow_mutation = False  # Params should be immutable
@dataclass(config=ParamConfig)
class Parameters(ParamStruct):
    """Top-level validated parameter set for the model run."""

    # Metadata
    description: Optional[str]
    country: Country
    population: Population
    age_groups: List[int]  # must be drawn from COVID_BASE_AGEGROUPS
    time: Time
    infectious_seed_time: pclass()
    seed_duration: float
    serodata_age: dict
    # Values
    contact_rate: pclass()
    sojourns: Sojourns
    is_dynamic_mixing_matrix: bool
    mobility: Mobility
    school_multiplier: pclass()
    hh_contact_increase: pclass()
    compartment_replicates: Dict[str, int]  # exactly {"latent": n, "infectious": m}
    latency_infectiousness: LatencyInfectiousness
    time_from_onset_to_event: TimeToEvent
    hospital_stay: HospitalStay
    prop_icu_among_hospitalised: float
    age_stratification: AgeStratification
    vaccine_effects: VaccineEffects
    voc_emergence: Optional[Dict[str, VocComponent]]
    # Random process
    activate_random_process: bool
    random_process: Optional[RandomProcessParams]
    # Output-related
    requested_cumulative_outputs: List[str]
    cumulative_start_time: Optional[float]
    request_incidence_by_age: bool
    request_immune_prop_by_age: bool

    @validator("age_groups", allow_reuse=True)
    def validate_age_groups(age_groups):
        # Only the canonical 5-year bands (0..75) are supported.
        msg = "Not all requested age groups in the available age groups of 5-year increments from zero to 75"
        int_age_groups = [int(i_group) for i_group in COVID_BASE_AGEGROUPS]
        assert all([i_group in int_age_groups for i_group in age_groups]), msg
        return age_groups

    @validator("compartment_replicates", allow_reuse=True)
    def validate_comp_replicates(compartment_replicates):
        # Replication is only defined for the latent and infectious compartments.
        replicated_comps = list(compartment_replicates.keys())
        msg = "Replicated compartments must be latent and infectious"
        assert replicated_comps == ["latent", "infectious"], msg
        n_replicates = list(compartment_replicates.values())
        msg = "Number of requested replicates should be positive"
        assert all([n > 0 for n in n_replicates]), msg
        return compartment_replicates
|
import PyPDF2
import nltk
from os import walk
from nltk.tokenize import word_tokenize, sent_tokenize
from nltk.corpus import stopwords
def get_all_files(folder_name):
    """Collect the names of all files found by walking folder_name."""
    found = []
    for _dirpath, _dirnames, filenames in walk(folder_name):
        found.extend(filenames)
    return found
def save_text_file(file_name, file_text):
    """Write file_text to file_name, overwriting any existing content."""
    # with-statement guarantees the handle is closed even if write() raises
    # (the original leaked the handle on error).
    with open(file_name, 'w') as f:
        f.write(file_text)
def read_text_file(file_name):
    """Return the full text content of file_name."""
    # with-statement guarantees the handle is closed even if read() raises
    # (the original leaked the handle on error).
    with open(file_name, 'r') as f:
        return f.read()
# get the text corpus from the files
def get_pdf_corpus(file_name):
    """Extract and concatenate the text of every page of a PDF file."""
    # with-statement closes the handle even if PyPDF2 raises mid-extraction
    # (the original's manual close() was skipped on error).
    with open(file_name, 'rb') as pdf_file:
        # creating a pdf reader object
        pdf_reader = PyPDF2.PdfFileReader(pdf_file)
        corpus_ = ''
        for i in range(pdf_reader.numPages):
            corpus_ += pdf_reader.getPage(i).extractText()
    return corpus_
# returns the bag of words with counts, and the list of cleaned sentences
def get_dict_words(corpus_):
    """Tokenize corpus_, drop English stopwords, and count words.

    Returns (bag_of_words, sentences): a dict of word -> count, and a list
    holding, per sentence, the collection of its distinct non-stopword tokens.

    NOTE(review): the final dict is built with update(), so a word's count
    reflects only the last sentence it appeared in (preserved from original).
    """
    stopword = set(stopwords.words('english'))
    updated_sentences = []  # per-sentence word collections
    updated_corpus_all = {}  # overall bag of words with counts
    for sentence in sent_tokenize(corpus_):
        sentence_counts = {}  # bag of words for this sentence
        for token in word_tokenize(sentence):
            if token not in stopword:
                sentence_counts[token] = sentence_counts.get(token, 0) + 1
        updated_sentences.append(sentence_counts.keys())
        updated_corpus_all.update(sentence_counts)
    return updated_corpus_all, updated_sentences
# print sorted list of key words based on the count of words
def print_sorted_dict_byval(dict_):
    """Print key/value pairs sorted by descending value, then by key."""
    # BUG fix for Python 3: dict.iteritems() and tuple-parameter lambdas
    # (lambda (k, v): ...) were both removed; .items() with an indexed
    # key function behaves identically on Python 2 as well.
    for key, value in sorted(dict_.items(), key=lambda kv: (-kv[1], kv[0])):
        print(key, value)
|
# -*- coding: utf-8 -*-
# flake8: noqa
# Generated by Django 1.10.7 on 2017-06-08 15:27
from __future__ import unicode_literals
import ckeditor_uploader.fields
from django.db import migrations, models
class Migration(migrations.Migration):
    """Create the PartnersPage CMS model: a dealers/partners landing page with
    per-language (ru/en/fr) variants of every text field, a "how to become a
    dealer" block, and a "questions" block."""

    dependencies = [
        ('pages', '0012_auto_20170531_1612'),
    ]

    operations = [
        migrations.CreateModel(
            name='PartnersPage',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Page title and its language variants.
                ('title', models.CharField(max_length=255, verbose_name='Заголовок страницы')),
                ('title_ru', models.CharField(max_length=255, null=True, verbose_name='Заголовок страницы')),
                ('title_en', models.CharField(max_length=255, null=True, verbose_name='Заголовок страницы')),
                ('title_fr', models.CharField(max_length=255, null=True, verbose_name='Заголовок страницы')),
                ('subtitle', ckeditor_uploader.fields.RichTextUploadingField(blank=True, max_length=4096, null=True, verbose_name='Подзаголовок страницы')),
                ('subtitle_ru', ckeditor_uploader.fields.RichTextUploadingField(blank=True, max_length=4096, null=True, verbose_name='Подзаголовок страницы')),
                ('subtitle_en', ckeditor_uploader.fields.RichTextUploadingField(blank=True, max_length=4096, null=True, verbose_name='Подзаголовок страницы')),
                ('subtitle_fr', ckeditor_uploader.fields.RichTextUploadingField(blank=True, max_length=4096, null=True, verbose_name='Подзаголовок страницы')),
                # "How to become a dealer?" block.
                ('howto_title', models.CharField(blank=True, max_length=255, null=True, verbose_name='Заголовок блока "Как стать дилером?"')),
                ('howto_title_ru', models.CharField(blank=True, max_length=255, null=True, verbose_name='Заголовок блока "Как стать дилером?"')),
                ('howto_title_en', models.CharField(blank=True, max_length=255, null=True, verbose_name='Заголовок блока "Как стать дилером?"')),
                ('howto_title_fr', models.CharField(blank=True, max_length=255, null=True, verbose_name='Заголовок блока "Как стать дилером?"')),
                ('howto_subtitle', models.CharField(blank=True, max_length=255, null=True, verbose_name='Подзаголовок блока "Как стать дилером?"')),
                ('howto_subtitle_ru', models.CharField(blank=True, max_length=255, null=True, verbose_name='Подзаголовок блока "Как стать дилером?"')),
                ('howto_subtitle_en', models.CharField(blank=True, max_length=255, null=True, verbose_name='Подзаголовок блока "Как стать дилером?"')),
                ('howto_subtitle_fr', models.CharField(blank=True, max_length=255, null=True, verbose_name='Подзаголовок блока "Как стать дилером?"')),
                ('howto_body', ckeditor_uploader.fields.RichTextUploadingField(blank=True, verbose_name='Контент блока "Как стать дилером?"')),
                ('howto_body_ru', ckeditor_uploader.fields.RichTextUploadingField(blank=True, null=True, verbose_name='Контент блока "Как стать дилером?"')),
                ('howto_body_en', ckeditor_uploader.fields.RichTextUploadingField(blank=True, null=True, verbose_name='Контент блока "Как стать дилером?"')),
                ('howto_body_fr', ckeditor_uploader.fields.RichTextUploadingField(blank=True, null=True, verbose_name='Контент блока "Как стать дилером?"')),
                ('howto_button_caption', models.CharField(blank=True, max_length=50, null=True, verbose_name='Текст кнопки блока "Как стать дилером?"')),
                ('howto_button_caption_ru', models.CharField(blank=True, max_length=50, null=True, verbose_name='Текст кнопки блока "Как стать дилером?"')),
                ('howto_button_caption_en', models.CharField(blank=True, max_length=50, null=True, verbose_name='Текст кнопки блока "Как стать дилером?"')),
                ('howto_button_caption_fr', models.CharField(blank=True, max_length=50, null=True, verbose_name='Текст кнопки блока "Как стать дилером?"')),
                # "Any questions?" block.
                ('questions_title_left', models.CharField(blank=True, max_length=255, null=True, verbose_name='Заголовок блока "Есть вопросы? (слева)"')),
                ('questions_title_left_ru', models.CharField(blank=True, max_length=255, null=True, verbose_name='Заголовок блока "Есть вопросы? (слева)"')),
                ('questions_title_left_en', models.CharField(blank=True, max_length=255, null=True, verbose_name='Заголовок блока "Есть вопросы? (слева)"')),
                ('questions_title_left_fr', models.CharField(blank=True, max_length=255, null=True, verbose_name='Заголовок блока "Есть вопросы? (слева)"')),
                ('questions_title', models.CharField(blank=True, max_length=255, null=True, verbose_name='Заголовок блока "Есть вопросы?"')),
                ('questions_title_ru', models.CharField(blank=True, max_length=255, null=True, verbose_name='Заголовок блока "Есть вопросы?"')),
                ('questions_title_en', models.CharField(blank=True, max_length=255, null=True, verbose_name='Заголовок блока "Есть вопросы?"')),
                ('questions_title_fr', models.CharField(blank=True, max_length=255, null=True, verbose_name='Заголовок блока "Есть вопросы?"')),
                ('questions_subtitle', models.TextField(blank=True, null=True, verbose_name='Подзаголовок блока "Есть вопросы?"')),
                ('questions_subtitle_ru', models.TextField(blank=True, null=True, verbose_name='Подзаголовок блока "Есть вопросы?"')),
                ('questions_subtitle_en', models.TextField(blank=True, null=True, verbose_name='Подзаголовок блока "Есть вопросы?"')),
                ('questions_subtitle_fr', models.TextField(blank=True, null=True, verbose_name='Подзаголовок блока "Есть вопросы?"')),
            ],
            options={
                'verbose_name': 'Страница "Дилеры"',
            },
        ),
    ]
|
# -*- coding: utf-8 -*-
"""
Created on Fri Apr 23 17:04:33 2021
@author: THIS-PC
"""
from tensorflow import keras
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
from tensorflow.keras import regularizers
from tensorflow.keras import metrics
import scipy.misc
import os
import numpy as np
from tensorflow.keras.models import *
from tensorflow.keras.layers import *
from tensorflow.keras.optimizers import *
from tensorflow.keras.callbacks import ModelCheckpoint, LearningRateScheduler
import math
from PIL import Image
from tqdm import tqdm
import random
import os.path
import imageio
def getPatches(watermarked_image, clean_image, mystride):
    """Cut both images into overlapping 256x256 patches.

    Each image is padded up to the next multiple of 256 (watermarked side
    padded with ones, clean side with 255s); clean patches are rescaled to
    [0, 1] by dividing by 255.  Returns the two patch stacks as numpy arrays.
    """
    def _pad(img, fill):
        # Pad to the next multiple of 256 in both dimensions.
        rows = (img.shape[0] // 256 + 1) * 256
        cols = (img.shape[1] // 256 + 1) * 256
        canvas = np.ones((rows, cols)) * fill
        canvas[:img.shape[0], :img.shape[1]] = img
        return canvas

    padded = _pad(watermarked_image, 1)
    watermarked_patches = [
        padded[r:r + 256, c:c + 256]
        for r in range(0, padded.shape[0] - 256, mystride)
        for c in range(0, padded.shape[1] - 256, mystride)
    ]

    padded = _pad(clean_image, 255)
    clean_patches = [
        padded[r:r + 256, c:c + 256] / 255
        for r in range(0, padded.shape[0] - 256, mystride)
        for c in range(0, padded.shape[1] - 256, mystride)
    ]

    return np.array(watermarked_patches), np.array(clean_patches)
input_size = (256,256,1)
def unet(pretrained_weights = None,input_size = input_size):
    """Build the U-Net generator (uncompiled keras Model).

    Classic encoder/decoder with skip connections: four down-sampling stages
    (64..512 filters, dropout on the two deepest), a 1024-filter bottleneck,
    and four up-sampling stages that concatenate the matching encoder output.
    The head is a 2-filter 3x3 conv followed by a 1x1 sigmoid conv.
    """
    he = 'he_normal'

    def double_conv(tensor, filters):
        # Two 3x3 same-padded ReLU convolutions, as in the original U-Net.
        tensor = Conv2D(filters, 3, activation='relu', padding='same', kernel_initializer=he)(tensor)
        return Conv2D(filters, 3, activation='relu', padding='same', kernel_initializer=he)(tensor)

    def up_merge(tensor, skip, filters):
        # 2x upsample + 2x2 conv, then concatenate with the encoder skip tensor.
        up = Conv2D(filters, 2, activation='relu', padding='same', kernel_initializer=he)(
            UpSampling2D(size=(2, 2))(tensor))
        return concatenate([skip, up])

    inputs = Input(input_size)
    enc1 = double_conv(inputs, 64)
    enc2 = double_conv(MaxPooling2D(pool_size=(2, 2))(enc1), 128)
    enc3 = double_conv(MaxPooling2D(pool_size=(2, 2))(enc2), 256)
    enc4 = Dropout(0.5)(double_conv(MaxPooling2D(pool_size=(2, 2))(enc3), 512))
    bottleneck = Dropout(0.5)(double_conv(MaxPooling2D(pool_size=(2, 2))(enc4), 1024))
    dec6 = double_conv(up_merge(bottleneck, enc4, 512), 512)
    dec7 = double_conv(up_merge(dec6, enc3, 256), 256)
    dec8 = double_conv(up_merge(dec7, enc2, 128), 128)
    dec9 = double_conv(up_merge(dec8, enc1, 64), 64)
    dec9 = Conv2D(2, 3, activation='relu', padding='same', kernel_initializer=he)(dec9)
    outputs = Conv2D(1, 1, activation='sigmoid')(dec9)
    return Model(inputs=inputs, outputs=outputs)
def get_optimizer():
    """Optimizer used by both the discriminator and the composite GAN."""
    adam_opt = Adam(lr=1e-4)
    return adam_opt
def build_discriminator(input_size = input_size):
    """Build and compile the PatchGAN discriminator.

    Takes a pair of images (candidate clean image, watermarked image),
    concatenated on the channel axis, and emits a 16x16x1 map of per-patch
    realness scores.  Compiled with MSE loss and Adam(lr=1e-4).
    """
    base_filters = 64

    def downsample(tensor, filters, f_size=4, bn=True):
        # Strided 4x4 conv + LeakyReLU, optionally followed by batch-norm.
        tensor = Conv2D(filters, kernel_size=f_size, strides=2, padding='same')(tensor)
        tensor = LeakyReLU(alpha=0.2)(tensor)
        if bn:
            tensor = BatchNormalization(momentum=0.8)(tensor)
        return tensor

    img_A = Input(input_size)
    img_B = Input(input_size)
    stacked = Concatenate(axis=-1)([img_A, img_B])
    out = downsample(stacked, base_filters, bn=False)
    out = downsample(out, base_filters * 2)
    out = downsample(out, base_filters * 4)
    out = downsample(out, base_filters * 4)
    validity = Conv2D(1, kernel_size=4, strides=1, padding='same', activation='sigmoid')(out)
    discriminator = Model([img_A, img_B], validity)
    discriminator.compile(loss='mse', optimizer=Adam(lr=1e-4), metrics=['accuracy'])
    return discriminator
def train_gan(generator,discriminator, ep_start=1, epochs=1, batch_size=128):
    """Adversarial training loop for the watermark-removal GAN.

    For every epoch, each image pair (degraded under data/A/, ground truth
    under data/B/) is cut into 256x256 patches; the discriminator is trained
    on real and generated pairs, then the composite GAN updates the generator.

    :param generator: keras generator model (see unet())
    :param discriminator: compiled discriminator (see build_discriminator())
    :param ep_start: first epoch number (useful when resuming training)
    :param epochs: last epoch number, inclusive
    :param batch_size: number of patches per training batch
    """
    # NOTE(review): both listings read 'data/A/' — the clean file names are
    # assumed to be identical inside 'data/B/'; confirm this is intended.
    list_deg_images= os.listdir('data/A/')
    list_clean_images= os.listdir('data/A/')
    list_deg_images.sort()
    list_clean_images.sort()
    adam = get_optimizer()
    gan = get_gan_network(discriminator, generator, adam)
    for e in range(ep_start, epochs+1):
        print ('\n Epoch:' ,e)
        for im in tqdm(range (len(list_deg_images))):
            deg_image_path = ('data/A/'+list_deg_images[im])
            deg_image = Image.open(deg_image_path)# /255.0
            # Round-trip through a grayscale PNG on disk so that plt.imread
            # yields float pixel values.
            deg_image = deg_image.convert('L')
            deg_image.save('curr_deg_image.png')
            deg_image = plt.imread('curr_deg_image.png')
            clean_image_path = ('data/B/'+list_clean_images[im])
            clean_image = Image.open(clean_image_path)# /255.0
            clean_image = clean_image.convert('L')
            clean_image.save('curr_clean_image.png')
            clean_image = plt.imread('curr_clean_image.png')#[:,:,0]
            wat_batch, gt_batch = getPatches(deg_image,clean_image,mystride=128+64)
            # Patches that do not fill a whole batch are dropped.
            batch_count = wat_batch.shape[0] // batch_size
            for b in (range(batch_count)):
                seed= range(b*batch_size, (b*batch_size) + batch_size)
                b_wat_batch = wat_batch[seed].reshape(batch_size,256,256,1)
                b_gt_batch = gt_batch[seed].reshape(batch_size,256,256,1)
                generated_images = generator.predict(b_wat_batch)
                # PatchGAN targets: 16x16x1 maps of ones (real) / zeros (fake).
                valid = np.ones((b_gt_batch.shape[0],) + (16, 16, 1))
                fake = np.zeros((b_gt_batch.shape[0],) + (16, 16, 1))
                discriminator.trainable = True
                discriminator.train_on_batch([b_gt_batch, b_wat_batch], valid)
                discriminator.train_on_batch([generated_images, b_wat_batch], fake)
                # Freeze the discriminator while the generator is updated.
                discriminator.trainable = False
                gan.train_on_batch([b_wat_batch], [valid, b_gt_batch])
    # if (e == 1 or e % 2 == 0):
    # evaluate(generator,discriminator,e)
    # return generator,discriminator
def get_gan_network(discriminator, generator, optimizer,input_size = input_size):
    """Chain the generator and the (frozen) discriminator into one model.

    The combined model maps a watermarked image to [discriminator verdict,
    restored image] and trains the generator with MSE (adversarial, weight 1)
    plus binary cross-entropy against the ground truth (weight 100).
    """
    discriminator.trainable = False
    raw_input_layer = Input(input_size)
    restored = generator(raw_input_layer)
    realness = discriminator([restored, raw_input_layer])
    combined = Model(inputs=[raw_input_layer], outputs=[realness, restored])
    combined.compile(loss=['mse','binary_crossentropy'],loss_weights=[1, 100], optimizer=optimizer,metrics = ['accuracy'])
    return combined
def psnr(img1, img2):
    """Peak signal-to-noise ratio between two images with values in [0, 1].

    Identical images yield 100 (sentinel) instead of +infinity.
    """
    PIXEL_MAX = 1.0
    mse = np.mean((img1 - img2) ** 2)
    if mse == 0:
        return 100
    return 20 * math.log10(PIXEL_MAX / math.sqrt(mse))
def split2(dataset,size,h,w):
    """Split *size* images of shape (h, w, c) into 256x256 tiles, row-major.

    :param dataset: array-like of shape (size, h, w, c); h and w must be
        multiples of 256
    :return: numpy array of shape (n_tiles, 256, 256, c)
    """
    tile = 256
    chunks = []
    for idx in range(size):
        img = dataset[idx]
        for top in range(0, h, tile):
            for left in range(0, w, tile):
                chunks.append(img[top:top + tile, left:left + tile, :])
    return np.array(chunks)
def merge_image2(splitted_images, h,w):
    """Reassemble 256x256 tiles (row-major, as produced by split2) into one
    image of shape (h, w, 1).
    """
    tile = 256
    canvas = np.zeros(((h, w, 1)))
    ind = 0
    for top in range(0, h, tile):
        for left in range(0, w, tile):
            canvas[top:top + tile, left:left + tile, :] = splitted_images[ind]
            ind += 1
    return np.array(canvas)
def predic(generator, epoch):
    """Run the generator over the 31 validation images and save the outputs.

    Each image under CLEAN/VALIDATION/DATA/ (named 1.png .. 31.png) is padded
    to a multiple of 256 with white, tiled, restored tile by tile, stitched
    back together, cropped to the original size and written as an 8-bit PNG
    under Results/epoch<epoch>/.
    """
    if not os.path.exists('Results/epoch'+str(epoch)):
        os.makedirs('Results/epoch'+str(epoch))
    for i in range(0,31):
        watermarked_image_path = ('CLEAN/VALIDATION/DATA/'+ str(i+1) + '.png')
        test_image = plt.imread(watermarked_image_path)
        # Pad up to the next multiple of 256 with white (value 1.0).
        h = ((test_image.shape [0] // 256) +1)*256
        w = ((test_image.shape [1] // 256 ) +1)*256
        test_padding=np.zeros((h,w))+1
        test_padding[:test_image.shape[0],:test_image.shape[1]]=test_image
        test_image_p=split2(test_padding.reshape(1,h,w,1),1,h,w)
        predicted_list=[]
        # Restore one 256x256 tile at a time (batch of 1).
        for l in range(test_image_p.shape[0]):
            predicted_list.append(generator.predict(test_image_p[l].reshape(1,256,256,1)))
        predicted_image = np.array(predicted_list)#.reshape()
        # Stitch tiles back together, then crop the padding off.
        predicted_image=merge_image2(predicted_image,h,w)
        predicted_image=predicted_image[:test_image.shape[0],:test_image.shape[1]]
        predicted_image=predicted_image.reshape(predicted_image.shape[0],predicted_image.shape[1])
        # Back to 8-bit pixel values for writing to disk.
        predicted_image = (predicted_image[:,:])*255
        predicted_image =predicted_image.astype(np.uint8)
        imageio.imwrite('Results/epoch'+str(epoch)+'/predicted'+str(i+1)+'.png', predicted_image)
### if you want to evaluate each epoch:
# def evaluate(generator,discriminator,epoch):
# predic(generator,epoch)
# avg_psnr=0
# qo=0
# for i in range (0,31):
# test_image= plt.imread('CLEAN/VALIDATION/GT/'+ str(i+1) + '.png')
# predicted_image= plt.imread('Results/epoch'+str(epoch)+'/predicted'+ str(i+1) + '.png')
# avg_psnr= avg_psnr + psnr(test_image,predicted_image)
# qo=qo+1
# avg_psnr=avg_psnr/qo
# print('psnr= ',avg_psnr)
# if not os.path.exists('Results/epoch'+str(epoch)+'/weights'):
# os.makedirs('Results/epoch'+str(epoch)+'/weights')
# discriminator.save_weights("Results/epoch"+str(epoch)+"/weights/discriminator_weights.h5")
# generator.save_weights("Results/epoch"+str(epoch)+"/weights/generator_weights.h5")
##################################
# Training entry point: build fresh models and run the adversarial loop.
epo = 1
generator = unet()
discriminator = build_discriminator()
### to load pretrained models ################""
# epo = 41
# generator.load_weights("Results/epoch"+str(epo-1)+"/weights/generator_weights.h5")
# discriminator.load_weights("Results/epoch"+str(epo-1)+"/weights/discriminator_weights.h5")
###############################################
train_gan(generator,discriminator, ep_start =epo, epochs=80, batch_size=4)
import numpy as np
import PIL.Image
import matplotlib.pyplot as plt
def load_image(filename, max_size=None, shape=None):
    """Load an image from disk as a float32 numpy array.

    If *max_size* is given, the image is scaled so that its largest side
    equals max_size (aspect ratio preserved).  If *shape* is given, the
    image is then resized to exactly that (width, height).  Both resizes
    use the LANCZOS resampling filter.
    """
    image = PIL.Image.open(filename)
    if max_size is not None:
        # Factor that maps the largest side onto max_size.
        factor = max_size / np.max(image.size)
        new_size = (np.array(image.size) * factor).astype(int)
        image = image.resize(new_size, PIL.Image.LANCZOS)
    if shape is not None:
        image = image.resize(shape, PIL.Image.LANCZOS)
    return np.float32(image)
def add_one_dim(image):
    """Prepend a batch axis: (h, w, c) -> (1, h, w, c).

    VGG19 requires input dimension to be (batch, height, width, channel).
    """
    return np.reshape(image, (1,) + image.shape)
def save_image(image, filename):
    """Clip pixel values to [0, 255], convert to uint8, write as JPEG."""
    pixels = np.clip(image, 0.0, 255.0).astype(np.uint8)
    with open(filename, "wb") as file:
        PIL.Image.fromarray(pixels).save(file, "jpeg")
def image_big(image):
    """Clip pixel values to [0, 255] and return a uint8 PIL image."""
    pixels = np.clip(image, 0.0, 255.0).astype(np.uint8)
    return PIL.Image.fromarray(pixels)
def plot_images(content_image, style_image, mixed_image):
    """Show content, mixed and style images side by side.

    Pixel values are expected in [0, 255]; they are normalized to [0, 1]
    for imshow by dividing with 255.
    """
    fig, axes = plt.subplots(1, 3, figsize=(10, 10))
    fig.subplots_adjust(hspace=0.1, wspace=0.1)
    # Smooth pixels with sinc interpolation; "nearest" would show raw pixels.
    smooth = True
    interpolation = "sinc" if smooth else "nearest"
    panels = [(content_image, "Content"), (mixed_image, "Mixed"), (style_image, "Style")]
    for ax, (img, label) in zip(axes.flat, panels):
        ax.imshow(img / 255.0, interpolation=interpolation)
        ax.set_xlabel(label)
        # Remove ticks from every plot.
        ax.set_xticks([])
        ax.set_yticks([])
    # Ensure the plot is shown correctly with multiple plots
    # in a single Notebook cell.
    plt.show()
|
# Read a line, split it into a list of characters, then empty the list
# and print the (always empty) result.
a = list(input('Enter the list'))
a.clear()
print(a)
from flask import Flask
from flask import jsonify
from flask import request
from logging.handlers import RotatingFileHandler
from chat_service import chat
from config import Config
import logging
app = Flask(__name__)
# Let Flask's own error handlers deal with exceptions instead of re-raising.
app.config['PROPAGATE_EXCEPTIONS'] = False
@app.route("/chat", methods=['POST'])
def login():
data = request.get_json()
question = data['text']
text = chat(question)
answer = [{"text":text}]
qa = "Q:::{0},A:::{1}".format(question,text)
app.logger.error(qa)
return jsonify({"answer": answer}) # 返回布尔值
class RequestFormatter(logging.Formatter):
    """Formatter that injects request-scoped fields into each log record."""

    def format(self, record):
        # Expose the client IP and the requested URL to the format string,
        # then delegate to the stock formatter.
        record.remote_addr = request.remote_addr
        record.url = request.url
        return super().format(record)
def create_logger():
    """Configure the 'flask.app' logger with console and rotating-file handlers.

    Call once before app.run(); calling it again would attach duplicate
    handlers and double every log line.
    """
    import os
    qa_logger = logging.getLogger('flask.app')
    qa_logger.setLevel(logging.WARNING)
    console_handler = logging.StreamHandler()
    console_formatter = logging.Formatter(fmt='[%(asctime)s] [%(message)s]')
    console_handler.setFormatter(console_formatter)
    qa_logger.addHandler(console_handler)
    # BUGFIX: RotatingFileHandler raises FileNotFoundError when the logs/
    # directory does not exist yet, so make sure it is there first.
    os.makedirs('logs', exist_ok=True)
    # Roll over at 100 MB, keep up to 10 old log files.
    file_handler = RotatingFileHandler('logs/qa.log', maxBytes=100 * 1024 * 1024, backupCount=10)
    file_formatter = RequestFormatter(fmt='[%(asctime)s] [%(message)s]')
    file_handler.setFormatter(file_formatter)
    file_handler.setLevel(logging.WARNING)
    qa_logger.addHandler(file_handler)
if __name__ == '__main__':
    # Configure logging before serving requests.
    create_logger()
    app.run()
|
import sys
import os
from conans.client.output import ConanOutput
from conans.client.rest.uploader_downloader import Downloader
from conans.client.tools.files import unzip, check_md5, check_sha1, check_sha256
from conans.errors import ConanException
# Requests-like session used by download(); presumably assigned elsewhere by
# the Conan client at startup — confirm before relying on it being set.
_global_requester = None
def get(url, md5='', sha1='', sha256=''):
    """Download *url*, optionally verify its checksum(s), unzip it and delete
    the temporary archive.
    """
    filename = os.path.basename(url)
    download(url, filename)
    # Run every checksum verification the caller supplied.
    for checker, expected in ((check_md5, md5), (check_sha1, sha1), (check_sha256, sha256)):
        if expected:
            checker(filename, expected)
    unzip(filename)
    os.unlink(filename)
def ftp_download(ip, filename, login='', password=''):
    """Fetch *filename* from the FTP server at *ip* into the current directory.

    :param ip: FTP host name or address
    :param filename: remote path; the file is saved under its base name
    :param login: FTP user name (empty string means anonymous)
    :param password: FTP password
    :raises ConanException: on any FTP error
    """
    import ftplib
    ftp = None
    try:
        # BUGFIX: the credentials used to be passed to the FTP() constructor
        # (which already performs the login) and then login() was called
        # again, failing with "already logged in" whenever credentials were
        # supplied.  Connect first, log in exactly once.
        ftp = ftplib.FTP(ip)
        ftp.login(login, password)
        filepath, filename = os.path.split(filename)
        if filepath:
            ftp.cwd(filepath)
        with open(filename, 'wb') as f:
            ftp.retrbinary('RETR ' + filename, f.write)
    except Exception as e:
        raise ConanException("Error in FTP download from %s\n%s" % (ip, str(e)))
    finally:
        # ftp can be None when the connection itself failed.
        if ftp is not None:
            try:
                ftp.quit()
            except Exception:
                pass
def download(url, filename, verify=True, out=None, retry=2, retry_wait=5, overwrite=False,
             auth=None, headers=None):
    """Download *url* into *filename* using the shared requester.

    When *verify* is true the server certificate is checked against the
    bundled CA certificate list.
    """
    if out is None:
        out = ConanOutput(sys.stdout, True)
    if verify:
        # We check the certificate using a list of known verifiers
        import conans.client.rest.cacert as cacert
        verify = cacert.file_path
    fetcher = Downloader(_global_requester, out, verify=verify)
    fetcher.download(url, filename, retry=retry, retry_wait=retry_wait, overwrite=overwrite,
                     auth=auth, headers=headers)
    out.writeln("")
|
# -*- coding: utf-8 -*-
import re
from math import ceil
from ipcalc import Network
from django.db import models
from django.core.exceptions import ValidationError
from django.core.urlresolvers import reverse
from datetime import datetime
from django.utils.timezone import get_default_timezone
STATUS_ALLOCATED = u'allocated'
STATUS_ASSIGNED = u'assigned'
STATUS_RESERVED = u'reserved'
BLOCK_STATUSES = [STATUS_ALLOCATED, STATUS_RESERVED]
class Vrf(models.Model):
    """
    Stores a single VRF table entry, related to :model:`ipam.Prefix4`
    """
    name = models.SlugField(max_length=64, verbose_name=u'name', unique=True, help_text=u'VRF name')
    rd = models.CharField(max_length=16, verbose_name=u'route-distinguisher', unique=True,
                          help_text=u'Route Distinguisher')
    description = models.TextField(verbose_name=u'description', blank=True)
    # Optional parent VRF: VRFs form a tree (see recursive_children()).
    parent = models.ForeignKey('self', null=True, blank=True, verbose_name=u'Parent VRF')
    class Meta:
        ordering = ['name', ]
        verbose_name = u'VRF'
        verbose_name_plural = u'VRFs'
        permissions = (
            ('view', 'Can view IPAM module content'),
        )
    def __str__(self):
        return 'VRF ' + self.name
    def __unicode__(self):
        return unicode(self.__str__())
    def recursive_children(self):
        """Return a list containing this VRF and all of its descendants."""
        def recursive_list(vrf):
            """
            Return children VRFs
            :param vrf: current VRF
            :type vrf: Vrf
            :return:
            """
            result = []
            for v in vrf.vrf_set.all():
                result += recursive_list(v)
            result.append(vrf)
            return result
        return recursive_list(self)
    def prefixes(self, root_only=False, networks_only=False, hosts_only=False, statuses=None, subnet=None,
                 recursion=False):
        """
        Return QuerySet with VRF's prefixes
        :param root_only: return top-level's prefixes only
        :type root_only: bool
        :param networks_only: return networks only (without hosts)
        :type networks_only: bool
        :param hosts_only: return hosts only (without networks)
        :type hosts_only: bool
        :param statuses: statuses list for filter
        :type statuses: list
        :param subnet: limit results to prefixes fully contained in this subnet
        :type subnet: str
        :param recursion: Include children VRFs
        :type recursion: bool
        :return: QuerySet
        :rtype: django.db.models.QuerySet
        """
        args = {}
        if statuses:
            args['status__in'] = statuses
        if root_only:
            args['parent'] = None
        # Hosts are stored as size == 1 (/32) prefixes; networks are larger.
        if networks_only:
            args['size__gt'] = 1
        elif hosts_only:
            args['size'] = 1
        if subnet:
            network = Network(subnet)
            args['first_ip_dec__gte'] = network.ip
            args['last_ip_dec__lte'] = network.broadcast_long()
        if recursion:
            args['vrf__in'] = self.recursive_children()
            return Prefix4.objects.filter(**args)
        else:
            return self.prefixes_list if len(args) == 0 else self.prefixes_list.filter(**args)
    def networks(self):
        """All network (non-host) prefixes in this VRF."""
        return self.prefixes(networks_only=True)
    def networks_root(self):
        """Top-level network prefixes in this VRF."""
        return self.prefixes(networks_only=True, root_only=True)
    def hosts(self):
        """All host (/32) prefixes in this VRF."""
        return self.prefixes(hosts_only=True)
    def hosts_root(self):
        """Top-level host prefixes in this VRF."""
        return self.prefixes(hosts_only=True, root_only=True)
    def size_total(self):
        """Total address count covered by top-level prefixes (0 when empty)."""
        r = self.prefixes(root_only=True).aggregate(sum_size=models.Sum('size'))['sum_size']
        return r if r else 0
    def size_reserved(self):
        """Address count of prefixes in 'reserved' status."""
        r = self.prefixes(statuses=[STATUS_RESERVED, ]).aggregate(sum_size=models.Sum('size'))['sum_size']
        return r if r else 0
    def size_allocated(self):
        """Address count of prefixes in 'allocated' status."""
        r = self.prefixes(statuses=[STATUS_ALLOCATED, ]).aggregate(sum_size=models.Sum('size'))['sum_size']
        return r if r else 0
    def size_free(self):
        """Addresses neither allocated nor reserved."""
        return self.size_total() - self.size_allocated() - self.size_reserved()
    def fqdn(self, ip):
        """Return the FQDN(s) of the most specific prefix containing *ip*, or None."""
        ip = Network(ip)
        prefix = self.prefixes().filter(first_ip_dec__lte=ip.ip, last_ip_dec__gte=ip.ip, size__gte=ip.size()).last()
        return prefix.fqdn() if prefix else None
    def delete(self, *args, **kwargs):
        # Delete prefixes one by one so Prefix4.delete() can re-parent children.
        for prefix in self.prefixes().all():
            prefix.delete()
        super(Vrf, self).delete(*args, **kwargs)
    def journal(self):
        """Journal entries that reference this VRF."""
        from www.models import Journal
        return Journal.objects.by_objects(self)
    def get_absolute_url(self):
        return reverse('ipam.vrf_detail', kwargs={'slug': self.name, })
    def get_update_url(self):
        return reverse('ipam.vrf_update', kwargs={'slug': self.name, })
    def get_delete_url(self):
        return reverse('ipam.vrf_delete', kwargs={'slug': self.name, })
    def clean(self):
        super(Vrf, self).clean()
        # Reject names that start with the reserved "vrf" pattern.
        if self.name[:3] == u'vrf':
            raise ValidationError(u'"vrf..." is bad name for the VRF')
    def save(self, user=None, *args, **kwargs):
        """Save the VRF; when *user* is given, write an audit journal entry."""
        message = None
        if user:
            if self.id:
                # Existing row: diff against the stored version for the audit log.
                p = Vrf.objects.get(id=self.id)
                message = u'User {0} ({1}) modified VRF {2}'.format(user.profile.get_short_name(), user.email,
                                                                    self.name)
                if p.name != self.name:
                    message += u' New name: "{0}".'.format(self.name)
                if p.rd != self.rd:
                    message += u' New RD: {0}'.format(self.rd)
                if p.description != self.description:
                    message += u' New description: "{0}"'.format(self.description)
            else:
                message = u'User {0} ({1}) create VRF table {2}'.format(user.profile.get_short_name(), user.email,
                                                                        self.name)
        super(Vrf, self).save(*args, **kwargs)
        if message and user:
            from www.models import Journal
            from www.constatnts import JL_INFO
            Journal.objects.create(level=JL_INFO, message=message, objects=[self, user])
class Prefix4Manager(models.Manager):
    """Manager with a VRF-scoped query helper."""

    def by_vrf(self, vrf, networks_only=False, hosts_only=False):
        """Prefixes of *vrf*, optionally limited to networks or hosts only."""
        # Hosts are stored as /32 prefixes, i.e. size == 1.
        criteria = {'vrf': vrf}
        if networks_only:
            criteria['size__gt'] = 1
        elif hosts_only:
            criteria['size'] = 1
        return self.filter(**criteria)
class Prefix4(models.Model):
    """An IPv4 prefix (network or /32 host) inside a VRF.

    Prefixes form a containment tree via ``parent``; ``first_ip_dec`` /
    ``last_ip_dec`` / ``size`` are denormalized from ``prefix`` on save so
    containment queries can run in the database.
    """
    STATUSES = (
        (STATUS_ALLOCATED, u'Allocated'),
        (STATUS_ASSIGNED, u'Assigned'),
        (STATUS_RESERVED, u'Reserved'),
    )
    vrf = models.ForeignKey('Vrf', verbose_name=u'VRF', related_name='prefixes_list')
    prefix = models.CharField(verbose_name=u'IP Address', max_length=18)
    size = models.IntegerField(verbose_name=u'subnet size', blank=True, null=True)
    description = models.TextField(verbose_name=u'description', blank=True)
    status = models.CharField(verbose_name=u'Status', max_length=64, choices=STATUSES, default=STATUS_ASSIGNED)
    parent = models.ForeignKey('self', verbose_name=u'parent', related_name='child', blank=True, null=True,
                               on_delete=models.SET_NULL)
    domain = models.CharField(max_length=255, verbose_name=u'domain', blank=True)
    host_name = models.CharField(max_length=255, verbose_name=u'host name', blank=True)
    # Sort key: first address plus prefix length scaled into the fraction.
    sequence_number = models.FloatField(blank=True, null=True)
    first_ip_dec = models.IntegerField(blank=True, null=True)
    last_ip_dec = models.IntegerField(blank=True, null=True)
    objects = Prefix4Manager()
    class Meta:
        ordering = ['sequence_number', ]
        unique_together = ('vrf', 'prefix')
        verbose_name = 'IPv4 prefix'
        verbose_name_plural = u'IPv4 Prefixes'
    def __str__(self):
        if self.size == 1:
            return u'Host {0} [vrf:{1}]'.format(self.ip, self.vrf.name)
        else:
            return u'Prefix {0} [vrf:{1}]'.format(self.prefix, self.vrf.name)
    def __unicode__(self):
        return unicode(self.__str__())
    def length(self):
        """Prefix length in bits (the part after the '/')."""
        return int(self.prefix.split('/')[1])
    def vrf_list(self, exclude_self=False):
        """All prefixes of this prefix's VRF, optionally without itself."""
        if self.id and exclude_self:
            return Prefix4.objects.by_vrf(self.vrf).exclude(id=self.id)
        else:
            return Prefix4.objects.by_vrf(self.vrf)
    def full_domain(self):
        """Absolute domain: own domain if it ends with '.', else joined with the parent's."""
        if self.domain and self.domain[-1] == u'.':
            return self.domain
        elif self.domain:
            return u'{0}.{1}'.format(self.domain, self.parent.full_domain() if self.parent else '')
        else:
            return self.parent.full_domain() if self.parent else ''
    def fqdn_list(self):
        """FQDNs built from host_name (comma-separated; absolute names kept as-is)."""
        if self.host_name:
            return [name.strip() if name[-1] == u'.' else (name.strip() + u'.' + self.full_domain()) for name in
                    self.host_name.split(u',')]
        else:
            return [self.full_domain(), ]
    def fqdn(self):
        return u', '.join(self.fqdn_list())
    def prefixes_lower(self, root_only=False, networks_only=False, hosts_only=False, statuses=None,
                       ignore_stored_values=False):
        """Prefixes contained in this one (direct children when root_only)."""
        if self.id and root_only and not ignore_stored_values:
            # Fast path: use the stored parent links instead of range queries.
            args = {}
            if networks_only:
                args['size__gt'] = 1
            elif hosts_only:
                args['size'] = 1
            if statuses:
                args['status__in'] = statuses
            return self.child if len(args) == 0 else self.child.filter(**args)
        network = Network(str(self.prefix))
        f_ip = network.ip
        l_ip = network.broadcast_long()
        qs = self.vrf.prefixes(
            networks_only=networks_only,
            hosts_only=hosts_only,
            statuses=statuses).filter(first_ip_dec__gte=f_ip, last_ip_dec__lte=l_ip)
        if ignore_stored_values:
            # TODO Add check with ignoring stored data
            pass
        else:
            if self.id:
                qs = qs.exclude(id=self.id)
        if root_only:
            return qs.filter(parent=self.find_parent())
        else:
            return qs
    def prefixes_upper(self, networks_only=False, hosts_only=False, statuses=None):
        """Prefixes that contain this one (strictly larger)."""
        network = Network(str(self.prefix))
        f_ip = network.ip
        l_ip = network.broadcast_long()
        if self.id:
            return self.vrf.prefixes(networks_only=networks_only, hosts_only=hosts_only, statuses=statuses).filter(
                first_ip_dec__lte=f_ip, last_ip_dec__gte=l_ip, size__gt=network.size()).exclude(id=self.id)
        else:
            return self.vrf.prefixes(networks_only=networks_only, hosts_only=hosts_only, statuses=statuses).filter(
                first_ip_dec__lte=f_ip, last_ip_dec__gte=l_ip, size__gt=network.size())
    def find_parent(self):
        """Most specific enclosing network, or None for a top-level prefix."""
        return self.prefixes_upper(networks_only=True).last()
    def networks(self):
        return self.prefixes_lower(root_only=True, networks_only=True)
    def networks_root(self):
        return self.networks()
    def networks_recursive(self):
        return self.prefixes_lower(networks_only=True)
    def hosts(self):
        return self.prefixes_lower(root_only=True, hosts_only=True)
    def hosts_root(self):
        return self.hosts()
    def hosts_recursive(self):
        return self.prefixes_lower(hosts_only=True)
    def size_total(self):
        return self.size
    def size_allocated(self):
        """Allocated address count: whole size when self is allocated, else the children's sum."""
        if self.status == STATUS_ALLOCATED:
            return self.size_total()
        else:
            r = self.prefixes_lower(statuses=[STATUS_ALLOCATED]).aggregate(sum_size=models.Sum('size'))['sum_size']
            return r if r else 0
    def size_reserved(self):
        """Reserved address count: whole size when self is reserved, else the children's sum."""
        if self.status == STATUS_RESERVED:
            return self.size_total()
        else:
            r = self.prefixes_lower(statuses=[STATUS_RESERVED]).aggregate(sum_size=models.Sum('size'))['sum_size']
            return r if r else 0
    def size_free(self):
        return self.size - self.size_allocated() - self.size_reserved()
    def allocated_percents(self):
        return int(ceil(float(self.size_allocated()) / float(self.size_total()) * 100))
    def reserved_percents(self):
        return int(ceil(float(self.size_reserved()) / float(self.size_total()) * 100))
    def free_percents(self):
        return 100 - self.allocated_percents() - self.reserved_percents()
    def journal(self):
        """Journal entries that reference this prefix."""
        from www.models import Journal
        return Journal.objects.by_objects(self)
    @property
    def ip(self):
        """Dotted-quad network address without the prefix length."""
        return Network(self.prefix).dq
    def get_absolute_url(self):
        return reverse('ipam.prefix4_detail', kwargs={'slug': self.prefix, 'vrf': self.vrf.name, })
    def get_update_url(self):
        if self.size == 1:
            return reverse('ipam.prefix4_update', kwargs={'vrf': self.vrf.name, 'slug': self.ip, })
        else:
            return reverse('ipam.prefix4_update', kwargs={'vrf': self.vrf.name, 'slug': self.prefix, })
    def get_delete_url(self):
        return reverse('ipam.prefix4_delete', kwargs={'slug': self.prefix, 'vrf': self.vrf.name, })
    def clean(self):
        """Normalize and validate the prefix string and placement rules."""
        super(Prefix4, self).clean()
        prefix = self.prefix
        # A bare address is treated as a /32 host.
        if re.match(r'(\d{1,3}\.){3}\d{1,3}[\s]*$', prefix):
            self.prefix = prefix + '/32'
        elif re.match(r'(\d{1,3}\.){3}\d{1,3}/\d{1,2}', prefix):
            pass
        else:
            raise ValidationError(u'Invalid network prefix "{0}"'.format(prefix))
        network = Network(str(self.prefix))
        # The address part must be the network address for the given length.
        if network.network().dq != network.dq:
            raise ValidationError(u'Invalid prefix length /{0}'
                                  u' for the network {1}'.format(self.prefix.split('/')[1], self.prefix.split('/')[0]))
        # qs = self.vrf_list(exclude_self=True)
        # BUGFIX: the ValidationError was constructed but never raised, so a
        # top-level prefix without a domain passed validation silently.
        if not self.find_parent() and not self.domain:
            raise ValidationError(u'Top-level prefix must have domain name')
        if self.status in BLOCK_STATUSES:
            # Allocated/reserved blocks must not nest within each other.
            p = self.prefixes_lower(statuses=BLOCK_STATUSES).first()
            if not p:
                p = self.prefixes_upper(statuses=BLOCK_STATUSES).last()
            if p:
                raise ValidationError(u'Network {0} is already {1}'.format(p.prefix, p.get_status_display().lower()))
    def save(self, recursion=True, user=None, *args, **kwargs):
        """Denormalize range fields, re-parent, save and journal the change.

        :param recursion: when True, children whose parent changed are re-saved
        :param user: acting user for the audit journal entry (optional)
        """
        self.full_clean()
        network = Network(str(self.prefix))
        self.size = network.size()
        self.first_ip_dec = network.ip
        self.last_ip_dec = self.first_ip_dec + long(self.size) - 1
        # Sort by first address; ties broken by prefix length.
        self.sequence_number = self.first_ip_dec + self.length() * 0.01
        self.parent = self.find_parent()
        if self.size == 1:
            self.domain = ''
        old_data = None
        message = None
        if user:
            if self.id:
                # Snapshot the in-memory values; the diff text is built after save.
                old_data = {
                    'prefix': self.prefix,
                    'description': self.description,
                    'status': self.status,
                    'domain': self.domain,
                    'host_name': self.host_name,
                }
            else:
                message = u'User {user} ({email}) create prefix {prefix}. Status: {status}.'.format(
                    user=user.profile.get_short_name(), email=user.email, prefix=self.__str__(), status=self.status)
                if self.description:
                    message += u' Description: {0}.'.format(self.description)
                if self.domain:
                    message += u' Domain: {0}.'.format(self.domain)
                if self.host_name:
                    message += u' Hostname: {0}.'.format(self.host_name)
        super(Prefix4, self).save(*args, **kwargs)
        if user:
            from www.models import Journal
            from www.constatnts import JL_INFO
            if old_data:
                message = u'User {user} ({email}) updated prefix {prefix}.'.format(user=user.profile.get_short_name(),
                                                                                   email=user.email,
                                                                                   prefix=self.__str__())
                if self.status != old_data[u'status']:
                    message += u' Status was changed from "{0}" to "{1}".'.format(old_data['status'], self.status)
                if self.description != old_data[u'description']:
                    message += u' New description: "{0}".'.format(self.description)
                if self.domain != old_data[u'domain']:
                    message += u' Domain was changed from "{0}" to "{1}".'.format(old_data['domain'], self.domain)
                if self.host_name != old_data[u'host_name']:
                    message += u' Hostname was changed from "{0}" to "{1}".'.format(old_data['host_name'],
                                                                                    self.host_name)
            Journal.objects.create(level=JL_INFO, message=message, objects=[user, self, ])
        if recursion:
            # Re-home any children whose parent changed because of this save.
            # (Leftover debug print statements removed.)
            for p in self.prefixes_lower():
                if p.find_parent() != p.parent:
                    p.save(recursion=False)
    def delete(self, using=None):
        # Remember the children, delete, then re-save them so they re-parent.
        child_prefix_ids = [p.id for p in self.child.all()]
        super(Prefix4, self).delete(using=using)
        for p_id in child_prefix_ids:
            Prefix4.objects.get(id=p_id).save()
    @staticmethod
    def get_by_ip(vrf, ip):
        """Most specific allocated/assigned prefix of *vrf* containing *ip*.

        *ip* may be a string, an ipcalc IP or Network, or a decimal int.
        """
        from ipcalc import IP
        if type(ip) in [unicode, str]:
            ip = IP(ip).ip
        if type(ip) in [Network, IP]:
            ip = ip.ip
        prefix = vrf.prefixes(statuses=[STATUS_ALLOCATED, STATUS_ASSIGNED],
                              recursion=True).filter(first_ip_dec__lte=ip, last_ip_dec__gte=ip).last()
        return prefix
def datetime_now():
    """Current time as an aware datetime in the project's default timezone."""
    tz = get_default_timezone()
    return datetime.now(tz=tz)
def datetime_now_str():
    """Current date formatted as YYYYMMDD00 — used as a DNS zone serial."""
    return format(datetime_now(), '%Y%m%d00')
class Domain4(models.Model):
    """A reverse-DNS zone generated from the prefixes of one VRF.

    Holds the SOA parameters plus the IP range the zone covers; PTR records
    and the serial number are derived on demand from the VRF's prefixes.
    """
    zone_types = (
        ('in', 'IN'),)
    zone = models.CharField(max_length=255, verbose_name=u'Zone FQDN', unique=True, help_text='Without finel dot.')
    ttl = models.CharField(max_length=8, default='30m', verbose_name=u'Time-to-Live')
    zone_type = models.CharField(max_length=4, choices=zone_types, default='in', verbose_name=u'Type')
    soa_name_server = models.CharField(max_length=255, default=u'ns.sibttk.net', verbose_name=u'Name server')
    soa_admin_mailbox = models.CharField(max_length=255, default=u'root.sibttk.net', verbose_name=u'Admin mailbox')
    sn = models.IntegerField(default=datetime_now_str, verbose_name=u'Serial number')
    refresh = models.CharField(max_length=8, default=u'20m', verbose_name=u'Refresh')
    retry = models.CharField(max_length=8, default=u'2m', verbose_name=u'Retry')
    expiry = models.CharField(max_length=8, default=u'2w', verbose_name=u'Expiry')
    nx = models.CharField(max_length=8, default=u'5m', verbose_name=u'NXDomain TTL')
    name_servers = models.TextField(default=u'@ IN NS ns.sibttk.net.\n@ IN NS ns2.sibttk.net.',
                                    verbose_name=u'NS resource records')
    vrf = models.ForeignKey('Vrf', verbose_name=u'VRF')
    first_ip = models.IPAddressField(verbose_name=u'First IP address')
    last_ip = models.IPAddressField(verbose_name=u'Last IP address')
    # Hash of the zone content at the last serial bump (see serial_number()).
    control_hash = models.CharField(max_length=255, blank=True, default='')
    last_updated = models.DateTimeField(default=datetime_now)
    class Meta:
        verbose_name = u'domain'
        verbose_name_plural = u'domains'
    def __unicode__(self):
        if self.id:
            return unicode(self.zone)
        else:
            return u'New Domain'
    def clean(self):
        from ipcalc import IP
        super(Domain4, self).clean()
        # BUGFIX: the original compared with '>' (rejecting every valid range)
        # and never raised the ValidationError it constructed, so the check
        # was a silent no-op.
        if IP(self.last_ip).ip < IP(self.first_ip).ip:
            raise ValidationError(u'Last IP address can\'t be lower than first IP address.')
    def serial_number(self):
        """Return the zone serial, bumping it when the zone content changed.

        A hash over all serial-relevant fields and PTR records is compared to
        the stored control_hash; on mismatch the serial is incremented and the
        row is saved.
        """
        from hashlib import sha256
        control_string = u' '.join([
            self.zone,
            self.ttl,
            self.zone_type,
            self.soa_name_server,
            self.soa_admin_mailbox,
            self.refresh,
            self.retry,
            self.expiry,
            self.nx,
            self.name_servers,
            u' '.join(['{fqdn} {value}'.format(**rr) for rr in self.ptr_list()])
        ])
        # BUGFIX: hashing a unicode string crashes on non-ASCII content under
        # Python 2; encode explicitly (identical digest for ASCII input).
        new_hash = sha256(control_string.encode('utf-8')).hexdigest()
        if self.control_hash != new_hash:
            self.sn += 1
            self.control_hash = new_hash
            self.last_updated = datetime.now(tz=get_default_timezone())
            self.save()
        return self.sn
    def ptr_list(self):
        """PTR records for every address in [first_ip, last_ip].

        Each entry is {'fqdn': reverse name with trailing dot, 'value': FQDN}.
        """
        from ipcalc import IP
        f_dec = IP(self.first_ip).ip
        l_dec = IP(self.last_ip).ip
        dec = f_dec
        result = []
        while dec <= l_dec:
            for value in Prefix4.get_by_ip(self.vrf, dec).fqdn_list():
                result.append({
                    'fqdn': IP(dec).to_reverse() + '.',
                    'value': value})
            dec += 1
        return result
import mmh3
from bitarray import bitarray
import math
class BloomFilter:
    """Bloom filter backed by a bitarray and murmur3 (mmh3) hashing.

    Sizing uses the standard optimal formulas:
    m = -n * ln(p) / (ln 2)^2 bits and k = (m / n) * ln 2 hash functions,
    where p is the target false-positive rate and n the expected word count.
    """

    def __init__(self, false_positive_rate, estimated_word_count):
        # Find size and number of hashes for the desired false-positive rate.
        self.size = int((-estimated_word_count * math.log(false_positive_rate)) / (math.log(2) ** 2))
        # BUGFIX-adjacent robustness: int() can truncate to 0 for loose error
        # rates, which would make insert()/contains() no-ops; use at least 1.
        self.hash_count = max(1, int((self.size / estimated_word_count) * math.log(2)))
        self.bit_array = bitarray(self.size)
        self.bit_array.setall(0)

    def insert(self, item):
        """Add *item* to the filter by setting its k hash positions."""
        for i in range(self.hash_count):
            location = mmh3.hash(item, i) % self.size
            self.bit_array[location] = 1

    def contains(self, item):
        """Human-readable membership check (false positives are possible).

        NOTE(review): returns descriptive strings (both truthy) rather than a
        bool; kept as-is for backward compatibility with existing callers.
        """
        for i in range(self.hash_count):
            if self.bit_array[mmh3.hash(item, i) % self.size] == 0:
                # BUGFIX: message said "real world" instead of "real word".
                return("This is not a real word")
        return("This is a real word")
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
class Consulta(models.Model):
    """A consultation/appointment record keyed by an auto-generated code."""
    codigo = models.AutoField(primary_key=True)
    # External user identifier stored as text (not a ForeignKey).
    user_codigo = models.CharField(max_length=10)
    # NOTE(review): date and hora are free-form strings rather than
    # DateField/TimeField — presumably to match an external format; confirm.
    date = models.CharField(max_length=14)
    hora = models.CharField(max_length=10)
    comentario = models.CharField(max_length=200)
#This is a sketchy Ripoff of pong using TK graphics
#There will be two players, each controlled by a different set of keys, and there will be at least one ball
import tkinter # built-in Python graphics library
import os
import random
# Module-level sprite registries filled by setup() and used by the game code.
balls = []
players = []
class Thing():
    """Base game object: nothing more than a 2-D position."""

    def __init__(self, x, y):
        # Top-left corner coordinates on the canvas.
        self.x = x
        self.y = y
class Ball(Thing):
    """A ball that bounces vertically and is served in a random direction."""

    def __init__(self, x, y):
        Thing.__init__(self, x, y)
        self.sizeX = 5
        self.sizeY = 5
        # Horizontal speed is always +/-2 so the ball never stalls;
        # vertical speed may be 0 (a flat serve).
        self.speedX = random.choice([-2, 2])
        self.speedY = random.randint(-2, 2)
        # Random fill colour. Upper bound fixed to 16**6 - 1: randint is
        # inclusive on both ends, and 16**6 itself formats as a 7-digit hex
        # string, which tkinter rejects as a colour.
        self.color = '#{0:0>6x}'.format(random.randint(0, 16 ** 6 - 1))

    def move(self):
        """Advance one frame along the current velocity."""
        self.x += self.speedX
        self.y += self.speedY

    def collideY(self):
        """Bounce off the top/bottom edges of the 400px-tall canvas."""
        if self.y + self.speedY <= 0 or self.y + self.speedY + self.sizeY >= 400:
            self.speedY = self.speedY * -1

    def drawBall(self, canvas):
        """Render the ball as an oval on *canvas*."""
        canvas.create_oval(self.x, self.y, self.x + self.sizeX, self.y + self.sizeY,
                           fill=self.color, outline="black")
#Generic Player
class Player(Thing):
    """A paddle: a 10x40 rectangle that moves vertically within the canvas."""

    def __init__(self, x, y):
        Thing.__init__(self, x, y)
        # Points scored so far (the original assigned this twice).
        self.score = 0
        self.sizeX = 10
        self.sizeY = 40
        self.color = '#{0:0>6x}'.format(0)  # '#000000' (black)

    def moveDown(self):
        """Move 10px down unless the paddle would leave the 400px canvas."""
        if self.y + self.sizeY < 400:
            self.y += 10

    def moveUp(self):
        """Move 10px up unless already at the top edge."""
        if self.y > 0:
            self.y -= 10

    def drawPlayer(self, canvas):
        """Render the paddle on *canvas* (drawn black regardless of color)."""
        canvas.create_rectangle(self.x, self.y, self.x + self.sizeX, self.y + self.sizeY,
                                fill="black", outline="black")
#Specific Player
class Player1(Player):
    """Left paddle; defends x == 0 and scores when the ball exits right."""

    def __init__(self, x, y):
        Player.__init__(self, x, y)

    def scorePoint(self, ball):
        """Award a point and recentre the ball when it leaves the right edge.

        Renamed from ``score``: that name is shadowed by the integer
        ``self.score`` attribute set in ``Player.__init__``, which made the
        method uncallable on instances (its only call site was already
        commented out).
        """
        if ball.x + ball.sizeX >= 800:
            self.score += 1
            ball.x = 400
            ball.y = 200

    def hitBall(self, ball):
        """Reflect the ball if it is about to cross this paddle's face.

        The rebound's vertical speed is proportional to how far from the
        paddle centre the ball struck.
        """
        if ball.x + ball.speedX <= self.sizeX + 10:
            if ball.y >= self.y and ball.y + ball.sizeY <= self.y + self.sizeY:
                ball.speedX = ball.speedX * -1
                ball.speedY = (((ball.y + ball.sizeY / 2) - (self.y + self.sizeY / 2)) / 20) * 5
#Specific Player
class Player2(Player):
    """Right paddle; defends x == 800 and scores when the ball exits left."""

    def __init__(self, x, y):
        Player.__init__(self, x, y)

    def scorePoint(self, ball):
        """Award a point and recentre the ball when it leaves the left edge.

        Renamed from ``score``: that name is shadowed by the integer
        ``self.score`` attribute set in ``Player.__init__``, which made the
        method uncallable on instances (its only call site was already
        commented out).
        """
        if ball.x <= 0:
            self.score += 1
            ball.x = 400
            ball.y = 200

    def hitBall(self, ball):
        """Reflect the ball if it is about to cross this paddle's face."""
        if ball.x + ball.sizeX + ball.speedX >= 790:
            if ball.y >= self.y and ball.y + ball.sizeY <= self.y + self.sizeY:
                ball.speedX = ball.speedX * -1
                ball.speedY = (((ball.y + ball.sizeY / 2) - (self.y + self.sizeY / 2)) / 20) * 5
def setup():
    """Create the two paddles and serve the first ball.

    Mutates the module-level ``players`` and ``balls`` lists in place, so no
    ``global`` declaration is needed.
    """
    players.append(Player1(10, 180))
    players.append(Player2(790, 180))
    balls.append(Ball(400, 200))
def player1Up(event):
    """Key handler: move the left paddle up (list is only read, not rebound)."""
    players[0].moveUp()


def player1Down(event):
    """Key handler: move the left paddle down."""
    players[0].moveDown()


def player2Up(event):
    """Key handler: move the right paddle up."""
    players[1].moveUp()


def player2Down(event):
    """Key handler: move the right paddle down."""
    players[1].moveDown()
def draw(canvas):
    '''Clear the canvas, have all game objects update and redraw, then set up the next draw.'''
    delay = 15  # milliseconds, so about 30 frames per second
    global balls, players
    canvas.delete(tkinter.ALL)
    # Each paddle is drawn, then tests every ball for a rebound off its face.
    for player in players:
        player.drawPlayer(canvas)
        for ball in balls:
            player.hitBall(ball)
            #player.score(ball)
    # Balls move after paddle interaction so a rebound takes effect this frame.
    for ball in balls:
        ball.collideY()
        ball.move()
        ball.drawBall(canvas)
    canvas.after(delay, draw, canvas)  # call this draw function with the canvas argument again after the delay
def keydown(event):
    """Debug key-press handler: record that a key went down and echo it."""
    global pressState
    # Fixed: the original assigned to a misspelled local ``presState``,
    # so the declared global flag was never actually updated.
    pressState = 1
    print(event.char)


def keyup(event):
    """Debug key-release handler: echo the released key.

    NOTE(review): this also sets the flag to 1, same as keydown -- for a
    press/release pair one would expect 0 here; kept as the original
    behaved, confirm the intent before changing.
    """
    global pressState
    pressState = 1
    print("hi")
    print(event.char)
if __name__ == '__main__':
    # Disable X11 keyboard auto-repeat so holding a key does not flood
    # press/release events (Linux/X11 only; harmless no-op elsewhere).
    os.system('xset r off')
    # create the graphics root and an 800x400 canvas
    root = tkinter.Tk()
    setup()
    canvas = tkinter.Canvas(root, width=800, height=400)
    canvas.pack()
    #canvas.bind("<KeyPress>", keydown)
    #canvas.bind("<KeyRelease>", keyup)
    # if the user presses a key, call our handlers
    pressState = 0
    # NOTE(review): player 1 moves up on key *release* of 'w' but down on
    # key *press* of 's' -- confirm this asymmetry is intentional.
    root.bind('<KeyRelease-w>', player1Up)
    root.bind('<Key-s>', player1Down)
    root.bind('<Up>', player2Up)
    root.bind('<Down>', player2Down)
    # start the draw loop
    draw(canvas)
    root.mainloop()  # keep the window open
# Read the lesson file, swap every occurrence of "python" for "C" (case-
# sensitive), and write the original followed by the modified text to a
# second file.
with open("./learning_python.txt", "r") as source_file:
    original_text = source_file.read()

replaced_text = original_text.replace("python", "C")

with open("./learning_python_copyed.txt", "w") as target_file:
    target_file.write(original_text + "\n" + replaced_text)
import os, sys
import gmsh
import numpy as np
# ========================================================= #
# === make__magnet routine === #
# ========================================================= #
def make__magnet():
    """Build the magnet geometry with gmsh, mesh it, and write the results.

    The whole run is driven by dat/parameter.conf: the geometry is either
    imported from a STEP file or generated procedurally, ports are
    optionally cut out of the yoke, and the meshed model is written under
    msh/ in the formats the config requests.
    """
    # ------------------------------------------------- #
    # --- [1] load config                           --- #
    # ------------------------------------------------- #
    cnsFile = "dat/parameter.conf"
    import nkUtilities.load__constants as lcn
    const = lcn.load__constants( inpFile=cnsFile )
    side = const["geometry.side"]  # "+" (right), "-" (left), or "+-"/"-+" (both)

    # ------------------------------------------------- #
    # --- [2] initialization of the gmsh            --- #
    # ------------------------------------------------- #
    gmsh.initialize()
    gmsh.option.setNumber( "General.Terminal", 1 )
    gmsh.option.setNumber( "Geometry.ToleranceBoolean", 1e-3 )
    gmsh.option.setNumber( "Mesh.Algorithm" , const["mesh.algorithm2D"] )
    gmsh.option.setNumber( "Mesh.Algorithm3D", const["mesh.algorithm3D"] )
    gmsh.option.setNumber( "Mesh.SubdivisionAlgorithm", const["mesh.subdivision"] )
    gmsh.model.add( "model" )

    # ------------------------------------------------- #
    # --- [3] Modeling                              --- #
    # ------------------------------------------------- #
    # Either import a prepared STEP model or generate the parts procedurally.
    if ( const["geometry.import_model"] ):
        stpFile = "msh/model.step"
        gmsh.model.occ.importShapes( stpFile )
        # Re-saving what was just imported would be redundant.
        const["geometry.save_step"] = False
    else:
        import generate__magnetParts as mag
        mag.generate__magnetParts( side=side )
    gmsh.model.occ.synchronize()
    gmsh.model.occ.removeAllDuplicates()
    gmsh.model.occ.synchronize()

    # ------------------------------------------------- #
    # --- [4] define port                           --- #
    # ------------------------------------------------- #
    if ( const["geometry.add_port"] ):
        # -- [4-1] save wo port model  -- #
        gmsh.write( "msh/model_woport.step" )
        # -- [4-2] define ports        -- #
        import nkGmshRoutines.define__ports as dfp
        inpFile = "dat/ports.conf"
        portNums = dfp.define__ports( inpFile=inpFile )
        gmsh.model.occ.synchronize()
        # -- [4-3] boolean cut from yoke -- #
        # Cut the port volumes out of the yoke volumes; tools are kept so
        # the intersection below can recover the hole volumes themselves.
        tools = [ (3,tool ) for tool in portNums ]
        targets = [ (3,target) for target in const["geometry.yoke_tobecut"] ]
        copy = gmsh.model.occ.copy( targets )
        yoke_p = gmsh.model.occ.cut ( targets, tools, removeObject=True, removeTool=False )
        holes = gmsh.model.occ.intersect( tools, copy, removeObject=True, removeTool=False )
        gmsh.model.occ.synchronize()
        gmsh.model.occ.removeAllDuplicates()
        gmsh.model.occ.synchronize()
        gmsh.write( "msh/model.geo_unrolled" )

    # ------------------------------------------------- #
    # --- [4] save model  (sic: numbered [4] twice) --- #
    # ------------------------------------------------- #
    if ( const["geometry.save_step"] ):
        gmsh.write( "msh/model.step" )

    # ------------------------------------------------- #
    # --- [5] Mesh settings                         --- #
    # ------------------------------------------------- #
    meshFile = "dat/mesh.conf"
    # Physical-group definitions differ per magnet side.
    if ( side == "+" ):
        physFile = "dat/phys_right.conf"
    elif ( side == "-" ):
        physFile = "dat/phys_left.conf"
    elif ( side in ["+-","-+"] ):
        physFile = "dat/phys_both.conf"
    else:
        sys.exit( "[make__magnet.py] side == {0} ??? ".format( side ) )
    if ( const["mesh.uniform"] ):
        # One characteristic length everywhere.
        gmsh.option.setNumber( "Mesh.CharacteristicLengthMin", 0.3 )
        gmsh.option.setNumber( "Mesh.CharacteristicLengthMax", 0.3 )
    else:
        import nkGmshRoutines.assign__meshsize as ams
        meshes = ams.assign__meshsize( meshFile=meshFile, physFile=physFile )
    if ( const["mesh.compound"] ):
        # Mark the surfaces shared by the gap (physical group 301) and pole
        # (302) volumes as a compound so they are meshed together.
        surfDim,voluDim = 2, 3
        physNum_gap = 301
        physNum_pole = 302
        volu_gap = gmsh.model.getEntitiesForPhysicalGroup( 3, physNum_gap )
        volu_pole = gmsh.model.getEntitiesForPhysicalGroup( 3, physNum_pole )
        dimtag_gap = [ (voluDim,vnum) for vnum in volu_gap ]
        dimtag_pole = [ (voluDim,vnum) for vnum in volu_pole ]
        surf_gap = gmsh.model.getBoundary( dimtag_gap )
        surf_pole = gmsh.model.getBoundary( dimtag_pole )
        surf_gap = [ dimtag[1] for dimtag in surf_gap ]
        surf_pole = [ dimtag[1] for dimtag in surf_pole ]
        surf_common = list( set( surf_gap ) & set( surf_pole ) )
        gmsh.model.mesh.setCompound( surfDim, surf_common )

    # ------------------------------------------------- #
    # --- [6] Meshing / save mesh                   --- #
    # ------------------------------------------------- #
    # -- [6-1] meshing          -- #
    gmsh.model.occ.synchronize()
    gmsh.model.mesh.generate(3)
    # -- [6-2] optimization     -- #
    if ( const["mesh.optimize"] ):
        gmsh.option.setNumber( "Mesh.OptimizeThreshold", const["mesh.opt_threshold"] )
        gmsh.model.mesh.optimize( "Netgen" )
        gmsh.model.mesh.optimize( "Relocate3D" )
    # -- [6-3] save mesh        -- #
    gmsh.option.setNumber( "Mesh.SaveElementTagType", 2 )
    gmsh.option.setNumber( "Mesh.BdfFieldFormat"    , 0 )
    if ( const["mesh.save_bdf"] ):
        gmsh.write( "msh/model.bdf" )
    if ( const["mesh.save_msh"] ):
        gmsh.write( "msh/model.msh" )

    # ------------------------------------------------- #
    # --- [7] post-process                          --- #
    # ------------------------------------------------- #
    gmsh.finalize()
    return()
# ========================================================= #
# === 実行部 === #
# ========================================================= #
if ( __name__=="__main__" ):
    make__magnet()

    # Optional yoke-fusing step, currently disabled; kept for reference.
    # import nkGmshRoutines.fuse__listed as fsl
    # if ( side == "+" ):
    #     fusFile = "dat/fuse_right.conf"
    # elif ( side == "-" ):
    #     fusFile = "dat/fuse_left.conf"
    # elif ( side in ["+-","-+"] ):
    #     fusFile = "dat/fuse_both.conf"
    # else:
    #     sys.exit( "[make__magnet.py] side == {0} ??? ".format( side ) )
    # fsl.fuse__listed( inpFile=fusFile )
|
class Goods:
    """A product whose selling price is its base price times a discount.

    Demonstrates the full property triple: getter, setter, and deleter.
    """

    def __init__(self):
        self.original_price = 100   # undiscounted base price
        self.discount = 0.8         # multiplier applied on every read

    @property
    def price(self):
        """Selling price: base price scaled by the discount."""
        return self.original_price * self.discount

    @price.setter
    def price(self, val):
        # Assigning to ``price`` actually rebinds the *base* price; the
        # discount is re-applied on the next read.
        self.original_price = val

    @price.deleter
    def price(self):
        # Removes the instance attribute; later reads of ``price`` raise
        # AttributeError.
        del self.original_price
obj = Goods()
# Read goes through the property getter: 100 * 0.8
print(obj.price)
# Write rebinds the base price, so the discount applies again on read.
obj.price = 200
print(obj.price)  # 200 * 0.8
# Deleter removes the underlying attribute entirely.
del obj.price
import pygame
from src.gameObject import GameObject
from src.sprite import SpriteSheet
class Character(GameObject):
    """A movable, animated game entity rendered from a sprite sheet."""

    def __init__(self, color, spritePath):
        super().__init__()
        self.color = color
        self.dimension = [40.0, 60.0]
        self.sprite = SpriteSheet(spritePath, 4, .25)
        self.sprite.position = self.position
        # Horizontal pixel offset of the current animation frame.
        self.spriteIndex = 0
        self.timeSinceSpriteUpdate = 0

    def update(self, tDelta, actions):
        """
        Updates the state of the object based on the following:
        1. The time elapsed since the previous frame (tDelta)
        2. A list of input actions to be interpreted by the update method.
        """
        # Translate by maxVel in every requested direction.
        # (maxVel is presumably set by GameObject -- confirm.)
        step = self.maxVel * tDelta
        if actions['moveUp']:
            self.position[1] -= step
        if actions['moveDown']:
            self.position[1] += step
        if actions['moveLeft']:
            self.position[0] -= step
        if actions['moveRight']:
            self.position[0] += step

        # Advance the animation roughly once per second, cycling through
        # the frame offsets 0, 40, 80, 120.
        self.timeSinceSpriteUpdate += tDelta
        if self.timeSinceSpriteUpdate > 1:
            self.timeSinceSpriteUpdate = 0
            self.spriteIndex = 0 if self.spriteIndex == 120 else self.spriteIndex + 40

        self.sprite.update(tDelta, self.position)

    def render(self, win):
        """Draw the sprite onto *win*."""
        self.sprite.render(win)
|
import numpy as np
from time import time
import random, string
from Model.model import Model
# Instantiate the LP model; log its configuration at solve start and end.
m = Model(print_obj={
    'start_conf': True,
    'end_conf': True
})
def get_by_key(arr, key):
    """Collect ``item[key]`` from every item in *arr* as a numpy array.

    :param arr: iterable of mappings that all contain *key*
    :param key: key to extract from each mapping
    :return: numpy.ndarray of the extracted values, in input order
    """
    # Comprehension replaces the original manual append loop.
    return np.array([item[key] for item in arr])
# Block model: each block lists predecessor indices that must be mined
# (fractionally) first, a profit 'p', and an extraction cost 'c'.
blocks = [
    {"pred": [], 'p': -100, 'c': 100},        # 0
    {"pred": [], 'p': -150, 'c': 200},        # 1
    {"pred": [0,1], 'p': -100, 'c': 100},     # 2
    {"pred": [0,1], 'p': 250, 'c': 300},      # 3
    {"pred": [1,2], 'p': 300, 'c': 100},      # 4
    {"pred": [2,3], 'p': 1000, 'c': 1000},    # 5
    {"pred": [4,5], 'p': 10000, 'c': 300},    # 6
    {"pred": [4,5,6], 'p': 15000, 'c': 1000}, # 7
    {"pred": [1,2,3], 'p': 15000, 'c': 1000}, # 8
    {"pred": [6,7], 'p': 15000, 'c': 3000},   # 9
]
max_c = 4000  # total extraction budget

# One continuous variable per block ("real+" gives x >= 0); the <= 1
# constraints below make this an LP relaxation of a 0/1 decision.
x = []
for i in range(len(blocks)):
    x.append(m.add_var("real+", name=i))
x = np.array(x)

# Objective: maximise total profit of the mined fractions.
m.maximize(sum(get_by_key(blocks,"p")*x))

# binary relaxation: cap each fraction at 1
for i in range(len(blocks)):
    m.add_constraint(x[i] <= 1)

# cost budget
m.add_constraint(sum(get_by_key(blocks,"c")*x) <= max_c)

# Precedence: a block can only be mined as far as all of its predecessors
# have been (n * x_i <= sum of predecessor fractions).
for i in range(len(blocks)):
    if len(blocks[i]["pred"]) > 0:
        m.add_constraint(len(blocks[i]["pred"])*x[i]-sum(x[blocks[i]["pred"]]) <= 0)

print("all added")
t0 = time()
m.solve(revised=True)
# m.solve()
print("Solved first in %f" % (time()-t0))
m.print_solution(slack=False)
|
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
import random
plt.style.use('fivethirtyeight')

data = pd.read_csv('insurance.csv')
# data.describe()
# data.info()
data.hist('charges')  # histogram of the charges column: frequency per value band
quant_95 = data['charges'].quantile(0.95)  # value not exceeded in 95% of cases
quant_05 = data['charges'].quantile(0.05)  # value not exceeded in 5% of cases
def corr_func(x, y, **kwargs):
    """Annotate the current axes with Pearson's r between *x* and *y*.

    Intended for seaborn's ``PairGrid.map_lower``, which passes extra
    keyword arguments (accepted and ignored here).
    """
    r = np.corrcoef(x, y)[0][1]
    ax = plt.gca()
    # Place the label at a fixed position in axes coordinates.
    ax.annotate("r = {:.2f}".format(r),
                xy=(.2, .8), xycoords=ax.transAxes,
                size=20)
# Pairs plot of the numeric columns: upper triangle scatterplots,
# diagonal histograms, lower triangle correlation labels + KDE contours.
grid = sns.PairGrid(data[['charges', 'age', 'bmi', 'children']])
grid.map_upper(plt.scatter, color='red', alpha=0.6)
grid.map_diag(plt.hist, color='red', edgecolor='black')
grid.map_lower(corr_func)
grid.map_lower(sns.kdeplot, cmap=plt.cm.Reds)

# Charges vs age, coloured by smoker status.
# NOTE(review): positional x/y and the ``size`` argument are deprecated in
# newer seaborn (use x=..., y=..., height=...); confirm installed version.
sns.lmplot('age', 'charges', hue='smoker', data=data,
           scatter_kws={'alpha': 0.8, 's': 60}, fit_reg=False, size=12, aspect=1.2)
plt.xlabel("Age", size=28), plt.ylabel('Charges', size=28)
# plt.show()

# Hand-tuned random "prediction" of the charge from age and smoking status.
age = int(input("Age: "))
smoker_check = input("Smoker: ")
minbet = 1758     # lower bound of the low charge band at age 18
semi_bet = 3500   # boundary between the low and mid bands
maxbet = 27700    # upper bound of the top band
tube = 18         # age the per-year increments are counted from
if "yes" in smoker_check:
    # Smokers: shift all bands upward and introduce a fourth boundary.
    minbet += 11500
    semi_bet += 16500
    maxbet += 11000
    semi_maxbet = 32500
    # Widen the bands for every year of age above 18.
    while tube < age:
        minbet += 250
        semi_bet += 250
        tube += 1
        maxbet += 235
        semi_maxbet += 310
    # Draw the charge from one of the three bands at fixed probabilities.
    x = random.random()
    if x < 0.45:
        maybe_bet = random.uniform(minbet, semi_bet)
    elif x > 0.55:
        maybe_bet = random.uniform(semi_maxbet, maxbet)
    else:
        maybe_bet = random.uniform(semi_bet, semi_maxbet)
else:
    # Non-smokers: only widen the two bands per year of age above 18.
    while tube < age:
        minbet += 250
        semi_bet += 250
        tube += 1
        maxbet += 210
    # Mostly draw from the low band; occasionally from the high one.
    x = random.random()
    if x > 0.15:
        maybe_bet = random.uniform(minbet, semi_bet)
    else:
        maybe_bet = random.uniform(semi_bet, maxbet)
print(maybe_bet)
# Highest impact factors: age, smoker
# Quality: 64/100 -> 64%
# A result counts as positive if it fell within 2000 of the table value
'''
Created on Nov 3, 2015
@author: Jonathan
'''
def clubsize(names, club):
    """Return how many distinct entries appear in both *names* and *club*.

    Duplicates are ignored: this is the size of the set intersection.
    """
    return len(set(names).intersection(club))
if __name__ == '__main__':
    # No demo/driver code yet.
    pass
# Read an integer N; for every non-negative integer i < N, print i^2.
if __name__ == '__main__':
    n = int(raw_input())  # Python 2 style input, kept as in the original
    value = 0
    while value < n:
        print(value * value)
        value += 1
|
#!/usr/bin/env python
# Jamie Bodeau
# Imports -------------------------------------------------
import sys
# Classes -------------------------------------------------
# Functions -----------------------------------------------
# Main Execution ------------------------------------------
def count_steps(offsets):
    """Jump through *offsets* until the index leaves the list; return steps.

    Mutates *offsets* in place: each visited offset is decremented when it
    is three or more and incremented otherwise, after which the jump of the
    pre-update size is taken (the two statements per branch are equivalent
    to "jump by the old value, then adjust it").
    """
    index = 0
    steps = 0
    while 0 <= index < len(offsets):
        if offsets[index] >= 3:
            offsets[index] -= 1
            index += offsets[index] + 1  # net jump equals the pre-update value
        else:
            offsets[index] += 1
            index += offsets[index] - 1  # net jump equals the pre-update value
        steps += 1
    return steps


if __name__ == "__main__":
    # Read whitespace-separated integers from stdin.
    nums = [int(num) for line in sys.stdin for num in line.split()]
    # print() works in both Python 2 and 3; the original bare
    # ``print steps`` statement was a SyntaxError under Python 3.
    print(count_steps(nums))
|
# Read the two legs of a right triangle and print the hypotenuse.
leg_a = int(input())
leg_b = int(input())
hypotenuse = (leg_a ** 2 + leg_b ** 2) ** 0.5
print(hypotenuse)
# Generated by Django 3.0.8 on 2020-07-17 14:00
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration that drops the obsolete ``User`` model."""

    # Latest migrations of these apps must be applied first -- presumably
    # because they removed their references to User (confirm).
    dependencies = [
        ('reviews', '0004_auto_20200717_2200'),
        ('message', '0002_auto_20200717_2200'),
        ('profiles', '0008_auto_20200717_2200'),
    ]

    operations = [
        migrations.DeleteModel(
            name='User',
        ),
    ]
|
import importlib

import torch
import torch.utils.data as Data
import json
import os
from PIL import Image

# The transform helper lives in a module whose name contains a hyphen
# ("git_ssd-transform"), which a plain ``import`` statement cannot express
# (the original line was a SyntaxError). Load it dynamically instead; the
# rest of the file keeps using the name ``ssd_transform``.
ssd_transform = importlib.import_module("git_ssd-transform")

# Building a custom dataset (translated from the original Chinese notes):
# - __len__ must return the number of samples in the dataset.
# - __getitem__ must return the i-th image with its bboxes and labels,
#   based on the preprocessed JSON files.
# - Dataset is an abstract class: subclasses implement __getitem__ (take
#   an index, return one sample) and __len__ (return the sample count).
class PascalVOCDataset(Data.Dataset):
    """
    A PyTorch Dataset, used in a PyTorch DataLoader to create batches.
    """

    def __init__(self, data_folder, split, keep_difficult=False):
        """
        :param data_folder: folder where the data files are stored
        :param split: split, one of TRAIN or TEST
        :param keep_difficult: keep or discard objects flagged as difficult to detect
        """
        # The folder is expected to contain five preprocessed files:
        #   train_images.json, train_objects.json, label_map.json,
        #   test_images.json, test_objects.json
        self.split = split.upper()  # normalise to upper case
        assert self.split in {'TRAIN', 'TEST'}  # validate and raise otherwise
        self.data_folder = data_folder
        self.keep_difficult = keep_difficult

        # Read the data files (each JSON file was serialised from a list).
        with open(os.path.join(data_folder, self.split + '_images.json'), 'r') as j:
            self.images = json.load(j)  # json.load reads a file; json.loads reads a string
        with open(os.path.join(data_folder, self.split + '_objects.json'), 'r') as j:
            self.objects = json.load(j)
        assert len(self.images) == len(self.objects)

    def __getitem__(self, i):
        # Read the image from disk.
        image = Image.open(self.images[i], mode='r')
        image = image.convert('RGB')  # force a 3-channel reading mode

        # Read the ground-truth data for this image's objects.
        objects = self.objects[i]
        boxes = torch.FloatTensor(objects['boxes'])  # (n_objects, 4): per-object box coordinates
        labels = torch.LongTensor(objects['labels'])  # (n_objects,): integer class ids
        difficulties = torch.ByteTensor(objects['difficulties'])  # (n_objects,): 0/1 flags

        # Optionally drop the objects marked as difficult.
        if not self.keep_difficult:
            # Expression-based masking, valid only for numpy arrays / tensors.
            # NOTE(review): indexing with the ByteTensor (1 - difficulties)
            # is deprecated/rejected on newer PyTorch versions -- confirm the
            # installed version still accepts it.
            boxes = boxes[1 - difficulties]
            labels = labels[1 - difficulties]
            difficulties = difficulties[1 - difficulties]

        # Apply the transforms.
        # NOTE(review): ssd_transform is imported as a module at the top of
        # the file yet is called like a function here; the intended entry
        # point is probably a function inside that module -- confirm.
        image, boxes, labels, difficulties = ssd_transform(image, boxes, labels, difficulties, split=self.split)
        return image, boxes, labels, difficulties
        # Caveat from the original author: the returned image is still PIL data.

    def __len__(self):
        return len(self.images)

    def collate_fn(self, batch):
        """
        Collate function (pass it to DataLoader): images contain different
        numbers of objects, so the variable-size tensors are grouped into
        plain lists while the images are stacked into a single tensor.
        Note this function need not live in this class; it could be defined
        standalone.

        batch: an iterable of N items obtained from __getitem__()
        return: a tensor of batch images, plus lists of the variable-size
        boxes / labels / difficulties tensors
        """
        images = list()
        boxes = list()
        labels = list()
        difficulties = list()
        for b in batch:
            images.append(b[0])
            boxes.append(b[1])
            labels.append(b[2])
            difficulties.append(b[3])
        images = torch.stack(images, dim=0)
        return images, boxes, labels, difficulties
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.